Coverage for jetgp/hyperparameter_optimizers/cobyla.py: 94%

32 statements  

« prev     ^ index     » next       coverage.py v7.10.7, created at 2026-03-31 11:46 -0500

1import numpy as np 

2from scipy.optimize import minimize 

3 

def cobyla(func, lb, ub, **kwargs):
    """
    Minimize ``func`` with COBYLA, enforcing box bounds as inequality
    constraints and using multiple random restarts.

    Parameters
    ----------
    func : callable
        Objective function to minimize; called as ``func(x)``.
    lb, ub : array-like
        Lower and upper bounds, one entry per dimension. Must have the
        same shape.
    kwargs : dict
        Optional arguments:

        - x0 : array-like, initial guess used for the first restart.
        - n_restart_optimizer : int, number of restarts (default=10);
          restarts after the first start from uniform random points in
          ``[lb, ub]``.
        - debug : bool, print intermediate results (default=False).
        - Any COBYLA option: maxiter, rhobeg, catol, f_target, disp.
        - Remaining keyword arguments are forwarded verbatim to
          ``scipy.optimize.minimize`` (e.g. ``args``, ``tol``).

    Returns
    -------
    best_x : ndarray or None
        Best point found over all restarts. ``None`` only when no
        restart ran, i.e. ``n_restart_optimizer < 1``.
    best_val : float
        Objective value at ``best_x`` (``np.inf`` if no restart ran).

    Raises
    ------
    ValueError
        If ``lb`` and ``ub`` do not have the same shape.
    """
    x0 = kwargs.pop("x0", None)
    n_restart_optimizer = kwargs.pop("n_restart_optimizer", 10)
    debug = kwargs.pop("debug", False)

    # Split COBYLA solver options out of the remaining kwargs; whatever
    # is left is passed straight through to scipy.optimize.minimize.
    options = {
        key: kwargs.pop(key)
        for key in ("maxiter", "rhobeg", "catol", "f_target", "disp")
        if key in kwargs
    }

    lb = np.asarray(lb, dtype=float)
    ub = np.asarray(ub, dtype=float)
    if lb.shape != ub.shape:
        # A mismatch would otherwise silently constrain only the first
        # len(lb) dimensions.
        raise ValueError("lb and ub must have the same shape")

    # COBYLA has no native bounds support (pre scipy 1.11), so express the
    # box as 2*d inequality constraints (fun(x) >= 0). ``i=i`` binds the
    # loop index at definition time, avoiding the late-binding-closure
    # pitfall. Ordering (lower then upper, per dimension) matches the
    # original implementation.
    constraints = []
    for i in range(lb.size):
        constraints.append({"type": "ineq", "fun": lambda x, i=i: x[i] - lb[i]})
        constraints.append({"type": "ineq", "fun": lambda x, i=i: ub[i] - x[i]})

    best_x = None
    best_val = np.inf

    for i in range(n_restart_optimizer):
        # First restart honors the user-supplied x0 (if any); the rest
        # start from uniform random points inside the box.
        if x0 is not None and i == 0:
            x_init = np.asarray(x0, dtype=float)
        else:
            x_init = np.random.uniform(lb, ub)

        res = minimize(
            func,
            x_init,
            method="COBYLA",
            constraints=constraints,
            options=options,
            **kwargs,  # any extra scipy.optimize.minimize kwargs
        )

        # The ``best_x is None`` guard keeps the first result even when
        # res.fun is NaN: without it, NaN on every restart (NaN < inf is
        # False) would silently return (None, inf).
        if best_x is None or res.fun < best_val:
            best_val = res.fun
            best_x = res.x

        if debug:
            print(f"[COBYLA] Restart {i+1}/{n_restart_optimizer} -> best_val={best_val}")

    return best_x, best_val