
Commit 286c8b2 (parent: bb45e0e)

Rename GA -> GD

1 file changed (3 additions & 1 deletion): popt/update_schemes/enopt.py
@@ -67,7 +67,7 @@ def __init__(self, fun, x, args, jac, hess, bounds=None, **options):
         - beta: momentum coefficient for running accelerated optimization (default 0.0)
         - alpha_maxiter: maximum number of backtracing trials (default 5)
         - resample: number indicating how many times resampling is tried if no improvement is found
-        - optimizer: 'GA' (gradient accent) or Adam (default 'GA')
+        - optimizer: 'GD' (gradient descent) or Adam (default 'GD')
         - nesterov: use Nesterov acceleration if true (default false)
         - hessian: use Hessian approximation (if the algorithm permits use of Hessian) (default false)
         - normalize: normalize the gradient if true (default true)
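For orientation, a minimal construction sketch assuming the __init__ signature and the option names documented in this hunk; the toy objective, gradient, starting point, and chosen option values are illustrative stand-ins, not popt fixtures:

import numpy as np
from popt.update_schemes.enopt import EnOpt  # module path taken from this commit

# Toy quadratic standing in for a real ensemble objective.
def fun(x, *args):
    return float(np.sum(x ** 2))

def jac(x, *args):
    return 2.0 * x

x0 = np.array([1.0, -2.0])

# After this commit the documented option value is 'GD' (gradient descent)
# rather than 'GA'. Note that, per the code comment in the second hunk,
# constructing EnOpt "self-ignites": the run loop starts on instantiation.
EnOpt(fun, x0, (), jac, None, bounds=None,
      optimizer='GD', alpha_maxiter=5, normalize=True)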
@@ -138,6 +138,8 @@ def __set__variable(var_name=None, defalut=None):
             self.optimizer = opt.AdaMax(self.alpha, self.beta)
         elif optimizer == 'Steihaug':
             self.optimizer = opt.Steihaug(delta0=3.0)
+        else:
+            raise ValueError(f'Optimizer {optimizer} not recognized for EnOpt!')
 
         # The EnOpt class self-ignites, and it is possible to send the EnOpt class as a callale method to scipy.minimize
         self.run_loop()  # run_loop resides in the Optimization class (super)
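The added else branch makes the optimizer dispatch fail fast: an unrecognized name now raises a ValueError during setup instead of leaving self.optimizer unset and surfacing later as an AttributeError mid-run. A reduced, self-contained sketch of the same guard-clause pattern (the stand-in classes are hypothetical, not popt's opt module):

class _GD:        # stand-in for a gradient-descent optimizer
    pass

class _AdaMax:    # stand-in for opt.AdaMax
    pass

def pick_optimizer(name):
    # Mirrors the if/elif/else chain above: every recognized name
    # assigns an optimizer; anything else raises immediately.
    if name == 'GD':
        optimizer = _GD()
    elif name == 'AdaMax':
        optimizer = _AdaMax()
    else:
        raise ValueError(f'Optimizer {name} not recognized for EnOpt!')
    return optimizer

pick_optimizer('GD')        # fine
try:
    pick_optimizer('GA')    # rejected at setup, not mid-run
except ValueError as err:
    print(err)              # Optimizer GA not recognized for EnOpt!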
