From 2fba7bc6d7dca98c250a87b0e16e7e0e8c660434 Mon Sep 17 00:00:00 2001 From: Rafael Pastrana Date: Wed, 9 Oct 2024 14:42:16 -0400 Subject: [PATCH] [Opt] Exposed `Powell` and `NelderMead` gradient-free optimizers --- CHANGELOG.md | 2 ++ .../optimization/optimizers/gradient_free.py | 24 +++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 src/jax_fdm/optimization/optimizers/gradient_free.py diff --git a/CHANGELOG.md b/CHANGELOG.md index f9884f8..437a513 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +- Wrapped two gradient-free optimizers from scipy: Nelder-Mead and Powell. They are available as `jax_fdm.optimizers.NelderMead` and `jax_fdm.optimizers.Powell`, respectively. +- Linked two evolutionary optimizers from scipy. They are available as `jax_fdm.optimizers.DualAnnealing` and `jax_fdm.optimizers.DifferentialEvolution`. - Added support for kwargs in `LossPlotter.plot()`. The kwargs control the parameters of the equilibrium model used to plot the loss history. - Added `VertexSupportParameter.index()`. This change might appear redundant, but it was necessary to deal with the method resolution order of the parent classes of `VertexSupportParameter`. - Added `VertexGroupSupportParameter.index()` for similar reasons as the listed above. diff --git a/src/jax_fdm/optimization/optimizers/gradient_free.py b/src/jax_fdm/optimization/optimizers/gradient_free.py new file mode 100644 index 0000000..ba47668 --- /dev/null +++ b/src/jax_fdm/optimization/optimizers/gradient_free.py @@ -0,0 +1,24 @@ +""" +A collection of scipy-powered, gradient-free optimizers.
+""" +from jax_fdm.optimization.optimizers import Optimizer + + +# ========================================================================== +# Optimizers +# ========================================================================== + +class Powell(Optimizer): + """ + The modified Powell algorithm for gradient-free optimization with box constraints. + """ + def __init__(self, **kwargs): + super().__init__(name="Powell", disp=0, **kwargs) + + +class NelderMead(Optimizer): + """ + The Nelder-Mead gradient-free optimizer with box constraints. + """ + def __init__(self, **kwargs): + super().__init__(name="Nelder-Mead", disp=0, **kwargs)