diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index f50f0b740..dd05b6a1f 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -70,9 +70,10 @@ jobs:
micromamba activate optimagic
pytest -m "not slow and not jax"
run-tests-with-old-pandas:
- # This job is only for testing if optimagic works with older pandas versions, as
- # many pandas functions we use will be deprecated in pandas 3. optimagic's behavior
- # for older verions is handled in src/optimagic/compat.py.
+ # This job is only for testing if optimagic works with pandas<2, as many pandas
+ # functions we use will be deprecated in pandas 3. optimagic's behavior for older
+    # versions is handled in src/optimagic/compat.py. For compatibility with pandas<2
+    # we have to restrict numpy<2.
name: Run tests for ${{ matrix.os}} on ${{ matrix.python-version }} with pandas 1
runs-on: ${{ matrix.os }}
strategy:
@@ -96,6 +97,32 @@ jobs:
run: |
micromamba activate optimagic
pytest -m "not slow and not jax"
+ run-tests-with-old-numpy:
+ # This job is only for testing if optimagic works with numpy<2. Because we already
+ # test pandas<2 with numpy<2, in this environment we restrict pandas>=2.
+ name: Run tests for ${{ matrix.os}} on ${{ matrix.python-version }} with numpy 1
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os:
+ - ubuntu-latest
+ python-version:
+ - '3.10'
+ steps:
+ - uses: actions/checkout@v4
+ - name: create build environment
+ uses: mamba-org/setup-micromamba@v1
+ with:
+ environment-file: ./.tools/envs/testenv-numpy.yml
+ cache-environment: true
+ create-args: |
+ python=${{ matrix.python-version }}
+ - name: run pytest
+ shell: bash -l {0}
+ run: |
+ micromamba activate optimagic
+ pytest -m "not slow and not jax"
code-in-docs:
name: Run code snippets in documentation
runs-on: ubuntu-latest
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b22b11601..aacd207c6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ repos:
- id: check-useless-excludes
# - id: identity # Prints all files passed to pre-commits. Debugging.
- repo: https://github.com/lyz-code/yamlfix
- rev: 1.16.0
+ rev: 1.17.0
hooks:
- id: yamlfix
exclude: tests/optimagic/optimizers/_pounders/fixtures
@@ -19,7 +19,7 @@ repos:
always_run: true
require_serial: true
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.6.0
+ rev: v5.0.0
hooks:
- id: check-added-large-files
args:
@@ -56,7 +56,7 @@ repos:
- id: yamllint
exclude: tests/optimagic/optimizers/_pounders/fixtures
- repo: https://github.com/PyCQA/docformatter
- rev: v1.7.5
+ rev: eb1df34
hooks:
- id: docformatter
args:
@@ -68,7 +68,7 @@ repos:
- --blank
exclude: src/optimagic/optimization/algo_options.py
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.5.7
+ rev: v0.7.2
hooks:
# Run the linter.
- id: ruff
@@ -85,7 +85,7 @@ repos:
- pyi
- jupyter
- repo: https://github.com/executablebooks/mdformat
- rev: 0.7.17
+ rev: 0.7.18
hooks:
- id: mdformat
additional_dependencies:
@@ -97,7 +97,7 @@ repos:
- '88'
files: (README\.md)
- repo: https://github.com/executablebooks/mdformat
- rev: 0.7.17
+ rev: 0.7.18
hooks:
- id: mdformat
additional_dependencies:
@@ -108,7 +108,7 @@ repos:
- '88'
files: (docs/.)
- repo: https://github.com/kynan/nbstripout
- rev: 0.7.1
+ rev: 0.8.0
hooks:
- id: nbstripout
exclude: |
@@ -119,12 +119,12 @@ repos:
args:
- --drop-empty-cells
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.11.1
+ rev: v1.13.0
hooks:
- id: mypy
files: src|tests
additional_dependencies:
- - numpy<2.0
+ - numpy
- packaging
- pandas-stubs
- sqlalchemy-stubs
diff --git a/.tools/envs/testenv-linux.yml b/.tools/envs/testenv-linux.yml
index fa5ece402..1e1c0846f 100644
--- a/.tools/envs/testenv-linux.yml
+++ b/.tools/envs/testenv-linux.yml
@@ -8,7 +8,7 @@ dependencies:
- jax
- cyipopt>=1.4.0 # dev, tests
- pygmo>=2.19.0 # dev, tests
- - nlopt # dev, tests
+ - nlopt # dev, tests, docs
- pip # dev, tests, docs
- pytest # dev, tests
- pytest-cov # tests
@@ -16,19 +16,19 @@ dependencies:
- statsmodels # dev, tests
- cloudpickle # run, tests
- joblib # run, tests
- - numpy<2.0 # run, tests
+ - numpy >= 2 # run, tests
- pandas # run, tests
- plotly # run, tests
- pybaum >= 0.1.2 # run, tests
- scipy>=1.2.1 # run, tests
- sqlalchemy # run, tests
- seaborn # dev, tests
- - mypy>=1.11 # dev, tests
+ - mypy=1.13 # dev, tests
- pyyaml # dev, tests
- jinja2 # dev, tests
- annotated-types # dev, tests
- pip: # dev, tests, docs
- - DFO-LS # dev, tests
+ - DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- kaleido # dev, tests
@@ -37,4 +37,5 @@ dependencies:
- types-openpyxl # dev, tests
- types-jinja2 # dev, tests
- sqlalchemy-stubs # dev, tests
+ - sphinxcontrib-mermaid # dev, tests, docs
- -e ../../
diff --git a/.tools/envs/testenv-numpy.yml b/.tools/envs/testenv-numpy.yml
new file mode 100644
index 000000000..34681b9ba
--- /dev/null
+++ b/.tools/envs/testenv-numpy.yml
@@ -0,0 +1,38 @@
+---
+name: optimagic
+channels:
+ - conda-forge
+ - nodefaults
+dependencies:
+ - pandas>=2
+ - numpy<2
+ - cyipopt>=1.4.0 # dev, tests
+ - pygmo>=2.19.0 # dev, tests
+ - nlopt # dev, tests, docs
+ - pip # dev, tests, docs
+ - pytest # dev, tests
+ - pytest-cov # tests
+ - pytest-xdist # dev, tests
+ - statsmodels # dev, tests
+ - cloudpickle # run, tests
+ - joblib # run, tests
+ - plotly # run, tests
+ - pybaum >= 0.1.2 # run, tests
+ - scipy>=1.2.1 # run, tests
+ - sqlalchemy # run, tests
+ - seaborn # dev, tests
+ - mypy=1.13 # dev, tests
+ - pyyaml # dev, tests
+ - jinja2 # dev, tests
+ - annotated-types # dev, tests
+ - pip: # dev, tests, docs
+ - DFO-LS>=1.5.3 # dev, tests
+ - Py-BOBYQA # dev, tests
+ - fides==0.7.4 # dev, tests
+ - kaleido # dev, tests
+ - types-cffi # dev, tests
+ - types-openpyxl # dev, tests
+ - types-jinja2 # dev, tests
+ - sqlalchemy-stubs # dev, tests
+ - sphinxcontrib-mermaid # dev, tests, docs
+ - -e ../../
diff --git a/.tools/envs/testenv-others.yml b/.tools/envs/testenv-others.yml
index 4467a19b0..444205593 100644
--- a/.tools/envs/testenv-others.yml
+++ b/.tools/envs/testenv-others.yml
@@ -6,7 +6,7 @@ channels:
dependencies:
- cyipopt>=1.4.0 # dev, tests
- pygmo>=2.19.0 # dev, tests
- - nlopt # dev, tests
+ - nlopt # dev, tests, docs
- pip # dev, tests, docs
- pytest # dev, tests
- pytest-cov # tests
@@ -14,19 +14,19 @@ dependencies:
- statsmodels # dev, tests
- cloudpickle # run, tests
- joblib # run, tests
- - numpy<2.0 # run, tests
+ - numpy >= 2 # run, tests
- pandas # run, tests
- plotly # run, tests
- pybaum >= 0.1.2 # run, tests
- scipy>=1.2.1 # run, tests
- sqlalchemy # run, tests
- seaborn # dev, tests
- - mypy>=1.11 # dev, tests
+ - mypy=1.13 # dev, tests
- pyyaml # dev, tests
- jinja2 # dev, tests
- annotated-types # dev, tests
- pip: # dev, tests, docs
- - DFO-LS # dev, tests
+ - DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- kaleido # dev, tests
@@ -35,4 +35,5 @@ dependencies:
- types-openpyxl # dev, tests
- types-jinja2 # dev, tests
- sqlalchemy-stubs # dev, tests
+ - sphinxcontrib-mermaid # dev, tests, docs
- -e ../../
diff --git a/.tools/envs/testenv-pandas.yml b/.tools/envs/testenv-pandas.yml
index 757d5f39a..ff4996dc5 100644
--- a/.tools/envs/testenv-pandas.yml
+++ b/.tools/envs/testenv-pandas.yml
@@ -4,10 +4,11 @@ channels:
- conda-forge
- nodefaults
dependencies:
- - pandas<2.0.0
+ - pandas<2
+ - numpy<2
- cyipopt>=1.4.0 # dev, tests
- pygmo>=2.19.0 # dev, tests
- - nlopt # dev, tests
+ - nlopt # dev, tests, docs
- pip # dev, tests, docs
- pytest # dev, tests
- pytest-cov # tests
@@ -15,18 +16,17 @@ dependencies:
- statsmodels # dev, tests
- cloudpickle # run, tests
- joblib # run, tests
- - numpy<2.0 # run, tests
- plotly # run, tests
- pybaum >= 0.1.2 # run, tests
- scipy>=1.2.1 # run, tests
- sqlalchemy # run, tests
- seaborn # dev, tests
- - mypy>=1.11 # dev, tests
+ - mypy=1.13 # dev, tests
- pyyaml # dev, tests
- jinja2 # dev, tests
- annotated-types # dev, tests
- pip: # dev, tests, docs
- - DFO-LS # dev, tests
+ - DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- kaleido # dev, tests
@@ -34,4 +34,5 @@ dependencies:
- types-openpyxl # dev, tests
- types-jinja2 # dev, tests
- sqlalchemy-stubs # dev, tests
+ - sphinxcontrib-mermaid # dev, tests, docs
- -e ../../
diff --git a/.tools/update_envs.py b/.tools/update_envs.py
index 0c773a5b8..a62a44e3d 100644
--- a/.tools/update_envs.py
+++ b/.tools/update_envs.py
@@ -33,20 +33,27 @@ def main():
## test environment others
test_env_others = deepcopy(test_env)
- ## test environment for pandas version 1
+ ## test environment for pandas version < 2 (requires numpy < 2)
test_env_pandas = deepcopy(test_env)
- test_env_pandas = [line for line in test_env_pandas if "pandas" not in line]
- test_env_pandas.insert(_insert_idx, " - pandas<2.0.0")
-
- # create docs testing environment
-
+ for pkg in ["numpy", "pandas"]:
+ test_env_pandas = [line for line in test_env_pandas if pkg not in line]
+ test_env_pandas.insert(_insert_idx, f" - {pkg}<2")
+
+ ## test environment for numpy version < 2 (with pandas >= 2)
+ test_env_numpy = deepcopy(test_env)
+ for pkg in ["numpy", "pandas"]:
+ test_env_numpy = [line for line in test_env_numpy if pkg not in line]
+ test_env_numpy.insert(_insert_idx, " - numpy<2")
+ test_env_numpy.insert(_insert_idx, " - pandas>=2")
+
+ # test environment for documentation
docs_env = [line for line in lines if _keep_line(line, "docs")]
docs_env.append(" - -e ../../") # add local installation
# write environments
for name, env in zip(
- ["linux", "others", "pandas"],
- [test_env_linux, test_env_others, test_env_pandas],
+ ["linux", "others", "pandas", "numpy"],
+ [test_env_linux, test_env_others, test_env_pandas, test_env_numpy],
strict=False,
):
# Specify newline to avoid wrong line endings on Windows.
diff --git a/docs/rtd_environment.yml b/docs/rtd_environment.yml
index 8407ede25..1929ec914 100644
--- a/docs/rtd_environment.yml
+++ b/docs/rtd_environment.yml
@@ -27,6 +27,7 @@ dependencies:
- patsy
- joblib
- plotly
+ - nlopt
- annotated-types
- pip:
- ../
@@ -38,3 +39,5 @@ dependencies:
- types-openpyxl # dev, tests
- types-jinja2 # dev, tests
- sqlalchemy-stubs # dev, tests
+ - sphinxcontrib-mermaid # dev, tests, docs
+ - fides==0.7.4 # dev, tests
diff --git a/docs/source/algorithms.md b/docs/source/algorithms.md
index 363ce5faf..006e25402 100644
--- a/docs/source/algorithms.md
+++ b/docs/source/algorithms.md
@@ -1083,6 +1083,8 @@ install each of them separately:
```{eval-rst}
.. dropdown:: nag_dfols
+    *Note*: We recommend installing `DFO-LS` version 1.5.3 or higher. Version 1.5.0 or lower also works, but versions `1.5.1` and `1.5.2` contain bugs that can lead to errors being raised.
+
.. code-block::
"nag_dfols"
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 38a4c6bb4..bf50593c3 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -48,6 +48,7 @@
"sphinxcontrib.bibtex",
"sphinx_panels",
"sphinx_design",
+ "sphinxcontrib.mermaid",
]
myst_enable_extensions = [
@@ -55,6 +56,9 @@
"dollarmath",
"html_image",
]
+myst_fence_as_directive = ["mermaid"]
+
+
copybutton_prompt_text = ">>> "
copybutton_only_copy_prompt_lines = False
diff --git a/docs/source/estimagic/explanation/bootstrap_montecarlo_comparison.ipynb b/docs/source/estimagic/explanation/bootstrap_montecarlo_comparison.ipynb
index 8162fc476..7cd6f36b2 100644
--- a/docs/source/estimagic/explanation/bootstrap_montecarlo_comparison.ipynb
+++ b/docs/source/estimagic/explanation/bootstrap_montecarlo_comparison.ipynb
@@ -42,13 +42,14 @@
"metadata": {},
"outputs": [],
"source": [
- "import estimagic as em\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import pandas as pd\n",
"import scipy\n",
"import statsmodels.api as sm\n",
- "from joblib import Parallel, delayed"
+ "from joblib import Parallel, delayed\n",
+ "\n",
+ "import estimagic as em"
]
},
{
diff --git a/docs/source/estimagic/tutorials/bootstrap_overview.ipynb b/docs/source/estimagic/tutorials/bootstrap_overview.ipynb
index 7d5121f41..ed669c4b1 100644
--- a/docs/source/estimagic/tutorials/bootstrap_overview.ipynb
+++ b/docs/source/estimagic/tutorials/bootstrap_overview.ipynb
@@ -19,11 +19,12 @@
"metadata": {},
"outputs": [],
"source": [
- "import estimagic as em\n",
"import numpy as np\n",
"import pandas as pd\n",
"import seaborn as sns\n",
- "import statsmodels.api as sm"
+ "import statsmodels.api as sm\n",
+ "\n",
+ "import estimagic as em"
]
},
{
diff --git a/docs/source/estimagic/tutorials/estimation_tables_overview.ipynb b/docs/source/estimagic/tutorials/estimation_tables_overview.ipynb
index 632fd9c29..f3a99aec6 100644
--- a/docs/source/estimagic/tutorials/estimation_tables_overview.ipynb
+++ b/docs/source/estimagic/tutorials/estimation_tables_overview.ipynb
@@ -24,11 +24,12 @@
"outputs": [],
"source": [
"# Make necessary imports\n",
- "import estimagic as em\n",
"import pandas as pd\n",
"import statsmodels.formula.api as sm\n",
- "from estimagic.config import EXAMPLE_DIR\n",
- "from IPython.core.display import HTML"
+ "from IPython.core.display import HTML\n",
+ "\n",
+ "import estimagic as em\n",
+ "from estimagic.config import EXAMPLE_DIR"
]
},
{
diff --git a/docs/source/estimagic/tutorials/likelihood_overview.ipynb b/docs/source/estimagic/tutorials/likelihood_overview.ipynb
index 46820fe1b..958e02f2e 100644
--- a/docs/source/estimagic/tutorials/likelihood_overview.ipynb
+++ b/docs/source/estimagic/tutorials/likelihood_overview.ipynb
@@ -34,11 +34,12 @@
"metadata": {},
"outputs": [],
"source": [
- "import estimagic as em\n",
"import numpy as np\n",
"import pandas as pd\n",
"from scipy.stats import norm\n",
"\n",
+ "import estimagic as em\n",
+ "\n",
"rng = np.random.default_rng(seed=0)"
]
},
diff --git a/docs/source/estimagic/tutorials/msm_overview.ipynb b/docs/source/estimagic/tutorials/msm_overview.ipynb
index 0687884f9..e2c0fc104 100644
--- a/docs/source/estimagic/tutorials/msm_overview.ipynb
+++ b/docs/source/estimagic/tutorials/msm_overview.ipynb
@@ -42,10 +42,11 @@
"metadata": {},
"outputs": [],
"source": [
- "import estimagic as em\n",
"import numpy as np\n",
"import pandas as pd\n",
"\n",
+ "import estimagic as em\n",
+ "\n",
"rng = np.random.default_rng(seed=0)"
]
},
diff --git a/docs/source/explanation/why_optimization_is_hard.ipynb b/docs/source/explanation/why_optimization_is_hard.ipynb
index caf6a8519..57bc18b25 100644
--- a/docs/source/explanation/why_optimization_is_hard.ipynb
+++ b/docs/source/explanation/why_optimization_is_hard.ipynb
@@ -28,8 +28,9 @@
"outputs": [],
"source": [
"import numpy as np\n",
- "import optimagic as om\n",
- "import seaborn as sns"
+ "import seaborn as sns\n",
+ "\n",
+ "import optimagic as om"
]
},
{
diff --git a/docs/source/how_to/how_to_algorithm_selection.ipynb b/docs/source/how_to/how_to_algorithm_selection.ipynb
index b91a7e8f3..fceb44038 100644
--- a/docs/source/how_to/how_to_algorithm_selection.ipynb
+++ b/docs/source/how_to/how_to_algorithm_selection.ipynb
@@ -5,26 +5,86 @@
"metadata": {},
"source": [
"(how-to-select-algorithms)=\n",
+ "# How to select a local optimizer\n",
"\n",
- "# Which optimizer to use\n",
+ "This guide explains how to choose a local optimizer that works well for your problem. \n",
+ "Depending on your [strategy for global optimization](how_to_globalization.ipynb) it \n",
+ "is also relevant for global optimization problems. \n",
"\n",
- "This is a very very very short and oversimplifying guide on selecting an optimization algorithm based on a minimum of information. \n",
+ "## Important facts \n",
"\n",
+ "- There is no optimizer that works well for all problems \n",
+ "- Making the right choice can lead to enormous speedups\n",
+ "- Making the wrong choice can mean that you [don't solve your problem at all](algo-selection-how-important). Sometimes,\n",
+ "optimizers fail silently!\n",
"\n",
- "To select an optimizer, you need to answer two questions:\n",
"\n",
- "1. Is your criterion function differentiable?\n",
+ "## The three steps for selecting algorithms\n",
"\n",
- "2. Do you have a nonlinear least squares structure (i.e. do you sum some kind of squared residuals at the end of your criterion function)?"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Define some inputs\n",
+ "Algorithm selection is a mix of theory and experimentation. We recommend the following \n",
+ "steps:\n",
+ "\n",
+ "1. **Theory**: Based on the properties of your problem, start with 3 to 5 candidate algorithms. \n",
+ "You may use the decision tree below.\n",
+ "2. **Experiments**: Run the candidate algorithms for a small number of function \n",
+ "evaluations and compare the results in a *criterion plot*. As a rule of thumb, use \n",
+ "between `n_params` and `10 * n_params` evaluations. \n",
+ "3. **Optimization**: Re-run the algorithm with the best results until \n",
+ "convergence. Use the best parameter vector from the experiments as start parameters.\n",
+ "\n",
+ "We will walk you through the steps in an [example](algo-selection-example-problem)\n",
+ "below. These steps work well for most problems but sometimes you need \n",
+ "[variations](algo-selection-steps-variations).\n",
+ "\n",
+ "\n",
+ "## A decision tree \n",
+ "\n",
+ "This is a practical guide for narrowing down the set of algorithms to experiment with:\n",
+ "\n",
+ "```{mermaid}\n",
+ "graph LR\n",
+ " classDef highlight fill:#FF4500;\n",
+ " A[\"Do you have
nonlinear
constraints?\"] -- yes --> B[\"differentiable?\"]\n",
+ " B[\"Is your objective function differentiable?\"] -- yes --> C[\"ipopt
nlopt_slsqp
scipy_trust_constr\"]\n",
+ " B[\"differentiable?\"] -- no --> D[\"scipy_cobyla
nlopt_cobyla\"]\n",
+ "\n",
+ " A[\"Do you have
nonlinear constraints?\"] -- no --> E[\"Can you exploit
a least-squares
structure?\"]\n",
+ " E[\"Can you exploit
a least-squares
structure?\"] -- yes --> F[\"differentiable?\"]\n",
+ " E[\"Can you exploit
a least-squares
structure?\"] -- no --> G[\"differentiable?\"]\n",
+ "\n",
+ " F[\"differentiable?\"] -- yes --> H[\"scipy_ls_lm
scipy_ls_trf
scipy_ls_dogleg\"]\n",
+    "    F[\"differentiable?\"] -- no --> I[\"nag_dfols<br/>pounders<br/>tao_pounders\"]\n",
+ "\n",
+ " G[\"differentiable?\"] -- yes --> J[\"scipy_lbfgsb
nlopt_lbfgsb
fides\"]\n",
+ " G[\"differentiable?\"] -- no --> K[\"nlopt_bobyqa
nlopt_neldermead
neldermead_parallel\"]\n",
"\n",
- "Again, we use versions of the sphere function to illustrate how to select these algorithms in practice"
+ "```\n",
+ "\n",
+ "Going through the different questions will give you a list of candidate algorithms. \n",
+ "All algorithms in that list are designed for the same problem class but use different \n",
+ "approaches to solve the problem. Which of them works best for your problem can only be \n",
+ "found out through experimentation.\n",
+ "\n",
+ "```{note}\n",
+ "Many books on numerical optimization focus strongly on the inner workings of algorithms.\n",
+ "They will, for example, describe the difference between a trust-region algorithm and a \n",
+ "line-search algorithm in a lot of detail. We have an [intuitive explanation](../explanation/explanation_of_numerical_optimizers.md) of this too. Understanding these details is important for configuring and\n",
+    "troubleshooting optimizations, but not for algorithm selection. For example, if you have\n",
+ "a scalar, differentiable problem without nonlinear constraints, the decision tree \n",
+ "suggests `fides` and two variants of `lbfgsb`. `fides` is a trust-region algorithm, \n",
+ "`lbfgsb` is a line-search algorithm. Both are designed to solve the same kinds of \n",
+ "problems and which one works best needs to be found out through experimentation.\n",
+ "```\n",
+ "\n",
+ "(algo-selection-example-problem)=\n",
+ "\n",
+ "## An example problem\n",
+ "\n",
+ "As an example we use the [Trid function](https://www.sfu.ca/~ssurjano/trid.html). The Trid function has no local minimum except \n",
+ "the global one. It is defined for any number of dimensions, we will pick 20. As starting \n",
+ "values we will pick the vector [0, 1, ..., 19]. \n",
+ "\n",
+ "A Python implementation of the function and its gradient looks like this:"
]
},
{
@@ -33,8 +93,9 @@
"metadata": {},
"outputs": [],
"source": [
- "import numpy as np\n",
- "import optimagic as om"
+ "import warnings\n",
+ "\n",
+ "warnings.filterwarnings(\"ignore\")"
]
},
{
@@ -43,25 +104,57 @@
"metadata": {},
"outputs": [],
"source": [
- "@om.mark.least_squares\n",
- "def sphere(params):\n",
- " return params\n",
+ "import numpy as np\n",
+ "\n",
+ "import optimagic as om\n",
"\n",
"\n",
- "def sphere_gradient(params):\n",
- " return params * 2\n",
+ "def trid_scalar(x):\n",
+ " \"\"\"Implement Trid function: https://www.sfu.ca/~ssurjano/trid.html.\"\"\"\n",
+ " return ((x - 1) ** 2).sum() - (x[1:] * x[:-1]).sum()\n",
"\n",
"\n",
- "start_params = np.arange(5)"
+ "def trid_gradient(x):\n",
+ " \"\"\"Calculate gradient of trid function.\"\"\"\n",
+ " l1 = np.insert(x, 0, 0)\n",
+ " l1 = np.delete(l1, [-1])\n",
+ " l2 = np.append(x, 0)\n",
+ " l2 = np.delete(l2, [0])\n",
+ " return 2 * (x - 1) - l1 - l2"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## Differentiable criterion function\n",
+ "### Step 1: Theory\n",
+ "\n",
+ "\n",
+ "\n",
+ "Let's go through the decision tree for the Trid function:\n",
+ "\n",
+ "1. **No** nonlinear constraints our solution needs to satisfy\n",
+ "2. **No** least-squares structure we can exploit \n",
+ "3. **Yes**, the function is differentiable. We even have a closed form gradient that \n",
+ "we would like to use. \n",
+ "\n",
+ "We therefore end up with the candidate algorithms `scipy_lbfgsb`, `nlopt_lbfgsb`, and \n",
+ "`fides`.\n",
"\n",
- "Use `scipy_lbfsgsb` as optimizer and provide the closed form derivative if you can. If you do not provide a derivative, optimagic will calculate it numerically. However, this is less precise and slower. "
+ "```{note}\n",
+ "If your function is differentiable but you do not have a closed form gradient (yet), \n",
+    "we suggest using at least one gradient based optimizer and one gradient free optimizer\n",
+ "in your experiments. Optimagic will use numerical gradients in that case. For details, \n",
+ "see [here](how_to_derivatives.ipynb).\n",
+ "```\n",
+ "\n",
+ "\n",
+ "### Step 2: Experiments\n",
+ "\n",
+ "To find out which algorithms work well for our problem, we simply run optimizations with\n",
+ "all candidate algorithms in a loop and store the result in a dictionary. We limit the \n",
+ "number of function evaluations to 8. Since some algorithms only support a maximum number\n",
+ "of iterations as stopping criterion we also limit the number of iterations to 8.\n"
]
},
{
@@ -70,40 +163,57 @@
"metadata": {},
"outputs": [],
"source": [
- "res = om.minimize(\n",
- " fun=sphere,\n",
- " params=start_params,\n",
- " algorithm=\"scipy_lbfgsb\",\n",
- " jac=sphere_gradient,\n",
- ")\n",
- "res.n_fun_evals"
+ "results = {}\n",
+ "for algo in [\"scipy_lbfgsb\", \"nlopt_lbfgsb\", \"fides\"]:\n",
+ " results[algo] = om.minimize(\n",
+ " fun=trid_scalar,\n",
+ " jac=trid_gradient,\n",
+ " params=np.arange(20),\n",
+ " algorithm=algo,\n",
+ " algo_options={\"stopping_maxfun\": 8, \"stopping_maxiter\": 8},\n",
+ " )\n",
+ "\n",
+ "fig = om.criterion_plot(results, max_evaluations=8)\n",
+ "fig.show(renderer=\"png\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "Note that this solves a 5 dimensional problem with just 3 criterion evaluations. For higher dimensions, you will need more, but it scales very well to dozens and hundreds of parameters. \n",
+ "All optimizers work pretty well here and since this is a very simple problem, any of them \n",
+ "would probably find the optimum in a reasonable time. However, `nlopt_lbfgsb` is a bit \n",
+ "better than the others, so we will select it for the next step. In more difficult\n",
+ "examples, the difference between optimizers can be much more pronounced.\n",
"\n",
- "If you are worried about being stuck in a local optimum, use multistart optimization."
+ "### Step 3: Optimization \n",
+ "\n",
+ "All that is left to do is to run the optimization until convergence with the best \n",
+ "optimizer. To avoid duplicated calculations, we can already start from the previously \n",
+ "best parameter vector:"
]
},
{
- "cell_type": "markdown",
+ "cell_type": "code",
+ "execution_count": null,
"metadata": {},
+ "outputs": [],
"source": [
- "## Not differentiable, only scalar output"
+ "best_x = results[\"nlopt_lbfgsb\"].params\n",
+ "results[\"nlopt_lbfgsb_complete\"] = om.minimize(\n",
+ " fun=trid_scalar,\n",
+ " jac=trid_gradient,\n",
+ " params=best_x,\n",
+ " algorithm=\"nlopt_lbfgsb\",\n",
+ ")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "Use `nag_pybobyqa`. Note that for this you need to install the `PyBOBYQA` package if you do not already have it:\n",
- " \n",
- "`pip install Py-BOBYQA`\n",
- "\n",
- "Then you select the algorithm as follows:"
+ "Looking at the result in a criterion plot we can see that the optimizer converges after \n",
+ "a bit more than 30 function evaluations. "
]
},
{
@@ -112,26 +222,75 @@
"metadata": {},
"outputs": [],
"source": [
- "res = om.minimize(\n",
- " fun=sphere,\n",
- " params=start_params,\n",
- " algorithm=\"nag_pybobyqa\",\n",
- ")\n",
- "res.n_fun_evals"
+ "fig = om.criterion_plot(results)\n",
+ "fig.show(renderer=\"png\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## Not differentiable, least squares structure\n",
+ "(algo-selection-steps-variations)=\n",
+ "\n",
+    "## Variations of the three steps\n",
+ "\n",
+    "The three steps described above work very well in most situations. However, sometimes \n",
+ "it makes sense to deviate: \n",
"\n",
- "Use `nag_dfols`. To use `nag_dfols`, you need to install it via:\n",
+ "- If you are unsure about some of the questions in step 1, select more algorithms for \n",
+ "the experimentation phase and run more than 1 algorithm until convergence. \n",
+ "- If it is very important to find a precise optimum, run more than 1 algorithm until \n",
+ "convergence. \n",
+ "- If you have a very fast objective function, simply run all candidate algorithms until \n",
+ "convergence. \n",
+ "- If you have a differentiable objective function but no closed form derivative, use \n",
+ "at least one gradient based optimizer and one gradient free optimizer in the \n",
+ "experiments. See [here](how_to_derivatives.ipynb) to learn more about derivatives.\n",
"\n",
- "`pip install DFO-LS`\n",
"\n",
+ "(algo-selection-how-important)=\n",
+ "\n",
+ "## How important was it?\n",
+ "\n",
+ "The Trid function is differentiable and very well behaved in almost every aspect. \n",
+ "Moreover, it has a very short runtime. One would think that any optimizer can find its \n",
+ "optimum. So let's compare the selected optimizer with a few others:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "results = {}\n",
+ "for algo in [\"nlopt_lbfgsb\", \"scipy_neldermead\", \"scipy_cobyla\"]:\n",
+ " results[algo] = om.minimize(\n",
+ " fun=trid_scalar,\n",
+ " jac=trid_gradient,\n",
+ " params=np.arange(20),\n",
+ " algorithm=algo,\n",
+ " )\n",
+ "\n",
+ "fig = om.criterion_plot(results)\n",
+ "fig.show(renderer=\"png\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We can see that our chosen optimizer solves the problem with less than 35 function \n",
+ "evaluations. At this point, the two gradient-free optimizers have not yet made \n",
+ "significant progress. CoByLA gets reasonably close to an optimum after about 4k \n",
+ "evaluations. Nelder-Mead gets stuck after 8k evaluations and fails to solve the problem. \n",
"\n",
- "This optimizer will only work if your criterion function returns a dictionary that contains the entry `root_contributions`. This needs to be a numpy array or pytree that contains the residuals of the least squares problem. "
+ "This example shows not only that the choice of optimizer is important but that the commonly \n",
+ "held belief that gradient free optimizers are generally more robust than gradient based \n",
+ "ones is dangerous! The Nelder-Mead algorithm did \"converge\" and reports success, but\n",
+ "did not find the optimum. It did not even get stuck in a local optimum because we know \n",
+ "that the Trid function does not have local optima except the global one. It just got \n",
+ "stuck somewhere. "
]
},
{
@@ -140,12 +299,7 @@
"metadata": {},
"outputs": [],
"source": [
- "res = om.minimize(\n",
- " fun=sphere,\n",
- " params=start_params,\n",
- " algorithm=\"nag_dfols\",\n",
- ")\n",
- "res.n_fun_evals"
+ "results[\"scipy_neldermead\"].success"
]
}
],
@@ -165,7 +319,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.10.14"
+ "version": "3.10.15"
}
},
"nbformat": 4,
diff --git a/docs/source/how_to/how_to_bounds.ipynb b/docs/source/how_to/how_to_bounds.ipynb
index f5dcae403..b87a15be2 100644
--- a/docs/source/how_to/how_to_bounds.ipynb
+++ b/docs/source/how_to/how_to_bounds.ipynb
@@ -32,6 +32,7 @@
"outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"import optimagic as om"
]
},
diff --git a/docs/source/how_to/how_to_criterion_function.ipynb b/docs/source/how_to/how_to_criterion_function.ipynb
index 98afbf877..8d99020e0 100644
--- a/docs/source/how_to/how_to_criterion_function.ipynb
+++ b/docs/source/how_to/how_to_criterion_function.ipynb
@@ -25,6 +25,7 @@
"outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"import optimagic as om\n",
"\n",
"\n",
diff --git a/docs/source/how_to/how_to_derivatives.ipynb b/docs/source/how_to/how_to_derivatives.ipynb
index 6e079f508..0dda5e7fd 100644
--- a/docs/source/how_to/how_to_derivatives.ipynb
+++ b/docs/source/how_to/how_to_derivatives.ipynb
@@ -38,6 +38,7 @@
"outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"import optimagic as om\n",
"\n",
"\n",
diff --git a/docs/source/how_to/how_to_errors_during_optimization.ipynb b/docs/source/how_to/how_to_errors_during_optimization.ipynb
index 647000a95..8a69f85c3 100644
--- a/docs/source/how_to/how_to_errors_during_optimization.ipynb
+++ b/docs/source/how_to/how_to_errors_during_optimization.ipynb
@@ -49,9 +49,10 @@
"import warnings\n",
"\n",
"import numpy as np\n",
- "import optimagic as om\n",
"from scipy.optimize import minimize as scipy_minimize\n",
"\n",
+ "import optimagic as om\n",
+ "\n",
"warnings.simplefilter(\"ignore\")"
]
},
diff --git a/docs/source/how_to/how_to_globalization.ipynb b/docs/source/how_to/how_to_globalization.ipynb
new file mode 100644
index 000000000..1ddf45cbc
--- /dev/null
+++ b/docs/source/how_to/how_to_globalization.ipynb
@@ -0,0 +1,20 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# How to choose a strategy for global optimization\n",
+ "\n",
+ "(to be written)"
+ ]
+ }
+ ],
+ "metadata": {
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/docs/source/how_to/how_to_logging.ipynb b/docs/source/how_to/how_to_logging.ipynb
index c957341d9..dea47d11f 100644
--- a/docs/source/how_to/how_to_logging.ipynb
+++ b/docs/source/how_to/how_to_logging.ipynb
@@ -30,6 +30,7 @@
"from pathlib import Path\n",
"\n",
"import numpy as np\n",
+ "\n",
"import optimagic as om"
]
},
diff --git a/docs/source/how_to/how_to_multistart.ipynb b/docs/source/how_to/how_to_multistart.ipynb
index a4506d4ac..73ff8f171 100644
--- a/docs/source/how_to/how_to_multistart.ipynb
+++ b/docs/source/how_to/how_to_multistart.ipynb
@@ -36,6 +36,7 @@
"outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"import optimagic as om\n",
"\n",
"\n",
@@ -435,7 +436,7 @@
"metadata": {},
"outputs": [],
"source": [
- "np.row_stack(res.multistart_info.exploration_sample).shape"
+ "np.vstack(res.multistart_info.exploration_sample).shape"
]
},
{
diff --git a/docs/source/how_to/how_to_slice_plot.ipynb b/docs/source/how_to/how_to_slice_plot.ipynb
index bd4d0e9f6..b30de384a 100644
--- a/docs/source/how_to/how_to_slice_plot.ipynb
+++ b/docs/source/how_to/how_to_slice_plot.ipynb
@@ -31,6 +31,7 @@
"outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"import optimagic as om"
]
},
diff --git a/docs/source/how_to/how_to_visualize_histories.ipynb b/docs/source/how_to/how_to_visualize_histories.ipynb
index 37b3e43a1..b9da50889 100644
--- a/docs/source/how_to/how_to_visualize_histories.ipynb
+++ b/docs/source/how_to/how_to_visualize_histories.ipynb
@@ -23,6 +23,7 @@
"outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"import optimagic as om"
]
},
diff --git a/docs/source/how_to/index.md b/docs/source/how_to/index.md
index 8b7cdd37d..412b0f252 100644
--- a/docs/source/how_to/index.md
+++ b/docs/source/how_to/index.md
@@ -15,6 +15,7 @@ how_to_derivatives
how_to_algorithm_selection
how_to_bounds
how_to_constraints
+how_to_globalization
how_to_multistart
how_to_visualize_histories
how_to_specify_algorithm_and_algo_options
diff --git a/docs/source/installation.md b/docs/source/installation.md
index 1c4ea3ac5..912e213b6 100644
--- a/docs/source/installation.md
+++ b/docs/source/installation.md
@@ -40,6 +40,10 @@ pip install Py-BOBYQA
pip install DFO-LS
```
+*Note*: We recommend installing `DFO-LS` version 1.5.3 or higher. Versions 1.5.0 or
+lower also work, but versions `1.5.1` and `1.5.2` contain bugs that can lead to
+errors being raised.
+
```
conda install -c conda-forge petsc4py
```
diff --git a/docs/source/tutorials/numdiff_overview.ipynb b/docs/source/tutorials/numdiff_overview.ipynb
index 5068523a7..b9c3b24bb 100644
--- a/docs/source/tutorials/numdiff_overview.ipynb
+++ b/docs/source/tutorials/numdiff_overview.ipynb
@@ -17,8 +17,9 @@
"outputs": [],
"source": [
"import numpy as np\n",
- "import optimagic as om\n",
- "import pandas as pd"
+ "import pandas as pd\n",
+ "\n",
+ "import optimagic as om"
]
},
{
diff --git a/docs/source/tutorials/optimization_overview.ipynb b/docs/source/tutorials/optimization_overview.ipynb
index 31e6b46b4..c65c63e8b 100644
--- a/docs/source/tutorials/optimization_overview.ipynb
+++ b/docs/source/tutorials/optimization_overview.ipynb
@@ -16,15 +16,18 @@
"outputs": [],
"source": [
"import numpy as np\n",
- "import optimagic as om\n",
- "import pandas as pd"
+ "import pandas as pd\n",
+ "\n",
+ "import optimagic as om"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## Basic usage of `minimize`"
+ "## Basic usage of `minimize`\n",
+ "\n",
+ "The basic usage of `optimagic.minimize` is very similar to `scipy.optimize.minimize`"
]
},
{
@@ -43,13 +46,13 @@
"metadata": {},
"outputs": [],
"source": [
- "res = om.minimize(\n",
+ "lbfgsb_res = om.minimize(\n",
" fun=sphere,\n",
" params=np.arange(5),\n",
" algorithm=\"scipy_lbfgsb\",\n",
")\n",
"\n",
- "res.params.round(5)"
+ "lbfgsb_res.params.round(5)"
]
},
{
@@ -58,7 +61,7 @@
"source": [
"## `params` do not have to be vectors\n",
"\n",
- "In optimagic, params can by arbitrary [pytrees](https://jax.readthedocs.io/en/latest/pytrees.html). Examples are (nested) dictionaries of numbers, arrays and pandas objects. "
+ "In optimagic, params can be arbitrary [pytrees](https://jax.readthedocs.io/en/latest/pytrees.html). Examples are (nested) dictionaries of numbers, arrays and pandas objects. This is very useful if you have many parameters!"
]
},
{
@@ -77,20 +80,22 @@
"metadata": {},
"outputs": [],
"source": [
- "res = om.minimize(\n",
+ "nm_res = om.minimize(\n",
" fun=dict_sphere,\n",
" params={\"a\": 0, \"b\": 1, \"c\": pd.Series([2, 3, 4])},\n",
- " algorithm=\"scipy_powell\",\n",
+ " algorithm=\"scipy_neldermead\",\n",
")\n",
"\n",
- "res.params"
+ "nm_res.params"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## The result contains all you need to know"
+ "## You can compare optimizers\n",
+ "\n",
+ "In practice, it is super hard to pick the right optimizer for your problem. With optimagic, you can simply try a few and compare their results!"
]
},
{
@@ -99,29 +104,16 @@
"metadata": {},
"outputs": [],
"source": [
- "res = om.minimize(\n",
- " fun=dict_sphere,\n",
- " params={\"a\": 0, \"b\": 1, \"c\": pd.Series([2, 3, 4])},\n",
- " algorithm=\"scipy_neldermead\",\n",
- ")\n",
- "res"
+ "results = {\"lbfgsb\": lbfgsb_res, \"nelder_mead\": nm_res}\n",
+ "fig = om.criterion_plot(results, max_evaluations=300)\n",
+ "fig.show(renderer=\"png\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## You can visualize the convergence"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "fig = om.criterion_plot(res, max_evaluations=300)\n",
- "fig.show(renderer=\"png\")"
+ "You can also zoom in on the history of specific parameters. This can be super helpful to diagnose problems in the optimization. "
]
},
{
@@ -131,7 +123,7 @@
"outputs": [],
"source": [
"fig = om.params_plot(\n",
- " res,\n",
+ " nm_res,\n",
" max_evaluations=300,\n",
" # optionally select a subset of parameters to plot\n",
" selector=lambda params: params[\"c\"],\n",
@@ -145,20 +137,23 @@
"source": [
"## There are many optimizers\n",
"\n",
- "If you install some optional dependencies, you can choose from a large (and growing) set of optimization algorithms -- all with the same interface!\n",
+ "By default, optimagic comes with optimizers from scipy, including global optimizers \n",
+ "and least-squares optimizers. But we also have wrappers for algorithms from **NlOpt**, \n",
+ "**Pygmo**, as well as several optimizers from individual packages like **fides**, \n",
+ "**ipopt**, **pybobyqa** and **dfols**. \n",
"\n",
- "For example, we wrap optimizers from `scipy.optimize`, `nlopt`, `cyipopt`, `pygmo`, `fides`, `tao` and others. \n",
+ "To use optimizers that are not from scipy, follow our [installation guide](../installation.md) for optional dependencies. To see which optimizers we have, check out the [full list](../algorithms.md).\n",
"\n",
- "We also have some optimizers that are not part of other packages. Examples are a `parallel Nelder-Mead` algorithm, The `BHHH` algorithm and a `parallel Pounders` algorithm.\n",
- "\n",
- "See the full list [here](../how_to_guides/optimization/how_to_specify_algorithm_and_algo_options"
+ "If you are missing your favorite optimizer in the list, let us know with an [issue](https://github.com/optimagic-dev/optimagic/issues)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## You can add bounds"
+ "## You can add bounds\n",
+ "\n",
+ "As any optimizer library, optimagic lets you specify bounds for the parameters."
]
},
{
@@ -183,7 +178,9 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "## You can fix parameters "
+ "## You can fix parameters \n",
+ "\n",
+ "On top of bounds, you can also fix one or more parameters during the optimization. "
]
},
{
diff --git a/environment.yml b/environment.yml
index d321d992e..cfd6bf6eb 100644
--- a/environment.yml
+++ b/environment.yml
@@ -8,7 +8,7 @@ dependencies:
- cyipopt>=1.4.0 # dev, tests
- pygmo>=2.19.0 # dev, tests
- jupyterlab # dev, docs
- - nlopt # dev, tests
+ - nlopt # dev, tests, docs
- pdbpp # dev
- pip # dev, tests, docs
- pytest # dev, tests
@@ -19,7 +19,7 @@ dependencies:
- toml # dev
- cloudpickle # run, tests
- joblib # run, tests
- - numpy<2.0 # run, tests
+ - numpy >= 2 # run, tests
- pandas # run, tests
- plotly # run, tests
- pybaum >= 0.1.2 # run, tests
@@ -32,17 +32,17 @@ dependencies:
- sphinx-panels # docs
- sphinxcontrib-bibtex # docs
- seaborn # dev, tests
- - mypy>=1.11 # dev, tests
+ - mypy=1.13 # dev, tests
- pyyaml # dev, tests
- jinja2 # dev, tests
- furo # dev, docs
- annotated-types # dev, tests
- pip: # dev, tests, docs
- - DFO-LS # dev, tests
+ - DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- kaleido # dev, tests
- - pre-commit # dev
+ - pre-commit>=4 # dev
- -e . # dev
# type stubs
- pandas-stubs # dev, tests
@@ -50,3 +50,4 @@ dependencies:
- types-openpyxl # dev, tests
- types-jinja2 # dev, tests
- sqlalchemy-stubs # dev, tests
+ - sphinxcontrib-mermaid # dev, tests, docs
diff --git a/pyproject.toml b/pyproject.toml
index 275fb09ed..fe3c69225 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,7 +8,7 @@ requires-python = ">=3.10"
dependencies = [
"cloudpickle",
"joblib",
- "numpy<2.0",
+ "numpy",
"pandas",
"plotly",
"pybaum>=0.1.2",
@@ -40,8 +40,9 @@ classifiers = [
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Topic :: Scientific/Engineering",
]
authors = [
diff --git a/src/estimagic/__init__.py b/src/estimagic/__init__.py
index e3fa3e908..44a640486 100644
--- a/src/estimagic/__init__.py
+++ b/src/estimagic/__init__.py
@@ -1,6 +1,17 @@
import warnings
from dataclasses import dataclass
+from estimagic import utilities
+from estimagic.bootstrap import BootstrapResult, bootstrap
+from estimagic.estimate_ml import LikelihoodResult, estimate_ml
+from estimagic.estimate_msm import MomentsResult, estimate_msm
+from estimagic.estimation_table import (
+ estimation_table,
+ render_html,
+ render_latex,
+)
+from estimagic.lollipop_plot import lollipop_plot
+from estimagic.msm_weighting import get_moments_cov
from optimagic import OptimizeLogReader as _OptimizeLogReader
from optimagic import OptimizeResult as _OptimizeResult
from optimagic import __version__
@@ -22,18 +33,6 @@
from optimagic import traceback_report as _traceback_report
from optimagic.decorators import deprecated
-from estimagic import utilities
-from estimagic.bootstrap import BootstrapResult, bootstrap
-from estimagic.estimate_ml import LikelihoodResult, estimate_ml
-from estimagic.estimate_msm import MomentsResult, estimate_msm
-from estimagic.estimation_table import (
- estimation_table,
- render_html,
- render_latex,
-)
-from estimagic.lollipop_plot import lollipop_plot
-from estimagic.msm_weighting import get_moments_cov
-
MSG = (
"estimagic.{name} has been deprecated in version 0.5.0. Use optimagic.{name} "
"instead. This function will be removed in version 0.6.0."
diff --git a/src/estimagic/bootstrap.py b/src/estimagic/bootstrap.py
index 68dde6405..179b4e5cc 100644
--- a/src/estimagic/bootstrap.py
+++ b/src/estimagic/bootstrap.py
@@ -5,16 +5,16 @@
import numpy as np
import pandas as pd
-from optimagic.batch_evaluators import joblib_batch_evaluator
-from optimagic.parameters.block_trees import matrix_to_block_tree
-from optimagic.parameters.tree_registry import get_registry
-from optimagic.utilities import get_rng
from pybaum import leaf_names, tree_flatten, tree_just_flatten, tree_unflatten
from estimagic.bootstrap_ci import calculate_ci
from estimagic.bootstrap_helpers import check_inputs
from estimagic.bootstrap_outcomes import get_bootstrap_outcomes
from estimagic.shared_covs import calculate_estimation_summary
+from optimagic.batch_evaluators import joblib_batch_evaluator
+from optimagic.parameters.block_trees import matrix_to_block_tree
+from optimagic.parameters.tree_registry import get_registry
+from optimagic.utilities import get_rng
def bootstrap(
diff --git a/src/estimagic/bootstrap_outcomes.py b/src/estimagic/bootstrap_outcomes.py
index f9caf1c7d..5ba5e6bea 100644
--- a/src/estimagic/bootstrap_outcomes.py
+++ b/src/estimagic/bootstrap_outcomes.py
@@ -1,7 +1,6 @@
-from optimagic.batch_evaluators import process_batch_evaluator
-
from estimagic.bootstrap_helpers import check_inputs
from estimagic.bootstrap_samples import get_bootstrap_indices
+from optimagic.batch_evaluators import process_batch_evaluator
def get_bootstrap_outcomes(
diff --git a/src/estimagic/estimate_ml.py b/src/estimagic/estimate_ml.py
index 4199eeb85..4044fee1d 100644
--- a/src/estimagic/estimate_ml.py
+++ b/src/estimagic/estimate_ml.py
@@ -5,6 +5,26 @@
import numpy as np
import pandas as pd
+
+from estimagic.ml_covs import (
+ cov_cluster_robust,
+ cov_hessian,
+ cov_jacobian,
+ cov_robust,
+ cov_strata_robust,
+)
+from estimagic.shared_covs import (
+ FreeParams,
+ calculate_ci,
+ calculate_estimation_summary,
+ calculate_free_estimates,
+ calculate_p_values,
+ calculate_summary_data_estimation,
+ get_derivative_case,
+ transform_covariance,
+ transform_free_cov_to_cov,
+ transform_free_values_to_params_tree,
+)
from optimagic import deprecations, mark
from optimagic.deprecations import (
replace_and_warn_about_deprecated_bounds,
@@ -32,26 +52,6 @@
from optimagic.typing import AggregationLevel
from optimagic.utilities import get_rng, to_pickle
-from estimagic.ml_covs import (
- cov_cluster_robust,
- cov_hessian,
- cov_jacobian,
- cov_robust,
- cov_strata_robust,
-)
-from estimagic.shared_covs import (
- FreeParams,
- calculate_ci,
- calculate_estimation_summary,
- calculate_free_estimates,
- calculate_p_values,
- calculate_summary_data_estimation,
- get_derivative_case,
- transform_covariance,
- transform_free_cov_to_cov,
- transform_free_values_to_params_tree,
-)
-
def estimate_ml(
loglike,
@@ -95,8 +95,8 @@ def estimate_ml(
optimize_options to False. Pytrees can be a numpy array, a pandas Series, a
DataFrame with "value" column, a float and any kind of (nested) dictionary
or list containing these elements. See :ref:`params` for examples.
- optimize_options (dict, str or False): Keyword arguments that govern the
- numerical optimization. Valid entries are all arguments of
+ optimize_options (dict, Algorithm, str or False): Keyword arguments that govern
+ the numerical optimization. Valid entries are all arguments of
:func:`~estimagic.optimization.optimize.minimize` except for those that are
passed explicilty to ``estimate_ml``. If you pass False as optimize_options
you signal that ``params`` are already the optimal parameters and no
@@ -199,7 +199,10 @@ def estimate_ml(
is_optimized = optimize_options is False
if not is_optimized:
- if isinstance(optimize_options, str):
+ # If optimize_options is not a dictionary and not False, we assume it represents
+ # an algorithm. The actual testing of whether it is a valid algorithm is done
+ # when `maximize` is called.
+ if not isinstance(optimize_options, dict):
optimize_options = {"algorithm": optimize_options}
check_optimization_options(
diff --git a/src/estimagic/estimate_msm.py b/src/estimagic/estimate_msm.py
index 0ec6933b8..b207e9829 100644
--- a/src/estimagic/estimate_msm.py
+++ b/src/estimagic/estimate_msm.py
@@ -9,29 +9,6 @@
import numpy as np
import pandas as pd
-from optimagic import deprecations, mark
-from optimagic.deprecations import (
- replace_and_warn_about_deprecated_bounds,
-)
-from optimagic.differentiation.derivatives import first_derivative
-from optimagic.differentiation.numdiff_options import (
- NumdiffPurpose,
- get_default_numdiff_options,
- pre_process_numdiff_options,
-)
-from optimagic.exceptions import InvalidFunctionError
-from optimagic.optimization.fun_value import LeastSquaresFunctionValue
-from optimagic.optimization.optimize import minimize
-from optimagic.optimization.optimize_result import OptimizeResult
-from optimagic.parameters.block_trees import block_tree_to_matrix, matrix_to_block_tree
-from optimagic.parameters.bounds import Bounds, pre_process_bounds
-from optimagic.parameters.conversion import Converter, get_converter
-from optimagic.parameters.space_conversion import InternalParams
-from optimagic.parameters.tree_registry import get_registry
-from optimagic.shared.check_option_dicts import (
- check_optimization_options,
-)
-from optimagic.utilities import get_rng, to_pickle
from pybaum import leaf_names, tree_just_flatten
from estimagic.msm_covs import cov_optimal, cov_robust
@@ -56,6 +33,29 @@
transform_free_cov_to_cov,
transform_free_values_to_params_tree,
)
+from optimagic import deprecations, mark
+from optimagic.deprecations import (
+ replace_and_warn_about_deprecated_bounds,
+)
+from optimagic.differentiation.derivatives import first_derivative
+from optimagic.differentiation.numdiff_options import (
+ NumdiffPurpose,
+ get_default_numdiff_options,
+ pre_process_numdiff_options,
+)
+from optimagic.exceptions import InvalidFunctionError
+from optimagic.optimization.fun_value import LeastSquaresFunctionValue
+from optimagic.optimization.optimize import minimize
+from optimagic.optimization.optimize_result import OptimizeResult
+from optimagic.parameters.block_trees import block_tree_to_matrix, matrix_to_block_tree
+from optimagic.parameters.bounds import Bounds, pre_process_bounds
+from optimagic.parameters.conversion import Converter, get_converter
+from optimagic.parameters.space_conversion import InternalParams
+from optimagic.parameters.tree_registry import get_registry
+from optimagic.shared.check_option_dicts import (
+ check_optimization_options,
+)
+from optimagic.utilities import get_rng, to_pickle
def estimate_msm(
@@ -107,8 +107,8 @@ def estimate_msm(
optimize_options to False. Pytrees can be a numpy array, a pandas Series, a
DataFrame with "value" column, a float and any kind of (nested) dictionary
or list containing these elements. See :ref:`params` for examples.
- optimize_options (dict, str or False): Keyword arguments that govern the
- numerical optimization. Valid entries are all arguments of
+ optimize_options (dict, Algorithm, str or False): Keyword arguments that govern
+ the numerical optimization. Valid entries are all arguments of
:func:`~estimagic.optimization.optimize.minimize` except for those that can
be passed explicitly to ``estimate_msm``. If you pass False as
``optimize_options`` you signal that ``params`` are already
@@ -199,7 +199,10 @@ def estimate_msm(
is_optimized = optimize_options is False
if not is_optimized:
- if isinstance(optimize_options, str):
+ # If optimize_options is not a dictionary and not False, we assume it represents
+ # an algorithm. The actual testing of whether it is a valid algorithm is done
+ # when `minimize` is called.
+ if not isinstance(optimize_options, dict):
optimize_options = {"algorithm": optimize_options}
check_optimization_options(
diff --git a/src/estimagic/estimation_table.py b/src/estimagic/estimation_table.py
index 85d75199a..9c953007c 100644
--- a/src/estimagic/estimation_table.py
+++ b/src/estimagic/estimation_table.py
@@ -6,6 +6,7 @@
import numpy as np
import pandas as pd
+
from optimagic.shared.compat import pd_df_map
suppress_performance_warnings = np.testing.suppress_warnings()
@@ -1473,8 +1474,7 @@ def _get_digits_after_decimal(df):
)
except KeyError:
trail_length = 0
- if trail_length > max_trail:
- max_trail = trail_length
+ max_trail = max(trail_length, max_trail)
return max_trail
diff --git a/src/estimagic/examples/logit.py b/src/estimagic/examples/logit.py
index 890bb9e7c..925356c5c 100644
--- a/src/estimagic/examples/logit.py
+++ b/src/estimagic/examples/logit.py
@@ -2,6 +2,7 @@
import numpy as np
import pandas as pd
+
from optimagic import mark
diff --git a/src/estimagic/lollipop_plot.py b/src/estimagic/lollipop_plot.py
index beea44c0d..f787a0b31 100644
--- a/src/estimagic/lollipop_plot.py
+++ b/src/estimagic/lollipop_plot.py
@@ -2,6 +2,7 @@
import pandas as pd
import plotly.graph_objects as go
+
from optimagic.config import PLOTLY_PALETTE, PLOTLY_TEMPLATE
from optimagic.visualization.plotting_utilities import create_grid_plot, create_ind_dict
diff --git a/src/estimagic/ml_covs.py b/src/estimagic/ml_covs.py
index d61abafae..39e445501 100644
--- a/src/estimagic/ml_covs.py
+++ b/src/estimagic/ml_covs.py
@@ -2,10 +2,10 @@
import numpy as np
import pandas as pd
-from optimagic.exceptions import INVALID_INFERENCE_MSG
-from optimagic.utilities import robust_inverse
from estimagic.shared_covs import process_pandas_arguments
+from optimagic.exceptions import INVALID_INFERENCE_MSG
+from optimagic.utilities import robust_inverse
def cov_hessian(hess):
diff --git a/src/estimagic/msm_covs.py b/src/estimagic/msm_covs.py
index 9e61d93c6..958bde683 100644
--- a/src/estimagic/msm_covs.py
+++ b/src/estimagic/msm_covs.py
@@ -1,8 +1,8 @@
import pandas as pd
-from optimagic.exceptions import INVALID_INFERENCE_MSG
-from optimagic.utilities import robust_inverse
from estimagic.shared_covs import process_pandas_arguments
+from optimagic.exceptions import INVALID_INFERENCE_MSG
+from optimagic.utilities import robust_inverse
def cov_robust(jac, weights, moments_cov):
diff --git a/src/estimagic/msm_sensitivity.py b/src/estimagic/msm_sensitivity.py
index 188c29847..e8147fc29 100644
--- a/src/estimagic/msm_sensitivity.py
+++ b/src/estimagic/msm_sensitivity.py
@@ -10,11 +10,11 @@
import numpy as np
import pandas as pd
-from optimagic.exceptions import INVALID_SENSITIVITY_MSG
-from optimagic.utilities import robust_inverse
from estimagic.msm_covs import cov_robust
from estimagic.shared_covs import process_pandas_arguments
+from optimagic.exceptions import INVALID_SENSITIVITY_MSG
+from optimagic.utilities import robust_inverse
def calculate_sensitivity_to_bias(jac, weights):
diff --git a/src/estimagic/msm_weighting.py b/src/estimagic/msm_weighting.py
index dd00028eb..4b26a7e42 100644
--- a/src/estimagic/msm_weighting.py
+++ b/src/estimagic/msm_weighting.py
@@ -2,13 +2,13 @@
import numpy as np
import pandas as pd
-from optimagic.parameters.block_trees import block_tree_to_matrix, matrix_to_block_tree
-from optimagic.parameters.tree_registry import get_registry
-from optimagic.utilities import robust_inverse
from pybaum import tree_just_flatten
from scipy.linalg import block_diag
from estimagic.bootstrap import bootstrap
+from optimagic.parameters.block_trees import block_tree_to_matrix, matrix_to_block_tree
+from optimagic.parameters.tree_registry import get_registry
+from optimagic.utilities import robust_inverse
def get_moments_cov(
diff --git a/src/estimagic/shared_covs.py b/src/estimagic/shared_covs.py
index e6a993d16..c4cccc3a2 100644
--- a/src/estimagic/shared_covs.py
+++ b/src/estimagic/shared_covs.py
@@ -3,9 +3,10 @@
import numpy as np
import pandas as pd
import scipy
+from pybaum import tree_just_flatten, tree_unflatten
+
from optimagic.parameters.block_trees import matrix_to_block_tree
from optimagic.parameters.tree_registry import get_registry
-from pybaum import tree_just_flatten, tree_unflatten
def transform_covariance(
diff --git a/src/optimagic/algorithms.py b/src/optimagic/algorithms.py
index 242a5538b..9e42ea828 100644
--- a/src/optimagic/algorithms.py
+++ b/src/optimagic/algorithms.py
@@ -40,10 +40,12 @@
name = candidate.__algo_info__.name
if issubclass(candidate, Algorithm) and candidate is not Algorithm:
ALL_ALGORITHMS[name] = candidate
- if candidate.__algo_info__.is_available:
+ if candidate.__algo_info__.is_available: # type: ignore[attr-defined]
AVAILABLE_ALGORITHMS[name] = candidate
GLOBAL_ALGORITHMS = [
- name for name, algo in ALL_ALGORITHMS.items() if algo.__algo_info__.is_global
+ name
+ for name, algo in ALL_ALGORITHMS.items()
+ if algo.__algo_info__.is_global # type: ignore[attr-defined]
]
diff --git a/src/optimagic/differentiation/richardson_extrapolation.py b/src/optimagic/differentiation/richardson_extrapolation.py
index fe1842e40..3aa189527 100644
--- a/src/optimagic/differentiation/richardson_extrapolation.py
+++ b/src/optimagic/differentiation/richardson_extrapolation.py
@@ -272,7 +272,7 @@ def _compute_step_ratio(steps):
"""
ratios = steps[1:, :] / steps[:-1, :]
- ratios = ratios[np.isfinite(ratios)]
+ finite_ratios = ratios[np.isfinite(ratios)]
- step_ratio = ratios.flat[0]
+ step_ratio = finite_ratios.item(0)
return step_ratio
diff --git a/src/optimagic/optimization/algorithm.py b/src/optimagic/optimization/algorithm.py
index add879d28..3bef1d09f 100644
--- a/src/optimagic/optimization/algorithm.py
+++ b/src/optimagic/optimization/algorithm.py
@@ -154,7 +154,7 @@ def _solve_internal_problem(
def __post_init__(self) -> None:
for field in self.__dataclass_fields__:
raw_value = getattr(self, field)
- target_type = self.__dataclass_fields__[field].type
+ target_type = typing.cast(type, self.__dataclass_fields__[field].type)
if target_type in TYPE_CONVERTERS:
try:
value = TYPE_CONVERTERS[target_type](raw_value)
diff --git a/src/optimagic/optimization/create_optimization_problem.py b/src/optimagic/optimization/create_optimization_problem.py
index d11a9bcb3..1403427d0 100644
--- a/src/optimagic/optimization/create_optimization_problem.py
+++ b/src/optimagic/optimization/create_optimization_problem.py
@@ -1,7 +1,7 @@
import warnings
from dataclasses import dataclass
from pathlib import Path
-from typing import Any, Callable, Type, cast
+from typing import Any, Callable, Type
from optimagic import deprecations
from optimagic.algorithms import ALL_ALGORITHMS
@@ -591,4 +591,4 @@ def pre_process_user_algorithm(
elif isinstance(algorithm, type) and issubclass(algorithm, Algorithm):
algorithm = algorithm()
- return cast(Algorithm, algorithm)
+ return algorithm
diff --git a/src/optimagic/optimizers/_pounders/gqtpar.py b/src/optimagic/optimizers/_pounders/gqtpar.py
index a31e59b6a..bf9eb68dd 100644
--- a/src/optimagic/optimizers/_pounders/gqtpar.py
+++ b/src/optimagic/optimizers/_pounders/gqtpar.py
@@ -75,7 +75,7 @@ def gqtpar(model, x_candidate, *, k_easy=0.1, k_hard=0.2, maxiter=200):
zero_threshold = (
model.square_terms.shape[0]
* np.finfo(float).eps
- * np.linalg.norm(model.square_terms, np.Inf)
+ * np.linalg.norm(model.square_terms, np.inf)
)
stopping_criteria = {
"k_easy": k_easy,
@@ -175,7 +175,7 @@ def _get_initial_guess_for_lambdas(
gradient_norm = np.linalg.norm(main_model.linear_terms)
model_hessian = main_model.square_terms
- hessian_infinity_norm = np.linalg.norm(model_hessian, np.Inf)
+ hessian_infinity_norm = np.linalg.norm(model_hessian, np.inf)
hessian_frobenius_norm = np.linalg.norm(model_hessian, "fro")
hessian_gershgorin_lower, hessian_gershgorin_upper = _compute_gershgorin_bounds(
diff --git a/src/optimagic/optimizers/nag_optimizers.py b/src/optimagic/optimizers/nag_optimizers.py
index 5cff71a33..e708b5915 100644
--- a/src/optimagic/optimizers/nag_optimizers.py
+++ b/src/optimagic/optimizers/nag_optimizers.py
@@ -630,7 +630,7 @@ def nag_dfols_internal(
fun=res["solution_criterion"],
success=res["success"],
message=res["message"],
- n_iterations=int(res["n_iterations"]),
+ n_iterations=res["n_iterations"],
n_fun_evals=res["n_fun_evals"],
)
return out
@@ -857,7 +857,7 @@ def nag_pybobyqa_internal(
fun=res["solution_criterion"],
success=res["success"],
message=res["message"],
- n_iterations=int(res["n_iterations"]),
+ n_iterations=res["n_iterations"],
)
return out
@@ -876,8 +876,13 @@ def _process_nag_result(nag_result_obj, len_x):
results (dict): See :ref:`internal_optimizer_output` for details.
"""
+ if hasattr(nag_result_obj, "f"):
+ solution_fun = nag_result_obj.f
+ else:
+ solution_fun = nag_result_obj.obj
+
processed = {
- "solution_criterion": nag_result_obj.f,
+ "solution_criterion": solution_fun,
"n_fun_evals": nag_result_obj.nx,
"message": nag_result_obj.msg,
"success": nag_result_obj.flag == nag_result_obj.EXIT_SUCCESS,
@@ -885,9 +890,8 @@ def _process_nag_result(nag_result_obj, len_x):
"diagnostic_info": nag_result_obj.diagnostic_info,
}
try:
- processed["n_iterations"] = nag_result_obj.diagnostic_info["iters_total"].iloc[
- -1
- ]
+ n_iterations = int(nag_result_obj.diagnostic_info["iters_total"].iloc[-1])
+ processed["n_iterations"] = n_iterations
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
diff --git a/src/optimagic/parameters/kernel_transformations.py b/src/optimagic/parameters/kernel_transformations.py
index b4a8540c9..d371b6851 100644
--- a/src/optimagic/parameters/kernel_transformations.py
+++ b/src/optimagic/parameters/kernel_transformations.py
@@ -496,7 +496,7 @@ def _transformation_matrix(dim):
rows = [_unit_vector_or_zeros(i, dim**2) for i in indices]
- transformer = np.row_stack(rows)
+ transformer = np.vstack(rows)
return transformer
diff --git a/src/optimagic/shared/check_option_dicts.py b/src/optimagic/shared/check_option_dicts.py
index 82ace0201..c4c45fcc7 100644
--- a/src/optimagic/shared/check_option_dicts.py
+++ b/src/optimagic/shared/check_option_dicts.py
@@ -41,6 +41,6 @@ def check_optimization_options(options, usage, algorithm_mandatory=True):
msg = (
"The following are not valid entries of optimize_options because they are "
"not only relevant for minimization but also for inference: "
- "{invalid_general}"
+ f"{invalid_general}"
)
raise ValueError(msg)
diff --git a/tests/estimagic/examples/test_logit.py b/tests/estimagic/examples/test_logit.py
index a2c150ad1..03175e68e 100644
--- a/tests/estimagic/examples/test_logit.py
+++ b/tests/estimagic/examples/test_logit.py
@@ -1,8 +1,9 @@
"""Tests for the logit example."""
-from estimagic.examples.logit import logit_grad, logit_hess, logit_jac, logit_loglike
from numpy.testing import assert_array_almost_equal as aaae
+from estimagic.examples.logit import logit_grad, logit_hess, logit_jac, logit_loglike
+
def test_logit_loglikes(logit_inputs, logit_object):
x = logit_inputs["params"]["value"].to_numpy()
diff --git a/tests/estimagic/test_bootstrap.py b/tests/estimagic/test_bootstrap.py
index 726a7d72a..7fa1380e2 100644
--- a/tests/estimagic/test_bootstrap.py
+++ b/tests/estimagic/test_bootstrap.py
@@ -3,6 +3,7 @@
import pytest
import seaborn as sns
import statsmodels.api as sm
+
from estimagic import bootstrap
diff --git a/tests/estimagic/test_bootstrap_ci.py b/tests/estimagic/test_bootstrap_ci.py
index 797fd77df..a6684b3a2 100644
--- a/tests/estimagic/test_bootstrap_ci.py
+++ b/tests/estimagic/test_bootstrap_ci.py
@@ -3,9 +3,10 @@
import numpy as np
import pandas as pd
import pytest
+from pybaum import tree_just_flatten
+
from estimagic.bootstrap_ci import calculate_ci, check_inputs
from optimagic.parameters.tree_registry import get_registry
-from pybaum import tree_just_flatten
def aaae(obj1, obj2, decimal=6):
diff --git a/tests/estimagic/test_bootstrap_outcomes.py b/tests/estimagic/test_bootstrap_outcomes.py
index f3bdd1080..987deaf55 100644
--- a/tests/estimagic/test_bootstrap_outcomes.py
+++ b/tests/estimagic/test_bootstrap_outcomes.py
@@ -3,11 +3,12 @@
import numpy as np
import pandas as pd
import pytest
+from numpy.testing import assert_array_almost_equal as aaae
+
from estimagic.bootstrap_outcomes import (
_get_bootstrap_outcomes_from_indices,
get_bootstrap_outcomes,
)
-from numpy.testing import assert_array_almost_equal as aaae
from optimagic.batch_evaluators import joblib_batch_evaluator
from optimagic.utilities import get_rng
diff --git a/tests/estimagic/test_bootstrap_samples.py b/tests/estimagic/test_bootstrap_samples.py
index a645253a7..3abf5920e 100644
--- a/tests/estimagic/test_bootstrap_samples.py
+++ b/tests/estimagic/test_bootstrap_samples.py
@@ -1,15 +1,16 @@
import numpy as np
import pandas as pd
import pytest
+from numpy.testing import assert_array_equal as aae
+from pandas.testing import assert_frame_equal as afe
+
from estimagic.bootstrap_samples import (
_convert_cluster_ids_to_indices,
_get_bootstrap_samples_from_indices,
get_bootstrap_indices,
get_bootstrap_samples,
)
-from numpy.testing import assert_array_equal as aae
from optimagic.utilities import get_rng
-from pandas.testing import assert_frame_equal as afe
@pytest.fixture()
diff --git a/tests/estimagic/test_estimate_ml.py b/tests/estimagic/test_estimate_ml.py
index e3667a6d2..f3e806311 100644
--- a/tests/estimagic/test_estimate_ml.py
+++ b/tests/estimagic/test_estimate_ml.py
@@ -1,11 +1,15 @@
import itertools
import numpy as np
-import optimagic as om
import pandas as pd
import pytest
import scipy as sp
import statsmodels.api as sm
+from numpy.testing import assert_array_equal
+from scipy.stats import multivariate_normal
+from statsmodels.base.model import GenericLikelihoodModel
+
+import optimagic as om
from estimagic.estimate_ml import estimate_ml
from estimagic.examples.logit import (
logit_hess,
@@ -13,11 +17,9 @@
logit_loglike,
scalar_logit_fun_and_jac,
)
-from numpy.testing import assert_array_equal
from optimagic import mark
+from optimagic.optimizers import scipy_optimizers
from optimagic.parameters.bounds import Bounds
-from scipy.stats import multivariate_normal
-from statsmodels.base.model import GenericLikelihoodModel
def aaae(obj1, obj2, decimal=3):
@@ -348,6 +350,34 @@ def test_estimate_ml_optimize_options_false(fitted_logit_model, logit_np_inputs)
aaae(got.cov(method="jacobian"), fitted_logit_model.covjac, decimal=4)
+def test_estimate_ml_algorithm_type(logit_np_inputs):
+ """Test that estimate_ml computes correct covariances given correct params."""
+ kwargs = {"y": logit_np_inputs["y"], "x": logit_np_inputs["x"]}
+
+ params = pd.DataFrame({"value": logit_np_inputs["params"]})
+
+ estimate_ml(
+ loglike=logit_loglike,
+ params=params,
+ loglike_kwargs=kwargs,
+ optimize_options=scipy_optimizers.ScipyLBFGSB,
+ )
+
+
+def test_estimate_ml_algorithm(logit_np_inputs):
+ """Test that estimate_ml computes correct covariances given correct params."""
+ kwargs = {"y": logit_np_inputs["y"], "x": logit_np_inputs["x"]}
+
+ params = pd.DataFrame({"value": logit_np_inputs["params"]})
+
+ estimate_ml(
+ loglike=logit_loglike,
+ params=params,
+ loglike_kwargs=kwargs,
+ optimize_options=scipy_optimizers.ScipyLBFGSB(stopping_maxfun=10),
+ )
+
+
# ======================================================================================
# Univariate normal case using dict params
# ======================================================================================
diff --git a/tests/estimagic/test_estimate_msm.py b/tests/estimagic/test_estimate_msm.py
index bfda6c957..2684513d9 100644
--- a/tests/estimagic/test_estimate_msm.py
+++ b/tests/estimagic/test_estimate_msm.py
@@ -5,10 +5,12 @@
import numpy as np
import pandas as pd
import pytest
-from estimagic.estimate_msm import estimate_msm
from numpy.testing import assert_array_almost_equal as aaae
from numpy.testing import assert_array_equal
+
+from estimagic.estimate_msm import estimate_msm
from optimagic.optimization.optimize_result import OptimizeResult
+from optimagic.optimizers import scipy_optimizers
from optimagic.shared.check_option_dicts import (
check_optimization_options,
)
@@ -160,6 +162,40 @@ def test_estimate_msm_with_jacobian():
aaae(calculated.cov(), cov_np)
+def test_estimate_msm_with_algorithm_type():
+ start_params = np.array([3, 2, 1])
+ expected_params = np.zeros(3)
+ empirical_moments = _sim_np(expected_params)
+ if isinstance(empirical_moments, dict):
+ empirical_moments = empirical_moments["simulated_moments"]
+
+ estimate_msm(
+ simulate_moments=_sim_np,
+ empirical_moments=empirical_moments,
+ moments_cov=cov_np,
+ params=start_params,
+ optimize_options=scipy_optimizers.ScipyLBFGSB,
+ jacobian=lambda x: np.eye(len(x)),
+ )
+
+
+def test_estimate_msm_with_algorithm():
+ start_params = np.array([3, 2, 1])
+ expected_params = np.zeros(3)
+ empirical_moments = _sim_np(expected_params)
+ if isinstance(empirical_moments, dict):
+ empirical_moments = empirical_moments["simulated_moments"]
+
+ estimate_msm(
+ simulate_moments=_sim_np,
+ empirical_moments=empirical_moments,
+ moments_cov=cov_np,
+ params=start_params,
+ optimize_options=scipy_optimizers.ScipyLBFGSB(stopping_maxfun=10),
+ jacobian=lambda x: np.eye(len(x)),
+ )
+
+
def test_to_pickle(tmp_path):
start_params = np.array([3, 2, 1])
diff --git a/tests/estimagic/test_estimate_msm_dict_params_and_moments.py b/tests/estimagic/test_estimate_msm_dict_params_and_moments.py
index 2852d23eb..b1cbcd250 100644
--- a/tests/estimagic/test_estimate_msm_dict_params_and_moments.py
+++ b/tests/estimagic/test_estimate_msm_dict_params_and_moments.py
@@ -2,11 +2,12 @@
import numpy as np
import pandas as pd
-from estimagic.estimate_msm import estimate_msm
from numpy.testing import assert_array_almost_equal as aaae
-from optimagic.parameters.tree_registry import get_registry
from pybaum import tree_just_flatten
+from estimagic.estimate_msm import estimate_msm
+from optimagic.parameters.tree_registry import get_registry
+
def test_estimate_msm_dict_params_and_moments():
def simulate_moments(params):
diff --git a/tests/estimagic/test_estimation_table.py b/tests/estimagic/test_estimation_table.py
index da1e1aa29..6e1b3baae 100644
--- a/tests/estimagic/test_estimation_table.py
+++ b/tests/estimagic/test_estimation_table.py
@@ -5,6 +5,9 @@
import pandas as pd
import pytest
import statsmodels.api as sm
+from pandas.testing import assert_frame_equal as afe
+from pandas.testing import assert_series_equal as ase
+
from estimagic.config import EXAMPLE_DIR
from estimagic.estimation_table import (
_apply_number_format,
@@ -25,8 +28,6 @@
render_html,
render_latex,
)
-from pandas.testing import assert_frame_equal as afe
-from pandas.testing import assert_series_equal as ase
# ======================================================================================
diff --git a/tests/estimagic/test_lollipop_plot.py b/tests/estimagic/test_lollipop_plot.py
index 06a4552b3..c83e9fb00 100644
--- a/tests/estimagic/test_lollipop_plot.py
+++ b/tests/estimagic/test_lollipop_plot.py
@@ -1,5 +1,6 @@
import numpy as np
import pandas as pd
+
from estimagic.lollipop_plot import lollipop_plot
diff --git a/tests/estimagic/test_ml_covs.py b/tests/estimagic/test_ml_covs.py
index 084a4912e..9d752cf51 100644
--- a/tests/estimagic/test_ml_covs.py
+++ b/tests/estimagic/test_ml_covs.py
@@ -4,6 +4,8 @@
import numpy as np
import pandas as pd
import pytest
+from numpy.testing import assert_array_almost_equal as aaae
+
from estimagic import ml_covs
from estimagic.ml_covs import (
_clustering,
@@ -15,7 +17,6 @@
cov_robust,
cov_strata_robust,
)
-from numpy.testing import assert_array_almost_equal as aaae
@pytest.fixture()
diff --git a/tests/estimagic/test_msm_covs.py b/tests/estimagic/test_msm_covs.py
index a9261184a..a72501e63 100644
--- a/tests/estimagic/test_msm_covs.py
+++ b/tests/estimagic/test_msm_covs.py
@@ -3,11 +3,12 @@
import numpy as np
import pandas as pd
import pytest
-from estimagic.msm_covs import cov_optimal, cov_robust
from numpy.testing import assert_array_almost_equal as aaae
-from optimagic.utilities import get_rng
from pandas.testing import assert_frame_equal
+from estimagic.msm_covs import cov_optimal, cov_robust
+from optimagic.utilities import get_rng
+
rng = get_rng(seed=1234)
jac_np = rng.uniform(size=(10, 5))
diff --git a/tests/estimagic/test_msm_sensitivity.py b/tests/estimagic/test_msm_sensitivity.py
index 92a438422..fd2d0c1da 100644
--- a/tests/estimagic/test_msm_sensitivity.py
+++ b/tests/estimagic/test_msm_sensitivity.py
@@ -1,6 +1,9 @@
import numpy as np
import pandas as pd
import pytest
+from numpy.testing import assert_array_almost_equal as aaae
+from scipy import stats
+
from estimagic.config import EXAMPLE_DIR
from estimagic.msm_covs import cov_optimal
from estimagic.msm_sensitivity import (
@@ -11,9 +14,7 @@
calculate_sensitivity_to_bias,
calculate_sensitivity_to_weighting,
)
-from numpy.testing import assert_array_almost_equal as aaae
from optimagic.differentiation.derivatives import first_derivative
-from scipy import stats
def simulate_aggregated_moments(params, x, y):
diff --git a/tests/estimagic/test_msm_sensitivity_via_estimate_msm.py b/tests/estimagic/test_msm_sensitivity_via_estimate_msm.py
index 994b6c526..98d38ec7e 100644
--- a/tests/estimagic/test_msm_sensitivity_via_estimate_msm.py
+++ b/tests/estimagic/test_msm_sensitivity_via_estimate_msm.py
@@ -1,11 +1,12 @@
import numpy as np
import pandas as pd
import pytest
-from estimagic.config import EXAMPLE_DIR
-from estimagic.estimate_msm import estimate_msm
from numpy.testing import assert_array_almost_equal as aaae
from scipy import stats
+from estimagic.config import EXAMPLE_DIR
+from estimagic.estimate_msm import estimate_msm
+
def simulate_aggregated_moments(params, x, y):
"""Calculate aggregated moments for example from Honore, DePaula, Jorgensen."""
diff --git a/tests/estimagic/test_msm_weighting.py b/tests/estimagic/test_msm_weighting.py
index 18fff92fd..ac8255607 100644
--- a/tests/estimagic/test_msm_weighting.py
+++ b/tests/estimagic/test_msm_weighting.py
@@ -3,12 +3,13 @@
import numpy as np
import pandas as pd
import pytest
+from numpy.testing import assert_array_almost_equal as aaae
+
from estimagic.msm_weighting import (
_assemble_block_diagonal_matrix,
get_moments_cov,
get_weighting_matrix,
)
-from numpy.testing import assert_array_almost_equal as aaae
from optimagic.parameters.block_trees import block_tree_to_matrix
from optimagic.utilities import get_rng
diff --git a/tests/estimagic/test_shared.py b/tests/estimagic/test_shared.py
index 9aaaa9ae7..9a4240c74 100644
--- a/tests/estimagic/test_shared.py
+++ b/tests/estimagic/test_shared.py
@@ -3,6 +3,9 @@
import numpy as np
import pandas as pd
import pytest
+from numpy.testing import assert_array_almost_equal as aaae
+from pybaum import leaf_names, tree_equal
+
from estimagic.shared_covs import (
_to_numpy,
calculate_estimation_summary,
@@ -12,10 +15,8 @@
transform_free_cov_to_cov,
transform_free_values_to_params_tree,
)
-from numpy.testing import assert_array_almost_equal as aaae
from optimagic.parameters.tree_registry import get_registry
from optimagic.utilities import get_rng
-from pybaum import leaf_names, tree_equal
@pytest.fixture()
diff --git a/tests/optimagic/benchmarking/test_benchmark_reports.py b/tests/optimagic/benchmarking/test_benchmark_reports.py
index 29cead39a..d069966bd 100644
--- a/tests/optimagic/benchmarking/test_benchmark_reports.py
+++ b/tests/optimagic/benchmarking/test_benchmark_reports.py
@@ -2,6 +2,7 @@
import numpy as np
import pytest
+
from optimagic import (
OptimizeResult,
convergence_report,
diff --git a/tests/optimagic/benchmarking/test_cartis_roberts.py b/tests/optimagic/benchmarking/test_cartis_roberts.py
index 8fdf71998..c399d89d7 100644
--- a/tests/optimagic/benchmarking/test_cartis_roberts.py
+++ b/tests/optimagic/benchmarking/test_cartis_roberts.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal
+
from optimagic.benchmarking.cartis_roberts import (
CARTIS_ROBERTS_PROBLEMS,
get_start_points_bdvalues,
diff --git a/tests/optimagic/benchmarking/test_get_benchmark_problems.py b/tests/optimagic/benchmarking/test_get_benchmark_problems.py
index 821d77deb..134ae4f41 100644
--- a/tests/optimagic/benchmarking/test_get_benchmark_problems.py
+++ b/tests/optimagic/benchmarking/test_get_benchmark_problems.py
@@ -2,6 +2,7 @@
import numpy as np
import pytest
+
from optimagic.benchmarking.get_benchmark_problems import (
_step_func,
get_benchmark_problems,
diff --git a/tests/optimagic/benchmarking/test_more_wild.py b/tests/optimagic/benchmarking/test_more_wild.py
index b7427cca0..bb3515574 100644
--- a/tests/optimagic/benchmarking/test_more_wild.py
+++ b/tests/optimagic/benchmarking/test_more_wild.py
@@ -1,5 +1,6 @@
import numpy as np
import pytest
+
from optimagic.benchmarking.more_wild import (
MORE_WILD_PROBLEMS,
get_start_points_mancino,
diff --git a/tests/optimagic/benchmarking/test_noise_distributions.py b/tests/optimagic/benchmarking/test_noise_distributions.py
index d8e2183ad..f3d549a36 100644
--- a/tests/optimagic/benchmarking/test_noise_distributions.py
+++ b/tests/optimagic/benchmarking/test_noise_distributions.py
@@ -1,6 +1,7 @@
import numpy as np
import pandas as pd
import pytest
+
from optimagic.benchmarking.get_benchmark_problems import _sample_from_distribution
from optimagic.benchmarking.noise_distributions import NOISE_DISTRIBUTIONS
from optimagic.utilities import get_rng
diff --git a/tests/optimagic/benchmarking/test_run_benchmark.py b/tests/optimagic/benchmarking/test_run_benchmark.py
index 26e827bbb..d501dd17e 100644
--- a/tests/optimagic/benchmarking/test_run_benchmark.py
+++ b/tests/optimagic/benchmarking/test_run_benchmark.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic import get_benchmark_problems
from optimagic.benchmarking.run_benchmark import run_benchmark
diff --git a/tests/optimagic/differentiation/test_compare_derivatives_with_jax.py b/tests/optimagic/differentiation/test_compare_derivatives_with_jax.py
index ffa08e388..87b5554d8 100644
--- a/tests/optimagic/differentiation/test_compare_derivatives_with_jax.py
+++ b/tests/optimagic/differentiation/test_compare_derivatives_with_jax.py
@@ -7,9 +7,10 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+from pybaum import tree_equal
+
from optimagic.config import IS_JAX_INSTALLED
from optimagic.differentiation.derivatives import first_derivative, second_derivative
-from pybaum import tree_equal
if not IS_JAX_INSTALLED:
pytestmark = pytest.mark.skip(reason="jax is not installed.")
diff --git a/tests/optimagic/differentiation/test_derivatives.py b/tests/optimagic/differentiation/test_derivatives.py
index cfdd73197..670c1c0ab 100644
--- a/tests/optimagic/differentiation/test_derivatives.py
+++ b/tests/optimagic/differentiation/test_derivatives.py
@@ -7,6 +7,9 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+from pandas.testing import assert_frame_equal
+from scipy.optimize._numdiff import approx_derivative
+
from optimagic.differentiation.derivatives import (
Evals,
NumdiffResult,
@@ -31,8 +34,6 @@
logit_loglikeobs_jacobian,
)
from optimagic.parameters.bounds import Bounds
-from pandas.testing import assert_frame_equal
-from scipy.optimize._numdiff import approx_derivative
@pytest.fixture()
diff --git a/tests/optimagic/differentiation/test_finite_differences.py b/tests/optimagic/differentiation/test_finite_differences.py
index 11f4d676a..3e55aabfb 100644
--- a/tests/optimagic/differentiation/test_finite_differences.py
+++ b/tests/optimagic/differentiation/test_finite_differences.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.differentiation.derivatives import Evals
from optimagic.differentiation.finite_differences import jacobian
from optimagic.differentiation.generate_steps import Steps
diff --git a/tests/optimagic/differentiation/test_generate_steps.py b/tests/optimagic/differentiation/test_generate_steps.py
index 4999ceb40..bf85261b1 100644
--- a/tests/optimagic/differentiation/test_generate_steps.py
+++ b/tests/optimagic/differentiation/test_generate_steps.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.differentiation.generate_steps import (
_calculate_or_validate_base_steps,
_fillna,
diff --git a/tests/optimagic/differentiation/test_numdiff_options.py b/tests/optimagic/differentiation/test_numdiff_options.py
index b4245aeab..9ffc37616 100644
--- a/tests/optimagic/differentiation/test_numdiff_options.py
+++ b/tests/optimagic/differentiation/test_numdiff_options.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic.differentiation.numdiff_options import (
NumdiffOptions,
pre_process_numdiff_options,
diff --git a/tests/optimagic/examples/test_criterion_functions.py b/tests/optimagic/examples/test_criterion_functions.py
index 8fdf5d038..b709b38dc 100644
--- a/tests/optimagic/examples/test_criterion_functions.py
+++ b/tests/optimagic/examples/test_criterion_functions.py
@@ -3,6 +3,8 @@
import pytest
from numpy.testing import assert_array_almost_equal as aaae
from numpy.testing import assert_array_equal
+from pandas.testing import assert_frame_equal
+
from optimagic.examples.criterion_functions import (
rhe_fun_and_gradient,
rhe_function_value,
@@ -26,7 +28,6 @@
trid_scalar,
)
from optimagic.optimization.fun_value import FunctionValue
-from pandas.testing import assert_frame_equal
TRID_GRAD = pd.DataFrame({"value": [7, 1, -6, 11, -19.0]})
RHE_GRAD = pd.DataFrame({"value": [90, 72, 36, 28, -10.0]})
diff --git a/tests/optimagic/logging/test_base.py b/tests/optimagic/logging/test_base.py
index be3f123d6..1d903cecd 100644
--- a/tests/optimagic/logging/test_base.py
+++ b/tests/optimagic/logging/test_base.py
@@ -1,6 +1,7 @@
from dataclasses import dataclass
import pytest
+
from optimagic.logging.base import InputType, NonUpdatableKeyValueStore, OutputType
from optimagic.typing import DictLikeAccess
diff --git a/tests/optimagic/logging/test_logger.py b/tests/optimagic/logging/test_logger.py
index 87f37caeb..ff099d55f 100644
--- a/tests/optimagic/logging/test_logger.py
+++ b/tests/optimagic/logging/test_logger.py
@@ -3,6 +3,8 @@
import numpy as np
import pandas as pd
import pytest
+from pybaum import tree_equal, tree_just_flatten
+
from optimagic.logging.logger import (
LogOptions,
LogReader,
@@ -13,7 +15,6 @@
from optimagic.optimization.optimize import minimize
from optimagic.parameters.tree_registry import get_registry
from optimagic.typing import Direction
-from pybaum import tree_equal, tree_just_flatten
@pytest.fixture()
diff --git a/tests/optimagic/logging/test_sqlalchemy.py b/tests/optimagic/logging/test_sqlalchemy.py
index 589a66c9d..49571fcf6 100644
--- a/tests/optimagic/logging/test_sqlalchemy.py
+++ b/tests/optimagic/logging/test_sqlalchemy.py
@@ -4,6 +4,8 @@
import numpy as np
import pytest
+from sqlalchemy import inspect
+
from optimagic.logging import ExistenceStrategy
from optimagic.logging.logger import LogStore, SQLiteLogOptions
from optimagic.logging.sqlalchemy import IterationStore, StepStore
@@ -13,7 +15,6 @@
StepStatus,
StepType,
)
-from sqlalchemy import inspect
class TestIterationStore:
diff --git a/tests/optimagic/logging/test_types.py b/tests/optimagic/logging/test_types.py
index 466a5e278..0c43ee7a8 100644
--- a/tests/optimagic/logging/test_types.py
+++ b/tests/optimagic/logging/test_types.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic.logging.types import (
IterationStateWithId,
ProblemInitializationWithId,
diff --git a/tests/optimagic/optimization/test_algorithm.py b/tests/optimagic/optimization/test_algorithm.py
index 51cf1e2b3..7d78c02da 100644
--- a/tests/optimagic/optimization/test_algorithm.py
+++ b/tests/optimagic/optimization/test_algorithm.py
@@ -2,6 +2,7 @@
import numpy as np
import pytest
+
from optimagic.exceptions import InvalidAlgoInfoError, InvalidAlgoOptionError
from optimagic.optimization.algorithm import AlgoInfo, Algorithm, InternalOptimizeResult
from optimagic.optimization.history import HistoryEntry
diff --git a/tests/optimagic/optimization/test_convergence_report.py b/tests/optimagic/optimization/test_convergence_report.py
index ac7148dc7..058698a58 100644
--- a/tests/optimagic/optimization/test_convergence_report.py
+++ b/tests/optimagic/optimization/test_convergence_report.py
@@ -1,6 +1,7 @@
import numpy as np
import pandas as pd
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimization.convergence_report import get_convergence_report
from optimagic.typing import Direction
diff --git a/tests/optimagic/optimization/test_create_optimization_problem.py b/tests/optimagic/optimization/test_create_optimization_problem.py
index fb34728f3..48b3d8a5d 100644
--- a/tests/optimagic/optimization/test_create_optimization_problem.py
+++ b/tests/optimagic/optimization/test_create_optimization_problem.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic.optimization.create_optimization_problem import (
pre_process_user_algorithm,
)
diff --git a/tests/optimagic/optimization/test_error_penalty.py b/tests/optimagic/optimization/test_error_penalty.py
index 4f23bf962..d1100cf31 100644
--- a/tests/optimagic/optimization/test_error_penalty.py
+++ b/tests/optimagic/optimization/test_error_penalty.py
@@ -3,6 +3,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.differentiation.derivatives import first_derivative
from optimagic.optimization.error_penalty import (
_likelihood_penalty,
diff --git a/tests/optimagic/optimization/test_fun_value.py b/tests/optimagic/optimization/test_fun_value.py
index fd5ef9ea5..a6de8c611 100644
--- a/tests/optimagic/optimization/test_fun_value.py
+++ b/tests/optimagic/optimization/test_fun_value.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_almost_equal as aae
+
from optimagic.exceptions import InvalidFunctionError
from optimagic.optimization.fun_value import (
FunctionValue,
diff --git a/tests/optimagic/optimization/test_function_formats_ls.py b/tests/optimagic/optimization/test_function_formats_ls.py
index 7abcb361f..83e087404 100644
--- a/tests/optimagic/optimization/test_function_formats_ls.py
+++ b/tests/optimagic/optimization/test_function_formats_ls.py
@@ -7,6 +7,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic import mark, maximize, minimize
from optimagic.exceptions import InvalidFunctionError
from optimagic.optimization.fun_value import (
diff --git a/tests/optimagic/optimization/test_function_formats_scalar.py b/tests/optimagic/optimization/test_function_formats_scalar.py
index 36e845267..2134bf8a8 100644
--- a/tests/optimagic/optimization/test_function_formats_scalar.py
+++ b/tests/optimagic/optimization/test_function_formats_scalar.py
@@ -4,6 +4,7 @@
import pytest
from numpy.testing import assert_array_almost_equal as aaae
from numpy.typing import NDArray
+
from optimagic import mark, maximize, minimize
from optimagic.exceptions import InvalidFunctionError
from optimagic.optimization.fun_value import FunctionValue, ScalarFunctionValue
diff --git a/tests/optimagic/optimization/test_history.py b/tests/optimagic/optimization/test_history.py
index c7fd3623a..bd6dae3dd 100644
--- a/tests/optimagic/optimization/test_history.py
+++ b/tests/optimagic/optimization/test_history.py
@@ -1,5 +1,6 @@
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimization.history import History, HistoryEntry
from optimagic.typing import EvalTask
diff --git a/tests/optimagic/optimization/test_history_collection.py b/tests/optimagic/optimization/test_history_collection.py
index 8ac2edd18..b94d097db 100644
--- a/tests/optimagic/optimization/test_history_collection.py
+++ b/tests/optimagic/optimization/test_history_collection.py
@@ -5,6 +5,7 @@
import pytest
from numpy.testing import assert_array_almost_equal as aaae
from numpy.testing import assert_array_equal as aae
+
from optimagic import SQLiteLogReader, mark
from optimagic.algorithms import AVAILABLE_ALGORITHMS
from optimagic.logging import SQLiteLogOptions
diff --git a/tests/optimagic/optimization/test_history_tools.py b/tests/optimagic/optimization/test_history_tools.py
index 2bfcbac2c..4b4f4d100 100644
--- a/tests/optimagic/optimization/test_history_tools.py
+++ b/tests/optimagic/optimization/test_history_tools.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimization.history_tools import get_history_arrays
from optimagic.typing import Direction
diff --git a/tests/optimagic/optimization/test_internal_optimization_problem.py b/tests/optimagic/optimization/test_internal_optimization_problem.py
index 0526f2b84..a0bb24a25 100644
--- a/tests/optimagic/optimization/test_internal_optimization_problem.py
+++ b/tests/optimagic/optimization/test_internal_optimization_problem.py
@@ -3,6 +3,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic import NumdiffOptions
from optimagic.batch_evaluators import process_batch_evaluator
from optimagic.config import CRITERION_PENALTY_CONSTANT, CRITERION_PENALTY_SLOPE
diff --git a/tests/optimagic/optimization/test_jax_derivatives.py b/tests/optimagic/optimization/test_jax_derivatives.py
index 94bd7fbc9..92433afae 100644
--- a/tests/optimagic/optimization/test_jax_derivatives.py
+++ b/tests/optimagic/optimization/test_jax_derivatives.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.config import IS_JAX_INSTALLED
from optimagic.optimization.optimize import minimize
diff --git a/tests/optimagic/optimization/test_many_algorithms.py b/tests/optimagic/optimization/test_many_algorithms.py
index 30abc86a4..1882aeb08 100644
--- a/tests/optimagic/optimization/test_many_algorithms.py
+++ b/tests/optimagic/optimization/test_many_algorithms.py
@@ -10,6 +10,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic import mark
from optimagic.algorithms import AVAILABLE_ALGORITHMS, GLOBAL_ALGORITHMS
from optimagic.optimization.optimize import minimize
@@ -85,3 +86,15 @@ def test_global_algorithms_on_sum_of_squares(algorithm):
)
assert res.success in [True, None]
aaae(res.params, np.array([0.2, 0]), decimal=1)
+
+
+def test_nag_dfols_starting_at_optimum():
+ # From issue: https://github.com/optimagic-dev/optimagic/issues/538
+ params = np.zeros(2, dtype=float)
+ res = minimize(
+ fun=sos,
+ params=params,
+ algorithm="nag_dfols",
+ bounds=Bounds(-1 * np.ones_like(params), np.ones_like(params)),
+ )
+ aaae(res.params, params)
diff --git a/tests/optimagic/optimization/test_multistart.py b/tests/optimagic/optimization/test_multistart.py
index c06b74909..06ec00236 100644
--- a/tests/optimagic/optimization/test_multistart.py
+++ b/tests/optimagic/optimization/test_multistart.py
@@ -5,6 +5,7 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimization.algorithm import InternalOptimizeResult
from optimagic.optimization.multistart import (
_draw_exploration_sample,
diff --git a/tests/optimagic/optimization/test_multistart_options.py b/tests/optimagic/optimization/test_multistart_options.py
index 20785fdb9..a8f356fb8 100644
--- a/tests/optimagic/optimization/test_multistart_options.py
+++ b/tests/optimagic/optimization/test_multistart_options.py
@@ -1,5 +1,6 @@
import numpy as np
import pytest
+
from optimagic.exceptions import InvalidMultistartError
from optimagic.optimization.multistart_options import (
MultistartOptions,
diff --git a/tests/optimagic/optimization/test_optimize.py b/tests/optimagic/optimization/test_optimize.py
index e3f565bc0..7e4cb4184 100644
--- a/tests/optimagic/optimization/test_optimize.py
+++ b/tests/optimagic/optimization/test_optimize.py
@@ -3,6 +3,7 @@
import numpy as np
import pandas as pd
import pytest
+
from optimagic.examples.criterion_functions import sos_scalar
from optimagic.exceptions import InvalidFunctionError, InvalidNumdiffOptionsError
from optimagic.optimization.optimize import maximize, minimize
diff --git a/tests/optimagic/optimization/test_optimize_result.py b/tests/optimagic/optimization/test_optimize_result.py
index 24100bf6b..6716252c0 100644
--- a/tests/optimagic/optimization/test_optimize_result.py
+++ b/tests/optimagic/optimization/test_optimize_result.py
@@ -1,6 +1,7 @@
import numpy as np
import pandas as pd
import pytest
+
from optimagic.optimization.optimize_result import OptimizeResult, _create_stars
from optimagic.utilities import get_rng
diff --git a/tests/optimagic/optimization/test_params_versions.py b/tests/optimagic/optimization/test_params_versions.py
index 9aab8d861..f3399cb12 100644
--- a/tests/optimagic/optimization/test_params_versions.py
+++ b/tests/optimagic/optimization/test_params_versions.py
@@ -2,6 +2,8 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+from pybaum import tree_just_flatten
+
from optimagic.examples.criterion_functions import (
sos_gradient,
sos_ls,
@@ -10,7 +12,6 @@
)
from optimagic.optimization.optimize import minimize
from optimagic.parameters.tree_registry import get_registry
-from pybaum import tree_just_flatten
REGISTRY = get_registry(extended=True)
diff --git a/tests/optimagic/optimization/test_pygmo_optimizers.py b/tests/optimagic/optimization/test_pygmo_optimizers.py
index 751ac3cc0..a615ead31 100644
--- a/tests/optimagic/optimization/test_pygmo_optimizers.py
+++ b/tests/optimagic/optimization/test_pygmo_optimizers.py
@@ -2,6 +2,7 @@
import numpy as np
import pytest
+
from optimagic.optimizers.pygmo_optimizers import (
_convert_str_to_int,
get_population_size,
diff --git a/tests/optimagic/optimization/test_scipy_aliases.py b/tests/optimagic/optimization/test_scipy_aliases.py
index fe8e5984c..0402d596a 100644
--- a/tests/optimagic/optimization/test_scipy_aliases.py
+++ b/tests/optimagic/optimization/test_scipy_aliases.py
@@ -1,7 +1,8 @@
import numpy as np
-import optimagic as om
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
+import optimagic as om
from optimagic.exceptions import AliasError
diff --git a/tests/optimagic/optimization/test_useful_exceptions.py b/tests/optimagic/optimization/test_useful_exceptions.py
index fa5e3c470..e972db761 100644
--- a/tests/optimagic/optimization/test_useful_exceptions.py
+++ b/tests/optimagic/optimization/test_useful_exceptions.py
@@ -1,6 +1,7 @@
import numpy as np
import pandas as pd
import pytest
+
from optimagic.exceptions import (
InvalidFunctionError,
InvalidKwargsError,
diff --git a/tests/optimagic/optimization/test_with_advanced_constraints.py b/tests/optimagic/optimization/test_with_advanced_constraints.py
index 3d6c6432d..df3372199 100644
--- a/tests/optimagic/optimization/test_with_advanced_constraints.py
+++ b/tests/optimagic/optimization/test_with_advanced_constraints.py
@@ -9,10 +9,11 @@
import itertools
import numpy as np
-import optimagic as om
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
+import optimagic as om
from optimagic.examples.criterion_functions import sos_gradient, sos_scalar
from optimagic.optimization.optimize import minimize
diff --git a/tests/optimagic/optimization/test_with_bounds.py b/tests/optimagic/optimization/test_with_bounds.py
index 487feaba2..ed29665f9 100644
--- a/tests/optimagic/optimization/test_with_bounds.py
+++ b/tests/optimagic/optimization/test_with_bounds.py
@@ -1,7 +1,8 @@
import numpy as np
-from optimagic.optimization.optimize import maximize, minimize
from scipy.optimize import Bounds as ScipyBounds
+from optimagic.optimization.optimize import maximize, minimize
+
def test_minimize_with_scipy_bounds():
minimize(
diff --git a/tests/optimagic/optimization/test_with_constraints.py b/tests/optimagic/optimization/test_with_constraints.py
index 832e5ec81..8c2836857 100644
--- a/tests/optimagic/optimization/test_with_constraints.py
+++ b/tests/optimagic/optimization/test_with_constraints.py
@@ -10,11 +10,12 @@
from copy import deepcopy
import numpy as np
-import optimagic as om
import pandas as pd
import pytest
import statsmodels.api as sm
from numpy.testing import assert_array_almost_equal as aaae
+
+import optimagic as om
from optimagic import mark
from optimagic.examples.criterion_functions import (
rhe_function_value,
diff --git a/tests/optimagic/optimization/test_with_logging.py b/tests/optimagic/optimization/test_with_logging.py
index 94fb8aed9..b279f5202 100644
--- a/tests/optimagic/optimization/test_with_logging.py
+++ b/tests/optimagic/optimization/test_with_logging.py
@@ -11,6 +11,8 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+from pybaum import tree_just_flatten
+
from optimagic import mark
from optimagic.examples.criterion_functions import (
sos_derivatives,
@@ -20,7 +22,6 @@
from optimagic.logging.types import ExistenceStrategy
from optimagic.optimization.optimize import minimize
from optimagic.parameters.tree_registry import get_registry
-from pybaum import tree_just_flatten
@mark.least_squares
diff --git a/tests/optimagic/optimization/test_with_multistart.py b/tests/optimagic/optimization/test_with_multistart.py
index 332de6447..d9f8325d7 100644
--- a/tests/optimagic/optimization/test_with_multistart.py
+++ b/tests/optimagic/optimization/test_with_multistart.py
@@ -1,10 +1,11 @@
import functools
import numpy as np
-import optimagic as om
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
+import optimagic as om
from optimagic.examples.criterion_functions import (
sos_ls,
sos_scalar,
diff --git a/tests/optimagic/optimization/test_with_nonlinear_constraints.py b/tests/optimagic/optimization/test_with_nonlinear_constraints.py
index 59e036e5f..bc28a2dcf 100644
--- a/tests/optimagic/optimization/test_with_nonlinear_constraints.py
+++ b/tests/optimagic/optimization/test_with_nonlinear_constraints.py
@@ -2,9 +2,10 @@
import warnings
import numpy as np
-import optimagic as om
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
+import optimagic as om
from optimagic import maximize, minimize
from optimagic.algorithms import AVAILABLE_ALGORITHMS
from optimagic.config import IS_CYIPOPT_INSTALLED
@@ -40,7 +41,7 @@ def constraint_func(x):
return np.array([value - 1, 2 - value])
def constraint_jac(x):
- return 2 * np.row_stack((x.reshape(1, -1), -x.reshape(1, -1)))
+ return 2 * np.vstack((x.reshape(1, -1), -x.reshape(1, -1)))
constraints_long = om.NonlinearConstraint(
func=constraint_func,
diff --git a/tests/optimagic/optimization/test_with_scaling.py b/tests/optimagic/optimization/test_with_scaling.py
index 8d9f378ef..1cddd6df8 100644
--- a/tests/optimagic/optimization/test_with_scaling.py
+++ b/tests/optimagic/optimization/test_with_scaling.py
@@ -1,6 +1,7 @@
import numpy as np
-import optimagic as om
from numpy.testing import assert_array_almost_equal as aaae
+
+import optimagic as om
from optimagic.optimization.optimize import maximize, minimize
from optimagic.parameters.scaling import ScalingOptions
diff --git a/tests/optimagic/optimizers/_pounders/test_linear_subsolvers.py b/tests/optimagic/optimizers/_pounders/test_linear_subsolvers.py
index 3fa78a9f8..a6f5bf43b 100644
--- a/tests/optimagic/optimizers/_pounders/test_linear_subsolvers.py
+++ b/tests/optimagic/optimizers/_pounders/test_linear_subsolvers.py
@@ -5,6 +5,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimizers._pounders.linear_subsolvers import (
LinearModel,
improve_geomtery_trsbox_linear,
diff --git a/tests/optimagic/optimizers/_pounders/test_pounders_history.py b/tests/optimagic/optimizers/_pounders/test_pounders_history.py
index 38d3d3711..f02d7df99 100644
--- a/tests/optimagic/optimizers/_pounders/test_pounders_history.py
+++ b/tests/optimagic/optimizers/_pounders/test_pounders_history.py
@@ -3,6 +3,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimizers._pounders.pounders_history import LeastSquaresHistory
ENTRIES = [
diff --git a/tests/optimagic/optimizers/_pounders/test_pounders_unit.py b/tests/optimagic/optimizers/_pounders/test_pounders_unit.py
index e17ffe262..fc60e850d 100644
--- a/tests/optimagic/optimizers/_pounders/test_pounders_unit.py
+++ b/tests/optimagic/optimizers/_pounders/test_pounders_unit.py
@@ -9,6 +9,7 @@
import pytest
import yaml
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimizers._pounders.pounders_auxiliary import (
add_geomtery_points_to_make_main_model_fully_linear,
create_initial_residual_model,
diff --git a/tests/optimagic/optimizers/_pounders/test_quadratic_subsolvers.py b/tests/optimagic/optimizers/_pounders/test_quadratic_subsolvers.py
index c7a890f8a..5a3818d0f 100644
--- a/tests/optimagic/optimizers/_pounders/test_quadratic_subsolvers.py
+++ b/tests/optimagic/optimizers/_pounders/test_quadratic_subsolvers.py
@@ -3,6 +3,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.optimizers._pounders._conjugate_gradient import (
minimize_trust_cg,
)
diff --git a/tests/optimagic/optimizers/test_bhhh.py b/tests/optimagic/optimizers/test_bhhh.py
index d686f5375..14bdd0f31 100644
--- a/tests/optimagic/optimizers/test_bhhh.py
+++ b/tests/optimagic/optimizers/test_bhhh.py
@@ -6,10 +6,11 @@
import pytest
import statsmodels.api as sm
from numpy.testing import assert_array_almost_equal as aaae
+from scipy.stats import norm
+
from optimagic import mark, minimize
from optimagic.optimizers.bhhh import bhhh_internal
from optimagic.utilities import get_rng
-from scipy.stats import norm
def generate_test_data():
diff --git a/tests/optimagic/optimizers/test_fides_options.py b/tests/optimagic/optimizers/test_fides_options.py
index cb7b020be..44c8caa5b 100644
--- a/tests/optimagic/optimizers/test_fides_options.py
+++ b/tests/optimagic/optimizers/test_fides_options.py
@@ -3,12 +3,14 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.config import IS_FIDES_INSTALLED
from optimagic.optimization.optimize import minimize
from optimagic.parameters.bounds import Bounds
if IS_FIDES_INSTALLED:
from fides.hessian_approximation import FX, SR1, Broyden
+
from optimagic.optimizers.fides import Fides
else:
FX = lambda: None
diff --git a/tests/optimagic/optimizers/test_ipopt_options.py b/tests/optimagic/optimizers/test_ipopt_options.py
index 24ffdf60c..888392a20 100644
--- a/tests/optimagic/optimizers/test_ipopt_options.py
+++ b/tests/optimagic/optimizers/test_ipopt_options.py
@@ -3,6 +3,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.config import IS_CYIPOPT_INSTALLED
from optimagic.optimization.optimize import minimize
from optimagic.optimizers.ipopt import Ipopt
diff --git a/tests/optimagic/optimizers/test_nag_optimizers.py b/tests/optimagic/optimizers/test_nag_optimizers.py
index 5562d5333..b01f06123 100644
--- a/tests/optimagic/optimizers/test_nag_optimizers.py
+++ b/tests/optimagic/optimizers/test_nag_optimizers.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic.optimizers.nag_optimizers import (
_build_options_dict,
_change_evals_per_point_interface,
diff --git a/tests/optimagic/optimizers/test_neldermead.py b/tests/optimagic/optimizers/test_neldermead.py
index 62ece7a81..8ece1e830 100644
--- a/tests/optimagic/optimizers/test_neldermead.py
+++ b/tests/optimagic/optimizers/test_neldermead.py
@@ -1,5 +1,6 @@
import numpy as np
import pytest
+
from optimagic.optimizers.neldermead import (
_gao_han,
_init_algo_params,
diff --git a/tests/optimagic/optimizers/test_pounders_integration.py b/tests/optimagic/optimizers/test_pounders_integration.py
index b87160865..c83a697bd 100644
--- a/tests/optimagic/optimizers/test_pounders_integration.py
+++ b/tests/optimagic/optimizers/test_pounders_integration.py
@@ -8,8 +8,8 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
-from optimagic.optimizers.pounders import internal_solve_pounders
+from optimagic.optimizers.pounders import internal_solve_pounders
from tests.optimagic.optimizers._pounders.test_pounders_unit import FIXTURES_DIR
diff --git a/tests/optimagic/optimizers/test_tao_optimizers.py b/tests/optimagic/optimizers/test_tao_optimizers.py
index c2e560a6c..897f8347a 100644
--- a/tests/optimagic/optimizers/test_tao_optimizers.py
+++ b/tests/optimagic/optimizers/test_tao_optimizers.py
@@ -5,6 +5,7 @@
import numpy as np
import pandas as pd
import pytest
+
from optimagic.config import IS_PETSC4PY_INSTALLED
from optimagic.optimization.optimize import minimize
from optimagic.utilities import get_rng
diff --git a/tests/optimagic/parameters/test_block_trees.py b/tests/optimagic/parameters/test_block_trees.py
index 27642854d..08b2307cd 100644
--- a/tests/optimagic/parameters/test_block_trees.py
+++ b/tests/optimagic/parameters/test_block_trees.py
@@ -2,6 +2,9 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_equal
+from pybaum import tree_equal
+from pybaum import tree_just_flatten as tree_leaves
+
from optimagic import second_derivative
from optimagic.parameters.block_trees import (
block_tree_to_hessian,
@@ -10,8 +13,6 @@
matrix_to_block_tree,
)
from optimagic.parameters.tree_registry import get_registry
-from pybaum import tree_equal
-from pybaum import tree_just_flatten as tree_leaves
def test_matrix_to_block_tree_array_and_scalar():
diff --git a/tests/optimagic/parameters/test_bounds.py b/tests/optimagic/parameters/test_bounds.py
index ccb3ca959..791706d56 100644
--- a/tests/optimagic/parameters/test_bounds.py
+++ b/tests/optimagic/parameters/test_bounds.py
@@ -2,6 +2,7 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_equal
+
from optimagic.exceptions import InvalidBoundsError
from optimagic.parameters.bounds import Bounds, get_internal_bounds, pre_process_bounds
diff --git a/tests/optimagic/parameters/test_check_constraints.py b/tests/optimagic/parameters/test_check_constraints.py
index 79f3ea121..85952336f 100644
--- a/tests/optimagic/parameters/test_check_constraints.py
+++ b/tests/optimagic/parameters/test_check_constraints.py
@@ -1,6 +1,7 @@
import numpy as np
-import optimagic as om
import pytest
+
+import optimagic as om
from optimagic.exceptions import InvalidParamsError
from optimagic.parameters.check_constraints import _iloc
from optimagic.parameters.constraint_tools import check_constraints
diff --git a/tests/optimagic/parameters/test_constraint_tools.py b/tests/optimagic/parameters/test_constraint_tools.py
index 3b2151762..29e511c6d 100644
--- a/tests/optimagic/parameters/test_constraint_tools.py
+++ b/tests/optimagic/parameters/test_constraint_tools.py
@@ -1,5 +1,6 @@
-import optimagic as om
import pytest
+
+import optimagic as om
from optimagic.exceptions import InvalidParamsError
from optimagic.parameters.constraint_tools import check_constraints, count_free_params
diff --git a/tests/optimagic/parameters/test_conversion.py b/tests/optimagic/parameters/test_conversion.py
index f03dbbe99..da4942443 100644
--- a/tests/optimagic/parameters/test_conversion.py
+++ b/tests/optimagic/parameters/test_conversion.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.parameters.bounds import Bounds
from optimagic.parameters.conversion import (
_is_fast_deriv_eval,
diff --git a/tests/optimagic/parameters/test_kernel_transformations.py b/tests/optimagic/parameters/test_kernel_transformations.py
index a7aab19ee..0c81599cf 100644
--- a/tests/optimagic/parameters/test_kernel_transformations.py
+++ b/tests/optimagic/parameters/test_kernel_transformations.py
@@ -2,9 +2,10 @@
from itertools import product
import numpy as np
-import optimagic.parameters.kernel_transformations as kt
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
+import optimagic.parameters.kernel_transformations as kt
from optimagic.differentiation.derivatives import first_derivative
from optimagic.parameters.kernel_transformations import cov_matrix_to_sdcorr_params
from optimagic.utilities import get_rng
diff --git a/tests/optimagic/parameters/test_nonlinear_constraints.py b/tests/optimagic/parameters/test_nonlinear_constraints.py
index 44c607f5e..67af8678d 100644
--- a/tests/optimagic/parameters/test_nonlinear_constraints.py
+++ b/tests/optimagic/parameters/test_nonlinear_constraints.py
@@ -5,6 +5,9 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_equal
+from pandas.testing import assert_frame_equal
+from pybaum import tree_just_flatten
+
from optimagic.differentiation.numdiff_options import NumdiffOptions
from optimagic.exceptions import InvalidConstraintError
from optimagic.parameters.nonlinear_constraints import (
@@ -20,8 +23,6 @@
vector_as_list_of_scalar_constraints,
)
from optimagic.parameters.tree_registry import get_registry
-from pandas.testing import assert_frame_equal
-from pybaum import tree_just_flatten
@dataclass
diff --git a/tests/optimagic/parameters/test_process_constraints.py b/tests/optimagic/parameters/test_process_constraints.py
index 469b485bd..66eb42fc1 100644
--- a/tests/optimagic/parameters/test_process_constraints.py
+++ b/tests/optimagic/parameters/test_process_constraints.py
@@ -1,9 +1,10 @@
"""Test the pc processing."""
import numpy as np
-import optimagic as om
import pandas as pd
import pytest
+
+import optimagic as om
from optimagic.exceptions import InvalidConstraintError
from optimagic.parameters.bounds import Bounds
from optimagic.parameters.constraint_tools import check_constraints
diff --git a/tests/optimagic/parameters/test_process_selectors.py b/tests/optimagic/parameters/test_process_selectors.py
index b40c39879..7ad9c78e6 100644
--- a/tests/optimagic/parameters/test_process_selectors.py
+++ b/tests/optimagic/parameters/test_process_selectors.py
@@ -2,11 +2,12 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_equal as aae
+from pybaum import tree_flatten, tree_just_flatten, tree_unflatten
+
from optimagic.exceptions import InvalidConstraintError
from optimagic.parameters.process_selectors import process_selectors
from optimagic.parameters.tree_conversion import TreeConverter
from optimagic.parameters.tree_registry import get_registry
-from pybaum import tree_flatten, tree_just_flatten, tree_unflatten
@pytest.mark.parametrize("constraints", [None, []])
diff --git a/tests/optimagic/parameters/test_scale_conversion.py b/tests/optimagic/parameters/test_scale_conversion.py
index aaa73310d..0b64da03a 100644
--- a/tests/optimagic/parameters/test_scale_conversion.py
+++ b/tests/optimagic/parameters/test_scale_conversion.py
@@ -2,6 +2,7 @@
import pytest
from numpy.testing import assert_array_almost_equal as aaae
from numpy.testing import assert_array_equal as aae
+
from optimagic import first_derivative
from optimagic.parameters.conversion import InternalParams
from optimagic.parameters.scale_conversion import get_scale_converter
diff --git a/tests/optimagic/parameters/test_scaling.py b/tests/optimagic/parameters/test_scaling.py
index 2ae0395f0..13be92823 100644
--- a/tests/optimagic/parameters/test_scaling.py
+++ b/tests/optimagic/parameters/test_scaling.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic.exceptions import InvalidScalingError
from optimagic.parameters.scaling import (
ScalingOptions,
diff --git a/tests/optimagic/parameters/test_space_conversion.py b/tests/optimagic/parameters/test_space_conversion.py
index 9a87924ca..0c4b22365 100644
--- a/tests/optimagic/parameters/test_space_conversion.py
+++ b/tests/optimagic/parameters/test_space_conversion.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic import first_derivative
from optimagic.parameters.space_conversion import (
InternalParams,
diff --git a/tests/optimagic/parameters/test_tree_conversion.py b/tests/optimagic/parameters/test_tree_conversion.py
index 41129f085..7f22c918e 100644
--- a/tests/optimagic/parameters/test_tree_conversion.py
+++ b/tests/optimagic/parameters/test_tree_conversion.py
@@ -2,6 +2,7 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_equal as aae
+
from optimagic.parameters.bounds import Bounds
from optimagic.parameters.tree_conversion import get_tree_converter
from optimagic.typing import AggregationLevel
diff --git a/tests/optimagic/parameters/test_tree_registry.py b/tests/optimagic/parameters/test_tree_registry.py
index 9c3ca7720..6f7362538 100644
--- a/tests/optimagic/parameters/test_tree_registry.py
+++ b/tests/optimagic/parameters/test_tree_registry.py
@@ -1,10 +1,11 @@
import numpy as np
import pandas as pd
import pytest
-from optimagic.parameters.tree_registry import get_registry
from pandas.testing import assert_frame_equal
from pybaum import leaf_names, tree_flatten, tree_unflatten
+from optimagic.parameters.tree_registry import get_registry
+
@pytest.fixture()
def value_df():
diff --git a/tests/optimagic/shared/test_process_user_functions.py b/tests/optimagic/shared/test_process_user_functions.py
index a1cad7a3f..742573283 100644
--- a/tests/optimagic/shared/test_process_user_functions.py
+++ b/tests/optimagic/shared/test_process_user_functions.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
from numpy.typing import NDArray
+
from optimagic import mark
from optimagic.exceptions import InvalidKwargsError
from optimagic.optimization.fun_value import (
diff --git a/tests/optimagic/test_batch_evaluators.py b/tests/optimagic/test_batch_evaluators.py
index 8006eb523..aaf07d054 100644
--- a/tests/optimagic/test_batch_evaluators.py
+++ b/tests/optimagic/test_batch_evaluators.py
@@ -2,6 +2,7 @@
import warnings
import pytest
+
from optimagic.batch_evaluators import process_batch_evaluator
batch_evaluators = ["joblib"]
diff --git a/tests/optimagic/test_constraints.py b/tests/optimagic/test_constraints.py
index 56250fb9a..075591591 100644
--- a/tests/optimagic/test_constraints.py
+++ b/tests/optimagic/test_constraints.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic.constraints import (
Constraint,
DecreasingConstraint,
diff --git a/tests/optimagic/test_decorators.py b/tests/optimagic/test_decorators.py
index c0db497b0..63927aa22 100644
--- a/tests/optimagic/test_decorators.py
+++ b/tests/optimagic/test_decorators.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic.decorators import (
catch,
unpack,
diff --git a/tests/optimagic/test_deprecations.py b/tests/optimagic/test_deprecations.py
index 3240fb8e8..b7fcbeb71 100644
--- a/tests/optimagic/test_deprecations.py
+++ b/tests/optimagic/test_deprecations.py
@@ -6,10 +6,12 @@
import warnings
-import estimagic as em
import numpy as np
-import optimagic as om
import pytest
+from numpy.testing import assert_almost_equal as aaae
+
+import estimagic as em
+import optimagic as om
from estimagic import (
OptimizeLogReader,
OptimizeResult,
@@ -32,7 +34,6 @@
traceback_report,
utilities,
)
-from numpy.testing import assert_almost_equal as aaae
from optimagic.deprecations import (
convert_dict_to_function_value,
handle_log_options_throw_deprecated_warning,
diff --git a/tests/optimagic/test_mark.py b/tests/optimagic/test_mark.py
index 6231f9ad9..f13b2c1ff 100644
--- a/tests/optimagic/test_mark.py
+++ b/tests/optimagic/test_mark.py
@@ -1,8 +1,9 @@
import functools
from dataclasses import dataclass
-import optimagic as om
import pytest
+
+import optimagic as om
from optimagic.optimization.algorithm import AlgoInfo, Algorithm
from optimagic.typing import AggregationLevel
diff --git a/tests/optimagic/test_type_conversion.py b/tests/optimagic/test_type_conversion.py
index e0d11aebc..c2d9e657d 100644
--- a/tests/optimagic/test_type_conversion.py
+++ b/tests/optimagic/test_type_conversion.py
@@ -1,5 +1,6 @@
import numpy as np
import pytest
+
from optimagic.type_conversion import TYPE_CONVERTERS
from optimagic.typing import (
GtOneFloat,
diff --git a/tests/optimagic/test_utilities.py b/tests/optimagic/test_utilities.py
index c5bd0e873..08a4e53d0 100644
--- a/tests/optimagic/test_utilities.py
+++ b/tests/optimagic/test_utilities.py
@@ -2,6 +2,7 @@
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
+
from optimagic.config import IS_JAX_INSTALLED
from optimagic.utilities import (
calculate_trustregion_initial_radius,
diff --git a/tests/optimagic/visualization/test_convergence_plot.py b/tests/optimagic/visualization/test_convergence_plot.py
index 45c28ce48..c6d119573 100644
--- a/tests/optimagic/visualization/test_convergence_plot.py
+++ b/tests/optimagic/visualization/test_convergence_plot.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic import get_benchmark_problems
from optimagic.benchmarking.run_benchmark import run_benchmark
from optimagic.visualization.convergence_plot import (
diff --git a/tests/optimagic/visualization/test_deviation_plot.py b/tests/optimagic/visualization/test_deviation_plot.py
index e7060c313..41ec73907 100644
--- a/tests/optimagic/visualization/test_deviation_plot.py
+++ b/tests/optimagic/visualization/test_deviation_plot.py
@@ -1,4 +1,5 @@
import pytest
+
from optimagic import get_benchmark_problems
from optimagic.benchmarking.run_benchmark import run_benchmark
from optimagic.visualization.deviation_plot import (
diff --git a/tests/optimagic/visualization/test_history_plots.py b/tests/optimagic/visualization/test_history_plots.py
index 81ed33824..70078b137 100644
--- a/tests/optimagic/visualization/test_history_plots.py
+++ b/tests/optimagic/visualization/test_history_plots.py
@@ -1,8 +1,9 @@
import itertools
import numpy as np
-import optimagic as om
import pytest
+
+import optimagic as om
from optimagic.logging import SQLiteLogOptions
from optimagic.optimization.optimize import minimize
from optimagic.parameters.bounds import Bounds
diff --git a/tests/optimagic/visualization/test_profile_plot.py b/tests/optimagic/visualization/test_profile_plot.py
index 929e914c5..30f84a9b4 100644
--- a/tests/optimagic/visualization/test_profile_plot.py
+++ b/tests/optimagic/visualization/test_profile_plot.py
@@ -1,6 +1,7 @@
import numpy as np
import pandas as pd
import pytest
+
from optimagic import get_benchmark_problems
from optimagic.benchmarking.run_benchmark import run_benchmark
from optimagic.visualization.profile_plot import (
diff --git a/tests/optimagic/visualization/test_slice_plot.py b/tests/optimagic/visualization/test_slice_plot.py
index 5acb0d255..1eebd27d5 100644
--- a/tests/optimagic/visualization/test_slice_plot.py
+++ b/tests/optimagic/visualization/test_slice_plot.py
@@ -1,5 +1,6 @@
import numpy as np
import pytest
+
from optimagic import mark
from optimagic.parameters.bounds import Bounds
from optimagic.visualization.slice_plot import slice_plot