diff --git a/.gitignore b/.gitignore
index 9842ebd..0726c8b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,18 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Prerequisites
*.d
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 36e4745..889719f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,3 +1,20 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ci:
+ skip: [mypy]
+
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
@@ -28,22 +45,24 @@ repos:
rev: v3.15.0
hooks:
- id: pyupgrade
- args: [--py37-plus]
+ args: [--py37-plus, --keep-runtime-typing]
- repo: https://github.com/pycqa/isort
- rev: 5.13.0
+ rev: 5.13.2
hooks:
- id: isort
name: isort (python)
args: ["--profile", "black", "--filter-files"]
- repo: https://github.com/psf/black
- rev: 23.11.0
+ rev: 23.12.0
hooks:
- id: black
+ stages: [commit]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.7.1
hooks:
- id: mypy
additional_dependencies: [types-all]
+ exclude: 'test/conftest.py'
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 02f531a..4c3b4cc 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,3 +1,18 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
@@ -9,7 +24,7 @@ version: 2
build:
os: ubuntu-22.04
tools:
- python: "3.8"
+ python: "3.10"
# Build documentation in the docs/ directory with Sphinx
sphinx:
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 22782e4..201dbdf 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -11,7 +11,7 @@
"python.analysis.diagnosticSeverityOverrides": {
"reportPrivateImportUsage": "information"
},
- "python.defaultInterpreterPath": "${env:CONDA_PREFIX}/envs/dxtb/bin/python",
+ "python.defaultInterpreterPath": "${env:CONDA_PREFIX}/envs/torch/bin/python",
"python.testing.pytestArgs": [],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
diff --git a/README.rst b/README.rst
index add77fc..77846cf 100644
--- a/README.rst
+++ b/README.rst
@@ -60,8 +60,7 @@ The project can easily be installed with ``pip``.
.. code::
- pip install tad-dftd3
-
+ pip install tad-dftd3
From source
~~~~~~~~~~~
@@ -71,8 +70,8 @@ Obtain the source by cloning the repository with
.. code::
- git clone https://github.com/dftd3/tad-dftd3
- cd tad-dftd3
+ git clone https://github.com/dftd3/tad-dftd3
+ cd tad-dftd3
We recommend using a `conda `__ environment to install the package.
You can setup the environment manager using a `mambaforge `__ installer.
@@ -80,41 +79,55 @@ Install the required dependencies from the conda-forge channel.
.. code::
- mamba env create -n torch -f environment.yml
- mamba activate torch
+ mamba env create -n torch -f environment.yml
+ mamba activate torch
-For development, install the following additional dependencies
+Install this project with ``pip`` in the environment
.. code::
- mamba install black coverage covdefaults mypy pre-commit pylint pytest tox
+ pip install .
+The following dependencies are required
-Install this project with pip in the environment
+- `numpy `__
+- `tad_mctc `__
+- `torch `__
+- `pytest `__ (tests only)
+
+
+Development
+-----------
+
+For development, additionally install the following tools in your environment.
.. code::
- pip install .
+ mamba install black covdefaults coverage mypy pre-commit pylint tox
-Add the option ``-e`` for installing in development mode.
+With ``pip``, add the option ``-e`` for installing in development mode and ``[dev]`` to include the development dependencies
-The following dependencies are required
+.. code::
-- `numpy `__
-- `torch `__
-- `pytest `__ (tests only)
+ pip install -e .[dev]
+
+The pre-commit hooks are initialized by running the following command in the root of the repository.
+
+.. code::
+
+ pre-commit install
-You can check your installation by running the test suite with pytest
+For testing all Python environments, simply run ``tox``.
.. code::
- pytest tests/ --pyargs tad_dftd3
+ tox
-or tox for testing multiple Python versions
+Note that this randomizes the order of the tests but skips the "large" tests. To change this behavior, override the optional *posargs* of ``tox``.
.. code::
- tox
+ tox -- test
Example
@@ -124,121 +137,133 @@ The following example shows how to calculate the DFT-D3 dispersion energy for a
.. code:: python
- import torch
- import tad_dftd3 as d3
-
- numbers = d3.util.to_number(symbols="C C C C N C S H H H H H".split())
- positions = torch.tensor(
- [
- [-2.56745685564671, -0.02509985979910, 0.00000000000000],
- [-1.39177582455797, +2.27696188880014, 0.00000000000000],
- [+1.27784995624894, +2.45107479759386, 0.00000000000000],
- [+2.62801937615793, +0.25927727028120, 0.00000000000000],
- [+1.41097033661123, -1.99890996077412, 0.00000000000000],
- [-1.17186102298849, -2.34220576284180, 0.00000000000000],
- [-2.39505990368378, -5.22635838332362, 0.00000000000000],
- [+2.41961980455457, -3.62158019253045, 0.00000000000000],
- [-2.51744374846065, +3.98181713686746, 0.00000000000000],
- [+2.24269048384775, +4.24389473203647, 0.00000000000000],
- [+4.66488984573956, +0.17907568006409, 0.00000000000000],
- [-4.60044244782237, -0.17794734637413, 0.00000000000000],
- ]
- )
- param = {
- "a1": torch.tensor(0.49484001),
- "s8": torch.tensor(0.78981345),
- "a2": torch.tensor(5.73083694),
- }
-
- energy = d3.dftd3(numbers, positions, param)
-
- torch.set_printoptions(precision=10)
- print(energy)
- # tensor([-0.0004075971, -0.0003940886, -0.0003817684, -0.0003949536,
- # -0.0003577212, -0.0004110279, -0.0005385976, -0.0001808242,
- # -0.0001563670, -0.0001503394, -0.0001577045, -0.0001764488])
+ import torch
+ import tad_dftd3 as d3
+ import tad_mctc as mctc
+
+ numbers = mctc.convert.symbol_to_number(symbols="C C C C N C S H H H H H".split())
+ positions = torch.tensor(
+ [
+ [-2.56745685564671, -0.02509985979910, 0.00000000000000],
+ [-1.39177582455797, +2.27696188880014, 0.00000000000000],
+ [+1.27784995624894, +2.45107479759386, 0.00000000000000],
+ [+2.62801937615793, +0.25927727028120, 0.00000000000000],
+ [+1.41097033661123, -1.99890996077412, 0.00000000000000],
+ [-1.17186102298849, -2.34220576284180, 0.00000000000000],
+ [-2.39505990368378, -5.22635838332362, 0.00000000000000],
+ [+2.41961980455457, -3.62158019253045, 0.00000000000000],
+ [-2.51744374846065, +3.98181713686746, 0.00000000000000],
+ [+2.24269048384775, +4.24389473203647, 0.00000000000000],
+ [+4.66488984573956, +0.17907568006409, 0.00000000000000],
+ [-4.60044244782237, -0.17794734637413, 0.00000000000000],
+ ]
+ )
+ param = {
+ "a1": torch.tensor(0.49484001),
+ "s8": torch.tensor(0.78981345),
+ "a2": torch.tensor(5.73083694),
+ }
+
+ energy = d3.dftd3(numbers, positions, param)
+
+ torch.set_printoptions(precision=10)
+ print(energy)
+ # tensor([-0.0004075971, -0.0003940886, -0.0003817684, -0.0003949536,
+ # -0.0003577212, -0.0004110279, -0.0005385976, -0.0001808242,
+ # -0.0001563670, -0.0001503394, -0.0001577045, -0.0001764488])
The next example shows the calculation of dispersion energies for a batch of structures, while retaining access to all intermediates used for calculating the dispersion energy.
.. code:: python
- import torch
- import tad_dftd3 as d3
-
- sample1 = dict(
- numbers=d3.util.to_number("Pb H H H H Bi H H H".split()),
- positions=torch.tensor(
- [
- [-0.00000020988889, -4.98043478877778, +0.00000000000000],
- [+3.06964045311111, -6.06324400177778, +0.00000000000000],
- [-1.53482054188889, -6.06324400177778, -2.65838526500000],
- [-1.53482054188889, -6.06324400177778, +2.65838526500000],
- [-0.00000020988889, -1.72196703577778, +0.00000000000000],
- [-0.00000020988889, +4.77334244722222, +0.00000000000000],
- [+1.35700257511111, +6.70626379422222, -2.35039772300000],
- [-2.71400388988889, +6.70626379422222, +0.00000000000000],
- [+1.35700257511111, +6.70626379422222, +2.35039772300000],
- ]
- ),
- )
- sample2 = dict(
- numbers=d3.util.to_number("C C C C C C I H H H H H S H C H H H".split(" ")),
- positions=torch.tensor(
- [
- [-1.42754169820131, -1.50508961850828, -1.93430551124333],
- [+1.19860572924150, -1.66299114873979, -2.03189643761298],
- [+2.65876001301880, +0.37736955363609, -1.23426391650599],
- [+1.50963368042358, +2.57230374419743, -0.34128058818180],
- [-1.12092277855371, +2.71045691257517, -0.25246348639234],
- [-2.60071517756218, +0.67879949508239, -1.04550707592673],
- [-2.86169588073340, +5.99660765711210, +1.08394899986031],
- [+2.09930989272956, -3.36144811062374, -2.72237695164263],
- [+2.64405246349916, +4.15317840474646, +0.27856972788526],
- [+4.69864865613751, +0.26922271535391, -1.30274048619151],
- [-4.63786461351839, +0.79856258572808, -0.96906659938432],
- [-2.57447518692275, -3.08132039046931, -2.54875517521577],
- [-5.88211879210329, 11.88491819358157, +2.31866455902233],
- [-8.18022701418703, 10.95619984550779, +1.83940856333092],
- [-5.08172874482867, 12.66714386256482, -0.92419491629867],
- [-3.18311711399702, 13.44626574330220, -0.86977613647871],
- [-5.07177399637298, 10.99164969235585, -2.10739192258756],
- [-6.35955320518616, 14.08073002965080, -1.68204314084441],
- ]
- ),
- )
- numbers = d3.util.pack(
- (
- sample1["numbers"],
- sample2["numbers"],
- )
- )
- positions = d3.util.pack(
- (
- sample1["positions"],
- sample2["positions"],
- )
- )
- ref = d3.reference.Reference()
- rcov = d3.data.covalent_rad_d3[numbers]
- rvdw = d3.data.vdw_rad_d3[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
- r4r2 = d3.data.sqrt_z_r4_over_r2[numbers]
- param = {
- "a1": torch.tensor(0.49484001),
- "s8": torch.tensor(0.78981345),
- "a2": torch.tensor(5.73083694),
- }
-
- cn = d3.ncoord.coordination_number(numbers, positions, d3.ncoord.exp_count, rcov)
- weights = d3.model.weight_references(numbers, cn, ref, d3.model.gaussian_weight)
- c6 = d3.model.atomic_c6(numbers, weights, ref)
- energy = d3.disp.dispersion(
- numbers, positions, c6, rvdw, r4r2, d3.disp.rational_damping, **param
- )
-
- torch.set_printoptions(precision=10)
- print(torch.sum(energy, dim=-1))
- # tensor([-0.0014092578, -0.0057840119])
+ import torch
+ import tad_dftd3 as d3
+ import tad_mctc as mctc
+
+ sample1 = dict(
+ numbers=mctc.convert.symbol_to_number("Pb H H H H Bi H H H".split()),
+ positions=torch.tensor(
+ [
+ [-0.00000020988889, -4.98043478877778, +0.00000000000000],
+ [+3.06964045311111, -6.06324400177778, +0.00000000000000],
+ [-1.53482054188889, -6.06324400177778, -2.65838526500000],
+ [-1.53482054188889, -6.06324400177778, +2.65838526500000],
+ [-0.00000020988889, -1.72196703577778, +0.00000000000000],
+ [-0.00000020988889, +4.77334244722222, +0.00000000000000],
+ [+1.35700257511111, +6.70626379422222, -2.35039772300000],
+ [-2.71400388988889, +6.70626379422222, +0.00000000000000],
+ [+1.35700257511111, +6.70626379422222, +2.35039772300000],
+ ]
+ ),
+ )
+ sample2 = dict(
+ numbers=mctc.convert.symbol_to_number(
+ "C C C C C C I H H H H H S H C H H H".split(" ")
+ ),
+ positions=torch.tensor(
+ [
+ [-1.42754169820131, -1.50508961850828, -1.93430551124333],
+ [+1.19860572924150, -1.66299114873979, -2.03189643761298],
+ [+2.65876001301880, +0.37736955363609, -1.23426391650599],
+ [+1.50963368042358, +2.57230374419743, -0.34128058818180],
+ [-1.12092277855371, +2.71045691257517, -0.25246348639234],
+ [-2.60071517756218, +0.67879949508239, -1.04550707592673],
+ [-2.86169588073340, +5.99660765711210, +1.08394899986031],
+ [+2.09930989272956, -3.36144811062374, -2.72237695164263],
+ [+2.64405246349916, +4.15317840474646, +0.27856972788526],
+ [+4.69864865613751, +0.26922271535391, -1.30274048619151],
+ [-4.63786461351839, +0.79856258572808, -0.96906659938432],
+ [-2.57447518692275, -3.08132039046931, -2.54875517521577],
+ [-5.88211879210329, 11.88491819358157, +2.31866455902233],
+ [-8.18022701418703, 10.95619984550779, +1.83940856333092],
+ [-5.08172874482867, 12.66714386256482, -0.92419491629867],
+ [-3.18311711399702, 13.44626574330220, -0.86977613647871],
+ [-5.07177399637298, 10.99164969235585, -2.10739192258756],
+ [-6.35955320518616, 14.08073002965080, -1.68204314084441],
+ ]
+ ),
+ )
+ numbers = mctc.batch.pack(
+ (
+ sample1["numbers"],
+ sample2["numbers"],
+ )
+ )
+ positions = mctc.batch.pack(
+ (
+ sample1["positions"],
+ sample2["positions"],
+ )
+ )
+ ref = d3.reference.Reference()
+ rcov = d3.data.COV_D3[numbers]
+ rvdw = d3.data.VDW_D3[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
+ r4r2 = d3.data.R4R2[numbers]
+ param = {
+ "a1": torch.tensor(0.49484001),
+ "s8": torch.tensor(0.78981345),
+ "a2": torch.tensor(5.73083694),
+ }
+
+ cn = mctc.ncoord.cn_d3(
+ numbers, positions, counting_function=mctc.ncoord.exp_count, rcov=rcov
+ )
+ weights = d3.model.weight_references(numbers, cn, ref, d3.model.gaussian_weight)
+ c6 = d3.model.atomic_c6(numbers, weights, ref)
+ energy = d3.disp.dispersion(
+ numbers,
+ positions,
+ param,
+ c6,
+ rvdw,
+ r4r2,
+ d3.disp.rational_damping,
+ )
+
+ torch.set_printoptions(precision=10)
+ print(torch.sum(energy, dim=-1))
+ # tensor([-0.0014092578, -0.0057840119])
Contributing
diff --git a/docs/installation.rst b/docs/installation.rst
index adf8d43..6c6db70 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -1,26 +1,25 @@
Installation
-============
+------------
-PyPI
-----
+pip
+~~~
-*tad-dftd3* can easily be installed with ``pip``.
+The project can easily be installed with ``pip``.
.. code::
- pip install tad-dftd3
+ pip install tad-dftd3
-
-From Source
------------
+From source
+~~~~~~~~~~~
This project is hosted on GitHub at `dftd3/tad-dftd3 `__.
Obtain the source by cloning the repository with
.. code::
- git clone https://github.com/dftd3/tad-dftd3
- cd tad-dftd3
+ git clone https://github.com/dftd3/tad-dftd3
+ cd tad-dftd3
We recommend using a `conda `__ environment to install the package.
You can setup the environment manager using a `mambaforge `__ installer.
@@ -28,38 +27,18 @@ Install the required dependencies from the conda-forge channel.
.. code::
- mamba env create -n torch -f environment.yml
- mamba activate torch
-
-For development, install the following additional dependencies
-
-.. code::
-
- mamba install black coverage covdefaults mypy pre-commit pylint pytest tox
-
+ mamba env create -n torch -f environment.yml
+ mamba activate torch
-Install this project with pip in the environment
+Install this project with ``pip`` in the environment
.. code::
- pip install .
-
-Add the option ``-e`` for installing in development mode.
+ pip install .
The following dependencies are required
- `numpy `__
+- `tad_mctc `__
- `torch `__
- `pytest `__ (tests only)
-
-You can check your installation by running the test suite with pytest
-
-.. code::
-
- pytest tests/ --pyargs tad_dftd3
-
-or tox for testing multiple Python versions
-
-.. code::
-
- tox
diff --git a/docs/modules/atm.rst b/docs/modules/damping/atm.rst
similarity index 100%
rename from docs/modules/atm.rst
rename to docs/modules/damping/atm.rst
diff --git a/docs/modules/damping.rst b/docs/modules/damping/index.rst
similarity index 100%
rename from docs/modules/damping.rst
rename to docs/modules/damping/index.rst
diff --git a/docs/modules/rational.rst b/docs/modules/damping/rational.rst
similarity index 100%
rename from docs/modules/rational.rst
rename to docs/modules/damping/rational.rst
diff --git a/docs/modules/data.rst b/docs/modules/data.rst
deleted file mode 100644
index 6066259..0000000
--- a/docs/modules/data.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-.. automodule:: tad_dftd3.data
- :members:
diff --git a/docs/modules/data/index.rst b/docs/modules/data/index.rst
new file mode 100644
index 0000000..0593c57
--- /dev/null
+++ b/docs/modules/data/index.rst
@@ -0,0 +1,8 @@
+.. _data:
+
+.. automodule:: tad_dftd3.data
+
+.. toctree::
+
+ r4r2
+ radii
diff --git a/docs/modules/data/r4r2.rst b/docs/modules/data/r4r2.rst
new file mode 100644
index 0000000..83d7a89
--- /dev/null
+++ b/docs/modules/data/r4r2.rst
@@ -0,0 +1,2 @@
+.. automodule:: tad_dftd3.data.r4r2
+ :members:
diff --git a/docs/modules/data/radii.rst b/docs/modules/data/radii.rst
new file mode 100644
index 0000000..e0e9ded
--- /dev/null
+++ b/docs/modules/data/radii.rst
@@ -0,0 +1,2 @@
+.. automodule:: tad_dftd3.data.radii
+ :members:
diff --git a/docs/modules/defaults.rst b/docs/modules/defaults.rst
new file mode 100644
index 0000000..53c520f
--- /dev/null
+++ b/docs/modules/defaults.rst
@@ -0,0 +1,2 @@
+.. automodule:: tad_dftd3.defaults
+ :members:
diff --git a/docs/modules/dftd3.rst b/docs/modules/dftd3.rst
deleted file mode 100644
index 01b3d4b..0000000
--- a/docs/modules/dftd3.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-DFT-D3 wrapper
-==============
-
-.. autofunction:: tad_dftd3.dftd3
diff --git a/docs/modules/index.rst b/docs/modules/index.rst
index d01151d..7f1ce71 100644
--- a/docs/modules/index.rst
+++ b/docs/modules/index.rst
@@ -7,11 +7,10 @@ The following modules are contained with `tad-dftd3`.
.. toctree::
- dftd3
+ damping/index
+ data/index
+ defaults
disp
- damping
model
- ncoord
reference
- data
- util
+ typing/index
diff --git a/docs/modules/ncoord.rst b/docs/modules/ncoord.rst
deleted file mode 100644
index f99aa69..0000000
--- a/docs/modules/ncoord.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-.. automodule:: tad_dftd3.ncoord
- :members:
diff --git a/docs/modules/typing/builtin.rst b/docs/modules/typing/builtin.rst
new file mode 100644
index 0000000..8f79631
--- /dev/null
+++ b/docs/modules/typing/builtin.rst
@@ -0,0 +1,2 @@
+.. automodule:: tad_dftd3.typing.builtin
+ :members:
diff --git a/docs/modules/typing/d3.rst b/docs/modules/typing/d3.rst
new file mode 100644
index 0000000..71b1236
--- /dev/null
+++ b/docs/modules/typing/d3.rst
@@ -0,0 +1,2 @@
+.. automodule:: tad_dftd3.typing.d3
+ :members:
diff --git a/docs/modules/typing/index.rst b/docs/modules/typing/index.rst
new file mode 100644
index 0000000..ab1c00c
--- /dev/null
+++ b/docs/modules/typing/index.rst
@@ -0,0 +1,9 @@
+.. _typing:
+
+.. automodule:: tad_dftd3.typing
+
+.. toctree::
+
+ builtin
+ d3
+ pytorch
diff --git a/docs/modules/typing/pytorch.rst b/docs/modules/typing/pytorch.rst
new file mode 100644
index 0000000..461eeb7
--- /dev/null
+++ b/docs/modules/typing/pytorch.rst
@@ -0,0 +1,2 @@
+.. automodule:: tad_dftd3.typing.pytorch
+ :members:
diff --git a/docs/modules/util.rst b/docs/modules/util.rst
deleted file mode 100644
index d769597..0000000
--- a/docs/modules/util.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-.. automodule:: tad_dftd3.util
- :members:
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 2db0fd7..22ce86b 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -3,5 +3,6 @@ sphinx-book-theme
sphinx-copybutton
sphinx-design
numpy
+tad-mctc
torch
jinja2<3.1
diff --git a/environment.yml b/environment.yml
index ab64996..3b10d90 100644
--- a/environment.yml
+++ b/environment.yml
@@ -12,10 +12,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-name: tad-dftd3
+name: torch
channels:
+ - defaults
- conda-forge
+ - pytorch
dependencies:
- numpy
- - python >=3.8
- - pytorch
+ - python>=3.8
+ - pytorch>=1.11.0
+ - pip:
+ - tad-mctc
diff --git a/examples/batch.py b/examples/batch.py
index 3d20080..935ba27 100644
--- a/examples/batch.py
+++ b/examples/batch.py
@@ -1,10 +1,11 @@
# SPDX-Identifier: CC0-1.0
+import tad_mctc as mctc
import torch
import tad_dftd3 as d3
sample1 = dict(
- numbers=d3.utils.to_number("Pb H H H H Bi H H H".split()),
+ numbers=mctc.convert.symbol_to_number("Pb H H H H Bi H H H".split()),
positions=torch.tensor(
[
[-0.00000020988889, -4.98043478877778, +0.00000000000000],
@@ -20,7 +21,9 @@
),
)
sample2 = dict(
- numbers=d3.utils.to_number("C C C C C C I H H H H H S H C H H H".split(" ")),
+ numbers=mctc.convert.symbol_to_number(
+ "C C C C C C I H H H H H S H C H H H".split(" ")
+ ),
positions=torch.tensor(
[
[-1.42754169820131, -1.50508961850828, -1.93430551124333],
@@ -44,29 +47,31 @@
]
),
)
-numbers = d3.utils.pack(
+numbers = mctc.batch.pack(
(
sample1["numbers"],
sample2["numbers"],
)
)
-positions = d3.utils.pack(
+positions = mctc.batch.pack(
(
sample1["positions"],
sample2["positions"],
)
)
ref = d3.reference.Reference()
-rcov = d3.data.covalent_rad_d3[numbers]
-rvdw = d3.data.vdw_rad_d3[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
-r4r2 = d3.data.sqrt_z_r4_over_r2[numbers]
+rcov = d3.data.COV_D3[numbers]
+rvdw = d3.data.VDW_D3[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
+r4r2 = d3.data.R4R2[numbers]
param = {
"a1": torch.tensor(0.49484001),
"s8": torch.tensor(0.78981345),
"a2": torch.tensor(5.73083694),
}
-cn = d3.ncoord.coordination_number(numbers, positions, d3.ncoord.exp_count, rcov)
+cn = mctc.ncoord.cn_d3(
+ numbers, positions, counting_function=mctc.ncoord.exp_count, rcov=rcov
+)
weights = d3.model.weight_references(numbers, cn, ref, d3.model.gaussian_weight)
c6 = d3.model.atomic_c6(numbers, weights, ref)
energy = d3.disp.dispersion(
@@ -81,3 +86,4 @@
torch.set_printoptions(precision=10)
print(torch.sum(energy, dim=-1))
+# tensor([-0.0014092578, -0.0057840119])
diff --git a/examples/single.py b/examples/single.py
index 36adfc5..d2fb93c 100644
--- a/examples/single.py
+++ b/examples/single.py
@@ -1,9 +1,10 @@
# SPDX-Identifier: CC0-1.0
+import tad_mctc as mctc
import torch
import tad_dftd3 as d3
-numbers = d3.utils.to_number(symbols="C C C C N C S H H H H H".split())
+numbers = mctc.convert.symbol_to_number(symbols="C C C C N C S H H H H H".split())
positions = torch.Tensor(
[
[-2.56745685564671, -0.02509985979910, 0.00000000000000],
diff --git a/pyproject.toml b/pyproject.toml
index ad5b6c9..b932220 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,10 +18,11 @@ build-backend = "setuptools.build_meta"
[tool.pytest.ini_options]
addopts = "--doctest-modules"
-testpaths = ["tests"]
+testpaths = ["test"]
pythonpath = ["src"]
markers = [
"grad: Marks tests which perform 'gradcheck' evaluations, this can be slow.",
+ "large: Marks tests for large molecules, this can be slow.",
]
@@ -32,7 +33,11 @@ disallow_incomplete_defs = true
disallow_untyped_defs = true
warn_redundant_casts = true
warn_unreachable = true
-warn_unused_ignores = false
+warn_unused_ignores = true
+exclude = '''
+ (?x)
+ ^tests?/conftest.py$
+'''
[tool.coverage.run]
diff --git a/setup.cfg b/setup.cfg
index 51f6942..6830f29 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -24,6 +24,7 @@ project_urls =
packages = find:
install_requires =
numpy
+ tad-mctc
torch
python_requires = >=3.8
include_package_data = True
@@ -56,3 +57,4 @@ tox =
tad_dftd3 =
py.typed
*.npy
+ *.npz
diff --git a/src/tad_dftd3/__init__.py b/src/tad_dftd3/__init__.py
index 54df97f..294eafe 100644
--- a/src/tad_dftd3/__init__.py
+++ b/src/tad_dftd3/__init__.py
@@ -32,11 +32,12 @@
-------
>>> import torch
>>> import tad_dftd3 as d3
->>> numbers = d3.util.pack(( # S22 system 4: formamide dimer
-... d3.util.to_number("C C N N H H H H H H O O".split()),
-... d3.util.to_number("C O N H H H".split()),
+>>> import tad_mctc as mctc
+>>> numbers = mctc.batch.pack(( # S22 system 4: formamide dimer
+... mctc.convert.symbol_to_number("C C N N H H H H H H O O".split()),
+... mctc.convert.symbol_to_number("C O N H H H".split()),
... ))
->>> positions = d3.util.pack((
+>>> positions = mctc.batch.pack((
... torch.tensor([ # coordinates in Bohr
... [-3.81469488143921, +0.09993441402912, 0.00000000000000],
... [+3.81469488143921, -0.09993441402912, 0.00000000000000],
@@ -74,95 +75,6 @@
"""
import torch
-from . import constants, damping, data, defaults, disp, model, ncoord, reference
-from ._typing import (
- DD,
- CountingFunction,
- DampingFunction,
- Dict,
- Optional,
- Tensor,
- WeightingFunction,
-)
-from .utils import misc
-
-
-def dftd3(
- numbers: Tensor,
- positions: Tensor,
- param: Dict[str, Tensor],
- *,
- ref: Optional[reference.Reference] = None,
- rcov: Optional[Tensor] = None,
- rvdw: Optional[Tensor] = None,
- r4r2: Optional[Tensor] = None,
- cutoff: Optional[Tensor] = None,
- counting_function: CountingFunction = ncoord.exp_count,
- weighting_function: WeightingFunction = model.gaussian_weight,
- damping_function: DampingFunction = damping.rational_damping,
-) -> Tensor:
- """
- Evaluate DFT-D3 dispersion energy for a batch of geometries.
-
- Parameters
- ----------
- numbers : torch.Tensor
- Atomic numbers of the atoms in the system.
- positions : torch.Tensor
- Cartesian coordinates of the atoms in the system.
- param : dict[str, Tensor]
- DFT-D3 damping parameters.
- ref : reference.Reference, optional
- Reference C6 coefficients.
- rcov : torch.Tensor, optional
- Covalent radii of the atoms in the system.
- rvdw : torch.Tensor, optional
- Van der Waals radii of the atoms in the system.
- r4r2 : torch.Tensor, optional
- r⁴ over r² expectation values of the atoms in the system.
- damping_function : Callable, optional
- Damping function evaluate distance dependent contributions.
- weighting_function : Callable, optional
- Function to calculate weight of individual reference systems.
- counting_function : Callable, optional
- Calculates counting value in range 0 to 1 for each atom pair.
-
- Returns
- -------
- Tensor
- Atom-resolved DFT-D3 dispersion energy for each geometry.
- """
- dd: DD = {"device": positions.device, "dtype": positions.dtype}
-
- if torch.max(numbers) >= constants.MAX_ELEMENT:
- raise ValueError(
- f"No D3 parameters available for Z > {constants.MAX_ELEMENT-1} "
- f"({constants.PSE_Z2S[constants.MAX_ELEMENT]})."
- )
-
- if cutoff is None:
- cutoff = torch.tensor(defaults.D3_DISP_CUTOFF, **dd)
- if ref is None:
- ref = reference.Reference(**dd)
- if rcov is None:
- rcov = data.covalent_rad_d3.to(**dd)[numbers]
- if rvdw is None:
- rvdw = data.vdw_rad_d3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
- if r4r2 is None:
- r4r2 = data.sqrt_z_r4_over_r2.to(**dd)[numbers]
-
- cn = ncoord.coordination_number(numbers, positions, counting_function, rcov)
- weights = model.weight_references(numbers, cn, ref, weighting_function)
- c6 = model.atomic_c6(numbers, weights, ref)
- energy = disp.dispersion(
- numbers,
- positions,
- param,
- c6,
- rvdw,
- r4r2,
- damping_function,
- cutoff=cutoff,
- )
-
- return energy
+from . import damping, data, defaults, disp, model, reference
+from .__version__ import __version__
+from .disp import dftd3
diff --git a/src/tad_dftd3/__version__.py b/src/tad_dftd3/__version__.py
index be774a8..d6a77e8 100644
--- a/src/tad_dftd3/__version__.py
+++ b/src/tad_dftd3/__version__.py
@@ -13,10 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
-Version module for tad_dftd3.
+Version module for *tad-dftd3*.
"""
-import torch
-
__version__ = "0.1.4"
-
-__torch_version__ = torch.__version__
diff --git a/src/tad_dftd3/_typing.py b/src/tad_dftd3/_typing.py
deleted file mode 100644
index c408ea3..0000000
--- a/src/tad_dftd3/_typing.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Type annotations for this project.
-"""
-
-# pylint: disable=unused-import
-from typing import (
- Any,
- Callable,
- Dict,
- List,
- NoReturn,
- Optional,
- Protocol,
- Tuple,
- TypedDict,
- Union,
-)
-
-import torch
-from torch import Tensor
-
-TensorOrTensors = Union[List[Tensor], Tuple[Tensor, ...], Tensor]
-MaybeTensor = Union[Tensor, Optional[Tensor]]
-
-CountingFunction = Callable[[Tensor, Tensor], Tensor]
-WeightingFunction = Callable[[Tensor], Tensor]
-DampingFunction = Callable[[int, Tensor, Tensor, Dict[str, Tensor]], Tensor]
-Size = Union[Tuple[int], List[int], torch.Size]
-
-
-class Molecule(TypedDict):
- """Representation of fundamental molecular structure (atom types and postions)."""
-
- numbers: Tensor
- """Tensor of atomic numbers"""
-
- positions: Tensor
- """Tensor of 3D coordinates of shape (n, 3)"""
-
-
-class DD(TypedDict):
- """Collection of torch.device and torch.dtype."""
-
- device: Union[torch.device, None]
- """Device on which a tensor lives."""
-
- dtype: torch.dtype
- """Floating point precision of a tensor."""
diff --git a/src/tad_dftd3/constants.py b/src/tad_dftd3/constants.py
deleted file mode 100644
index 028db59..0000000
--- a/src/tad_dftd3/constants.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This module contains fundamental constants and conversion factors.
-"""
-
-MAX_ELEMENT = 104
-"""Atomic number (+1 for dummy) of last element supported by DFT-D3."""
-
-BOHR_TO_ANGSTROM = 0.529177210903
-"""Bohr radius in Angstroms."""
-
-ANGSTROM_TO_BOHR = 1.0 / BOHR_TO_ANGSTROM
-"""Conversion factor from Angstrom to Bohr."""
-
-PSE_S2Z = {
- "H": 1,
- "He": 2,
- "Li": 3,
- "Be": 4,
- "B": 5,
- "C": 6,
- "N": 7,
- "O": 8,
- "F": 9,
- "Ne": 10,
- "Na": 11,
- "Mg": 12,
- "Al": 13,
- "Si": 14,
- "P": 15,
- "S": 16,
- "Cl": 17,
- "Ar": 18,
- "K": 19,
- "Ca": 20,
- "Sc": 21,
- "Ti": 22,
- "V": 23,
- "Cr": 24,
- "Mn": 25,
- "Fe": 26,
- "Co": 27,
- "Ni": 28,
- "Cu": 29,
- "Zn": 30,
- "Ga": 31,
- "Ge": 32,
- "As": 33,
- "Se": 34,
- "Br": 35,
- "Kr": 36,
- "Rb": 37,
- "Sr": 38,
- "Y": 39,
- "Zr": 40,
- "Nb": 41,
- "Mo": 42,
- "Tc": 43,
- "Ru": 44,
- "Rh": 45,
- "Pd": 46,
- "Ag": 47,
- "Cd": 48,
- "In": 49,
- "Sn": 50,
- "Sb": 51,
- "Te": 52,
- "I": 53,
- "Xe": 54,
- "Cs": 55,
- "Ba": 56,
- "La": 57,
- "Ce": 58,
- "Pr": 59,
- "Nd": 60,
- "Pm": 61,
- "Sm": 62,
- "Eu": 63,
- "Gd": 64,
- "Tb": 65,
- "Dy": 66,
- "Ho": 67,
- "Er": 68,
- "Tm": 69,
- "Yb": 70,
- "Lu": 71,
- "Hf": 72,
- "Ta": 73,
- "W": 74,
- "Re": 75,
- "Os": 76,
- "Ir": 77,
- "Pt": 78,
- "Au": 79,
- "Hg": 80,
- "Tl": 81,
- "Pb": 82,
- "Bi": 83,
- "Po": 84,
- "At": 85,
- "Rn": 86,
- "Fr": 87,
- "Ra": 88,
- "Ac": 89,
- "Th": 90,
- "Pa": 91,
- "U": 92,
- "Np": 93,
- "Pu": 94,
- "Am": 95,
- "Cm": 96,
- "Bk": 97,
- "Cf": 98,
- "Es": 99,
- "Fm": 100,
- "Md": 101,
- "No": 102,
- "Lr": 103,
- "Rf": 104,
- "Db": 105,
- "Sg": 106,
- "Bh": 107,
- "Hs": 108,
- "Mt": 109,
- "Ds": 110,
- "Rg": 111,
- "Cn": 112,
- "Nh": 113,
- "Fl": 114,
- "Mc": 115,
- "Lv": 116,
- "Ts": 117,
- "Og": 118,
-}
-"""PSE with mapping from symbol to atomic number."""
-
-PSE_Z2S = {v: k for k, v in PSE_S2Z.items()}
-"""PSE with mapping from atomic number to symbol."""
diff --git a/src/tad_dftd3/damping/__init__.py b/src/tad_dftd3/damping/__init__.py
index 2f60b2a..8bb819d 100644
--- a/src/tad_dftd3/damping/__init__.py
+++ b/src/tad_dftd3/damping/__init__.py
@@ -18,5 +18,5 @@
Available damping schemes for two- and three-body dispersion terms.
"""
-from .atm import dispersion_atm
-from .rational import rational_damping
+from .atm import *
+from .rational import *
diff --git a/src/tad_dftd3/damping/atm.py b/src/tad_dftd3/damping/atm.py
index 86bda63..52647a6 100644
--- a/src/tad_dftd3/damping/atm.py
+++ b/src/tad_dftd3/damping/atm.py
@@ -31,10 +31,13 @@
\dfrac{1}{1+ 6 \left(\overline{R}_\text{ABC}\right)^{-16}}
"""
import torch
+from tad_mctc import storch
+from tad_mctc.batch import real_pairs, real_triples
from .. import defaults
-from .._typing import DD, Tensor
-from ..utils import cdist, real_pairs, real_triples
+from ..typing import DD, Tensor
+
+__all__ = ["dispersion_atm"]
def dispersion_atm(
@@ -83,18 +86,16 @@ def dispersion_atm(
cutoff2 = cutoff * cutoff
srvdw = rs9 * rvdw
- mask_pairs = real_pairs(numbers, diagonal=False)
- mask_triples = real_triples(numbers, self=False)
+ mask_pairs = real_pairs(numbers, mask_diagonal=True)
+ mask_triples = real_triples(numbers, mask_self=True)
eps = torch.tensor(torch.finfo(positions.dtype).eps, **dd)
zero = torch.tensor(0.0, **dd)
one = torch.tensor(1.0, **dd)
# C9_ABC = s9 * sqrt(|C6_AB * C6_AC * C6_BC|)
- c9 = s9 * torch.sqrt(
- torch.clamp(
- torch.abs(c6.unsqueeze(-1) * c6.unsqueeze(-2) * c6.unsqueeze(-3)), min=eps
- )
+ c9 = s9 * storch.sqrt(
+ torch.abs(c6.unsqueeze(-1) * c6.unsqueeze(-2) * c6.unsqueeze(-3))
)
r0ij = srvdw.unsqueeze(-1)
@@ -107,7 +108,7 @@ def dispersion_atm(
distances = torch.pow(
torch.where(
mask_pairs,
- cdist(positions, positions, p=2),
+ storch.cdist(positions, positions, p=2),
eps,
),
2.0,
@@ -124,7 +125,7 @@ def dispersion_atm(
# dividing by tiny numbers leads to huge numbers, which result in NaN's
# upon exponentiation in the subsequent step
- mask = real_triples(numbers, self=False)
+ mask = real_triples(numbers, mask_self=True)
base = r0 / torch.where(mask_triples, r1, one)
# to fix the previous mask, we mask again (not strictly necessary because
diff --git a/src/tad_dftd3/damping/rational.py b/src/tad_dftd3/damping/rational.py
index 9cd2818..5d08877 100644
--- a/src/tad_dftd3/damping/rational.py
+++ b/src/tad_dftd3/damping/rational.py
@@ -25,10 +25,14 @@
\dfrac{R^n_{\text{AB}}}{R^n_{\text{AB}} +
\left( a_1 R_0^{\text{AB}} + a_2 \right)^n}
"""
+from typing import Dict
+
import torch
from .. import defaults
-from .._typing import DD, Dict, Tensor
+from ..typing import DD, Tensor
+
+__all__ = ["rational_damping"]
def rational_damping(
diff --git a/src/tad_dftd3/data/__init__.py b/src/tad_dftd3/data/__init__.py
new file mode 100644
index 0000000..f5645e9
--- /dev/null
+++ b/src/tad_dftd3/data/__init__.py
@@ -0,0 +1,22 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Atomic data
+===========
+
+Data arrays for atomic constants like covalent radii or van-der-Waals radii.
+"""
+from .r4r2 import *
+from .radii import *
diff --git a/src/tad_dftd3/data/r4r2.py b/src/tad_dftd3/data/r4r2.py
new file mode 100644
index 0000000..4033c2a
--- /dev/null
+++ b/src/tad_dftd3/data/r4r2.py
@@ -0,0 +1,60 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Data: Expectation values
+========================
+
+PBE0/def2-QZVP atomic values calculated by S. Grimme in Gaussian (2010),
+rare gases recalculated by J. Mewes with PBE0/aug-cc-pVQZ in Dirac (2018).
+Also new super heavies Cn,Nh,Fl,Lv,Og and Am-Rg calculated at 4c-PBE/Dyall-AE4Z
+(Dirac 2022)
+"""
+import torch
+
+__all__ = ["R4R2"]
+
+
+# fmt: off
+r4_over_r2 = torch.tensor([
+ 0.0000, # None
+ 8.0589, 3.4698, # H,He
+ 29.0974,14.8517,11.8799, 7.8715, 5.5588, 4.7566, 3.8025, 3.1036, # Li-Ne
+ 26.1552,17.2304,17.7210,12.7442, 9.5361, 8.1652, 6.7463, 5.6004, # Na-Ar
+ 29.2012,22.3934, # K,Ca
+ 19.0598,16.8590,15.4023,12.5589,13.4788, # Sc-
+ 12.2309,11.2809,10.5569,10.1428, 9.4907, # -Zn
+ 13.4606,10.8544, 8.9386, 8.1350, 7.1251, 6.1971, # Ga-Kr
+ 30.0162,24.4103, # Rb,Sr
+ 20.3537,17.4780,13.5528,11.8451,11.0355, # Y-
+ 10.1997, 9.5414, 9.0061, 8.6417, 8.9975, # -Cd
+ 14.0834,11.8333,10.0179, 9.3844, 8.4110, 7.5152, # In-Xe
+ 32.7622,27.5708, # Cs,Ba
+ 23.1671,21.6003,20.9615,20.4562,20.1010,19.7475,19.4828, # La-Eu
+ 15.6013,19.2362,17.4717,17.8321,17.4237,17.1954,17.1631, # Gd-Yb
+ 14.5716,15.8758,13.8989,12.4834,11.4421, # Lu-
+ 10.2671, 8.3549, 7.8496, 7.3278, 7.4820, # -Hg
+ 13.5124,11.6554,10.0959, 9.7340, 8.8584, 8.0125, # Tl-Rn
+ 29.8135,26.3157, # Fr,Ra
+ 19.1885,15.8542,16.1305,15.6161,15.1226,16.1576,14.6510, # Ac-Am
+ 14.7178,13.9108,13.5623,13.2326,12.9189,12.6133,12.3142, # Cm-No
+ 14.8326,12.3771,10.6378, 9.3638, 8.2297, # Lr-
+ 7.5667, 6.9456, 6.3946, 5.9159, 5.4929, # -Cn
+ 6.7286, 6.5144,10.9169,10.3600, 9.4723, 8.6641, # Nh-Og
+]) # fmt: on
+"""Actually calculated r⁴ over r² expectation values."""
+# fmt: on
+
+R4R2 = torch.sqrt(0.5 * (r4_over_r2 * torch.sqrt(torch.arange(r4_over_r2.shape[0]))))
+"""r⁴ over r² expectation values."""
diff --git a/src/tad_dftd3/data.py b/src/tad_dftd3/data/radii.py
similarity index 96%
rename from src/tad_dftd3/data.py
rename to src/tad_dftd3/data/radii.py
index 1d551f6..9dbe2b8 100644
--- a/src/tad_dftd3/data.py
+++ b/src/tad_dftd3/data/radii.py
@@ -13,92 +13,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
-Atomic data
+Data: Radii
===========
Data arrays for atomic constants like covalent radii or van-der-Waals radii.
"""
import torch
+from tad_mctc.data.radii import COV_D3
+from tad_mctc.typing import Tensor
+from tad_mctc.units.length import ANGSTROM_TO_BOHR
-from . import constants
-from ._typing import Tensor
-
-# fmt: off
-covalent_rad_2009 = constants.ANGSTROM_TO_BOHR * torch.tensor([
- 0.00, # None
- 0.32,0.46, # H,He
- 1.20,0.94,0.77,0.75,0.71,0.63,0.64,0.67, # Li-Ne
- 1.40,1.25,1.13,1.04,1.10,1.02,0.99,0.96, # Na-Ar
- 1.76,1.54, # K,Ca
- 1.33,1.22,1.21,1.10,1.07, # Sc-
- 1.04,1.00,0.99,1.01,1.09, # -Zn
- 1.12,1.09,1.15,1.10,1.14,1.17, # Ga-Kr
- 1.89,1.67, # Rb,Sr
- 1.47,1.39,1.32,1.24,1.15, # Y-
- 1.13,1.13,1.08,1.15,1.23, # -Cd
- 1.28,1.26,1.26,1.23,1.32,1.31, # In-Xe
- 2.09,1.76, # Cs,Ba
- 1.62,1.47,1.58,1.57,1.56,1.55,1.51, # La-Eu
- 1.52,1.51,1.50,1.49,1.49,1.48,1.53, # Gd-Yb
- 1.46,1.37,1.31,1.23,1.18, # Lu-
- 1.16,1.11,1.12,1.13,1.32, # -Hg
- 1.30,1.30,1.36,1.31,1.38,1.42, # Tl-Rn
- 2.01,1.81, # Fr,Ra
- 1.67,1.58,1.52,1.53,1.54,1.55,1.49, # Ac-Am
- 1.49,1.51,1.51,1.48,1.50,1.56,1.58, # Cm-No
- 1.45,1.41,1.34,1.29,1.27, # Lr-
- 1.21,1.16,1.15,1.09,1.22, # -Cn
- 1.36,1.43,1.46,1.58,1.48,1.57 ]) # Nh-Og
-# fmt: on
-"""
-Covalent radii (taken from Pyykko and Atsumi, Chem. Eur. J. 15, 2009, 188-197).
-Values for metals decreased by 10 %.
-"""
-
-
-covalent_rad_d3 = 4.0 / 3.0 * covalent_rad_2009
-"""D3 covalent radii used to construct the coordination number"""
-
-
-# fmt: off
-r4_over_r2 = torch.tensor([
- 0.0000, # None
- 8.0589, 3.4698, # H,He
- 29.0974,14.8517,11.8799, 7.8715, 5.5588, 4.7566, 3.8025, 3.1036, # Li-Ne
- 26.1552,17.2304,17.7210,12.7442, 9.5361, 8.1652, 6.7463, 5.6004, # Na-Ar
- 29.2012,22.3934, # K,Ca
- 19.0598,16.8590,15.4023,12.5589,13.4788, # Sc-
- 12.2309,11.2809,10.5569,10.1428, 9.4907, # -Zn
- 13.4606,10.8544, 8.9386, 8.1350, 7.1251, 6.1971, # Ga-Kr
- 30.0162,24.4103, # Rb,Sr
- 20.3537,17.4780,13.5528,11.8451,11.0355, # Y-
- 10.1997, 9.5414, 9.0061, 8.6417, 8.9975, # -Cd
- 14.0834,11.8333,10.0179, 9.3844, 8.4110, 7.5152, # In-Xe
- 32.7622,27.5708, # Cs,Ba
- 23.1671,21.6003,20.9615,20.4562,20.1010,19.7475,19.4828, # La-Eu
- 15.6013,19.2362,17.4717,17.8321,17.4237,17.1954,17.1631, # Gd-Yb
- 14.5716,15.8758,13.8989,12.4834,11.4421, # Lu-
- 10.2671, 8.3549, 7.8496, 7.3278, 7.4820, # -Hg
- 13.5124,11.6554,10.0959, 9.7340, 8.8584, 8.0125, # Tl-Rn
- 29.8135,26.3157, # Fr,Ra
- 19.1885,15.8542,16.1305,15.6161,15.1226,16.1576,14.6510, # Ac-Am
- 14.7178,13.9108,13.5623,13.2326,12.9189,12.6133,12.3142, # Cm-No
- 14.8326,12.3771,10.6378, 9.3638, 8.2297, # Lr-
- 7.5667, 6.9456, 6.3946, 5.9159, 5.4929, # -Cn
- 6.7286, 6.5144,10.9169,10.3600, 9.4723, 8.6641, # Nh-Og
-]) # fmt: on
-"""
-PBE0/def2-QZVP atomic values calculated by S. Grimme in Gaussian (2010),
-rare gases recalculated by J. Mewes with PBE0/aug-cc-pVQZ in Dirac (2018).
-Also new super heavies Cn,Nh,Fl,Lv,Og and Am-Rg calculated at 4c-PBE/Dyall-AE4Z (Dirac 2022)
-"""
-
-sqrt_z_r4_over_r2 = torch.sqrt(
- 0.5 * (r4_over_r2 * torch.sqrt(torch.arange(r4_over_r2.shape[0])))
-)
+__all__ = ["COV_D3", "VDW_D3"]
-_vdw_rad_d3 = constants.ANGSTROM_TO_BOHR * torch.tensor(
+_vdw_rad_d3 = ANGSTROM_TO_BOHR * torch.tensor(
[
2.1823,
1.8547,
@@ -5475,4 +5403,4 @@ def _load_vdw_rad_d3(dtype: torch.dtype = torch.double) -> Tensor:
return rad
-vdw_rad_d3 = _load_vdw_rad_d3()
+VDW_D3 = _load_vdw_rad_d3()
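``COV_D3`` and ``VDW_D3`` follow the same lookup pattern as before the rename; only the names change. A small sketch of per-atom and pairwise indexing, mirroring the usage in the README and ``examples/batch.py`` (the numbers tensor is again illustrative):

.. code:: python

    import torch
    import tad_dftd3 as d3

    numbers = torch.tensor([8, 1, 1])  # O, H, H
    rcov = d3.data.COV_D3[numbers]     # covalent radii, shape (3,)
    rvdw = d3.data.VDW_D3[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]  # pairwise radii, shape (3, 3)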
diff --git a/src/tad_dftd3/defaults.py b/src/tad_dftd3/defaults.py
index 68e2c29..9c142cc 100644
--- a/src/tad_dftd3/defaults.py
+++ b/src/tad_dftd3/defaults.py
@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
-Default values
-==============
+Defaults
+========
This module defines the default values for all parameters within DFT-D3.
"""
@@ -52,3 +52,8 @@
ALP = 14.0
"""Exponent of zero damping function (14.0)."""
+
+# other
+
+MAX_ELEMENT = 104
+"""Atomic number (+1 for dummy) of last element supported by DFT-D3."""
diff --git a/src/tad_dftd3/disp.py b/src/tad_dftd3/disp.py
index db2602d..b5d4476 100644
--- a/src/tad_dftd3/disp.py
+++ b/src/tad_dftd3/disp.py
@@ -52,12 +52,106 @@
>>> print(torch.sum(energy[0] - energy[1] - energy[2])) # energy in Hartree
tensor(-0.0003964, dtype=torch.float64)
"""
+from typing import Dict, Optional
+
import torch
+from tad_mctc import ncoord, storch
+from tad_mctc.batch import real_pairs
+from tad_mctc.data import pse
-from . import constants, data, defaults
-from ._typing import DD, Any, DampingFunction, Dict, Optional, Tensor
+from . import data, defaults, model
from .damping import dispersion_atm, rational_damping
-from .utils import cdist, real_pairs
+from .reference import Reference
+from .typing import (
+ DD,
+ Any,
+ CountingFunction,
+ DampingFunction,
+ Tensor,
+ WeightingFunction,
+)
+
+
+def dftd3(
+ numbers: Tensor,
+ positions: Tensor,
+ param: Dict[str, Tensor],
+ *,
+ ref: Optional[Reference] = None,
+ rcov: Optional[Tensor] = None,
+ rvdw: Optional[Tensor] = None,
+ r4r2: Optional[Tensor] = None,
+ cutoff: Optional[Tensor] = None,
+ counting_function: CountingFunction = ncoord.exp_count,
+ weighting_function: WeightingFunction = model.gaussian_weight,
+ damping_function: DampingFunction = rational_damping,
+) -> Tensor:
+ """
+ Evaluate DFT-D3 dispersion energy for a batch of geometries.
+
+ Parameters
+ ----------
+ numbers : torch.Tensor
+ Atomic numbers of the atoms in the system.
+ positions : torch.Tensor
+ Cartesian coordinates of the atoms in the system.
+ param : dict[str, Tensor]
+ DFT-D3 damping parameters.
+ ref : reference.Reference, optional
+ Reference C6 coefficients.
+ rcov : torch.Tensor, optional
+ Covalent radii of the atoms in the system.
+ rvdw : torch.Tensor, optional
+ Van der Waals radii of the atoms in the system.
+ r4r2 : torch.Tensor, optional
+ r⁴ over r² expectation values of the atoms in the system.
+ damping_function : Callable, optional
+ Damping function to evaluate distance-dependent contributions.
+ weighting_function : Callable, optional
+ Function to calculate weight of individual reference systems.
+ counting_function : Callable, optional
+ Calculates counting value in range 0 to 1 for each atom pair.
+
+ Returns
+ -------
+ Tensor
+ Atom-resolved DFT-D3 dispersion energy for each geometry.
+ """
+ dd: DD = {"device": positions.device, "dtype": positions.dtype}
+
+ if torch.max(numbers) >= defaults.MAX_ELEMENT:
+ raise ValueError(
+ f"No D3 parameters available for Z > {defaults.MAX_ELEMENT-1} "
+ f"({pse.Z2S[defaults.MAX_ELEMENT]})."
+ )
+
+ if cutoff is None:
+ cutoff = torch.tensor(defaults.D3_DISP_CUTOFF, **dd)
+ if ref is None:
+ ref = Reference(**dd)
+ if rcov is None:
+ rcov = data.COV_D3.to(**dd)[numbers]
+ if rvdw is None:
+ rvdw = data.VDW_D3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
+ if r4r2 is None:
+ r4r2 = data.R4R2.to(**dd)[numbers]
+
+ cn = ncoord.cn_d3(
+ numbers, positions, counting_function=counting_function, rcov=rcov
+ )
+ weights = model.weight_references(numbers, cn, ref, weighting_function)
+ c6 = model.atomic_c6(numbers, weights, ref)
+
+ return dispersion(
+ numbers,
+ positions,
+ param,
+ c6,
+ rvdw,
+ r4r2,
+ damping_function,
+ cutoff=cutoff,
+ )
def dispersion(
@@ -102,7 +196,7 @@ def dispersion(
if cutoff is None:
cutoff = torch.tensor(defaults.D3_DISP_CUTOFF, **dd)
if r4r2 is None:
- r4r2 = data.sqrt_z_r4_over_r2.to(**dd)[numbers]
+ r4r2 = data.R4R2.to(**dd)[numbers]
if numbers.shape != positions.shape[:-1]:
raise ValueError(
@@ -112,10 +206,10 @@ def dispersion(
raise ValueError(
"Shape of expectation values is not consistent with atomic numbers.",
)
- if torch.max(numbers) >= constants.MAX_ELEMENT:
+ if torch.max(numbers) >= defaults.MAX_ELEMENT:
raise ValueError(
- f"No D3 parameters available for Z > {constants.MAX_ELEMENT-1} "
- f"({constants.PSE_Z2S[constants.MAX_ELEMENT]})."
+ f"No D3 parameters available for Z > {defaults.MAX_ELEMENT-1} "
+ f"({pse.Z2S[defaults.MAX_ELEMENT]})."
)
# two-body dispersion
@@ -126,9 +220,7 @@ def dispersion(
# three-body dispersion
if "s9" in param and param["s9"] != 0.0:
if rvdw is None:
- rvdw = data.vdw_rad_d3.to(**dd)[
- numbers.unsqueeze(-1), numbers.unsqueeze(-2)
- ]
+ rvdw = data.VDW_D3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
energy += dispersion3(numbers, positions, param, c6, rvdw, cutoff)
@@ -166,10 +258,10 @@ def dispersion2(
"""
dd: DD = {"device": positions.device, "dtype": positions.dtype}
- mask = real_pairs(numbers, diagonal=False)
+ mask = real_pairs(numbers, mask_diagonal=True)
distances = torch.where(
mask,
- cdist(positions, positions, p=2),
+ storch.cdist(positions, positions, p=2),
torch.tensor(torch.finfo(positions.dtype).eps, **dd),
)
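Since the ``dftd3`` wrapper now lives in ``disp.py`` and is re-exported from the package root, existing top-level calls keep working. A minimal sketch, reusing the water geometry from the ``model.py`` docstring and the damping parameters from the README (illustrative only):

.. code:: python

    import torch
    import tad_dftd3 as d3

    numbers = torch.tensor([8, 1, 1])  # O, H, H, coordinates in Bohr
    positions = torch.tensor([
        [+0.00000000000000, +0.00000000000000, -0.73578586109551],
        [+1.44183152868459, +0.00000000000000, +0.36789293054775],
        [-1.44183152868459, +0.00000000000000, +0.36789293054775],
    ])
    param = {
        "a1": torch.tensor(0.49484001),
        "s8": torch.tensor(0.78981345),
        "a2": torch.tensor(5.73083694),
    }

    energy = d3.dftd3(numbers, positions, param)  # atom-resolved dispersion energy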
diff --git a/src/tad_dftd3/exception.py b/src/tad_dftd3/exception.py
index 450c84d..393350d 100644
--- a/src/tad_dftd3/exception.py
+++ b/src/tad_dftd3/exception.py
@@ -13,11 +13,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""
+Exceptions
+==========
+
Possible exceptions which can be raised by this module.
"""
+__all__ = ["DFTD3Error"]
class DFTD3Error(Exception):
- """Base class for exceptions raised by this module."""
+ """
+ Base class for exceptions raised by this module.
+ """
pass
diff --git a/src/tad_dftd3/model.py b/src/tad_dftd3/model.py
index ffe48c1..b7dabc2 100644
--- a/src/tad_dftd3/model.py
+++ b/src/tad_dftd3/model.py
@@ -22,7 +22,8 @@
--------
>>> import torch
>>> import tad_dftd3 as d3
->>> numbers = d3.util.to_number(["O", "H", "H"])
+>>> import tad_mctc as mctc
+>>> numbers = mctc.convert.symbol_to_number(["O", "H", "H"])
>>> positions = torch.Tensor([
... [+0.00000000000000, +0.00000000000000, -0.73578586109551],
... [+1.44183152868459, +0.00000000000000, +0.36789293054775],
@@ -30,7 +31,7 @@
... ])
>>> ref = d3.reference.Reference()
>>> rcov = d3.data.covalent_rad_d3[numbers]
->>> cn = d3.ncoord.coordination_number(numbers, positions, rcov, d3.ncoord.exp_count)
+>>> cn = mctc.ncoord.cn_d3(numbers, positions, rcov=rcov, counting_function=mctc.ncoord.exp_count)
>>> weights = d3.model.weight_references(numbers, cn, ref, d3.model.gaussian_weight)
>>> c6 = d3.model.atomic_c6(numbers, weights, ref)
>>> torch.set_printoptions(precision=7)
@@ -40,10 +41,10 @@
[ 5.4368822, 3.0930154, 3.0930154]], dtype=torch.float64)
"""
import torch
+from tad_mctc.batch import real_atoms
-from ._typing import Any, Tensor, WeightingFunction
from .reference import Reference
-from .utils import real_atoms
+from .typing import Any, Tensor, WeightingFunction
def atomic_c6(numbers: Tensor, weights: Tensor, reference: Reference) -> Tensor:
diff --git a/src/tad_dftd3/ncoord/__init__.py b/src/tad_dftd3/ncoord/__init__.py
deleted file mode 100644
index c4e611f..0000000
--- a/src/tad_dftd3/ncoord/__init__.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Coordination number
-===================
-
-Evaluates a fractional coordination number for a given geometry or batch of geometries.
-
-Examples
---------
->>> import torch
->>> import tad_dftd3 as d3
->>> numbers = d3.util.pack((
-... torch.tensor([7, 1, 1, 1]),
-... torch.tensor([6, 8, 7, 1, 1, 1]),
-... torch.tensor([6, 8, 8, 1, 1]),
-... ))
->>> positions = d3.util.pack((
-... torch.tensor([
-... [+0.00000000000000, +0.00000000000000, -0.54524837997150],
-... [-0.88451840382282, +1.53203081565085, +0.18174945999050],
-... [-0.88451840382282, -1.53203081565085, +0.18174945999050],
-... [+1.76903680764564, +0.00000000000000, +0.18174945999050],
-... ]),
-... torch.tensor([
-... [-0.55569743203406, +1.09030425468557, +0.00000000000000],
-... [+0.51473634678469, +3.15152550263611, +0.00000000000000],
-... [+0.59869690244446, -1.16861263789477, +0.00000000000000],
-... [-0.45355203669134, -2.74568780438064, +0.00000000000000],
-... [+2.52721209544999, -1.29200800956867, +0.00000000000000],
-... [-2.63139587595376, +0.96447869452240, +0.00000000000000],
-... ]),
-... torch.tensor([
-... [-0.53424386915034, -0.55717948166537, +0.00000000000000],
-... [+0.21336223456096, +1.81136801357186, +0.00000000000000],
-... [+0.82345103924195, -2.42214694643037, +0.00000000000000],
-... [-2.59516465056138, -0.70672678063558, +0.00000000000000],
-... [+2.09259524590881, +1.87468519515944, +0.00000000000000],
-... ]),
-... ))
->>> rcov = d3.data.covalent_rad_d3[numbers]
->>> cn = d3.ncoord.coordination_number(numbers, positions, rcov, d3.ncoord.exp_count)
->>> torch.set_printoptions(precision=7)
->>> print(cn)
-tensor([[2.9901006, 0.9977214, 0.9977214, 0.9977214, 0.0000000, 0.0000000],
- [3.0059586, 1.0318390, 3.0268824, 1.0061584, 1.0036336, 0.9989871],
- [3.0093639, 2.0046251, 1.0187057, 0.9978270, 1.0069743, 0.0000000]])
-"""
-from .count import dexp_count, exp_count
-from .d3 import coordination_number_d3 as coordination_number
diff --git a/src/tad_dftd3/ncoord/count.py b/src/tad_dftd3/ncoord/count.py
deleted file mode 100644
index d62f450..0000000
--- a/src/tad_dftd3/ncoord/count.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Coordination number: Counting functions
-=======================================
-
-This module contains the exponential and the error function counting functions
-for the determination of the coordination number.
-
-Only the exponential counting function is used within the D3 model.
-Additionally, the analytical derivatives for the counting functions is also
-provided and can be used for checking the autograd results.
-"""
-import torch
-
-from .. import defaults
-from .._typing import Tensor
-
-__all__ = ["exp_count", "dexp_count"]
-
-
-def exp_count(r: Tensor, r0: Tensor, kcn: float = defaults.D3_KCN) -> Tensor:
- """
- Exponential counting function for coordination number contributions.
-
- Parameters
- ----------
- r : Tensor
- Current distance.
- r0 : Tensor
- Cutoff radius.
- kcn : float
- Steepness of the counting function.
-
- Returns
- -------
- Tensor
- Count of coordination number contribution.
- """
- return 1.0 / (1.0 + torch.exp(-kcn * (r0 / r - 1.0)))
-
-
-def dexp_count(r: Tensor, r0: Tensor, kcn: float = defaults.D3_KCN) -> Tensor:
- """
- Derivative of the exponential counting function w.r.t. the distance.
-
- Parameters
- ----------
- r : Tensor
- Internuclear distances.
- r0 : Tensor
- Covalent atomic radii (R_AB = R_A + R_B).
- kcn : float, optional
- Steepness of the counting function. Defaults to `defaults.D4_KCN`.
-
- Returns
- -------
- Tensor
- Derivative of count of coordination number contribution.
- """
- expterm = torch.exp(-kcn * (r0 / r - 1.0))
- return (-kcn * r0 * expterm) / (r**2 * ((expterm + 1.0) ** 2))
diff --git a/src/tad_dftd3/ncoord/d3.py b/src/tad_dftd3/ncoord/d3.py
deleted file mode 100644
index 3df4c01..0000000
--- a/src/tad_dftd3/ncoord/d3.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Coordination number: D3
-=======================
-
-Calculation of D3 coordination number.
-"""
-import torch
-
-from .. import data, defaults
-from .._typing import DD, Any, CountingFunction, Optional, Tensor
-from ..utils import cdist, real_pairs
-from .count import exp_count
-
-__all__ = ["coordination_number_d3"]
-
-
-def coordination_number_d3(
- numbers: Tensor,
- positions: Tensor,
- counting_function: CountingFunction = exp_count,
- rcov: Optional[Tensor] = None,
- cutoff: Optional[Tensor] = None,
- **kwargs: Any,
-) -> Tensor:
- """
- Calculate the coordination number of each atom in the system.
-
- Parameters
- ----------
- numbers : Tensor
- The atomic numbers of the atoms in the system.
- positions : Tensor
- The positions of the atoms in the system.
-    counting_function : CountingFunction
-        Calculates the counting value in the range 0 to 1 from a batch of
-        distances and covalent radii; additional parameters can be passed
-        through via keyword arguments.
-    rcov : Tensor | None, optional
-        Covalent radii for all atoms in the system. Defaults to the D3
-        covalent radii.
-    cutoff : Tensor | None, optional
-        Real-space cutoff for the evaluation of the counting function.
-
-    Returns
-    -------
-    Tensor
-        The coordination number of each atom in the system.
- """
- dd: DD = {"device": positions.device, "dtype": positions.dtype}
-
- if cutoff is None:
- cutoff = torch.tensor(defaults.D3_CN_CUTOFF, **dd)
- if rcov is None:
- rcov = data.covalent_rad_d3.to(**dd)[numbers]
- if numbers.shape != rcov.shape:
- raise ValueError(
- "Shape of covalent radii is not consistent with atomic numbers"
- )
- if numbers.shape != positions.shape[:-1]:
- raise ValueError("Shape of positions is not consistent with atomic numbers")
-
- mask = real_pairs(numbers, diagonal=False)
- distances = torch.where(
- mask,
- cdist(positions, positions, p=2),
- torch.tensor(torch.finfo(positions.dtype).eps, **dd),
- )
-
- rc = rcov.unsqueeze(-2) + rcov.unsqueeze(-1)
- cf = torch.where(
- mask * (distances <= cutoff),
- counting_function(distances, rc.to(**dd), **kwargs),
- torch.tensor(0.0, **dd),
- )
- return torch.sum(cf, dim=-1)
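The deleted `coordination_number_d3` evaluates CN_A = Σ_B f_count(r_AB, R_A + R_B), with masks for padded atoms and a real-space cutoff. A minimal dense sketch of that sum for a single, unpadded geometry; the covalent radius is an illustrative placeholder, not a value from `data.covalent_rad_d3`.

```python
import torch

def simple_cn(positions: torch.Tensor, rcov: torch.Tensor, kcn: float = 16.0) -> torch.Tensor:
    """CN_A = sum_B 1 / (1 + exp(-kcn * ((R_A + R_B) / r_AB - 1)))"""
    nat = positions.shape[-2]
    dist = torch.cdist(positions, positions, p=2).clamp(min=1e-12)
    rc = rcov.unsqueeze(-1) + rcov.unsqueeze(-2)
    cf = 1.0 / (1.0 + torch.exp(-kcn * (rc / dist - 1.0)))
    # exclude the self-pair A == B before summing over neighbours
    cf = cf.masked_fill(torch.eye(nat, dtype=torch.bool), 0.0)
    return cf.sum(-1)

# toy H2 geometry in Bohr
positions = torch.tensor([[0.0, 0.0, 0.0], [0.0, 0.0, 1.4]], dtype=torch.double)
rcov = torch.tensor([0.8, 0.8], dtype=torch.double)
print(simple_cn(positions, rcov))  # one bonded neighbour each -> CN close to 1
```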
diff --git a/src/tad_dftd3/reference.py b/src/tad_dftd3/reference.py
index 1874e82..b6a8379 100644
--- a/src/tad_dftd3/reference.py
+++ b/src/tad_dftd3/reference.py
@@ -20,11 +20,11 @@
C6 dispersion coefficients.
"""
import os.path as op
+from typing import Optional
import torch
-from ._typing import Any, NoReturn, Optional, Tensor
-from .utils import get_default_device, get_default_dtype
+from .typing import Any, NoReturn, Tensor, get_default_device, get_default_dtype
def _load_cn(
diff --git a/src/tad_dftd3/typing/__init__.py b/src/tad_dftd3/typing/__init__.py
new file mode 100644
index 0000000..45c1d5b
--- /dev/null
+++ b/src/tad_dftd3/typing/__init__.py
@@ -0,0 +1,23 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Type annotations
+================
+
+All type annotations for this project.
+"""
+from .builtin import *
+from .d3 import *
+from .pytorch import *
diff --git a/src/tad_dftd3/typing/builtin.py b/src/tad_dftd3/typing/builtin.py
new file mode 100644
index 0000000..dd86c75
--- /dev/null
+++ b/src/tad_dftd3/typing/builtin.py
@@ -0,0 +1,24 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Type annotations: Built-ins
+===========================
+
+Built-in type annotations are imported from the *tad-mctc* library, which
+handles some version checking.
+"""
+from tad_mctc.typing import Any, Callable, NoReturn, TypedDict
+
+__all__ = ["Any", "Callable", "NoReturn", "TypedDict"]
diff --git a/src/tad_dftd3/typing/d3.py b/src/tad_dftd3/typing/d3.py
new file mode 100644
index 0000000..305e95a
--- /dev/null
+++ b/src/tad_dftd3/typing/d3.py
@@ -0,0 +1,26 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Type annotations: D3
+====================
+
+DFT-D3-specific type annotations.
+"""
+from tad_mctc.typing import Callable, Tensor
+
+__all__ = ["WeightingFunction"]
+
+
+WeightingFunction = Callable[[Tensor], Tensor]
diff --git a/src/tad_dftd3/typing/pytorch.py b/src/tad_dftd3/typing/pytorch.py
new file mode 100644
index 0000000..7411476
--- /dev/null
+++ b/src/tad_dftd3/typing/pytorch.py
@@ -0,0 +1,41 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Type annotations: PyTorch
+=========================
+
+PyTorch-related type annotations for this project.
+"""
+from tad_mctc.typing import (
+ DD,
+ CountingFunction,
+ DampingFunction,
+ Molecule,
+ Tensor,
+ TensorOrTensors,
+ get_default_device,
+ get_default_dtype,
+)
+
+__all__ = [
+ "DD",
+ "CountingFunction",
+ "DampingFunction",
+ "Molecule",
+ "Tensor",
+ "TensorOrTensors",
+ "get_default_device",
+ "get_default_dtype",
+]
diff --git a/src/tad_dftd3/utils/__init__.py b/src/tad_dftd3/utils/__init__.py
deleted file mode 100644
index 22b6141..0000000
--- a/src/tad_dftd3/utils/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-"""
-Utility functions
-=================
-
-This module contains a collection of utility functions for masking, distance
-calculation, derivatives and others.
-"""
-from .distance import *
-from .grad import *
-from .misc import *
diff --git a/src/tad_dftd3/utils/distance.py b/src/tad_dftd3/utils/distance.py
deleted file mode 100644
index b54a921..0000000
--- a/src/tad_dftd3/utils/distance.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Utility functions: Distance
-===========================
-
-Functions for calculating the Cartesian distance between two sets of vectors.
-"""
-import torch
-
-from .._typing import Optional, Tensor
-
-__all__ = ["cdist"]
-
-
-def euclidean_dist_quadratic_expansion(x: Tensor, y: Tensor) -> Tensor:
- """
- Computation of euclidean distance matrix via quadratic expansion (sum of
- squared differences or L2-norm of differences).
-
- While this is significantly faster than the "direct expansion" or
- "broadcast" approach, it only works for euclidean (p=2) distances.
-    Additionally, it has issues with numerical stability (the diagonal slightly
-    deviates from zero for ``x=y``). This instability should not pose
-    problems, since we must remove zeros anyway for batched calculations.
-
- For more information, see \
- `this Jupyter notebook `__ or \
- `this discussion thread in the PyTorch forum `__.
-
- Parameters
- ----------
- x : Tensor
- First tensor.
- y : Tensor
- Second tensor (with same shape as first tensor).
-
- Returns
- -------
- Tensor
- Pair-wise distance matrix.
- """
- eps = torch.tensor(
- torch.finfo(x.dtype).eps,
- device=x.device,
- dtype=x.dtype,
- )
-
- # using einsum is slightly faster than `torch.pow(x, 2).sum(-1)`
- xnorm = torch.einsum("...ij,...ij->...i", x, x)
- ynorm = torch.einsum("...ij,...ij->...i", y, y)
-
- n = xnorm.unsqueeze(-1) + ynorm.unsqueeze(-2)
-
- # x @ y.mT
- prod = torch.einsum("...ik,...jk->...ij", x, y)
-
- # important: remove negative values that give NaN in backward
- return torch.sqrt(torch.clamp(n - 2.0 * prod, min=eps))
-
-
-def cdist_direct_expansion(x: Tensor, y: Tensor, p: int = 2) -> Tensor:
- """
- Computation of cartesian distance matrix.
-
-    In contrast to `euclidean_dist_quadratic_expansion`, this function allows
-    arbitrary powers but is considerably slower.
-
- Parameters
- ----------
- x : Tensor
- First tensor.
- y : Tensor
- Second tensor (with same shape as first tensor).
- p : int, optional
- Power used in the distance evaluation (p-norm). Defaults to 2.
-
- Returns
- -------
- Tensor
- Pair-wise distance matrix.
- """
- eps = torch.tensor(
- torch.finfo(x.dtype).eps,
- device=x.device,
- dtype=x.dtype,
- )
-
- # unsqueeze different dimension to create matrix
- diff = torch.abs(x.unsqueeze(-2) - y.unsqueeze(-3))
-
- # einsum is nearly twice as fast!
- if p == 2:
- distances = torch.einsum("...ijk,...ijk->...ij", diff, diff)
- else:
- distances = torch.sum(torch.pow(diff, p), -1)
-
- return torch.pow(torch.clamp(distances, min=eps), 1.0 / p)
-
-
-def cdist(x: Tensor, y: Optional[Tensor] = None, p: int = 2) -> Tensor:
- """
- Wrapper for cartesian distance computation.
-
-    This currently replaces the use of ``torch.cdist``, which does not handle
-    zeros well and produces NaNs in the backward pass.
-
-    Additionally, ``torch.cdist`` does not return zero for distances between
-    identical vectors (see `here
-    `__).
-
- Parameters
- ----------
- x : Tensor
- First tensor.
- y : Tensor | None, optional
- Second tensor. If no second tensor is given (default), the first tensor
- is used as the second tensor, too.
- p : int, optional
- Power used in the distance evaluation (p-norm). Defaults to 2.
-
- Returns
- -------
- Tensor
- Pair-wise distance matrix.
- """
- if y is None:
- y = x
-
- # faster
- if p == 2:
- return euclidean_dist_quadratic_expansion(x, y)
-
- return cdist_direct_expansion(x, y, p=p)
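A quick self-contained check of the quadratic-expansion trick described above: off-diagonal distances agree with `torch.cdist`, while the diagonal deviates from zero only on the order of the square root of machine epsilon (the instability the docstring mentions). The helper restates the deleted function rather than importing it.

```python
import torch

def dist_quadratic(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # ||x_i - y_j||^2 = ||x_i||^2 + ||y_j||^2 - 2 <x_i, y_j>, clamped before the sqrt
    eps = torch.finfo(x.dtype).eps
    xnorm = torch.einsum("...ij,...ij->...i", x, x)
    ynorm = torch.einsum("...ij,...ij->...i", y, y)
    prod = torch.einsum("...ik,...jk->...ij", x, y)
    return torch.sqrt(torch.clamp(xnorm.unsqueeze(-1) + ynorm.unsqueeze(-2) - 2.0 * prod, min=eps))

x = torch.randn(8, 3, dtype=torch.double)
d, ref = dist_quadratic(x, x), torch.cdist(x, x, p=2)

offdiag = ~torch.eye(8, dtype=torch.bool)
assert torch.allclose(d[offdiag], ref[offdiag])
print(d.diagonal().max())  # small but nonzero, roughly sqrt(eps)
```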
diff --git a/src/tad_dftd3/utils/grad.py b/src/tad_dftd3/utils/grad.py
deleted file mode 100644
index 10d152d..0000000
--- a/src/tad_dftd3/utils/grad.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Utility functions: Gradient
-===========================
-
-Utilities for calculating gradients and Hessians.
-"""
-import torch
-
-from ..__version__ import __torch_version__
-from .._typing import Any, Callable, Tensor, Tuple
-
-__all__ = ["jac", "hessian"]
-
-
-if __torch_version__ < (2, 0, 0): # type: ignore # pragma: no cover
- try:
- from functorch import jacrev # type: ignore
- except ModuleNotFoundError:
- jacrev = None
- from torch.autograd.functional import jacobian # type: ignore
-
-else: # pragma: no cover
- from torch.func import jacrev # type: ignore
-
-
-def jac(f: Callable[..., Tensor], argnums: int = 0) -> Any: # pragma: no cover
- """
-    Wrapper for Jacobian calculation.
-
- Parameters
- ----------
- f : Callable[[Any], Tensor]
- The function whose result is differentiated.
- argnums : int, optional
-        Index of the argument with respect to which `f` is differentiated. Defaults to 0.
- """
-
- if jacrev is None:
-
- def wrap(*inps: Any) -> Any:
- """
- Wrapper to imitate the calling signature of functorch's `jacrev`
- with `torch.autograd.functional.jacobian`.
-
- Parameters
- ----------
- inps : tuple[Any, ...]
- The input parameters of the function `f`.
-
- Returns
- -------
- Any
- Jacobian function.
-
- Raises
- ------
- RuntimeError
- The parameter selected for differentiation (via `argnums`) is
- not a tensor.
- """
- diffarg = inps[argnums]
- if not isinstance(diffarg, Tensor):
- raise RuntimeError(
- f"The {argnums}'th input parameter must be a tensor but is "
- f"of type '{type(diffarg)}'."
- )
-
- before = inps[:argnums]
- after = inps[(argnums + 1) :]
-
-            # `jacobian` only takes tensors, requiring another wrapper that
-            # passes the non-tensor arguments to the function `f`
- def _f(arg: Tensor) -> Tensor:
- return f(*(*before, arg, *after))
-
- return jacobian(_f, inputs=diffarg) # type: ignore # pylint: disable=used-before-assignment
-
- return wrap
-
- return jacrev(f, argnums=argnums) # type: ignore
-
-
-def hessian(
- f: Callable[..., Tensor],
- inputs: Tuple[Any, ...],
- argnums: int,
- is_batched: bool = False,
-) -> Tensor:
- """
- Wrapper for Hessian. The Hessian is the Jacobian of the gradient.
-
-    PyTorch, however, suggests calculating the Jacobian of the Jacobian, which
-    does not yield the correct shape here, since `f` may return a non-scalar
-    tensor that is summed before differentiation (see `_grad` below).
-
- Parameters
- ----------
- f : Callable[[Any], Tensor]
- The function whose result is differentiated.
- inputs : tuple[Any, ...]
- The input parameters of `f`.
-    argnums : int
-        Index of the argument with respect to which `f` is differentiated.
-
- Returns
- -------
- Tensor
- The Hessian.
-
- Raises
- ------
- RuntimeError
- The parameter selected for differentiation (via `argnums`) is not a
- tensor.
- """
-
- def _grad(*inps: Tuple[Any, ...]) -> Tensor:
- e = f(*inps).sum()
-
- if not isinstance(inps[argnums], Tensor): # pragma: no cover
- raise RuntimeError(
- f"The {argnums}'th input parameter must be a tensor but is of "
- f"type '{type(inps[argnums])}'."
- )
-
- # catch missing gradients
- if e.grad_fn is None:
- return torch.zeros_like(inps[argnums]) # type: ignore
-
- (g,) = torch.autograd.grad(
- e,
- inps[argnums],
- create_graph=True,
- )
- return g
-
- _jac = jac(_grad, argnums=argnums)
-
- if is_batched:
- raise NotImplementedError("Batched Hessian not available.")
- # dims = Tuple(None if x != argnums else 0 for x in range(len(inputs)))
- # _jac = torch.func.vmap(_jac, in_dims=dims)
-
- return _jac(*inputs) # type: ignore
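The gradient wrappers are removed as well. The pattern they implement, the Hessian as the Jacobian of an autograd gradient, can be sketched directly with `torch.func` on PyTorch >= 2.0, using a toy pair-repulsion energy as a stand-in for the dispersion energy; because this toy energy already returns a scalar, the `.sum()` step from `_grad` above is not needed.

```python
import torch
from torch.func import grad, jacrev  # torch >= 2.0

def energy(positions: torch.Tensor) -> torch.Tensor:
    # toy stand-in: half the sum of inverse pairwise distances
    nat = positions.shape[-2]
    diff = positions.unsqueeze(-2) - positions.unsqueeze(-3)
    # the added identity keeps the diagonal finite; it is masked out below
    dist = torch.sqrt((diff * diff).sum(-1) + torch.eye(nat, dtype=positions.dtype))
    inv = torch.where(torch.eye(nat, dtype=torch.bool), torch.zeros_like(dist), 1.0 / dist)
    return 0.5 * inv.sum()

positions = torch.rand(4, 3, dtype=torch.double)
hess = jacrev(grad(energy))(positions)  # Jacobian of the gradient
print(hess.shape)  # torch.Size([4, 3, 4, 3])
assert torch.allclose(hess.reshape(12, 12), hess.reshape(12, 12).mT)
```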
diff --git a/src/tad_dftd3/utils/misc.py b/src/tad_dftd3/utils/misc.py
deleted file mode 100644
index d97b6ed..0000000
--- a/src/tad_dftd3/utils/misc.py
+++ /dev/null
@@ -1,188 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Miscellaneous functions
-=======================
-
-Utilities for working with tensors as well as translating between element
-symbols and atomic numbers.
-"""
-import torch
-
-from .._typing import List, Optional, Size, Tensor, TensorOrTensors, Union
-from ..constants import PSE_S2Z
-
-__all__ = [
- "real_atoms",
- "real_pairs",
- "real_triples",
- "pack",
- "to_number",
- "get_default_device",
- "get_default_dtype",
-]
-
-
-def real_atoms(numbers: Tensor) -> Tensor:
- """
- Create a mask for atoms, discerning padding and actual atoms.
- Padding value is zero.
-
- Parameters
- ----------
- numbers : Tensor
- Atomic numbers for all atoms.
-
- Returns
- -------
- Tensor
- Mask for atoms that discerns padding and real atoms.
- """
- return numbers != 0
-
-
-def real_pairs(numbers: Tensor, diagonal: bool = False) -> Tensor:
- """
- Create a mask for pairs of atoms from atomic numbers, discerning padding
- and actual atoms. Padding value is zero.
-
- Parameters
- ----------
- numbers : Tensor
- Atomic numbers for all atoms.
- diagonal : bool, optional
-        Flag for keeping the diagonal, i.e., all pairs with identical indices,
-        in the mask. Defaults to `False`, i.e., `False` is written to the
-        diagonal.
-
- Returns
- -------
- Tensor
- Mask for atom pairs that discerns padding and real atoms.
- """
- real = real_atoms(numbers)
- mask = real.unsqueeze(-2) * real.unsqueeze(-1)
- if diagonal is False:
- mask *= ~torch.diag_embed(torch.ones_like(real))
- return mask
-
-
-def real_triples(
- numbers: torch.Tensor, diagonal: bool = False, self: bool = True
-) -> Tensor:
- """
- Create a mask for triples from atomic numbers. Padding value is zero.
-
- Parameters
- ----------
- numbers : torch.Tensor
- Atomic numbers for all atoms.
- diagonal : bool, optional
-        Flag for keeping the space diagonal, i.e., all triples with identical
-        indices, in the mask. Defaults to `False`, i.e., `False` is written
-        to the space diagonal.
-    self : bool, optional
-        Flag for keeping triples in which at least two indices are identical.
-        Defaults to `True`, i.e., such triples are not masked out.
-
- Returns
- -------
- Tensor
- Mask for triples.
- """
- real = real_pairs(numbers, diagonal=True)
- mask = real.unsqueeze(-3) * real.unsqueeze(-2) * real.unsqueeze(-1)
-
- if diagonal is False:
- mask *= ~torch.diag_embed(torch.ones_like(real))
-
- if self is False:
- mask *= ~torch.diag_embed(torch.ones_like(real), offset=0, dim1=-3, dim2=-2)
- mask *= ~torch.diag_embed(torch.ones_like(real), offset=0, dim1=-3, dim2=-1)
- mask *= ~torch.diag_embed(torch.ones_like(real), offset=0, dim1=-2, dim2=-1)
-
- return mask
-
-
-def pack(
- tensors: TensorOrTensors,
- axis: int = 0,
- value: Union[int, float] = 0,
- size: Optional[Size] = None,
-) -> Tensor:
- """
- Pad a list of variable length tensors with zeros, or some other value, and
- pack them into a single tensor.
-
- Parameters
- ----------
- tensors : list[Tensor] | tuple[Tensor] | Tensor
- List of tensors to be packed, all with identical dtypes.
- axis : int
-        Axis along which tensors should be packed; 0 for the first axis, -1
-        for the last axis, etc. This will be a new dimension.
- value : int | float
- The value with which the tensor is to be padded.
-    size : Size | None, optional
-        Size of each dimension to which tensors should be padded. Defaults to
-        the largest size encountered along each dimension.
-
- Returns
- -------
- padded : Tensor
- Input tensors padded and packed into a single tensor.
- """
- if isinstance(tensors, Tensor):
- return tensors
-
- _count = len(tensors)
- _device = tensors[0].device
- _dtype = tensors[0].dtype
-
- if size is None:
- size = torch.tensor([i.shape for i in tensors]).max(0).values.tolist()
- assert size is not None
-
- padded = torch.full((_count, *size), value, dtype=_dtype, device=_device)
-
- for n, source in enumerate(tensors):
- padded[(n, *[slice(0, s) for s in source.shape])] = source
-
- if axis != 0:
- axis = padded.dim() + 1 + axis if axis < 0 else axis
- order = list(range(1, padded.dim()))
- order.insert(axis, 0)
- padded = padded.permute(order)
-
- return padded
-
-
-def to_number(symbols: List[str]) -> Tensor:
- """
- Obtain atomic numbers from element symbols.
- """
- return torch.flatten(
- torch.tensor([PSE_S2Z.get(symbol.capitalize(), 0) for symbol in symbols])
- )
-
-
-def get_default_device() -> torch.device:
- """Default device for tensors."""
- return torch.tensor(1.0).device
-
-
-def get_default_dtype() -> torch.dtype:
- """Default data type for floating point tensors."""
- return torch.tensor(1.0).dtype
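With the whole `utils` package gone, the tests below take `pack` from `tad_mctc.batch` instead. A small sketch of the padding semantics documented above, assuming the tad-mctc replacement behaves identically (which the one-to-one substitution in the test hunks suggests):

```python
import torch
from tad_mctc.batch import pack

# two molecules with different atom counts
numbers_lih = torch.tensor([3, 1])
numbers_sih4 = torch.tensor([14, 1, 1, 1, 1])

batched = pack((numbers_lih, numbers_sih4))
print(batched)
# expected, given the documented semantics:
# tensor([[ 3,  1,  0,  0,  0],
#         [14,  1,  1,  1,  1]])
# the zero padding is what the real_atoms/real_pairs masks filter out again
```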
diff --git a/tests/__init__.py b/test/__init__.py
similarity index 100%
rename from tests/__init__.py
rename to test/__init__.py
diff --git a/tests/conftest.py b/test/conftest.py
similarity index 97%
rename from tests/conftest.py
rename to test/conftest.py
index 2293af1..d5b38c2 100644
--- a/tests/conftest.py
+++ b/test/conftest.py
@@ -17,11 +17,14 @@
"""
from __future__ import annotations
+import numpy as np
import pytest
import torch
# avoid randomness and non-deterministic algorithms
+np.random.seed(0)
torch.manual_seed(0)
+torch.use_deterministic_algorithms(True)
torch.set_printoptions(precision=10)
@@ -141,7 +144,7 @@ def pytest_configure(config: pytest.Config) -> None:
if torch.__version__ < (2, 0, 0): # type: ignore
torch.set_default_tensor_type("torch.cuda.FloatTensor") # type: ignore
else:
- torch.set_default_device(DEVICE) # type: ignore
+ torch.set_default_device(DEVICE) # type: ignore[attr-defined]
else:
torch.use_deterministic_algorithms(True)
DEVICE = None
diff --git a/tests/test_grad/__init__.py b/test/test_disp/__init__.py
similarity index 100%
rename from tests/test_grad/__init__.py
rename to test/test_disp/__init__.py
diff --git a/test/test_disp/samples.py b/test/test_disp/samples.py
new file mode 100644
index 0000000..58c956b
--- /dev/null
+++ b/test/test_disp/samples.py
@@ -0,0 +1,1650 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Collection of test samples.
+"""
+from typing import Dict
+
+import torch
+from tad_mctc.data.molecules import merge_nested_dicts, mols
+from tad_mctc.typing import Molecule, Tensor, TypedDict
+
+
+class Refs(TypedDict):
+ """
+ Format of reference values. Note that energies and gradients are calculated
+ with different parameters.
+ """
+
+ cn: Tensor
+ """Coordination number."""
+
+ weights: Tensor
+ """Weights for atomic reference systems."""
+
+ c6: Tensor
+ """C6 coefficients."""
+
+ disp2: Tensor
+ """Two-body dispersion energy."""
+
+ disp3: Tensor
+ """Three-body (ATM) dispersion energy."""
+
+
+class Record(Molecule, Refs):
+ """Store for molecular information and reference values."""
+
+
+refs: Dict[str, Refs] = {
+ "LiH": Refs(
+ {
+ "cn": torch.tensor([], dtype=torch.double),
+ "weights": torch.tensor([], dtype=torch.double),
+ "c6": torch.tensor([], dtype=torch.double),
+ "disp2": torch.tensor(
+ [
+ -1.5918418587455960e-04,
+ -1.5918418587455960e-04,
+ ],
+ dtype=torch.double,
+ ),
+ "disp3": torch.tensor(
+ [
+ +0.0000000000000000e00,
+ +0.0000000000000000e00,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "SiH4": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +3.89022710629348e00,
+ +9.72651698030399e-01,
+ +9.72651698030399e-01,
+ +9.72651698030399e-01,
+ +9.72651698030399e-01,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +5.00040274964878e-27,
+ +9.51882822683413e-16,
+ +2.54542759053980e-07,
+ +2.64839020628269e-02,
+ +9.73515843394413e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +1.51435152661277e02,
+ +2.13999949401839e01,
+ +2.13999949401839e01,
+ +2.13999949401839e01,
+ +2.13999949401839e01,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "disp2": torch.tensor(
+ [
+ -9.2481575005393872e-04,
+ -3.6494949521315417e-04,
+ -3.6494949521315417e-04,
+ -3.6494949521315417e-04,
+ -3.6494949521315417e-04,
+ ],
+ dtype=torch.double,
+ ),
+ "disp3": torch.tensor(
+ [
+ +1.2843453312590819e-09,
+ +7.5348323667640688e-08,
+ +7.5348323667640688e-08,
+ +7.5348323667640688e-08,
+ +7.5348323667640688e-08,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "MB16_43_01": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +4.15066368951397e00,
+ +9.78868026389781e-01,
+ +2.01080985633859e00,
+ +1.47865697827818e00,
+ +1.03577822442117e00,
+ +1.01206994314781e00,
+ +1.50329777127401e00,
+ +1.99858468272609e00,
+ +3.89181927539324e00,
+ +1.04323373360740e00,
+ +1.01526584450636e00,
+ +1.99315213227354e00,
+ +4.63526560889683e00,
+ +3.87312260639335e00,
+ +3.99316800677884e00,
+ +5.45068226903888e0,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +4.61254014807976e-13,
+ +9.99999999999539e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.78431945472983e-01,
+ +2.15680545270172e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.33252077840319e-08,
+ +1.55830681937747e-02,
+ +9.84416838481017e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.99424904108747e-01,
+ +5.75095891252906e-04,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.35771400228363e-02,
+ +9.86422859977164e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.82992148346892e-01,
+ +1.70078516531077e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.99519469064248e-01,
+ +4.80530935751615e-04,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.13181694792597e-07,
+ +1.71503960869602e-02,
+ +9.82849490731345e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.25926325849160e-25,
+ +6.73263145629432e-14,
+ +1.94165275506323e-05,
+ +9.99980583472382e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.86403420777318e-01,
+ +1.35965792226822e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83377538259043e-01,
+ +1.66224617409573e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +6.63636803493899e-06,
+ +9.99993363631965e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +4.78432084484299e-38,
+ +4.72470789879862e-24,
+ +2.64845507076682e-13,
+ +7.08386079833514e-06,
+ +9.99992916138937e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +5.57929648633356e-26,
+ +1.48261370770972e-14,
+ +2.19715394953033e-06,
+ +1.59978977357256e-01,
+ +8.40018825488779e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.11473605172390e-26,
+ +1.33471958830444e-14,
+ +8.80046323582265e-06,
+ +9.99991199536751e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +3.64404060381414e-41,
+ +1.64269207706493e-24,
+ +4.50618875164815e-11,
+ +9.99999999954938e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +1.86105200000295e02,
+ +2.10637967443952e01,
+ +3.33393205851663e01,
+ +2.07648885028596e01,
+ +2.50462879811640e01,
+ +2.09988662957750e01,
+ +2.07635420397407e01,
+ +3.33469365793004e01,
+ +4.44925237765056e01,
+ +2.09502948932743e01,
+ +2.09933789215108e01,
+ +1.11982907430142e02,
+ +6.38123316167329e01,
+ +6.45263838745924e01,
+ +4.44924561012183e01,
+ +1.60659400002538e02,
+ ],
+ [
+ +2.10637967443952e01,
+ +3.10104597907844e00,
+ +5.44104128537895e00,
+ +3.06460553022247e00,
+ +4.32334790530218e00,
+ +3.09313018967101e00,
+ +3.06444138044805e00,
+ +5.44213006593611e00,
+ +6.85278222181144e00,
+ +3.08720876141063e00,
+ +3.09246121386693e00,
+ +1.67260314058114e01,
+ +9.32232615546217e00,
+ +9.40964391592266e00,
+ +6.85277268893649e00,
+ +2.13390348985262e01,
+ ],
+ [
+ +3.33393205851663e01,
+ +5.44104128537895e00,
+ +1.04060662211154e01,
+ +5.38395096621487e00,
+ +8.57335406068136e00,
+ +5.42863982058108e00,
+ +5.38369379696184e00,
+ +1.04078420241680e01,
+ +1.26306609797298e01,
+ +5.41936287038137e00,
+ +5.42759175329962e00,
+ +2.97398273677055e01,
+ +1.63311877211176e01,
+ +1.64664919883409e01,
+ +1.26306453073937e01,
+ +3.55362026390315e01,
+ ],
+ [
+ +2.07648885028596e01,
+ +3.06460553022247e00,
+ +5.38395096621487e00,
+ +3.02866858604329e00,
+ +4.28051674110831e00,
+ +3.05679911479676e00,
+ +3.02850670435781e00,
+ +5.38502623652600e00,
+ +6.77670142181813e00,
+ +3.05095950404731e00,
+ +3.05613938235978e00,
+ +1.65326001720343e01,
+ +9.21222013302869e00,
+ +9.29832544621136e00,
+ +6.77669200846439e00,
+ +2.10658829179373e01,
+ ],
+ [
+ +2.50462879811640e01,
+ +4.32334790530218e00,
+ +8.57335406068136e00,
+ +4.28051674110831e00,
+ +7.16510167612946e00,
+ +4.31404389049233e00,
+ +4.28032380369504e00,
+ +8.57471450429471e00,
+ +1.02476219308165e01,
+ +4.30708399636246e00,
+ +4.31325759357762e00,
+ +2.37716272051766e01,
+ +1.29661524144552e01,
+ +1.30671606537876e01,
+ +1.02476098345869e01,
+ +2.75317129235774e01,
+ ],
+ [
+ +2.09988662957750e01,
+ +3.09313018967101e00,
+ +5.42863982058108e00,
+ +3.05679911479676e00,
+ +4.31404389049233e00,
+ +3.08523815906549e00,
+ +3.05663545770877e00,
+ +5.42972566637027e00,
+ +6.83625554309033e00,
+ +3.07933450364290e00,
+ +3.08457119115511e00,
+ +1.66840132375542e01,
+ +9.29840833574711e00,
+ +9.38546272191831e00,
+ +6.83624603617842e00,
+ +2.12796993628112e01,
+ ],
+ [
+ +2.07635420397407e01,
+ +3.06444138044805e00,
+ +5.38369379696184e00,
+ +3.02850670435781e00,
+ +4.28032380369504e00,
+ +3.05663545770877e00,
+ +3.02834483288917e00,
+ +5.38476900641466e00,
+ +6.77635870797818e00,
+ +3.05079621551476e00,
+ +3.05597576690949e00,
+ +1.65317288410165e01,
+ +9.21172414905175e00,
+ +9.29782400064014e00,
+ +6.77634929516284e00,
+ +2.10646524765623e01,
+ ],
+ [
+ +3.33469365793004e01,
+ +5.44213006593611e00,
+ +1.04078420241680e01,
+ +5.38502623652600e00,
+ +8.57471450429471e00,
+ +5.42972566637027e00,
+ +5.38476900641466e00,
+ +1.04096182819417e01,
+ +1.26329675237980e01,
+ +5.42044652080921e00,
+ +5.42867735106695e00,
+ +2.97456233348293e01,
+ +1.63344547422583e01,
+ +1.64697916829337e01,
+ +1.26329518479543e01,
+ +3.55438470017986e01,
+ ],
+ [
+ +4.44925237765056e01,
+ +6.85278222181144e00,
+ +1.26306609797298e01,
+ +6.77670142181813e00,
+ +1.02476219308165e01,
+ +6.83625554309033e00,
+ +6.77635870797818e00,
+ +1.26329675237980e01,
+ +1.55817742139549e01,
+ +6.82389271515335e00,
+ +6.83485884749660e00,
+ +3.72381957696309e01,
+ +2.05886604700550e01,
+ +2.07697483350778e01,
+ +1.55817539255380e01,
+ +4.59400237556194e01,
+ ],
+ [
+ +2.09502948932743e01,
+ +3.08720876141063e00,
+ +5.41936287038137e00,
+ +3.05095950404731e00,
+ +4.30708399636246e00,
+ +3.07933450364290e00,
+ +3.05079621551476e00,
+ +5.42044652080921e00,
+ +6.82389271515335e00,
+ +3.07344414324100e00,
+ +3.07866903774297e00,
+ +1.66525814300412e01,
+ +9.28051654425321e00,
+ +9.36737391255552e00,
+ +6.82388322766317e00,
+ +2.12353132504549e01,
+ ],
+ [
+ +2.09933789215108e01,
+ +3.09246121386693e00,
+ +5.42759175329962e00,
+ +3.05613938235978e00,
+ +4.31325759357762e00,
+ +3.08457119115511e00,
+ +3.05597576690949e00,
+ +5.42867735106695e00,
+ +6.83485884749660e00,
+ +3.07866903774297e00,
+ +3.08390439293497e00,
+ +1.66804622161148e01,
+ +9.29638700321132e00,
+ +9.38341913120739e00,
+ +6.83484934277887e00,
+ +2.12746848234302e01,
+ ],
+ [
+ +1.11982907430142e02,
+ +1.67260314058114e01,
+ +2.97398273677055e01,
+ +1.65326001720343e01,
+ +2.37716272051766e01,
+ +1.66840132375542e01,
+ +1.65317288410165e01,
+ +2.97456233348293e01,
+ +3.72381957696309e01,
+ +1.66525814300412e01,
+ +1.66804622161148e01,
+ +9.03985128506635e01,
+ +5.02708240732723e01,
+ +5.07336615572416e01,
+ +3.72381448273806e01,
+ +1.14240508062473e02,
+ ],
+ [
+ +6.38123316167329e01,
+ +9.32232615546217e00,
+ +1.63311877211176e01,
+ +9.21222013302869e00,
+ +1.29661524144552e01,
+ +9.29840833574711e00,
+ +9.21172414905175e00,
+ +1.63344547422583e01,
+ +2.05886604700550e01,
+ +9.28051654425321e00,
+ +9.29638700321132e00,
+ +5.02708240732723e01,
+ +2.80315233611672e01,
+ +2.82953233553807e01,
+ +2.05886317918059e01,
+ +6.43332290431569e01,
+ ],
+ [
+ +6.45263838745924e01,
+ +9.40964391592266e00,
+ +1.64664919883409e01,
+ +9.29832544621136e00,
+ +1.30671606537876e01,
+ +9.38546272191831e00,
+ +9.29782400064014e00,
+ +1.64697916829337e01,
+ +2.07697483350778e01,
+ +9.36737391255552e00,
+ +9.38341913120739e00,
+ +5.07336615572416e01,
+ +2.82953233553807e01,
+ +2.85620489152008e01,
+ +2.07697193680983e01,
+ +6.49891385224176e01,
+ ],
+ [
+ +4.44924561012183e01,
+ +6.85277268893649e00,
+ +1.26306453073937e01,
+ +6.77669200846439e00,
+ +1.02476098345869e01,
+ +6.83624603617842e00,
+ +6.77634929516284e00,
+ +1.26329518479543e01,
+ +1.55817539255380e01,
+ +6.82388322766317e00,
+ +6.83484934277887e00,
+ +3.72381448273806e01,
+ +2.05886317918059e01,
+ +2.07697193680983e01,
+ +1.55817336371518e01,
+ +4.59399560920098e01,
+ ],
+ [
+ +1.60659400002538e02,
+ +2.13390348985262e01,
+ +3.55362026390315e01,
+ +2.10658829179373e01,
+ +2.75317129235774e01,
+ +2.12796993628112e01,
+ +2.10646524765623e01,
+ +3.55438470017986e01,
+ +4.59400237556194e01,
+ +2.12353132504549e01,
+ +2.12746848234302e01,
+ +1.14240508062473e02,
+ +6.43332290431569e01,
+ +6.49891385224176e01,
+ +4.59399560920098e01,
+ +1.53594500003907e02,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "disp2": torch.tensor(
+ [
+ -2.8788632548321321e-03,
+ -6.3435979775151754e-04,
+ -9.6167619562274962e-04,
+ -7.9723260613915258e-04,
+ -7.9238263177385578e-04,
+ -7.4485995467369389e-04,
+ -1.0311812354479540e-03,
+ -1.0804678845482093e-03,
+ -2.1424517331896948e-03,
+ -5.3905710617330410e-04,
+ -7.3549132878459982e-04,
+ -2.9718856310496566e-03,
+ -1.9053629060228276e-03,
+ -1.8362475794413465e-03,
+ -1.7182276597931356e-03,
+ -4.2417715940356341e-03,
+ ],
+ dtype=torch.double,
+ ),
+ "disp3": torch.tensor(
+ [
+ +7.0507158322062093e-06,
+ +1.0788932851583596e-06,
+ +5.0390835073232118e-06,
+ +3.5279992165310452e-06,
+ +2.0456532138274277e-06,
+ +4.4882779886109463e-06,
+ +3.4024596216497734e-06,
+ +7.0699209168125984e-06,
+ +1.3788482957103818e-06,
+ +4.2106212983235953e-06,
+ +3.6457830677850229e-06,
+ +9.5860600613146586e-06,
+ +2.7971201776362010e-06,
+ +3.7713122361185403e-06,
+ +4.6694314609246109e-06,
+ +1.1117738755494003e-06,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "PbH4-BiH3": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +3.93882078385452e00,
+ +9.83202447815541e-01,
+ +9.83202575698739e-01,
+ +9.83202575698739e-01,
+ +9.86589814809524e-01,
+ +2.97146042634822e00,
+ +9.87045550753296e-01,
+ +9.87045669088046e-01,
+ +9.87045550753296e-01,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +1.10706478210448e-27,
+ +2.26599265549586e-16,
+ +1.05725184914546e-07,
+ +1.41167956987467e-02,
+ +9.85883098576068e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79089158960743e-01,
+ +2.09108410392566e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79089178059127e-01,
+ +2.09108219408730e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79089178059127e-01,
+ +2.09108219408730e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79589093181984e-01,
+ +2.04109068180158e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +4.51567208622703e-16,
+ +1.31099357580350e-07,
+ +1.71917663233654e-02,
+ +9.82808102577277e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79655454782451e-01,
+ +2.03445452175495e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79655471986197e-01,
+ +2.03445280138028e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79655454782451e-01,
+ +2.03445452175495e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +4.56209027097378e02,
+ +3.72742207576559e01,
+ +3.72742203407386e01,
+ +3.72742203407386e01,
+ +3.72633072045004e01,
+ +4.93665090064696e02,
+ +3.72618585322083e01,
+ +3.72618581566509e01,
+ +3.72618585322083e01,
+ ],
+ [
+ +3.72742207576559e01,
+ +3.09876483566212e00,
+ +3.09876480252469e00,
+ +3.09876480252469e00,
+ +3.09789740447628e00,
+ +4.02583668373402e01,
+ +3.09778226108473e00,
+ +3.09778223123467e00,
+ +3.09778226108473e00,
+ ],
+ [
+ +3.72742203407386e01,
+ +3.09876480252469e00,
+ +3.09876476938727e00,
+ +3.09876476938727e00,
+ +3.09789737134977e00,
+ +4.02583663834069e01,
+ +3.09778222795966e00,
+ +3.09778219810960e00,
+ +3.09778222795966e00,
+ ],
+ [
+ +3.72742203407386e01,
+ +3.09876480252469e00,
+ +3.09876476938727e00,
+ +3.09876476938727e00,
+ +3.09789737134977e00,
+ +4.02583663834069e01,
+ +3.09778222795966e00,
+ +3.09778219810960e00,
+ +3.09778222795966e00,
+ ],
+ [
+ +3.72633072045004e01,
+ +3.09789740447628e00,
+ +3.09789737134977e00,
+ +3.09789737134977e00,
+ +3.09703025884030e00,
+ +4.02464843239764e01,
+ +3.09691515335283e00,
+ +3.09691512351259e00,
+ +3.09691515335283e00,
+ ],
+ [
+ +4.93665090064696e02,
+ +4.02583668373402e01,
+ +4.02583663834069e01,
+ +4.02583663834069e01,
+ +4.02464843239764e01,
+ +5.34419964675118e02,
+ +4.02449070312629e01,
+ +4.02449066223616e01,
+ +4.02449070312629e01,
+ ],
+ [
+ +3.72618585322083e01,
+ +3.09778226108473e00,
+ +3.09778222795966e00,
+ +3.09778222795966e00,
+ +3.09691515335283e00,
+ +4.02449070312629e01,
+ +3.09680005289677e00,
+ +3.09680002305784e00,
+ +3.09680005289677e00,
+ ],
+ [
+ +3.72618581566509e01,
+ +3.09778223123467e00,
+ +3.09778219810960e00,
+ +3.09778219810960e00,
+ +3.09691512351259e00,
+ +4.02449066223616e01,
+ +3.09680002305784e00,
+ +3.09679999321891e00,
+ +3.09680002305784e00,
+ ],
+ [
+ +3.72618585322083e01,
+ +3.09778226108473e00,
+ +3.09778222795966e00,
+ +3.09778222795966e00,
+ +3.09691515335283e00,
+ +4.02449070312629e01,
+ +3.09680005289677e00,
+ +3.09680002305784e00,
+ +3.09680005289677e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "disp2": torch.tensor(
+ [
+ -1.7789829941052290e-03,
+ -4.2874420697641040e-04,
+ -4.2874425413182740e-04,
+ -4.2874425413182740e-04,
+ -6.4605081235581219e-04,
+ -1.8277741525957012e-03,
+ -4.4890931954739776e-04,
+ -4.4890934300120941e-04,
+ -4.4890931954739776e-04,
+ ],
+ dtype=torch.double,
+ ),
+ "disp3": torch.tensor(
+ [
+ +1.5164457178775542e-07,
+ +3.1871289285333041e-07,
+ +3.1871279049093017e-07,
+ +3.1871279049093017e-07,
+ -5.9772721699589883e-07,
+ -3.5376082968855901e-07,
+ +1.4591177238904105e-07,
+ +1.4591163155676249e-07,
+ +1.4591177238904105e-07,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "C6H5I-CH3SH": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +3.13936895934395e00,
+ +3.13131666863102e00,
+ +3.13937683960227e00,
+ +3.31534291514346e00,
+ +3.13765455567338e00,
+ +3.31481155018318e00,
+ +1.53636056287591e00,
+ +1.00352466398219e00,
+ +1.01223354855399e00,
+ +1.00366192372190e00,
+ +1.01219594356898e00,
+ +1.00366200689047e00,
+ +2.15705640674763e00,
+ +9.98181081558970e-01,
+ +3.98411287017616e00,
+ +1.01462256394391e00,
+ +1.01235611510819e00,
+ +1.00858912903507e00,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +7.66915348045866e-18,
+ +9.04559372001613e-09,
+ +5.55474084124553e-03,
+ +9.36199574012947e-01,
+ +5.82456761002137e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.33337260445326e-18,
+ +1.03304923176417e-08,
+ +5.94352226939526e-03,
+ +9.39218013018462e-01,
+ +5.48384543816506e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +7.66767874029992e-18,
+ +9.04441693210364e-09,
+ +5.55437244105958e-03,
+ +9.36196513315798e-01,
+ +5.82491051987251e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.61278067966140e-20,
+ +4.54846017777575e-10,
+ +1.16048785372819e-03,
+ +7.99583198542082e-01,
+ +1.99256313149343e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +7.99678634625602e-18,
+ +9.30523441007744e-09,
+ +5.63544094450366e-03,
+ +9.36860440392255e-01,
+ +5.75041093580066e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.74417166809199e-20,
+ +4.59132999088564e-10,
+ +1.16639851843437e-03,
+ +8.00245977284164e-01,
+ +1.98587623738269e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +2.54205655667171e-04,
+ +9.99745794344333e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81918044661934e-01,
+ +1.80819553380659e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83012088864378e-01,
+ +1.69879111356215e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81935812878635e-01,
+ +1.80641871213652e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83007507536486e-01,
+ +1.69924924635144e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81935823639575e-01,
+ +1.80641763604250e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.18809691426923e-09,
+ +5.00585297181196e-03,
+ +9.94994137840091e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81212828585247e-01,
+ +1.87871714147530e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +2.61014227249546e-28,
+ +2.42411690886486e-16,
+ +1.38694757870492e-07,
+ +2.01508154369845e-02,
+ +9.79849045868257e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83300662218751e-01,
+ +1.66993377812491e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83027012438879e-01,
+ +1.69729875611206e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.82562408977771e-01,
+ +1.74375910222289e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +2.53086694842437e01,
+ +2.53236159084896e01,
+ +2.53086544809279e01,
+ +2.47100760277660e01,
+ +2.53119158180357e01,
+ +2.47128891340438e01,
+ +9.48922900325656e01,
+ +8.82857683343213e00,
+ +8.82319289903895e00,
+ +8.82848939371737e00,
+ +8.82321544435180e00,
+ +8.82848934076138e00,
+ +5.62455312853600e01,
+ +8.83204729384078e00,
+ +2.14360070777824e01,
+ +8.82177279187685e00,
+ +8.82311945817995e00,
+ +8.82540583248536e00,
+ ],
+ [
+ +2.53236159084896e01,
+ +2.53385737670487e01,
+ +2.53236008936914e01,
+ +2.47245622275532e01,
+ +2.53268647265581e01,
+ +2.47273774996866e01,
+ +9.49508531711154e01,
+ +8.83383171969513e00,
+ +8.82844384885730e00,
+ +8.83374421604913e00,
+ +8.82846641065407e00,
+ +8.83374416305442e00,
+ +5.62800562259229e01,
+ +8.83730471751867e00,
+ +2.14479713482813e01,
+ +8.82702270338885e00,
+ +8.82837035430227e00,
+ +8.83065840028219e00,
+ ],
+ [
+ +2.53086544809279e01,
+ +2.53236008936914e01,
+ +2.53086394776235e01,
+ +2.47100614866121e01,
+ +2.53119008122251e01,
+ +2.47128745907148e01,
+ +9.48922312429173e01,
+ +8.82857155866703e00,
+ +8.82318762822647e00,
+ +8.82848411901647e00,
+ +8.82321017352276e00,
+ +8.82848406606051e00,
+ +5.62454966276673e01,
+ +8.83204201652784e00,
+ +2.14359950691858e01,
+ +8.82176752210693e00,
+ +8.82311418742137e00,
+ +8.82540056004824e00,
+ ],
+ [
+ +2.47100760277660e01,
+ +2.47245622275532e01,
+ +2.47100614866121e01,
+ +2.41300091503453e01,
+ +2.47132223699295e01,
+ +2.41327350652656e01,
+ +9.25450400227954e01,
+ +8.61818543474823e00,
+ +8.61295975815401e00,
+ +8.61810056527593e00,
+ +8.61298164075975e00,
+ +8.61810051387655e00,
+ +5.48621486974196e01,
+ +8.62155388287635e00,
+ +2.09574675249769e01,
+ +8.61158139426314e00,
+ +8.61288847604877e00,
+ +8.61510764363531e00,
+ ],
+ [
+ +2.53119158180357e01,
+ +2.53268647265581e01,
+ +2.53119008122251e01,
+ +2.47132223699295e01,
+ +2.53151626915744e01,
+ +2.47160359467776e01,
+ +9.49050104461900e01,
+ +8.82971816324588e00,
+ +8.82433337366177e00,
+ +8.82963070964208e00,
+ +8.82435592255574e00,
+ +8.82963065667768e00,
+ +5.62530302631426e01,
+ +8.83318917490684e00,
+ +2.14386054857927e01,
+ +8.82291304092804e00,
+ +8.82425992113731e00,
+ +8.82654665861330e00,
+ ],
+ [
+ +2.47128891340438e01,
+ +2.47273774996866e01,
+ +2.47128745907148e01,
+ +2.41327350652656e01,
+ +2.47160359467776e01,
+ +2.41354613905380e01,
+ +9.25560734133844e01,
+ +8.61917409179224e00,
+ +8.61394767066187e00,
+ +8.61908921022803e00,
+ +8.61396955638537e00,
+ +8.61908915882133e00,
+ +5.48686508434178e01,
+ +8.62254301984509e00,
+ +2.09597156158777e01,
+ +8.61256911038653e00,
+ +8.61387637840061e00,
+ +8.61609586216640e00,
+ ],
+ [
+ +9.48922900325656e01,
+ +9.49508531711154e01,
+ +9.48922312429173e01,
+ +9.25450400227954e01,
+ +9.49050104461900e01,
+ +9.25560734133844e01,
+ +3.58497831396092e02,
+ +3.31610125661753e01,
+ +3.31400786291125e01,
+ +3.31606725810729e01,
+ +3.31401662903144e01,
+ +3.31606723751682e01,
+ +2.12283949836107e02,
+ +3.31745064898504e01,
+ +7.97027494030697e01,
+ +3.31345569359076e01,
+ +3.31397930746818e01,
+ +3.31486830076409e01,
+ ],
+ [
+ +8.82857683343213e00,
+ +8.83383171969513e00,
+ +8.82857155866703e00,
+ +8.61818543474823e00,
+ +8.82971816324588e00,
+ +8.61917409179224e00,
+ +3.31610125661753e01,
+ +3.08895723244586e00,
+ +3.08706250254790e00,
+ +3.08892646040635e00,
+ +3.08707043676017e00,
+ +3.08892644176992e00,
+ +1.96792520574283e01,
+ +3.09017856699708e00,
+ +7.46755679047458e00,
+ +3.08656273429260e00,
+ +3.08703665702655e00,
+ +3.08784128454205e00,
+ ],
+ [
+ +8.82319289903895e00,
+ +8.82844384885730e00,
+ +8.82318762822647e00,
+ +8.61295975815401e00,
+ +8.82433337366177e00,
+ +8.61394767066187e00,
+ +3.31400786291125e01,
+ +3.08706250254790e00,
+ +3.08516914014558e00,
+ +3.08703175271770e00,
+ +3.08517706863144e00,
+ +3.08703173409473e00,
+ +1.96668860512657e01,
+ +3.08828295561747e00,
+ +7.46319796171210e00,
+ +3.08466973259124e00,
+ +3.08514331327788e00,
+ +3.08594736006437e00,
+ ],
+ [
+ +8.82848939371737e00,
+ +8.83374421604913e00,
+ +8.82848411901647e00,
+ +8.61810056527593e00,
+ +8.82963070964208e00,
+ +8.61908921022803e00,
+ +3.31606725810729e01,
+ +3.08892646040635e00,
+ +3.08703175271770e00,
+ +3.08889568872755e00,
+ +3.08703968683697e00,
+ +3.08889567009134e00,
+ +1.96790512228764e01,
+ +3.09014778064156e00,
+ +7.46748599935495e00,
+ +3.08653199032049e00,
+ +3.08700590749930e00,
+ +3.08781052558326e00,
+ ],
+ [
+ +8.82321544435180e00,
+ +8.82846641065407e00,
+ +8.82321017352276e00,
+ +8.61298164075975e00,
+ +8.82435592255574e00,
+ +8.61396955638537e00,
+ +3.31401662903144e01,
+ +3.08707043676017e00,
+ +3.08517706863144e00,
+ +3.08703968683697e00,
+ +3.08518499714127e00,
+ +3.08703966821393e00,
+ +1.96669378341176e01,
+ +3.08829089352095e00,
+ +7.46321621437845e00,
+ +3.08467765956666e00,
+ +3.08515124168563e00,
+ +3.08595529090393e00,
+ ],
+ [
+ +8.82848934076138e00,
+ +8.83374416305442e00,
+ +8.82848406606051e00,
+ +8.61810051387655e00,
+ +8.82963065667768e00,
+ +8.61908915882133e00,
+ +3.31606723751682e01,
+ +3.08892644176992e00,
+ +3.08703173409473e00,
+ +3.08889567009134e00,
+ +3.08703966821393e00,
+ +3.08889565145513e00,
+ +1.96790511012452e01,
+ +3.09014776199646e00,
+ +7.46748595648182e00,
+ +3.08653197170106e00,
+ +3.08700588887651e00,
+ +3.08781050695476e00,
+ ],
+ [
+ +5.62455312853600e01,
+ +5.62800562259229e01,
+ +5.62454966276673e01,
+ +5.48621486974196e01,
+ +5.62530302631426e01,
+ +5.48686508434178e01,
+ +2.12283949836107e02,
+ +1.96792520574283e01,
+ +1.96668860512657e01,
+ +1.96790512228764e01,
+ +1.96669378341176e01,
+ +1.96790511012452e01,
+ +1.25836586473867e02,
+ +1.96872231305630e01,
+ +4.72941948267372e01,
+ +1.96636243001677e01,
+ +1.96667173697683e01,
+ +1.96719687931115e01,
+ ],
+ [
+ +8.83204729384078e00,
+ +8.83730471751867e00,
+ +8.83204201652784e00,
+ +8.62155388287635e00,
+ +8.83318917490684e00,
+ +8.62254301984509e00,
+ +3.31745064898504e01,
+ +3.09017856699708e00,
+ +3.08828295561747e00,
+ +3.09014778064156e00,
+ +3.08829089352095e00,
+ +3.09014776199646e00,
+ +1.96872231305630e01,
+ +3.09140046974751e00,
+ +7.47036647230640e00,
+ +3.08778295485591e00,
+ +3.08825709807205e00,
+ +3.08906209992291e00,
+ ],
+ [
+ +2.14360070777824e01,
+ +2.14479713482813e01,
+ +2.14359950691858e01,
+ +2.09574675249769e01,
+ +2.14386054857927e01,
+ +2.09597156158777e01,
+ +7.97027494030697e01,
+ +7.46755679047458e00,
+ +7.46319796171210e00,
+ +7.46748599935495e00,
+ +7.46321621437845e00,
+ +7.46748595648182e00,
+ +4.72941948267372e01,
+ +7.47036647230640e00,
+ +1.83413164591959e01,
+ +7.46204824414648e00,
+ +7.46313850405433e00,
+ +7.46498955077155e00,
+ ],
+ [
+ +8.82177279187685e00,
+ +8.82702270338885e00,
+ +8.82176752210693e00,
+ +8.61158139426314e00,
+ +8.82291304092804e00,
+ +8.61256911038653e00,
+ +3.31345569359076e01,
+ +3.08656273429260e00,
+ +3.08466973259124e00,
+ +3.08653199032049e00,
+ +3.08467765956666e00,
+ +3.08653197170106e00,
+ +1.96636243001677e01,
+ +3.08778295485591e00,
+ +7.46204824414648e00,
+ +3.08417042017811e00,
+ +3.08464391064378e00,
+ +3.08544780425280e00,
+ ],
+ [
+ +8.82311945817995e00,
+ +8.82837035430227e00,
+ +8.82311418742137e00,
+ +8.61288847604877e00,
+ +8.82425992113731e00,
+ +8.61387637840061e00,
+ +3.31397930746818e01,
+ +3.08703665702655e00,
+ +3.08514331327788e00,
+ +3.08700590749930e00,
+ +3.08515124168563e00,
+ +3.08700588887651e00,
+ +1.96667173697683e01,
+ +3.08825709807205e00,
+ +7.46313850405433e00,
+ +3.08464391064378e00,
+ +3.08511748666464e00,
+ +3.08592152552955e00,
+ ],
+ [
+ +8.82540583248536e00,
+ +8.83065840028219e00,
+ +8.82540056004824e00,
+ +8.61510764363531e00,
+ +8.82654665861330e00,
+ +8.61609586216640e00,
+ +3.31486830076409e01,
+ +3.08784128454205e00,
+ +3.08594736006437e00,
+ +3.08781052558326e00,
+ +3.08595529090393e00,
+ +3.08781050695476e00,
+ +1.96719687931115e01,
+ +3.08906209992291e00,
+ +7.46498955077155e00,
+ +3.08544780425280e00,
+ +3.08592152552955e00,
+ +3.08672581101038e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "disp2": torch.tensor(
+ [
+ -1.9420461943405458e-03,
+ -1.8659072210258116e-03,
+ -1.9421688758887014e-03,
+ -2.2256063318899419e-03,
+ -2.3963299472900094e-03,
+ -2.2258129538456762e-03,
+ -4.5810403655531691e-03,
+ -6.0279450821464173e-04,
+ -7.9994791096430059e-04,
+ -6.1485615934089312e-04,
+ -7.9989323817241818e-04,
+ -6.1484107713457887e-04,
+ -2.2996378209045958e-03,
+ -5.6155104045316131e-04,
+ -1.1544788441618554e-03,
+ -5.5259186314968840e-04,
+ -6.8597888322421800e-04,
+ -5.0103989808744046e-04,
+ ],
+ dtype=torch.double,
+ ),
+ "disp3": torch.tensor(
+ [
+ -1.2978866706459067e-06,
+ -6.8327757407160399e-07,
+ -1.2942593535913288e-06,
+ -5.7304824129487952e-07,
+ -8.9195765730180898e-07,
+ -4.8897672215875848e-07,
+ -5.9620837808702434e-06,
+ -5.1712490636531602e-07,
+ +2.1379354562450553e-06,
+ +7.7699432620597416e-07,
+ +2.1956704534880581e-06,
+ +7.6716763665232290e-07,
+ -9.5275400116253198e-07,
+ +6.0068639199219523e-07,
+ -2.6385604432973588e-07,
+ +1.1560414358817309e-06,
+ -2.6528734005501400e-07,
+ -1.3951746669187961e-06,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "AmF3": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +2.99072690000000e00,
+ +0.99768090000000e00,
+ +0.99767850000000e00,
+ +0.99768040000000e00,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +3.01777620000000e-16,
+ +3.48560790000000e-08,
+ +6.05574020000000e-03,
+ +9.93942080000000e-01,
+ +2.12835020000000e-06,
+ +3.22313320000000e-14,
+ +0.00000000000000e00,
+ ],
+ [
+ +0.01831650000000e00,
+ 0.981683500000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ ],
+ [
+ 0.018316800000000e00,
+ 0.981683200000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ ],
+ [
+ 0.018316600000000e00,
+ 0.981683400000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ 0.000000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +524.180114700000e00,
+ +54.4235535000000e00,
+ +54.4235573000000e00,
+ +54.4235573000000e00,
+ ],
+ [
+ +54.4235535000000e00,
+ +7.17594720000000e00,
+ +7.17594770000000e00,
+ +7.17594770000000e00,
+ ],
+ [
+ +54.4235535000000e00,
+ +7.17594770000000e00,
+ +7.17594860000000e00,
+ +7.17594810000000e00,
+ ],
+ [
+ +54.4235535000000e00,
+ +7.17594770000000e00,
+ +7.17594810000000e00,
+ +7.17594810000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "disp2": torch.tensor(
+ [
+ -1.048180025875288e-03,
+ -4.430683267237130e-04,
+ -4.430435696703567e-04,
+ -4.430709410870264e-04,
+ ],
+ dtype=torch.double,
+ ),
+ "disp3": torch.tensor(
+ [
+ 1.475402588166237e-08,
+ 2.297333064597274e-07,
+ 2.297265476250950e-07,
+ 2.297346486316179e-07,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+}
+
+samples: Dict[str, Record] = merge_nested_dicts(mols, refs)
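Each record in `samples` merges a geometry from tad-mctc's `mols` with the reference values defined here, so the tests below index a single dictionary for both. A short sketch of how one record is unpacked, mirroring `test_single` in `test_dftd3.py`; the total per-atom dispersion reference is the sum of the two- and three-body contributions.

```python
import torch

from .samples import samples  # as done in the neighbouring test modules

sample = samples["SiH4"]              # a Record: geometry plus reference values
numbers = sample["numbers"]           # geometry fields come from tad_mctc's `mols`
positions = sample["positions"]
e_ref = sample["disp2"] + sample["disp3"]  # total per-atom dispersion energy
assert numbers.shape == e_ref.shape == positions.shape[:-1]
```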
diff --git a/tests/test_dftd3.py b/test/test_disp/test_dftd3.py
similarity index 88%
rename from tests/test_dftd3.py
rename to test/test_disp/test_dftd3.py
index 13a37f6..e43d962 100644
--- a/tests/test_dftd3.py
+++ b/test/test_disp/test_dftd3.py
@@ -17,11 +17,13 @@
"""
import pytest
import torch
+from tad_mctc.batch import pack
+from tad_mctc.ncoord import exp_count
-from tad_dftd3 import damping, data, dftd3, model, ncoord, reference, utils
-from tad_dftd3._typing import DD
+from tad_dftd3 import damping, data, dftd3, model, reference
+from tad_dftd3.typing import DD
-from .conftest import DEVICE
+from ..conftest import DEVICE
from .samples import samples
@@ -57,9 +59,9 @@ def test_single(dtype: torch.dtype, name: str) -> None:
positions = sample["positions"].to(**dd)
ref = (sample["disp2"] + sample["disp3"]).to(**dd)
- rcov = data.covalent_rad_d3.to(**dd)[numbers]
- rvdw = data.vdw_rad_d3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
- r4r2 = data.sqrt_z_r4_over_r2.to(**dd)[numbers]
+ rcov = data.COV_D3.to(**dd)[numbers]
+ rvdw = data.VDW_D3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
+ r4r2 = data.R4R2.to(**dd)[numbers]
cutoff = torch.tensor(50, **dd)
param = {
@@ -80,7 +82,7 @@ def test_single(dtype: torch.dtype, name: str) -> None:
rvdw=rvdw,
r4r2=r4r2,
cutoff=cutoff,
- counting_function=ncoord.exp_count,
+ counting_function=exp_count,
weighting_function=model.gaussian_weight,
damping_function=damping.rational_damping,
)
@@ -94,19 +96,19 @@ def test_batch(dtype: torch.dtype) -> None:
dd: DD = {"device": DEVICE, "dtype": dtype}
sample1, sample2 = (samples["PbH4-BiH3"], samples["C6H5I-CH3SH"])
- numbers = utils.pack(
+ numbers = pack(
(
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
)
)
- positions = utils.pack(
+ positions = pack(
(
sample1["positions"].to(**dd),
sample2["positions"].to(**dd),
)
)
- ref = utils.pack(
+ ref = pack(
(
sample1["disp2"].to(**dd),
sample2["disp2"].to(**dd),
diff --git a/tests/test_disp.py b/test/test_disp/test_disp.py
similarity index 91%
rename from tests/test_disp.py
rename to test/test_disp/test_disp.py
index c83c6ce..5820b2c 100644
--- a/tests/test_disp.py
+++ b/test/test_disp/test_disp.py
@@ -19,11 +19,12 @@
import pytest
import torch
+from tad_mctc.batch import pack
-from tad_dftd3 import damping, data, disp, utils
-from tad_dftd3._typing import DD
+from tad_dftd3 import damping, data, disp
+from tad_dftd3.typing import DD
-from .conftest import DEVICE
+from ..conftest import DEVICE
from .samples import samples
sample_list = ["AmF3", "SiH4", "PbH4-BiH3", "C6H5I-CH3SH", "MB16_43_01"]
@@ -72,8 +73,8 @@ def test_disp2_single(dtype: torch.dtype, name: str) -> None:
positions = sample["positions"].to(**dd)
ref = sample["disp2"].to(**dd)
c6 = sample["c6"].to(**dd)
- rvdw = data.vdw_rad_d3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
- r4r2 = data.sqrt_z_r4_over_r2.to(**dd)[numbers]
+ rvdw = data.VDW_D3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
+ r4r2 = data.R4R2.to(**dd)[numbers]
cutoff = torch.tensor(50.0, **dd)
par = {k: v.to(**dd) for k, v in param_noatm.items()}
@@ -101,25 +102,25 @@ def test_disp2_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
tol = sqrt(torch.finfo(dtype).eps)
sample1, sample2 = samples[name1], samples[name2]
- numbers = utils.pack(
+ numbers = pack(
[
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
]
)
- positions = utils.pack(
+ positions = pack(
[
sample1["positions"].to(**dd),
sample2["positions"].to(**dd),
]
)
- c6 = utils.pack(
+ c6 = pack(
[
sample1["c6"].to(**dd),
sample2["c6"].to(**dd),
]
)
- ref = utils.pack(
+ ref = pack(
[
sample1["disp2"].to(**dd),
sample2["disp2"].to(**dd),
@@ -146,7 +147,7 @@ def test_atm_single(dtype: torch.dtype, name: str) -> None:
c6 = sample["c6"].to(**dd)
ref = sample["disp3"].to(**dd)
- rvdw = data.vdw_rad_d3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
+ rvdw = data.VDW_D3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
par = {k: v.to(**dd) for k, v in param.items()}
@@ -172,25 +173,25 @@ def test_atm_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
tol = sqrt(torch.finfo(dtype).eps)
sample1, sample2 = samples[name1], samples[name2]
- numbers = utils.pack(
+ numbers = pack(
[
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
]
)
- positions = utils.pack(
+ positions = pack(
[
sample1["positions"].to(**dd),
sample2["positions"].to(**dd),
]
)
- c6 = utils.pack(
+ c6 = pack(
[
sample1["c6"].to(**dd),
sample2["c6"].to(**dd),
]
)
- ref = utils.pack(
+ ref = pack(
[
sample1["disp3"].to(**dd),
sample2["disp3"].to(**dd),
@@ -199,7 +200,7 @@ def test_atm_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
par = {k: v.to(**dd) for k, v in param.items()}
- rvdw = data.vdw_rad_d3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
+ rvdw = data.VDW_D3.to(**dd)[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]
energy = damping.dispersion_atm(
numbers,
diff --git a/tests/test_utils/test_exception.py b/test/test_disp/test_exception.py
similarity index 100%
rename from tests/test_utils/test_exception.py
rename to test/test_disp/test_exception.py
diff --git a/tests/test_model/__init__.py b/test/test_grad/__init__.py
similarity index 100%
rename from tests/test_model/__init__.py
rename to test/test_grad/__init__.py
diff --git a/test/test_grad/samples.py b/test/test_grad/samples.py
new file mode 100644
index 0000000..3470795
--- /dev/null
+++ b/test/test_grad/samples.py
@@ -0,0 +1,3588 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Collection of test samples.
+"""
+from typing import Dict
+
+import torch
+from tad_mctc.data.molecules import merge_nested_dicts, mols
+from tad_mctc.typing import Molecule, Tensor, TypedDict
+
+
+class Refs(TypedDict):
+ """
+ Format of reference values. Note that energies and gradients are calculated
+ with different parameters.
+ """
+
+ grad: Tensor
+ """Nuclear gradient (reference with GFN1-xTB parameters)."""
+
+ hessian: Tensor
+ """Nuclear Hessian (reference with GFN1-xTB parameters)."""
+
+
+class Record(Molecule, Refs):
+ """Store for molecular information and reference values."""
+
+
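+# Sketch of how these references are typically consumed (hedged; the actual
+# checks live in the test modules, and ``dftd3``/``param`` stand in for the
+# package entry point and a GFN1-xTB D3 parameter dictionary):
+#
+#   pos = positions.clone().requires_grad_(True)
+#   energy = dftd3(numbers, pos, param).sum()
+#   (grad,) = torch.autograd.grad(energy, pos)  # compare to refs[name]["grad"]
+#
+# The Hessian references are stored flattened: a molecule with ``nat`` atoms
+# contributes ``(3 * nat) ** 2`` entries, e.g. ``refs["LiH"]["hessian"]``
+# reshapes to (6, 6) and ``refs["SiH4"]["hessian"]`` to (15, 15).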
+refs: Dict[str, Refs] = {
+ "LiH": Refs(
+ {
+ "grad": torch.tensor(
+ [
+ [
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +2.35781197246301e-06,
+ ],
+ [
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ -2.35781197246301e-06,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "hessian": torch.tensor(
+ [
+ -7.81784784406455e-07,
+ +7.81784784406455e-07,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +7.81784784406455e-07,
+ -7.81784784406455e-07,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ -7.81784784406455e-07,
+ +7.81784784406455e-07,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +7.81784784406455e-07,
+ -7.81784784406455e-07,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ -1.63891846815177e-05,
+ +1.63891846815177e-05,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +1.63891846815177e-05,
+ -1.63891846815177e-05,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "SiH4": Refs(
+ {
+ "grad": torch.tensor(
+ [
+ [
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ -7.37831467548165e-07,
+ -7.37831467548165e-07,
+ +7.37831467548165e-07,
+ ],
+ [
+ +7.37831467548165e-07,
+ +7.37831467548165e-07,
+ +7.37831467548165e-07,
+ ],
+ [
+ -7.37831467548165e-07,
+ +7.37831467548165e-07,
+ -7.37831467548165e-07,
+ ],
+ [
+ +7.37831467548165e-07,
+ -7.37831467548165e-07,
+ -7.37831467548165e-07,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "hessian": torch.tensor(
+ [
+ -6.80725771067933e-05,
+ +1.70181442766941e-05,
+ +1.70181442766983e-05,
+ +1.70181442766983e-05,
+ +1.70181442766941e-05,
+ +0.00000000000000e00,
+ +1.45384560849674e-05,
+ +1.45384560849717e-05,
+ -1.45384560849717e-05,
+ -1.45384560849674e-05,
+ -1.27054942088145e-17,
+ -1.45384560849674e-05,
+ +1.45384560849717e-05,
+ +1.45384560849717e-05,
+ -1.45384560849674e-05,
+ +1.70181440743930e-05,
+ -1.49785955400277e-05,
+ -5.70567229126333e-08,
+ -1.92543508854213e-06,
+ -5.70567229126333e-08,
+ +1.45384559037866e-05,
+ -1.49796911426310e-05,
+ +4.41235239417783e-07,
+ +1.41193487852742e-06,
+ -1.41193487910287e-06,
+ -1.45384559037823e-05,
+ +1.49796911426310e-05,
+ +1.41193487910287e-06,
+ -1.41193487852742e-06,
+ -4.41235239417783e-07,
+ +1.70181440744014e-05,
+ -5.70567229126333e-08,
+ -1.49785955400319e-05,
+ -5.70567229126333e-08,
+ -1.92543508854425e-06,
+ +1.45384559037866e-05,
+ +4.41235239417783e-07,
+ -1.49796911426352e-05,
+ -1.41193487910287e-06,
+ +1.41193487853165e-06,
+ +1.45384559037781e-05,
+ -1.41193487910287e-06,
+ -1.49796911426352e-05,
+ +4.41235239417783e-07,
+ +1.41193487853165e-06,
+ +1.70181440743887e-05,
+ -1.92543508854001e-06,
+ -5.70567229126333e-08,
+ -1.49785955400235e-05,
+ -5.70567229126333e-08,
+ -1.45384559037823e-05,
+ -1.41193487852742e-06,
+ +1.41193487910287e-06,
+ +1.49796911426268e-05,
+ -4.41235239417783e-07,
+ +1.45384559037781e-05,
+ +1.41193487852742e-06,
+ +4.41235239417783e-07,
+ -1.49796911426268e-05,
+ -1.41193487910287e-06,
+ +1.70181440744014e-05,
+ -5.70567229126333e-08,
+ -1.92543508854636e-06,
+ -5.70567229126333e-08,
+ -1.49785955400319e-05,
+ -1.45384559037866e-05,
+ +1.41193487910287e-06,
+ -1.41193487853165e-06,
+ -4.41235239417783e-07,
+ +1.49796911426352e-05,
+ -1.45384559037866e-05,
+ -4.41235239417783e-07,
+ -1.41193487853165e-06,
+ +1.41193487910287e-06,
+ +1.49796911426352e-05,
+ +0.00000000000000e00,
+ +1.45384560849717e-05,
+ +1.45384560849674e-05,
+ -1.45384560849674e-05,
+ -1.45384560849717e-05,
+ -6.80725771067933e-05,
+ +1.70181442766983e-05,
+ +1.70181442766941e-05,
+ +1.70181442766941e-05,
+ +1.70181442766983e-05,
+ +4.23516473627150e-18,
+ -1.45384560849717e-05,
+ +1.45384560849674e-05,
+ -1.45384560849674e-05,
+ +1.45384560849717e-05,
+ +1.45384559037866e-05,
+ -1.49796911426352e-05,
+ +4.41235239417783e-07,
+ -1.41193487910287e-06,
+ +1.41193487853165e-06,
+ +1.70181440744014e-05,
+ -1.49785955400319e-05,
+ -5.70567229126333e-08,
+ -5.70567229126333e-08,
+ -1.92543508854636e-06,
+ -1.45384559037781e-05,
+ +1.49796911426352e-05,
+ +1.41193487910287e-06,
+ -4.41235239417783e-07,
+ -1.41193487853165e-06,
+ +1.45384559037866e-05,
+ +4.41235239417783e-07,
+ -1.49796911426310e-05,
+ +1.41193487852742e-06,
+ -1.41193487910287e-06,
+ +1.70181440743930e-05,
+ -5.70567229126333e-08,
+ -1.49785955400277e-05,
+ -1.92543508854001e-06,
+ -5.70567229126333e-08,
+ +1.45384559037823e-05,
+ -1.41193487910287e-06,
+ -1.49796911426310e-05,
+ +1.41193487852742e-06,
+ +4.41235239417783e-07,
+ -1.45384559037866e-05,
+ +1.41193487910287e-06,
+ -1.41193487852742e-06,
+ +1.49796911426310e-05,
+ -4.41235239417783e-07,
+ +1.70181440743930e-05,
+ -5.70567229126333e-08,
+ -1.92543508854001e-06,
+ -1.49785955400277e-05,
+ -5.70567229126333e-08,
+ -1.45384559037823e-05,
+ -4.41235239417783e-07,
+ -1.41193487852742e-06,
+ +1.49796911426310e-05,
+ +1.41193487910287e-06,
+ -1.45384559037823e-05,
+ -1.41193487853165e-06,
+ +1.41193487910287e-06,
+ -4.41235239417783e-07,
+ +1.49796911426310e-05,
+ +1.70181440743972e-05,
+ -1.92543508854636e-06,
+ -5.70567229126333e-08,
+ -5.70567229126333e-08,
+ -1.49785955400277e-05,
+ +1.45384559037823e-05,
+ +1.41193487853165e-06,
+ +4.41235239417783e-07,
+ -1.41193487910287e-06,
+ -1.49796911426310e-05,
+ +0.00000000000000e00,
+ -1.45384560849696e-05,
+ +1.45384560849696e-05,
+ +1.45384560849674e-05,
+ -1.45384560849674e-05,
+ +0.00000000000000e00,
+ -1.45384560849696e-05,
+ +1.45384560849696e-05,
+ -1.45384560849674e-05,
+ +1.45384560849674e-05,
+ -6.80725771067933e-05,
+ +1.70181442766983e-05,
+ +1.70181442766983e-05,
+ +1.70181442766941e-05,
+ +1.70181442766941e-05,
+ -1.45384559037823e-05,
+ +1.49796911426247e-05,
+ -1.41193487852742e-06,
+ +1.41193487910287e-06,
+ -4.41235239417783e-07,
+ -1.45384559037823e-05,
+ +1.49796911426247e-05,
+ -1.41193487852742e-06,
+ -4.41235239417783e-07,
+ +1.41193487910287e-06,
+ +1.70181440743845e-05,
+ -1.49785955400235e-05,
+ -1.92543508854213e-06,
+ -5.70567229126333e-08,
+ -5.70567229126333e-08,
+ +1.45384559037823e-05,
+ +1.41193487852742e-06,
+ -1.49796911426247e-05,
+ +4.41235239417783e-07,
+ -1.41193487910287e-06,
+ +1.45384559037823e-05,
+ +1.41193487852742e-06,
+ -1.49796911426247e-05,
+ -1.41193487910287e-06,
+ +4.41235239417783e-07,
+ +1.70181440743845e-05,
+ -1.92543508854213e-06,
+ -1.49785955400235e-05,
+ -5.70567229126333e-08,
+ -5.70567229126333e-08,
+ +1.45384559037823e-05,
+ -1.41193487910287e-06,
+ +4.41235239417783e-07,
+ -1.49796911426289e-05,
+ +1.41193487853165e-06,
+ -1.45384559037823e-05,
+ -4.41235239417783e-07,
+ +1.41193487910287e-06,
+ +1.49796911426289e-05,
+ -1.41193487853165e-06,
+ +1.70181440743972e-05,
+ -5.70567229126333e-08,
+ -5.70567229126333e-08,
+ -1.49785955400277e-05,
+ -1.92543508854636e-06,
+ -1.45384559037823e-05,
+ -4.41235239417783e-07,
+ +1.41193487910287e-06,
+ -1.41193487853165e-06,
+ +1.49796911426289e-05,
+ +1.45384559037823e-05,
+ -1.41193487910287e-06,
+ +4.41235239417783e-07,
+ +1.41193487853165e-06,
+ -1.49796911426289e-05,
+ +1.70181440743972e-05,
+ -5.70567229126333e-08,
+ -5.70567229126333e-08,
+ -1.92543508854636e-06,
+ -1.49785955400277e-05,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "MB16_43_01": Refs(
+ {
+ "grad": torch.tensor(
+ [
+ [
+ -5.22604735731951e-05,
+ +7.57147038627570e-05,
+ +3.67793033568259e-06,
+ ],
+ [
+ +3.04279179089300e-05,
+ +3.93865603948018e-06,
+ -4.80630087807750e-05,
+ ],
+ [
+ -5.65165374475145e-05,
+ +1.03902867985891e-04,
+ +2.63237102979009e-05,
+ ],
+ [
+ +1.50668534907175e-05,
+ +1.73612108984945e-05,
+ -6.64630748967830e-05,
+ ],
+ [
+ -4.37809939508503e-05,
+ +2.89855745485217e-05,
+ +4.78712582873749e-05,
+ ],
+ [
+ -4.99927548251511e-05,
+ -5.11044626786480e-05,
+ +3.10795082433519e-05,
+ ],
+ [
+ +4.62374054045158e-05,
+ +1.12008826683273e-05,
+ -3.52205497890963e-05,
+ ],
+ [
+ +1.02760000394114e-04,
+ -4.70879992429260e-05,
+ +6.03786034525043e-05,
+ ],
+ [
+ -6.16946875634895e-05,
+ +2.06590347011126e-05,
+ +4.55261050870207e-05,
+ ],
+ [
+ +8.07830307796794e-05,
+ -1.88063755972256e-05,
+ +4.55153736736531e-05,
+ ],
+ [
+ -3.24620330933132e-05,
+ -3.08010755078516e-05,
+ +5.96097648724992e-05,
+ ],
+ [
+ +5.39286653929828e-05,
+ -1.05567380476723e-04,
+ -1.52019596084091e-04,
+ ],
+ [
+ +6.80186584793621e-05,
+ +5.42998426000167e-06,
+ -1.06317383896321e-04,
+ ],
+ [
+ -6.02912847795852e-05,
+ +6.80699694022760e-05,
+ +8.12417270007505e-06,
+ ],
+ [
+ -6.21385767002360e-05,
+ -5.48568033283525e-05,
+ +7.06375030172422e-05,
+ ],
+ [
+ +2.19148100830330e-05,
+ -2.70387875351359e-05,
+ +9.33968347976166e-06,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "hessian": torch.tensor(
+ [
+ -1.58037767579124e-05,
+ -4.45458195710743e-06,
+ +2.73415495058188e-07,
+ +1.50839790266160e-06,
+ -3.58852773410805e-07,
+ -2.15253985381985e-06,
+ -1.61412508999683e-06,
+ -1.05835787609667e-06,
+ -2.83859298628899e-06,
+ +1.92972031881405e-06,
+ -2.04766443035652e-06,
+ -3.33319888288613e-06,
+ +1.40539843353433e-07,
+ +4.17693103172410e-05,
+ -3.31669309766960e-06,
+ -8.64300017162532e-06,
+ -1.97186953986456e-05,
+ +1.33972844486552e-06,
+ -1.37650778335397e-06,
+ -1.24848492833138e-06,
+ +6.69003997809208e-07,
+ -3.89074605832687e-07,
+ +2.13482688025386e-06,
+ -1.03124093584552e-06,
+ +4.39708952677936e-06,
+ -2.48751314195795e-06,
+ +1.56775778848513e-07,
+ +2.71925629352610e-06,
+ -3.56729066393067e-07,
+ +2.68196507557451e-06,
+ +5.85060879209248e-06,
+ +6.65899107075041e-06,
+ +8.41467182177841e-05,
+ +1.02409471768511e-06,
+ -2.32668147985907e-07,
+ -3.56889914492162e-06,
+ -1.87932836515623e-06,
+ +1.72975676378175e-07,
+ -6.50795268979727e-07,
+ +7.34350675768111e-07,
+ +2.47490054193717e-06,
+ +1.93844391137499e-06,
+ -1.14329369911993e-07,
+ +1.45170929043061e-07,
+ +3.53626219900074e-07,
+ -8.83753950286709e-05,
+ +2.17501901634675e-07,
+ +3.61363253380330e-06,
+ -4.45458202270166e-06,
+ -6.28616389684880e-05,
+ +5.27522755261933e-07,
+ -1.16115766310197e-06,
+ +9.18864019838457e-08,
+ +3.73885461300979e-07,
+ -4.30050245699251e-07,
+ -1.53398070558853e-06,
+ +1.69343550812694e-06,
+ -1.15413670611351e-06,
+ +1.11769758047084e-07,
+ -3.53623909424587e-06,
+ +7.38058667847997e-05,
+ +9.10197858110338e-07,
+ +9.19226505690881e-07,
+ -3.30200562735750e-06,
+ +1.48165533349885e-06,
+ +9.98685464463175e-06,
+ -4.73288503657562e-07,
+ +3.50827012992276e-07,
+ -3.10928247124892e-08,
+ +2.00172792058607e-07,
+ +1.18017858881069e-07,
+ +2.75916873780668e-08,
+ -1.66372198492009e-07,
+ -8.71615443428857e-08,
+ +6.01480024369893e-08,
+ -2.34539310654889e-06,
+ -8.48006783063673e-06,
+ -4.59402821977018e-07,
+ +5.02527189071087e-07,
+ -6.85015691089322e-07,
+ +8.87509051248637e-07,
+ +3.79002594187564e-05,
+ -6.27629439051695e-07,
+ -1.99161873652058e-08,
+ -1.41608270994172e-07,
+ -4.05755037486126e-07,
+ +3.69329444719453e-07,
+ -9.07994920841971e-08,
+ -2.79936709172279e-06,
+ +3.58672610011144e-07,
+ -2.52106625912808e-07,
+ +1.15442521861353e-06,
+ -3.61623640187931e-05,
+ -1.19767162827293e-06,
+ -1.59163707956706e-06,
+ +2.61865912725295e-06,
+ +2.73415453248642e-07,
+ +5.27522758616184e-07,
+ +7.64231639773736e-06,
+ -1.99793154551331e-07,
+ -7.93716774807006e-07,
+ -1.28852634020526e-06,
+ +4.13537055009186e-08,
+ +1.90013731162756e-06,
+ -9.60815728872190e-07,
+ +9.13122297162262e-07,
+ -1.51736977884988e-06,
+ -9.97002457195774e-08,
+ +4.60347237760117e-07,
+ +4.64118591106920e-07,
+ -2.84966190516889e-06,
+ -4.51274982531428e-06,
+ -2.55594510061518e-07,
+ -4.92120428884585e-07,
+ +8.66921100451366e-06,
+ -9.10797920983535e-07,
+ -7.26007980133853e-07,
+ +3.20728742501074e-07,
+ -1.06649756361551e-06,
+ -2.85423654072044e-06,
+ +1.10607147347572e-06,
+ -9.02600972817733e-07,
+ -2.85705760705833e-07,
+ -3.73689650864971e-06,
+ -2.11748167943619e-06,
+ +1.14323291705207e-07,
+ +1.22243185151781e-07,
+ +3.01536216885628e-06,
+ -6.64318956294481e-07,
+ -6.35545662355330e-07,
+ +6.81555939979520e-06,
+ -1.71549758651453e-06,
+ +7.72655629120668e-07,
+ -8.69894115778942e-09,
+ -8.53239472517920e-07,
+ +3.96678520346789e-07,
+ +7.87884988235612e-08,
+ +6.82156299830938e-08,
+ +1.12805320335132e-07,
+ -2.16941049624225e-06,
+ -2.45329935999374e-06,
+ -1.18242964120964e-07,
+ -9.10094224916731e-09,
+ +3.82651383410194e-07,
+ +1.50839794268390e-06,
+ -1.16115767596840e-06,
+ -1.99793170865185e-07,
+ +5.38904243126327e-06,
+ +9.08387430488718e-08,
+ +1.76734036307330e-07,
+ -4.81973772145042e-07,
+ -3.74646649491908e-07,
+ -2.19190989367475e-06,
+ +2.99313476632393e-07,
+ -5.96023226704257e-08,
+ -1.15283607587604e-06,
+ +7.54730391003917e-07,
+ -6.29211986536544e-07,
+ -5.40625700877115e-07,
+ -1.42729977296979e-06,
+ -1.03824751320457e-06,
+ +3.50402418974156e-07,
+ -9.01199627347405e-07,
+ +1.77190909602493e-06,
+ -2.79555909772969e-08,
+ +6.09471850259921e-07,
+ +4.66356281747477e-08,
+ -6.59726483037974e-07,
+ -1.12732613517216e-07,
+ -5.36529892847357e-07,
+ +1.24556363012844e-07,
+ +2.20763108992441e-07,
+ -4.07446909993053e-07,
+ -3.08204641211065e-07,
+ +7.80723577217921e-07,
+ +8.75812293815606e-08,
+ -1.65140168344814e-06,
+ -2.05540289242899e-08,
+ -1.71886288773324e-06,
+ +5.70216702367344e-06,
+ -4.81436199398580e-07,
+ -8.70503497911912e-07,
+ -4.15797640526791e-07,
+ +1.38275001454610e-06,
+ +5.09863342850272e-07,
+ +1.21272078293253e-06,
+ -3.20818027125960e-07,
+ -8.13758873382652e-08,
+ +3.73648434783859e-07,
+ -1.79236890506993e-06,
+ -1.58002474229528e-06,
+ -2.48006098833801e-07,
+ -3.58852732109478e-07,
+ +9.18864026275908e-08,
+ -7.93716774976413e-07,
+ +9.08387430996937e-08,
+ -3.00729426068816e-07,
+ -1.26757236400732e-06,
+ +1.12991800432175e-06,
+ +3.39944551654636e-06,
+ -1.57684091248145e-06,
+ +9.58613823357716e-07,
+ -8.51475747277574e-07,
+ +6.45167083785628e-07,
+ +8.35820069636250e-07,
+ +4.17047233641050e-06,
+ -1.88512807421722e-06,
+ -4.28784594874886e-06,
+ +1.14850796828123e-06,
+ -2.78202322408840e-08,
+ -5.17800473790017e-07,
+ -1.93147106584946e-08,
+ -2.55840080682986e-06,
+ +2.83348176431002e-07,
+ -1.79681605398058e-07,
+ -2.17478744302774e-06,
+ +3.35361500092138e-07,
+ -4.19361935822605e-07,
+ +7.94962713855012e-07,
+ -1.60680908716535e-06,
+ -3.15758129676942e-07,
+ +3.36475595907253e-06,
+ +9.45116031606224e-07,
+ +9.47682075636046e-07,
+ -5.40859819302488e-06,
+ -1.48418927836810e-07,
+ +7.04363226409046e-07,
+ -4.81240475393814e-07,
+ +1.13601598539299e-05,
+ +1.70202203974559e-07,
+ -1.44459915140142e-06,
+ -1.30412252418360e-06,
+ +1.62925935949905e-06,
+ -3.43035941042765e-07,
+ +9.81975704431579e-08,
+ -2.52950123547828e-06,
+ -1.72197411650663e-06,
+ -3.20828958740901e-06,
+ +8.69506465458569e-07,
+ +1.75809147262855e-06,
+ -2.15253988478738e-06,
+ +3.73885463283036e-07,
+ -1.28852634159439e-06,
+ +1.76734050291844e-07,
+ -1.26757236370239e-06,
+ -4.97944826004075e-07,
+ +1.27398049886720e-06,
+ +1.00750934030618e-06,
+ -7.90980814578831e-07,
+ +1.97844348319247e-06,
+ -2.05690562916612e-07,
+ -6.11566606387783e-06,
+ +1.34505665035556e-06,
+ -1.29441153213621e-06,
+ +1.22894993292798e-05,
+ -4.83177642605462e-06,
+ -3.91988383755242e-07,
+ +2.10099265751414e-07,
+ +2.79725418311305e-07,
+ +6.09249245444346e-07,
+ +2.48049895985240e-07,
+ -8.09157258733974e-06,
+ +1.34612685973010e-06,
+ +4.04360014811817e-07,
+ -1.02743429315796e-06,
+ +4.28151831225804e-07,
+ -1.01504075496639e-07,
+ +2.15396305663657e-08,
+ +1.55779135027015e-06,
+ -4.09257880338329e-07,
+ +6.62069415370765e-06,
+ -1.70403044522500e-06,
+ +2.25719649035427e-07,
+ -4.06977312161533e-07,
+ -7.36707456445843e-09,
+ -8.57538632795642e-07,
+ +1.53570009051756e-07,
+ -5.32154730515728e-06,
+ -1.01377874949909e-06,
+ +2.61979183328485e-07,
+ +1.17120306508678e-07,
+ +7.88690204521513e-08,
+ -2.47574266588804e-07,
+ +3.16781666328224e-06,
+ -2.43250561098756e-06,
+ -6.67026769020661e-08,
+ +5.42933503541634e-06,
+ +9.19581761287008e-07,
+ -1.61412499465481e-06,
+ -4.30050248545282e-07,
+ +4.13537001815517e-08,
+ -4.81973805391086e-07,
+ +1.12991800293262e-06,
+ +1.27398050672766e-06,
+ +8.41274715319816e-06,
+ -9.98467174147559e-07,
+ -3.91503096739863e-06,
+ -1.45869558111340e-06,
+ +5.28661500698042e-07,
+ -1.61342572762164e-06,
+ +6.64320077558580e-07,
+ -2.85058631914985e-06,
+ -1.84531690078924e-06,
+ +3.15669077784523e-06,
+ +8.29077189998359e-07,
+ +1.21205073823958e-07,
+ -1.02786661953656e-06,
+ +4.30911713138248e-08,
+ -1.89396527754555e-07,
+ +1.35018301328563e-06,
+ -6.85166979492412e-06,
+ +3.87275777175315e-07,
+ -4.87149225547199e-07,
+ +6.65524885262649e-07,
+ +7.78371941350547e-07,
+ -1.31692528362621e-06,
+ +1.19096287142508e-06,
+ +1.93723238134640e-07,
+ +5.57046856219739e-08,
+ +4.25788760396702e-06,
+ -7.56141634274463e-07,
+ +3.75096227373613e-07,
+ -8.51783828712015e-07,
+ -4.23455520594165e-07,
+ -1.42149787587024e-06,
+ -1.02596443344563e-06,
+ +5.44436496475737e-06,
+ -4.88749686406689e-07,
+ +1.73187083696015e-06,
+ -8.80890370683843e-07,
+ -1.40637529969173e-06,
+ -5.01047874522896e-07,
+ +3.24999872385035e-06,
+ +1.03815977020505e-06,
+ -1.26978961470948e-07,
+ -3.95660503651677e-06,
+ -1.05835790533624e-06,
+ -1.53398070223428e-06,
+ +1.90013731328774e-06,
+ -3.74646631797390e-07,
+ +3.39944551922298e-06,
+ +1.00750934715020e-06,
+ -9.98467171301528e-07,
+ -5.40788343969469e-06,
+ -7.01295463839463e-06,
+ +3.02925178698014e-05,
+ -4.04679362875436e-06,
+ -4.92214842889432e-06,
+ -2.27890119284862e-06,
+ -2.80125040154897e-07,
+ -7.70440143038858e-06,
+ -9.80949839518813e-07,
+ -8.13161924134807e-07,
+ +7.15841391442747e-08,
+ -2.84918952200928e-06,
+ -6.71057467728405e-07,
+ -2.21166634770229e-06,
+ +3.78666940671949e-07,
+ +3.79556320785762e-07,
+ -1.78880123787148e-06,
+ +2.13788940919829e-06,
+ +7.92081850794224e-06,
+ -3.39155594342340e-08,
+ -7.80936548744728e-07,
+ +3.48447396191936e-07,
+ -1.65097498064197e-06,
+ -2.85908114505240e-07,
+ -1.51351010937566e-07,
+ +7.90209516189938e-08,
+ -2.27701700810518e-07,
+ +3.99128224316602e-07,
+ +1.39585403904408e-06,
+ -1.25488383258195e-06,
+ +2.69691699121598e-07,
+ -4.78035062299572e-07,
+ +1.17108617458749e-05,
+ -1.09934801093312e-06,
+ -7.38203103781479e-06,
+ +8.27077948805570e-07,
+ -3.83407514829674e-06,
+ -9.27455256691245e-07,
+ +7.19292799845531e-07,
+ -5.33332693329960e-08,
+ -1.44064089830357e-07,
+ -2.83859298872845e-06,
+ +1.69343551968047e-06,
+ -9.60815727787988e-07,
+ -2.19190990975990e-06,
+ -1.57684091393835e-06,
+ -7.90980817018286e-07,
+ -3.91503099291126e-06,
+ -7.01295464672943e-06,
+ +1.73165237012075e-05,
+ +9.96402859716359e-07,
+ -3.06096459485629e-07,
+ -5.75738821648555e-06,
+ -4.87786295947160e-06,
+ +1.21435078803694e-05,
+ -3.70092403581936e-07,
+ -1.55130392471114e-06,
+ -2.23353423890781e-06,
+ -1.54104805740908e-07,
+ +8.81368314987444e-07,
+ -8.89117609323821e-08,
+ +1.05109830920698e-06,
+ -1.12694195551847e-06,
+ -4.57619969580758e-07,
+ +2.19159509509963e-06,
+ +7.45714828567914e-06,
+ -8.38431932168466e-07,
+ -1.99976691598518e-07,
+ +2.87415985061629e-06,
+ +3.76206669271551e-08,
+ -1.15895490108303e-05,
+ +9.08359290292381e-07,
+ +1.28772055229070e-06,
+ +1.02390754856001e-05,
+ -2.85708745134943e-06,
+ +1.41186695886179e-07,
+ +4.95488660883880e-07,
+ +1.33614917188844e-06,
+ +1.52254199340133e-07,
+ +1.73852010895416e-06,
+ -1.08397622925897e-06,
+ -1.08371441083288e-06,
+ +2.82254249642656e-07,
+ +2.37089442825208e-07,
+ +3.54500423699160e-06,
+ +3.23230452013636e-06,
+ -1.70299033651641e-05,
+ +4.09881412258327e-08,
+ +6.14366543292291e-07,
+ +1.92972031424007e-06,
+ -1.15413670819721e-06,
+ +9.13122294180706e-07,
+ +2.99313474184468e-07,
+ +9.58613820240635e-07,
+ +1.97844348329412e-06,
+ -1.45869559121004e-06,
+ +3.02925181074450e-05,
+ +9.96402855650601e-07,
+ -2.61095426936443e-05,
+ +2.33512276436856e-06,
+ -5.14085336542698e-06,
+ -2.56625819742727e-06,
+ +3.15240531673300e-06,
+ +1.26136127523511e-06,
+ -7.68753714964102e-06,
+ -2.22980284200248e-06,
+ -2.72611395212534e-08,
+ -8.68497536325838e-07,
+ -5.14170421225867e-07,
+ -4.18038842881245e-07,
+ +4.19099985191605e-07,
+ +6.95662416730456e-07,
+ +8.31719749194319e-06,
+ -8.02933391907327e-07,
+ -3.13729379362295e-06,
+ +2.25342858623998e-07,
+ -6.65722255357363e-07,
+ +4.52557802100382e-07,
+ -2.05029217006394e-06,
+ +2.68351451933465e-07,
+ +3.35800386605398e-07,
+ +1.48358124472429e-06,
+ +1.01373435335136e-07,
+ +6.43208276828659e-08,
+ +1.16290018664178e-06,
+ -3.14801990220935e-07,
+ +7.34930322030198e-08,
+ -9.13456240952037e-07,
+ -8.15855407239547e-06,
+ +2.61288524691162e-07,
+ +1.31040301788344e-05,
+ -9.33995928863141e-07,
+ -2.06589359809102e-06,
+ -1.37912968459881e-06,
+ +1.23494857147473e-07,
+ -2.02364296516709e-07,
+ -2.40628647545686e-06,
+ -2.04766446881182e-06,
+ +1.11769759131286e-07,
+ -1.51736977949363e-06,
+ -5.96023079320524e-08,
+ -8.51475747379218e-07,
+ -2.05690564034696e-07,
+ +5.28661506085171e-07,
+ -4.04679363342998e-06,
+ -3.06096460705356e-07,
+ +2.33512277100930e-06,
+ +1.00668546393188e-05,
+ -1.74072351656298e-06,
+ +3.54272203952999e-07,
+ -7.89057548311177e-07,
+ +1.13244280123517e-06,
+ -2.96464965425820e-06,
+ +3.14308301607724e-08,
+ +6.42481842780721e-08,
+ -3.07153919676005e-07,
+ +1.25339725272724e-07,
+ +8.09977454997447e-07,
+ -1.05016313875462e-07,
+ +7.75222659850700e-07,
+ -1.91089980685201e-08,
+ -1.83258517563023e-07,
+ +2.26437393565060e-07,
+ -1.34068036112113e-06,
+ -4.20672323405281e-07,
+ +6.13994356824685e-07,
+ +4.93654582814883e-07,
+ -1.69962595599372e-07,
+ -5.94452158159087e-07,
+ +2.33210947518577e-09,
+ -2.61280305422255e-07,
+ +1.28301466317333e-07,
+ -3.21786846978591e-07,
+ +8.47638712014485e-08,
+ -2.69391770955257e-07,
+ -1.42085519795631e-06,
+ +7.86450629859453e-07,
+ +2.31026332369070e-07,
+ -9.88023219249329e-07,
+ +6.53439492503949e-06,
+ -2.90748965349604e-06,
+ -2.05173636726988e-06,
+ -2.45629103553846e-07,
+ -2.14808943331220e-06,
+ +2.84701256343622e-06,
+ -3.33319881265016e-06,
+ -3.53623910847602e-06,
+ -9.97002454146455e-08,
+ -1.15283610706380e-06,
+ +6.45167085073118e-07,
+ -6.11566605896504e-06,
+ -1.61342572843479e-06,
+ -4.92214843543341e-06,
+ -5.75738821804409e-06,
+ -5.14085335356852e-06,
+ -1.74072351615640e-06,
+ +4.29459979923202e-05,
+ -8.15992815716071e-07,
+ -2.99237429753502e-06,
+ -6.27461458238923e-06,
+ -9.60037976308233e-08,
+ +3.27632676211062e-06,
+ -2.74467703195787e-06,
+ -3.71857091065262e-06,
+ +1.96494607101338e-07,
+ -1.65886808096258e-06,
+ -1.50233764908666e-08,
+ -1.34880532418344e-06,
+ -7.80347130263529e-07,
+ +2.87083615045517e-06,
+ -6.17956676735238e-07,
+ -4.33830986678327e-07,
+ +7.28717331662247e-06,
+ -3.66823384225645e-07,
+ -2.56412173591446e-06,
+ +7.49375223942713e-07,
+ -1.31181422282066e-07,
+ -4.30488429863379e-07,
+ +1.15340868732622e-06,
+ -2.17534686583611e-06,
+ -6.90392864134259e-08,
+ -2.51827930195994e-06,
+ +3.09780246393550e-06,
+ -5.29874968399201e-07,
+ -3.89129478327852e-06,
+ +3.55728563051122e-06,
+ -1.92900412660570e-06,
+ -2.89592602099143e-06,
+ +3.25348266380708e-06,
+ +2.37239873267779e-07,
+ -1.68510309211841e-06,
+ +4.85950763258317e-06,
+ -3.43700760940032e-08,
+ +1.40540016148154e-07,
+ +7.38058668571702e-05,
+ +4.60347225224029e-07,
+ +7.54730445527428e-07,
+ +8.35820067976065e-07,
+ +1.34505664381646e-06,
+ +6.64320048047952e-07,
+ -2.27890120931495e-06,
+ -4.87786291949164e-06,
+ -2.56625819214178e-06,
+ +3.54272187825491e-07,
+ -8.15992825982110e-07,
+ -6.28336488906151e-05,
+ -2.09063281694322e-06,
+ -2.41284661975578e-06,
+ -4.84810017821566e-07,
+ -1.42717239935697e-07,
+ -8.12237674146198e-06,
+ -2.10955583581610e-06,
+ -2.85262320035229e-07,
+ -3.39620059370193e-07,
+ +1.56227981286129e-06,
+ +6.06762951616327e-07,
+ +4.22931507218884e-07,
+ +3.93368477168924e-09,
+ +4.41072803111689e-07,
+ +6.16252799353169e-07,
+ -7.41155855899000e-07,
+ +8.75892584881404e-06,
+ -3.61694349804479e-07,
+ +2.67168622837023e-07,
+ -5.76945628062388e-07,
+ -1.70892667003811e-06,
+ -3.56365426719139e-05,
+ -2.46609490356267e-06,
+ +2.48126472039494e-06,
+ -1.70921043011790e-06,
+ -2.47742120495711e-06,
+ +2.52268434244027e-07,
+ -1.14605818890854e-06,
+ +3.18655912769729e-06,
+ -1.41223753269301e-06,
+ -2.05144690612684e-06,
+ +2.32819690582737e-07,
+ +4.30391907939201e-05,
+ -3.24565253637116e-07,
+ -7.99263429149700e-07,
+ +5.39664424505096e-07,
+ +4.17693102894244e-05,
+ +9.10197865377880e-07,
+ +4.64118599983825e-07,
+ -6.29211957263085e-07,
+ +4.17047226017753e-06,
+ -1.29441152888361e-06,
+ -2.85058632751853e-06,
+ -2.80125046626229e-07,
+ +1.21435081009368e-05,
+ +3.15240532164579e-06,
+ -7.89057544076013e-07,
+ -2.99237431122308e-06,
+ -2.09063280101900e-06,
+ -5.18609975264726e-05,
+ +1.43573524962848e-06,
+ -1.25835064409253e-06,
+ +2.66742072905986e-05,
+ -4.60658159274855e-07,
+ +1.75011237117239e-06,
+ -2.84862643320843e-07,
+ +1.62305946855323e-06,
+ -2.99490068568454e-07,
+ +2.02034141827178e-07,
+ -1.45786593517758e-06,
+ -1.78333023319327e-05,
+ -2.05939139027818e-06,
+ +3.60858538349710e-07,
+ -2.12645527617285e-06,
+ -2.61901550078010e-07,
+ -2.67885990056253e-06,
+ -5.12357338996214e-06,
+ +1.97608883492018e-06,
+ -1.20312260426623e-04,
+ -1.31823726898407e-06,
+ -8.08332930535364e-07,
+ -1.87995292300254e-06,
+ -5.30363879255310e-06,
+ -4.62595519412656e-08,
+ +6.07550388951716e-07,
+ -4.49143803068651e-08,
+ -1.03128629026872e-05,
+ -2.83319361115526e-07,
+ -1.30398359278927e-07,
+ -2.09962513670367e-06,
+ -7.36445507508910e-07,
+ +1.41872356172136e-04,
+ -1.25199160121246e-07,
+ +9.21540140339872e-07,
+ -3.31669319013171e-06,
+ +9.19226512179153e-07,
+ -2.84966190659190e-06,
+ -5.40625669579248e-07,
+ -1.88512807804581e-06,
+ +1.22894993156595e-05,
+ -1.84531685640471e-06,
+ -7.70440143018530e-06,
+ -3.70092408054270e-07,
+ +1.26136129373431e-06,
+ +1.13244279967663e-06,
+ -6.27461457937379e-06,
+ -2.41284657510020e-06,
+ +1.43573524454628e-06,
+ +1.15322763729990e-05,
+ -1.37116084482822e-06,
+ -1.19864671228149e-06,
+ +5.41024809096868e-07,
+ +4.46675228543084e-08,
+ +8.15690536423437e-07,
+ +1.67296579387866e-06,
+ +6.58962003686976e-06,
+ +1.01393639434150e-07,
+ -2.25939747427572e-07,
+ +2.21085806556369e-07,
+ +3.05530762399287e-07,
+ -1.49200294385422e-07,
+ +7.90079071976801e-07,
+ +3.10501502892459e-07,
+ +3.85931646496973e-06,
+ -1.39981980427724e-05,
+ +3.20108849214388e-07,
+ +5.14572421622466e-06,
+ -1.62981439565932e-06,
+ +2.96557822922075e-08,
+ -1.59920091851297e-06,
+ +6.41233507468037e-07,
+ +5.39202489207208e-06,
+ -1.40968775111979e-07,
+ -4.74709817150352e-08,
+ +2.47610811588144e-07,
+ -2.13562919383408e-07,
+ -2.02995515382676e-06,
+ +4.81317330853744e-06,
+ -7.94809432096252e-07,
+ -5.18262572016387e-06,
+ -5.25054796691950e-06,
+ +6.19533745424637e-07,
+ -8.64300020740399e-06,
+ -3.30200562444370e-06,
+ -4.51274982941392e-06,
+ -1.42729975478399e-06,
+ -4.28784596817979e-06,
+ -4.83177644765396e-06,
+ +3.15669072478708e-06,
+ -9.80949584189201e-07,
+ -1.55130390656769e-06,
+ -7.68753726265216e-06,
+ -2.96464965874747e-06,
+ -9.60037789622171e-08,
+ -4.84810014196265e-07,
+ -1.25835054239776e-06,
+ -1.37116081069279e-06,
+ +4.02427526654555e-05,
+ -5.41881661341326e-06,
+ -6.56128427609274e-07,
+ +2.52454612668593e-06,
+ +1.10260748031481e-07,
+ +2.08689209198591e-06,
+ -1.68645244240351e-06,
+ +3.49803472149876e-06,
+ -7.58897005499293e-07,
+ +2.08376633874894e-06,
+ +1.09870497336901e-06,
+ -5.72455263937342e-07,
+ -1.72005087581938e-07,
+ -9.64593103581398e-07,
+ +1.13646190951676e-05,
+ +2.50677002616842e-06,
+ -1.50442461777317e-05,
+ +2.87224551341210e-05,
+ +2.58792717002632e-06,
+ +6.05570447709562e-07,
+ -3.01146970722995e-07,
+ +6.76153179907308e-07,
+ +1.09709756277359e-06,
+ -5.50632861789219e-07,
+ +9.82869454295193e-07,
+ +1.26971892408409e-06,
+ -3.61110170354195e-06,
+ +2.57336075022610e-06,
+ -1.23473675403031e-07,
+ -1.82628905122145e-06,
+ -2.45268064455380e-05,
+ +4.69700117965459e-07,
+ -8.04540203237097e-06,
+ -1.97186954528557e-05,
+ +1.48165526870083e-06,
+ -2.55594563797288e-07,
+ -1.03824744760186e-06,
+ +1.14850792470986e-06,
+ -3.91988418957932e-07,
+ +8.29077319696044e-07,
+ -8.13161968858146e-07,
+ -2.23353411659625e-06,
+ -2.22980283847882e-06,
+ +3.14307767976967e-08,
+ +3.27632686355128e-06,
+ -1.42717045592457e-07,
+ +2.66742072151788e-05,
+ -1.19864686291783e-06,
+ -5.41881665251231e-06,
+ -1.03654002603913e-06,
+ -2.72246879870016e-06,
+ -5.70908399155352e-07,
+ +1.06909920195183e-06,
+ -9.79054938842963e-07,
+ -3.45479813028106e-06,
+ -1.70408839163831e-06,
+ -1.38978786195598e-06,
+ +2.89807408780429e-06,
+ +2.55604144298963e-07,
+ -9.33174274683171e-07,
+ -9.77654345880093e-06,
+ -2.56142604426788e-07,
+ +1.44153166709910e-05,
+ -7.02991006471540e-07,
+ +4.88840378612293e-06,
+ +7.30120269068849e-05,
+ -6.34968493224157e-07,
+ +1.20584208600000e-06,
+ +2.67423863225581e-06,
+ -1.83086648526822e-06,
+ +7.36485264795999e-07,
+ +2.15349661321080e-07,
+ -7.55458777234100e-07,
+ +2.92204709684767e-06,
+ -1.38834950944678e-06,
+ -1.13982428401641e-06,
+ -1.29060915098999e-06,
+ -7.94905190797243e-07,
+ -7.92763189570889e-05,
+ +3.24970708376587e-06,
+ +3.09560411654386e-06,
+ +1.33972843845771e-06,
+ +9.98685388486437e-06,
+ -4.92120428677062e-07,
+ +3.50402417856072e-07,
+ -2.78202321815917e-08,
+ +2.10099265120374e-07,
+ +1.21205094165454e-07,
+ +7.15841413380901e-08,
+ -1.54104806443945e-07,
+ -2.72611373443787e-08,
+ +6.42481835962105e-08,
+ -2.74467703573141e-06,
+ -8.12237598645069e-06,
+ -4.60658162074299e-07,
+ +5.41024806471066e-07,
+ -6.56128443338676e-07,
+ -2.72246878564738e-06,
+ +7.64432690002143e-06,
+ +9.70177161941895e-08,
+ -3.82576701099687e-07,
+ -3.22016017163481e-08,
+ -3.90742347400092e-08,
+ -3.02926455231553e-07,
+ -1.29664388214934e-06,
+ -1.16177050306907e-06,
+ -1.10632576306641e-06,
+ -5.28963812584504e-08,
+ -3.71301441822553e-06,
+ +5.63994939499811e-06,
+ -2.50886664010042e-07,
+ -3.10019894955368e-07,
+ -2.01048872582854e-06,
+ -4.96345715069463e-07,
+ -4.80928053925181e-06,
+ +4.14630003338953e-07,
+ +8.70725299045687e-09,
+ +2.94881146613974e-08,
+ -1.71361528279409e-07,
+ -2.28216641853069e-07,
+ -8.70917430885367e-07,
+ +1.50290567615735e-07,
+ -3.44848027290421e-07,
+ -8.73348867386887e-08,
+ +1.29172158131472e-06,
+ +4.24674812256700e-06,
+ +4.16950812992282e-07,
+ -6.04129251778520e-07,
+ +1.05389756621678e-06,
+ -1.37650787839107e-06,
+ -4.73288505622679e-07,
+ +8.66921099729694e-06,
+ -9.01199626703660e-07,
+ -5.17800467928549e-07,
+ +2.79725419598796e-07,
+ -1.02786662505922e-06,
+ -2.84918952024745e-06,
+ +8.81368307601316e-07,
+ -8.68497538765293e-07,
+ -3.07153919032260e-07,
+ -3.71857090645134e-06,
+ -2.10955583656149e-06,
+ +1.75011250408880e-06,
+ +4.46675115379483e-08,
+ +2.52454608435122e-06,
+ -5.70908489415183e-07,
+ +9.70177163297148e-08,
+ -1.97037126916625e-06,
+ -4.28077719976440e-07,
+ -2.08450290594520e-06,
+ +3.21026728339552e-06,
+ -8.86157286669764e-07,
+ +1.96680987987107e-06,
+ -4.18324708696382e-06,
+ +4.24064501995848e-07,
+ +1.10658737188573e-06,
+ +5.44710667982904e-06,
+ -3.66148191689961e-07,
+ +7.96305555809994e-06,
+ -3.18975100171526e-06,
+ -6.53574503983959e-06,
+ +5.30716958354539e-06,
+ +4.03740673265453e-07,
+ -5.68137304854334e-06,
+ +1.50077111254045e-06,
+ +1.53038599723593e-06,
+ -1.19161886101234e-07,
+ +5.41944276659530e-07,
+ -4.04384484848275e-07,
+ +2.23679742398586e-08,
+ -5.24745434008368e-08,
+ -1.04611488677982e-06,
+ +4.29699348152013e-06,
+ +2.01258076312294e-06,
+ -8.03667041668854e-06,
+ +9.74833123159593e-08,
+ -3.73257908371585e-07,
+ -1.24848493207526e-06,
+ +3.50827024782975e-07,
+ -9.10797907363245e-07,
+ +1.77190907118993e-06,
+ -1.93147104213254e-08,
+ +6.09249256879290e-07,
+ +4.30911550677328e-08,
+ -6.71057453040854e-07,
+ -8.89117748067818e-08,
+ -5.14170419294632e-07,
+ +1.25339736555203e-07,
+ +1.96494581741172e-07,
+ -2.85262315207141e-07,
+ -2.84862616419077e-07,
+ +8.15690558378531e-07,
+ +1.10260743779375e-07,
+ +1.06909921589399e-06,
+ -3.82576702222006e-07,
+ -4.28077719823975e-07,
+ +1.13347653741336e-05,
+ -1.56095413474149e-07,
+ +2.52463796904538e-07,
+ -2.49831547695356e-07,
+ -1.60296646290782e-07,
+ -2.01235498738871e-06,
+ -1.48742714295474e-07,
+ -1.85336843799584e-08,
+ -5.32706960757379e-06,
+ +3.54750870263248e-07,
+ -1.51580456811352e-06,
+ -7.79728028088675e-08,
+ -2.53372286339139e-06,
+ +1.31047359681610e-06,
+ +8.41458806387536e-09,
+ +1.51974554660122e-06,
+ -1.34210916380819e-06,
+ +3.67911452510612e-08,
+ -9.25900911320841e-07,
+ +7.07093538316597e-08,
+ -1.64180172715621e-06,
+ +2.06799337176084e-07,
+ -7.86316853812572e-07,
+ -3.78182125270962e-07,
+ +1.52472649726765e-06,
+ -1.73040677517141e-07,
+ +5.22422660458329e-07,
+ -2.10010389686940e-06,
+ +2.14737263060486e-06,
+ +6.69004013055801e-07,
+ -3.10928252884716e-08,
+ -7.26007981353581e-07,
+ -2.79555905368398e-08,
+ -2.55840077913189e-06,
+ +2.48049897577662e-07,
+ -1.89396529939900e-07,
+ -2.21166634702466e-06,
+ +1.05109830819054e-06,
+ -4.18038844575311e-07,
+ +8.09977455759777e-07,
+ -1.65886808189432e-06,
+ -3.39620059844532e-07,
+ +1.62305946941721e-06,
+ +1.67296579469181e-06,
+ +2.08689210059177e-06,
+ -9.79054923748836e-07,
+ -3.22016014876492e-08,
+ -2.08450291241653e-06,
+ -1.56095413982369e-07,
+ +1.51397438025679e-05,
+ -3.27993359435072e-06,
+ -4.28393159916437e-07,
+ +4.96199402502976e-08,
+ -1.35436833594360e-06,
+ -3.65358605830090e-08,
+ -2.08559267156561e-06,
+ +5.91993252717902e-07,
+ -4.29928488101215e-07,
+ +3.31772706685808e-06,
+ -4.51216063451554e-06,
+ -3.72031646564712e-06,
+ -3.02682631358041e-06,
+ +3.46862988703203e-08,
+ +1.92665131490413e-06,
+ +5.33353611655626e-08,
+ +4.96889929318256e-06,
+ -1.25417513149910e-06,
+ +1.70064438131446e-07,
+ +6.43715897473667e-07,
+ -1.41578773922205e-06,
+ +1.22380077703925e-07,
+ -1.89040649555331e-07,
+ +2.50997962133537e-06,
+ +4.30445551909977e-07,
+ -1.83385954653343e-06,
+ -1.68960378525121e-06,
+ -1.45086468894226e-06,
+ -3.89074635987060e-07,
+ +2.00172792922580e-07,
+ +3.20728741145821e-07,
+ +6.09471866802475e-07,
+ +2.83348175956664e-07,
+ -8.09157229802717e-06,
+ +1.35018300664489e-06,
+ +3.78666940671949e-07,
+ -1.12694196368387e-06,
+ +4.19099981566304e-07,
+ -1.05016313367242e-07,
+ -1.50233756777149e-08,
+ +1.56227982021353e-06,
+ -2.99490074328278e-07,
+ +6.58961974911573e-06,
+ -1.68645241365521e-06,
+ -3.45479809155471e-06,
+ -3.90742346849520e-08,
+ +3.21026728393762e-06,
+ +2.52463783335070e-07,
+ -3.27993359531634e-06,
+ +1.29876788916412e-05,
+ +2.12870800444343e-07,
+ -1.53071126869582e-06,
+ -2.45673053950486e-06,
+ -3.13929113976587e-07,
+ -1.59158676364159e-07,
+ -3.12187240536260e-06,
+ -4.59662090982175e-07,
+ -5.51606104861452e-06,
+ +4.77236551248454e-06,
+ -1.10371520676064e-06,
+ +9.93590250841512e-07,
+ -1.63713486804650e-07,
+ -1.13025133143766e-07,
+ -9.12418465028085e-07,
+ -1.28314303491283e-06,
+ +7.93048861138840e-07,
+ -6.91401465160307e-07,
+ +3.81429495938958e-08,
+ +2.67491888184077e-07,
+ +1.39042035467141e-08,
+ -2.02414293856148e-07,
+ +1.67831678730360e-08,
+ -1.40517746527111e-06,
+ -8.55419255505399e-07,
+ +3.14717981859144e-06,
+ +3.56571459506202e-07,
+ +2.13482703977558e-06,
+ +1.18017861007122e-07,
+ -1.06649756788455e-06,
+ +4.66356100821240e-08,
+ -1.79681609048770e-07,
+ +1.34612687614560e-06,
+ -6.85166991787095e-06,
+ +3.79556326960632e-07,
+ -4.57619937088575e-07,
+ +6.95662435128011e-07,
+ +7.75222656318572e-07,
+ -1.34880533676188e-06,
+ +6.06763013178682e-07,
+ +2.02034150619380e-07,
+ +1.01393580624652e-07,
+ +3.49803481894143e-06,
+ -1.70408848673470e-06,
+ -3.02926485627331e-07,
+ -8.86157277725096e-07,
+ -2.49831556640024e-07,
+ -4.28393156926411e-07,
+ +2.12870817435824e-07,
+ +8.90150170091328e-06,
+ -2.06373772146498e-06,
+ -1.18617997451906e-06,
+ -1.40427656204485e-06,
+ -1.63632013246552e-07,
+ -4.52918584581408e-06,
+ +2.02585357161575e-06,
+ -2.02770500267087e-06,
+ -1.92591197709698e-06,
+ +5.73179996992350e-06,
+ +4.75995062393816e-07,
+ -2.27445529349792e-07,
+ +5.61437183314260e-07,
+ +7.85667419387877e-08,
+ +1.58452172755830e-07,
+ -6.97605646582118e-07,
+ -3.35204258352707e-07,
+ +1.49431482060491e-06,
+ +2.87453109546702e-07,
+ +8.65109063013137e-07,
+ -9.11125319681965e-07,
+ -1.30567979320681e-06,
+ +4.84954776856783e-06,
+ -2.56640941874450e-07,
+ -8.33670480061442e-08,
+ -4.95380738493306e-06,
+ -1.03124094140205e-06,
+ +2.75916932564754e-08,
+ -2.85423654061879e-06,
+ -6.59726482140120e-07,
+ -2.17478744319715e-06,
+ +4.04360017081866e-07,
+ +3.87275785747288e-07,
+ -1.78880048462202e-06,
+ +2.19159509679369e-06,
+ +8.31719663450868e-06,
+ -1.91089979329948e-08,
+ -7.80347130466816e-07,
+ +4.22931507117240e-07,
+ -1.45786593876900e-06,
+ -2.25939748274605e-07,
+ -7.58897027352743e-07,
+ -1.38978785348565e-06,
+ -1.29664388217051e-06,
+ +1.96680988044705e-06,
+ -1.60296646849824e-07,
+ +4.96199398945438e-08,
+ -1.53071126835700e-06,
+ -2.06373770770070e-06,
+ +1.42072706298284e-05,
+ -3.08168849891477e-06,
+ +3.98735823706713e-06,
+ -1.54028535155533e-06,
+ -3.41454150639352e-06,
+ -2.77463003262250e-06,
+ -1.37991926763806e-06,
+ -1.71302580688406e-06,
+ +1.34209135673618e-07,
+ -2.79623067260883e-07,
+ -8.17201939730633e-07,
+ -4.07557766476111e-07,
+ -1.62945463959079e-06,
+ +6.08899581553161e-07,
+ +4.19793537835577e-08,
+ +1.49234305387444e-06,
+ +3.30322323398351e-06,
+ +4.95708017107875e-07,
+ -2.14578711976677e-06,
+ +6.28212048082565e-09,
+ -1.62452932142153e-06,
+ +1.70938588804626e-06,
+ -4.86910646274691e-07,
+ +5.33434990515440e-10,
+ -2.67290183322022e-07,
+ +4.39708978039797e-06,
+ -1.66372192308669e-07,
+ +1.10607148714684e-06,
+ -1.12732644560974e-07,
+ +3.35361489995506e-07,
+ -1.02743429974787e-06,
+ -4.87149241810232e-07,
+ +2.13788941898999e-06,
+ +7.45714854510839e-06,
+ -8.02933395430984e-07,
+ -1.83258511616852e-07,
+ +2.87083612694153e-06,
+ +3.93365170352298e-09,
+ -1.78333024817898e-05,
+ +2.21085846909019e-07,
+ +2.08376641952200e-06,
+ +2.89807420479648e-06,
+ -1.16177050374670e-06,
+ -4.18324710219347e-06,
+ -2.01235496936385e-06,
+ -1.35436834048370e-06,
+ -2.45673053088206e-06,
+ -1.18617997020767e-06,
+ -3.08168849671249e-06,
+ +7.80553222488588e-06,
+ -1.66726367868331e-06,
+ -1.40156213593529e-06,
+ -7.23125526357049e-06,
+ -2.20382150993411e-06,
+ +2.15470895057021e-05,
+ -2.18153043311873e-06,
+ -2.12892300044675e-06,
+ -1.69924383120405e-05,
+ +1.65688567399058e-07,
+ -2.40098852038057e-07,
+ +2.57726759042304e-07,
+ -2.18404617172629e-07,
+ +3.12384883179072e-07,
+ +3.06994964117870e-07,
+ +4.77820269651928e-07,
+ -9.15598820665133e-06,
+ -7.57010038286493e-08,
+ +1.27178647374069e-06,
+ -4.58914808416791e-06,
+ -1.43991465479569e-08,
+ +2.92158063049103e-05,
+ +1.57098600824641e-07,
+ -8.79128599946613e-07,
+ -2.48751313299634e-06,
+ -8.71615405820595e-08,
+ -9.02600972614446e-07,
+ -5.36529892694891e-07,
+ -4.19361935653198e-07,
+ +4.28151831310508e-07,
+ +6.65524867068381e-07,
+ +7.92081774850944e-06,
+ -8.38431935031437e-07,
+ -3.13729293039474e-06,
+ +2.26437394293508e-07,
+ -6.17956675718798e-07,
+ +4.41072789728568e-07,
+ -2.05939138961749e-06,
+ +3.05530765889062e-07,
+ +1.09870500850394e-06,
+ +2.55604128493328e-07,
+ -1.10632576288430e-06,
+ +4.24064502317720e-07,
+ -1.48742714363236e-07,
+ -3.65358605321870e-08,
+ -3.13929113824121e-07,
+ -1.40427658928543e-06,
+ +3.98735823970987e-06,
+ -1.66726367937788e-06,
+ +7.81044613674007e-06,
+ -7.70906117330346e-07,
+ -3.58454973026863e-06,
+ -2.07405380698624e-06,
+ +8.49053598560084e-08,
+ -1.85429878588305e-06,
+ +3.98503793880994e-07,
+ -1.20318157311402e-06,
+ -3.23303555253135e-07,
+ -5.14489840806171e-08,
+ -7.86713621561707e-07,
+ +1.12287948172496e-07,
+ +1.32396994910922e-08,
+ +8.57538996308302e-07,
+ -2.25968046107161e-06,
+ -7.31383501329018e-08,
+ +3.48453910846847e-06,
+ -6.79166345016712e-08,
+ -7.17397592069416e-07,
+ +7.51494568071616e-07,
+ -6.58813465512621e-08,
+ -1.83389229095523e-08,
+ +3.47900720716967e-07,
+ +1.56775770107133e-07,
+ +6.01480028266245e-08,
+ -2.85705759791037e-07,
+ +1.24556364757732e-07,
+ +7.94962714634282e-07,
+ -1.01504076174266e-07,
+ +7.78371954394854e-07,
+ -3.39155592987087e-08,
+ -1.99976693733041e-07,
+ +2.25342862520349e-07,
+ -1.34068036545794e-06,
+ -4.33830989016138e-07,
+ +6.16252807721855e-07,
+ +3.60858533131987e-07,
+ -1.49200294724235e-07,
+ -5.72455271560638e-07,
+ -9.33174256929361e-07,
+ -5.28963809365778e-08,
+ +1.10658737178409e-06,
+ -1.85336854302792e-08,
+ -2.08559267170114e-06,
+ -1.59158676601328e-07,
+ -1.63631992841529e-07,
+ -1.54028535148757e-06,
+ -1.40156213707031e-06,
+ -7.70906117482812e-07,
+ +1.26678407589510e-05,
+ -2.86059910209067e-06,
+ -5.24727272601304e-07,
+ -3.65064381346446e-06,
+ +9.44568065121753e-07,
+ -5.57284737460865e-07,
+ -1.52293587966085e-06,
+ -8.47517032682293e-08,
+ -1.10663399088509e-06,
+ -3.76958544809564e-07,
+ -1.59331466985759e-07,
+ -2.12732228649381e-07,
+ -9.24229928598805e-07,
+ +1.03402960225838e-08,
+ +1.34511329382043e-06,
+ -7.14459002078499e-08,
+ -2.52441026490512e-07,
+ +9.95939462356595e-07,
+ -1.05848170912871e-06,
+ +1.50303640147954e-06,
+ +1.26919796876880e-06,
+ +6.46314956865716e-07,
+ +2.71925636806500e-06,
+ -2.34539311544274e-06,
+ -3.73689650621026e-06,
+ +2.20763076449435e-07,
+ -1.60680908696206e-06,
+ +2.15396401208974e-08,
+ -1.31692530249810e-06,
+ -7.80936547525000e-07,
+ +2.87415984492423e-06,
+ -6.65722251698181e-07,
+ -4.20672314596139e-07,
+ +7.28717331008338e-06,
+ -7.41155860100283e-07,
+ -2.12645525770754e-06,
+ +7.90079094812809e-07,
+ -1.72005091952628e-07,
+ -9.77654355631137e-06,
+ -3.71301441485858e-06,
+ +5.44710667373040e-06,
+ -5.32706958251855e-06,
+ +5.91993254920188e-07,
+ -3.12187240783594e-06,
+ -4.52918584693216e-06,
+ -3.41454150388631e-06,
+ -7.23125525420231e-06,
+ -3.58454973833238e-06,
+ -2.86059909179075e-06,
+ +4.39511567679169e-05,
+ -1.26998730868418e-06,
+ -8.37612014253572e-07,
+ -3.81140833639464e-06,
+ -5.12617639850978e-07,
+ +2.55670112186278e-07,
+ +1.10284419807148e-06,
+ +4.33001191647761e-06,
+ +1.44854210549182e-06,
+ +2.42028161387734e-06,
+ -2.14221100304198e-08,
+ -1.34822253794481e-06,
+ -1.64984636253507e-06,
+ -4.61029479725811e-06,
+ -7.22072780155747e-07,
+ +9.61870923351076e-07,
+ -2.99694188126624e-06,
+ +2.98187751888394e-07,
+ +2.39477390400413e-06,
+ -1.48734791782866e-06,
+ -3.76034138434956e-07,
+ -3.56729115952965e-07,
+ -8.48006705449621e-06,
+ -2.11748167031788e-06,
+ -4.07446911242427e-07,
+ -3.15758133175189e-07,
+ +1.55779136281047e-06,
+ +1.19096283138584e-06,
+ +3.48447408283332e-07,
+ +3.76206723651067e-08,
+ +4.52557801956387e-07,
+ +6.13994359946002e-07,
+ -3.66823408196677e-07,
+ +8.75892501459247e-06,
+ -2.61901539003054e-07,
+ +3.10501489496633e-07,
+ -9.64593108790650e-07,
+ -2.56142530485047e-07,
+ +5.63994939164386e-06,
+ -3.66148183821025e-07,
+ +3.54750872554472e-07,
+ -4.29928487999571e-07,
+ -4.59662070234103e-07,
+ +2.02585333952873e-06,
+ -2.77463003893713e-06,
+ -2.20382153728904e-06,
+ -2.07405379281961e-06,
+ -5.24727285311033e-07,
+ -1.26998732914003e-06,
+ +9.44182187454569e-06,
+ -2.62096284501807e-06,
+ -2.82242109891917e-06,
+ -1.65989027796858e-06,
+ +8.75857955760900e-07,
+ +4.36935081631816e-06,
+ +2.03065590631855e-06,
+ -1.74721668367130e-06,
+ +3.97138334045678e-07,
+ -1.42700870752941e-06,
+ +8.22926056555969e-07,
+ +1.74513424293832e-06,
+ -5.30307710992859e-08,
+ +7.97081318422508e-07,
+ -1.05446687775364e-06,
+ +1.38723953114692e-07,
+ -8.15587452310468e-06,
+ +1.18090257362051e-07,
+ -4.23085867758805e-07,
+ +1.56572459085957e-06,
+ +2.68196483610135e-06,
+ -4.59402820723409e-07,
+ +1.14323301361383e-07,
+ -3.08204625617189e-07,
+ +3.36475597404807e-06,
+ -4.09257873121608e-07,
+ +1.93723213502922e-07,
+ -1.65097497474662e-06,
+ -1.15895492488127e-05,
+ -2.05029217866979e-06,
+ +4.93654587456623e-07,
+ -2.56412173804899e-06,
+ -3.61694345399908e-07,
+ -2.67885984669123e-06,
+ +3.85931654276123e-06,
+ +1.13646191961848e-05,
+ +1.44153164778675e-05,
+ -2.50886665644815e-07,
+ +7.96305553993956e-06,
+ -1.51580458505418e-06,
+ +3.31772710411059e-06,
+ -5.51606104465041e-06,
+ -2.02770504699610e-06,
+ -1.37991926892555e-06,
+ +2.15470894673315e-05,
+ +8.49053607369227e-08,
+ -3.65064381515853e-06,
+ -8.37612026179796e-07,
+ -2.62096286783714e-06,
+ -1.71726712772026e-06,
+ -1.78384598725531e-05,
+ -9.97277162921538e-06,
+ -1.08526583028424e-05,
+ +4.58363492594641e-07,
+ -3.48115387244793e-06,
+ +5.92891732752307e-07,
+ -6.35732067185026e-06,
+ -6.37263401668750e-07,
+ -1.36095632003862e-07,
+ +3.05819416692625e-08,
+ +1.15239572125140e-05,
+ +1.40626547339979e-07,
+ +1.12189590537254e-06,
+ +3.59929673264818e-06,
+ +4.01049306814733e-07,
+ +9.07213236659392e-06,
+ -4.37745086760786e-07,
+ -5.03855827023851e-06,
+ +5.85060882414421e-06,
+ +5.02527195457716e-07,
+ +1.22243180510041e-07,
+ +7.80723594065406e-07,
+ +9.45116043566329e-07,
+ +6.62069386283653e-06,
+ +5.57046637685238e-08,
+ -2.85908112370717e-07,
+ +9.08359275316839e-07,
+ +2.68351443090441e-07,
+ -1.69962595565490e-07,
+ +7.49375224349288e-07,
+ +2.67168629748812e-07,
+ -5.12357353131499e-06,
+ -1.39981977760926e-05,
+ +2.50677007866752e-06,
+ -7.02990935185248e-07,
+ -3.10019895823577e-07,
+ -3.18975103180187e-06,
+ -7.79728161242254e-08,
+ -4.51216063216078e-06,
+ +4.77236551278947e-06,
+ -1.92591201701764e-06,
+ -1.71302581003502e-06,
+ -2.18153038009447e-06,
+ -1.85429878444310e-06,
+ +9.44568064816821e-07,
+ -3.81140834293373e-06,
+ -2.82242111876092e-06,
+ -1.78384596737714e-05,
+ +3.80380053408740e-05,
+ -2.81498748056546e-06,
+ -2.09286230270426e-05,
+ -5.74532541564054e-07,
+ -1.07555820701860e-07,
+ -2.03522760330952e-06,
+ -4.84269379086885e-07,
+ +3.14082548136902e-06,
+ -5.49628425454765e-08,
+ +7.18532831432868e-10,
+ -7.97213361778792e-07,
+ -1.71140034584713e-08,
+ +1.35804605377914e-06,
+ -1.39830558698768e-06,
+ -3.61359338583783e-07,
+ +2.44913155658768e-05,
+ -1.10019067661238e-06,
+ -1.13155145218481e-06,
+ +6.65899104825322e-06,
+ -6.85015678485472e-07,
+ +3.01536219045562e-06,
+ +8.75812012007745e-08,
+ +9.47682105349962e-07,
+ -1.70403046023443e-06,
+ +4.25788752614163e-06,
+ -1.51351009700898e-07,
+ +1.28772044256605e-06,
+ +3.35800375814198e-07,
+ -5.94452133764539e-07,
+ -1.31181433141029e-07,
+ -5.76945685237112e-07,
+ +1.97608874565985e-06,
+ +3.20108997631501e-07,
+ -1.50442462326533e-05,
+ +4.88840379667696e-06,
+ -2.01048867064011e-06,
+ -6.53574509745477e-06,
+ -2.53372280762274e-06,
+ -3.72031646512196e-06,
+ -1.10371522986770e-06,
+ +5.73179986755110e-06,
+ +1.34209161186251e-07,
+ -2.12892290217399e-06,
+ +3.98503738993259e-07,
+ -5.57284698497350e-07,
+ -5.12617681863812e-07,
+ -1.65989023438873e-06,
+ -9.97277163790593e-06,
+ -2.81498753170931e-06,
+ +2.23975463931819e-05,
+ -2.69281510578221e-05,
+ +1.09210915830193e-06,
+ -8.00126481641450e-07,
+ +2.21531897149220e-06,
+ +7.07113954521131e-08,
+ +4.28668007767554e-07,
+ -7.59537784636400e-07,
+ -1.61902940903349e-07,
+ -1.11577529611766e-06,
+ +1.80469425380581e-07,
+ +6.08979511376772e-07,
+ -4.51553088133940e-07,
+ -2.73620129869888e-06,
+ +2.30771728007981e-05,
+ +2.27122274709234e-08,
+ +5.25710645038163e-06,
+ +8.41467170320862e-05,
+ +8.87509044239439e-07,
+ -6.64318972316109e-07,
+ -1.65140167190520e-06,
+ -5.40859782223409e-06,
+ +2.25719655165828e-07,
+ -7.56141677422321e-07,
+ +7.90209212761560e-08,
+ +1.02390749446912e-05,
+ +1.48358122763540e-06,
+ +2.33211951676136e-09,
+ -4.30488499326433e-07,
+ -1.70892662061585e-06,
+ -1.20312258093045e-04,
+ +5.14572412999882e-06,
+ +2.87224542823489e-05,
+ +7.30120259477535e-05,
+ -4.96345700839310e-07,
+ +5.30716908659116e-06,
+ +1.31047357784892e-06,
+ -3.02682613824459e-06,
+ +9.93590280377551e-07,
+ +4.75995061491725e-07,
+ -2.79623041436966e-07,
+ -1.69924370335184e-05,
+ -1.20318157230086e-06,
+ -1.52293584315585e-06,
+ +2.55670165214775e-07,
+ +8.75857942936821e-07,
+ -1.08526603507267e-05,
+ -2.09286220958485e-05,
+ -2.69281502868379e-05,
+ -3.42398403980715e-04,
+ -2.22385902793769e-06,
+ +4.02159147420923e-07,
+ +4.06357697084513e-06,
+ +7.32834411243963e-06,
+ -2.44759166903461e-06,
+ -1.65136743086089e-07,
+ -1.56620656158510e-06,
+ -1.11601438020656e-05,
+ -2.81931784402924e-07,
+ -6.20794856899488e-07,
+ -1.20194793191714e-06,
+ -5.63975880902320e-07,
+ +3.69589667947645e-04,
+ -5.08430445998970e-06,
+ -1.36694514795416e-05,
+ +1.02409473191526e-06,
+ +3.79002570421175e-05,
+ -6.35545661677704e-07,
+ -2.05540273064570e-08,
+ -1.48418927904573e-07,
+ -4.06977311517788e-07,
+ +3.75096210975055e-07,
+ -2.27701706773629e-07,
+ -2.85708744802906e-06,
+ +1.01373437910116e-07,
+ -2.61280304642984e-07,
+ +1.15340868553051e-06,
+ -3.56365402457066e-05,
+ -1.31823726620580e-06,
+ -1.62981439230507e-06,
+ +2.58792718413789e-06,
+ -6.34968508369106e-07,
+ -4.80928064293288e-06,
+ +4.03740673231572e-07,
+ +8.41458684414792e-09,
+ +3.46862987347951e-08,
+ -1.63713486703006e-07,
+ -2.27445554506670e-07,
+ -8.17201936511908e-07,
+ +1.65688565162891e-07,
+ -3.23303552559570e-07,
+ -8.47517033021107e-08,
+ +1.10284420633852e-06,
+ +4.36935090123745e-06,
+ +4.58363491476557e-07,
+ -5.74532542411087e-07,
+ +1.09210920429582e-06,
+ -2.22385903089595e-06,
+ -1.83368968106049e-05,
+ +3.19422661369426e-07,
+ -2.50331754091571e-07,
+ +1.21130672902032e-07,
+ +2.08292946320056e-07,
+ -9.33509707285818e-07,
+ +1.28691353956650e-06,
+ +1.61470481015408e-06,
+ +1.17842147681396e-07,
+ +2.80872569668501e-07,
+ -1.62595104345525e-06,
+ +2.32772754248930e-05,
+ +7.82690330538251e-07,
+ +1.30502200801981e-06,
+ -5.94361876467155e-06,
+ -2.32668109632255e-07,
+ -6.27629443066631e-07,
+ +6.81555941261928e-06,
+ -1.71886290210739e-06,
+ +7.04363232406039e-07,
+ -7.36707634322762e-09,
+ -8.51783829931743e-07,
+ +3.99128222334545e-07,
+ +1.41186696258873e-07,
+ +6.43208275812220e-08,
+ +1.28301464470801e-07,
+ -2.17534686221081e-06,
+ -2.46609491548889e-06,
+ -8.08332942783460e-07,
+ +2.96557806659043e-08,
+ +6.05570445337870e-07,
+ +1.20584205550681e-06,
+ +4.14630003410951e-07,
+ -5.68137322987615e-06,
+ +1.51974555959470e-06,
+ +1.92665132274766e-06,
+ -1.13025135108883e-07,
+ +5.61437186355108e-07,
+ -4.07557766425289e-07,
+ -2.40098833420273e-07,
+ -5.14489840467358e-08,
+ -1.10663399383276e-06,
+ +4.33001192782785e-06,
+ +2.03065591215037e-06,
+ -3.48115372675827e-06,
+ -1.07555825851820e-07,
+ -8.00126472069978e-07,
+ +4.02159031718340e-07,
+ +3.19422662047053e-07,
+ +6.13359317231109e-06,
+ +1.63681169238329e-06,
+ -2.50638532687695e-06,
+ -1.30385757038035e-06,
+ -1.11860493984999e-06,
+ -8.64172284993886e-07,
+ -7.75361080543454e-07,
+ -2.47743787934278e-07,
+ -1.11449952727976e-06,
+ +5.42820451131590e-07,
+ +2.47632863108367e-07,
+ +4.16959259617537e-06,
+ -2.84139392197410e-06,
+ -2.68001402924351e-06,
+ -3.56889925259645e-06,
+ -1.99161727792985e-08,
+ -1.71549756682949e-06,
+ +5.70216686160215e-06,
+ -4.81240475698746e-07,
+ -8.57538621140469e-07,
+ -4.23455551595571e-07,
+ +1.39585405158017e-06,
+ +4.95488644688610e-07,
+ +1.16290019118188e-06,
+ -3.21786833832639e-07,
+ -6.90393187023218e-08,
+ +2.48126479222334e-06,
+ -1.87995288478441e-06,
+ -1.59920088862965e-06,
+ -3.01146974009483e-07,
+ +2.67423872644587e-06,
+ +8.70725033077341e-09,
+ +1.50077111186282e-06,
+ -1.34210907519160e-06,
+ +5.33353627071625e-08,
+ -9.12418448222952e-07,
+ +7.85667403887174e-08,
+ -1.62945463735463e-06,
+ +2.57726743101144e-07,
+ -7.86713620765496e-07,
+ -3.76958541760245e-07,
+ +1.44854207784467e-06,
+ -1.74721680340364e-06,
+ +5.92891754639638e-07,
+ -2.03522758772411e-06,
+ +2.21531894704682e-06,
+ +4.06357714140580e-06,
+ -2.50331752465268e-07,
+ +1.63681167278295e-06,
+ -6.74969446816647e-06,
+ +7.62664933004400e-07,
+ +9.07335393579253e-07,
+ -9.73935059357179e-07,
+ +2.46861180900894e-06,
+ -3.20802711252391e-06,
+ +1.28021103039979e-06,
+ +8.03407596729872e-07,
+ -1.66794153345202e-06,
+ +1.86702528586521e-06,
+ +1.84314493206186e-06,
+ +2.92491749028477e-06,
+ -5.70777735904578e-06,
+ -1.87932851660572e-06,
+ -1.41608271366867e-07,
+ +7.72655630577564e-07,
+ -4.81436214069190e-07,
+ +1.13601601766834e-05,
+ +1.53570010847466e-07,
+ -1.42149789416615e-06,
+ -1.25488383058295e-06,
+ +1.33614915064485e-06,
+ -3.14801994489981e-07,
+ +8.47638716080243e-08,
+ -2.51827929958825e-06,
+ -1.70921045017564e-06,
+ -5.30363895450580e-06,
+ +6.41233491882631e-07,
+ +6.76153093238897e-07,
+ -1.83086673189033e-06,
+ +2.94881146021051e-08,
+ +1.53038600286023e-06,
+ +3.67911577024455e-08,
+ +4.96889953626408e-06,
+ -1.28314303633585e-06,
+ +1.58452146802740e-07,
+ +6.08899580502840e-07,
+ -2.18404596674432e-07,
+ +1.12287947698158e-07,
+ -1.59331465766032e-07,
+ +2.42028161292866e-06,
+ +3.97138331915390e-07,
+ -6.35732084217164e-06,
+ -4.84269311290368e-07,
+ +7.07115533051732e-08,
+ +7.32834530276868e-06,
+ +1.21130672122761e-07,
+ -2.50638531488297e-06,
+ +7.62664945777656e-07,
+ -7.16323555257915e-06,
+ -2.01251380096718e-06,
+ +8.48319534159836e-07,
+ -6.61852919102695e-07,
+ -2.65387594556709e-06,
+ -9.36792846393348e-08,
+ -4.81124439130642e-07,
+ +2.79569030333651e-06,
+ +1.67061999366172e-06,
+ +7.42263244382924e-06,
+ -2.09039692409499e-06,
+ -3.28633901479187e-06,
+ +1.72975672990043e-07,
+ -4.05755038299278e-07,
+ -8.69894156436523e-09,
+ -8.70503498377780e-07,
+ +1.70202204245610e-07,
+ -5.32154701320196e-06,
+ -1.02596442873613e-06,
+ +2.69691699392649e-07,
+ +1.52254198696388e-07,
+ +7.34930336937977e-08,
+ -2.69391773564118e-07,
+ +3.09780246302070e-06,
+ -2.47742120112852e-06,
+ -4.62595519751469e-08,
+ +5.39202461820939e-06,
+ +1.09709755642085e-06,
+ +7.36485264118372e-07,
+ -1.71361528546224e-07,
+ -1.19161887388725e-07,
+ -9.25900911473307e-07,
+ -1.25417513305764e-06,
+ +7.93048889226452e-07,
+ -6.97605638975762e-07,
+ +4.19793533092193e-08,
+ +3.12384883196013e-07,
+ +1.32396997452021e-08,
+ -2.12732231021074e-07,
+ -2.14221114534351e-08,
+ -1.42700870367117e-06,
+ -6.37263402786833e-07,
+ +3.14082546405567e-06,
+ +4.28667994367492e-07,
+ -2.44759166855815e-06,
+ +2.08292946726632e-07,
+ -1.30385757046505e-06,
+ +9.07335394155235e-07,
+ -2.01251380161092e-06,
+ +1.18287618697428e-05,
+ -1.79951021464053e-07,
+ -1.56235967692126e-06,
+ -3.05709675544436e-07,
+ -3.87011275857060e-07,
+ -5.99296186000421e-07,
+ -6.39957453674699e-06,
+ +8.67757747439763e-07,
+ -1.38428367028126e-06,
+ +3.47995465329785e-06,
+ -7.09953528257867e-07,
+ -6.50795248752581e-07,
+ +3.69329440348762e-07,
+ -8.53239469739652e-07,
+ -4.15797653808268e-07,
+ -1.44459914642087e-06,
+ -1.01377876664304e-06,
+ +5.44436499992618e-06,
+ -4.78035065619941e-07,
+ +1.73852007619092e-06,
+ -9.13456247694420e-07,
+ -1.42085519399220e-06,
+ -5.29874958471975e-07,
+ +2.52268441392985e-07,
+ +6.07550381531707e-07,
+ -1.40968723239682e-07,
+ -5.50632864431961e-07,
+ +2.15349629066066e-07,
+ -2.28216617187470e-07,
+ +5.41944269679979e-07,
+ +7.07093854090480e-08,
+ +1.70064434895780e-07,
+ -6.91401482168729e-07,
+ -3.35203963492068e-07,
+ +1.49234306634277e-06,
+ +3.06994985124287e-07,
+ +8.57538971557999e-07,
+ -9.24229910404537e-07,
+ -1.34822253885960e-06,
+ +8.22926112951423e-07,
+ -1.36095658194121e-07,
+ -5.49628844227854e-08,
+ -7.59537800679204e-07,
+ -1.65136745106262e-07,
+ -9.33509726801457e-07,
+ -1.11860494144241e-06,
+ -9.73935034860986e-07,
+ +8.48319507867933e-07,
+ -1.79951014653908e-07,
+ +1.10704729991035e-05,
+ -2.76458066186905e-06,
+ -2.20942624940446e-06,
+ -1.88538081997178e-06,
+ +1.00053915033058e-06,
+ -1.61334510502672e-06,
+ +2.52209612542449e-06,
+ -2.71547689880425e-06,
+ -1.89668257465634e-06,
+ +1.01460198995154e-06,
+ +7.34350669771117e-07,
+ -9.07995106850406e-08,
+ +3.96678519736925e-07,
+ +1.38275001388541e-06,
+ -1.30412252479347e-06,
+ +2.61979184073874e-07,
+ -4.88749671160096e-07,
+ +1.17108610042806e-05,
+ -1.08397623051258e-06,
+ -8.15855322454937e-06,
+ +7.86450629554521e-07,
+ -3.89129478331240e-06,
+ -1.14605816251500e-06,
+ -4.49143848130803e-08,
+ -4.74709850692857e-08,
+ +9.82869456057022e-07,
+ -7.55458762495727e-07,
+ -8.70917427984280e-07,
+ -4.04384484340055e-07,
+ -1.64180172652941e-06,
+ +6.43715897405905e-07,
+ +3.81429496616585e-08,
+ +1.49431484195861e-06,
+ +3.30322323300095e-06,
+ +4.77820269160649e-07,
+ -2.25968045975024e-06,
+ +1.03402955143640e-08,
+ -1.64984636981955e-06,
+ +1.74513425107830e-06,
+ +3.05819440409548e-08,
+ +7.18531408417517e-10,
+ -1.61902982458785e-07,
+ -1.56620657617313e-06,
+ +1.28691354305628e-06,
+ -8.64172284858360e-07,
+ +2.46861180924610e-06,
+ -6.61852918221780e-07,
+ -1.56235967712455e-06,
+ -2.76458067911464e-06,
+ +2.54245036322722e-05,
+ -2.36933071388412e-06,
+ +3.79129826593219e-06,
+ -1.80076970150619e-06,
+ -1.10698034430385e-05,
+ -6.56372217679396e-06,
+ -2.49168411452255e-06,
+ -1.74391566007295e-06,
+ +4.87070694657794e-07,
+ +2.47490064059957e-06,
+ -2.79936711274615e-06,
+ +7.87884915051966e-08,
+ +5.09863326511007e-07,
+ +1.62925936485230e-06,
+ +1.17120298207755e-07,
+ +1.73187085579816e-06,
+ -1.09934802221560e-06,
+ -1.08371441079900e-06,
+ +2.61288528519751e-07,
+ +2.31026326507602e-07,
+ +3.55728564372493e-06,
+ +3.18655913884424e-06,
+ -1.03128627898625e-05,
+ +2.47610794783010e-07,
+ +1.26971892582897e-06,
+ +2.92204706848901e-06,
+ +1.50290569186981e-07,
+ +2.23679620425842e-08,
+ +2.06799358131680e-07,
+ -1.41578774542233e-06,
+ +2.67491878019682e-07,
+ +2.87453131832138e-07,
+ +4.95708017717738e-07,
+ -9.15598792685941e-06,
+ -7.31383500143172e-08,
+ +1.34511329527733e-06,
+ -4.61029478712759e-06,
+ -5.30307481150469e-08,
+ +1.15239569747688e-05,
+ -7.97213382717446e-07,
+ -1.11577531548084e-06,
+ -1.11601443176694e-05,
+ +1.61470481154321e-06,
+ -7.75361072259472e-07,
+ -3.20802707952351e-06,
+ -2.65387596406629e-06,
+ -3.05709673782608e-07,
+ -2.20942625025149e-06,
+ -2.36933071429070e-06,
+ +2.24293319104441e-05,
+ -1.87669345616195e-06,
+ -1.80919295407337e-06,
+ -9.22161749440870e-06,
+ -6.10047782783978e-06,
+ +1.86252714369580e-05,
+ -3.09046834821923e-07,
+ -6.70404519861867e-07,
+ +1.93844392174268e-06,
+ +3.58672592884137e-07,
+ +6.82156298136873e-08,
+ +1.21272078440636e-06,
+ -3.43035940703952e-07,
+ +7.88690220445733e-08,
+ -8.80890390335008e-07,
+ -7.38203032400319e-06,
+ +2.82254253877821e-07,
+ +1.31040293270242e-05,
+ -9.88023219554261e-07,
+ -1.92900412667346e-06,
+ -1.41223752202040e-06,
+ -2.83319357388581e-07,
+ -2.13562920467611e-07,
+ -3.61110173057925e-06,
+ -1.38834952865749e-06,
+ -3.44848026773731e-07,
+ -5.24745433330741e-08,
+ -7.86316856285908e-07,
+ +1.22380077958035e-07,
+ +1.39042047325602e-08,
+ +8.65109030063555e-07,
+ -2.14578711722567e-06,
+ -7.57010022701086e-08,
+ +3.48453910477541e-06,
+ -7.14459009193576e-08,
+ -7.22072779512002e-07,
+ +7.97081308042119e-07,
+ +1.40626547136691e-07,
+ -1.71140017644054e-08,
+ +1.80469484029142e-07,
+ -2.81931770119831e-07,
+ +1.17842152560306e-07,
+ -2.47743787798753e-07,
+ +1.28021102785870e-06,
+ -9.36792832502008e-08,
+ -3.87011276602449e-07,
+ -1.88538079425585e-06,
+ +3.79129825488688e-06,
+ -1.87669345873693e-06,
+ +1.32908151591530e-05,
+ -4.92284062838621e-07,
+ -5.59087104290055e-06,
+ -4.15038457174757e-06,
+ -9.56601781718324e-07,
+ -1.88496510047031e-06,
+ -6.32619663991984e-07,
+ -1.14329376586612e-07,
+ -2.52106627268061e-07,
+ +1.12805320673945e-07,
+ -3.20818026897261e-07,
+ +9.81975708836151e-08,
+ -2.47574268553920e-07,
+ -1.40637529142469e-06,
+ +8.27077955480190e-07,
+ +2.37089445027493e-07,
+ -9.33995925508891e-07,
+ +6.53439512378730e-06,
+ -2.89592601313097e-06,
+ -2.05144689809696e-06,
+ -1.30398357246048e-07,
+ -2.02995535880874e-06,
+ +2.57336072750867e-06,
+ -1.13982426714352e-06,
+ -8.73348870520909e-08,
+ -1.04611488484859e-06,
+ -3.78182125999410e-07,
+ -1.89040650554830e-07,
+ -2.02414294737062e-07,
+ -9.11125309246519e-07,
+ +6.28212075187620e-09,
+ +1.27178646930224e-06,
+ -6.79166338579261e-08,
+ -2.52441165607203e-07,
+ +9.61870914745221e-07,
+ -1.05446686937649e-06,
+ +1.12189590618570e-06,
+ +1.35804619306524e-06,
+ +6.08979484390302e-07,
+ -6.20794856795726e-07,
+ +2.80872569770145e-07,
+ -1.11449952931264e-06,
+ +8.03407594154892e-07,
+ -4.81124437504339e-07,
+ -5.99296184577405e-07,
+ +1.00053913637148e-06,
+ -1.80076970353907e-06,
+ -1.80919294675500e-06,
+ -4.92284062940265e-07,
+ -1.46021786379648e-05,
+ +3.64835623395550e-06,
+ +2.67792104246753e-06,
+ -1.32262379444226e-06,
+ +1.76234495892464e-05,
+ -3.19178201214081e-06,
+ +1.45170851149911e-07,
+ +1.15442522330609e-06,
+ -2.16941049688599e-06,
+ -8.13758567603758e-08,
+ -2.52950123622367e-06,
+ +3.16781665867438e-06,
+ -5.01047861546351e-07,
+ -3.83407514999081e-06,
+ +3.54500423251926e-06,
+ -2.06589358650361e-06,
+ -2.90748964668590e-06,
+ +3.25348266468799e-06,
+ +2.32819701628047e-07,
+ -2.09962514693583e-06,
+ +4.81317331714329e-06,
+ -1.23473667390099e-07,
+ -1.29060903952046e-06,
+ +1.29172157967994e-06,
+ +4.29699348307867e-06,
+ +1.52472647234794e-06,
+ +2.50997962128455e-06,
+ +1.67831645865482e-08,
+ -1.30567978078083e-06,
+ -1.62452931918536e-06,
+ -4.58914810397154e-06,
+ -7.17397594932387e-07,
+ +9.95939463135866e-07,
+ -2.99694188201163e-06,
+ +1.38723933709167e-07,
+ +3.59929672749822e-06,
+ -1.39830558800412e-06,
+ -4.51553137329613e-07,
+ -1.20194802881771e-06,
+ -1.62595104945224e-06,
+ +5.42820453113647e-07,
+ -1.66794156448730e-06,
+ +2.79569030523386e-06,
+ -6.39957453613712e-06,
+ -1.61334511549605e-06,
+ -1.10698034387356e-05,
+ -9.22161747638384e-06,
+ -5.59087104500119e-06,
+ +3.64835624662712e-06,
+ +4.77957414366458e-05,
+ -4.09784758955019e-07,
+ -2.12273994953236e-06,
+ -1.26008441211873e-05,
+ -1.25818735776505e-06,
+ +3.53626177751715e-07,
+ -3.61623616168449e-05,
+ -2.45329934982935e-06,
+ +3.73648402283205e-07,
+ -1.72197411250864e-06,
+ -2.43250562372694e-06,
+ +3.24999900906328e-06,
+ -9.27455259537276e-07,
+ +3.23230447446434e-06,
+ -1.37912968141396e-06,
+ -2.05173635703772e-06,
+ +2.37239866389871e-07,
+ +4.30391882974768e-05,
+ -7.36445504222422e-07,
+ -7.94809364333617e-07,
+ -1.82628935871135e-06,
+ -7.94905191813682e-07,
+ +4.24674825539024e-06,
+ +2.01258075478814e-06,
+ -1.73040635317959e-07,
+ +4.30445548250794e-07,
+ -1.40517747811213e-06,
+ +4.84954801763787e-06,
+ +1.70938589824453e-06,
+ -1.43991323347441e-08,
+ +7.51494539441902e-07,
+ -1.05848169032458e-06,
+ +2.98187714957757e-07,
+ -8.15587482912498e-06,
+ +4.01049287773432e-07,
+ -3.61359371516424e-07,
+ -2.73620168818157e-06,
+ -5.63975927874532e-07,
+ +2.32772754305851e-05,
+ +2.47632854146759e-07,
+ +1.86702511923689e-06,
+ +1.67061996266031e-06,
+ +8.67757755977855e-07,
+ +2.52209644699208e-06,
+ -6.56372215219613e-06,
+ -6.10047783542920e-06,
+ -4.15038459607435e-06,
+ +2.67792105869668e-06,
+ -4.09784744386053e-07,
+ -1.25274547156727e-05,
+ -2.41765156411626e-06,
+ -1.97555574086043e-06,
+ +1.57867864804719e-06,
+ -8.83753938743344e-05,
+ -1.19767163615881e-06,
+ -1.18242984170234e-07,
+ -1.79236886817317e-06,
+ -3.20828991225462e-06,
+ -6.67026933090942e-08,
+ +1.03815984099160e-06,
+ +7.19292834252009e-07,
+ -1.70299038055704e-05,
+ +1.23494884430404e-07,
+ -2.45629115903586e-07,
+ -1.68510300511119e-06,
+ -3.24565167290577e-07,
+ +1.41872354067751e-04,
+ -5.18262556771488e-06,
+ -2.45268049972303e-05,
+ -7.92763178935713e-05,
+ +4.16950807715266e-07,
+ -8.03667030358423e-06,
+ +5.22422655181313e-07,
+ -1.83385974069879e-06,
+ -8.55419305166940e-07,
+ -2.56640938952186e-07,
+ -4.86910678301007e-07,
+ +2.92158064478386e-05,
+ -6.58813581725542e-08,
+ +1.50303637594997e-06,
+ +2.39477383636008e-06,
+ +1.18090227817542e-07,
+ +9.07213404124582e-06,
+ +2.44913143791667e-05,
+ +2.30771714472310e-05,
+ +3.69589668558887e-04,
+ +7.82690324871600e-07,
+ +4.16959287019900e-06,
+ +1.84314491010677e-06,
+ +7.42263270775623e-06,
+ -1.38428367472818e-06,
+ -2.71547692125062e-06,
+ -2.49168412594055e-06,
+ +1.86252723000677e-05,
+ -9.56601782040197e-07,
+ -1.32262378177065e-06,
+ -2.12274006749016e-06,
+ -2.41765161384556e-06,
+ -4.01573429984211e-04,
+ +9.53108602783602e-07,
+ +1.15983816754088e-05,
+ +2.17501942461663e-07,
+ -1.59163708796962e-06,
+ -9.10093960642452e-09,
+ -1.58002475888019e-06,
+ +8.69506464780943e-07,
+ +5.42933475630204e-06,
+ -1.26978968281093e-07,
+ -5.33332735342795e-08,
+ +4.09881442073887e-08,
+ -2.02364299837078e-07,
+ -2.14808965208387e-06,
+ +4.85950762529869e-06,
+ -7.99263441618025e-07,
+ -1.25199157681791e-07,
+ -5.25054749705338e-06,
+ +4.69700143325625e-07,
+ +3.24970701932360e-06,
+ -6.04129252629788e-07,
+ +9.74833057429837e-08,
+ -2.10010388335076e-06,
+ -1.68960378315057e-06,
+ +3.14717979843206e-06,
+ -8.33670534271551e-08,
+ +5.33435702023115e-10,
+ +1.57098618917264e-07,
+ -1.83389220625193e-08,
+ +1.26919809477342e-06,
+ -1.48734790725768e-06,
+ -4.23085862401322e-07,
+ -4.37745073004971e-07,
+ -1.10019078723489e-06,
+ +2.27122514927778e-08,
+ -5.08430445638557e-06,
+ +1.30502201039150e-06,
+ -2.84139392082213e-06,
+ +2.92491750810634e-06,
+ -2.09039692395947e-06,
+ +3.47995465245081e-06,
+ -1.89668257031953e-06,
+ -1.74391566051341e-06,
+ -3.09046857657931e-07,
+ -1.88496509908117e-06,
+ +1.76234495633611e-05,
+ -1.26008441104131e-05,
+ -1.97555573042498e-06,
+ +9.53108508788356e-07,
+ +4.70980278371037e-06,
+ -5.69149697574243e-07,
+ +3.61363253116055e-06,
+ +2.61865910498445e-06,
+ +3.82651364207957e-07,
+ -2.48006071000299e-07,
+ +1.75809147147659e-06,
+ +9.19581784817583e-07,
+ -3.95660495777658e-06,
+ -1.44064020560003e-07,
+ +6.14366618966215e-07,
+ -2.40628650529783e-06,
+ +2.84701254416622e-06,
+ -3.43700764666977e-08,
+ +5.39664491208940e-07,
+ +9.21540100597086e-07,
+ +6.19533596922820e-07,
+ -8.04540197750018e-06,
+ +3.09560411185976e-06,
+ +1.05389750664495e-06,
+ -3.73257910446815e-07,
+ +2.14737256473111e-06,
+ -1.45086467842211e-06,
+ +3.56571491727335e-07,
+ -4.95380731818686e-06,
+ -2.67290209232760e-07,
+ -8.79128581709993e-07,
+ +3.47900774418856e-07,
+ +6.46314915005348e-07,
+ -3.76034103350851e-07,
+ +1.56572454247281e-06,
+ -5.03855818816102e-06,
+ -1.13155135072720e-06,
+ +5.25710643327156e-06,
+ -1.36694518690031e-05,
+ -5.94361870613310e-06,
+ -2.68001401298895e-06,
+ -5.70777739017424e-06,
+ -3.28633898482385e-06,
+ -7.09953545910034e-07,
+ +1.01460197993961e-06,
+ +4.87070644166160e-07,
+ -6.70404513356654e-07,
+ -6.32619597610011e-07,
+ -3.19178206315761e-06,
+ -1.25818739853275e-06,
+ +1.57867823102746e-06,
+ +1.15983821954870e-05,
+ -5.69149643254020e-07,
+ +2.36405646742266e-05,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "PbH4-BiH3": Refs(
+ {
+ "grad": torch.tensor(
+ [
+ [
+ +1.8639257945602507e-011,
+ -5.7589265336385818e-005,
+ +0.0000000000000000,
+ ],
+ [
+ +1.1824683859244095e-005,
+ -1.7476694890228468e-005,
+ +0.0000000000000000,
+ ],
+ [
+ -5.9123515874157095e-006,
+ -1.7476703035791685e-005,
+ -1.0240481396622120e-005,
+ ],
+ [
+ -5.9123515874157095e-006,
+ -1.7476703035791685e-005,
+ +1.0240481396622120e-005,
+ ],
+ [
+ -1.5695651885305123e-012,
+ -1.8948774924372469e-005,
+ +0.0000000000000000,
+ ],
+ [
+ +1.4523056809395272e-011,
+ +9.0457983350326898e-005,
+ +0.0000000000000000,
+ ],
+ [
+ +3.3441903047977048e-006,
+ +1.2836714954362650e-005,
+ -5.7923124762056244e-006,
+ ],
+ [
+ -6.6883928867521314e-006,
+ +1.2836727963519770e-005,
+ +0.0000000000000000,
+ ],
+ [
+ +3.3441903047977048e-006,
+ +1.2836714954362650e-005,
+ +5.7923124762056244e-006,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "hessian": torch.tensor(
+ [
+ -2.10251881000936e-05,
+ +1.56778573675227e-05,
+ +4.58658049531517e-06,
+ +4.58658049531093e-06,
+ +1.27004565632629e-06,
+ -3.20178128905553e-06,
+ -6.99597297336907e-07,
+ -4.94900030634601e-07,
+ -6.99597297347495e-07,
+ +3.24937085159800e-11,
+ -5.10398603376360e-06,
+ +2.55197525080619e-06,
+ +2.55197525082313e-06,
+ +3.40254828977947e-12,
+ -6.58246243970262e-13,
+ +6.08526809019224e-07,
+ -1.21705332402484e-06,
+ +6.08526809002283e-07,
+ -2.11754359404454e-17,
+ +2.12017134651781e-18,
+ +6.40348511345190e-06,
+ -6.40348511345190e-06,
+ +2.62532909257553e-21,
+ +1.48230765769503e-17,
+ -1.18182232642152e-07,
+ -2.11499742872152e-18,
+ +1.18182232633681e-07,
+ +1.56778573633057e-05,
+ -8.10455889687384e-06,
+ -2.56401693069407e-06,
+ -2.56401693068984e-06,
+ -1.72557088875752e-06,
+ -6.80418585446685e-07,
+ -2.08619712547455e-08,
+ +2.44881170954203e-09,
+ -2.08619712632158e-08,
+ -5.20807345800377e-06,
+ +6.37758712365301e-06,
+ +1.76719491040619e-07,
+ +1.76719491057559e-07,
+ +6.94930612315755e-07,
+ -2.14921929991179e-06,
+ -1.64684822667006e-08,
+ -3.57269959229162e-08,
+ -1.64684822921116e-08,
+ +2.11809935601860e-18,
+ +2.18427784399067e-18,
+ -7.04550495576572e-07,
+ +7.04550495593512e-07,
+ -2.11753390052173e-18,
+ +1.58818677610181e-17,
+ +2.35575185880953e-09,
+ -1.05465528100511e-18,
+ -2.35575186727986e-09,
+ +4.58658047648103e-06,
+ -2.56401693817761e-06,
+ +1.19859490580249e-06,
+ -9.75712621134828e-07,
+ -1.06323616861457e-06,
+ -1.13330792285054e-06,
+ -1.61850669504910e-08,
+ -2.24901027324464e-08,
+ -1.02265618319088e-08,
+ +2.60402027059268e-06,
+ -8.28517453482867e-08,
+ -3.18877666059694e-06,
+ -9.38670696245148e-08,
+ -3.47467129345474e-07,
+ +1.07461024833385e-06,
+ +3.88511321231704e-08,
+ -2.23825707012409e-08,
+ +1.78635242957124e-08,
+ +6.40348781270446e-06,
+ -1.12946188851450e-06,
+ -5.37111517004220e-06,
+ +2.12454327151317e-07,
+ -3.82398697073881e-07,
+ +2.61475696160849e-07,
+ +2.22443467234179e-09,
+ -3.98465104432747e-09,
+ +7.31813601620337e-09,
+ +4.58658047646621e-06,
+ -2.56401693812679e-06,
+ -9.75712621164474e-07,
+ +1.19859490578979e-06,
+ -1.06323616861510e-06,
+ -1.13330792285001e-06,
+ -1.02265618276736e-08,
+ -2.24901027493871e-08,
+ -1.61850669356679e-08,
+ +2.60402027069432e-06,
+ -8.28517453652273e-08,
+ -9.38670696245148e-08,
+ -3.18877666061388e-06,
+ -3.47467129294652e-07,
+ +1.07461024840161e-06,
+ +1.78635243126531e-08,
+ -2.23825706758299e-08,
+ +3.88511321485814e-08,
+ -6.40348781266846e-06,
+ +1.12946188851020e-06,
+ -2.12454327193668e-07,
+ +5.37111517006762e-06,
+ +3.82398697068585e-07,
+ -2.61475696178848e-07,
+ -7.31813601620337e-09,
+ +3.98465104432437e-09,
+ -2.22443465116597e-09,
+ +1.27004564782676e-06,
+ -1.72557088721505e-06,
+ -1.06323616846207e-06,
+ -1.06323616846630e-06,
+ +5.74542830317392e-06,
+ -2.15084144784711e-06,
+ -4.03075144340199e-07,
+ -2.06438990331727e-07,
+ -4.03075144346552e-07,
+ -4.20805968195936e-14,
+ +1.22691265025225e-06,
+ -6.13456419739706e-07,
+ -6.13456419722765e-07,
+ -3.88618716200273e-14,
+ +4.03187682893047e-14,
+ +4.15028885527964e-07,
+ -8.30057541145775e-07,
+ +4.15028885502553e-07,
+ +6.35319946880474e-18,
+ -2.08449009492384e-18,
+ -3.82398696905593e-07,
+ +3.82398696888652e-07,
+ +3.17641394188197e-18,
+ +1.05879118406788e-17,
+ -1.13528159871704e-07,
+ +1.06189311136495e-18,
+ +1.13528159850528e-07,
+ -3.20178129073190e-06,
+ -6.80418583448746e-07,
+ -1.13330792258796e-06,
+ -1.13330792258796e-06,
+ -2.15084144751022e-06,
+ -3.54608405623710e-06,
+ +2.21880801397117e-06,
+ +7.40812519518843e-06,
+ +2.21880801396481e-06,
+ +2.95174041459179e-13,
+ -2.23850988653813e-06,
+ +1.11925527065577e-06,
+ +1.11925527063883e-06,
+ +1.20448085099562e-13,
+ -4.38168788361934e-11,
+ +2.44187608865813e-06,
+ -4.88370943079062e-06,
+ +2.44187608865813e-06,
+ +2.11758236813575e-18,
+ -2.15067060237983e-18,
+ +2.61475703404039e-07,
+ -2.61475703412510e-07,
+ +2.11758236813575e-18,
+ +2.11758236813575e-18,
+ -2.99609392867838e-06,
+ -2.11758236813575e-18,
+ +2.99609392867838e-06,
+ -6.99597297409720e-07,
+ -2.08619711996883e-08,
+ -1.61850669526086e-08,
+ -1.02265618467319e-08,
+ -4.03075144381751e-07,
+ +2.21880801037128e-06,
+ +1.56645109601592e-06,
+ -2.05489603810740e-06,
+ -5.80417026472373e-07,
+ +5.86639614110710e-07,
+ -2.83448339398387e-08,
+ +4.10119676358346e-08,
+ +1.98053356890602e-08,
+ +4.16523173119916e-07,
+ +2.58090457439333e-06,
+ -3.53991084071089e-06,
+ -3.42333365498907e-08,
+ -4.23956536889402e-08,
+ -1.18182242325100e-07,
+ +4.92465210582561e-09,
+ -3.44455333126017e-10,
+ -7.31813661759677e-09,
+ -1.13528159804769e-07,
+ -2.99609315114236e-06,
+ +2.37925005318782e-06,
+ +8.96233635949202e-07,
+ -4.49421960218199e-08,
+ -4.94900031165328e-07,
+ +2.44881177306950e-09,
+ -2.24901027705629e-08,
+ -2.24901027620926e-08,
+ -2.06438990588991e-07,
+ +7.40812519342078e-06,
+ -2.05489603733872e-06,
+ -2.55446270323975e-06,
+ -2.05489603733660e-06,
+ -1.17327908422582e-06,
+ -3.96106930960452e-08,
+ -1.26671498912659e-08,
+ -1.26671498912659e-08,
+ -8.33046191690200e-07,
+ -5.16176592178156e-06,
+ +7.66285422685160e-08,
+ +7.07977910593748e-06,
+ +7.66285422685160e-08,
+ -6.46234853557053e-23,
+ +0.00000000000000e00,
+ -1.41576950968389e-09,
+ +1.41576950968389e-09,
+ +1.05880733993921e-18,
+ +1.05879118406788e-18,
+ +8.06350015246183e-07,
+ -3.18361138256347e-18,
+ -8.06350015250418e-07,
+ -6.99597297408661e-07,
+ -2.08619711996883e-08,
+ -1.02265618509670e-08,
+ -1.61850669568438e-08,
+ -4.03075144381751e-07,
+ +2.21880801037181e-06,
+ -5.80417026463903e-07,
+ -2.05489603811163e-06,
+ +1.56645109602439e-06,
+ +5.86639614144591e-07,
+ -2.83448339567793e-08,
+ +1.98053357060009e-08,
+ +4.10119676019533e-08,
+ +4.16523173119916e-07,
+ +2.58090457439333e-06,
+ -4.23956536804699e-08,
+ -3.42333365414203e-08,
+ -3.53991084070242e-06,
+ +1.18182242325100e-07,
+ -4.92465210790985e-09,
+ +7.31813660912644e-09,
+ +3.44455333126017e-10,
+ +1.13528159807945e-07,
+ +2.99609315113919e-06,
+ +4.49421960175848e-08,
+ -8.96233635948144e-07,
+ -2.37925005317512e-06,
+ +3.23527312819770e-11,
+ -5.20807327771280e-06,
+ +2.60402026397311e-06,
+ +2.60402026396464e-06,
+ -4.22509242250484e-14,
+ +2.96235479621207e-13,
+ +5.86639614398701e-07,
+ -1.17327908576318e-06,
+ +5.86639614411407e-07,
+ -3.20372390189388e-05,
+ +2.68706524448502e-06,
+ +2.68704763582201e-06,
+ +2.68704763582201e-06,
+ +2.51496339929614e-05,
+ -1.45217700840380e-05,
+ +4.44940438142187e-06,
+ +4.44940583105947e-06,
+ +4.44940438142187e-06,
+ +2.11745312116504e-18,
+ +2.11758135839379e-18,
+ +4.51029293013639e-06,
+ -4.51029293015333e-06,
+ -1.05879118406788e-18,
+ -6.35274710440725e-18,
+ -1.01608960056912e-06,
+ -2.11861634390144e-18,
+ +1.01608960057335e-06,
+ -5.10398583972321e-06,
+ +6.37758692743783e-06,
+ -8.28517446410142e-08,
+ -8.28517446367790e-08,
+ +1.22691264981584e-06,
+ -2.23850988721152e-06,
+ -2.83448338996046e-08,
+ -3.96106932654518e-08,
+ -2.83448338996046e-08,
+ +2.68706523116966e-06,
+ -5.49938460264073e-06,
+ -9.08884371539039e-07,
+ -9.08884371555980e-07,
+ -2.40377543717912e-06,
+ +6.63265673557945e-06,
+ +1.61748374949468e-07,
+ +7.77100662498773e-08,
+ +1.61748374974879e-07,
+ -2.11751774465040e-18,
+ -9.92606637644046e-20,
+ -1.56223424521617e-07,
+ +1.56223424521617e-07,
+ -1.05877502819654e-18,
+ -1.05879118406788e-18,
+ -3.09916397702080e-08,
+ -2.11758236813575e-18,
+ +3.09916397617376e-08,
+ +2.55197522778559e-06,
+ +1.76719483205564e-07,
+ -3.18877664705712e-06,
+ -9.38670690527675e-08,
+ -6.13456419269212e-07,
+ +1.11925527105705e-06,
+ +4.10119677057148e-08,
+ -1.26671500903187e-08,
+ +1.98053357292943e-08,
+ +2.68704762274383e-06,
+ -9.08884371522098e-07,
+ -5.49936911935280e-06,
+ -9.08882828871814e-07,
+ -2.40377658448525e-06,
+ +6.63265835856234e-06,
+ +1.61748396226936e-07,
+ +1.61748477923263e-07,
+ +7.77100487925282e-08,
+ +4.42015025542016e-06,
+ -6.35999247242776e-09,
+ -5.52311996845925e-06,
+ +1.49862471489072e-07,
+ -1.06253707467083e-06,
+ +1.93860581242985e-06,
+ +9.05150212611737e-09,
+ +4.00431583416556e-08,
+ +3.43038358178335e-08,
+ +2.55197522778453e-06,
+ +1.76719483205564e-07,
+ -9.38670690442972e-08,
+ -3.18877664704865e-06,
+ -6.13456419271329e-07,
+ +1.11925527105758e-06,
+ +1.98053357250591e-08,
+ -1.26671500945538e-08,
+ +4.10119677099500e-08,
+ +2.68704762267606e-06,
+ -9.08884371522098e-07,
+ -9.08882828888754e-07,
+ -5.49936911938668e-06,
+ -2.40377658450219e-06,
+ +6.63265835856234e-06,
+ +7.77100487840579e-08,
+ +1.61748477931734e-07,
+ +1.61748396235406e-07,
+ -4.42015025542652e-06,
+ +6.35999247229567e-09,
+ -1.49862471472131e-07,
+ +5.52311996846772e-06,
+ +1.06253707466977e-06,
+ -1.93860581243832e-06,
+ -3.43038358135983e-08,
+ -4.00431583458928e-08,
+ -9.05150212188221e-09,
+ +3.39128772280722e-12,
+ +6.94930608021298e-07,
+ -3.47467117491248e-07,
+ -3.47467117482778e-07,
+ -3.92354806764619e-14,
+ +1.20831896903786e-13,
+ +4.16523173750955e-07,
+ -8.33046193405441e-07,
+ +4.16523173755190e-07,
+ +2.51496340058363e-05,
+ -2.40377542989464e-06,
+ -2.40377657714994e-06,
+ -2.40377657716688e-06,
+ -1.36350475531565e-05,
+ -1.07611292097430e-05,
+ +2.15262348769313e-06,
+ +2.15262436581219e-06,
+ +2.15262348771007e-06,
+ -5.16987882845642e-22,
+ +2.11757731942596e-18,
+ -6.01830354969181e-07,
+ +6.01830354969181e-07,
+ +1.24400209309733e-21,
+ -1.05879118406788e-17,
+ -7.21439285180113e-07,
+ -1.06292708713064e-18,
+ +7.21439285192818e-07,
+ -6.54498408620596e-13,
+ -2.14921930571397e-06,
+ +1.07461024989663e-06,
+ +1.07461024990510e-06,
+ +4.03722122953958e-14,
+ -4.39188573678781e-11,
+ +2.58090460442912e-06,
+ -5.16176587000668e-06,
+ +2.58090460442276e-06,
+ -1.45217700828860e-05,
+ +6.63265673612155e-06,
+ +6.63265835915526e-06,
+ +6.63265835913832e-06,
+ -1.07611291950047e-05,
+ -7.02283344589702e-06,
+ +4.13593064406373e-06,
+ +4.13589798121120e-06,
+ +4.13593064402137e-06,
+ +2.11764699162111e-18,
+ -2.11758337787771e-18,
+ +1.86127847559573e-06,
+ -1.86127847557032e-06,
+ +0.00000000000000e00,
+ +2.01170324972896e-17,
+ -4.47025769820642e-06,
+ +5.16987882845642e-22,
+ +4.47025769819371e-06,
+ +6.08526809568353e-07,
+ -1.64684827325687e-08,
+ +3.88511322459902e-08,
+ +1.78635243973564e-08,
+ +4.15028885613928e-07,
+ +2.44187610335151e-06,
+ -3.53991085921644e-06,
+ +7.66285408158545e-08,
+ -4.23956540489292e-08,
+ +4.44940438310747e-06,
+ +1.61748374974879e-07,
+ +1.61748396201525e-07,
+ +7.77100487840579e-08,
+ +2.15262348466922e-06,
+ +4.13593062372646e-06,
+ -9.91706932484866e-06,
+ -6.11047417481777e-07,
+ -6.11048569311060e-07,
+ -1.05399942983574e-06,
+ -3.53533065134485e-08,
+ -3.41449779861253e-09,
+ -3.09405239449378e-08,
+ -7.18851102231071e-07,
+ -4.22945217333026e-06,
+ +6.13130533183975e-06,
+ +4.71237152384566e-09,
+ -6.40066697102940e-08,
+ -1.21705332445233e-06,
+ -3.57269962956107e-08,
+ -2.23825706377134e-08,
+ -2.23825706419486e-08,
+ -8.30057541387905e-07,
+ -4.88370935980555e-06,
+ -3.42333366303588e-08,
+ +7.07977903647655e-06,
+ -3.42333366324764e-08,
+ +4.44940583258413e-06,
+ +7.77100662159959e-08,
+ +1.61748477923263e-07,
+ +1.61748477923263e-07,
+ +2.15262436275440e-06,
+ +4.13589796118734e-06,
+ -6.11047417363193e-07,
+ -9.91704034361637e-06,
+ -6.11047417363193e-07,
+ -2.11758236813575e-18,
+ -2.15067060237983e-18,
+ +3.19388249571742e-08,
+ -3.19388249487039e-08,
+ +3.17636547426796e-18,
+ -2.11758236813575e-18,
+ +6.87185346832299e-08,
+ -2.11654839237006e-18,
+ -6.87185346705244e-08,
+ +6.08526809568353e-07,
+ -1.64684827325687e-08,
+ +1.78635243973564e-08,
+ +3.88511322417550e-08,
+ +4.15028885617104e-07,
+ +2.44187610333774e-06,
+ -4.23956540256358e-08,
+ +7.66285407650326e-08,
+ -3.53991085918044e-06,
+ +4.44940438310747e-06,
+ +1.61748374974879e-07,
+ +7.77100487671172e-08,
+ +1.61748396218465e-07,
+ +2.15262348465228e-06,
+ +4.13593062365870e-06,
+ -6.11048569277179e-07,
+ -6.11047417447896e-07,
+ -9.91706932479784e-06,
+ +1.05399942984209e-06,
+ +3.53533065135147e-08,
+ +3.09405239449378e-08,
+ +3.41449779014220e-09,
+ +7.18851102234247e-07,
+ +4.22945217332073e-06,
+ +6.40066696637072e-08,
+ -4.71237152384617e-09,
+ -6.13130533178893e-06,
+ -5.50571092597868e-17,
+ +0.00000000000000e00,
+ +6.40348795906333e-06,
+ -6.40348795902097e-06,
+ -5.29104786349837e-22,
+ -3.70576914423756e-18,
+ -1.18182242408442e-07,
+ +0.00000000000000e00,
+ +1.18182242406324e-07,
+ -6.77626357803440e-17,
+ +0.00000000000000e00,
+ +4.42015038074634e-06,
+ -4.42015038071246e-06,
+ +1.69406589450860e-17,
+ +0.00000000000000e00,
+ -1.05399942934022e-06,
+ +0.00000000000000e00,
+ +1.05399942937410e-06,
+ -2.10250692972243e-05,
+ +8.89530708556727e-07,
+ +1.19806845201433e-05,
+ +1.19806845200586e-05,
+ +1.27004546204882e-06,
+ -3.20178147228887e-06,
+ -5.63132203607377e-07,
+ -7.67830034077425e-07,
+ -5.63132203607377e-07,
+ -5.29392360859670e-18,
+ -8.47032947254300e-18,
+ -1.12946188905247e-06,
+ +1.12946188906941e-06,
+ +5.29528877972484e-19,
+ +1.58818677610181e-18,
+ +4.92465210702326e-09,
+ -4.23516473627150e-18,
+ -4.92465210067051e-09,
+ -6.77626357803440e-17,
+ +0.00000000000000e00,
+ -6.35999238706884e-09,
+ +6.35999238706884e-09,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ -3.53533064165212e-08,
+ +0.00000000000000e00,
+ +3.53533064419322e-08,
+ +8.89530700163080e-07,
+ +4.29960486408968e-06,
+ -1.50514786563553e-06,
+ -1.50514786563553e-06,
+ -8.42458431855913e-07,
+ -1.28427081321833e-06,
+ -1.88294474777752e-08,
+ -1.44516929699990e-08,
+ -1.88294474693049e-08,
+ +6.40348522763317e-06,
+ -7.04550501878497e-07,
+ -5.37111531029815e-06,
+ -2.12454332860319e-07,
+ -3.82398697223586e-07,
+ +2.61475702846586e-07,
+ -3.44455326773270e-10,
+ -1.41576948850807e-09,
+ +7.31813661124402e-09,
+ +4.51029303796368e-06,
+ -1.56223429341234e-07,
+ -5.52312008963578e-06,
+ -1.49862476435744e-07,
+ -6.01830364193370e-07,
+ +1.86127847123351e-06,
+ -3.41449806966307e-09,
+ +3.19388246353017e-08,
+ +3.09405236569466e-08,
+ +1.19806845033878e-05,
+ -1.50514786653837e-06,
+ -5.00342777186618e-06,
+ -3.09345106980407e-06,
+ -1.50479164181523e-06,
+ -8.31382144832865e-07,
+ -2.35063546090021e-08,
+ -1.72013426514562e-08,
+ -1.77631125208703e-09,
+ -6.40348522758658e-06,
+ +7.04550501878497e-07,
+ +2.12454332868789e-07,
+ +5.37111531024733e-06,
+ +3.82398697229408e-07,
+ -2.61475702831763e-07,
+ -7.31813661547918e-09,
+ +1.41576948850807e-09,
+ +3.44455320420523e-10,
+ -4.51029303796368e-06,
+ +1.56223429324294e-07,
+ +1.49862476401863e-07,
+ +5.52312008956802e-06,
+ +6.01830364193370e-07,
+ -1.86127847130128e-06,
+ -3.09405236400059e-08,
+ -3.19388246353017e-08,
+ +3.41449804425208e-09,
+ +1.19806845033201e-05,
+ -1.50514786654052e-06,
+ -3.09345106980407e-06,
+ -5.00342777179842e-06,
+ -1.50479164182370e-06,
+ -8.31382144826513e-07,
+ -1.77631124361670e-09,
+ -1.72013426525124e-08,
+ -2.35063546301779e-08,
+ +3.17656742265969e-18,
+ +0.00000000000000e00,
+ -3.82398696774303e-07,
+ +3.82398696782773e-07,
+ -2.11514283156357e-18,
+ +6.35274710440725e-18,
+ -1.13528159856881e-07,
+ +8.47032947254300e-18,
+ +1.13528159842058e-07,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ -1.06253707516383e-06,
+ +1.06253707518077e-06,
+ -3.38813178901720e-17,
+ -6.77626357803440e-17,
+ -7.18851102072271e-07,
+ +0.00000000000000e00,
+ +7.18851102038390e-07,
+ +1.27004545353263e-06,
+ -8.42458431860658e-07,
+ -1.50479164087277e-06,
+ -1.50479164088124e-06,
+ +5.74542672852916e-06,
+ -2.15084138588029e-06,
+ -2.71984122278350e-07,
+ -4.68620837948104e-07,
+ -2.71984122295291e-07,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +2.61475696454134e-07,
+ -2.61475696441429e-07,
+ -1.29246970711411e-22,
+ -2.64697796016969e-18,
+ -2.99609319592711e-06,
+ +0.00000000000000e00,
+ +2.99609319592500e-06,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +1.93860581179563e-06,
+ -1.93860581177869e-06,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ -4.22945220586585e-06,
+ +0.00000000000000e00,
+ +4.22945220586585e-06,
+ -3.20178147390532e-06,
+ -1.28427081322053e-06,
+ -8.31382143635372e-07,
+ -8.31382143643843e-07,
+ -2.15084138551640e-06,
+ -3.54614562775819e-06,
+ +5.67839795983496e-06,
+ +4.89007668017723e-07,
+ +5.67839795982649e-06,
+ -1.18182232564080e-07,
+ +2.35575190539634e-09,
+ +2.22443468928245e-09,
+ -7.31813600773304e-09,
+ -1.13528159773637e-07,
+ -2.99609396778218e-06,
+ +2.37925009617263e-06,
+ +8.06350016461675e-07,
+ +4.49421969048518e-08,
+ -1.01608959937480e-06,
+ -3.09916395626849e-08,
+ +9.05150227434814e-09,
+ -3.43038356780730e-08,
+ -7.21439283824860e-07,
+ -4.47025771088650e-06,
+ +6.13130536114709e-06,
+ +6.87185353735617e-08,
+ +6.40066704387423e-08,
+ -5.63132203973192e-07,
+ -1.88294474748053e-08,
+ -2.35063545878263e-08,
+ -1.77631122667604e-09,
+ -2.71984122469811e-07,
+ +5.67839795628484e-06,
+ -1.18086774516732e-06,
+ -1.07190989968547e-06,
+ -2.54639187169835e-06,
+ +1.05888811929591e-18,
+ -1.69406589450860e-17,
+ -3.98465104058706e-09,
+ +3.98465104058706e-09,
+ +5.29274422998896e-19,
+ +4.76456032830544e-18,
+ +8.96233636242083e-07,
+ +0.00000000000000e00,
+ -8.96233636254789e-07,
+ +6.77626357803440e-17,
+ +0.00000000000000e00,
+ +4.00431582864129e-08,
+ -4.00431582694722e-08,
+ +0.00000000000000e00,
+ -6.77626357803440e-17,
+ +4.71237139275924e-09,
+ +0.00000000000000e00,
+ -4.71237140969990e-09,
+ -7.67830034079374e-07,
+ -1.44516929720134e-08,
+ -1.72013426519659e-08,
+ -1.72013426434956e-08,
+ -4.68620837942433e-07,
+ +4.89007666103822e-07,
+ -1.07190989936978e-06,
+ +2.94011738295056e-06,
+ -1.07190989939095e-06,
+ +1.18182232565138e-07,
+ -2.35575191386667e-09,
+ +7.31813602043854e-09,
+ -2.22443468504729e-09,
+ +1.13528159772048e-07,
+ +2.99609396777477e-06,
+ -4.49421969344979e-08,
+ -8.06350016410853e-07,
+ -2.37925009620016e-06,
+ +1.01608959944256e-06,
+ +3.09916395796256e-08,
+ +3.43038356950137e-08,
+ -9.05150225740748e-09,
+ +7.21439283841801e-07,
+ +4.47025771075098e-06,
+ -6.40066704641533e-08,
+ -6.87185354074430e-08,
+ -6.13130536120639e-06,
+ -5.63132203964722e-07,
+ -1.88294474745737e-08,
+ -1.77631122667604e-09,
+ -2.35063546132373e-08,
+ -2.71984122468752e-07,
+ +5.67839795628589e-06,
+ -2.54639187165177e-06,
+ -1.07190989968441e-06,
+ -1.18086774522661e-06,
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "AmF3": Refs(
+ {
+ "grad": torch.tensor(
+ [
+ [
+ -3.091609121445480e-10,
+ 2.958185285646392e-12,
+ 3.762196005417977e-07,
+ ],
+ [
+ -1.982582438864074e-05,
+ -5.360338422731795e-06,
+ -1.431825059800939e-07,
+ ],
+ [
+ 5.276022219053056e-06,
+ 1.985418497606805e-05,
+ -1.170419972562382e-07,
+ ],
+ [
+ 1.455011133049982e-05,
+ -1.449384951152156e-05,
+ -1.159950973054663e-07,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "hessian": torch.tensor(
+ [],
+ dtype=torch.double,
+ ),
+ }
+ ),
+}
+
+samples: Dict[str, Record] = merge_nested_dicts(mols, refs)
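+
+# Editorial usage sketch (illustrative only, not taken from the upstream file):
+# each merged record pairs a molecule's geometry with its reference data, e.g.
+#
+#     sample = samples["PbH4-BiH3"]
+#     numbers, positions = sample["numbers"], sample["positions"]
+#     ref_grad = sample["grad"]       # shape (natoms, 3)
+#     ref_hess = sample["hessian"]    # stored flattened; the tests rebuild it
+#                                     # with tad_mctc.convert.reshape_fortran,
+#                                     # presumably to (*positions.shape, *positions.shape)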
diff --git a/tests/test_grad/test_hessian.py b/test/test_grad/test_hessian.py
similarity index 87%
rename from tests/test_grad/test_hessian.py
rename to test/test_grad/test_hessian.py
index df3c6c6..8d9fe12 100644
--- a/tests/test_grad/test_hessian.py
+++ b/test/test_grad/test_hessian.py
@@ -19,13 +19,15 @@
import pytest
import torch
+from tad_mctc.autograd import hessian
+from tad_mctc.batch import pack
+from tad_mctc.convert import reshape_fortran
-from tad_dftd3 import dftd3, utils
-from tad_dftd3._typing import DD, Tensor
+from tad_dftd3 import dftd3
+from tad_dftd3.typing import DD, Tensor
from ..conftest import DEVICE
-from ..samples import samples
-from ..utils import reshape_fortran
+from .samples import samples
sample_list = ["LiH", "SiH4", "PbH4-BiH3", "MB16_43_01"]
@@ -40,7 +42,7 @@ def test_fail() -> None:
# differentiable variable is not a tensor
with pytest.raises(RuntimeError):
- utils.hessian(dftd3, (numbers, positions, param), argnums=2)
+ hessian(dftd3, (numbers, positions, param), argnums=2)
def test_zeros() -> None:
@@ -49,7 +51,7 @@ def test_zeros() -> None:
def dummy(x: Tensor) -> Tensor:
return torch.zeros_like(x)
- hess = utils.hessian(dummy, (d,), argnums=0)
+ hess = hessian(dummy, (d,), argnums=0)
zeros = torch.zeros([*d.shape, *d.shape])
assert pytest.approx(zeros.cpu()) == hess.detach().cpu()
@@ -80,7 +82,7 @@ def test_single(dtype: torch.dtype, name: str) -> None:
# variable to be differentiated
positions.requires_grad_(True)
- hess = utils.hessian(dftd3, (numbers, positions, param), argnums=1)
+ hess = hessian(dftd3, (numbers, positions, param), argnums=1)
assert pytest.approx(ref.cpu(), abs=tol, rel=tol) == hess.detach().cpu()
positions.detach_()
@@ -94,13 +96,13 @@ def skip_test_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
dd: DD = {"device": DEVICE, "dtype": dtype}
sample1, sample2 = samples[name1], samples[name2]
- numbers = utils.pack(
+ numbers = pack(
[
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
]
)
- positions = utils.pack(
+ positions = pack(
[
sample1["positions"].to(**dd),
sample2["positions"].to(**dd),
@@ -116,7 +118,7 @@ def skip_test_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
"a2": torch.tensor(5.00000000, **dd),
}
- ref = utils.pack(
+ ref = pack(
[
reshape_fortran(
sample1["hessian"].to(**dd),
@@ -132,7 +134,7 @@ def skip_test_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
# variable to be differentiated
positions.requires_grad_(True)
- hess = utils.hessian(dftd3, (numbers, positions, param), argnums=1)
+ hess = hessian(dftd3, (numbers, positions, param), argnums=1)
assert pytest.approx(ref.cpu(), abs=tol, rel=tol) == hess.detach().cpu()
positions.detach_()
diff --git a/tests/test_grad/test_nan.py b/test/test_grad/test_nan.py
similarity index 95%
rename from tests/test_grad/test_nan.py
rename to test/test_grad/test_nan.py
index 6e120e4..deb7e83 100644
--- a/tests/test_grad/test_nan.py
+++ b/test/test_grad/test_nan.py
@@ -19,12 +19,13 @@
import pytest
import torch
+from tad_mctc.batch import pack
+from tad_mctc.data.molecules import mols as samples
-from tad_dftd3 import dftd3, utils
-from tad_dftd3._typing import DD
+from tad_dftd3 import dftd3
+from tad_dftd3.typing import DD
from ..conftest import DEVICE
-from ..molecules import mols as samples
tol = 1e-8
@@ -94,13 +95,13 @@ def test_single(dtype: torch.dtype) -> None:
def test_batch(dtype: torch.dtype, name: str) -> None:
dd: DD = {"device": DEVICE, "dtype": dtype}
- nums = utils.pack(
+ nums = pack(
(
numbers.to(DEVICE),
samples[name]["numbers"].to(DEVICE),
)
)
- pos = utils.pack(
+ pos = pack(
(
positions.to(**dd),
samples[name]["positions"].to(**dd),
diff --git a/tests/test_grad/test_param.py b/test/test_grad/test_param.py
similarity index 85%
rename from tests/test_grad/test_param.py
rename to test/test_grad/test_param.py
index ee5d9d5..dc80e94 100644
--- a/tests/test_grad/test_param.py
+++ b/test/test_grad/test_param.py
@@ -19,13 +19,14 @@
import pytest
import torch
+from tad_mctc.autograd import dgradcheck, dgradgradcheck
+from tad_mctc.batch import pack
+from tad_mctc.data.molecules import mols as samples
-from tad_dftd3 import dftd3, utils
-from tad_dftd3._typing import DD, Callable, Tensor, Tuple
+from tad_dftd3 import dftd3
+from tad_dftd3.typing import DD, Callable, Tensor
-from ..conftest import DEVICE
-from ..samples import samples
-from ..utils import dgradcheck, dgradgradcheck
+from ..conftest import DEVICE, FAST_MODE
sample_list = ["LiH", "AmF3", "SiH4", "MB16_43_01"]
@@ -34,9 +35,9 @@
def gradchecker(
dtype: torch.dtype, name: str
-) -> Tuple[
+) -> tuple[
Callable[[Tensor, Tensor, Tensor, Tensor], Tensor], # autograd function
- Tuple[Tensor, Tensor, Tensor, Tensor], # differentiable variables
+ tuple[Tensor, Tensor, Tensor, Tensor], # differentiable variables
]:
dd: DD = {"device": DEVICE, "dtype": dtype}
@@ -69,7 +70,7 @@ def test_gradcheck(dtype: torch.dtype, name: str) -> None:
gradient from `torch.autograd.gradcheck`.
"""
func, diffvars = gradchecker(dtype, name)
- assert dgradcheck(func, diffvars, atol=tol)
+ assert dgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
@pytest.mark.grad
@@ -81,25 +82,25 @@ def test_gradgradcheck(dtype: torch.dtype, name: str) -> None:
gradient from `torch.autograd.gradgradcheck`.
"""
func, diffvars = gradchecker(dtype, name)
- assert dgradgradcheck(func, diffvars, atol=tol)
+ assert dgradgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
def gradchecker_batch(
dtype: torch.dtype, name1: str, name2: str
-) -> Tuple[
+) -> tuple[
Callable[[Tensor, Tensor, Tensor, Tensor], Tensor], # autograd function
- Tuple[Tensor, Tensor, Tensor, Tensor], # differentiable variables
+ tuple[Tensor, Tensor, Tensor, Tensor], # differentiable variables
]:
dd: DD = {"device": DEVICE, "dtype": dtype}
sample1, sample2 = samples[name1], samples[name2]
- numbers = utils.pack(
+ numbers = pack(
[
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
]
)
- positions = utils.pack(
+ positions = pack(
[
sample1["positions"].to(**dd),
sample2["positions"].to(**dd),
@@ -132,7 +133,7 @@ def test_gradcheck_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
gradient from `torch.autograd.gradcheck`.
"""
func, diffvars = gradchecker_batch(dtype, name1, name2)
- assert dgradcheck(func, diffvars, atol=tol)
+ assert dgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
@pytest.mark.grad
@@ -145,4 +146,4 @@ def test_gradgradcheck_batch(dtype: torch.dtype, name1: str, name2: str) -> None
gradient from `torch.autograd.gradgradcheck`.
"""
func, diffvars = gradchecker_batch(dtype, name1, name2)
- assert dgradgradcheck(func, diffvars, atol=tol)
+ assert dgradgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
diff --git a/tests/test_grad/test_pos.py b/test/test_grad/test_pos.py
similarity index 91%
rename from tests/test_grad/test_pos.py
rename to test/test_grad/test_pos.py
index 0875b4a..8b02302 100644
--- a/tests/test_grad/test_pos.py
+++ b/test/test_grad/test_pos.py
@@ -19,13 +19,14 @@
import pytest
import torch
+from tad_mctc.autograd import dgradcheck, dgradgradcheck
+from tad_mctc.batch import pack
-from tad_dftd3 import dftd3, utils
-from tad_dftd3._typing import DD, Callable, Tensor, Tuple
+from tad_dftd3 import dftd3
+from tad_dftd3.typing import DD, Callable, Tensor
-from ..conftest import DEVICE
-from ..samples import samples
-from ..utils import dgradcheck, dgradgradcheck
+from ..conftest import DEVICE, FAST_MODE
+from .samples import samples
sample_list = ["LiH", "AmF3", "SiH4", "MB16_43_01"]
@@ -34,7 +35,7 @@
def gradchecker(
dtype: torch.dtype, name: str
-) -> Tuple[
+) -> tuple[
Callable[[Tensor], Tensor], # autograd function
Tensor, # differentiable variables
]:
@@ -70,7 +71,7 @@ def test_gradcheck(dtype: torch.dtype, name: str) -> None:
gradient from `torch.autograd.gradcheck`.
"""
func, diffvars = gradchecker(dtype, name)
- assert dgradcheck(func, diffvars, atol=tol)
+ assert dgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
@pytest.mark.grad
@@ -82,25 +83,25 @@ def test_gradgradcheck(dtype: torch.dtype, name: str) -> None:
gradient from `torch.autograd.gradgradcheck`.
"""
func, diffvars = gradchecker(dtype, name)
- assert dgradgradcheck(func, diffvars, atol=tol)
+ assert dgradgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
def gradchecker_batch(
dtype: torch.dtype, name1: str, name2: str
-) -> Tuple[
+) -> tuple[
Callable[[Tensor], Tensor], # autograd function
Tensor, # differentiable variables
]:
dd: DD = {"device": DEVICE, "dtype": dtype}
sample1, sample2 = samples[name1], samples[name2]
- numbers = utils.pack(
+ numbers = pack(
[
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
]
)
- positions = utils.pack(
+ positions = pack(
[
sample1["positions"].to(**dd),
sample2["positions"].to(**dd),
@@ -133,7 +134,7 @@ def test_gradcheck_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
gradient from `torch.autograd.gradcheck`.
"""
func, diffvars = gradchecker_batch(dtype, name1, name2)
- assert dgradcheck(func, diffvars, atol=tol)
+ assert dgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
@pytest.mark.grad
@@ -146,7 +147,7 @@ def test_gradgradcheck_batch(dtype: torch.dtype, name1: str, name2: str) -> None
gradient from `torch.autograd.gradgradcheck`.
"""
func, diffvars = gradchecker_batch(dtype, name1, name2)
- assert dgradgradcheck(func, diffvars, atol=tol)
+ assert dgradgradcheck(func, diffvars, atol=tol, fast_mode=FAST_MODE)
@pytest.mark.grad
diff --git a/tests/test_utils/__init__.py b/test/test_model/__init__.py
similarity index 100%
rename from tests/test_utils/__init__.py
rename to test/test_model/__init__.py
diff --git a/test/test_model/samples.py b/test/test_model/samples.py
new file mode 100644
index 0000000..c4bdfff
--- /dev/null
+++ b/test/test_model/samples.py
@@ -0,0 +1,1477 @@
+# This file is part of tad-dftd3.
+# SPDX-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Collection of test samples.
+"""
+from typing import Dict
+
+import torch
+from tad_mctc.data.molecules import merge_nested_dicts, mols
+
+from tad_dftd3.typing import Molecule, Tensor, TypedDict
+
+
+class Refs(TypedDict):
+ """
+ Format of reference values: coordination numbers, weights of the atomic
+ reference systems, and the resulting C6 dispersion coefficients.
+ """
+
+ cn: Tensor
+ """Coordination number."""
+
+ weights: Tensor
+ """Weights for atomic reference systems."""
+
+ c6: Tensor
+ """C6 coefficients."""
+
+
+class Record(Molecule, Refs):
+ """Store for molecular information and reference values."""
+
+
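+# Editorial note (an illustrative sketch, not taken from the upstream file):
+# the model tests read these references roughly as
+#
+#     sample = samples["SiH4"]          # geometry merged with references
+#     ref_cn = sample["cn"]             # per-atom coordination numbers
+#     ref_weights = sample["weights"]   # (natoms, nrefs) reference weights
+#     ref_c6 = sample["c6"]             # (natoms, natoms) C6 coefficients
+#
+# where `samples` is assumed to be the dict merged from `mols` and `refs`
+# (mirroring the gradient test samples); empty tensors mark molecules without
+# stored reference values.
+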
+refs: Dict[str, Refs] = {
+ "LiH": Refs(
+ {
+ "cn": torch.tensor([], dtype=torch.double),
+ "weights": torch.tensor([], dtype=torch.double),
+ "c6": torch.tensor([], dtype=torch.double),
+ }
+ ),
+ "SiH4": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +3.89022710629348e00,
+ +9.72651698030399e-01,
+ +9.72651698030399e-01,
+ +9.72651698030399e-01,
+ +9.72651698030399e-01,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +5.00040274964878e-27,
+ +9.51882822683413e-16,
+ +2.54542759053980e-07,
+ +2.64839020628269e-02,
+ +9.73515843394413e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.77454000458529e-01,
+ +2.25459995414711e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +1.51435152661277e02,
+ +2.13999949401839e01,
+ +2.13999949401839e01,
+ +2.13999949401839e01,
+ +2.13999949401839e01,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ [
+ +2.13999949401839e01,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ +3.10444218682464e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "MB16_43_01": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +4.15066368951397e00,
+ +9.78868026389781e-01,
+ +2.01080985633859e00,
+ +1.47865697827818e00,
+ +1.03577822442117e00,
+ +1.01206994314781e00,
+ +1.50329777127401e00,
+ +1.99858468272609e00,
+ +3.89181927539324e00,
+ +1.04323373360740e00,
+ +1.01526584450636e00,
+ +1.99315213227354e00,
+ +4.63526560889683e00,
+ +3.87312260639335e00,
+ +3.99316800677884e00,
+ +5.45068226903888e00,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +4.61254014807976e-13,
+ +9.99999999999539e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.78431945472983e-01,
+ +2.15680545270172e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.33252077840319e-08,
+ +1.55830681937747e-02,
+ +9.84416838481017e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.99424904108747e-01,
+ +5.75095891252906e-04,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.35771400228363e-02,
+ +9.86422859977164e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.82992148346892e-01,
+ +1.70078516531077e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.99519469064248e-01,
+ +4.80530935751615e-04,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.13181694792597e-07,
+ +1.71503960869602e-02,
+ +9.82849490731345e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.25926325849160e-25,
+ +6.73263145629432e-14,
+ +1.94165275506323e-05,
+ +9.99980583472382e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.86403420777318e-01,
+ +1.35965792226822e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83377538259043e-01,
+ +1.66224617409573e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +6.63636803493899e-06,
+ +9.99993363631965e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +4.78432084484299e-38,
+ +4.72470789879862e-24,
+ +2.64845507076682e-13,
+ +7.08386079833514e-06,
+ +9.99992916138937e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +5.57929648633356e-26,
+ +1.48261370770972e-14,
+ +2.19715394953033e-06,
+ +1.59978977357256e-01,
+ +8.40018825488779e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.11473605172390e-26,
+ +1.33471958830444e-14,
+ +8.80046323582265e-06,
+ +9.99991199536751e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +3.64404060381414e-41,
+ +1.64269207706493e-24,
+ +4.50618875164815e-11,
+ +9.99999999954938e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +1.86105200000295e02,
+ +2.10637967443952e01,
+ +3.33393205851663e01,
+ +2.07648885028596e01,
+ +2.50462879811640e01,
+ +2.09988662957750e01,
+ +2.07635420397407e01,
+ +3.33469365793004e01,
+ +4.44925237765056e01,
+ +2.09502948932743e01,
+ +2.09933789215108e01,
+ +1.11982907430142e02,
+ +6.38123316167329e01,
+ +6.45263838745924e01,
+ +4.44924561012183e01,
+ +1.60659400002538e02,
+ ],
+ [
+ +2.10637967443952e01,
+ +3.10104597907844e00,
+ +5.44104128537895e00,
+ +3.06460553022247e00,
+ +4.32334790530218e00,
+ +3.09313018967101e00,
+ +3.06444138044805e00,
+ +5.44213006593611e00,
+ +6.85278222181144e00,
+ +3.08720876141063e00,
+ +3.09246121386693e00,
+ +1.67260314058114e01,
+ +9.32232615546217e00,
+ +9.40964391592266e00,
+ +6.85277268893649e00,
+ +2.13390348985262e01,
+ ],
+ [
+ +3.33393205851663e01,
+ +5.44104128537895e00,
+ +1.04060662211154e01,
+ +5.38395096621487e00,
+ +8.57335406068136e00,
+ +5.42863982058108e00,
+ +5.38369379696184e00,
+ +1.04078420241680e01,
+ +1.26306609797298e01,
+ +5.41936287038137e00,
+ +5.42759175329962e00,
+ +2.97398273677055e01,
+ +1.63311877211176e01,
+ +1.64664919883409e01,
+ +1.26306453073937e01,
+ +3.55362026390315e01,
+ ],
+ [
+ +2.07648885028596e01,
+ +3.06460553022247e00,
+ +5.38395096621487e00,
+ +3.02866858604329e00,
+ +4.28051674110831e00,
+ +3.05679911479676e00,
+ +3.02850670435781e00,
+ +5.38502623652600e00,
+ +6.77670142181813e00,
+ +3.05095950404731e00,
+ +3.05613938235978e00,
+ +1.65326001720343e01,
+ +9.21222013302869e00,
+ +9.29832544621136e00,
+ +6.77669200846439e00,
+ +2.10658829179373e01,
+ ],
+ [
+ +2.50462879811640e01,
+ +4.32334790530218e00,
+ +8.57335406068136e00,
+ +4.28051674110831e00,
+ +7.16510167612946e00,
+ +4.31404389049233e00,
+ +4.28032380369504e00,
+ +8.57471450429471e00,
+ +1.02476219308165e01,
+ +4.30708399636246e00,
+ +4.31325759357762e00,
+ +2.37716272051766e01,
+ +1.29661524144552e01,
+ +1.30671606537876e01,
+ +1.02476098345869e01,
+ +2.75317129235774e01,
+ ],
+ [
+ +2.09988662957750e01,
+ +3.09313018967101e00,
+ +5.42863982058108e00,
+ +3.05679911479676e00,
+ +4.31404389049233e00,
+ +3.08523815906549e00,
+ +3.05663545770877e00,
+ +5.42972566637027e00,
+ +6.83625554309033e00,
+ +3.07933450364290e00,
+ +3.08457119115511e00,
+ +1.66840132375542e01,
+ +9.29840833574711e00,
+ +9.38546272191831e00,
+ +6.83624603617842e00,
+ +2.12796993628112e01,
+ ],
+ [
+ +2.07635420397407e01,
+ +3.06444138044805e00,
+ +5.38369379696184e00,
+ +3.02850670435781e00,
+ +4.28032380369504e00,
+ +3.05663545770877e00,
+ +3.02834483288917e00,
+ +5.38476900641466e00,
+ +6.77635870797818e00,
+ +3.05079621551476e00,
+ +3.05597576690949e00,
+ +1.65317288410165e01,
+ +9.21172414905175e00,
+ +9.29782400064014e00,
+ +6.77634929516284e00,
+ +2.10646524765623e01,
+ ],
+ [
+ +3.33469365793004e01,
+ +5.44213006593611e00,
+ +1.04078420241680e01,
+ +5.38502623652600e00,
+ +8.57471450429471e00,
+ +5.42972566637027e00,
+ +5.38476900641466e00,
+ +1.04096182819417e01,
+ +1.26329675237980e01,
+ +5.42044652080921e00,
+ +5.42867735106695e00,
+ +2.97456233348293e01,
+ +1.63344547422583e01,
+ +1.64697916829337e01,
+ +1.26329518479543e01,
+ +3.55438470017986e01,
+ ],
+ [
+ +4.44925237765056e01,
+ +6.85278222181144e00,
+ +1.26306609797298e01,
+ +6.77670142181813e00,
+ +1.02476219308165e01,
+ +6.83625554309033e00,
+ +6.77635870797818e00,
+ +1.26329675237980e01,
+ +1.55817742139549e01,
+ +6.82389271515335e00,
+ +6.83485884749660e00,
+ +3.72381957696309e01,
+ +2.05886604700550e01,
+ +2.07697483350778e01,
+ +1.55817539255380e01,
+ +4.59400237556194e01,
+ ],
+ [
+ +2.09502948932743e01,
+ +3.08720876141063e00,
+ +5.41936287038137e00,
+ +3.05095950404731e00,
+ +4.30708399636246e00,
+ +3.07933450364290e00,
+ +3.05079621551476e00,
+ +5.42044652080921e00,
+ +6.82389271515335e00,
+ +3.07344414324100e00,
+ +3.07866903774297e00,
+ +1.66525814300412e01,
+ +9.28051654425321e00,
+ +9.36737391255552e00,
+ +6.82388322766317e00,
+ +2.12353132504549e01,
+ ],
+ [
+ +2.09933789215108e01,
+ +3.09246121386693e00,
+ +5.42759175329962e00,
+ +3.05613938235978e00,
+ +4.31325759357762e00,
+ +3.08457119115511e00,
+ +3.05597576690949e00,
+ +5.42867735106695e00,
+ +6.83485884749660e00,
+ +3.07866903774297e00,
+ +3.08390439293497e00,
+ +1.66804622161148e01,
+ +9.29638700321132e00,
+ +9.38341913120739e00,
+ +6.83484934277887e00,
+ +2.12746848234302e01,
+ ],
+ [
+ +1.11982907430142e02,
+ +1.67260314058114e01,
+ +2.97398273677055e01,
+ +1.65326001720343e01,
+ +2.37716272051766e01,
+ +1.66840132375542e01,
+ +1.65317288410165e01,
+ +2.97456233348293e01,
+ +3.72381957696309e01,
+ +1.66525814300412e01,
+ +1.66804622161148e01,
+ +9.03985128506635e01,
+ +5.02708240732723e01,
+ +5.07336615572416e01,
+ +3.72381448273806e01,
+ +1.14240508062473e02,
+ ],
+ [
+ +6.38123316167329e01,
+ +9.32232615546217e00,
+ +1.63311877211176e01,
+ +9.21222013302869e00,
+ +1.29661524144552e01,
+ +9.29840833574711e00,
+ +9.21172414905175e00,
+ +1.63344547422583e01,
+ +2.05886604700550e01,
+ +9.28051654425321e00,
+ +9.29638700321132e00,
+ +5.02708240732723e01,
+ +2.80315233611672e01,
+ +2.82953233553807e01,
+ +2.05886317918059e01,
+ +6.43332290431569e01,
+ ],
+ [
+ +6.45263838745924e01,
+ +9.40964391592266e00,
+ +1.64664919883409e01,
+ +9.29832544621136e00,
+ +1.30671606537876e01,
+ +9.38546272191831e00,
+ +9.29782400064014e00,
+ +1.64697916829337e01,
+ +2.07697483350778e01,
+ +9.36737391255552e00,
+ +9.38341913120739e00,
+ +5.07336615572416e01,
+ +2.82953233553807e01,
+ +2.85620489152008e01,
+ +2.07697193680983e01,
+ +6.49891385224176e01,
+ ],
+ [
+ +4.44924561012183e01,
+ +6.85277268893649e00,
+ +1.26306453073937e01,
+ +6.77669200846439e00,
+ +1.02476098345869e01,
+ +6.83624603617842e00,
+ +6.77634929516284e00,
+ +1.26329518479543e01,
+ +1.55817539255380e01,
+ +6.82388322766317e00,
+ +6.83484934277887e00,
+ +3.72381448273806e01,
+ +2.05886317918059e01,
+ +2.07697193680983e01,
+ +1.55817336371518e01,
+ +4.59399560920098e01,
+ ],
+ [
+ +1.60659400002538e02,
+ +2.13390348985262e01,
+ +3.55362026390315e01,
+ +2.10658829179373e01,
+ +2.75317129235774e01,
+ +2.12796993628112e01,
+ +2.10646524765623e01,
+ +3.55438470017986e01,
+ +4.59400237556194e01,
+ +2.12353132504549e01,
+ +2.12746848234302e01,
+ +1.14240508062473e02,
+ +6.43332290431569e01,
+ +6.49891385224176e01,
+ +4.59399560920098e01,
+ +1.53594500003907e02,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "PbH4-BiH3": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +3.93882078385452e00,
+ +9.83202447815541e-01,
+ +9.83202575698739e-01,
+ +9.83202575698739e-01,
+ +9.86589814809524e-01,
+ +2.97146042634822e00,
+ +9.87045550753296e-01,
+ +9.87045669088046e-01,
+ +9.87045550753296e-01,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +1.10706478210448e-27,
+ +2.26599265549586e-16,
+ +1.05725184914546e-07,
+ +1.41167956987467e-02,
+ +9.85883098576068e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79089158960743e-01,
+ +2.09108410392566e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79089178059127e-01,
+ +2.09108219408730e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79089178059127e-01,
+ +2.09108219408730e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79589093181984e-01,
+ +2.04109068180158e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +4.51567208622703e-16,
+ +1.31099357580350e-07,
+ +1.71917663233654e-02,
+ +9.82808102577277e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79655454782451e-01,
+ +2.03445452175495e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79655471986197e-01,
+ +2.03445280138028e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.79655454782451e-01,
+ +2.03445452175495e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +4.56209027097378e02,
+ +3.72742207576559e01,
+ +3.72742203407386e01,
+ +3.72742203407386e01,
+ +3.72633072045004e01,
+ +4.93665090064696e02,
+ +3.72618585322083e01,
+ +3.72618581566509e01,
+ +3.72618585322083e01,
+ ],
+ [
+ +3.72742207576559e01,
+ +3.09876483566212e00,
+ +3.09876480252469e00,
+ +3.09876480252469e00,
+ +3.09789740447628e00,
+ +4.02583668373402e01,
+ +3.09778226108473e00,
+ +3.09778223123467e00,
+ +3.09778226108473e00,
+ ],
+ [
+ +3.72742203407386e01,
+ +3.09876480252469e00,
+ +3.09876476938727e00,
+ +3.09876476938727e00,
+ +3.09789737134977e00,
+ +4.02583663834069e01,
+ +3.09778222795966e00,
+ +3.09778219810960e00,
+ +3.09778222795966e00,
+ ],
+ [
+ +3.72742203407386e01,
+ +3.09876480252469e00,
+ +3.09876476938727e00,
+ +3.09876476938727e00,
+ +3.09789737134977e00,
+ +4.02583663834069e01,
+ +3.09778222795966e00,
+ +3.09778219810960e00,
+ +3.09778222795966e00,
+ ],
+ [
+ +3.72633072045004e01,
+ +3.09789740447628e00,
+ +3.09789737134977e00,
+ +3.09789737134977e00,
+ +3.09703025884030e00,
+ +4.02464843239764e01,
+ +3.09691515335283e00,
+ +3.09691512351259e00,
+ +3.09691515335283e00,
+ ],
+ [
+ +4.93665090064696e02,
+ +4.02583668373402e01,
+ +4.02583663834069e01,
+ +4.02583663834069e01,
+ +4.02464843239764e01,
+ +5.34419964675118e02,
+ +4.02449070312629e01,
+ +4.02449066223616e01,
+ +4.02449070312629e01,
+ ],
+ [
+ +3.72618585322083e01,
+ +3.09778226108473e00,
+ +3.09778222795966e00,
+ +3.09778222795966e00,
+ +3.09691515335283e00,
+ +4.02449070312629e01,
+ +3.09680005289677e00,
+ +3.09680002305784e00,
+ +3.09680005289677e00,
+ ],
+ [
+ +3.72618581566509e01,
+ +3.09778223123467e00,
+ +3.09778219810960e00,
+ +3.09778219810960e00,
+ +3.09691512351259e00,
+ +4.02449066223616e01,
+ +3.09680002305784e00,
+ +3.09679999321891e00,
+ +3.09680002305784e00,
+ ],
+ [
+ +3.72618585322083e01,
+ +3.09778226108473e00,
+ +3.09778222795966e00,
+ +3.09778222795966e00,
+ +3.09691515335283e00,
+ +4.02449070312629e01,
+ +3.09680005289677e00,
+ +3.09680002305784e00,
+ +3.09680005289677e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "C6H5I-CH3SH": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +3.13936895934395e00,
+ +3.13131666863102e00,
+ +3.13937683960227e00,
+ +3.31534291514346e00,
+ +3.13765455567338e00,
+ +3.31481155018318e00,
+ +1.53636056287591e00,
+ +1.00352466398219e00,
+ +1.01223354855399e00,
+ +1.00366192372190e00,
+ +1.01219594356898e00,
+ +1.00366200689047e00,
+ +2.15705640674763e00,
+ +9.98181081558970e-01,
+ +3.98411287017616e00,
+ +1.01462256394391e00,
+ +1.01235611510819e00,
+ +1.00858912903507e00,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +7.66915348045866e-18,
+ +9.04559372001613e-09,
+ +5.55474084124553e-03,
+ +9.36199574012947e-01,
+ +5.82456761002137e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.33337260445326e-18,
+ +1.03304923176417e-08,
+ +5.94352226939526e-03,
+ +9.39218013018462e-01,
+ +5.48384543816506e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +7.66767874029992e-18,
+ +9.04441693210364e-09,
+ +5.55437244105958e-03,
+ +9.36196513315798e-01,
+ +5.82491051987251e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.61278067966140e-20,
+ +4.54846017777575e-10,
+ +1.16048785372819e-03,
+ +7.99583198542082e-01,
+ +1.99256313149343e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +7.99678634625602e-18,
+ +9.30523441007744e-09,
+ +5.63544094450366e-03,
+ +9.36860440392255e-01,
+ +5.75041093580066e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.74417166809199e-20,
+ +4.59132999088564e-10,
+ +1.16639851843437e-03,
+ +8.00245977284164e-01,
+ +1.98587623738269e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +2.54205655667171e-04,
+ +9.99745794344333e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81918044661934e-01,
+ +1.80819553380659e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83012088864378e-01,
+ +1.69879111356215e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81935812878635e-01,
+ +1.80641871213652e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83007507536486e-01,
+ +1.69924924635144e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81935823639575e-01,
+ +1.80641763604250e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.18809691426923e-09,
+ +5.00585297181196e-03,
+ +9.94994137840091e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.81212828585247e-01,
+ +1.87871714147530e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +2.61014227249546e-28,
+ +2.42411690886486e-16,
+ +1.38694757870492e-07,
+ +2.01508154369845e-02,
+ +9.79849045868257e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83300662218751e-01,
+ +1.66993377812491e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.83027012438879e-01,
+ +1.69729875611206e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +9.82562408977771e-01,
+ +1.74375910222289e-02,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +2.53086694842437e01,
+ +2.53236159084896e01,
+ +2.53086544809279e01,
+ +2.47100760277660e01,
+ +2.53119158180357e01,
+ +2.47128891340438e01,
+ +9.48922900325656e01,
+ +8.82857683343213e00,
+ +8.82319289903895e00,
+ +8.82848939371737e00,
+ +8.82321544435180e00,
+ +8.82848934076138e00,
+ +5.62455312853600e01,
+ +8.83204729384078e00,
+ +2.14360070777824e01,
+ +8.82177279187685e00,
+ +8.82311945817995e00,
+ +8.82540583248536e00,
+ ],
+ [
+ +2.53236159084896e01,
+ +2.53385737670487e01,
+ +2.53236008936914e01,
+ +2.47245622275532e01,
+ +2.53268647265581e01,
+ +2.47273774996866e01,
+ +9.49508531711154e01,
+ +8.83383171969513e00,
+ +8.82844384885730e00,
+ +8.83374421604913e00,
+ +8.82846641065407e00,
+ +8.83374416305442e00,
+ +5.62800562259229e01,
+ +8.83730471751867e00,
+ +2.14479713482813e01,
+ +8.82702270338885e00,
+ +8.82837035430227e00,
+ +8.83065840028219e00,
+ ],
+ [
+ +2.53086544809279e01,
+ +2.53236008936914e01,
+ +2.53086394776235e01,
+ +2.47100614866121e01,
+ +2.53119008122251e01,
+ +2.47128745907148e01,
+ +9.48922312429173e01,
+ +8.82857155866703e00,
+ +8.82318762822647e00,
+ +8.82848411901647e00,
+ +8.82321017352276e00,
+ +8.82848406606051e00,
+ +5.62454966276673e01,
+ +8.83204201652784e00,
+ +2.14359950691858e01,
+ +8.82176752210693e00,
+ +8.82311418742137e00,
+ +8.82540056004824e00,
+ ],
+ [
+ +2.47100760277660e01,
+ +2.47245622275532e01,
+ +2.47100614866121e01,
+ +2.41300091503453e01,
+ +2.47132223699295e01,
+ +2.41327350652656e01,
+ +9.25450400227954e01,
+ +8.61818543474823e00,
+ +8.61295975815401e00,
+ +8.61810056527593e00,
+ +8.61298164075975e00,
+ +8.61810051387655e00,
+ +5.48621486974196e01,
+ +8.62155388287635e00,
+ +2.09574675249769e01,
+ +8.61158139426314e00,
+ +8.61288847604877e00,
+ +8.61510764363531e00,
+ ],
+ [
+ +2.53119158180357e01,
+ +2.53268647265581e01,
+ +2.53119008122251e01,
+ +2.47132223699295e01,
+ +2.53151626915744e01,
+ +2.47160359467776e01,
+ +9.49050104461900e01,
+ +8.82971816324588e00,
+ +8.82433337366177e00,
+ +8.82963070964208e00,
+ +8.82435592255574e00,
+ +8.82963065667768e00,
+ +5.62530302631426e01,
+ +8.83318917490684e00,
+ +2.14386054857927e01,
+ +8.82291304092804e00,
+ +8.82425992113731e00,
+ +8.82654665861330e00,
+ ],
+ [
+ +2.47128891340438e01,
+ +2.47273774996866e01,
+ +2.47128745907148e01,
+ +2.41327350652656e01,
+ +2.47160359467776e01,
+ +2.41354613905380e01,
+ +9.25560734133844e01,
+ +8.61917409179224e00,
+ +8.61394767066187e00,
+ +8.61908921022803e00,
+ +8.61396955638537e00,
+ +8.61908915882133e00,
+ +5.48686508434178e01,
+ +8.62254301984509e00,
+ +2.09597156158777e01,
+ +8.61256911038653e00,
+ +8.61387637840061e00,
+ +8.61609586216640e00,
+ ],
+ [
+ +9.48922900325656e01,
+ +9.49508531711154e01,
+ +9.48922312429173e01,
+ +9.25450400227954e01,
+ +9.49050104461900e01,
+ +9.25560734133844e01,
+ +3.58497831396092e02,
+ +3.31610125661753e01,
+ +3.31400786291125e01,
+ +3.31606725810729e01,
+ +3.31401662903144e01,
+ +3.31606723751682e01,
+ +2.12283949836107e02,
+ +3.31745064898504e01,
+ +7.97027494030697e01,
+ +3.31345569359076e01,
+ +3.31397930746818e01,
+ +3.31486830076409e01,
+ ],
+ [
+ +8.82857683343213e00,
+ +8.83383171969513e00,
+ +8.82857155866703e00,
+ +8.61818543474823e00,
+ +8.82971816324588e00,
+ +8.61917409179224e00,
+ +3.31610125661753e01,
+ +3.08895723244586e00,
+ +3.08706250254790e00,
+ +3.08892646040635e00,
+ +3.08707043676017e00,
+ +3.08892644176992e00,
+ +1.96792520574283e01,
+ +3.09017856699708e00,
+ +7.46755679047458e00,
+ +3.08656273429260e00,
+ +3.08703665702655e00,
+ +3.08784128454205e00,
+ ],
+ [
+ +8.82319289903895e00,
+ +8.82844384885730e00,
+ +8.82318762822647e00,
+ +8.61295975815401e00,
+ +8.82433337366177e00,
+ +8.61394767066187e00,
+ +3.31400786291125e01,
+ +3.08706250254790e00,
+ +3.08516914014558e00,
+ +3.08703175271770e00,
+ +3.08517706863144e00,
+ +3.08703173409473e00,
+ +1.96668860512657e01,
+ +3.08828295561747e00,
+ +7.46319796171210e00,
+ +3.08466973259124e00,
+ +3.08514331327788e00,
+ +3.08594736006437e00,
+ ],
+ [
+ +8.82848939371737e00,
+ +8.83374421604913e00,
+ +8.82848411901647e00,
+ +8.61810056527593e00,
+ +8.82963070964208e00,
+ +8.61908921022803e00,
+ +3.31606725810729e01,
+ +3.08892646040635e00,
+ +3.08703175271770e00,
+ +3.08889568872755e00,
+ +3.08703968683697e00,
+ +3.08889567009134e00,
+ +1.96790512228764e01,
+ +3.09014778064156e00,
+ +7.46748599935495e00,
+ +3.08653199032049e00,
+ +3.08700590749930e00,
+ +3.08781052558326e00,
+ ],
+ [
+ +8.82321544435180e00,
+ +8.82846641065407e00,
+ +8.82321017352276e00,
+ +8.61298164075975e00,
+ +8.82435592255574e00,
+ +8.61396955638537e00,
+ +3.31401662903144e01,
+ +3.08707043676017e00,
+ +3.08517706863144e00,
+ +3.08703968683697e00,
+ +3.08518499714127e00,
+ +3.08703966821393e00,
+ +1.96669378341176e01,
+ +3.08829089352095e00,
+ +7.46321621437845e00,
+ +3.08467765956666e00,
+ +3.08515124168563e00,
+ +3.08595529090393e00,
+ ],
+ [
+ +8.82848934076138e00,
+ +8.83374416305442e00,
+ +8.82848406606051e00,
+ +8.61810051387655e00,
+ +8.82963065667768e00,
+ +8.61908915882133e00,
+ +3.31606723751682e01,
+ +3.08892644176992e00,
+ +3.08703173409473e00,
+ +3.08889567009134e00,
+ +3.08703966821393e00,
+ +3.08889565145513e00,
+ +1.96790511012452e01,
+ +3.09014776199646e00,
+ +7.46748595648182e00,
+ +3.08653197170106e00,
+ +3.08700588887651e00,
+ +3.08781050695476e00,
+ ],
+ [
+ +5.62455312853600e01,
+ +5.62800562259229e01,
+ +5.62454966276673e01,
+ +5.48621486974196e01,
+ +5.62530302631426e01,
+ +5.48686508434178e01,
+ +2.12283949836107e02,
+ +1.96792520574283e01,
+ +1.96668860512657e01,
+ +1.96790512228764e01,
+ +1.96669378341176e01,
+ +1.96790511012452e01,
+ +1.25836586473867e02,
+ +1.96872231305630e01,
+ +4.72941948267372e01,
+ +1.96636243001677e01,
+ +1.96667173697683e01,
+ +1.96719687931115e01,
+ ],
+ [
+ +8.83204729384078e00,
+ +8.83730471751867e00,
+ +8.83204201652784e00,
+ +8.62155388287635e00,
+ +8.83318917490684e00,
+ +8.62254301984509e00,
+ +3.31745064898504e01,
+ +3.09017856699708e00,
+ +3.08828295561747e00,
+ +3.09014778064156e00,
+ +3.08829089352095e00,
+ +3.09014776199646e00,
+ +1.96872231305630e01,
+ +3.09140046974751e00,
+ +7.47036647230640e00,
+ +3.08778295485591e00,
+ +3.08825709807205e00,
+ +3.08906209992291e00,
+ ],
+ [
+ +2.14360070777824e01,
+ +2.14479713482813e01,
+ +2.14359950691858e01,
+ +2.09574675249769e01,
+ +2.14386054857927e01,
+ +2.09597156158777e01,
+ +7.97027494030697e01,
+ +7.46755679047458e00,
+ +7.46319796171210e00,
+ +7.46748599935495e00,
+ +7.46321621437845e00,
+ +7.46748595648182e00,
+ +4.72941948267372e01,
+ +7.47036647230640e00,
+ +1.83413164591959e01,
+ +7.46204824414648e00,
+ +7.46313850405433e00,
+ +7.46498955077155e00,
+ ],
+ [
+ +8.82177279187685e00,
+ +8.82702270338885e00,
+ +8.82176752210693e00,
+ +8.61158139426314e00,
+ +8.82291304092804e00,
+ +8.61256911038653e00,
+ +3.31345569359076e01,
+ +3.08656273429260e00,
+ +3.08466973259124e00,
+ +3.08653199032049e00,
+ +3.08467765956666e00,
+ +3.08653197170106e00,
+ +1.96636243001677e01,
+ +3.08778295485591e00,
+ +7.46204824414648e00,
+ +3.08417042017811e00,
+ +3.08464391064378e00,
+ +3.08544780425280e00,
+ ],
+ [
+ +8.82311945817995e00,
+ +8.82837035430227e00,
+ +8.82311418742137e00,
+ +8.61288847604877e00,
+ +8.82425992113731e00,
+ +8.61387637840061e00,
+ +3.31397930746818e01,
+ +3.08703665702655e00,
+ +3.08514331327788e00,
+ +3.08700590749930e00,
+ +3.08515124168563e00,
+ +3.08700588887651e00,
+ +1.96667173697683e01,
+ +3.08825709807205e00,
+ +7.46313850405433e00,
+ +3.08464391064378e00,
+ +3.08511748666464e00,
+ +3.08592152552955e00,
+ ],
+ [
+ +8.82540583248536e00,
+ +8.83065840028219e00,
+ +8.82540056004824e00,
+ +8.61510764363531e00,
+ +8.82654665861330e00,
+ +8.61609586216640e00,
+ +3.31486830076409e01,
+ +3.08784128454205e00,
+ +3.08594736006437e00,
+ +3.08781052558326e00,
+ +3.08595529090393e00,
+ +3.08781050695476e00,
+ +1.96719687931115e01,
+ +3.08906209992291e00,
+ +7.46498955077155e00,
+ +3.08544780425280e00,
+ +3.08592152552955e00,
+ +3.08672581101038e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+ "AmF3": Refs(
+ {
+ "cn": torch.tensor(
+ [
+ +2.99072690000000e00,
+ +9.97680900000000e-01,
+ +9.97678500000000e-01,
+ +9.97680400000000e-01,
+ ],
+ dtype=torch.double,
+ ),
+ "weights": torch.tensor(
+ [
+ [
+ +3.01777620000000e-16,
+ +3.48560790000000e-08,
+ +6.05574020000000e-03,
+ +9.93942080000000e-01,
+ +2.12835020000000e-06,
+ +3.22313320000000e-14,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.83165000000000e-02,
+ +9.81683500000000e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.83168000000000e-02,
+ +9.81683200000000e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ [
+ +1.83166000000000e-02,
+ +9.81683400000000e-01,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ +0.00000000000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ "c6": torch.tensor(
+ [
+ [
+ +5.24180114700000e02,
+ +5.44235535000000e01,
+ +5.44235573000000e01,
+ +5.44235573000000e01,
+ ],
+ [
+ +5.44235535000000e01,
+ +7.17594720000000e00,
+ +7.17594770000000e00,
+ +7.17594770000000e00,
+ ],
+ [
+ +5.44235535000000e01,
+ +7.17594770000000e00,
+ +7.17594860000000e00,
+ +7.17594810000000e00,
+ ],
+ [
+ +5.44235535000000e01,
+ +7.17594770000000e00,
+ +7.17594810000000e00,
+ +7.17594810000000e00,
+ ],
+ ],
+ dtype=torch.double,
+ ),
+ }
+ ),
+}
+
+samples: Dict[str, Record] = merge_nested_dicts(mols, refs)
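
For orientation, a minimal sketch of how a single-sample test can consume the merged `samples` dict follows. Only the sample fields ("numbers", "cn", "weights", "c6") and `reference.Reference` appear in this diff; the call names `model.weight_references` and `model.atomic_c6` are assumptions about the model API and are not confirmed by the hunks below.

# Minimal sketch, not part of the diff: single-sample comparison against stored references.
import torch

from tad_dftd3 import model, reference

from .samples import samples  # the dict assembled above via merge_nested_dicts(mols, refs)


def check_sample(name: str, dtype: torch.dtype = torch.double) -> None:
    dd = {"dtype": dtype}
    sample = samples[name]

    ref = reference.Reference(**dd)
    # model.weight_references / model.atomic_c6 are assumed names for the weight and C6 routines.
    weights = model.weight_references(sample["numbers"], sample["cn"].to(**dd), ref)
    c6 = model.atomic_c6(sample["numbers"], weights, ref)

    assert torch.allclose(weights, sample["weights"].to(**dd), atol=1e-7)
    assert torch.allclose(c6, sample["c6"].to(**dd), atol=1e-7)


check_sample("SiH4")
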
diff --git a/tests/test_model/test_c6.py b/test/test_model/test_c6.py
similarity index 92%
rename from tests/test_model/test_c6.py
rename to test/test_model/test_c6.py
index fe1a19e..3e7b8a6 100644
--- a/tests/test_model/test_c6.py
+++ b/test/test_model/test_c6.py
@@ -17,12 +17,13 @@
"""
import pytest
import torch
+from tad_mctc.batch import pack
-from tad_dftd3 import model, reference, utils
-from tad_dftd3._typing import DD
+from tad_dftd3 import model, reference
+from tad_dftd3.typing import DD
from ..conftest import DEVICE
-from ..samples import samples
+from .samples import samples
sample_list = ["SiH4", "PbH4-BiH3", "C6H5I-CH3SH", "MB16_43_01"]
@@ -56,20 +57,20 @@ def test_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
samples[name1],
samples[name2],
)
- numbers = utils.pack(
+ numbers = pack(
(
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
)
)
ref = reference.Reference(**dd)
- weights = utils.pack(
+ weights = pack(
(
sample1["weights"].to(**dd),
sample2["weights"].to(**dd),
)
)
- refc6 = utils.pack(
+ refc6 = pack(
(
sample1["c6"].to(**dd),
sample2["c6"].to(**dd),
diff --git a/tests/test_model/test_load.py b/test/test_model/test_load.py
similarity index 88%
rename from tests/test_model/test_load.py
rename to test/test_model/test_load.py
index a5c499f..c7a7dc4 100644
--- a/tests/test_model/test_load.py
+++ b/test/test_model/test_load.py
@@ -17,11 +17,11 @@
"""
import torch
-from tad_dftd3 import constants, reference
+from tad_dftd3 import defaults, reference
def test_ref() -> None:
c6 = reference._load_c6(dtype=torch.double)
assert c6.shape == torch.Size(
- (constants.MAX_ELEMENT, constants.MAX_ELEMENT, 7, 7),
+ (defaults.MAX_ELEMENT, defaults.MAX_ELEMENT, 7, 7),
)
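
The (MAX_ELEMENT, MAX_ELEMENT, 7, 7) shape asserted above reflects the seven reference systems per element: the per-pair `c6` tensors stored in the samples are obtained by contracting the 7-component `weights` of both atoms with this table. A minimal sketch of that contraction follows, assuming the table can be indexed directly by atomic number; the actual routine lives in tad_dftd3.model and is not shown in this diff.

# Minimal sketch, not part of the diff: C6 interpolation from reference-system weights.
import torch

from tad_dftd3 import reference


def pairwise_c6(numbers: torch.Tensor, weights: torch.Tensor) -> torch.Tensor:
    # weights: (natoms, 7) reference weights, as stored under "weights" in the samples
    refc6 = reference._load_c6(dtype=weights.dtype)  # (MAX_ELEMENT, MAX_ELEMENT, 7, 7)
    # Indexing by atomic number is an assumption about the table layout.
    table = refc6[numbers.unsqueeze(-1), numbers.unsqueeze(-2)]  # (natoms, natoms, 7, 7)
    # C6(i, j) = sum_{a, b} w[i, a] * w[j, b] * table[i, j, a, b]
    return torch.einsum("ia,jb,ijab->ij", weights, weights, table)
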
diff --git a/tests/test_model/test_reference.py b/test/test_model/test_reference.py
similarity index 93%
rename from tests/test_model/test_reference.py
rename to test/test_model/test_reference.py
index addea24..05a55ab 100644
--- a/tests/test_model/test_reference.py
+++ b/test/test_model/test_reference.py
@@ -15,14 +15,16 @@
"""
Test the reference.
"""
+from typing import Union
+
import pytest
import torch
+from tad_mctc.convert import str_to_device
from tad_dftd3 import reference
-from tad_dftd3._typing import DD, Union
+from tad_dftd3.typing import DD
from ..conftest import DEVICE
-from ..utils import get_device_from_str
sample_list = ["SiH4", "PbH4-BiH3", "C6H5I-CH3SH", "MB16_43_01"]
@@ -52,8 +54,8 @@ def test_reference_move_both(dtype: torch.dtype) -> None:
@pytest.mark.parametrize("device_str", ["cpu", "cuda"])
@pytest.mark.parametrize("device_str2", ["cpu", "cuda"])
def test_reference_device(device_str: str, device_str2: str) -> None:
- device = get_device_from_str(device_str)
- device2 = get_device_from_str(device_str2)
+ device = str_to_device(device_str)
+ device2 = str_to_device(device_str2)
ref = reference.Reference(device=device2).to(device)
assert ref.device == device
diff --git a/tests/test_model/test_weights.py b/test/test_model/test_weights.py
similarity index 92%
rename from tests/test_model/test_weights.py
rename to test/test_model/test_weights.py
index 7e6bb19..38fe4ba 100644
--- a/tests/test_model/test_weights.py
+++ b/test/test_model/test_weights.py
@@ -17,12 +17,13 @@
"""
import pytest
import torch
+from tad_mctc.batch import pack
-from tad_dftd3 import model, reference, utils
-from tad_dftd3._typing import DD
+from tad_dftd3 import model, reference
+from tad_dftd3.typing import DD
from ..conftest import DEVICE
-from ..samples import samples
+from .samples import samples
sample_list = ["SiH4", "PbH4-BiH3", "C6H5I-CH3SH", "MB16_43_01"]
@@ -56,20 +57,20 @@ def test_batch(dtype: torch.dtype, name1: str, name2: str) -> None:
samples[name1],
samples[name2],
)
- numbers = utils.pack(
+ numbers = pack(
(
sample1["numbers"].to(DEVICE),
sample2["numbers"].to(DEVICE),
)
)
ref = reference.Reference(**dd)
- cn = utils.pack(
+ cn = pack(
(
sample1["cn"].to(**dd),
sample2["cn"].to(**dd),
)
)
- refgw = utils.pack(
+ refgw = pack(
(
sample1["weights"].to(**dd),
sample2["weights"].to(**dd),
diff --git a/tests/molecules.py b/tests/molecules.py
deleted file mode 100644
index c0df487..0000000
--- a/tests/molecules.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Molecules for testing. Taken from https://github.com/grimme-lab/mstore.
-"""
-import torch
-
-from tad_dftd3._typing import Dict, Molecule
-from tad_dftd3.utils import to_number
-
-mols: Dict[str, Molecule] = {
- "LiH": {
- "numbers": to_number("Li H".split()),
- "positions": torch.tensor(
- [
- [+0.00000000000000, +0.00000000000000, -1.50796743897235],
- [+0.00000000000000, +0.00000000000000, +1.50796743897235],
- ]
- ),
- },
- "SiH4": {
- "numbers": to_number("Si H H H H".split()),
- "positions": torch.tensor(
- [
- [+0.00000000000000, -0.00000000000000, +0.00000000000000],
- [+1.61768389755830, +1.61768389755830, -1.61768389755830],
- [-1.61768389755830, -1.61768389755830, -1.61768389755830],
- [+1.61768389755830, -1.61768389755830, +1.61768389755830],
- [-1.61768389755830, +1.61768389755830, +1.61768389755830],
- ],
- dtype=torch.float64,
- ),
- },
- "MB16_43_01": {
- "numbers": to_number("Na H O H F H H O N H H Cl B B N Al".split()),
- "positions": torch.tensor(
- [
- [-1.85528263484662, +3.58670515364616, -2.41763729306344],
- [+4.40178023537845, +0.02338844412653, -4.95457749372945],
- [-2.98706033463438, +4.76252065456814, +1.27043301573532],
- [+0.79980886075526, +1.41103455609189, -5.04655321620119],
- [-4.20647469409936, +1.84275767548460, +4.55038084858449],
- [-3.54356121843970, -3.18835665176557, +1.46240021785588],
- [+2.70032160109941, +1.06818452504054, -1.73234650374438],
- [+3.73114088824361, -2.07001543363453, +2.23160937604731],
- [-1.75306819230397, +0.35951417150421, +1.05323406177129],
- [+5.41755788583825, -1.57881830078929, +1.75394002750038],
- [-2.23462868255966, -2.13856505054269, +4.10922285746451],
- [+1.01565866207568, -3.21952154552768, -3.36050963020778],
- [+2.42119255723593, +0.26626435093114, -3.91862474360560],
- [-3.02526098819107, +2.53667889095925, +2.31664984740423],
- [-2.00438948664892, -2.29235136977220, +2.19782807357059],
- [+1.12226554109716, -1.36942007032045, +0.48455055461782],
- ],
- dtype=torch.float64,
- ),
- },
- "PbH4-BiH3": {
- "numbers": to_number("Pb H H H H Bi H H H".split()),
- "positions": torch.tensor(
- [
- [-0.00000020988889, -4.98043478877778, +0.00000000000000],
- [+3.06964045311111, -6.06324400177778, +0.00000000000000],
- [-1.53482054188889, -6.06324400177778, -2.65838526500000],
- [-1.53482054188889, -6.06324400177778, +2.65838526500000],
- [-0.00000020988889, -1.72196703577778, +0.00000000000000],
- [-0.00000020988889, +4.77334244722222, +0.00000000000000],
- [+1.35700257511111, +6.70626379422222, -2.35039772300000],
- [-2.71400388988889, +6.70626379422222, +0.00000000000000],
- [+1.35700257511111, +6.70626379422222, +2.35039772300000],
- ],
- dtype=torch.float64,
- ),
- },
- "C6H5I-CH3SH": {
- "numbers": to_number("C C C C C C I H H H H H S H C H H H".split()),
- "positions": torch.tensor(
- [
- [-1.42754169820131, -1.50508961850828, -1.93430551124333],
- [+1.19860572924150, -1.66299114873979, -2.03189643761298],
- [+2.65876001301880, +0.37736955363609, -1.23426391650599],
- [+1.50963368042358, +2.57230374419743, -0.34128058818180],
- [-1.12092277855371, +2.71045691257517, -0.25246348639234],
- [-2.60071517756218, +0.67879949508239, -1.04550707592673],
- [-2.86169588073340, +5.99660765711210, +1.08394899986031],
- [+2.09930989272956, -3.36144811062374, -2.72237695164263],
- [+2.64405246349916, +4.15317840474646, +0.27856972788526],
- [+4.69864865613751, +0.26922271535391, -1.30274048619151],
- [-4.63786461351839, +0.79856258572808, -0.96906659938432],
- [-2.57447518692275, -3.08132039046931, -2.54875517521577],
- [-5.88211879210329, 11.88491819358157, +2.31866455902233],
- [-8.18022701418703, 10.95619984550779, +1.83940856333092],
- [-5.08172874482867, 12.66714386256482, -0.92419491629867],
- [-3.18311711399702, 13.44626574330220, -0.86977613647871],
- [-5.07177399637298, 10.99164969235585, -2.10739192258756],
- [-6.35955320518616, 14.08073002965080, -1.68204314084441],
- ],
- dtype=torch.float64,
- ),
- },
- "C4H5NCS": {
- "numbers": to_number("C C C C N C S H H H H H".split()),
- "positions": torch.tensor(
- [
- [-2.56745685564671, -0.02509985979910, 0.00000000000000],
- [-1.39177582455797, +2.27696188880014, 0.00000000000000],
- [+1.27784995624894, +2.45107479759386, 0.00000000000000],
- [+2.62801937615793, +0.25927727028120, 0.00000000000000],
- [+1.41097033661123, -1.99890996077412, 0.00000000000000],
- [-1.17186102298849, -2.34220576284180, 0.00000000000000],
- [-2.39505990368378, -5.22635838332362, 0.00000000000000],
- [+2.41961980455457, -3.62158019253045, 0.00000000000000],
- [-2.51744374846065, +3.98181713686746, 0.00000000000000],
- [+2.24269048384775, +4.24389473203647, 0.00000000000000],
- [+4.66488984573956, +0.17907568006409, 0.00000000000000],
- [-4.60044244782237, -0.17794734637413, 0.00000000000000],
- ],
- dtype=torch.float64,
- ),
- },
- "AmF3": {
- "numbers": to_number("Am F F F".split()),
- "positions": torch.tensor(
- [
- [-1.13163973200000, -2.17446990100000, +1.10012477100000],
- [-4.66377948900000, -3.12947883400000, -0.36987606800000],
- [-0.19032564300000, +1.36339950600000, -0.36521789300000],
- [+1.46283310800000, -4.75734549200000, -0.36503081000000],
- ],
- dtype=torch.float64,
- ),
- },
-}
diff --git a/tests/samples.py b/tests/samples.py
deleted file mode 100644
index 3f8b8f6..0000000
--- a/tests/samples.py
+++ /dev/null
@@ -1,5170 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Collection of test samples.
-"""
-import torch
-
-from tad_dftd3._typing import Dict, Molecule, Tensor, TypedDict
-
-from .molecules import mols
-from .utils import merge_nested_dicts
-
-
-class Refs(TypedDict):
- """
- Format of reference values. Note that energies and gradients are calculated
- with different parameters.
- """
-
- cn: Tensor
- """Coordination number."""
-
- weights: Tensor
- """Weights for atomic reference systems."""
-
- c6: Tensor
- """C6 coefficients."""
-
- disp2: Tensor
- """Two-body dispersion energy."""
-
- disp3: Tensor
- """Three-body (ATM) dispersion energy."""
-
- grad: Tensor
- """Nuclear gradient (reference with GFN1-xTB parameters)."""
-
- hessian: Tensor
- """Nuclear Hessian (reference with GFN1-xTB parameters)."""
-
-
-class Record(Molecule, Refs):
- """Store for molecular information and reference values."""
-
-
-refs: Dict[str, Refs] = {
- "LiH": {
- "c6": torch.tensor([], dtype=torch.double),
- "cn": torch.tensor([], dtype=torch.double),
- "weights": torch.tensor([], dtype=torch.double),
- "disp2": torch.tensor(
- [
- -1.5918418587455960e-04,
- -1.5918418587455960e-04,
- ],
- dtype=torch.double,
- ),
- "disp3": torch.tensor(
- [
- +0.0000000000000000e00,
- +0.0000000000000000e00,
- ],
- dtype=torch.double,
- ),
- "grad": torch.tensor(
- [
- [
- +0.00000000000000e00,
- +0.00000000000000e00,
- +2.35781197246301e-06,
- ],
- [
- +0.00000000000000e00,
- +0.00000000000000e00,
- -2.35781197246301e-06,
- ],
- ],
- dtype=torch.double,
- ),
- "hessian": torch.tensor(
- [
- -7.81784784406455e-07,
- +7.81784784406455e-07,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +7.81784784406455e-07,
- -7.81784784406455e-07,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- -7.81784784406455e-07,
- +7.81784784406455e-07,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +7.81784784406455e-07,
- -7.81784784406455e-07,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- -1.63891846815177e-05,
- +1.63891846815177e-05,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +1.63891846815177e-05,
- -1.63891846815177e-05,
- ],
- dtype=torch.double,
- ),
- },
- "SiH4": {
- "cn": torch.tensor(
- [
- +3.89022710629348e00,
- +9.72651698030399e-01,
- +9.72651698030399e-01,
- +9.72651698030399e-01,
- +9.72651698030399e-01,
- ],
- dtype=torch.double,
- ),
- "weights": torch.tensor(
- [
- [
- +5.00040274964878e-27,
- +9.51882822683413e-16,
- +2.54542759053980e-07,
- +2.64839020628269e-02,
- +9.73515843394413e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.77454000458529e-01,
- +2.25459995414711e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.77454000458529e-01,
- +2.25459995414711e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.77454000458529e-01,
- +2.25459995414711e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.77454000458529e-01,
- +2.25459995414711e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- ],
- dtype=torch.double,
- ),
- "c6": torch.tensor(
- [
- [
- +1.51435152661277e02,
- +2.13999949401839e01,
- +2.13999949401839e01,
- +2.13999949401839e01,
- +2.13999949401839e01,
- ],
- [
- +2.13999949401839e01,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- ],
- [
- +2.13999949401839e01,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- ],
- [
- +2.13999949401839e01,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- ],
- [
- +2.13999949401839e01,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- +3.10444218682464e00,
- ],
- ],
- dtype=torch.double,
- ),
- "disp2": torch.tensor(
- [
- -9.2481575005393872e-04,
- -3.6494949521315417e-04,
- -3.6494949521315417e-04,
- -3.6494949521315417e-04,
- -3.6494949521315417e-04,
- ],
- dtype=torch.double,
- ),
- "disp3": torch.tensor(
- [
- +1.2843453312590819e-09,
- +7.5348323667640688e-08,
- +7.5348323667640688e-08,
- +7.5348323667640688e-08,
- +7.5348323667640688e-08,
- ],
- dtype=torch.double,
- ),
- "grad": torch.tensor(
- [
- [
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- -7.37831467548165e-07,
- -7.37831467548165e-07,
- +7.37831467548165e-07,
- ],
- [
- +7.37831467548165e-07,
- +7.37831467548165e-07,
- +7.37831467548165e-07,
- ],
- [
- -7.37831467548165e-07,
- +7.37831467548165e-07,
- -7.37831467548165e-07,
- ],
- [
- +7.37831467548165e-07,
- -7.37831467548165e-07,
- -7.37831467548165e-07,
- ],
- ],
- dtype=torch.double,
- ),
- "hessian": torch.tensor(
- [
- -6.80725771067933e-05,
- +1.70181442766941e-05,
- +1.70181442766983e-05,
- +1.70181442766983e-05,
- +1.70181442766941e-05,
- +0.00000000000000e00,
- +1.45384560849674e-05,
- +1.45384560849717e-05,
- -1.45384560849717e-05,
- -1.45384560849674e-05,
- -1.27054942088145e-17,
- -1.45384560849674e-05,
- +1.45384560849717e-05,
- +1.45384560849717e-05,
- -1.45384560849674e-05,
- +1.70181440743930e-05,
- -1.49785955400277e-05,
- -5.70567229126333e-08,
- -1.92543508854213e-06,
- -5.70567229126333e-08,
- +1.45384559037866e-05,
- -1.49796911426310e-05,
- +4.41235239417783e-07,
- +1.41193487852742e-06,
- -1.41193487910287e-06,
- -1.45384559037823e-05,
- +1.49796911426310e-05,
- +1.41193487910287e-06,
- -1.41193487852742e-06,
- -4.41235239417783e-07,
- +1.70181440744014e-05,
- -5.70567229126333e-08,
- -1.49785955400319e-05,
- -5.70567229126333e-08,
- -1.92543508854425e-06,
- +1.45384559037866e-05,
- +4.41235239417783e-07,
- -1.49796911426352e-05,
- -1.41193487910287e-06,
- +1.41193487853165e-06,
- +1.45384559037781e-05,
- -1.41193487910287e-06,
- -1.49796911426352e-05,
- +4.41235239417783e-07,
- +1.41193487853165e-06,
- +1.70181440743887e-05,
- -1.92543508854001e-06,
- -5.70567229126333e-08,
- -1.49785955400235e-05,
- -5.70567229126333e-08,
- -1.45384559037823e-05,
- -1.41193487852742e-06,
- +1.41193487910287e-06,
- +1.49796911426268e-05,
- -4.41235239417783e-07,
- +1.45384559037781e-05,
- +1.41193487852742e-06,
- +4.41235239417783e-07,
- -1.49796911426268e-05,
- -1.41193487910287e-06,
- +1.70181440744014e-05,
- -5.70567229126333e-08,
- -1.92543508854636e-06,
- -5.70567229126333e-08,
- -1.49785955400319e-05,
- -1.45384559037866e-05,
- +1.41193487910287e-06,
- -1.41193487853165e-06,
- -4.41235239417783e-07,
- +1.49796911426352e-05,
- -1.45384559037866e-05,
- -4.41235239417783e-07,
- -1.41193487853165e-06,
- +1.41193487910287e-06,
- +1.49796911426352e-05,
- +0.00000000000000e00,
- +1.45384560849717e-05,
- +1.45384560849674e-05,
- -1.45384560849674e-05,
- -1.45384560849717e-05,
- -6.80725771067933e-05,
- +1.70181442766983e-05,
- +1.70181442766941e-05,
- +1.70181442766941e-05,
- +1.70181442766983e-05,
- +4.23516473627150e-18,
- -1.45384560849717e-05,
- +1.45384560849674e-05,
- -1.45384560849674e-05,
- +1.45384560849717e-05,
- +1.45384559037866e-05,
- -1.49796911426352e-05,
- +4.41235239417783e-07,
- -1.41193487910287e-06,
- +1.41193487853165e-06,
- +1.70181440744014e-05,
- -1.49785955400319e-05,
- -5.70567229126333e-08,
- -5.70567229126333e-08,
- -1.92543508854636e-06,
- -1.45384559037781e-05,
- +1.49796911426352e-05,
- +1.41193487910287e-06,
- -4.41235239417783e-07,
- -1.41193487853165e-06,
- +1.45384559037866e-05,
- +4.41235239417783e-07,
- -1.49796911426310e-05,
- +1.41193487852742e-06,
- -1.41193487910287e-06,
- +1.70181440743930e-05,
- -5.70567229126333e-08,
- -1.49785955400277e-05,
- -1.92543508854001e-06,
- -5.70567229126333e-08,
- +1.45384559037823e-05,
- -1.41193487910287e-06,
- -1.49796911426310e-05,
- +1.41193487852742e-06,
- +4.41235239417783e-07,
- -1.45384559037866e-05,
- +1.41193487910287e-06,
- -1.41193487852742e-06,
- +1.49796911426310e-05,
- -4.41235239417783e-07,
- +1.70181440743930e-05,
- -5.70567229126333e-08,
- -1.92543508854001e-06,
- -1.49785955400277e-05,
- -5.70567229126333e-08,
- -1.45384559037823e-05,
- -4.41235239417783e-07,
- -1.41193487852742e-06,
- +1.49796911426310e-05,
- +1.41193487910287e-06,
- -1.45384559037823e-05,
- -1.41193487853165e-06,
- +1.41193487910287e-06,
- -4.41235239417783e-07,
- +1.49796911426310e-05,
- +1.70181440743972e-05,
- -1.92543508854636e-06,
- -5.70567229126333e-08,
- -5.70567229126333e-08,
- -1.49785955400277e-05,
- +1.45384559037823e-05,
- +1.41193487853165e-06,
- +4.41235239417783e-07,
- -1.41193487910287e-06,
- -1.49796911426310e-05,
- +0.00000000000000e00,
- -1.45384560849696e-05,
- +1.45384560849696e-05,
- +1.45384560849674e-05,
- -1.45384560849674e-05,
- +0.00000000000000e00,
- -1.45384560849696e-05,
- +1.45384560849696e-05,
- -1.45384560849674e-05,
- +1.45384560849674e-05,
- -6.80725771067933e-05,
- +1.70181442766983e-05,
- +1.70181442766983e-05,
- +1.70181442766941e-05,
- +1.70181442766941e-05,
- -1.45384559037823e-05,
- +1.49796911426247e-05,
- -1.41193487852742e-06,
- +1.41193487910287e-06,
- -4.41235239417783e-07,
- -1.45384559037823e-05,
- +1.49796911426247e-05,
- -1.41193487852742e-06,
- -4.41235239417783e-07,
- +1.41193487910287e-06,
- +1.70181440743845e-05,
- -1.49785955400235e-05,
- -1.92543508854213e-06,
- -5.70567229126333e-08,
- -5.70567229126333e-08,
- +1.45384559037823e-05,
- +1.41193487852742e-06,
- -1.49796911426247e-05,
- +4.41235239417783e-07,
- -1.41193487910287e-06,
- +1.45384559037823e-05,
- +1.41193487852742e-06,
- -1.49796911426247e-05,
- -1.41193487910287e-06,
- +4.41235239417783e-07,
- +1.70181440743845e-05,
- -1.92543508854213e-06,
- -1.49785955400235e-05,
- -5.70567229126333e-08,
- -5.70567229126333e-08,
- +1.45384559037823e-05,
- -1.41193487910287e-06,
- +4.41235239417783e-07,
- -1.49796911426289e-05,
- +1.41193487853165e-06,
- -1.45384559037823e-05,
- -4.41235239417783e-07,
- +1.41193487910287e-06,
- +1.49796911426289e-05,
- -1.41193487853165e-06,
- +1.70181440743972e-05,
- -5.70567229126333e-08,
- -5.70567229126333e-08,
- -1.49785955400277e-05,
- -1.92543508854636e-06,
- -1.45384559037823e-05,
- -4.41235239417783e-07,
- +1.41193487910287e-06,
- -1.41193487853165e-06,
- +1.49796911426289e-05,
- +1.45384559037823e-05,
- -1.41193487910287e-06,
- +4.41235239417783e-07,
- +1.41193487853165e-06,
- -1.49796911426289e-05,
- +1.70181440743972e-05,
- -5.70567229126333e-08,
- -5.70567229126333e-08,
- -1.92543508854636e-06,
- -1.49785955400277e-05,
- ],
- dtype=torch.double,
- ),
- },
- "MB16_43_01": {
- "cn": torch.tensor(
- [
- +4.15066368951397e00,
- +9.78868026389781e-01,
- +2.01080985633859e00,
- +1.47865697827818e00,
- +1.03577822442117e00,
- +1.01206994314781e00,
- +1.50329777127401e00,
- +1.99858468272609e00,
- +3.89181927539324e00,
- +1.04323373360740e00,
- +1.01526584450636e00,
- +1.99315213227354e00,
- +4.63526560889683e00,
- +3.87312260639335e00,
- +3.99316800677884e00,
- +5.45068226903888e0,
- ],
- dtype=torch.double,
- ),
- "weights": torch.tensor(
- [
- [
- +4.61254014807976e-13,
- +9.99999999999539e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.78431945472983e-01,
- +2.15680545270172e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.33252077840319e-08,
- +1.55830681937747e-02,
- +9.84416838481017e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.99424904108747e-01,
- +5.75095891252906e-04,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +1.35771400228363e-02,
- +9.86422859977164e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.82992148346892e-01,
- +1.70078516531077e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.99519469064248e-01,
- +4.80530935751615e-04,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +1.13181694792597e-07,
- +1.71503960869602e-02,
- +9.82849490731345e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +1.25926325849160e-25,
- +6.73263145629432e-14,
- +1.94165275506323e-05,
- +9.99980583472382e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.86403420777318e-01,
- +1.35965792226822e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.83377538259043e-01,
- +1.66224617409573e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +6.63636803493899e-06,
- +9.99993363631965e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +4.78432084484299e-38,
- +4.72470789879862e-24,
- +2.64845507076682e-13,
- +7.08386079833514e-06,
- +9.99992916138937e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +5.57929648633356e-26,
- +1.48261370770972e-14,
- +2.19715394953033e-06,
- +1.59978977357256e-01,
- +8.40018825488779e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +1.11473605172390e-26,
- +1.33471958830444e-14,
- +8.80046323582265e-06,
- +9.99991199536751e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +3.64404060381414e-41,
- +1.64269207706493e-24,
- +4.50618875164815e-11,
- +9.99999999954938e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- ],
- dtype=torch.double,
- ),
- "c6": torch.tensor(
- [
- [
- +1.86105200000295e02,
- +2.10637967443952e01,
- +3.33393205851663e01,
- +2.07648885028596e01,
- +2.50462879811640e01,
- +2.09988662957750e01,
- +2.07635420397407e01,
- +3.33469365793004e01,
- +4.44925237765056e01,
- +2.09502948932743e01,
- +2.09933789215108e01,
- +1.11982907430142e02,
- +6.38123316167329e01,
- +6.45263838745924e01,
- +4.44924561012183e01,
- +1.60659400002538e02,
- ],
- [
- +2.10637967443952e01,
- +3.10104597907844e00,
- +5.44104128537895e00,
- +3.06460553022247e00,
- +4.32334790530218e00,
- +3.09313018967101e00,
- +3.06444138044805e00,
- +5.44213006593611e00,
- +6.85278222181144e00,
- +3.08720876141063e00,
- +3.09246121386693e00,
- +1.67260314058114e01,
- +9.32232615546217e00,
- +9.40964391592266e00,
- +6.85277268893649e00,
- +2.13390348985262e01,
- ],
- [
- +3.33393205851663e01,
- +5.44104128537895e00,
- +1.04060662211154e01,
- +5.38395096621487e00,
- +8.57335406068136e00,
- +5.42863982058108e00,
- +5.38369379696184e00,
- +1.04078420241680e01,
- +1.26306609797298e01,
- +5.41936287038137e00,
- +5.42759175329962e00,
- +2.97398273677055e01,
- +1.63311877211176e01,
- +1.64664919883409e01,
- +1.26306453073937e01,
- +3.55362026390315e01,
- ],
- [
- +2.07648885028596e01,
- +3.06460553022247e00,
- +5.38395096621487e00,
- +3.02866858604329e00,
- +4.28051674110831e00,
- +3.05679911479676e00,
- +3.02850670435781e00,
- +5.38502623652600e00,
- +6.77670142181813e00,
- +3.05095950404731e00,
- +3.05613938235978e00,
- +1.65326001720343e01,
- +9.21222013302869e00,
- +9.29832544621136e00,
- +6.77669200846439e00,
- +2.10658829179373e01,
- ],
- [
- +2.50462879811640e01,
- +4.32334790530218e00,
- +8.57335406068136e00,
- +4.28051674110831e00,
- +7.16510167612946e00,
- +4.31404389049233e00,
- +4.28032380369504e00,
- +8.57471450429471e00,
- +1.02476219308165e01,
- +4.30708399636246e00,
- +4.31325759357762e00,
- +2.37716272051766e01,
- +1.29661524144552e01,
- +1.30671606537876e01,
- +1.02476098345869e01,
- +2.75317129235774e01,
- ],
- [
- +2.09988662957750e01,
- +3.09313018967101e00,
- +5.42863982058108e00,
- +3.05679911479676e00,
- +4.31404389049233e00,
- +3.08523815906549e00,
- +3.05663545770877e00,
- +5.42972566637027e00,
- +6.83625554309033e00,
- +3.07933450364290e00,
- +3.08457119115511e00,
- +1.66840132375542e01,
- +9.29840833574711e00,
- +9.38546272191831e00,
- +6.83624603617842e00,
- +2.12796993628112e01,
- ],
- [
- +2.07635420397407e01,
- +3.06444138044805e00,
- +5.38369379696184e00,
- +3.02850670435781e00,
- +4.28032380369504e00,
- +3.05663545770877e00,
- +3.02834483288917e00,
- +5.38476900641466e00,
- +6.77635870797818e00,
- +3.05079621551476e00,
- +3.05597576690949e00,
- +1.65317288410165e01,
- +9.21172414905175e00,
- +9.29782400064014e00,
- +6.77634929516284e00,
- +2.10646524765623e01,
- ],
- [
- +3.33469365793004e01,
- +5.44213006593611e00,
- +1.04078420241680e01,
- +5.38502623652600e00,
- +8.57471450429471e00,
- +5.42972566637027e00,
- +5.38476900641466e00,
- +1.04096182819417e01,
- +1.26329675237980e01,
- +5.42044652080921e00,
- +5.42867735106695e00,
- +2.97456233348293e01,
- +1.63344547422583e01,
- +1.64697916829337e01,
- +1.26329518479543e01,
- +3.55438470017986e01,
- ],
- [
- +4.44925237765056e01,
- +6.85278222181144e00,
- +1.26306609797298e01,
- +6.77670142181813e00,
- +1.02476219308165e01,
- +6.83625554309033e00,
- +6.77635870797818e00,
- +1.26329675237980e01,
- +1.55817742139549e01,
- +6.82389271515335e00,
- +6.83485884749660e00,
- +3.72381957696309e01,
- +2.05886604700550e01,
- +2.07697483350778e01,
- +1.55817539255380e01,
- +4.59400237556194e01,
- ],
- [
- +2.09502948932743e01,
- +3.08720876141063e00,
- +5.41936287038137e00,
- +3.05095950404731e00,
- +4.30708399636246e00,
- +3.07933450364290e00,
- +3.05079621551476e00,
- +5.42044652080921e00,
- +6.82389271515335e00,
- +3.07344414324100e00,
- +3.07866903774297e00,
- +1.66525814300412e01,
- +9.28051654425321e00,
- +9.36737391255552e00,
- +6.82388322766317e00,
- +2.12353132504549e01,
- ],
- [
- +2.09933789215108e01,
- +3.09246121386693e00,
- +5.42759175329962e00,
- +3.05613938235978e00,
- +4.31325759357762e00,
- +3.08457119115511e00,
- +3.05597576690949e00,
- +5.42867735106695e00,
- +6.83485884749660e00,
- +3.07866903774297e00,
- +3.08390439293497e00,
- +1.66804622161148e01,
- +9.29638700321132e00,
- +9.38341913120739e00,
- +6.83484934277887e00,
- +2.12746848234302e01,
- ],
- [
- +1.11982907430142e02,
- +1.67260314058114e01,
- +2.97398273677055e01,
- +1.65326001720343e01,
- +2.37716272051766e01,
- +1.66840132375542e01,
- +1.65317288410165e01,
- +2.97456233348293e01,
- +3.72381957696309e01,
- +1.66525814300412e01,
- +1.66804622161148e01,
- +9.03985128506635e01,
- +5.02708240732723e01,
- +5.07336615572416e01,
- +3.72381448273806e01,
- +1.14240508062473e02,
- ],
- [
- +6.38123316167329e01,
- +9.32232615546217e00,
- +1.63311877211176e01,
- +9.21222013302869e00,
- +1.29661524144552e01,
- +9.29840833574711e00,
- +9.21172414905175e00,
- +1.63344547422583e01,
- +2.05886604700550e01,
- +9.28051654425321e00,
- +9.29638700321132e00,
- +5.02708240732723e01,
- +2.80315233611672e01,
- +2.82953233553807e01,
- +2.05886317918059e01,
- +6.43332290431569e01,
- ],
- [
- +6.45263838745924e01,
- +9.40964391592266e00,
- +1.64664919883409e01,
- +9.29832544621136e00,
- +1.30671606537876e01,
- +9.38546272191831e00,
- +9.29782400064014e00,
- +1.64697916829337e01,
- +2.07697483350778e01,
- +9.36737391255552e00,
- +9.38341913120739e00,
- +5.07336615572416e01,
- +2.82953233553807e01,
- +2.85620489152008e01,
- +2.07697193680983e01,
- +6.49891385224176e01,
- ],
- [
- +4.44924561012183e01,
- +6.85277268893649e00,
- +1.26306453073937e01,
- +6.77669200846439e00,
- +1.02476098345869e01,
- +6.83624603617842e00,
- +6.77634929516284e00,
- +1.26329518479543e01,
- +1.55817539255380e01,
- +6.82388322766317e00,
- +6.83484934277887e00,
- +3.72381448273806e01,
- +2.05886317918059e01,
- +2.07697193680983e01,
- +1.55817336371518e01,
- +4.59399560920098e01,
- ],
- [
- +1.60659400002538e02,
- +2.13390348985262e01,
- +3.55362026390315e01,
- +2.10658829179373e01,
- +2.75317129235774e01,
- +2.12796993628112e01,
- +2.10646524765623e01,
- +3.55438470017986e01,
- +4.59400237556194e01,
- +2.12353132504549e01,
- +2.12746848234302e01,
- +1.14240508062473e02,
- +6.43332290431569e01,
- +6.49891385224176e01,
- +4.59399560920098e01,
- +1.53594500003907e02,
- ],
- ],
- dtype=torch.double,
- ),
- "disp2": torch.tensor(
- [
- -2.8788632548321321e-03,
- -6.3435979775151754e-04,
- -9.6167619562274962e-04,
- -7.9723260613915258e-04,
- -7.9238263177385578e-04,
- -7.4485995467369389e-04,
- -1.0311812354479540e-03,
- -1.0804678845482093e-03,
- -2.1424517331896948e-03,
- -5.3905710617330410e-04,
- -7.3549132878459982e-04,
- -2.9718856310496566e-03,
- -1.9053629060228276e-03,
- -1.8362475794413465e-03,
- -1.7182276597931356e-03,
- -4.2417715940356341e-03,
- ],
- dtype=torch.double,
- ),
- "disp3": torch.tensor(
- [
- +7.0507158322062093e-06,
- +1.0788932851583596e-06,
- +5.0390835073232118e-06,
- +3.5279992165310452e-06,
- +2.0456532138274277e-06,
- +4.4882779886109463e-06,
- +3.4024596216497734e-06,
- +7.0699209168125984e-06,
- +1.3788482957103818e-06,
- +4.2106212983235953e-06,
- +3.6457830677850229e-06,
- +9.5860600613146586e-06,
- +2.7971201776362010e-06,
- +3.7713122361185403e-06,
- +4.6694314609246109e-06,
- +1.1117738755494003e-06,
- ],
- dtype=torch.double,
- ),
- "grad": torch.tensor(
- [
- [
- -5.22604735731951e-05,
- +7.57147038627570e-05,
- +3.67793033568259e-06,
- ],
- [
- +3.04279179089300e-05,
- +3.93865603948018e-06,
- -4.80630087807750e-05,
- ],
- [
- -5.65165374475145e-05,
- +1.03902867985891e-04,
- +2.63237102979009e-05,
- ],
- [
- +1.50668534907175e-05,
- +1.73612108984945e-05,
- -6.64630748967830e-05,
- ],
- [
- -4.37809939508503e-05,
- +2.89855745485217e-05,
- +4.78712582873749e-05,
- ],
- [
- -4.99927548251511e-05,
- -5.11044626786480e-05,
- +3.10795082433519e-05,
- ],
- [
- +4.62374054045158e-05,
- +1.12008826683273e-05,
- -3.52205497890963e-05,
- ],
- [
- +1.02760000394114e-04,
- -4.70879992429260e-05,
- +6.03786034525043e-05,
- ],
- [
- -6.16946875634895e-05,
- +2.06590347011126e-05,
- +4.55261050870207e-05,
- ],
- [
- +8.07830307796794e-05,
- -1.88063755972256e-05,
- +4.55153736736531e-05,
- ],
- [
- -3.24620330933132e-05,
- -3.08010755078516e-05,
- +5.96097648724992e-05,
- ],
- [
- +5.39286653929828e-05,
- -1.05567380476723e-04,
- -1.52019596084091e-04,
- ],
- [
- +6.80186584793621e-05,
- +5.42998426000167e-06,
- -1.06317383896321e-04,
- ],
- [
- -6.02912847795852e-05,
- +6.80699694022760e-05,
- +8.12417270007505e-06,
- ],
- [
- -6.21385767002360e-05,
- -5.48568033283525e-05,
- +7.06375030172422e-05,
- ],
- [
- +2.19148100830330e-05,
- -2.70387875351359e-05,
- +9.33968347976166e-06,
- ],
- ],
- dtype=torch.double,
- ),
- "hessian": torch.tensor(
- [
- -1.58037767579124e-05,
- -4.45458195710743e-06,
- +2.73415495058188e-07,
- +1.50839790266160e-06,
- -3.58852773410805e-07,
- -2.15253985381985e-06,
- -1.61412508999683e-06,
- -1.05835787609667e-06,
- -2.83859298628899e-06,
- +1.92972031881405e-06,
- -2.04766443035652e-06,
- -3.33319888288613e-06,
- +1.40539843353433e-07,
- +4.17693103172410e-05,
- -3.31669309766960e-06,
- -8.64300017162532e-06,
- -1.97186953986456e-05,
- +1.33972844486552e-06,
- -1.37650778335397e-06,
- -1.24848492833138e-06,
- +6.69003997809208e-07,
- -3.89074605832687e-07,
- +2.13482688025386e-06,
- -1.03124093584552e-06,
- +4.39708952677936e-06,
- -2.48751314195795e-06,
- +1.56775778848513e-07,
- +2.71925629352610e-06,
- -3.56729066393067e-07,
- +2.68196507557451e-06,
- +5.85060879209248e-06,
- +6.65899107075041e-06,
- +8.41467182177841e-05,
- +1.02409471768511e-06,
- -2.32668147985907e-07,
- -3.56889914492162e-06,
- -1.87932836515623e-06,
- +1.72975676378175e-07,
- -6.50795268979727e-07,
- +7.34350675768111e-07,
- +2.47490054193717e-06,
- +1.93844391137499e-06,
- -1.14329369911993e-07,
- +1.45170929043061e-07,
- +3.53626219900074e-07,
- -8.83753950286709e-05,
- +2.17501901634675e-07,
- +3.61363253380330e-06,
- -4.45458202270166e-06,
- -6.28616389684880e-05,
- +5.27522755261933e-07,
- -1.16115766310197e-06,
- +9.18864019838457e-08,
- +3.73885461300979e-07,
- -4.30050245699251e-07,
- -1.53398070558853e-06,
- +1.69343550812694e-06,
- -1.15413670611351e-06,
- +1.11769758047084e-07,
- -3.53623909424587e-06,
- +7.38058667847997e-05,
- +9.10197858110338e-07,
- +9.19226505690881e-07,
- -3.30200562735750e-06,
- +1.48165533349885e-06,
- +9.98685464463175e-06,
- -4.73288503657562e-07,
- +3.50827012992276e-07,
- -3.10928247124892e-08,
- +2.00172792058607e-07,
- +1.18017858881069e-07,
- +2.75916873780668e-08,
- -1.66372198492009e-07,
- -8.71615443428857e-08,
- +6.01480024369893e-08,
- -2.34539310654889e-06,
- -8.48006783063673e-06,
- -4.59402821977018e-07,
- +5.02527189071087e-07,
- -6.85015691089322e-07,
- +8.87509051248637e-07,
- +3.79002594187564e-05,
- -6.27629439051695e-07,
- -1.99161873652058e-08,
- -1.41608270994172e-07,
- -4.05755037486126e-07,
- +3.69329444719453e-07,
- -9.07994920841971e-08,
- -2.79936709172279e-06,
- +3.58672610011144e-07,
- -2.52106625912808e-07,
- +1.15442521861353e-06,
- -3.61623640187931e-05,
- -1.19767162827293e-06,
- -1.59163707956706e-06,
- +2.61865912725295e-06,
- +2.73415453248642e-07,
- +5.27522758616184e-07,
- +7.64231639773736e-06,
- -1.99793154551331e-07,
- -7.93716774807006e-07,
- -1.28852634020526e-06,
- +4.13537055009186e-08,
- +1.90013731162756e-06,
- -9.60815728872190e-07,
- +9.13122297162262e-07,
- -1.51736977884988e-06,
- -9.97002457195774e-08,
- +4.60347237760117e-07,
- +4.64118591106920e-07,
- -2.84966190516889e-06,
- -4.51274982531428e-06,
- -2.55594510061518e-07,
- -4.92120428884585e-07,
- +8.66921100451366e-06,
- -9.10797920983535e-07,
- -7.26007980133853e-07,
- +3.20728742501074e-07,
- -1.06649756361551e-06,
- -2.85423654072044e-06,
- +1.10607147347572e-06,
- -9.02600972817733e-07,
- -2.85705760705833e-07,
- -3.73689650864971e-06,
- -2.11748167943619e-06,
- +1.14323291705207e-07,
- +1.22243185151781e-07,
- +3.01536216885628e-06,
- -6.64318956294481e-07,
- -6.35545662355330e-07,
- +6.81555939979520e-06,
- -1.71549758651453e-06,
- +7.72655629120668e-07,
- -8.69894115778942e-09,
- -8.53239472517920e-07,
- +3.96678520346789e-07,
- +7.87884988235612e-08,
- +6.82156299830938e-08,
- +1.12805320335132e-07,
- -2.16941049624225e-06,
- -2.45329935999374e-06,
- -1.18242964120964e-07,
- -9.10094224916731e-09,
- +3.82651383410194e-07,
- +1.50839794268390e-06,
- -1.16115767596840e-06,
- -1.99793170865185e-07,
- +5.38904243126327e-06,
- +9.08387430488718e-08,
- +1.76734036307330e-07,
- -4.81973772145042e-07,
- -3.74646649491908e-07,
- -2.19190989367475e-06,
- +2.99313476632393e-07,
- -5.96023226704257e-08,
- -1.15283607587604e-06,
- +7.54730391003917e-07,
- -6.29211986536544e-07,
- -5.40625700877115e-07,
- -1.42729977296979e-06,
- -1.03824751320457e-06,
- +3.50402418974156e-07,
- -9.01199627347405e-07,
- +1.77190909602493e-06,
- -2.79555909772969e-08,
- +6.09471850259921e-07,
- +4.66356281747477e-08,
- -6.59726483037974e-07,
- -1.12732613517216e-07,
- -5.36529892847357e-07,
- +1.24556363012844e-07,
- +2.20763108992441e-07,
- -4.07446909993053e-07,
- -3.08204641211065e-07,
- +7.80723577217921e-07,
- +8.75812293815606e-08,
- -1.65140168344814e-06,
- -2.05540289242899e-08,
- -1.71886288773324e-06,
- +5.70216702367344e-06,
- -4.81436199398580e-07,
- -8.70503497911912e-07,
- -4.15797640526791e-07,
- +1.38275001454610e-06,
- +5.09863342850272e-07,
- +1.21272078293253e-06,
- -3.20818027125960e-07,
- -8.13758873382652e-08,
- +3.73648434783859e-07,
- -1.79236890506993e-06,
- -1.58002474229528e-06,
- -2.48006098833801e-07,
- -3.58852732109478e-07,
- +9.18864026275908e-08,
- -7.93716774976413e-07,
- +9.08387430996937e-08,
- -3.00729426068816e-07,
- -1.26757236400732e-06,
- +1.12991800432175e-06,
- +3.39944551654636e-06,
- -1.57684091248145e-06,
- +9.58613823357716e-07,
- -8.51475747277574e-07,
- +6.45167083785628e-07,
- +8.35820069636250e-07,
- +4.17047233641050e-06,
- -1.88512807421722e-06,
- -4.28784594874886e-06,
- +1.14850796828123e-06,
- -2.78202322408840e-08,
- -5.17800473790017e-07,
- -1.93147106584946e-08,
- -2.55840080682986e-06,
- +2.83348176431002e-07,
- -1.79681605398058e-07,
- -2.17478744302774e-06,
- +3.35361500092138e-07,
- -4.19361935822605e-07,
- +7.94962713855012e-07,
- -1.60680908716535e-06,
- -3.15758129676942e-07,
- +3.36475595907253e-06,
- +9.45116031606224e-07,
- +9.47682075636046e-07,
- -5.40859819302488e-06,
- -1.48418927836810e-07,
- +7.04363226409046e-07,
- -4.81240475393814e-07,
- +1.13601598539299e-05,
- +1.70202203974559e-07,
- -1.44459915140142e-06,
- -1.30412252418360e-06,
- +1.62925935949905e-06,
- -3.43035941042765e-07,
- +9.81975704431579e-08,
- -2.52950123547828e-06,
- -1.72197411650663e-06,
- -3.20828958740901e-06,
- +8.69506465458569e-07,
- +1.75809147262855e-06,
- -2.15253988478738e-06,
- +3.73885463283036e-07,
- -1.28852634159439e-06,
- +1.76734050291844e-07,
- -1.26757236370239e-06,
- -4.97944826004075e-07,
- +1.27398049886720e-06,
- +1.00750934030618e-06,
- -7.90980814578831e-07,
- +1.97844348319247e-06,
- -2.05690562916612e-07,
- -6.11566606387783e-06,
- +1.34505665035556e-06,
- -1.29441153213621e-06,
- +1.22894993292798e-05,
- -4.83177642605462e-06,
- -3.91988383755242e-07,
- +2.10099265751414e-07,
- +2.79725418311305e-07,
- +6.09249245444346e-07,
- +2.48049895985240e-07,
- -8.09157258733974e-06,
- +1.34612685973010e-06,
- +4.04360014811817e-07,
- -1.02743429315796e-06,
- +4.28151831225804e-07,
- -1.01504075496639e-07,
- +2.15396305663657e-08,
- +1.55779135027015e-06,
- -4.09257880338329e-07,
- +6.62069415370765e-06,
- -1.70403044522500e-06,
- +2.25719649035427e-07,
- -4.06977312161533e-07,
- -7.36707456445843e-09,
- -8.57538632795642e-07,
- +1.53570009051756e-07,
- -5.32154730515728e-06,
- -1.01377874949909e-06,
- +2.61979183328485e-07,
- +1.17120306508678e-07,
- +7.88690204521513e-08,
- -2.47574266588804e-07,
- +3.16781666328224e-06,
- -2.43250561098756e-06,
- -6.67026769020661e-08,
- +5.42933503541634e-06,
- +9.19581761287008e-07,
- -1.61412499465481e-06,
- -4.30050248545282e-07,
- +4.13537001815517e-08,
- -4.81973805391086e-07,
- +1.12991800293262e-06,
- +1.27398050672766e-06,
- +8.41274715319816e-06,
- -9.98467174147559e-07,
- -3.91503096739863e-06,
- -1.45869558111340e-06,
- +5.28661500698042e-07,
- -1.61342572762164e-06,
- +6.64320077558580e-07,
- -2.85058631914985e-06,
- -1.84531690078924e-06,
- +3.15669077784523e-06,
- +8.29077189998359e-07,
- +1.21205073823958e-07,
- -1.02786661953656e-06,
- +4.30911713138248e-08,
- -1.89396527754555e-07,
- +1.35018301328563e-06,
- -6.85166979492412e-06,
- +3.87275777175315e-07,
- -4.87149225547199e-07,
- +6.65524885262649e-07,
- +7.78371941350547e-07,
- -1.31692528362621e-06,
- +1.19096287142508e-06,
- +1.93723238134640e-07,
- +5.57046856219739e-08,
- +4.25788760396702e-06,
- -7.56141634274463e-07,
- +3.75096227373613e-07,
- -8.51783828712015e-07,
- -4.23455520594165e-07,
- -1.42149787587024e-06,
- -1.02596443344563e-06,
- +5.44436496475737e-06,
- -4.88749686406689e-07,
- +1.73187083696015e-06,
- -8.80890370683843e-07,
- -1.40637529969173e-06,
- -5.01047874522896e-07,
- +3.24999872385035e-06,
- +1.03815977020505e-06,
- -1.26978961470948e-07,
- -3.95660503651677e-06,
- -1.05835790533624e-06,
- -1.53398070223428e-06,
- +1.90013731328774e-06,
- -3.74646631797390e-07,
- +3.39944551922298e-06,
- +1.00750934715020e-06,
- -9.98467171301528e-07,
- -5.40788343969469e-06,
- -7.01295463839463e-06,
- +3.02925178698014e-05,
- -4.04679362875436e-06,
- -4.92214842889432e-06,
- -2.27890119284862e-06,
- -2.80125040154897e-07,
- -7.70440143038858e-06,
- -9.80949839518813e-07,
- -8.13161924134807e-07,
- +7.15841391442747e-08,
- -2.84918952200928e-06,
- -6.71057467728405e-07,
- -2.21166634770229e-06,
- +3.78666940671949e-07,
- +3.79556320785762e-07,
- -1.78880123787148e-06,
- +2.13788940919829e-06,
- +7.92081850794224e-06,
- -3.39155594342340e-08,
- -7.80936548744728e-07,
- +3.48447396191936e-07,
- -1.65097498064197e-06,
- -2.85908114505240e-07,
- -1.51351010937566e-07,
- +7.90209516189938e-08,
- -2.27701700810518e-07,
- +3.99128224316602e-07,
- +1.39585403904408e-06,
- -1.25488383258195e-06,
- +2.69691699121598e-07,
- -4.78035062299572e-07,
- +1.17108617458749e-05,
- -1.09934801093312e-06,
- -7.38203103781479e-06,
- +8.27077948805570e-07,
- -3.83407514829674e-06,
- -9.27455256691245e-07,
- +7.19292799845531e-07,
- -5.33332693329960e-08,
- -1.44064089830357e-07,
- -2.83859298872845e-06,
- +1.69343551968047e-06,
- -9.60815727787988e-07,
- -2.19190990975990e-06,
- -1.57684091393835e-06,
- -7.90980817018286e-07,
- -3.91503099291126e-06,
- -7.01295464672943e-06,
- +1.73165237012075e-05,
- +9.96402859716359e-07,
- -3.06096459485629e-07,
- -5.75738821648555e-06,
- -4.87786295947160e-06,
- +1.21435078803694e-05,
- -3.70092403581936e-07,
- -1.55130392471114e-06,
- -2.23353423890781e-06,
- -1.54104805740908e-07,
- +8.81368314987444e-07,
- -8.89117609323821e-08,
- +1.05109830920698e-06,
- -1.12694195551847e-06,
- -4.57619969580758e-07,
- +2.19159509509963e-06,
- +7.45714828567914e-06,
- -8.38431932168466e-07,
- -1.99976691598518e-07,
- +2.87415985061629e-06,
- +3.76206669271551e-08,
- -1.15895490108303e-05,
- +9.08359290292381e-07,
- +1.28772055229070e-06,
- +1.02390754856001e-05,
- -2.85708745134943e-06,
- +1.41186695886179e-07,
- +4.95488660883880e-07,
- +1.33614917188844e-06,
- +1.52254199340133e-07,
- +1.73852010895416e-06,
- -1.08397622925897e-06,
- -1.08371441083288e-06,
- +2.82254249642656e-07,
- +2.37089442825208e-07,
- +3.54500423699160e-06,
- +3.23230452013636e-06,
- -1.70299033651641e-05,
- +4.09881412258327e-08,
- +6.14366543292291e-07,
- +1.92972031424007e-06,
- -1.15413670819721e-06,
- +9.13122294180706e-07,
- +2.99313474184468e-07,
- +9.58613820240635e-07,
- +1.97844348329412e-06,
- -1.45869559121004e-06,
- +3.02925181074450e-05,
- +9.96402855650601e-07,
- -2.61095426936443e-05,
- +2.33512276436856e-06,
- -5.14085336542698e-06,
- -2.56625819742727e-06,
- +3.15240531673300e-06,
- +1.26136127523511e-06,
- -7.68753714964102e-06,
- -2.22980284200248e-06,
- -2.72611395212534e-08,
- -8.68497536325838e-07,
- -5.14170421225867e-07,
- -4.18038842881245e-07,
- +4.19099985191605e-07,
- +6.95662416730456e-07,
- +8.31719749194319e-06,
- -8.02933391907327e-07,
- -3.13729379362295e-06,
- +2.25342858623998e-07,
- -6.65722255357363e-07,
- +4.52557802100382e-07,
- -2.05029217006394e-06,
- +2.68351451933465e-07,
- +3.35800386605398e-07,
- +1.48358124472429e-06,
- +1.01373435335136e-07,
- +6.43208276828659e-08,
- +1.16290018664178e-06,
- -3.14801990220935e-07,
- +7.34930322030198e-08,
- -9.13456240952037e-07,
- -8.15855407239547e-06,
- +2.61288524691162e-07,
- +1.31040301788344e-05,
- -9.33995928863141e-07,
- -2.06589359809102e-06,
- -1.37912968459881e-06,
- +1.23494857147473e-07,
- -2.02364296516709e-07,
- -2.40628647545686e-06,
- -2.04766446881182e-06,
- +1.11769759131286e-07,
- -1.51736977949363e-06,
- -5.96023079320524e-08,
- -8.51475747379218e-07,
- -2.05690564034696e-07,
- +5.28661506085171e-07,
- -4.04679363342998e-06,
- -3.06096460705356e-07,
- +2.33512277100930e-06,
- +1.00668546393188e-05,
- -1.74072351656298e-06,
- +3.54272203952999e-07,
- -7.89057548311177e-07,
- +1.13244280123517e-06,
- -2.96464965425820e-06,
- +3.14308301607724e-08,
- +6.42481842780721e-08,
- -3.07153919676005e-07,
- +1.25339725272724e-07,
- +8.09977454997447e-07,
- -1.05016313875462e-07,
- +7.75222659850700e-07,
- -1.91089980685201e-08,
- -1.83258517563023e-07,
- +2.26437393565060e-07,
- -1.34068036112113e-06,
- -4.20672323405281e-07,
- +6.13994356824685e-07,
- +4.93654582814883e-07,
- -1.69962595599372e-07,
- -5.94452158159087e-07,
- +2.33210947518577e-09,
- -2.61280305422255e-07,
- +1.28301466317333e-07,
- -3.21786846978591e-07,
- +8.47638712014485e-08,
- -2.69391770955257e-07,
- -1.42085519795631e-06,
- +7.86450629859453e-07,
- +2.31026332369070e-07,
- -9.88023219249329e-07,
- +6.53439492503949e-06,
- -2.90748965349604e-06,
- -2.05173636726988e-06,
- -2.45629103553846e-07,
- -2.14808943331220e-06,
- +2.84701256343622e-06,
- -3.33319881265016e-06,
- -3.53623910847602e-06,
- -9.97002454146455e-08,
- -1.15283610706380e-06,
- +6.45167085073118e-07,
- -6.11566605896504e-06,
- -1.61342572843479e-06,
- -4.92214843543341e-06,
- -5.75738821804409e-06,
- -5.14085335356852e-06,
- -1.74072351615640e-06,
- +4.29459979923202e-05,
- -8.15992815716071e-07,
- -2.99237429753502e-06,
- -6.27461458238923e-06,
- -9.60037976308233e-08,
- +3.27632676211062e-06,
- -2.74467703195787e-06,
- -3.71857091065262e-06,
- +1.96494607101338e-07,
- -1.65886808096258e-06,
- -1.50233764908666e-08,
- -1.34880532418344e-06,
- -7.80347130263529e-07,
- +2.87083615045517e-06,
- -6.17956676735238e-07,
- -4.33830986678327e-07,
- +7.28717331662247e-06,
- -3.66823384225645e-07,
- -2.56412173591446e-06,
- +7.49375223942713e-07,
- -1.31181422282066e-07,
- -4.30488429863379e-07,
- +1.15340868732622e-06,
- -2.17534686583611e-06,
- -6.90392864134259e-08,
- -2.51827930195994e-06,
- +3.09780246393550e-06,
- -5.29874968399201e-07,
- -3.89129478327852e-06,
- +3.55728563051122e-06,
- -1.92900412660570e-06,
- -2.89592602099143e-06,
- +3.25348266380708e-06,
- +2.37239873267779e-07,
- -1.68510309211841e-06,
- +4.85950763258317e-06,
- -3.43700760940032e-08,
- +1.40540016148154e-07,
- +7.38058668571702e-05,
- +4.60347225224029e-07,
- +7.54730445527428e-07,
- +8.35820067976065e-07,
- +1.34505664381646e-06,
- +6.64320048047952e-07,
- -2.27890120931495e-06,
- -4.87786291949164e-06,
- -2.56625819214178e-06,
- +3.54272187825491e-07,
- -8.15992825982110e-07,
- -6.28336488906151e-05,
- -2.09063281694322e-06,
- -2.41284661975578e-06,
- -4.84810017821566e-07,
- -1.42717239935697e-07,
- -8.12237674146198e-06,
- -2.10955583581610e-06,
- -2.85262320035229e-07,
- -3.39620059370193e-07,
- +1.56227981286129e-06,
- +6.06762951616327e-07,
- +4.22931507218884e-07,
- +3.93368477168924e-09,
- +4.41072803111689e-07,
- +6.16252799353169e-07,
- -7.41155855899000e-07,
- +8.75892584881404e-06,
- -3.61694349804479e-07,
- +2.67168622837023e-07,
- -5.76945628062388e-07,
- -1.70892667003811e-06,
- -3.56365426719139e-05,
- -2.46609490356267e-06,
- +2.48126472039494e-06,
- -1.70921043011790e-06,
- -2.47742120495711e-06,
- +2.52268434244027e-07,
- -1.14605818890854e-06,
- +3.18655912769729e-06,
- -1.41223753269301e-06,
- -2.05144690612684e-06,
- +2.32819690582737e-07,
- +4.30391907939201e-05,
- -3.24565253637116e-07,
- -7.99263429149700e-07,
- +5.39664424505096e-07,
- +4.17693102894244e-05,
- +9.10197865377880e-07,
- +4.64118599983825e-07,
- -6.29211957263085e-07,
- +4.17047226017753e-06,
- -1.29441152888361e-06,
- -2.85058632751853e-06,
- -2.80125046626229e-07,
- +1.21435081009368e-05,
- +3.15240532164579e-06,
- -7.89057544076013e-07,
- -2.99237431122308e-06,
- -2.09063280101900e-06,
- -5.18609975264726e-05,
- +1.43573524962848e-06,
- -1.25835064409253e-06,
- +2.66742072905986e-05,
- -4.60658159274855e-07,
- +1.75011237117239e-06,
- -2.84862643320843e-07,
- +1.62305946855323e-06,
- -2.99490068568454e-07,
- +2.02034141827178e-07,
- -1.45786593517758e-06,
- -1.78333023319327e-05,
- -2.05939139027818e-06,
- +3.60858538349710e-07,
- -2.12645527617285e-06,
- -2.61901550078010e-07,
- -2.67885990056253e-06,
- -5.12357338996214e-06,
- +1.97608883492018e-06,
- -1.20312260426623e-04,
- -1.31823726898407e-06,
- -8.08332930535364e-07,
- -1.87995292300254e-06,
- -5.30363879255310e-06,
- -4.62595519412656e-08,
- +6.07550388951716e-07,
- -4.49143803068651e-08,
- -1.03128629026872e-05,
- -2.83319361115526e-07,
- -1.30398359278927e-07,
- -2.09962513670367e-06,
- -7.36445507508910e-07,
- +1.41872356172136e-04,
- -1.25199160121246e-07,
- +9.21540140339872e-07,
- -3.31669319013171e-06,
- +9.19226512179153e-07,
- -2.84966190659190e-06,
- -5.40625669579248e-07,
- -1.88512807804581e-06,
- +1.22894993156595e-05,
- -1.84531685640471e-06,
- -7.70440143018530e-06,
- -3.70092408054270e-07,
- +1.26136129373431e-06,
- +1.13244279967663e-06,
- -6.27461457937379e-06,
- -2.41284657510020e-06,
- +1.43573524454628e-06,
- +1.15322763729990e-05,
- -1.37116084482822e-06,
- -1.19864671228149e-06,
- +5.41024809096868e-07,
- +4.46675228543084e-08,
- +8.15690536423437e-07,
- +1.67296579387866e-06,
- +6.58962003686976e-06,
- +1.01393639434150e-07,
- -2.25939747427572e-07,
- +2.21085806556369e-07,
- +3.05530762399287e-07,
- -1.49200294385422e-07,
- +7.90079071976801e-07,
- +3.10501502892459e-07,
- +3.85931646496973e-06,
- -1.39981980427724e-05,
- +3.20108849214388e-07,
- +5.14572421622466e-06,
- -1.62981439565932e-06,
- +2.96557822922075e-08,
- -1.59920091851297e-06,
- +6.41233507468037e-07,
- +5.39202489207208e-06,
- -1.40968775111979e-07,
- -4.74709817150352e-08,
- +2.47610811588144e-07,
- -2.13562919383408e-07,
- -2.02995515382676e-06,
- +4.81317330853744e-06,
- -7.94809432096252e-07,
- -5.18262572016387e-06,
- -5.25054796691950e-06,
- +6.19533745424637e-07,
- -8.64300020740399e-06,
- -3.30200562444370e-06,
- -4.51274982941392e-06,
- -1.42729975478399e-06,
- -4.28784596817979e-06,
- -4.83177644765396e-06,
- +3.15669072478708e-06,
- -9.80949584189201e-07,
- -1.55130390656769e-06,
- -7.68753726265216e-06,
- -2.96464965874747e-06,
- -9.60037789622171e-08,
- -4.84810014196265e-07,
- -1.25835054239776e-06,
- -1.37116081069279e-06,
- +4.02427526654555e-05,
- -5.41881661341326e-06,
- -6.56128427609274e-07,
- +2.52454612668593e-06,
- +1.10260748031481e-07,
- +2.08689209198591e-06,
- -1.68645244240351e-06,
- +3.49803472149876e-06,
- -7.58897005499293e-07,
- +2.08376633874894e-06,
- +1.09870497336901e-06,
- -5.72455263937342e-07,
- -1.72005087581938e-07,
- -9.64593103581398e-07,
- +1.13646190951676e-05,
- +2.50677002616842e-06,
- -1.50442461777317e-05,
- +2.87224551341210e-05,
- +2.58792717002632e-06,
- +6.05570447709562e-07,
- -3.01146970722995e-07,
- +6.76153179907308e-07,
- +1.09709756277359e-06,
- -5.50632861789219e-07,
- +9.82869454295193e-07,
- +1.26971892408409e-06,
- -3.61110170354195e-06,
- +2.57336075022610e-06,
- -1.23473675403031e-07,
- -1.82628905122145e-06,
- -2.45268064455380e-05,
- +4.69700117965459e-07,
- -8.04540203237097e-06,
- -1.97186954528557e-05,
- +1.48165526870083e-06,
- -2.55594563797288e-07,
- -1.03824744760186e-06,
- +1.14850792470986e-06,
- -3.91988418957932e-07,
- +8.29077319696044e-07,
- -8.13161968858146e-07,
- -2.23353411659625e-06,
- -2.22980283847882e-06,
- +3.14307767976967e-08,
- +3.27632686355128e-06,
- -1.42717045592457e-07,
- +2.66742072151788e-05,
- -1.19864686291783e-06,
- -5.41881665251231e-06,
- -1.03654002603913e-06,
- -2.72246879870016e-06,
- -5.70908399155352e-07,
- +1.06909920195183e-06,
- -9.79054938842963e-07,
- -3.45479813028106e-06,
- -1.70408839163831e-06,
- -1.38978786195598e-06,
- +2.89807408780429e-06,
- +2.55604144298963e-07,
- -9.33174274683171e-07,
- -9.77654345880093e-06,
- -2.56142604426788e-07,
- +1.44153166709910e-05,
- -7.02991006471540e-07,
- +4.88840378612293e-06,
- +7.30120269068849e-05,
- -6.34968493224157e-07,
- +1.20584208600000e-06,
- +2.67423863225581e-06,
- -1.83086648526822e-06,
- +7.36485264795999e-07,
- +2.15349661321080e-07,
- -7.55458777234100e-07,
- +2.92204709684767e-06,
- -1.38834950944678e-06,
- -1.13982428401641e-06,
- -1.29060915098999e-06,
- -7.94905190797243e-07,
- -7.92763189570889e-05,
- +3.24970708376587e-06,
- +3.09560411654386e-06,
- +1.33972843845771e-06,
- +9.98685388486437e-06,
- -4.92120428677062e-07,
- +3.50402417856072e-07,
- -2.78202321815917e-08,
- +2.10099265120374e-07,
- +1.21205094165454e-07,
- +7.15841413380901e-08,
- -1.54104806443945e-07,
- -2.72611373443787e-08,
- +6.42481835962105e-08,
- -2.74467703573141e-06,
- -8.12237598645069e-06,
- -4.60658162074299e-07,
- +5.41024806471066e-07,
- -6.56128443338676e-07,
- -2.72246878564738e-06,
- +7.64432690002143e-06,
- +9.70177161941895e-08,
- -3.82576701099687e-07,
- -3.22016017163481e-08,
- -3.90742347400092e-08,
- -3.02926455231553e-07,
- -1.29664388214934e-06,
- -1.16177050306907e-06,
- -1.10632576306641e-06,
- -5.28963812584504e-08,
- -3.71301441822553e-06,
- +5.63994939499811e-06,
- -2.50886664010042e-07,
- -3.10019894955368e-07,
- -2.01048872582854e-06,
- -4.96345715069463e-07,
- -4.80928053925181e-06,
- +4.14630003338953e-07,
- +8.70725299045687e-09,
- +2.94881146613974e-08,
- -1.71361528279409e-07,
- -2.28216641853069e-07,
- -8.70917430885367e-07,
- +1.50290567615735e-07,
- -3.44848027290421e-07,
- -8.73348867386887e-08,
- +1.29172158131472e-06,
- +4.24674812256700e-06,
- +4.16950812992282e-07,
- -6.04129251778520e-07,
- +1.05389756621678e-06,
- -1.37650787839107e-06,
- -4.73288505622679e-07,
- +8.66921099729694e-06,
- -9.01199626703660e-07,
- -5.17800467928549e-07,
- +2.79725419598796e-07,
- -1.02786662505922e-06,
- -2.84918952024745e-06,
- +8.81368307601316e-07,
- -8.68497538765293e-07,
- -3.07153919032260e-07,
- -3.71857090645134e-06,
- -2.10955583656149e-06,
- +1.75011250408880e-06,
- +4.46675115379483e-08,
- +2.52454608435122e-06,
- -5.70908489415183e-07,
- +9.70177163297148e-08,
- -1.97037126916625e-06,
- -4.28077719976440e-07,
- -2.08450290594520e-06,
- +3.21026728339552e-06,
- -8.86157286669764e-07,
- +1.96680987987107e-06,
- -4.18324708696382e-06,
- +4.24064501995848e-07,
- +1.10658737188573e-06,
- +5.44710667982904e-06,
- -3.66148191689961e-07,
- +7.96305555809994e-06,
- -3.18975100171526e-06,
- -6.53574503983959e-06,
- +5.30716958354539e-06,
- +4.03740673265453e-07,
- -5.68137304854334e-06,
- +1.50077111254045e-06,
- +1.53038599723593e-06,
- -1.19161886101234e-07,
- +5.41944276659530e-07,
- -4.04384484848275e-07,
- +2.23679742398586e-08,
- -5.24745434008368e-08,
- -1.04611488677982e-06,
- +4.29699348152013e-06,
- +2.01258076312294e-06,
- -8.03667041668854e-06,
- +9.74833123159593e-08,
- -3.73257908371585e-07,
- -1.24848493207526e-06,
- +3.50827024782975e-07,
- -9.10797907363245e-07,
- +1.77190907118993e-06,
- -1.93147104213254e-08,
- +6.09249256879290e-07,
- +4.30911550677328e-08,
- -6.71057453040854e-07,
- -8.89117748067818e-08,
- -5.14170419294632e-07,
- +1.25339736555203e-07,
- +1.96494581741172e-07,
- -2.85262315207141e-07,
- -2.84862616419077e-07,
- +8.15690558378531e-07,
- +1.10260743779375e-07,
- +1.06909921589399e-06,
- -3.82576702222006e-07,
- -4.28077719823975e-07,
- +1.13347653741336e-05,
- -1.56095413474149e-07,
- +2.52463796904538e-07,
- -2.49831547695356e-07,
- -1.60296646290782e-07,
- -2.01235498738871e-06,
- -1.48742714295474e-07,
- -1.85336843799584e-08,
- -5.32706960757379e-06,
- +3.54750870263248e-07,
- -1.51580456811352e-06,
- -7.79728028088675e-08,
- -2.53372286339139e-06,
- +1.31047359681610e-06,
- +8.41458806387536e-09,
- +1.51974554660122e-06,
- -1.34210916380819e-06,
- +3.67911452510612e-08,
- -9.25900911320841e-07,
- +7.07093538316597e-08,
- -1.64180172715621e-06,
- +2.06799337176084e-07,
- -7.86316853812572e-07,
- -3.78182125270962e-07,
- +1.52472649726765e-06,
- -1.73040677517141e-07,
- +5.22422660458329e-07,
- -2.10010389686940e-06,
- +2.14737263060486e-06,
- +6.69004013055801e-07,
- -3.10928252884716e-08,
- -7.26007981353581e-07,
- -2.79555905368398e-08,
- -2.55840077913189e-06,
- +2.48049897577662e-07,
- -1.89396529939900e-07,
- -2.21166634702466e-06,
- +1.05109830819054e-06,
- -4.18038844575311e-07,
- +8.09977455759777e-07,
- -1.65886808189432e-06,
- -3.39620059844532e-07,
- +1.62305946941721e-06,
- +1.67296579469181e-06,
- +2.08689210059177e-06,
- -9.79054923748836e-07,
- -3.22016014876492e-08,
- -2.08450291241653e-06,
- -1.56095413982369e-07,
- +1.51397438025679e-05,
- -3.27993359435072e-06,
- -4.28393159916437e-07,
- +4.96199402502976e-08,
- -1.35436833594360e-06,
- -3.65358605830090e-08,
- -2.08559267156561e-06,
- +5.91993252717902e-07,
- -4.29928488101215e-07,
- +3.31772706685808e-06,
- -4.51216063451554e-06,
- -3.72031646564712e-06,
- -3.02682631358041e-06,
- +3.46862988703203e-08,
- +1.92665131490413e-06,
- +5.33353611655626e-08,
- +4.96889929318256e-06,
- -1.25417513149910e-06,
- +1.70064438131446e-07,
- +6.43715897473667e-07,
- -1.41578773922205e-06,
- +1.22380077703925e-07,
- -1.89040649555331e-07,
- +2.50997962133537e-06,
- +4.30445551909977e-07,
- -1.83385954653343e-06,
- -1.68960378525121e-06,
- -1.45086468894226e-06,
- -3.89074635987060e-07,
- +2.00172792922580e-07,
- +3.20728741145821e-07,
- +6.09471866802475e-07,
- +2.83348175956664e-07,
- -8.09157229802717e-06,
- +1.35018300664489e-06,
- +3.78666940671949e-07,
- -1.12694196368387e-06,
- +4.19099981566304e-07,
- -1.05016313367242e-07,
- -1.50233756777149e-08,
- +1.56227982021353e-06,
- -2.99490074328278e-07,
- +6.58961974911573e-06,
- -1.68645241365521e-06,
- -3.45479809155471e-06,
- -3.90742346849520e-08,
- +3.21026728393762e-06,
- +2.52463783335070e-07,
- -3.27993359531634e-06,
- +1.29876788916412e-05,
- +2.12870800444343e-07,
- -1.53071126869582e-06,
- -2.45673053950486e-06,
- -3.13929113976587e-07,
- -1.59158676364159e-07,
- -3.12187240536260e-06,
- -4.59662090982175e-07,
- -5.51606104861452e-06,
- +4.77236551248454e-06,
- -1.10371520676064e-06,
- +9.93590250841512e-07,
- -1.63713486804650e-07,
- -1.13025133143766e-07,
- -9.12418465028085e-07,
- -1.28314303491283e-06,
- +7.93048861138840e-07,
- -6.91401465160307e-07,
- +3.81429495938958e-08,
- +2.67491888184077e-07,
- +1.39042035467141e-08,
- -2.02414293856148e-07,
- +1.67831678730360e-08,
- -1.40517746527111e-06,
- -8.55419255505399e-07,
- +3.14717981859144e-06,
- +3.56571459506202e-07,
- +2.13482703977558e-06,
- +1.18017861007122e-07,
- -1.06649756788455e-06,
- +4.66356100821240e-08,
- -1.79681609048770e-07,
- +1.34612687614560e-06,
- -6.85166991787095e-06,
- +3.79556326960632e-07,
- -4.57619937088575e-07,
- +6.95662435128011e-07,
- +7.75222656318572e-07,
- -1.34880533676188e-06,
- +6.06763013178682e-07,
- +2.02034150619380e-07,
- +1.01393580624652e-07,
- +3.49803481894143e-06,
- -1.70408848673470e-06,
- -3.02926485627331e-07,
- -8.86157277725096e-07,
- -2.49831556640024e-07,
- -4.28393156926411e-07,
- +2.12870817435824e-07,
- +8.90150170091328e-06,
- -2.06373772146498e-06,
- -1.18617997451906e-06,
- -1.40427656204485e-06,
- -1.63632013246552e-07,
- -4.52918584581408e-06,
- +2.02585357161575e-06,
- -2.02770500267087e-06,
- -1.92591197709698e-06,
- +5.73179996992350e-06,
- +4.75995062393816e-07,
- -2.27445529349792e-07,
- +5.61437183314260e-07,
- +7.85667419387877e-08,
- +1.58452172755830e-07,
- -6.97605646582118e-07,
- -3.35204258352707e-07,
- +1.49431482060491e-06,
- +2.87453109546702e-07,
- +8.65109063013137e-07,
- -9.11125319681965e-07,
- -1.30567979320681e-06,
- +4.84954776856783e-06,
- -2.56640941874450e-07,
- -8.33670480061442e-08,
- -4.95380738493306e-06,
- -1.03124094140205e-06,
- +2.75916932564754e-08,
- -2.85423654061879e-06,
- -6.59726482140120e-07,
- -2.17478744319715e-06,
- +4.04360017081866e-07,
- +3.87275785747288e-07,
- -1.78880048462202e-06,
- +2.19159509679369e-06,
- +8.31719663450868e-06,
- -1.91089979329948e-08,
- -7.80347130466816e-07,
- +4.22931507117240e-07,
- -1.45786593876900e-06,
- -2.25939748274605e-07,
- -7.58897027352743e-07,
- -1.38978785348565e-06,
- -1.29664388217051e-06,
- +1.96680988044705e-06,
- -1.60296646849824e-07,
- +4.96199398945438e-08,
- -1.53071126835700e-06,
- -2.06373770770070e-06,
- +1.42072706298284e-05,
- -3.08168849891477e-06,
- +3.98735823706713e-06,
- -1.54028535155533e-06,
- -3.41454150639352e-06,
- -2.77463003262250e-06,
- -1.37991926763806e-06,
- -1.71302580688406e-06,
- +1.34209135673618e-07,
- -2.79623067260883e-07,
- -8.17201939730633e-07,
- -4.07557766476111e-07,
- -1.62945463959079e-06,
- +6.08899581553161e-07,
- +4.19793537835577e-08,
- +1.49234305387444e-06,
- +3.30322323398351e-06,
- +4.95708017107875e-07,
- -2.14578711976677e-06,
- +6.28212048082565e-09,
- -1.62452932142153e-06,
- +1.70938588804626e-06,
- -4.86910646274691e-07,
- +5.33434990515440e-10,
- -2.67290183322022e-07,
- +4.39708978039797e-06,
- -1.66372192308669e-07,
- +1.10607148714684e-06,
- -1.12732644560974e-07,
- +3.35361489995506e-07,
- -1.02743429974787e-06,
- -4.87149241810232e-07,
- +2.13788941898999e-06,
- +7.45714854510839e-06,
- -8.02933395430984e-07,
- -1.83258511616852e-07,
- +2.87083612694153e-06,
- +3.93365170352298e-09,
- -1.78333024817898e-05,
- +2.21085846909019e-07,
- +2.08376641952200e-06,
- +2.89807420479648e-06,
- -1.16177050374670e-06,
- -4.18324710219347e-06,
- -2.01235496936385e-06,
- -1.35436834048370e-06,
- -2.45673053088206e-06,
- -1.18617997020767e-06,
- -3.08168849671249e-06,
- +7.80553222488588e-06,
- -1.66726367868331e-06,
- -1.40156213593529e-06,
- -7.23125526357049e-06,
- -2.20382150993411e-06,
- +2.15470895057021e-05,
- -2.18153043311873e-06,
- -2.12892300044675e-06,
- -1.69924383120405e-05,
- +1.65688567399058e-07,
- -2.40098852038057e-07,
- +2.57726759042304e-07,
- -2.18404617172629e-07,
- +3.12384883179072e-07,
- +3.06994964117870e-07,
- +4.77820269651928e-07,
- -9.15598820665133e-06,
- -7.57010038286493e-08,
- +1.27178647374069e-06,
- -4.58914808416791e-06,
- -1.43991465479569e-08,
- +2.92158063049103e-05,
- +1.57098600824641e-07,
- -8.79128599946613e-07,
- -2.48751313299634e-06,
- -8.71615405820595e-08,
- -9.02600972614446e-07,
- -5.36529892694891e-07,
- -4.19361935653198e-07,
- +4.28151831310508e-07,
- +6.65524867068381e-07,
- +7.92081774850944e-06,
- -8.38431935031437e-07,
- -3.13729293039474e-06,
- +2.26437394293508e-07,
- -6.17956675718798e-07,
- +4.41072789728568e-07,
- -2.05939138961749e-06,
- +3.05530765889062e-07,
- +1.09870500850394e-06,
- +2.55604128493328e-07,
- -1.10632576288430e-06,
- +4.24064502317720e-07,
- -1.48742714363236e-07,
- -3.65358605321870e-08,
- -3.13929113824121e-07,
- -1.40427658928543e-06,
- +3.98735823970987e-06,
- -1.66726367937788e-06,
- +7.81044613674007e-06,
- -7.70906117330346e-07,
- -3.58454973026863e-06,
- -2.07405380698624e-06,
- +8.49053598560084e-08,
- -1.85429878588305e-06,
- +3.98503793880994e-07,
- -1.20318157311402e-06,
- -3.23303555253135e-07,
- -5.14489840806171e-08,
- -7.86713621561707e-07,
- +1.12287948172496e-07,
- +1.32396994910922e-08,
- +8.57538996308302e-07,
- -2.25968046107161e-06,
- -7.31383501329018e-08,
- +3.48453910846847e-06,
- -6.79166345016712e-08,
- -7.17397592069416e-07,
- +7.51494568071616e-07,
- -6.58813465512621e-08,
- -1.83389229095523e-08,
- +3.47900720716967e-07,
- +1.56775770107133e-07,
- +6.01480028266245e-08,
- -2.85705759791037e-07,
- +1.24556364757732e-07,
- +7.94962714634282e-07,
- -1.01504076174266e-07,
- +7.78371954394854e-07,
- -3.39155592987087e-08,
- -1.99976693733041e-07,
- +2.25342862520349e-07,
- -1.34068036545794e-06,
- -4.33830989016138e-07,
- +6.16252807721855e-07,
- +3.60858533131987e-07,
- -1.49200294724235e-07,
- -5.72455271560638e-07,
- -9.33174256929361e-07,
- -5.28963809365778e-08,
- +1.10658737178409e-06,
- -1.85336854302792e-08,
- -2.08559267170114e-06,
- -1.59158676601328e-07,
- -1.63631992841529e-07,
- -1.54028535148757e-06,
- -1.40156213707031e-06,
- -7.70906117482812e-07,
- +1.26678407589510e-05,
- -2.86059910209067e-06,
- -5.24727272601304e-07,
- -3.65064381346446e-06,
- +9.44568065121753e-07,
- -5.57284737460865e-07,
- -1.52293587966085e-06,
- -8.47517032682293e-08,
- -1.10663399088509e-06,
- -3.76958544809564e-07,
- -1.59331466985759e-07,
- -2.12732228649381e-07,
- -9.24229928598805e-07,
- +1.03402960225838e-08,
- +1.34511329382043e-06,
- -7.14459002078499e-08,
- -2.52441026490512e-07,
- +9.95939462356595e-07,
- -1.05848170912871e-06,
- +1.50303640147954e-06,
- +1.26919796876880e-06,
- +6.46314956865716e-07,
- +2.71925636806500e-06,
- -2.34539311544274e-06,
- -3.73689650621026e-06,
- +2.20763076449435e-07,
- -1.60680908696206e-06,
- +2.15396401208974e-08,
- -1.31692530249810e-06,
- -7.80936547525000e-07,
- +2.87415984492423e-06,
- -6.65722251698181e-07,
- -4.20672314596139e-07,
- +7.28717331008338e-06,
- -7.41155860100283e-07,
- -2.12645525770754e-06,
- +7.90079094812809e-07,
- -1.72005091952628e-07,
- -9.77654355631137e-06,
- -3.71301441485858e-06,
- +5.44710667373040e-06,
- -5.32706958251855e-06,
- +5.91993254920188e-07,
- -3.12187240783594e-06,
- -4.52918584693216e-06,
- -3.41454150388631e-06,
- -7.23125525420231e-06,
- -3.58454973833238e-06,
- -2.86059909179075e-06,
- +4.39511567679169e-05,
- -1.26998730868418e-06,
- -8.37612014253572e-07,
- -3.81140833639464e-06,
- -5.12617639850978e-07,
- +2.55670112186278e-07,
- +1.10284419807148e-06,
- +4.33001191647761e-06,
- +1.44854210549182e-06,
- +2.42028161387734e-06,
- -2.14221100304198e-08,
- -1.34822253794481e-06,
- -1.64984636253507e-06,
- -4.61029479725811e-06,
- -7.22072780155747e-07,
- +9.61870923351076e-07,
- -2.99694188126624e-06,
- +2.98187751888394e-07,
- +2.39477390400413e-06,
- -1.48734791782866e-06,
- -3.76034138434956e-07,
- -3.56729115952965e-07,
- -8.48006705449621e-06,
- -2.11748167031788e-06,
- -4.07446911242427e-07,
- -3.15758133175189e-07,
- +1.55779136281047e-06,
- +1.19096283138584e-06,
- +3.48447408283332e-07,
- +3.76206723651067e-08,
- +4.52557801956387e-07,
- +6.13994359946002e-07,
- -3.66823408196677e-07,
- +8.75892501459247e-06,
- -2.61901539003054e-07,
- +3.10501489496633e-07,
- -9.64593108790650e-07,
- -2.56142530485047e-07,
- +5.63994939164386e-06,
- -3.66148183821025e-07,
- +3.54750872554472e-07,
- -4.29928487999571e-07,
- -4.59662070234103e-07,
- +2.02585333952873e-06,
- -2.77463003893713e-06,
- -2.20382153728904e-06,
- -2.07405379281961e-06,
- -5.24727285311033e-07,
- -1.26998732914003e-06,
- +9.44182187454569e-06,
- -2.62096284501807e-06,
- -2.82242109891917e-06,
- -1.65989027796858e-06,
- +8.75857955760900e-07,
- +4.36935081631816e-06,
- +2.03065590631855e-06,
- -1.74721668367130e-06,
- +3.97138334045678e-07,
- -1.42700870752941e-06,
- +8.22926056555969e-07,
- +1.74513424293832e-06,
- -5.30307710992859e-08,
- +7.97081318422508e-07,
- -1.05446687775364e-06,
- +1.38723953114692e-07,
- -8.15587452310468e-06,
- +1.18090257362051e-07,
- -4.23085867758805e-07,
- +1.56572459085957e-06,
- +2.68196483610135e-06,
- -4.59402820723409e-07,
- +1.14323301361383e-07,
- -3.08204625617189e-07,
- +3.36475597404807e-06,
- -4.09257873121608e-07,
- +1.93723213502922e-07,
- -1.65097497474662e-06,
- -1.15895492488127e-05,
- -2.05029217866979e-06,
- +4.93654587456623e-07,
- -2.56412173804899e-06,
- -3.61694345399908e-07,
- -2.67885984669123e-06,
- +3.85931654276123e-06,
- +1.13646191961848e-05,
- +1.44153164778675e-05,
- -2.50886665644815e-07,
- +7.96305553993956e-06,
- -1.51580458505418e-06,
- +3.31772710411059e-06,
- -5.51606104465041e-06,
- -2.02770504699610e-06,
- -1.37991926892555e-06,
- +2.15470894673315e-05,
- +8.49053607369227e-08,
- -3.65064381515853e-06,
- -8.37612026179796e-07,
- -2.62096286783714e-06,
- -1.71726712772026e-06,
- -1.78384598725531e-05,
- -9.97277162921538e-06,
- -1.08526583028424e-05,
- +4.58363492594641e-07,
- -3.48115387244793e-06,
- +5.92891732752307e-07,
- -6.35732067185026e-06,
- -6.37263401668750e-07,
- -1.36095632003862e-07,
- +3.05819416692625e-08,
- +1.15239572125140e-05,
- +1.40626547339979e-07,
- +1.12189590537254e-06,
- +3.59929673264818e-06,
- +4.01049306814733e-07,
- +9.07213236659392e-06,
- -4.37745086760786e-07,
- -5.03855827023851e-06,
- +5.85060882414421e-06,
- +5.02527195457716e-07,
- +1.22243180510041e-07,
- +7.80723594065406e-07,
- +9.45116043566329e-07,
- +6.62069386283653e-06,
- +5.57046637685238e-08,
- -2.85908112370717e-07,
- +9.08359275316839e-07,
- +2.68351443090441e-07,
- -1.69962595565490e-07,
- +7.49375224349288e-07,
- +2.67168629748812e-07,
- -5.12357353131499e-06,
- -1.39981977760926e-05,
- +2.50677007866752e-06,
- -7.02990935185248e-07,
- -3.10019895823577e-07,
- -3.18975103180187e-06,
- -7.79728161242254e-08,
- -4.51216063216078e-06,
- +4.77236551278947e-06,
- -1.92591201701764e-06,
- -1.71302581003502e-06,
- -2.18153038009447e-06,
- -1.85429878444310e-06,
- +9.44568064816821e-07,
- -3.81140834293373e-06,
- -2.82242111876092e-06,
- -1.78384596737714e-05,
- +3.80380053408740e-05,
- -2.81498748056546e-06,
- -2.09286230270426e-05,
- -5.74532541564054e-07,
- -1.07555820701860e-07,
- -2.03522760330952e-06,
- -4.84269379086885e-07,
- +3.14082548136902e-06,
- -5.49628425454765e-08,
- +7.18532831432868e-10,
- -7.97213361778792e-07,
- -1.71140034584713e-08,
- +1.35804605377914e-06,
- -1.39830558698768e-06,
- -3.61359338583783e-07,
- +2.44913155658768e-05,
- -1.10019067661238e-06,
- -1.13155145218481e-06,
- +6.65899104825322e-06,
- -6.85015678485472e-07,
- +3.01536219045562e-06,
- +8.75812012007745e-08,
- +9.47682105349962e-07,
- -1.70403046023443e-06,
- +4.25788752614163e-06,
- -1.51351009700898e-07,
- +1.28772044256605e-06,
- +3.35800375814198e-07,
- -5.94452133764539e-07,
- -1.31181433141029e-07,
- -5.76945685237112e-07,
- +1.97608874565985e-06,
- +3.20108997631501e-07,
- -1.50442462326533e-05,
- +4.88840379667696e-06,
- -2.01048867064011e-06,
- -6.53574509745477e-06,
- -2.53372280762274e-06,
- -3.72031646512196e-06,
- -1.10371522986770e-06,
- +5.73179986755110e-06,
- +1.34209161186251e-07,
- -2.12892290217399e-06,
- +3.98503738993259e-07,
- -5.57284698497350e-07,
- -5.12617681863812e-07,
- -1.65989023438873e-06,
- -9.97277163790593e-06,
- -2.81498753170931e-06,
- +2.23975463931819e-05,
- -2.69281510578221e-05,
- +1.09210915830193e-06,
- -8.00126481641450e-07,
- +2.21531897149220e-06,
- +7.07113954521131e-08,
- +4.28668007767554e-07,
- -7.59537784636400e-07,
- -1.61902940903349e-07,
- -1.11577529611766e-06,
- +1.80469425380581e-07,
- +6.08979511376772e-07,
- -4.51553088133940e-07,
- -2.73620129869888e-06,
- +2.30771728007981e-05,
- +2.27122274709234e-08,
- +5.25710645038163e-06,
- +8.41467170320862e-05,
- +8.87509044239439e-07,
- -6.64318972316109e-07,
- -1.65140167190520e-06,
- -5.40859782223409e-06,
- +2.25719655165828e-07,
- -7.56141677422321e-07,
- +7.90209212761560e-08,
- +1.02390749446912e-05,
- +1.48358122763540e-06,
- +2.33211951676136e-09,
- -4.30488499326433e-07,
- -1.70892662061585e-06,
- -1.20312258093045e-04,
- +5.14572412999882e-06,
- +2.87224542823489e-05,
- +7.30120259477535e-05,
- -4.96345700839310e-07,
- +5.30716908659116e-06,
- +1.31047357784892e-06,
- -3.02682613824459e-06,
- +9.93590280377551e-07,
- +4.75995061491725e-07,
- -2.79623041436966e-07,
- -1.69924370335184e-05,
- -1.20318157230086e-06,
- -1.52293584315585e-06,
- +2.55670165214775e-07,
- +8.75857942936821e-07,
- -1.08526603507267e-05,
- -2.09286220958485e-05,
- -2.69281502868379e-05,
- -3.42398403980715e-04,
- -2.22385902793769e-06,
- +4.02159147420923e-07,
- +4.06357697084513e-06,
- +7.32834411243963e-06,
- -2.44759166903461e-06,
- -1.65136743086089e-07,
- -1.56620656158510e-06,
- -1.11601438020656e-05,
- -2.81931784402924e-07,
- -6.20794856899488e-07,
- -1.20194793191714e-06,
- -5.63975880902320e-07,
- +3.69589667947645e-04,
- -5.08430445998970e-06,
- -1.36694514795416e-05,
- +1.02409473191526e-06,
- +3.79002570421175e-05,
- -6.35545661677704e-07,
- -2.05540273064570e-08,
- -1.48418927904573e-07,
- -4.06977311517788e-07,
- +3.75096210975055e-07,
- -2.27701706773629e-07,
- -2.85708744802906e-06,
- +1.01373437910116e-07,
- -2.61280304642984e-07,
- +1.15340868553051e-06,
- -3.56365402457066e-05,
- -1.31823726620580e-06,
- -1.62981439230507e-06,
- +2.58792718413789e-06,
- -6.34968508369106e-07,
- -4.80928064293288e-06,
- +4.03740673231572e-07,
- +8.41458684414792e-09,
- +3.46862987347951e-08,
- -1.63713486703006e-07,
- -2.27445554506670e-07,
- -8.17201936511908e-07,
- +1.65688565162891e-07,
- -3.23303552559570e-07,
- -8.47517033021107e-08,
- +1.10284420633852e-06,
- +4.36935090123745e-06,
- +4.58363491476557e-07,
- -5.74532542411087e-07,
- +1.09210920429582e-06,
- -2.22385903089595e-06,
- -1.83368968106049e-05,
- +3.19422661369426e-07,
- -2.50331754091571e-07,
- +1.21130672902032e-07,
- +2.08292946320056e-07,
- -9.33509707285818e-07,
- +1.28691353956650e-06,
- +1.61470481015408e-06,
- +1.17842147681396e-07,
- +2.80872569668501e-07,
- -1.62595104345525e-06,
- +2.32772754248930e-05,
- +7.82690330538251e-07,
- +1.30502200801981e-06,
- -5.94361876467155e-06,
- -2.32668109632255e-07,
- -6.27629443066631e-07,
- +6.81555941261928e-06,
- -1.71886290210739e-06,
- +7.04363232406039e-07,
- -7.36707634322762e-09,
- -8.51783829931743e-07,
- +3.99128222334545e-07,
- +1.41186696258873e-07,
- +6.43208275812220e-08,
- +1.28301464470801e-07,
- -2.17534686221081e-06,
- -2.46609491548889e-06,
- -8.08332942783460e-07,
- +2.96557806659043e-08,
- +6.05570445337870e-07,
- +1.20584205550681e-06,
- +4.14630003410951e-07,
- -5.68137322987615e-06,
- +1.51974555959470e-06,
- +1.92665132274766e-06,
- -1.13025135108883e-07,
- +5.61437186355108e-07,
- -4.07557766425289e-07,
- -2.40098833420273e-07,
- -5.14489840467358e-08,
- -1.10663399383276e-06,
- +4.33001192782785e-06,
- +2.03065591215037e-06,
- -3.48115372675827e-06,
- -1.07555825851820e-07,
- -8.00126472069978e-07,
- +4.02159031718340e-07,
- +3.19422662047053e-07,
- +6.13359317231109e-06,
- +1.63681169238329e-06,
- -2.50638532687695e-06,
- -1.30385757038035e-06,
- -1.11860493984999e-06,
- -8.64172284993886e-07,
- -7.75361080543454e-07,
- -2.47743787934278e-07,
- -1.11449952727976e-06,
- +5.42820451131590e-07,
- +2.47632863108367e-07,
- +4.16959259617537e-06,
- -2.84139392197410e-06,
- -2.68001402924351e-06,
- -3.56889925259645e-06,
- -1.99161727792985e-08,
- -1.71549756682949e-06,
- +5.70216686160215e-06,
- -4.81240475698746e-07,
- -8.57538621140469e-07,
- -4.23455551595571e-07,
- +1.39585405158017e-06,
- +4.95488644688610e-07,
- +1.16290019118188e-06,
- -3.21786833832639e-07,
- -6.90393187023218e-08,
- +2.48126479222334e-06,
- -1.87995288478441e-06,
- -1.59920088862965e-06,
- -3.01146974009483e-07,
- +2.67423872644587e-06,
- +8.70725033077341e-09,
- +1.50077111186282e-06,
- -1.34210907519160e-06,
- +5.33353627071625e-08,
- -9.12418448222952e-07,
- +7.85667403887174e-08,
- -1.62945463735463e-06,
- +2.57726743101144e-07,
- -7.86713620765496e-07,
- -3.76958541760245e-07,
- +1.44854207784467e-06,
- -1.74721680340364e-06,
- +5.92891754639638e-07,
- -2.03522758772411e-06,
- +2.21531894704682e-06,
- +4.06357714140580e-06,
- -2.50331752465268e-07,
- +1.63681167278295e-06,
- -6.74969446816647e-06,
- +7.62664933004400e-07,
- +9.07335393579253e-07,
- -9.73935059357179e-07,
- +2.46861180900894e-06,
- -3.20802711252391e-06,
- +1.28021103039979e-06,
- +8.03407596729872e-07,
- -1.66794153345202e-06,
- +1.86702528586521e-06,
- +1.84314493206186e-06,
- +2.92491749028477e-06,
- -5.70777735904578e-06,
- -1.87932851660572e-06,
- -1.41608271366867e-07,
- +7.72655630577564e-07,
- -4.81436214069190e-07,
- +1.13601601766834e-05,
- +1.53570010847466e-07,
- -1.42149789416615e-06,
- -1.25488383058295e-06,
- +1.33614915064485e-06,
- -3.14801994489981e-07,
- +8.47638716080243e-08,
- -2.51827929958825e-06,
- -1.70921045017564e-06,
- -5.30363895450580e-06,
- +6.41233491882631e-07,
- +6.76153093238897e-07,
- -1.83086673189033e-06,
- +2.94881146021051e-08,
- +1.53038600286023e-06,
- +3.67911577024455e-08,
- +4.96889953626408e-06,
- -1.28314303633585e-06,
- +1.58452146802740e-07,
- +6.08899580502840e-07,
- -2.18404596674432e-07,
- +1.12287947698158e-07,
- -1.59331465766032e-07,
- +2.42028161292866e-06,
- +3.97138331915390e-07,
- -6.35732084217164e-06,
- -4.84269311290368e-07,
- +7.07115533051732e-08,
- +7.32834530276868e-06,
- +1.21130672122761e-07,
- -2.50638531488297e-06,
- +7.62664945777656e-07,
- -7.16323555257915e-06,
- -2.01251380096718e-06,
- +8.48319534159836e-07,
- -6.61852919102695e-07,
- -2.65387594556709e-06,
- -9.36792846393348e-08,
- -4.81124439130642e-07,
- +2.79569030333651e-06,
- +1.67061999366172e-06,
- +7.42263244382924e-06,
- -2.09039692409499e-06,
- -3.28633901479187e-06,
- +1.72975672990043e-07,
- -4.05755038299278e-07,
- -8.69894156436523e-09,
- -8.70503498377780e-07,
- +1.70202204245610e-07,
- -5.32154701320196e-06,
- -1.02596442873613e-06,
- +2.69691699392649e-07,
- +1.52254198696388e-07,
- +7.34930336937977e-08,
- -2.69391773564118e-07,
- +3.09780246302070e-06,
- -2.47742120112852e-06,
- -4.62595519751469e-08,
- +5.39202461820939e-06,
- +1.09709755642085e-06,
- +7.36485264118372e-07,
- -1.71361528546224e-07,
- -1.19161887388725e-07,
- -9.25900911473307e-07,
- -1.25417513305764e-06,
- +7.93048889226452e-07,
- -6.97605638975762e-07,
- +4.19793533092193e-08,
- +3.12384883196013e-07,
- +1.32396997452021e-08,
- -2.12732231021074e-07,
- -2.14221114534351e-08,
- -1.42700870367117e-06,
- -6.37263402786833e-07,
- +3.14082546405567e-06,
- +4.28667994367492e-07,
- -2.44759166855815e-06,
- +2.08292946726632e-07,
- -1.30385757046505e-06,
- +9.07335394155235e-07,
- -2.01251380161092e-06,
- +1.18287618697428e-05,
- -1.79951021464053e-07,
- -1.56235967692126e-06,
- -3.05709675544436e-07,
- -3.87011275857060e-07,
- -5.99296186000421e-07,
- -6.39957453674699e-06,
- +8.67757747439763e-07,
- -1.38428367028126e-06,
- +3.47995465329785e-06,
- -7.09953528257867e-07,
- -6.50795248752581e-07,
- +3.69329440348762e-07,
- -8.53239469739652e-07,
- -4.15797653808268e-07,
- -1.44459914642087e-06,
- -1.01377876664304e-06,
- +5.44436499992618e-06,
- -4.78035065619941e-07,
- +1.73852007619092e-06,
- -9.13456247694420e-07,
- -1.42085519399220e-06,
- -5.29874958471975e-07,
- +2.52268441392985e-07,
- +6.07550381531707e-07,
- -1.40968723239682e-07,
- -5.50632864431961e-07,
- +2.15349629066066e-07,
- -2.28216617187470e-07,
- +5.41944269679979e-07,
- +7.07093854090480e-08,
- +1.70064434895780e-07,
- -6.91401482168729e-07,
- -3.35203963492068e-07,
- +1.49234306634277e-06,
- +3.06994985124287e-07,
- +8.57538971557999e-07,
- -9.24229910404537e-07,
- -1.34822253885960e-06,
- +8.22926112951423e-07,
- -1.36095658194121e-07,
- -5.49628844227854e-08,
- -7.59537800679204e-07,
- -1.65136745106262e-07,
- -9.33509726801457e-07,
- -1.11860494144241e-06,
- -9.73935034860986e-07,
- +8.48319507867933e-07,
- -1.79951014653908e-07,
- +1.10704729991035e-05,
- -2.76458066186905e-06,
- -2.20942624940446e-06,
- -1.88538081997178e-06,
- +1.00053915033058e-06,
- -1.61334510502672e-06,
- +2.52209612542449e-06,
- -2.71547689880425e-06,
- -1.89668257465634e-06,
- +1.01460198995154e-06,
- +7.34350669771117e-07,
- -9.07995106850406e-08,
- +3.96678519736925e-07,
- +1.38275001388541e-06,
- -1.30412252479347e-06,
- +2.61979184073874e-07,
- -4.88749671160096e-07,
- +1.17108610042806e-05,
- -1.08397623051258e-06,
- -8.15855322454937e-06,
- +7.86450629554521e-07,
- -3.89129478331240e-06,
- -1.14605816251500e-06,
- -4.49143848130803e-08,
- -4.74709850692857e-08,
- +9.82869456057022e-07,
- -7.55458762495727e-07,
- -8.70917427984280e-07,
- -4.04384484340055e-07,
- -1.64180172652941e-06,
- +6.43715897405905e-07,
- +3.81429496616585e-08,
- +1.49431484195861e-06,
- +3.30322323300095e-06,
- +4.77820269160649e-07,
- -2.25968045975024e-06,
- +1.03402955143640e-08,
- -1.64984636981955e-06,
- +1.74513425107830e-06,
- +3.05819440409548e-08,
- +7.18531408417517e-10,
- -1.61902982458785e-07,
- -1.56620657617313e-06,
- +1.28691354305628e-06,
- -8.64172284858360e-07,
- +2.46861180924610e-06,
- -6.61852918221780e-07,
- -1.56235967712455e-06,
- -2.76458067911464e-06,
- +2.54245036322722e-05,
- -2.36933071388412e-06,
- +3.79129826593219e-06,
- -1.80076970150619e-06,
- -1.10698034430385e-05,
- -6.56372217679396e-06,
- -2.49168411452255e-06,
- -1.74391566007295e-06,
- +4.87070694657794e-07,
- +2.47490064059957e-06,
- -2.79936711274615e-06,
- +7.87884915051966e-08,
- +5.09863326511007e-07,
- +1.62925936485230e-06,
- +1.17120298207755e-07,
- +1.73187085579816e-06,
- -1.09934802221560e-06,
- -1.08371441079900e-06,
- +2.61288528519751e-07,
- +2.31026326507602e-07,
- +3.55728564372493e-06,
- +3.18655913884424e-06,
- -1.03128627898625e-05,
- +2.47610794783010e-07,
- +1.26971892582897e-06,
- +2.92204706848901e-06,
- +1.50290569186981e-07,
- +2.23679620425842e-08,
- +2.06799358131680e-07,
- -1.41578774542233e-06,
- +2.67491878019682e-07,
- +2.87453131832138e-07,
- +4.95708017717738e-07,
- -9.15598792685941e-06,
- -7.31383500143172e-08,
- +1.34511329527733e-06,
- -4.61029478712759e-06,
- -5.30307481150469e-08,
- +1.15239569747688e-05,
- -7.97213382717446e-07,
- -1.11577531548084e-06,
- -1.11601443176694e-05,
- +1.61470481154321e-06,
- -7.75361072259472e-07,
- -3.20802707952351e-06,
- -2.65387596406629e-06,
- -3.05709673782608e-07,
- -2.20942625025149e-06,
- -2.36933071429070e-06,
- +2.24293319104441e-05,
- -1.87669345616195e-06,
- -1.80919295407337e-06,
- -9.22161749440870e-06,
- -6.10047782783978e-06,
- +1.86252714369580e-05,
- -3.09046834821923e-07,
- -6.70404519861867e-07,
- +1.93844392174268e-06,
- +3.58672592884137e-07,
- +6.82156298136873e-08,
- +1.21272078440636e-06,
- -3.43035940703952e-07,
- +7.88690220445733e-08,
- -8.80890390335008e-07,
- -7.38203032400319e-06,
- +2.82254253877821e-07,
- +1.31040293270242e-05,
- -9.88023219554261e-07,
- -1.92900412667346e-06,
- -1.41223752202040e-06,
- -2.83319357388581e-07,
- -2.13562920467611e-07,
- -3.61110173057925e-06,
- -1.38834952865749e-06,
- -3.44848026773731e-07,
- -5.24745433330741e-08,
- -7.86316856285908e-07,
- +1.22380077958035e-07,
- +1.39042047325602e-08,
- +8.65109030063555e-07,
- -2.14578711722567e-06,
- -7.57010022701086e-08,
- +3.48453910477541e-06,
- -7.14459009193576e-08,
- -7.22072779512002e-07,
- +7.97081308042119e-07,
- +1.40626547136691e-07,
- -1.71140017644054e-08,
- +1.80469484029142e-07,
- -2.81931770119831e-07,
- +1.17842152560306e-07,
- -2.47743787798753e-07,
- +1.28021102785870e-06,
- -9.36792832502008e-08,
- -3.87011276602449e-07,
- -1.88538079425585e-06,
- +3.79129825488688e-06,
- -1.87669345873693e-06,
- +1.32908151591530e-05,
- -4.92284062838621e-07,
- -5.59087104290055e-06,
- -4.15038457174757e-06,
- -9.56601781718324e-07,
- -1.88496510047031e-06,
- -6.32619663991984e-07,
- -1.14329376586612e-07,
- -2.52106627268061e-07,
- +1.12805320673945e-07,
- -3.20818026897261e-07,
- +9.81975708836151e-08,
- -2.47574268553920e-07,
- -1.40637529142469e-06,
- +8.27077955480190e-07,
- +2.37089445027493e-07,
- -9.33995925508891e-07,
- +6.53439512378730e-06,
- -2.89592601313097e-06,
- -2.05144689809696e-06,
- -1.30398357246048e-07,
- -2.02995535880874e-06,
- +2.57336072750867e-06,
- -1.13982426714352e-06,
- -8.73348870520909e-08,
- -1.04611488484859e-06,
- -3.78182125999410e-07,
- -1.89040650554830e-07,
- -2.02414294737062e-07,
- -9.11125309246519e-07,
- +6.28212075187620e-09,
- +1.27178646930224e-06,
- -6.79166338579261e-08,
- -2.52441165607203e-07,
- +9.61870914745221e-07,
- -1.05446686937649e-06,
- +1.12189590618570e-06,
- +1.35804619306524e-06,
- +6.08979484390302e-07,
- -6.20794856795726e-07,
- +2.80872569770145e-07,
- -1.11449952931264e-06,
- +8.03407594154892e-07,
- -4.81124437504339e-07,
- -5.99296184577405e-07,
- +1.00053913637148e-06,
- -1.80076970353907e-06,
- -1.80919294675500e-06,
- -4.92284062940265e-07,
- -1.46021786379648e-05,
- +3.64835623395550e-06,
- +2.67792104246753e-06,
- -1.32262379444226e-06,
- +1.76234495892464e-05,
- -3.19178201214081e-06,
- +1.45170851149911e-07,
- +1.15442522330609e-06,
- -2.16941049688599e-06,
- -8.13758567603758e-08,
- -2.52950123622367e-06,
- +3.16781665867438e-06,
- -5.01047861546351e-07,
- -3.83407514999081e-06,
- +3.54500423251926e-06,
- -2.06589358650361e-06,
- -2.90748964668590e-06,
- +3.25348266468799e-06,
- +2.32819701628047e-07,
- -2.09962514693583e-06,
- +4.81317331714329e-06,
- -1.23473667390099e-07,
- -1.29060903952046e-06,
- +1.29172157967994e-06,
- +4.29699348307867e-06,
- +1.52472647234794e-06,
- +2.50997962128455e-06,
- +1.67831645865482e-08,
- -1.30567978078083e-06,
- -1.62452931918536e-06,
- -4.58914810397154e-06,
- -7.17397594932387e-07,
- +9.95939463135866e-07,
- -2.99694188201163e-06,
- +1.38723933709167e-07,
- +3.59929672749822e-06,
- -1.39830558800412e-06,
- -4.51553137329613e-07,
- -1.20194802881771e-06,
- -1.62595104945224e-06,
- +5.42820453113647e-07,
- -1.66794156448730e-06,
- +2.79569030523386e-06,
- -6.39957453613712e-06,
- -1.61334511549605e-06,
- -1.10698034387356e-05,
- -9.22161747638384e-06,
- -5.59087104500119e-06,
- +3.64835624662712e-06,
- +4.77957414366458e-05,
- -4.09784758955019e-07,
- -2.12273994953236e-06,
- -1.26008441211873e-05,
- -1.25818735776505e-06,
- +3.53626177751715e-07,
- -3.61623616168449e-05,
- -2.45329934982935e-06,
- +3.73648402283205e-07,
- -1.72197411250864e-06,
- -2.43250562372694e-06,
- +3.24999900906328e-06,
- -9.27455259537276e-07,
- +3.23230447446434e-06,
- -1.37912968141396e-06,
- -2.05173635703772e-06,
- +2.37239866389871e-07,
- +4.30391882974768e-05,
- -7.36445504222422e-07,
- -7.94809364333617e-07,
- -1.82628935871135e-06,
- -7.94905191813682e-07,
- +4.24674825539024e-06,
- +2.01258075478814e-06,
- -1.73040635317959e-07,
- +4.30445548250794e-07,
- -1.40517747811213e-06,
- +4.84954801763787e-06,
- +1.70938589824453e-06,
- -1.43991323347441e-08,
- +7.51494539441902e-07,
- -1.05848169032458e-06,
- +2.98187714957757e-07,
- -8.15587482912498e-06,
- +4.01049287773432e-07,
- -3.61359371516424e-07,
- -2.73620168818157e-06,
- -5.63975927874532e-07,
- +2.32772754305851e-05,
- +2.47632854146759e-07,
- +1.86702511923689e-06,
- +1.67061996266031e-06,
- +8.67757755977855e-07,
- +2.52209644699208e-06,
- -6.56372215219613e-06,
- -6.10047783542920e-06,
- -4.15038459607435e-06,
- +2.67792105869668e-06,
- -4.09784744386053e-07,
- -1.25274547156727e-05,
- -2.41765156411626e-06,
- -1.97555574086043e-06,
- +1.57867864804719e-06,
- -8.83753938743344e-05,
- -1.19767163615881e-06,
- -1.18242984170234e-07,
- -1.79236886817317e-06,
- -3.20828991225462e-06,
- -6.67026933090942e-08,
- +1.03815984099160e-06,
- +7.19292834252009e-07,
- -1.70299038055704e-05,
- +1.23494884430404e-07,
- -2.45629115903586e-07,
- -1.68510300511119e-06,
- -3.24565167290577e-07,
- +1.41872354067751e-04,
- -5.18262556771488e-06,
- -2.45268049972303e-05,
- -7.92763178935713e-05,
- +4.16950807715266e-07,
- -8.03667030358423e-06,
- +5.22422655181313e-07,
- -1.83385974069879e-06,
- -8.55419305166940e-07,
- -2.56640938952186e-07,
- -4.86910678301007e-07,
- +2.92158064478386e-05,
- -6.58813581725542e-08,
- +1.50303637594997e-06,
- +2.39477383636008e-06,
- +1.18090227817542e-07,
- +9.07213404124582e-06,
- +2.44913143791667e-05,
- +2.30771714472310e-05,
- +3.69589668558887e-04,
- +7.82690324871600e-07,
- +4.16959287019900e-06,
- +1.84314491010677e-06,
- +7.42263270775623e-06,
- -1.38428367472818e-06,
- -2.71547692125062e-06,
- -2.49168412594055e-06,
- +1.86252723000677e-05,
- -9.56601782040197e-07,
- -1.32262378177065e-06,
- -2.12274006749016e-06,
- -2.41765161384556e-06,
- -4.01573429984211e-04,
- +9.53108602783602e-07,
- +1.15983816754088e-05,
- +2.17501942461663e-07,
- -1.59163708796962e-06,
- -9.10093960642452e-09,
- -1.58002475888019e-06,
- +8.69506464780943e-07,
- +5.42933475630204e-06,
- -1.26978968281093e-07,
- -5.33332735342795e-08,
- +4.09881442073887e-08,
- -2.02364299837078e-07,
- -2.14808965208387e-06,
- +4.85950762529869e-06,
- -7.99263441618025e-07,
- -1.25199157681791e-07,
- -5.25054749705338e-06,
- +4.69700143325625e-07,
- +3.24970701932360e-06,
- -6.04129252629788e-07,
- +9.74833057429837e-08,
- -2.10010388335076e-06,
- -1.68960378315057e-06,
- +3.14717979843206e-06,
- -8.33670534271551e-08,
- +5.33435702023115e-10,
- +1.57098618917264e-07,
- -1.83389220625193e-08,
- +1.26919809477342e-06,
- -1.48734790725768e-06,
- -4.23085862401322e-07,
- -4.37745073004971e-07,
- -1.10019078723489e-06,
- +2.27122514927778e-08,
- -5.08430445638557e-06,
- +1.30502201039150e-06,
- -2.84139392082213e-06,
- +2.92491750810634e-06,
- -2.09039692395947e-06,
- +3.47995465245081e-06,
- -1.89668257031953e-06,
- -1.74391566051341e-06,
- -3.09046857657931e-07,
- -1.88496509908117e-06,
- +1.76234495633611e-05,
- -1.26008441104131e-05,
- -1.97555573042498e-06,
- +9.53108508788356e-07,
- +4.70980278371037e-06,
- -5.69149697574243e-07,
- +3.61363253116055e-06,
- +2.61865910498445e-06,
- +3.82651364207957e-07,
- -2.48006071000299e-07,
- +1.75809147147659e-06,
- +9.19581784817583e-07,
- -3.95660495777658e-06,
- -1.44064020560003e-07,
- +6.14366618966215e-07,
- -2.40628650529783e-06,
- +2.84701254416622e-06,
- -3.43700764666977e-08,
- +5.39664491208940e-07,
- +9.21540100597086e-07,
- +6.19533596922820e-07,
- -8.04540197750018e-06,
- +3.09560411185976e-06,
- +1.05389750664495e-06,
- -3.73257910446815e-07,
- +2.14737256473111e-06,
- -1.45086467842211e-06,
- +3.56571491727335e-07,
- -4.95380731818686e-06,
- -2.67290209232760e-07,
- -8.79128581709993e-07,
- +3.47900774418856e-07,
- +6.46314915005348e-07,
- -3.76034103350851e-07,
- +1.56572454247281e-06,
- -5.03855818816102e-06,
- -1.13155135072720e-06,
- +5.25710643327156e-06,
- -1.36694518690031e-05,
- -5.94361870613310e-06,
- -2.68001401298895e-06,
- -5.70777739017424e-06,
- -3.28633898482385e-06,
- -7.09953545910034e-07,
- +1.01460197993961e-06,
- +4.87070644166160e-07,
- -6.70404513356654e-07,
- -6.32619597610011e-07,
- -3.19178206315761e-06,
- -1.25818739853275e-06,
- +1.57867823102746e-06,
- +1.15983821954870e-05,
- -5.69149643254020e-07,
- +2.36405646742266e-05,
- ],
- dtype=torch.double,
- ),
- },
- "PbH4-BiH3": {
- "cn": torch.tensor(
- [
- +3.93882078385452e00,
- +9.83202447815541e-01,
- +9.83202575698739e-01,
- +9.83202575698739e-01,
- +9.86589814809524e-01,
- +2.97146042634822e00,
- +9.87045550753296e-01,
- +9.87045669088046e-01,
- +9.87045550753296e-01,
- ],
- dtype=torch.double,
- ),
- "weights": torch.tensor(
- [
- [
- +1.10706478210448e-27,
- +2.26599265549586e-16,
- +1.05725184914546e-07,
- +1.41167956987467e-02,
- +9.85883098576068e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.79089158960743e-01,
- +2.09108410392566e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.79089178059127e-01,
- +2.09108219408730e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.79089178059127e-01,
- +2.09108219408730e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.79589093181984e-01,
- +2.04109068180158e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +4.51567208622703e-16,
- +1.31099357580350e-07,
- +1.71917663233654e-02,
- +9.82808102577277e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.79655454782451e-01,
- +2.03445452175495e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.79655471986197e-01,
- +2.03445280138028e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.79655454782451e-01,
- +2.03445452175495e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- ],
- dtype=torch.double,
- ),
- "c6": torch.tensor(
- [
- [
- +4.56209027097378e02,
- +3.72742207576559e01,
- +3.72742203407386e01,
- +3.72742203407386e01,
- +3.72633072045004e01,
- +4.93665090064696e02,
- +3.72618585322083e01,
- +3.72618581566509e01,
- +3.72618585322083e01,
- ],
- [
- +3.72742207576559e01,
- +3.09876483566212e00,
- +3.09876480252469e00,
- +3.09876480252469e00,
- +3.09789740447628e00,
- +4.02583668373402e01,
- +3.09778226108473e00,
- +3.09778223123467e00,
- +3.09778226108473e00,
- ],
- [
- +3.72742203407386e01,
- +3.09876480252469e00,
- +3.09876476938727e00,
- +3.09876476938727e00,
- +3.09789737134977e00,
- +4.02583663834069e01,
- +3.09778222795966e00,
- +3.09778219810960e00,
- +3.09778222795966e00,
- ],
- [
- +3.72742203407386e01,
- +3.09876480252469e00,
- +3.09876476938727e00,
- +3.09876476938727e00,
- +3.09789737134977e00,
- +4.02583663834069e01,
- +3.09778222795966e00,
- +3.09778219810960e00,
- +3.09778222795966e00,
- ],
- [
- +3.72633072045004e01,
- +3.09789740447628e00,
- +3.09789737134977e00,
- +3.09789737134977e00,
- +3.09703025884030e00,
- +4.02464843239764e01,
- +3.09691515335283e00,
- +3.09691512351259e00,
- +3.09691515335283e00,
- ],
- [
- +4.93665090064696e02,
- +4.02583668373402e01,
- +4.02583663834069e01,
- +4.02583663834069e01,
- +4.02464843239764e01,
- +5.34419964675118e02,
- +4.02449070312629e01,
- +4.02449066223616e01,
- +4.02449070312629e01,
- ],
- [
- +3.72618585322083e01,
- +3.09778226108473e00,
- +3.09778222795966e00,
- +3.09778222795966e00,
- +3.09691515335283e00,
- +4.02449070312629e01,
- +3.09680005289677e00,
- +3.09680002305784e00,
- +3.09680005289677e00,
- ],
- [
- +3.72618581566509e01,
- +3.09778223123467e00,
- +3.09778219810960e00,
- +3.09778219810960e00,
- +3.09691512351259e00,
- +4.02449066223616e01,
- +3.09680002305784e00,
- +3.09679999321891e00,
- +3.09680002305784e00,
- ],
- [
- +3.72618585322083e01,
- +3.09778226108473e00,
- +3.09778222795966e00,
- +3.09778222795966e00,
- +3.09691515335283e00,
- +4.02449070312629e01,
- +3.09680005289677e00,
- +3.09680002305784e00,
- +3.09680005289677e00,
- ],
- ],
- dtype=torch.double,
- ),
- "disp2": torch.tensor(
- [
- -1.7789829941052290e-03,
- -4.2874420697641040e-04,
- -4.2874425413182740e-04,
- -4.2874425413182740e-04,
- -6.4605081235581219e-04,
- -1.8277741525957012e-03,
- -4.4890931954739776e-04,
- -4.4890934300120941e-04,
- -4.4890931954739776e-04,
- ],
- dtype=torch.double,
- ),
- "disp3": torch.tensor(
- [
- +1.5164457178775542e-07,
- +3.1871289285333041e-07,
- +3.1871279049093017e-07,
- +3.1871279049093017e-07,
- -5.9772721699589883e-07,
- -3.5376082968855901e-07,
- +1.4591177238904105e-07,
- +1.4591163155676249e-07,
- +1.4591177238904105e-07,
- ],
- dtype=torch.double,
- ),
- "grad": torch.tensor(
- [
- [
- +1.8639257945602507e-011,
- -5.7589265336385818e-005,
- +0.0000000000000000,
- ],
- [
- +1.1824683859244095e-005,
- -1.7476694890228468e-005,
- +0.0000000000000000,
- ],
- [
- -5.9123515874157095e-006,
- -1.7476703035791685e-005,
- -1.0240481396622120e-005,
- ],
- [
- -5.9123515874157095e-006,
- -1.7476703035791685e-005,
- +1.0240481396622120e-005,
- ],
- [
- -1.5695651885305123e-012,
- -1.8948774924372469e-005,
- +0.0000000000000000,
- ],
- [
- +1.4523056809395272e-011,
- +9.0457983350326898e-005,
- +0.0000000000000000,
- ],
- [
- +3.3441903047977048e-006,
- +1.2836714954362650e-005,
- -5.7923124762056244e-006,
- ],
- [
- -6.6883928867521314e-006,
- +1.2836727963519770e-005,
- +0.0000000000000000,
- ],
- [
- +3.3441903047977048e-006,
- +1.2836714954362650e-005,
- +5.7923124762056244e-006,
- ],
- ],
- dtype=torch.double,
- ),
- "hessian": torch.tensor(
- [
- -2.10251881000936e-05,
- +1.56778573675227e-05,
- +4.58658049531517e-06,
- +4.58658049531093e-06,
- +1.27004565632629e-06,
- -3.20178128905553e-06,
- -6.99597297336907e-07,
- -4.94900030634601e-07,
- -6.99597297347495e-07,
- +3.24937085159800e-11,
- -5.10398603376360e-06,
- +2.55197525080619e-06,
- +2.55197525082313e-06,
- +3.40254828977947e-12,
- -6.58246243970262e-13,
- +6.08526809019224e-07,
- -1.21705332402484e-06,
- +6.08526809002283e-07,
- -2.11754359404454e-17,
- +2.12017134651781e-18,
- +6.40348511345190e-06,
- -6.40348511345190e-06,
- +2.62532909257553e-21,
- +1.48230765769503e-17,
- -1.18182232642152e-07,
- -2.11499742872152e-18,
- +1.18182232633681e-07,
- +1.56778573633057e-05,
- -8.10455889687384e-06,
- -2.56401693069407e-06,
- -2.56401693068984e-06,
- -1.72557088875752e-06,
- -6.80418585446685e-07,
- -2.08619712547455e-08,
- +2.44881170954203e-09,
- -2.08619712632158e-08,
- -5.20807345800377e-06,
- +6.37758712365301e-06,
- +1.76719491040619e-07,
- +1.76719491057559e-07,
- +6.94930612315755e-07,
- -2.14921929991179e-06,
- -1.64684822667006e-08,
- -3.57269959229162e-08,
- -1.64684822921116e-08,
- +2.11809935601860e-18,
- +2.18427784399067e-18,
- -7.04550495576572e-07,
- +7.04550495593512e-07,
- -2.11753390052173e-18,
- +1.58818677610181e-17,
- +2.35575185880953e-09,
- -1.05465528100511e-18,
- -2.35575186727986e-09,
- +4.58658047648103e-06,
- -2.56401693817761e-06,
- +1.19859490580249e-06,
- -9.75712621134828e-07,
- -1.06323616861457e-06,
- -1.13330792285054e-06,
- -1.61850669504910e-08,
- -2.24901027324464e-08,
- -1.02265618319088e-08,
- +2.60402027059268e-06,
- -8.28517453482867e-08,
- -3.18877666059694e-06,
- -9.38670696245148e-08,
- -3.47467129345474e-07,
- +1.07461024833385e-06,
- +3.88511321231704e-08,
- -2.23825707012409e-08,
- +1.78635242957124e-08,
- +6.40348781270446e-06,
- -1.12946188851450e-06,
- -5.37111517004220e-06,
- +2.12454327151317e-07,
- -3.82398697073881e-07,
- +2.61475696160849e-07,
- +2.22443467234179e-09,
- -3.98465104432747e-09,
- +7.31813601620337e-09,
- +4.58658047646621e-06,
- -2.56401693812679e-06,
- -9.75712621164474e-07,
- +1.19859490578979e-06,
- -1.06323616861510e-06,
- -1.13330792285001e-06,
- -1.02265618276736e-08,
- -2.24901027493871e-08,
- -1.61850669356679e-08,
- +2.60402027069432e-06,
- -8.28517453652273e-08,
- -9.38670696245148e-08,
- -3.18877666061388e-06,
- -3.47467129294652e-07,
- +1.07461024840161e-06,
- +1.78635243126531e-08,
- -2.23825706758299e-08,
- +3.88511321485814e-08,
- -6.40348781266846e-06,
- +1.12946188851020e-06,
- -2.12454327193668e-07,
- +5.37111517006762e-06,
- +3.82398697068585e-07,
- -2.61475696178848e-07,
- -7.31813601620337e-09,
- +3.98465104432437e-09,
- -2.22443465116597e-09,
- +1.27004564782676e-06,
- -1.72557088721505e-06,
- -1.06323616846207e-06,
- -1.06323616846630e-06,
- +5.74542830317392e-06,
- -2.15084144784711e-06,
- -4.03075144340199e-07,
- -2.06438990331727e-07,
- -4.03075144346552e-07,
- -4.20805968195936e-14,
- +1.22691265025225e-06,
- -6.13456419739706e-07,
- -6.13456419722765e-07,
- -3.88618716200273e-14,
- +4.03187682893047e-14,
- +4.15028885527964e-07,
- -8.30057541145775e-07,
- +4.15028885502553e-07,
- +6.35319946880474e-18,
- -2.08449009492384e-18,
- -3.82398696905593e-07,
- +3.82398696888652e-07,
- +3.17641394188197e-18,
- +1.05879118406788e-17,
- -1.13528159871704e-07,
- +1.06189311136495e-18,
- +1.13528159850528e-07,
- -3.20178129073190e-06,
- -6.80418583448746e-07,
- -1.13330792258796e-06,
- -1.13330792258796e-06,
- -2.15084144751022e-06,
- -3.54608405623710e-06,
- +2.21880801397117e-06,
- +7.40812519518843e-06,
- +2.21880801396481e-06,
- +2.95174041459179e-13,
- -2.23850988653813e-06,
- +1.11925527065577e-06,
- +1.11925527063883e-06,
- +1.20448085099562e-13,
- -4.38168788361934e-11,
- +2.44187608865813e-06,
- -4.88370943079062e-06,
- +2.44187608865813e-06,
- +2.11758236813575e-18,
- -2.15067060237983e-18,
- +2.61475703404039e-07,
- -2.61475703412510e-07,
- +2.11758236813575e-18,
- +2.11758236813575e-18,
- -2.99609392867838e-06,
- -2.11758236813575e-18,
- +2.99609392867838e-06,
- -6.99597297409720e-07,
- -2.08619711996883e-08,
- -1.61850669526086e-08,
- -1.02265618467319e-08,
- -4.03075144381751e-07,
- +2.21880801037128e-06,
- +1.56645109601592e-06,
- -2.05489603810740e-06,
- -5.80417026472373e-07,
- +5.86639614110710e-07,
- -2.83448339398387e-08,
- +4.10119676358346e-08,
- +1.98053356890602e-08,
- +4.16523173119916e-07,
- +2.58090457439333e-06,
- -3.53991084071089e-06,
- -3.42333365498907e-08,
- -4.23956536889402e-08,
- -1.18182242325100e-07,
- +4.92465210582561e-09,
- -3.44455333126017e-10,
- -7.31813661759677e-09,
- -1.13528159804769e-07,
- -2.99609315114236e-06,
- +2.37925005318782e-06,
- +8.96233635949202e-07,
- -4.49421960218199e-08,
- -4.94900031165328e-07,
- +2.44881177306950e-09,
- -2.24901027705629e-08,
- -2.24901027620926e-08,
- -2.06438990588991e-07,
- +7.40812519342078e-06,
- -2.05489603733872e-06,
- -2.55446270323975e-06,
- -2.05489603733660e-06,
- -1.17327908422582e-06,
- -3.96106930960452e-08,
- -1.26671498912659e-08,
- -1.26671498912659e-08,
- -8.33046191690200e-07,
- -5.16176592178156e-06,
- +7.66285422685160e-08,
- +7.07977910593748e-06,
- +7.66285422685160e-08,
- -6.46234853557053e-23,
- +0.00000000000000e00,
- -1.41576950968389e-09,
- +1.41576950968389e-09,
- +1.05880733993921e-18,
- +1.05879118406788e-18,
- +8.06350015246183e-07,
- -3.18361138256347e-18,
- -8.06350015250418e-07,
- -6.99597297408661e-07,
- -2.08619711996883e-08,
- -1.02265618509670e-08,
- -1.61850669568438e-08,
- -4.03075144381751e-07,
- +2.21880801037181e-06,
- -5.80417026463903e-07,
- -2.05489603811163e-06,
- +1.56645109602439e-06,
- +5.86639614144591e-07,
- -2.83448339567793e-08,
- +1.98053357060009e-08,
- +4.10119676019533e-08,
- +4.16523173119916e-07,
- +2.58090457439333e-06,
- -4.23956536804699e-08,
- -3.42333365414203e-08,
- -3.53991084070242e-06,
- +1.18182242325100e-07,
- -4.92465210790985e-09,
- +7.31813660912644e-09,
- +3.44455333126017e-10,
- +1.13528159807945e-07,
- +2.99609315113919e-06,
- +4.49421960175848e-08,
- -8.96233635948144e-07,
- -2.37925005317512e-06,
- +3.23527312819770e-11,
- -5.20807327771280e-06,
- +2.60402026397311e-06,
- +2.60402026396464e-06,
- -4.22509242250484e-14,
- +2.96235479621207e-13,
- +5.86639614398701e-07,
- -1.17327908576318e-06,
- +5.86639614411407e-07,
- -3.20372390189388e-05,
- +2.68706524448502e-06,
- +2.68704763582201e-06,
- +2.68704763582201e-06,
- +2.51496339929614e-05,
- -1.45217700840380e-05,
- +4.44940438142187e-06,
- +4.44940583105947e-06,
- +4.44940438142187e-06,
- +2.11745312116504e-18,
- +2.11758135839379e-18,
- +4.51029293013639e-06,
- -4.51029293015333e-06,
- -1.05879118406788e-18,
- -6.35274710440725e-18,
- -1.01608960056912e-06,
- -2.11861634390144e-18,
- +1.01608960057335e-06,
- -5.10398583972321e-06,
- +6.37758692743783e-06,
- -8.28517446410142e-08,
- -8.28517446367790e-08,
- +1.22691264981584e-06,
- -2.23850988721152e-06,
- -2.83448338996046e-08,
- -3.96106932654518e-08,
- -2.83448338996046e-08,
- +2.68706523116966e-06,
- -5.49938460264073e-06,
- -9.08884371539039e-07,
- -9.08884371555980e-07,
- -2.40377543717912e-06,
- +6.63265673557945e-06,
- +1.61748374949468e-07,
- +7.77100662498773e-08,
- +1.61748374974879e-07,
- -2.11751774465040e-18,
- -9.92606637644046e-20,
- -1.56223424521617e-07,
- +1.56223424521617e-07,
- -1.05877502819654e-18,
- -1.05879118406788e-18,
- -3.09916397702080e-08,
- -2.11758236813575e-18,
- +3.09916397617376e-08,
- +2.55197522778559e-06,
- +1.76719483205564e-07,
- -3.18877664705712e-06,
- -9.38670690527675e-08,
- -6.13456419269212e-07,
- +1.11925527105705e-06,
- +4.10119677057148e-08,
- -1.26671500903187e-08,
- +1.98053357292943e-08,
- +2.68704762274383e-06,
- -9.08884371522098e-07,
- -5.49936911935280e-06,
- -9.08882828871814e-07,
- -2.40377658448525e-06,
- +6.63265835856234e-06,
- +1.61748396226936e-07,
- +1.61748477923263e-07,
- +7.77100487925282e-08,
- +4.42015025542016e-06,
- -6.35999247242776e-09,
- -5.52311996845925e-06,
- +1.49862471489072e-07,
- -1.06253707467083e-06,
- +1.93860581242985e-06,
- +9.05150212611737e-09,
- +4.00431583416556e-08,
- +3.43038358178335e-08,
- +2.55197522778453e-06,
- +1.76719483205564e-07,
- -9.38670690442972e-08,
- -3.18877664704865e-06,
- -6.13456419271329e-07,
- +1.11925527105758e-06,
- +1.98053357250591e-08,
- -1.26671500945538e-08,
- +4.10119677099500e-08,
- +2.68704762267606e-06,
- -9.08884371522098e-07,
- -9.08882828888754e-07,
- -5.49936911938668e-06,
- -2.40377658450219e-06,
- +6.63265835856234e-06,
- +7.77100487840579e-08,
- +1.61748477931734e-07,
- +1.61748396235406e-07,
- -4.42015025542652e-06,
- +6.35999247229567e-09,
- -1.49862471472131e-07,
- +5.52311996846772e-06,
- +1.06253707466977e-06,
- -1.93860581243832e-06,
- -3.43038358135983e-08,
- -4.00431583458928e-08,
- -9.05150212188221e-09,
- +3.39128772280722e-12,
- +6.94930608021298e-07,
- -3.47467117491248e-07,
- -3.47467117482778e-07,
- -3.92354806764619e-14,
- +1.20831896903786e-13,
- +4.16523173750955e-07,
- -8.33046193405441e-07,
- +4.16523173755190e-07,
- +2.51496340058363e-05,
- -2.40377542989464e-06,
- -2.40377657714994e-06,
- -2.40377657716688e-06,
- -1.36350475531565e-05,
- -1.07611292097430e-05,
- +2.15262348769313e-06,
- +2.15262436581219e-06,
- +2.15262348771007e-06,
- -5.16987882845642e-22,
- +2.11757731942596e-18,
- -6.01830354969181e-07,
- +6.01830354969181e-07,
- +1.24400209309733e-21,
- -1.05879118406788e-17,
- -7.21439285180113e-07,
- -1.06292708713064e-18,
- +7.21439285192818e-07,
- -6.54498408620596e-13,
- -2.14921930571397e-06,
- +1.07461024989663e-06,
- +1.07461024990510e-06,
- +4.03722122953958e-14,
- -4.39188573678781e-11,
- +2.58090460442912e-06,
- -5.16176587000668e-06,
- +2.58090460442276e-06,
- -1.45217700828860e-05,
- +6.63265673612155e-06,
- +6.63265835915526e-06,
- +6.63265835913832e-06,
- -1.07611291950047e-05,
- -7.02283344589702e-06,
- +4.13593064406373e-06,
- +4.13589798121120e-06,
- +4.13593064402137e-06,
- +2.11764699162111e-18,
- -2.11758337787771e-18,
- +1.86127847559573e-06,
- -1.86127847557032e-06,
- +0.00000000000000e00,
- +2.01170324972896e-17,
- -4.47025769820642e-06,
- +5.16987882845642e-22,
- +4.47025769819371e-06,
- +6.08526809568353e-07,
- -1.64684827325687e-08,
- +3.88511322459902e-08,
- +1.78635243973564e-08,
- +4.15028885613928e-07,
- +2.44187610335151e-06,
- -3.53991085921644e-06,
- +7.66285408158545e-08,
- -4.23956540489292e-08,
- +4.44940438310747e-06,
- +1.61748374974879e-07,
- +1.61748396201525e-07,
- +7.77100487840579e-08,
- +2.15262348466922e-06,
- +4.13593062372646e-06,
- -9.91706932484866e-06,
- -6.11047417481777e-07,
- -6.11048569311060e-07,
- -1.05399942983574e-06,
- -3.53533065134485e-08,
- -3.41449779861253e-09,
- -3.09405239449378e-08,
- -7.18851102231071e-07,
- -4.22945217333026e-06,
- +6.13130533183975e-06,
- +4.71237152384566e-09,
- -6.40066697102940e-08,
- -1.21705332445233e-06,
- -3.57269962956107e-08,
- -2.23825706377134e-08,
- -2.23825706419486e-08,
- -8.30057541387905e-07,
- -4.88370935980555e-06,
- -3.42333366303588e-08,
- +7.07977903647655e-06,
- -3.42333366324764e-08,
- +4.44940583258413e-06,
- +7.77100662159959e-08,
- +1.61748477923263e-07,
- +1.61748477923263e-07,
- +2.15262436275440e-06,
- +4.13589796118734e-06,
- -6.11047417363193e-07,
- -9.91704034361637e-06,
- -6.11047417363193e-07,
- -2.11758236813575e-18,
- -2.15067060237983e-18,
- +3.19388249571742e-08,
- -3.19388249487039e-08,
- +3.17636547426796e-18,
- -2.11758236813575e-18,
- +6.87185346832299e-08,
- -2.11654839237006e-18,
- -6.87185346705244e-08,
- +6.08526809568353e-07,
- -1.64684827325687e-08,
- +1.78635243973564e-08,
- +3.88511322417550e-08,
- +4.15028885617104e-07,
- +2.44187610333774e-06,
- -4.23956540256358e-08,
- +7.66285407650326e-08,
- -3.53991085918044e-06,
- +4.44940438310747e-06,
- +1.61748374974879e-07,
- +7.77100487671172e-08,
- +1.61748396218465e-07,
- +2.15262348465228e-06,
- +4.13593062365870e-06,
- -6.11048569277179e-07,
- -6.11047417447896e-07,
- -9.91706932479784e-06,
- +1.05399942984209e-06,
- +3.53533065135147e-08,
- +3.09405239449378e-08,
- +3.41449779014220e-09,
- +7.18851102234247e-07,
- +4.22945217332073e-06,
- +6.40066696637072e-08,
- -4.71237152384617e-09,
- -6.13130533178893e-06,
- -5.50571092597868e-17,
- +0.00000000000000e00,
- +6.40348795906333e-06,
- -6.40348795902097e-06,
- -5.29104786349837e-22,
- -3.70576914423756e-18,
- -1.18182242408442e-07,
- +0.00000000000000e00,
- +1.18182242406324e-07,
- -6.77626357803440e-17,
- +0.00000000000000e00,
- +4.42015038074634e-06,
- -4.42015038071246e-06,
- +1.69406589450860e-17,
- +0.00000000000000e00,
- -1.05399942934022e-06,
- +0.00000000000000e00,
- +1.05399942937410e-06,
- -2.10250692972243e-05,
- +8.89530708556727e-07,
- +1.19806845201433e-05,
- +1.19806845200586e-05,
- +1.27004546204882e-06,
- -3.20178147228887e-06,
- -5.63132203607377e-07,
- -7.67830034077425e-07,
- -5.63132203607377e-07,
- -5.29392360859670e-18,
- -8.47032947254300e-18,
- -1.12946188905247e-06,
- +1.12946188906941e-06,
- +5.29528877972484e-19,
- +1.58818677610181e-18,
- +4.92465210702326e-09,
- -4.23516473627150e-18,
- -4.92465210067051e-09,
- -6.77626357803440e-17,
- +0.00000000000000e00,
- -6.35999238706884e-09,
- +6.35999238706884e-09,
- +0.00000000000000e00,
- +0.00000000000000e00,
- -3.53533064165212e-08,
- +0.00000000000000e00,
- +3.53533064419322e-08,
- +8.89530700163080e-07,
- +4.29960486408968e-06,
- -1.50514786563553e-06,
- -1.50514786563553e-06,
- -8.42458431855913e-07,
- -1.28427081321833e-06,
- -1.88294474777752e-08,
- -1.44516929699990e-08,
- -1.88294474693049e-08,
- +6.40348522763317e-06,
- -7.04550501878497e-07,
- -5.37111531029815e-06,
- -2.12454332860319e-07,
- -3.82398697223586e-07,
- +2.61475702846586e-07,
- -3.44455326773270e-10,
- -1.41576948850807e-09,
- +7.31813661124402e-09,
- +4.51029303796368e-06,
- -1.56223429341234e-07,
- -5.52312008963578e-06,
- -1.49862476435744e-07,
- -6.01830364193370e-07,
- +1.86127847123351e-06,
- -3.41449806966307e-09,
- +3.19388246353017e-08,
- +3.09405236569466e-08,
- +1.19806845033878e-05,
- -1.50514786653837e-06,
- -5.00342777186618e-06,
- -3.09345106980407e-06,
- -1.50479164181523e-06,
- -8.31382144832865e-07,
- -2.35063546090021e-08,
- -1.72013426514562e-08,
- -1.77631125208703e-09,
- -6.40348522758658e-06,
- +7.04550501878497e-07,
- +2.12454332868789e-07,
- +5.37111531024733e-06,
- +3.82398697229408e-07,
- -2.61475702831763e-07,
- -7.31813661547918e-09,
- +1.41576948850807e-09,
- +3.44455320420523e-10,
- -4.51029303796368e-06,
- +1.56223429324294e-07,
- +1.49862476401863e-07,
- +5.52312008956802e-06,
- +6.01830364193370e-07,
- -1.86127847130128e-06,
- -3.09405236400059e-08,
- -3.19388246353017e-08,
- +3.41449804425208e-09,
- +1.19806845033201e-05,
- -1.50514786654052e-06,
- -3.09345106980407e-06,
- -5.00342777179842e-06,
- -1.50479164182370e-06,
- -8.31382144826513e-07,
- -1.77631124361670e-09,
- -1.72013426525124e-08,
- -2.35063546301779e-08,
- +3.17656742265969e-18,
- +0.00000000000000e00,
- -3.82398696774303e-07,
- +3.82398696782773e-07,
- -2.11514283156357e-18,
- +6.35274710440725e-18,
- -1.13528159856881e-07,
- +8.47032947254300e-18,
- +1.13528159842058e-07,
- +0.00000000000000e00,
- +0.00000000000000e00,
- -1.06253707516383e-06,
- +1.06253707518077e-06,
- -3.38813178901720e-17,
- -6.77626357803440e-17,
- -7.18851102072271e-07,
- +0.00000000000000e00,
- +7.18851102038390e-07,
- +1.27004545353263e-06,
- -8.42458431860658e-07,
- -1.50479164087277e-06,
- -1.50479164088124e-06,
- +5.74542672852916e-06,
- -2.15084138588029e-06,
- -2.71984122278350e-07,
- -4.68620837948104e-07,
- -2.71984122295291e-07,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +2.61475696454134e-07,
- -2.61475696441429e-07,
- -1.29246970711411e-22,
- -2.64697796016969e-18,
- -2.99609319592711e-06,
- +0.00000000000000e00,
- +2.99609319592500e-06,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +1.93860581179563e-06,
- -1.93860581177869e-06,
- +0.00000000000000e00,
- +0.00000000000000e00,
- -4.22945220586585e-06,
- +0.00000000000000e00,
- +4.22945220586585e-06,
- -3.20178147390532e-06,
- -1.28427081322053e-06,
- -8.31382143635372e-07,
- -8.31382143643843e-07,
- -2.15084138551640e-06,
- -3.54614562775819e-06,
- +5.67839795983496e-06,
- +4.89007668017723e-07,
- +5.67839795982649e-06,
- -1.18182232564080e-07,
- +2.35575190539634e-09,
- +2.22443468928245e-09,
- -7.31813600773304e-09,
- -1.13528159773637e-07,
- -2.99609396778218e-06,
- +2.37925009617263e-06,
- +8.06350016461675e-07,
- +4.49421969048518e-08,
- -1.01608959937480e-06,
- -3.09916395626849e-08,
- +9.05150227434814e-09,
- -3.43038356780730e-08,
- -7.21439283824860e-07,
- -4.47025771088650e-06,
- +6.13130536114709e-06,
- +6.87185353735617e-08,
- +6.40066704387423e-08,
- -5.63132203973192e-07,
- -1.88294474748053e-08,
- -2.35063545878263e-08,
- -1.77631122667604e-09,
- -2.71984122469811e-07,
- +5.67839795628484e-06,
- -1.18086774516732e-06,
- -1.07190989968547e-06,
- -2.54639187169835e-06,
- +1.05888811929591e-18,
- -1.69406589450860e-17,
- -3.98465104058706e-09,
- +3.98465104058706e-09,
- +5.29274422998896e-19,
- +4.76456032830544e-18,
- +8.96233636242083e-07,
- +0.00000000000000e00,
- -8.96233636254789e-07,
- +6.77626357803440e-17,
- +0.00000000000000e00,
- +4.00431582864129e-08,
- -4.00431582694722e-08,
- +0.00000000000000e00,
- -6.77626357803440e-17,
- +4.71237139275924e-09,
- +0.00000000000000e00,
- -4.71237140969990e-09,
- -7.67830034079374e-07,
- -1.44516929720134e-08,
- -1.72013426519659e-08,
- -1.72013426434956e-08,
- -4.68620837942433e-07,
- +4.89007666103822e-07,
- -1.07190989936978e-06,
- +2.94011738295056e-06,
- -1.07190989939095e-06,
- +1.18182232565138e-07,
- -2.35575191386667e-09,
- +7.31813602043854e-09,
- -2.22443468504729e-09,
- +1.13528159772048e-07,
- +2.99609396777477e-06,
- -4.49421969344979e-08,
- -8.06350016410853e-07,
- -2.37925009620016e-06,
- +1.01608959944256e-06,
- +3.09916395796256e-08,
- +3.43038356950137e-08,
- -9.05150225740748e-09,
- +7.21439283841801e-07,
- +4.47025771075098e-06,
- -6.40066704641533e-08,
- -6.87185354074430e-08,
- -6.13130536120639e-06,
- -5.63132203964722e-07,
- -1.88294474745737e-08,
- -1.77631122667604e-09,
- -2.35063546132373e-08,
- -2.71984122468752e-07,
- +5.67839795628589e-06,
- -2.54639187165177e-06,
- -1.07190989968441e-06,
- -1.18086774522661e-06,
- ],
- dtype=torch.double,
- ),
- },
- "C6H5I-CH3SH": {
- "cn": torch.tensor(
- [
- +3.13936895934395e00,
- +3.13131666863102e00,
- +3.13937683960227e00,
- +3.31534291514346e00,
- +3.13765455567338e00,
- +3.31481155018318e00,
- +1.53636056287591e00,
- +1.00352466398219e00,
- +1.01223354855399e00,
- +1.00366192372190e00,
- +1.01219594356898e00,
- +1.00366200689047e00,
- +2.15705640674763e00,
- +9.98181081558970e-01,
- +3.98411287017616e00,
- +1.01462256394391e00,
- +1.01235611510819e00,
- +1.00858912903507e00,
- ],
- dtype=torch.double,
- ),
- "weights": torch.tensor(
- [
- [
- +7.66915348045866e-18,
- +9.04559372001613e-09,
- +5.55474084124553e-03,
- +9.36199574012947e-01,
- +5.82456761002137e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.33337260445326e-18,
- +1.03304923176417e-08,
- +5.94352226939526e-03,
- +9.39218013018462e-01,
- +5.48384543816506e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +7.66767874029992e-18,
- +9.04441693210364e-09,
- +5.55437244105958e-03,
- +9.36196513315798e-01,
- +5.82491051987251e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.61278067966140e-20,
- +4.54846017777575e-10,
- +1.16048785372819e-03,
- +7.99583198542082e-01,
- +1.99256313149343e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +7.99678634625602e-18,
- +9.30523441007744e-09,
- +5.63544094450366e-03,
- +9.36860440392255e-01,
- +5.75041093580066e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.74417166809199e-20,
- +4.59132999088564e-10,
- +1.16639851843437e-03,
- +8.00245977284164e-01,
- +1.98587623738269e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +2.54205655667171e-04,
- +9.99745794344333e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.81918044661934e-01,
- +1.80819553380659e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.83012088864378e-01,
- +1.69879111356215e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.81935812878635e-01,
- +1.80641871213652e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.83007507536486e-01,
- +1.69924924635144e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.81935823639575e-01,
- +1.80641763604250e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.18809691426923e-09,
- +5.00585297181196e-03,
- +9.94994137840091e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.81212828585247e-01,
- +1.87871714147530e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +2.61014227249546e-28,
- +2.42411690886486e-16,
- +1.38694757870492e-07,
- +2.01508154369845e-02,
- +9.79849045868257e-01,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.83300662218751e-01,
- +1.66993377812491e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.83027012438879e-01,
- +1.69729875611206e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- [
- +9.82562408977771e-01,
- +1.74375910222289e-02,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- +0.00000000000000e00,
- ],
- ],
- dtype=torch.double,
- ),
- "c6": torch.tensor(
- [
- [
- +2.53086694842437e01,
- +2.53236159084896e01,
- +2.53086544809279e01,
- +2.47100760277660e01,
- +2.53119158180357e01,
- +2.47128891340438e01,
- +9.48922900325656e01,
- +8.82857683343213e00,
- +8.82319289903895e00,
- +8.82848939371737e00,
- +8.82321544435180e00,
- +8.82848934076138e00,
- +5.62455312853600e01,
- +8.83204729384078e00,
- +2.14360070777824e01,
- +8.82177279187685e00,
- +8.82311945817995e00,
- +8.82540583248536e00,
- ],
- [
- +2.53236159084896e01,
- +2.53385737670487e01,
- +2.53236008936914e01,
- +2.47245622275532e01,
- +2.53268647265581e01,
- +2.47273774996866e01,
- +9.49508531711154e01,
- +8.83383171969513e00,
- +8.82844384885730e00,
- +8.83374421604913e00,
- +8.82846641065407e00,
- +8.83374416305442e00,
- +5.62800562259229e01,
- +8.83730471751867e00,
- +2.14479713482813e01,
- +8.82702270338885e00,
- +8.82837035430227e00,
- +8.83065840028219e00,
- ],
- [
- +2.53086544809279e01,
- +2.53236008936914e01,
- +2.53086394776235e01,
- +2.47100614866121e01,
- +2.53119008122251e01,
- +2.47128745907148e01,
- +9.48922312429173e01,
- +8.82857155866703e00,
- +8.82318762822647e00,
- +8.82848411901647e00,
- +8.82321017352276e00,
- +8.82848406606051e00,
- +5.62454966276673e01,
- +8.83204201652784e00,
- +2.14359950691858e01,
- +8.82176752210693e00,
- +8.82311418742137e00,
- +8.82540056004824e00,
- ],
- [
- +2.47100760277660e01,
- +2.47245622275532e01,
- +2.47100614866121e01,
- +2.41300091503453e01,
- +2.47132223699295e01,
- +2.41327350652656e01,
- +9.25450400227954e01,
- +8.61818543474823e00,
- +8.61295975815401e00,
- +8.61810056527593e00,
- +8.61298164075975e00,
- +8.61810051387655e00,
- +5.48621486974196e01,
- +8.62155388287635e00,
- +2.09574675249769e01,
- +8.61158139426314e00,
- +8.61288847604877e00,
- +8.61510764363531e00,
- ],
- [
- +2.53119158180357e01,
- +2.53268647265581e01,
- +2.53119008122251e01,
- +2.47132223699295e01,
- +2.53151626915744e01,
- +2.47160359467776e01,
- +9.49050104461900e01,
- +8.82971816324588e00,
- +8.82433337366177e00,
- +8.82963070964208e00,
- +8.82435592255574e00,
- +8.82963065667768e00,
- +5.62530302631426e01,
- +8.83318917490684e00,
- +2.14386054857927e01,
- +8.82291304092804e00,
- +8.82425992113731e00,
- +8.82654665861330e00,
- ],
- [
- +2.47128891340438e01,
- +2.47273774996866e01,
- +2.47128745907148e01,
- +2.41327350652656e01,
- +2.47160359467776e01,
- +2.41354613905380e01,
- +9.25560734133844e01,
- +8.61917409179224e00,
- +8.61394767066187e00,
- +8.61908921022803e00,
- +8.61396955638537e00,
- +8.61908915882133e00,
- +5.48686508434178e01,
- +8.62254301984509e00,
- +2.09597156158777e01,
- +8.61256911038653e00,
- +8.61387637840061e00,
- +8.61609586216640e00,
- ],
- [
- +9.48922900325656e01,
- +9.49508531711154e01,
- +9.48922312429173e01,
- +9.25450400227954e01,
- +9.49050104461900e01,
- +9.25560734133844e01,
- +3.58497831396092e02,
- +3.31610125661753e01,
- +3.31400786291125e01,
- +3.31606725810729e01,
- +3.31401662903144e01,
- +3.31606723751682e01,
- +2.12283949836107e02,
- +3.31745064898504e01,
- +7.97027494030697e01,
- +3.31345569359076e01,
- +3.31397930746818e01,
- +3.31486830076409e01,
- ],
- [
- +8.82857683343213e00,
- +8.83383171969513e00,
- +8.82857155866703e00,
- +8.61818543474823e00,
- +8.82971816324588e00,
- +8.61917409179224e00,
- +3.31610125661753e01,
- +3.08895723244586e00,
- +3.08706250254790e00,
- +3.08892646040635e00,
- +3.08707043676017e00,
- +3.08892644176992e00,
- +1.96792520574283e01,
- +3.09017856699708e00,
- +7.46755679047458e00,
- +3.08656273429260e00,
- +3.08703665702655e00,
- +3.08784128454205e00,
- ],
- [
- +8.82319289903895e00,
- +8.82844384885730e00,
- +8.82318762822647e00,
- +8.61295975815401e00,
- +8.82433337366177e00,
- +8.61394767066187e00,
- +3.31400786291125e01,
- +3.08706250254790e00,
- +3.08516914014558e00,
- +3.08703175271770e00,
- +3.08517706863144e00,
- +3.08703173409473e00,
- +1.96668860512657e01,
- +3.08828295561747e00,
- +7.46319796171210e00,
- +3.08466973259124e00,
- +3.08514331327788e00,
- +3.08594736006437e00,
- ],
- [
- +8.82848939371737e00,
- +8.83374421604913e00,
- +8.82848411901647e00,
- +8.61810056527593e00,
- +8.82963070964208e00,
- +8.61908921022803e00,
- +3.31606725810729e01,
- +3.08892646040635e00,
- +3.08703175271770e00,
- +3.08889568872755e00,
- +3.08703968683697e00,
- +3.08889567009134e00,
- +1.96790512228764e01,
- +3.09014778064156e00,
- +7.46748599935495e00,
- +3.08653199032049e00,
- +3.08700590749930e00,
- +3.08781052558326e00,
- ],
- [
- +8.82321544435180e00,
- +8.82846641065407e00,
- +8.82321017352276e00,
- +8.61298164075975e00,
- +8.82435592255574e00,
- +8.61396955638537e00,
- +3.31401662903144e01,
- +3.08707043676017e00,
- +3.08517706863144e00,
- +3.08703968683697e00,
- +3.08518499714127e00,
- +3.08703966821393e00,
- +1.96669378341176e01,
- +3.08829089352095e00,
- +7.46321621437845e00,
- +3.08467765956666e00,
- +3.08515124168563e00,
- +3.08595529090393e00,
- ],
- [
- +8.82848934076138e00,
- +8.83374416305442e00,
- +8.82848406606051e00,
- +8.61810051387655e00,
- +8.82963065667768e00,
- +8.61908915882133e00,
- +3.31606723751682e01,
- +3.08892644176992e00,
- +3.08703173409473e00,
- +3.08889567009134e00,
- +3.08703966821393e00,
- +3.08889565145513e00,
- +1.96790511012452e01,
- +3.09014776199646e00,
- +7.46748595648182e00,
- +3.08653197170106e00,
- +3.08700588887651e00,
- +3.08781050695476e00,
- ],
- [
- +5.62455312853600e01,
- +5.62800562259229e01,
- +5.62454966276673e01,
- +5.48621486974196e01,
- +5.62530302631426e01,
- +5.48686508434178e01,
- +2.12283949836107e02,
- +1.96792520574283e01,
- +1.96668860512657e01,
- +1.96790512228764e01,
- +1.96669378341176e01,
- +1.96790511012452e01,
- +1.25836586473867e02,
- +1.96872231305630e01,
- +4.72941948267372e01,
- +1.96636243001677e01,
- +1.96667173697683e01,
- +1.96719687931115e01,
- ],
- [
- +8.83204729384078e00,
- +8.83730471751867e00,
- +8.83204201652784e00,
- +8.62155388287635e00,
- +8.83318917490684e00,
- +8.62254301984509e00,
- +3.31745064898504e01,
- +3.09017856699708e00,
- +3.08828295561747e00,
- +3.09014778064156e00,
- +3.08829089352095e00,
- +3.09014776199646e00,
- +1.96872231305630e01,
- +3.09140046974751e00,
- +7.47036647230640e00,
- +3.08778295485591e00,
- +3.08825709807205e00,
- +3.08906209992291e00,
- ],
- [
- +2.14360070777824e01,
- +2.14479713482813e01,
- +2.14359950691858e01,
- +2.09574675249769e01,
- +2.14386054857927e01,
- +2.09597156158777e01,
- +7.97027494030697e01,
- +7.46755679047458e00,
- +7.46319796171210e00,
- +7.46748599935495e00,
- +7.46321621437845e00,
- +7.46748595648182e00,
- +4.72941948267372e01,
- +7.47036647230640e00,
- +1.83413164591959e01,
- +7.46204824414648e00,
- +7.46313850405433e00,
- +7.46498955077155e00,
- ],
- [
- +8.82177279187685e00,
- +8.82702270338885e00,
- +8.82176752210693e00,
- +8.61158139426314e00,
- +8.82291304092804e00,
- +8.61256911038653e00,
- +3.31345569359076e01,
- +3.08656273429260e00,
- +3.08466973259124e00,
- +3.08653199032049e00,
- +3.08467765956666e00,
- +3.08653197170106e00,
- +1.96636243001677e01,
- +3.08778295485591e00,
- +7.46204824414648e00,
- +3.08417042017811e00,
- +3.08464391064378e00,
- +3.08544780425280e00,
- ],
- [
- +8.82311945817995e00,
- +8.82837035430227e00,
- +8.82311418742137e00,
- +8.61288847604877e00,
- +8.82425992113731e00,
- +8.61387637840061e00,
- +3.31397930746818e01,
- +3.08703665702655e00,
- +3.08514331327788e00,
- +3.08700590749930e00,
- +3.08515124168563e00,
- +3.08700588887651e00,
- +1.96667173697683e01,
- +3.08825709807205e00,
- +7.46313850405433e00,
- +3.08464391064378e00,
- +3.08511748666464e00,
- +3.08592152552955e00,
- ],
- [
- +8.82540583248536e00,
- +8.83065840028219e00,
- +8.82540056004824e00,
- +8.61510764363531e00,
- +8.82654665861330e00,
- +8.61609586216640e00,
- +3.31486830076409e01,
- +3.08784128454205e00,
- +3.08594736006437e00,
- +3.08781052558326e00,
- +3.08595529090393e00,
- +3.08781050695476e00,
- +1.96719687931115e01,
- +3.08906209992291e00,
- +7.46498955077155e00,
- +3.08544780425280e00,
- +3.08592152552955e00,
- +3.08672581101038e00,
- ],
- ],
- dtype=torch.double,
- ),
- "disp2": torch.tensor(
- [
- -1.9420461943405458e-03,
- -1.8659072210258116e-03,
- -1.9421688758887014e-03,
- -2.2256063318899419e-03,
- -2.3963299472900094e-03,
- -2.2258129538456762e-03,
- -4.5810403655531691e-03,
- -6.0279450821464173e-04,
- -7.9994791096430059e-04,
- -6.1485615934089312e-04,
- -7.9989323817241818e-04,
- -6.1484107713457887e-04,
- -2.2996378209045958e-03,
- -5.6155104045316131e-04,
- -1.1544788441618554e-03,
- -5.5259186314968840e-04,
- -6.8597888322421800e-04,
- -5.0103989808744046e-04,
- ],
- dtype=torch.double,
- ),
- "disp3": torch.tensor(
- [
- -1.2978866706459067e-06,
- -6.8327757407160399e-07,
- -1.2942593535913288e-06,
- -5.7304824129487952e-07,
- -8.9195765730180898e-07,
- -4.8897672215875848e-07,
- -5.9620837808702434e-06,
- -5.1712490636531602e-07,
- +2.1379354562450553e-06,
- +7.7699432620597416e-07,
- +2.1956704534880581e-06,
- +7.6716763665232290e-07,
- -9.5275400116253198e-07,
- +6.0068639199219523e-07,
- -2.6385604432973588e-07,
- +1.1560414358817309e-06,
- -2.6528734005501400e-07,
- -1.3951746669187961e-06,
- ],
- dtype=torch.double,
- ),
- "grad": torch.tensor([], dtype=torch.double),
- "hessian": torch.tensor([], dtype=torch.double),
- },
- "AmF3": {
- "cn": torch.tensor(
- [
- +2.99072690000000e00,
- +0.99768090000000e00,
- +0.99767850000000e00,
- +0.99768040000000e00,
- ],
- dtype=torch.double,
- ),
- "weights": torch.tensor(
- [
- [
- +3.01777620000000e-16,
- +3.48560790000000e-08,
- +6.05574020000000e-03,
- +9.93942080000000e-01,
- +2.12835020000000e-06,
- +3.22313320000000e-14,
- +0.00000000000000e00,
- ],
- [
- +0.01831650000000e00,
- 0.981683500000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- ],
- [
- 0.018316800000000e00,
- 0.981683200000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- ],
- [
- 0.018316600000000e00,
- 0.981683400000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- 0.000000000000000e00,
- ],
- ],
- dtype=torch.double,
- ),
- "c6": torch.tensor(
- [
- [
- +524.180114700000e00,
- +54.4235535000000e00,
- +54.4235573000000e00,
- +54.4235573000000e00,
- ],
- [
- +54.4235535000000e00,
- +7.17594720000000e00,
- +7.17594770000000e00,
- +7.17594770000000e00,
- ],
- [
- +54.4235535000000e00,
- +7.17594770000000e00,
- +7.17594860000000e00,
- +7.17594810000000e00,
- ],
- [
- +54.4235535000000e00,
- +7.17594770000000e00,
- +7.17594810000000e00,
- +7.17594810000000e00,
- ],
- ],
- dtype=torch.double,
- ),
- "disp2": torch.tensor(
- [
- -1.048180025875288e-03,
- -4.430683267237130e-04,
- -4.430435696703567e-04,
- -4.430709410870264e-04,
- ],
- dtype=torch.double,
- ),
- "disp3": torch.tensor(
- [
- 1.475402588166237e-08,
- 2.297333064597274e-07,
- 2.297265476250950e-07,
- 2.297346486316179e-07,
- ],
- dtype=torch.double,
- ),
- "grad": torch.tensor(
- [
- [
- -3.091609121445480e-10,
- 2.958185285646392e-12,
- 3.762196005417977e-07,
- ],
- [
- -1.982582438864074e-05,
- -5.360338422731795e-06,
- -1.431825059800939e-07,
- ],
- [
- 5.276022219053056e-06,
- 1.985418497606805e-05,
- -1.170419972562382e-07,
- ],
- [
- 1.455011133049982e-05,
- -1.449384951152156e-05,
- -1.159950973054663e-07,
- ],
- ],
- dtype=torch.double,
- ),
- "hessian": torch.tensor(
- [],
- dtype=torch.double,
- ),
- },
-}
-
-samples: Dict[str, Record] = merge_nested_dicts(mols, refs) # type: ignore
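Note: the deleted reference module above merges the molecule definitions (`mols`) with the reference values (`refs`) into a single `samples` dictionary. A minimal sketch of how the removed tests consumed one record, assuming the old `tests/` layout and a plain CPU/double setup (only keys visible above and in the deleted tests below are used):

```
import torch

# Path assumed from the old tests/ layout removed in this diff.
from tests.samples import samples

dd = {"device": torch.device("cpu"), "dtype": torch.double}

# Each record bundles the molecule (numbers, positions) with reference data
# such as coordination numbers ("cn"), C6 coefficients ("c6"), and the
# two- and three-body dispersion energies ("disp2", "disp3").
sample = samples["AmF3"]
numbers = sample["numbers"].to(dd["device"])  # atomic numbers stay integral
cn_ref = sample["cn"].to(**dd)                # reference coordination numbers

assert numbers.shape == cn_ref.shape          # one CN value per atom
```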
diff --git a/tests/test_ncoord/__init__.py b/tests/test_ncoord/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_ncoord/test_cn.py b/tests/test_ncoord/test_cn.py
deleted file mode 100644
index 720f7b4..0000000
--- a/tests/test_ncoord/test_cn.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test coordination number.
-"""
-import pytest
-import torch
-
-from tad_dftd3 import data, ncoord, utils
-from tad_dftd3._typing import DD
-
-from ..conftest import DEVICE
-from ..samples import samples
-
-
-@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
-def test_single(dtype: torch.dtype) -> None:
- dd: DD = {"device": DEVICE, "dtype": dtype}
-
- sample = samples["PbH4-BiH3"]
- numbers = sample["numbers"].to(DEVICE)
- positions = sample["positions"].to(**dd)
- cutoff = torch.tensor(25, **dd)
- ref = sample["cn"].to(**dd)
-
- rcov = data.covalent_rad_d3.to(**dd)[numbers]
- cn = ncoord.coordination_number(
- numbers, positions, ncoord.exp_count, rcov, cutoff=cutoff
- )
- assert cn.dtype == dtype
- assert pytest.approx(cn.cpu()) == ref.cpu()
-
-
-@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
-def test_batch(dtype: torch.dtype) -> None:
- dd: DD = {"device": DEVICE, "dtype": dtype}
-
- sample1, sample2 = (
- samples["PbH4-BiH3"],
- samples["C6H5I-CH3SH"],
- )
- numbers = utils.pack(
- (
- sample1["numbers"].to(DEVICE),
- sample2["numbers"].to(DEVICE),
- )
- )
- positions = utils.pack(
- (
- sample1["positions"].to(**dd),
- sample2["positions"].to(**dd),
- )
- )
- ref = utils.pack(
- (
- sample1["cn"].to(**dd),
- sample2["cn"].to(**dd),
- )
- )
-
- cn = ncoord.coordination_number(numbers, positions)
- assert cn.dtype == dtype
- assert pytest.approx(cn.cpu()) == ref.cpu()
diff --git a/tests/test_ncoord/test_general.py b/tests/test_ncoord/test_general.py
deleted file mode 100644
index 8e926f8..0000000
--- a/tests/test_ncoord/test_general.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test error handling in coordination number calculation.
-"""
-import pytest
-import torch
-
-from tad_dftd3._typing import Any, CountingFunction, Optional, Protocol, Tensor
-from tad_dftd3.ncoord import coordination_number, exp_count
-
-
-class CNFunction(Protocol):
- """
- Type annotation for coordination number function.
- """
-
- def __call__(
- self,
- numbers: Tensor,
- positions: Tensor,
- counting_function: CountingFunction = exp_count,
- rcov: Optional[Tensor] = None,
- en: Optional[Tensor] = None,
- cutoff: Optional[Tensor] = None,
- **kwargs: Any,
- ) -> Tensor:
- ...
-
-
-@pytest.mark.parametrize("function", [coordination_number])
-@pytest.mark.parametrize("counting_function", [exp_count])
-def test_fail(function: CNFunction, counting_function: CountingFunction) -> None:
- numbers = torch.tensor([1, 1])
- positions = torch.tensor([[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
-
- # rcov wrong shape
- with pytest.raises(ValueError):
- rcov = torch.tensor([1.0])
- function(numbers, positions, counting_function, rcov)
-
- # wrong numbers
- with pytest.raises(ValueError):
- numbers = torch.tensor([1])
- function(numbers, positions, counting_function)
diff --git a/tests/test_ncoord/test_grad.py b/tests/test_ncoord/test_grad.py
deleted file mode 100644
index a9b2cd9..0000000
--- a/tests/test_ncoord/test_grad.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test the analytical derivative (w.r.t. the distance argument) of the
-exponential counting function used for the coordination number.
-"""
-import pytest
-import torch
-
-from tad_dftd3._typing import DD, CountingFunction, Tuple
-from tad_dftd3.ncoord import dexp_count, exp_count
-
-from ..conftest import DEVICE
-
-
-@pytest.mark.parametrize("dtype", [torch.float, torch.double])
-@pytest.mark.parametrize("function", [(exp_count, dexp_count)])
-def test_count_grad(
- dtype: torch.dtype, function: Tuple[CountingFunction, CountingFunction]
-) -> None:
- dd: DD = {"device": DEVICE, "dtype": dtype}
-
- tol = torch.finfo(dtype).eps ** 0.5 * 10
- cf, dcf = function
-
- a = torch.rand(4, **dd)
- b = torch.rand(4, **dd)
-
- a_grad = a.detach().clone().requires_grad_(True)
- count = cf(a_grad, b)
-
- (grad_auto,) = torch.autograd.grad(count.sum(-1), a_grad)
- grad_expl = dcf(a, b)
-
- assert pytest.approx(grad_auto.cpu(), abs=tol) == grad_expl.cpu()
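For context, the deleted `test_count_grad` above compares an explicit derivative (`dexp_count`) against autograd. A self-contained sketch of the same check, using the standard D3-style exponential counting function as an assumed stand-in for `tad_dftd3.ncoord.exp_count` (the library's exact functional form, argument names, and defaults may differ):

```
import torch

KCN = 16.0  # assumed steepness of the D3-style counting function

def exp_count(r: torch.Tensor, r0: torch.Tensor) -> torch.Tensor:
    # assumed form: 1 / (1 + exp(-k * (r0 / r - 1)))
    return 1.0 / (1.0 + torch.exp(-KCN * (r0 / r - 1.0)))

def dexp_count(r: torch.Tensor, r0: torch.Tensor) -> torch.Tensor:
    # analytical derivative of exp_count with respect to the distance r
    e = torch.exp(-KCN * (r0 / r - 1.0))
    return -KCN * r0 / r**2 * e / (1.0 + e) ** 2

r = 1.0 + torch.rand(4, dtype=torch.double)   # distances
r0 = 1.0 + torch.rand(4, dtype=torch.double)  # (scaled) covalent radii

r_ad = r.detach().clone().requires_grad_(True)
(grad_auto,) = torch.autograd.grad(exp_count(r_ad, r0).sum(), r_ad)

assert torch.allclose(grad_auto, dexp_count(r, r0))
```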
diff --git a/tests/test_utils/test_cdist.py b/tests/test_utils/test_cdist.py
deleted file mode 100644
index 7e2052e..0000000
--- a/tests/test_utils/test_cdist.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test the utility functions.
-"""
-
-import pytest
-import torch
-
-from tad_dftd3 import utils
-from tad_dftd3._typing import DD
-
-from ..conftest import DEVICE
-
-
-@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
-def test_all(dtype: torch.dtype) -> None:
- """
- The single precision test sometimes fails on my GPU with the following
- thresholds:
-
- ```
- tol = 1e-6 if dtype == torch.float else 1e-14
- ```
-
-    Only one matrix element seems to be affected. The failure also appears to
-    occur only if `torch.rand` has been called beforehand. Specifically,
-
- ```
- pytest -vv test/test_ncoord/test_grad.py test/test_utils/ --cuda --slow
- ```
-
- fails, while
-
- ```
- pytest -vv test/test_utils/ --cuda --slow
- ```
-
- works. It also works if I remove the random tensors in the gradient test
- (test/test_ncoord/test_grad.py).
-
- It can be fixed with
-
- ```
- torch.use_deterministic_algorithms(True)
- ```
-
-    and by setting the CuBLAS workspace configuration environment variable
-    as described in the PyTorch notes on reproducibility.
-
- ```
- CUBLAS_WORKSPACE_CONFIG=:4096:8 pytest -vv test/test_ncoord/test_grad.py test/test_utils/ --cuda --slow
- ```
-
-    (For simplicity, I just loosened the tolerance for single precision.)
- """
- dd: DD = {"device": DEVICE, "dtype": dtype}
- tol = 1e-6 if dtype == torch.float else 1e-14
-
- # only one element actually fails
- if "cuda" in str(DEVICE) and dtype == torch.float:
- tol = 1e-3
-
- x = torch.randn(2, 3, 4, **dd)
-
- d1 = utils.cdist(x)
- d2 = utils.distance.cdist_direct_expansion(x, x, p=2)
- d3 = utils.distance.euclidean_dist_quadratic_expansion(x, x)
-
- assert pytest.approx(d1.cpu(), abs=tol) == d2.cpu()
- assert pytest.approx(d2.cpu(), abs=tol) == d3.cpu()
- assert pytest.approx(d3.cpu(), abs=tol) == d1.cpu()
-
-
-@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
-@pytest.mark.parametrize("p", [2, 3, 4, 5])
-def test_ps(dtype: torch.dtype, p: int) -> None:
- dd: DD = {"device": DEVICE, "dtype": dtype}
- tol = 1e-6 if dtype == torch.float else 1e-14
-
- x = torch.randn(2, 4, 5, **dd)
- y = torch.randn(2, 4, 5, **dd)
-
- d1 = utils.cdist(x, y, p=p)
- d2 = torch.cdist(x, y, p=p)
-
- assert pytest.approx(d1.cpu(), abs=tol) == d2.cpu()
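The precision issue described in the docstring of `test_all` is characteristic of the quadratic expansion ||x - y||^2 = ||x||^2 + ||y||^2 - 2 x.y, which suffers from cancellation for nearby points in single precision. A minimal sketch contrasting the two formulations (independent of the actual `utils.distance` implementations, whose signatures are not reproduced here):

```
import torch

def dist_direct(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # pairwise distances via the difference vectors (numerically robust)
    diff = x.unsqueeze(-2) - y.unsqueeze(-3)
    return torch.sqrt((diff * diff).sum(-1))

def dist_quadratic(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # pairwise distances via ||x||^2 + ||y||^2 - 2 x.y (fast, cancellation-prone)
    xx = (x * x).sum(-1).unsqueeze(-1)
    yy = (y * y).sum(-1).unsqueeze(-2)
    xy = x @ y.transpose(-2, -1)
    return torch.sqrt((xx + yy - 2 * xy).clamp_min(0.0))

x = torch.randn(2, 3, 4)  # float32 by default
err = (dist_direct(x, x) - dist_quadratic(x, x)).abs().max()
# the discrepancy is typically orders of magnitude larger in float32 than in
# float64, which is why the test loosens the single-precision tolerance
print(f"max |direct - quadratic| = {err:.2e}")
```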
diff --git a/tests/test_utils/test_pack.py b/tests/test_utils/test_pack.py
deleted file mode 100644
index ae5cd0c..0000000
--- a/tests/test_utils/test_pack.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test the utility functions.
-"""
-
-import torch
-
-from tad_dftd3.utils import pack
-
-mol1 = torch.tensor([1, 1]) # H2
-mol2 = torch.tensor([8, 1, 1]) # H2O
-
-
-def test_single_tensor() -> None:
- # dummy test: only give single tensor
- assert (mol1 == pack(mol1)).all()
-
-
-def test_standard() -> None:
- # standard packing
- ref = torch.tensor(
- [
- [1, 1, 0], # H2
- [8, 1, 1], # H2O
- ],
- )
- packed = pack([mol1, mol2])
- assert (packed == ref).all()
-
-
-def test_axis() -> None:
- ref = torch.tensor(
- [
- [1, 1, 0], # H2
- [8, 1, 1], # H2O
- ],
- )
-
- # different axis
- packed = pack([mol1, mol2], axis=-1)
- assert (packed == ref.T).all()
-
-
-def test_size() -> None:
- ref = torch.tensor(
- [
- [1, 1, 0, 0], # H2
- [8, 1, 1, 0], # H2O
- ],
- )
-
- # one additional column of padding
- packed = pack([mol1, mol2], size=[4])
- assert (packed == ref).all()
diff --git a/tests/test_utils/test_real.py b/tests/test_utils/test_real.py
deleted file mode 100644
index c252b48..0000000
--- a/tests/test_utils/test_real.py
+++ /dev/null
@@ -1,299 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test the utility functions.
-"""
-
-import torch
-
-from tad_dftd3 import utils
-
-
-def test_real_atoms() -> None:
- numbers = torch.tensor(
- [
- [1, 1, 0, 0, 0], # H2
- [6, 1, 1, 1, 1], # CH4
- ],
- )
- ref = torch.tensor(
- [
- [True, True, False, False, False], # H2
- [True, True, True, True, True], # CH4
- ],
- )
- mask = utils.real_atoms(numbers)
- assert (mask == ref).all()
-
-
-def test_real_pairs_single() -> None:
- numbers = torch.tensor([6, 1, 1, 1, 1]) # CH4
- size = numbers.shape[0]
-
- ref = torch.full((size, size), True)
- mask = utils.real_pairs(numbers, diagonal=True)
- assert (mask == ref).all()
-
- ref *= ~torch.diag_embed(torch.ones(size, dtype=torch.bool))
- mask = utils.real_pairs(numbers, diagonal=False)
- assert (mask == ref).all()
-
-
-def test_real_pairs_batch() -> None:
- numbers = torch.tensor(
- [
- [1, 1, 0], # H2
- [8, 1, 1], # H2O
- ],
- )
-
- ref = torch.tensor(
- [
- [
- [True, True, False],
- [True, True, False],
- [False, False, False],
- ],
- [
- [True, True, True],
- [True, True, True],
- [True, True, True],
- ],
- ]
- )
- mask = utils.real_pairs(numbers, diagonal=True)
- assert (mask == ref).all()
-
- ref = torch.tensor(
- [
- [
- [False, True, False],
- [True, False, False],
- [False, False, False],
- ],
- [
- [False, True, True],
- [True, False, True],
- [True, True, False],
- ],
- ]
- )
- mask = utils.real_pairs(numbers, diagonal=False)
- assert (mask == ref).all()
-
-
-def test_real_triples_single() -> None:
- numbers = torch.tensor([8, 1, 1]) # H2O
- size = numbers.shape[0]
-
- ref = torch.full((size, size, size), True)
- mask = utils.real_triples(numbers, diagonal=True)
- assert (mask == ref).all()
-
- ref *= ~torch.diag_embed(torch.ones(size, dtype=torch.bool))
-    mask = utils.real_triples(numbers, diagonal=False)
- assert (mask == ref).all()
-
-
-def test_real_triples_batch() -> None:
- numbers = torch.tensor(
- [
- [1, 1, 0], # H2
- [8, 1, 1], # H2O
- ],
- )
-
- ref = torch.tensor(
- [
- [
- [
- [True, True, False],
- [True, True, False],
- [False, False, False],
- ],
- [
- [True, True, False],
- [True, True, False],
- [False, False, False],
- ],
- [
- [False, False, False],
- [False, False, False],
- [False, False, False],
- ],
- ],
- [
- [
- [True, True, True],
- [True, True, True],
- [True, True, True],
- ],
- [
- [True, True, True],
- [True, True, True],
- [True, True, True],
- ],
- [
- [True, True, True],
- [True, True, True],
- [True, True, True],
- ],
- ],
- ]
- )
- mask = utils.real_triples(numbers, diagonal=True)
- assert (mask == ref).all()
-
- ref = torch.tensor(
- [
- [
- [
- [False, True, False],
- [True, False, False],
- [False, False, False],
- ],
- [
- [False, True, False],
- [True, False, False],
- [False, False, False],
- ],
- [
- [False, False, False],
- [False, False, False],
- [False, False, False],
- ],
- ],
- [
- [
- [False, True, True],
- [True, False, True],
- [True, True, False],
- ],
- [
- [False, True, True],
- [True, False, True],
- [True, True, False],
- ],
- [
- [False, True, True],
- [True, False, True],
- [True, True, False],
- ],
- ],
- ]
- )
- mask = utils.real_triples(numbers, diagonal=False)
- assert (mask == ref).all()
-
-
-def test_real_triples_self_single() -> None:
- numbers = torch.tensor([8, 1, 1]) # H2O
-
- ref = torch.tensor(
- [
- [
- [False, False, False],
- [False, False, True],
- [False, True, False],
- ],
- [
- [False, False, True],
- [False, False, False],
- [True, False, False],
- ],
- [
- [False, True, False],
- [True, False, False],
- [False, False, False],
- ],
- ],
- dtype=torch.bool,
- )
-
- mask = utils.real_triples(numbers, self=False)
- assert (mask == ref).all()
-
-
-def test_real_triples_self_batch() -> None:
- numbers = torch.tensor(
- [
- [1, 1, 0], # H2
- [8, 1, 1], # H2O
- ],
- )
-
- ref = torch.tensor(
- [
- [
- [
- [False, False, False],
- [False, False, False],
- [False, False, False],
- ],
- [
- [False, False, False],
- [False, False, False],
- [False, False, False],
- ],
- [
- [False, False, False],
- [False, False, False],
- [False, False, False],
- ],
- ],
- [
- [
- [False, False, False],
- [False, False, True],
- [False, True, False],
- ],
- [
- [False, False, True],
- [False, False, False],
- [True, False, False],
- ],
- [
- [False, True, False],
- [True, False, False],
- [False, False, False],
- ],
- ],
- ]
- )
-
- mask = utils.real_triples(numbers, self=False)
- assert (mask == ref).all()
-
-
-def test_pack() -> None:
- mol1 = torch.tensor([1, 1]) # H2
- mol2 = torch.tensor([8, 1, 1]) # H2O
-
- # dummy test: only give single tensor
- assert (mol1 == utils.pack(mol1)).all()
-
- # standard packing
- ref = torch.tensor(
- [
- [1, 1, 0], # H2
- [8, 1, 1], # H2O
- ],
- )
- packed = utils.pack([mol1, mol2])
- assert (packed == ref).all()
-
- # different axis
- packed = utils.pack([mol1, mol2], axis=-1)
- assert (packed == ref.T).all()
diff --git a/tests/utils.py b/tests/utils.py
deleted file mode 100644
index b22902d..0000000
--- a/tests/utils.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# This file is part of tad-dftd3.
-# SPDX-Identifier: Apache-2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Collection of utility functions for testing.
-"""
-import torch
-from torch.autograd.gradcheck import gradcheck, gradgradcheck
-
-from tad_dftd3._typing import (
- Any,
- Callable,
- Dict,
- Optional,
- Protocol,
- Size,
- Tensor,
- TensorOrTensors,
- Union,
-)
-
-from .conftest import FAST_MODE
-
-
-def merge_nested_dicts(a: Dict[str, Dict], b: Dict[str, Dict]) -> Dict: # type: ignore[type-arg]
- """
-    Merge nested dictionaries. Dictionary `a` remains unaltered, while its
-    entries are merged into the matching keys of `b` (modified in place).
-
- Parameters
- ----------
- a : dict
- First dictionary (not changed).
- b : dict
- Second dictionary (changed).
-
- Returns
- -------
- dict
- Merged dictionary `b`.
- """
- for key in b:
- if key in a:
- b[key].update(a[key])
- return b
-
-
-def get_device_from_str(s: str) -> torch.device:
- """
- Convert device name to `torch.device`. Critically, this also sets the index
- for CUDA devices to `torch.cuda.current_device()`.
-
- Parameters
- ----------
- s : str
- Name of the device as string.
-
- Returns
- -------
- torch.device
- Device as torch class.
-
- Raises
- ------
- KeyError
- Unknown device name is given.
- """
- d = {
- "cpu": torch.device("cpu"),
- "cuda": torch.device("cuda", index=torch.cuda.current_device()),
- }
-
- if s not in d:
- raise KeyError(f"Unknown device '{s}' given.")
-
- return d[s]
-
-
-def reshape_fortran(x: Tensor, shape: Size) -> Tensor:
- """
- Implements Fortran's `reshape` function (column-major).
-
- Parameters
- ----------
- x : Tensor
-        Input tensor.
- shape : Size
- Output size to which `x` is reshaped.
-
- Returns
- -------
- Tensor
- Reshaped tensor of size `shape`.
- """
- if len(x.shape) > 0:
- x = x.permute(*reversed(range(len(x.shape))))
- return x.reshape(*reversed(shape)).permute(*reversed(range(len(shape))))
-
-
-class _GradcheckFunction(Protocol):
- """
- Type annotation for gradcheck function.
- """
-
- def __call__( # type: ignore
- self,
- func: Callable[..., TensorOrTensors],
- inputs: TensorOrTensors,
- *,
- eps: float = 1e-6,
- atol: float = 1e-5,
- rtol: float = 1e-3,
- raise_exception: bool = True,
- check_sparse_nnz: bool = False,
- nondet_tol: float = 0.0,
- check_undefined_grad: bool = True,
- check_grad_dtypes: bool = False,
- check_batched_grad: bool = False,
- check_batched_forward_grad: bool = False,
- check_forward_ad: bool = False,
- check_backward_ad: bool = True,
- fast_mode: bool = False,
- ) -> bool:
- ...
-
-
-class _GradgradcheckFunction(Protocol):
- """
- Type annotation for gradgradcheck function.
- """
-
- def __call__( # type: ignore
- self,
- func: Callable[..., TensorOrTensors],
- inputs: TensorOrTensors,
- grad_outputs: Optional[TensorOrTensors] = None,
- *,
- eps: float = 1e-6,
- atol: float = 1e-5,
- rtol: float = 1e-3,
- gen_non_contig_grad_outputs: bool = False,
- raise_exception: bool = True,
- nondet_tol: float = 0.0,
- check_undefined_grad: bool = True,
- check_grad_dtypes: bool = False,
- check_batched_grad: bool = False,
- check_fwd_over_rev: bool = False,
- check_rev_over_rev: bool = True,
- fast_mode: bool = False,
- ) -> bool:
- ...
-
-
-def _wrap_gradcheck(
- gradcheck_func: Union[_GradcheckFunction, _GradgradcheckFunction],
- func: Callable[..., TensorOrTensors],
- diffvars: TensorOrTensors,
- **kwargs: Any,
-) -> bool:
- fast_mode = kwargs.pop("fast_mode", FAST_MODE)
- try:
- assert gradcheck_func(func, diffvars, fast_mode=fast_mode, **kwargs)
- finally:
- if isinstance(diffvars, Tensor):
- diffvars.detach_()
- else:
- for diffvar in diffvars:
- diffvar.detach_()
-
- return True
-
-
-def dgradcheck(
- func: Callable[..., TensorOrTensors], diffvars: TensorOrTensors, **kwargs: Any
-) -> bool:
- """
- Wrapper for `torch.autograd.gradcheck` that detaches the differentiated
- variables after the check.
-
- Parameters
- ----------
- func : Callable[..., TensorOrTensors]
- Forward function.
- diffvars : TensorOrTensors
- Variables w.r.t. which we differentiate.
-
- Returns
- -------
- bool
- Status of check.
- """
- return _wrap_gradcheck(gradcheck, func, diffvars, **kwargs)
-
-
-def dgradgradcheck(
- func: Callable[..., TensorOrTensors], diffvars: TensorOrTensors, **kwargs: Any
-) -> bool:
- """
- Wrapper for `torch.autograd.gradgradcheck` that detaches the differentiated
- variables after the check.
-
- Parameters
- ----------
- func : Callable[..., TensorOrTensors]
- Forward function.
- diffvars : TensorOrTensors
- Variables w.r.t. which we differentiate.
-
- Returns
- -------
- bool
- Status of check.
- """
- return _wrap_gradcheck(gradgradcheck, func, diffvars, **kwargs)
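`reshape_fortran` above mirrors NumPy's `order="F"` (column-major) reshape, which is handy for reference data exported from Fortran programs. A small self-contained check (the helper body is copied verbatim from the deleted file; only the tiny example is new):

```
import torch

def reshape_fortran(x, shape):
    # same logic as the deleted helper: transpose, reshape reversed, transpose back
    if len(x.shape) > 0:
        x = x.permute(*reversed(range(len(x.shape))))
    return x.reshape(*reversed(shape)).permute(*reversed(range(len(shape))))

x = torch.arange(6)

# row-major (default torch.reshape): rows are filled first
assert torch.equal(x.reshape(2, 3), torch.tensor([[0, 1, 2], [3, 4, 5]]))

# column-major (Fortran order): columns are filled first
assert torch.equal(reshape_fortran(x, (2, 3)), torch.tensor([[0, 2, 4], [1, 3, 5]]))
```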
diff --git a/tox.ini b/tox.ini
index 9dc9f34..0360127 100644
--- a/tox.ini
+++ b/tox.ini
@@ -26,4 +26,4 @@ commands =
--cov=tad_dftd3 \
--cov-report=term-missing \
--cov-report=xml:coverage.xml \
- tests}
+ test}