diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 16d490f8..9ec992c3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,21 +1,79 @@
-name: Run Tests
+name: Test & Publish
on: [push]
jobs:
- pytest:
- name: pytest
+  # Run pytest and build package
+ test_build:
runs-on: ubuntu-latest
+
steps:
- - uses: actions/checkout@master
- - name: Install
- uses: abatilo/actions-poetry@v1.5.0
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Install python
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.x'
+
+ - name: Install poetry
+ uses: Gr1N/setup-poetry@v3
+
+ - name: Cache poetry dependencies
+ uses: actions/cache@v1
with:
- python_version: 3.7.7
- poetry_version: 1.0.10
- args: install -E experiments
+ path: ~/.cache/pypoetry/virtualenvs
+ key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-poetry-
+
+ - name: Install dependencies
+ run: poetry install -E experiments
+
- name: Run pytest
- uses: abatilo/actions-poetry@v1.5.0
+ run: poetry run pytest --doctest-modules --ignore=case_studies --ignore=experiments
+
+ - name: Build package
+ run: poetry build
+
+ - name: Save build
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ github.sha }}-build
+ path: dist/
+
+
+ # Publish to pypi on version change
+ publish:
+ needs: test_build
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ # Make sure to fetch the last two commits
+          # Needed for the version bump and tag
+ fetch-depth: 2
+
+ - name: Install toml
+ run: pip install toml
+
+ - name: Check for version bump and tag
+ id: version_check
+ uses: salsify/action-detect-and-tag-new-version@v2
with:
- python_version: 3.7.7
- poetry_version: 1.0.10
- args: run python -m pytest --doctest-modules --ignore=case_studies --ignore=experiments
+          create-tag: ${{ github.ref == 'refs/heads/master' }} # only create new tag on master
+ version-command: |
+ python get_version.py
+
+ - name: Download poetry build
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ github.sha }}-build
+
+ - name: Install poetry
+ uses: Gr1N/setup-poetry@v3
+
+ - name: Publish
+ # Only publish if there is a new tag
+        if: ${{ steps.version_check.outputs.tag }}
+        run: poetry publish -u ${{ secrets.PYPI_USERNAME }} -p ${{ secrets.PYPI_PASSWORD }}
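The publish job works by having `salsify/action-detect-and-tag-new-version` run the `version-command` above and compare its output against the most recent tag. A minimal sketch of that comparison, useful for checking locally whether a push would trigger a publish (the leading-`v` tag convention is an assumption):

```python
# Local sanity check: compare the pyproject.toml version against the latest
# git tag, roughly what the detect-and-tag action does. Assumes tags may be
# prefixed with "v"; adjust to the repository's tagging convention.
import subprocess

version = subprocess.check_output(["python", "get_version.py"]).decode().strip()
try:
    tag = (
        subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"])
        .decode()
        .strip()
        .lstrip("v")
    )
except subprocess.CalledProcessError:
    tag = None  # no tags yet

print("would publish" if version != tag else "no version bump")
```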
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 00000000..0ea67923
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,20 @@
+# .readthedocs.yml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+ configuration: docs/source/conf.py
+
+
+# Optionally set the version of Python and requirements required to build your docs
+python:
+ version: 3.6
+ install:
+ - method: pip
+ path: .
+ extra_requirements:
+ - docs
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 00000000..e442a449
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,7 @@
+Copyright 2020 Kobi Felton, Jan Rittig, Alexei Lapkin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
index b444cdd1..2e33b9b9 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,17 @@
# Summit
-![summit_banner](docs/source/_static/banner_4.png)
+![summit_banner](https://raw.githubusercontent.com/sustainable-processes/summit/master/docs/source/_static/banner_4.png)
+
+
+
+
+
+
+
Summit is a set of tools for optimising chemical processes. We’ve started by targeting reactions.
## What is Summit?
-Currently, reaction optimisation in the fine chemicals industry is done by intuition or design of experiments, Both scale poorly with the complexity of the problem.
+Currently, reaction optimisation in the fine chemicals industry is done by intuition or design of experiments. Both scale poorly with the complexity of the problem.
Summit uses recent advances in machine learning to make the process of reaction optimisation faster. Essentially, it applies algorithms that learn which conditions (e.g., temperature, stoichiometry, etc.) are important to maximising one or more objectives (e.g., yield, enantiomeric excess). This is achieved through an iterative cycle.
@@ -15,26 +22,15 @@ Summit has two key features:
To get started, see the Quick Start below or follow our [tutorial](https://gosummit.readthedocs.io/en/latest/tutorial.html).
-Currently, Summit has the following strategies implemented:
-
-- **TSEMO**: Multi-objective Bayesian optimisation strategy by [Bradford et al.]()
-- **Gryffin**: Single-objective Bayesian optimisation strategy designed for categoical variables [Häse et al.](https://arxiv.org/abs/2003.12127)
-- **SOBO**: Single-objective Bayesian optimisation strategy ([GpyOpt](https://gpyopt.readthedocs.io/))
-- **Nelder-Mead**: Single-objective optimisation stategy for local search
-- **SNOBFIT**: Single-objective optimisation strategy by [Huyer et al.](https://www.mat.univie.ac.at/~neum/ms/snobfit.pdf)
-- **Deep Raction Optimiser**: Deep reinforcement learning by [Zhou et al.](https://pubs.acs.org/doi/10.1021/acscentsci.7b00492)
-- **Factorial DoE**: Factorial design of experiments
-- **Random**: Random search
-
## Installation
To install summit, use the following command:
-```pip install git+https://github.com/sustainable-processes/summit.git@0.5.0#egg=summit```
+```pip install summit```
## Quick Start
-Below, we show how to use the Nelder-Mead strategy to optimise a benchmark representing a nucleophlic aromatic substitution (SnAr) reaction.
+Below, we show how to use the Nelder-Mead strategy to optimise a benchmark representing a nucleophilic aromatic substitution (SnAr) reaction.
```python
# Import summit
from summit.benchmarks import SnarBenchmark, MultitoSingleObjective
@@ -67,4 +63,3 @@ The documentation for summit can be found [here](https://gosummit.readthedocs.io
## Issues?
Submit an [issue](https://github.com/sustainable-processes/summit/issues) or send an email to kcmf2@cam.ac.uk.
-
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 1c67398e..00000000
--- a/README.rst
+++ /dev/null
@@ -1,107 +0,0 @@
-.. role:: raw-html-m2r(raw)
- :format: html
-
-
-Summit
-======
-
-Summit is a set of tools for optimizing chemical processes.
-
-Installation
-------------
-
-If you want to use summit immediately without installing python on your computer, go to our `Jupyterhub `_\ , which already has it installed. You can find a description of Jupyterhub `here `_.
-
-To install locally:
-
-``pip install git+https://github.com/sustainable-processes/summit_private.git@0.3.0#egg=summit``
-
-You might need to enter your username and password for Github.
-
-Documentation
--------------
-
-The documentation for summit can be found on the `wiki `_.
-:raw-html-m2r:``
-
-Development
------------
-
-Downloading the code
-^^^^^^^^^^^^^^^^^^^^
-
-
-#. Clone the repository:
- ``git clone https://github.com/sustainable-processes/summit_private.git``
-#. Intall poetry by following the instructions `here `_. We use poetry for dependency management.
-#. Install all dependencies:
- ``poetry install``
-#. To run tests:
- ``poetry run pytest --doctest-modules --ignore=case_studies``
-
-Commit Worfklow
-^^^^^^^^^^^^^^^
-
-
-* Use the `project board `_ to keep track of issues. Issues will automatically be moved along in the board when they are closed in Github.
-* Write tests in the tests/ folder
-* Documentation follows the `numpy docstring format `_
-
- * Please include examples when possible that can be tested using `doctest `_
- * All publicly available classes and methods should have a docstring
-
-* Commit to a branch off master and submit pull requests to merge.
-
- * To create a branch locally and push it:
- .. code-block:: bash
-
- $ git checkout -b BRANCH_NAME
- # Once you've made some changes
- $ git commit -am "commit message"
- $ git push -u origin BRANCH_NAME
- #Now if you come back to Github, your branch should exist
-
- * All pull requests need one review.
- * Tests will be run automatically when a pull request is created, and all tests need to pass before the pull request is merged.
-
-Docker
-^^^^^^
-
-Sometimes, it is easier to run tests using a Docker container (e.g., on compute clusters). Here are the commands to build and run the docker containers using the included Dockferfile. The container entrypoint is python, so you just need to specify the file name.
-
-To build the container and upload the container to Docker Hub.:
-
-.. code-block::
-
- docker build . -t marcosfelt/summit:latest
- docker push marcosfelt/summit:latest
-
-You can change the tag from ``latest`` to whatever is most appropriate (e.g., the branch name). I have found that this takes up a lot of space on disk, so I have been running the commands on our private servers.
-
-Then, to run a container, here is an example with the SnAr experiment code. The home directory of the container is called ``summit_user``\ , hence we mount the current working directory into that folder. We remove the container upon finishing using ``--rm`` and make it interactive using ``--it`` (remove this if you just want the container to run in the background). `Neptune.ai `_ is used for the experiments so the API token is passed in. Finally, I specify the image name and the tag and before referencing the python file I want to run.
-
-.. code-block::
-
- export token= #place your neptune token here
- sudo docker run -v `pwd`/:/summit_user --rm -it --env NEPTUNE_API_TOKEN=$token summit:snar_benchmark snar_experiment_2.py
-
-Singularity (for running Docker containers on the HPC):
-
-.. code-block::
-
- export NEPTUNE_API_TOKEN=
- singularity exec -B `pwd`/:/summit_user docker://marcosfelt/summit:snar_benchmark snar_experiment.py
-
-Releases
-^^^^^^^^
-
-Below is the old process for building a release. In the future, we will have this automated using Github actions.
-
-
-#. Install `s3pypi `_ and `dephell `_
-#. Install AWS credentials to upload pypi.rxns.io (Kobi is the one who controls this).
-#. Bump the version in pyproject.toml and then run:
- ``dephell deps convert --from=pyproject.toml --to=setup.py``
-#. Go into setup.py and delete the lines for extras_install_requires
-#. Upload the package to the private pypi repository:
- ``s3pypi --bucket pypi.rxns.io``
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 56857e24..a2c71a09 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -12,6 +12,7 @@
#
import os
import sys
+import subprocess
sys.path.insert(0, os.path.abspath("../.."))
@@ -56,6 +57,34 @@
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
+# -- Google Colab badge -------------------------------------------------
+COLAB = "https://colab.research.google.com/github"
+
+
+def get_current_git_branch():
+ branch = subprocess.check_output("git rev-parse --abbrev-ref HEAD", shell=True)
+    # check_output returns bytes; decode and strip the trailing newline
+    branch = branch.decode("utf-8").strip()
+    return branch
+
+
+def get_colab_base_link(
+ user="sustainable-processes", repo="summit", docs_path="docs/source"
+):
+ branch = get_current_git_branch()
+ return f"{COLAB}/{user}/{repo}/blob/{branch}/{docs_path}"
+
+
+badge_link = "https://colab.research.google.com/assets/colab-badge.svg"
+nbsphinx_prolog = r"""
+{%% set docname = env.doc2path(env.docname, base=None) %%}
+.. role:: raw-html(raw)
+ :format: html
+
+.. |colab_badge| replace:: :raw-html:`<a href="%s/{{ docname }}"><img src="%s" alt="Open in Colab"/></a>`
+""" % (
+ get_colab_base_link(),
+ badge_link,
+)
# -- Options for NBSphinx ----------------------------------------------------
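`nbsphinx_prolog` is a Jinja template that nbsphinx renders at the top of each converted notebook, so `{{ docname }}` expands per page while the two `%s` placeholders are filled once at import time by the `%`-formatting below the string. A quick way to sanity-check the helper (output assumes the docs build runs on the `master` branch):

```python
# Hypothetical check of the Colab base link; the printed URL assumes the
# current branch is "master".
print(get_colab_base_link())
# https://colab.research.google.com/github/sustainable-processes/summit/blob/master/docs/source
```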
diff --git a/docs/source/experiments_benchmarks/new_benchmarks.ipynb b/docs/source/experiments_benchmarks/new_benchmarks.ipynb
index 2c4cf56e..c82ed8d6 100644
--- a/docs/source/experiments_benchmarks/new_benchmarks.ipynb
+++ b/docs/source/experiments_benchmarks/new_benchmarks.ipynb
@@ -15,10 +15,22 @@
"source": [
"## Google Colab\n",
"\n",
- "If you would like to follow along with this tutorial, you can open it in Google Colab using the button below.\n",
- "\n",
- "{{ badge }}\n",
- "\n",
+ "If you would like to follow along with this tutorial, you can open it in Google Colab using the button below."
+ ]
+ },
+ {
+ "cell_type": "raw",
+ "metadata": {
+ "raw_mimetype": "text/restructuredtext"
+ },
+ "source": [
+ "|colab_badge|"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
"You will need to run the following cell to make sure Summit and all its dependencies are installed. If prompted, restart the runtime."
]
},
@@ -28,7 +40,7 @@
"metadata": {},
"outputs": [],
"source": [
- "!pip install git+https://github.com/sustainable-processes/summit.git#egg=summit"
+ "!pip install summit"
]
},
{
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 4930fb82..9eb1127c 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -8,7 +8,7 @@ Summit is a set of tools for optimising chemical processes. We’ve started by t
What is Summit?
##################
-Currently, reaction optimisation in the fine chemicals industry is done by intuition or design of experiments, Both scale poorly with the complexity of the problem.
+Currently, reaction optimisation in the fine chemicals industry is done by intuition or design of experiments. Both scale poorly with the complexity of the problem.
Summit uses recent advances in machine learning to make the process of reaction optimisation faster. Essentially, it applies algorithms that learn which conditions (e.g., temperature, stoichiometry, etc.) are important to maximising one or more objectives (e.g., yield, enantiomeric excess). This is achieved through an iterative cycle.
diff --git a/docs/source/installation.ipynb b/docs/source/installation.ipynb
deleted file mode 100644
index 0800c7c4..00000000
--- a/docs/source/installation.ipynb
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Installation"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The easiest way to install Summit is using pip:\n",
- "\n",
- "```bash\n",
- "pip install git+https://github.com/sustainable-processes/summit_private.git#egg=summit\n",
- "```"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3.7.3 64-bit ('summit-TfmmV07p-py3.7': venv)",
- "language": "python",
- "name": "python37364bitsummittfmmv07ppy37venv6fc212842bc44e839a51e6623a646abd"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
new file mode 100644
index 00000000..8866c5f7
--- /dev/null
+++ b/docs/source/installation.rst
@@ -0,0 +1,37 @@
+Installation
+============
+
+The easiest way to install Summit is using pip or a dependency manager that supports pip:
+
+
+.. code-block:: bash
+
+ pip install summit
+
+You could also use poetry or pipenv:
+
+.. code-block:: bash
+
+ poetry add summit
+
+.. code-block:: bash
+
+ pipenv install summit
+
+
+Summit has a set of extra dependencies for running the code in the experiments folder on Github_. You can install them as follows:
+
+.. code-block:: bash
+
+ # with pip:
+ pip install summit[experiments]
+
+ # with poetry
+ poetry add summit -E experiments
+
+ # with pipenv
+ pipenv install summit[experiments]
+
+
+.. _Github: https://github.com/sustainable-processes/summit/tree/master/experiments
+
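A quick smoke test after any of the installs above; note that the `neptune` check is an assumption about what the `experiments` extra provides:

```python
# Verify the install. The base import should always work; the neptune import
# is only a hypothetical probe for the "experiments" extra.
import summit

try:
    import neptune
    print("experiments extra appears to be installed")
except ImportError:
    print("base install only")
```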
diff --git a/docs/source/tutorial.ipynb b/docs/source/tutorial.ipynb
index 5d25e57a..fb5f56a1 100644
--- a/docs/source/tutorial.ipynb
+++ b/docs/source/tutorial.ipynb
@@ -25,10 +25,22 @@
"source": [
"## Google Colab\n",
"\n",
- "If you would like to follow along with this tutorial, you can open it in Google Colab using the button below.\n",
- "\n",
- "{{ badge }}\n",
- "\n",
+ "If you would like to follow along with this tutorial, you can open it in Google Colab using the button below."
+ ]
+ },
+ {
+ "cell_type": "raw",
+ "metadata": {
+ "raw_mimetype": "text/restructuredtext"
+ },
+ "source": [
+ "|colab_badge|"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
"You will need to run the following cell to make sure Summit and all its dependencies are installed. If prompted, restart the runtime."
]
},
@@ -38,7 +50,7 @@
"metadata": {},
"outputs": [],
"source": [
- "!pip install git+https://github.com/sustainable-processes/summit.git#egg=summit"
+ "!pip install summit"
]
},
{
@@ -859,7 +871,7 @@
"\n",
"As you've seen above, Summit has special functionality for running closed-loop experiments. The `Runner` class takes a strategy and experiment. It then get experiment suggestions from the strategy and runs the experiments.\n",
"\n",
- "If you have an automated experiment (e.g., a flow reactor), you can create your own Runner that sends requests to your experimental setup. As an example of how to do this, look at [`NeptuneRunner`](https://github.com/sustainable-processes/summit/blob/8d64f6f7705392242619d8ca393281af1a9d7e97/summit/run.py#L219), which we created to log the results of benchmarks to [Neptune.ai](https://neptune.ai/) during our own work. Primarily you need to override the `run` method.\n"
+ "If you have an automated experiment (e.g., a flow reactor), you can create your own Runner that sends requests to your experimental setup. As an example of how to do this, look at [NeptuneRunner](https://github.com/sustainable-processes/summit/blob/8d64f6f7705392242619d8ca393281af1a9d7e97/summit/run.py#L219), which we created to log the results of benchmarks to [Neptune.ai](https://neptune.ai/) during our own work. Primarily you need to override the `run` method.\n"
]
}
],
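Since the tutorial's closing cell describes subclassing `Runner` and overriding `run`, a minimal sketch of that pattern may help; the attribute names (`strategy`, `experiment`, `max_iterations`, `batch_size`) follow the tutorial's description but are assumptions here, and the `NeptuneRunner` linked above remains the reference implementation:

```python
# Sketch of a custom Runner for an automated setup. Only the shape of the
# run() override is shown; attribute names are assumed from the tutorial text.
from summit import Runner

class FlowReactorRunner(Runner):
    def run(self, **kwargs):
        prev_res = None
        for _ in range(self.max_iterations):
            # Ask the strategy for the next batch of conditions
            conditions = self.strategy.suggest_experiments(
                self.batch_size, prev_res=prev_res
            )
            # Here, send `conditions` to the reactor; in this sketch the
            # experiment object evaluates them directly.
            prev_res = self.experiment.run_experiments(conditions)
```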
diff --git a/experiments/cn_benchmark/slurm_runner.py b/experiments/cn_benchmark/slurm_runner.py
index c9a7d289..88e91b07 100644
--- a/experiments/cn_benchmark/slurm_runner.py
+++ b/experiments/cn_benchmark/slurm_runner.py
@@ -1,24 +1,30 @@
+"""
+# scp is Copyright (C) 2008 James Bardin
+"""
from summit import NeptuneRunner, get_summit_config_path
from paramiko import SSHClient
-from scp import SCPClient
import uuid
import pathlib
import os
+import locale
+import re
+from socket import timeout as SocketTimeout
+import types
class SlurmRunner(NeptuneRunner):
- """ Run an experiment on a remote server (e.g., HPC) using SLURM.
-
+ """Run an experiment on a remote server (e.g., HPC) using SLURM.
+
You need to set the environmental variables SSH_USER and SSH_PASSWORD
- with the information to log into the remote server.
+ with the information to log into the remote server.
- This runs the code inside a docker container.
+ This runs the code inside a docker container.
It also inherits NeptuneRunner so it will report up to Neptune. This means
the NEPTUNE_API_TOKEN environmental variable needs to be set, which will be
transferred to the remote server.
Parameters
- ----------
+ ----------
strategy : `summit.strategies.Strategy`
The summit strategy to be used. Note this should be an object
(i.e., you need to call the strategy and then pass it). This allows
@@ -50,8 +56,8 @@ class SlurmRunner(NeptuneRunner):
The reference for the hypervolume calculation if it is a multiobjective problem.
Should be an array of length the number of objectives. Default is at the origin.
Examples
- --------
-
+ --------
+
"""
def __init__(self, **kwargs):
@@ -118,3 +124,570 @@ def run(self, **kwargs):
# Close the ssh connection
scp.close()
+
+
+# this is quoted from the shlex module, added in py3.3
+_find_unsafe = re.compile(br"[^\w@%+=:,./~-]").search
+
+
+def _sh_quote(s):
+ """Return a shell-escaped version of the string `s`."""
+ if not s:
+ return b""
+ if _find_unsafe(s) is None:
+ return s
+
+ # use single quotes, and put single quotes into double quotes
+ # the string $'b is then quoted as '$'"'"'b'
+ return b"'" + s.replace(b"'", b"'\"'\"'") + b"'"
+
+
+# Unicode conversion functions; assume UTF-8
+
+
+def asbytes(s):
+ """Turns unicode into bytes, if needed.
+
+ Assumes UTF-8.
+ """
+ if isinstance(s, bytes):
+ return s
+ else:
+ return s.encode("utf-8")
+
+
+def asunicode(s):
+ """Turns bytes into unicode, if needed.
+
+ Uses UTF-8.
+ """
+ if isinstance(s, bytes):
+ return s.decode("utf-8", "replace")
+ else:
+ return s
+
+
+# os.path.sep is unicode on Python 3, no matter the platform
+bytes_sep = asbytes(os.path.sep)
+
+
+# Unicode conversion function for Windows
+# Used to convert local paths if the local machine is Windows
+
+
+def asunicode_win(s):
+ """Turns bytes into unicode, if needed."""
+ if isinstance(s, bytes):
+ return s.decode(locale.getpreferredencoding())
+ else:
+ return s
+
+
+class SCPClient(object):
+ """
+ An scp1 implementation, compatible with openssh scp.
+ Raises SCPException for all transport related errors. Local filesystem
+ and OS errors pass through.
+
+ Main public methods are .put and .get
+ The get method is controlled by the remote scp instance, and behaves
+ accordingly. This means that symlinks are resolved, and the transfer is
+ halted after too many levels of symlinks are detected.
+ The put method uses os.walk for recursion, and sends files accordingly.
+ Since scp doesn't support symlinks, we send file symlinks as the file
+ (matching scp behaviour), but we make no attempt at symlinked directories.
+ """
+
+ def __init__(
+ self,
+ transport,
+ buff_size=16384,
+ socket_timeout=10.0,
+ progress=None,
+ progress4=None,
+ sanitize=_sh_quote,
+ ):
+ """
+ Create an scp1 client.
+
+ @param transport: an existing paramiko L{Transport}
+ @type transport: L{Transport}
+ @param buff_size: size of the scp send buffer.
+ @type buff_size: int
+ @param socket_timeout: channel socket timeout in seconds
+ @type socket_timeout: float
+ @param progress: callback - called with (filename, size, sent) during
+ transfers
+ @param progress4: callback - called with (filename, size, sent, peername)
+            during transfers. peername is a tuple containing (IP, PORT)
+ @param sanitize: function - called with filename, should return
+ safe or escaped string. Uses _sh_quote by default.
+ @type progress: function(string, int, int, tuple)
+ """
+ self.transport = transport
+ self.buff_size = buff_size
+ self.socket_timeout = socket_timeout
+ self.channel = None
+ self.preserve_times = False
+ if progress is not None and progress4 is not None:
+ raise TypeError("You may only set one of progress, progress4")
+ elif progress4 is not None:
+ self._progress = progress4
+ elif progress is not None:
+ self._progress = lambda *a: progress(*a[:3])
+ else:
+ self._progress = None
+ self._recv_dir = b""
+ self._depth = 0
+ self._rename = False
+ self._utime = None
+ self.sanitize = sanitize
+ self._dirtimes = {}
+ self.peername = self.transport.getpeername()
+
+ def __enter__(self):
+ self.channel = self._open()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
+ def put(self, files, remote_path=b".", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories to remote host.
+
+ @param files: A single path, or a list of paths to be transferred.
+ recursive must be True to transfer directories.
+ @type files: string OR list of strings
+ @param remote_path: path in which to receive the files on the remote
+ host. defaults to '.'
+ @type remote_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files
+ and directories.
+ @type preserve_times: bool
+ """
+ self.preserve_times = preserve_times
+ self.channel = self._open()
+ self._pushed = 0
+ self.channel.settimeout(self.socket_timeout)
+ scp_command = (b"scp -t ", b"scp -r -t ")[recursive]
+ self.channel.exec_command(scp_command + self.sanitize(asbytes(remote_path)))
+ self._recv_confirm()
+
+ if not isinstance(files, (list, tuple)):
+ files = [files]
+
+ if recursive:
+ self._send_recursive(files)
+ else:
+ self._send_files(files)
+
+ self.close()
+
+ def putfo(self, fl, remote_path, mode="0644", size=None):
+ """
+ Transfer file-like object to remote host.
+
+ @param fl: opened file or file-like object to copy
+ @type fl: file-like object
+ @param remote_path: full destination path
+ @type remote_path: str
+ @param mode: permissions (posix-style) for the uploaded file
+ @type mode: str
+ @param size: size of the file in bytes. If ``None``, the size will be
+ computed using `seek()` and `tell()`.
+ """
+ if size is None:
+ pos = fl.tell()
+ fl.seek(0, os.SEEK_END) # Seek to end
+ size = fl.tell() - pos
+ fl.seek(pos, os.SEEK_SET) # Seek back
+
+ self.channel = self._open()
+ self.channel.settimeout(self.socket_timeout)
+ self.channel.exec_command(b"scp -t " + self.sanitize(asbytes(remote_path)))
+ self._recv_confirm()
+ self._send_file(fl, remote_path, mode, size=size)
+ self.close()
+
+ def get(self, remote_path, local_path="", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories from remote host to localhost.
+
+ @param remote_path: path to retrieve from remote host. since this is
+ evaluated by scp on the remote host, shell wildcards and
+ environment variables may be used.
+ @type remote_path: str
+ @param local_path: path in which to receive files locally
+ @type local_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files
+ and directories.
+ @type preserve_times: bool
+ """
+ if not isinstance(remote_path, (list, tuple)):
+ remote_path = [remote_path]
+ remote_path = [self.sanitize(asbytes(r)) for r in remote_path]
+ self._recv_dir = local_path or os.getcwd()
+ self._depth = 0
+ self._rename = len(remote_path) == 1 and not os.path.isdir(
+ os.path.abspath(local_path)
+ )
+ if len(remote_path) > 1:
+ if not os.path.exists(self._recv_dir):
+ raise SCPException(
+ "Local path '%s' does not exist" % asunicode(self._recv_dir)
+ )
+ elif not os.path.isdir(self._recv_dir):
+ raise SCPException(
+ "Local path '%s' is not a directory" % asunicode(self._recv_dir)
+ )
+ rcsv = (b"", b" -r")[recursive]
+ prsv = (b"", b" -p")[preserve_times]
+ self.channel = self._open()
+ self._pushed = 0
+ self.channel.settimeout(self.socket_timeout)
+ self.channel.exec_command(
+ b"scp" + rcsv + prsv + b" -f " + b" ".join(remote_path)
+ )
+ self._recv_all()
+ self.close()
+
+ def _open(self):
+ """open a scp channel"""
+ if self.channel is None or self.channel.closed:
+ self.channel = self.transport.open_session()
+
+ return self.channel
+
+ def close(self):
+ """close scp channel"""
+ if self.channel is not None:
+ self.channel.close()
+ self.channel = None
+
+ def _read_stats(self, name):
+ """return just the file stats needed for scp"""
+ if os.name == "nt":
+ name = asunicode(name)
+ stats = os.stat(name)
+ mode = oct(stats.st_mode)[-4:]
+ size = stats.st_size
+ atime = int(stats.st_atime)
+ mtime = int(stats.st_mtime)
+ return (mode, size, mtime, atime)
+
+ def _send_files(self, files):
+ for name in files:
+ (mode, size, mtime, atime) = self._read_stats(name)
+ if self.preserve_times:
+ self._send_time(mtime, atime)
+ fl = open(name, "rb")
+ self._send_file(fl, name, mode, size)
+ fl.close()
+
+ def _send_file(self, fl, name, mode, size):
+ basename = asbytes(os.path.basename(name))
+ # The protocol can't handle \n in the filename.
+ # Quote them as the control sequence \^J for now,
+ # which is how openssh handles it.
+ self.channel.sendall(
+ ("C%s %d " % (mode, size)).encode("ascii")
+ + basename.replace(b"\n", b"\\^J")
+ + b"\n"
+ )
+ self._recv_confirm()
+ file_pos = 0
+ if self._progress:
+ if size == 0:
+ # avoid divide-by-zero
+ self._progress(basename, 1, 1, self.peername)
+ else:
+ self._progress(basename, size, 0, self.peername)
+ buff_size = self.buff_size
+ chan = self.channel
+ while file_pos < size:
+ chan.sendall(fl.read(buff_size))
+ file_pos = fl.tell()
+ if self._progress:
+ self._progress(basename, size, file_pos, self.peername)
+        chan.sendall(b"\x00")
+ self._recv_confirm()
+
+ def _chdir(self, from_dir, to_dir):
+ # Pop until we're one level up from our next push.
+ # Push *once* into to_dir.
+ # This is dependent on the depth-first traversal from os.walk
+
+ # add path.sep to each when checking the prefix, so we can use
+ # path.dirname after
+ common = os.path.commonprefix([from_dir + bytes_sep, to_dir + bytes_sep])
+ # now take the dirname, since commonprefix is character based,
+ # and we either have a separator, or a partial name
+ common = os.path.dirname(common)
+ cur_dir = from_dir.rstrip(bytes_sep)
+ while cur_dir != common:
+ cur_dir = os.path.split(cur_dir)[0]
+ self._send_popd()
+ # now we're in our common base directory, so on
+ self._send_pushd(to_dir)
+
+ def _send_recursive(self, files):
+ for base in files:
+ if not os.path.isdir(base):
+ # filename mixed into the bunch
+ self._send_files([base])
+ continue
+ last_dir = asbytes(base)
+ for root, dirs, fls in os.walk(base):
+ self._chdir(last_dir, asbytes(root))
+ self._send_files([os.path.join(root, f) for f in fls])
+ last_dir = asbytes(root)
+ # back out of the directory
+ while self._pushed > 0:
+ self._send_popd()
+
+ def _send_pushd(self, directory):
+ (mode, size, mtime, atime) = self._read_stats(directory)
+ basename = asbytes(os.path.basename(directory))
+ if self.preserve_times:
+ self._send_time(mtime, atime)
+ self.channel.sendall(
+ ("D%s 0 " % mode).encode("ascii") + basename.replace(b"\n", b"\\^J") + b"\n"
+ )
+ self._recv_confirm()
+ self._pushed += 1
+
+ def _send_popd(self):
+        self.channel.sendall(b"E\n")
+ self._recv_confirm()
+ self._pushed -= 1
+
+ def _send_time(self, mtime, atime):
+ self.channel.sendall(("T%d 0 %d 0\n" % (mtime, atime)).encode("ascii"))
+ self._recv_confirm()
+
+ def _recv_confirm(self):
+ # read scp response
+ msg = b""
+ try:
+ msg = self.channel.recv(512)
+ except SocketTimeout:
+ raise SCPException("Timeout waiting for scp response")
+ # slice off the first byte, so this compare will work in py2 and py3
+ if msg and msg[0:1] == b"\x00":
+ return
+ elif msg and msg[0:1] == b"\x01":
+ raise SCPException(asunicode(msg[1:]))
+ elif self.channel.recv_stderr_ready():
+ msg = self.channel.recv_stderr(512)
+ raise SCPException(asunicode(msg))
+ elif not msg:
+ raise SCPException("No response from server")
+ else:
+ raise SCPException("Invalid response from server", msg)
+
+ def _recv_all(self):
+ # loop over scp commands, and receive as necessary
+ command = {
+ b"C": self._recv_file,
+ b"T": self._set_time,
+ b"D": self._recv_pushd,
+ b"E": self._recv_popd,
+ }
+ while not self.channel.closed:
+ # wait for command as long as we're open
+            self.channel.sendall(b"\x00")
+ msg = self.channel.recv(1024)
+ if not msg: # chan closed while recving
+ break
+ assert msg[-1:] == b"\n"
+ msg = msg[:-1]
+ code = msg[0:1]
+ if code not in command:
+ raise SCPException(asunicode(msg[1:]))
+ command[code](msg[1:])
+ # directory times can't be set until we're done writing files
+ self._set_dirtimes()
+
+ def _set_time(self, cmd):
+ try:
+ times = cmd.split(b" ")
+ mtime = int(times[0])
+ atime = int(times[2]) or mtime
+ except:
+ self.channel.send(b"\x01")
+ raise SCPException("Bad time format")
+ # save for later
+ self._utime = (atime, mtime)
+
+ def _recv_file(self, cmd):
+ chan = self.channel
+ parts = cmd.strip().split(b" ", 2)
+
+ try:
+ mode = int(parts[0], 8)
+ size = int(parts[1])
+ if self._rename:
+ path = self._recv_dir
+ self._rename = False
+ elif os.name == "nt":
+ name = parts[2].decode("utf-8")
+ assert not os.path.isabs(name)
+ path = os.path.join(asunicode_win(self._recv_dir), name)
+ else:
+ name = parts[2]
+ assert not os.path.isabs(name)
+ path = os.path.join(asbytes(self._recv_dir), name)
+ except:
+            chan.send(b"\x01")
+ chan.close()
+ raise SCPException("Bad file format")
+
+ try:
+ file_hdl = open(path, "wb")
+ except IOError as e:
+ chan.send(b"\x01" + str(e).encode("utf-8"))
+ chan.close()
+ raise
+
+ if self._progress:
+ if size == 0:
+ # avoid divide-by-zero
+ self._progress(path, 1, 1, self.peername)
+ else:
+ self._progress(path, size, 0, self.peername)
+ buff_size = self.buff_size
+ pos = 0
+ chan.send(b"\x00")
+ try:
+ while pos < size:
+ # we have to make sure we don't read the final byte
+ if size - pos <= buff_size:
+ buff_size = size - pos
+ data = chan.recv(buff_size)
+ if not data:
+ raise SCPException("Underlying channel was closed")
+ file_hdl.write(data)
+ pos = file_hdl.tell()
+ if self._progress:
+ self._progress(path, size, pos, self.peername)
+ msg = chan.recv(512)
+ if msg and msg[0:1] != b"\x00":
+ raise SCPException(asunicode(msg[1:]))
+ except SocketTimeout:
+ chan.close()
+ raise SCPException("Error receiving, socket.timeout")
+
+ file_hdl.truncate()
+ try:
+ os.utime(path, self._utime)
+ self._utime = None
+ os.chmod(path, mode)
+ # should we notify the other end?
+ finally:
+ file_hdl.close()
+ # '\x00' confirmation sent in _recv_all
+
+ def _recv_pushd(self, cmd):
+ parts = cmd.split(b" ", 2)
+ try:
+ mode = int(parts[0], 8)
+ if self._rename:
+ path = self._recv_dir
+ self._rename = False
+ elif os.name == "nt":
+ name = parts[2].decode("utf-8")
+ assert not os.path.isabs(name)
+ path = os.path.join(asunicode_win(self._recv_dir), name)
+ self._depth += 1
+ else:
+ name = parts[2]
+ assert not os.path.isabs(name)
+ path = os.path.join(asbytes(self._recv_dir), name)
+ self._depth += 1
+ except:
+ self.channel.send(b"\x01")
+ raise SCPException("Bad directory format")
+ try:
+ if not os.path.exists(path):
+ os.mkdir(path, mode)
+ elif os.path.isdir(path):
+ os.chmod(path, mode)
+ else:
+ raise SCPException("%s: Not a directory" % path)
+ self._dirtimes[path] = self._utime
+ self._utime = None
+ self._recv_dir = path
+ except (OSError, SCPException) as e:
+ self.channel.send(b"\x01" + asbytes(str(e)))
+ raise
+
+ def _recv_popd(self, *cmd):
+ if self._depth > 0:
+ self._depth -= 1
+ self._recv_dir = os.path.split(self._recv_dir)[0]
+
+ def _set_dirtimes(self):
+ try:
+ for d in self._dirtimes:
+ os.utime(d, self._dirtimes[d])
+ finally:
+ self._dirtimes = {}
+
+
+class SCPException(Exception):
+ """SCP exception class"""
+
+ pass
+
+
+def put(transport, files, remote_path=b".", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories to remote host.
+
+ This is a convenience function that creates a SCPClient from the given
+ transport and closes it at the end, useful for one-off transfers.
+
+ @param files: A single path, or a list of paths to be transferred.
+ recursive must be True to transfer directories.
+ @type files: string OR list of strings
+ @param remote_path: path in which to receive the files on the remote host.
+ defaults to '.'
+ @type remote_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files and
+ directories.
+ @type preserve_times: bool
+ """
+ with SCPClient(transport) as client:
+ client.put(files, remote_path, recursive, preserve_times)
+
+
+def get(transport, remote_path, local_path="", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories from remote host to localhost.
+
+ This is a convenience function that creates a SCPClient from the given
+ transport and closes it at the end, useful for one-off transfers.
+
+    @param transport: a paramiko L{Transport}
+ @type transport: L{Transport}
+ @param remote_path: path to retrieve from remote host. since this is
+ evaluated by scp on the remote host, shell wildcards and environment
+ variables may be used.
+ @type remote_path: str
+ @param local_path: path in which to receive files locally
+ @type local_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files
+ and directories.
+ @type preserve_times: bool
+ """
+ with SCPClient(transport) as client:
+ client.get(remote_path, local_path, recursive, preserve_times)
\ No newline at end of file
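The vendored `SCPClient` keeps the upstream scp.py interface, so it is used exactly as the removed `scp` dependency was. A hedged usage sketch, mirroring how `SlurmRunner.run` connects (hostname from this module, credentials from the SSH_USER/SSH_PASSWORD environment variables; the file paths are placeholders):

```python
# Usage sketch for the vendored SCPClient above; paths are placeholders.
import os
from paramiko import SSHClient, AutoAddPolicy

ssh = SSHClient()
ssh.set_missing_host_key_policy(AutoAddPolicy())
ssh.connect(
    "login-cpu.hpc.cam.ac.uk",
    username=os.getenv("SSH_USER"),
    password=os.getenv("SSH_PASSWORD"),
)

with SCPClient(ssh.get_transport()) as scp:
    scp.put("run.py", remote_path="jobs")        # upload a file
    scp.get("jobs/results.csv", local_path=".")  # fetch a result
```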
diff --git a/experiments/cn_benchmark/test_cn_experiment_MO.py b/experiments/cn_benchmark/test_cn_experiment_MO.py
index 7c69f0d5..2d7d8127 100644
--- a/experiments/cn_benchmark/test_cn_experiment_MO.py
+++ b/experiments/cn_benchmark/test_cn_experiment_MO.py
@@ -50,9 +50,9 @@
# Run experiments
@pytest.mark.parametrize("strategy", [Random])
-def test_baselines(strategy):
+def test_baselines(strategy, num_repeats=1):
"""Test Multiobjective CN Benchmark with baseline strategies (random, full factorial)"""
- for i in range(NUM_REPEATS):
+ for i in range(num_repeats):
experiment.reset()
s = strategy(experiment.domain, transform_descriptors=True)
@@ -181,4 +181,3 @@ def test_cn_experiment_no_descriptors(strategy, transform):
hypervolume_ref=HYPERVOLUME_REF,
)
r.run(save_at_end=True)
-
diff --git a/experiments/snar_benchmark/slurm_runner.py b/experiments/snar_benchmark/slurm_runner.py
index b9b34fdd..de58c512 100644
--- a/experiments/snar_benchmark/slurm_runner.py
+++ b/experiments/snar_benchmark/slurm_runner.py
@@ -1,24 +1,30 @@
+"""
+# scp is Copyright (C) 2008 James Bardin
+"""
from summit import NeptuneRunner, get_summit_config_path
from paramiko import SSHClient
-from scp import SCPClient
import uuid
import pathlib
import os
-import time
+import locale
+import re
+from socket import timeout as SocketTimeout
+import types
+
class SlurmRunner(NeptuneRunner):
- """ Run an experiment on a remote server (e.g., HPC) using SLURM.
-
+ """Run an experiment on a remote server (e.g., HPC) using SLURM.
+
You need to set the environmental variables SSH_USER and SSH_PASSWORD
- with the information to log into the remote server.
+ with the information to log into the remote server.
- This runs the code inside a docker container.
+ This runs the code inside a docker container.
It also inherits NeptuneRunner so it will report up to Neptune. This means
the NEPTUNE_API_TOKEN environmental variable needs to be set, which will be
transferred to the remote server.
Parameters
- ----------
+ ----------
strategy : `summit.strategies.Strategy`
The summit strategy to be used. Note this should be an object
(i.e., you need to call the strategy and then pass it). This allows
@@ -50,42 +56,44 @@ class SlurmRunner(NeptuneRunner):
The reference for the hypervolume calculation if it is a multiobjective problem.
Should be an array of length the number of objectives. Default is at the origin.
Examples
- --------
-
+ --------
+
"""
+
def __init__(self, **kwargs):
super().__init__(**kwargs)
- self.docker_container = kwargs.get('docker_container',
- "marcosfelt/summit:snar_benchmark")
- self.hostname = kwargs.get('hostname',"login-cpu.hpc.cam.ac.uk")
+ self.docker_container = kwargs.get(
+ "docker_container", "marcosfelt/summit:snar_benchmark"
+ )
+ self.hostname = kwargs.get("hostname", "login-cpu.hpc.cam.ac.uk")
def run(self, **kwargs):
# Set up file structure
base = pathlib.Path(".snar_benchmark")
uuid_val = str(uuid.uuid4())
- save_file_dir = base / uuid_val
+ save_file_dir = base / uuid_val
os.makedirs(save_file_dir, exist_ok=True)
# Save json
json_file_path = save_file_dir / "slurm_runner.json"
self.save(json_file_path)
- # Create python file
+ # Create python file
python_file_path = save_file_dir / "run.py"
- with open(python_file_path, 'w') as f:
+ with open(python_file_path, "w") as f:
f.write("from summit import NeptuneRunner\n")
f.write(f"""r = NeptuneRunner.load("slurm_runner.json")\n""")
f.write("r.run(save_at_end=True)")
-
+
# SSH into remote server
- username = os.getenv('SSH_USER')
+ username = os.getenv("SSH_USER")
if username is None:
raise ValueError("SSH_USER must be set")
- password = os.getenv('SSH_PASSWORD')
+ password = os.getenv("SSH_PASSWORD")
if password is None:
raise ValueError("SSH_PASSWORD must be set")
- neptune_api_token = os.getenv('NEPTUNE_API_TOKEN')
+ neptune_api_token = os.getenv("NEPTUNE_API_TOKEN")
if neptune_api_token is None:
raise ValueError("NEPTUNE_API_TOKEN must be set")
ssh = SSHClient()
@@ -99,13 +107,588 @@ def run(self, **kwargs):
# Copy files onto remote server
scp = SCPClient(ssh.get_transport())
- scp.put([str(python_file_path), str(json_file_path), "slurm_summit_snar_experiment.sh"],
- remote_path=remote_path)
+ scp.put(
+ [
+ str(python_file_path),
+ str(json_file_path),
+ "slurm_summit_snar_experiment.sh",
+ ],
+ remote_path=remote_path,
+ )
# Set the Neptune api token as an environmental variable in the remote environment
# Singularity automatically passes environmental variables to the Docker containers
- # Run the experiment
- ssh.exec_command(f"export NEPTUNE_API_TOKEN={neptune_api_token} && cd {remote_path} && sbatch slurm_summit_snar_experiment.sh {self.docker_container} run.py")
+ # Run the experiment
+ ssh.exec_command(
+ f"export NEPTUNE_API_TOKEN={neptune_api_token} && cd {remote_path} && sbatch slurm_summit_snar_experiment.sh {self.docker_container} run.py"
+ )
# Close the ssh connection
scp.close()
+
+
+# this is quoted from the shlex module, added in py3.3
+_find_unsafe = re.compile(br"[^\w@%+=:,./~-]").search
+
+
+def _sh_quote(s):
+ """Return a shell-escaped version of the string `s`."""
+ if not s:
+ return b""
+ if _find_unsafe(s) is None:
+ return s
+
+ # use single quotes, and put single quotes into double quotes
+ # the string $'b is then quoted as '$'"'"'b'
+ return b"'" + s.replace(b"'", b"'\"'\"'") + b"'"
+
+
+# Unicode conversion functions; assume UTF-8
+
+
+def asbytes(s):
+ """Turns unicode into bytes, if needed.
+
+ Assumes UTF-8.
+ """
+ if isinstance(s, bytes):
+ return s
+ else:
+ return s.encode("utf-8")
+
+
+def asunicode(s):
+ """Turns bytes into unicode, if needed.
+
+ Uses UTF-8.
+ """
+ if isinstance(s, bytes):
+ return s.decode("utf-8", "replace")
+ else:
+ return s
+
+
+# os.path.sep is unicode on Python 3, no matter the platform
+bytes_sep = asbytes(os.path.sep)
+
+
+# Unicode conversion function for Windows
+# Used to convert local paths if the local machine is Windows
+
+
+def asunicode_win(s):
+ """Turns bytes into unicode, if needed."""
+ if isinstance(s, bytes):
+ return s.decode(locale.getpreferredencoding())
+ else:
+ return s
+
+
+class SCPClient(object):
+ """
+ An scp1 implementation, compatible with openssh scp.
+ Raises SCPException for all transport related errors. Local filesystem
+ and OS errors pass through.
+
+ Main public methods are .put and .get
+ The get method is controlled by the remote scp instance, and behaves
+ accordingly. This means that symlinks are resolved, and the transfer is
+ halted after too many levels of symlinks are detected.
+ The put method uses os.walk for recursion, and sends files accordingly.
+ Since scp doesn't support symlinks, we send file symlinks as the file
+ (matching scp behaviour), but we make no attempt at symlinked directories.
+ """
+
+ def __init__(
+ self,
+ transport,
+ buff_size=16384,
+ socket_timeout=10.0,
+ progress=None,
+ progress4=None,
+ sanitize=_sh_quote,
+ ):
+ """
+ Create an scp1 client.
+
+ @param transport: an existing paramiko L{Transport}
+ @type transport: L{Transport}
+ @param buff_size: size of the scp send buffer.
+ @type buff_size: int
+ @param socket_timeout: channel socket timeout in seconds
+ @type socket_timeout: float
+ @param progress: callback - called with (filename, size, sent) during
+ transfers
+ @param progress4: callback - called with (filename, size, sent, peername)
+            during transfers. peername is a tuple containing (IP, PORT)
+ @param sanitize: function - called with filename, should return
+ safe or escaped string. Uses _sh_quote by default.
+ @type progress: function(string, int, int, tuple)
+ """
+ self.transport = transport
+ self.buff_size = buff_size
+ self.socket_timeout = socket_timeout
+ self.channel = None
+ self.preserve_times = False
+ if progress is not None and progress4 is not None:
+ raise TypeError("You may only set one of progress, progress4")
+ elif progress4 is not None:
+ self._progress = progress4
+ elif progress is not None:
+ self._progress = lambda *a: progress(*a[:3])
+ else:
+ self._progress = None
+ self._recv_dir = b""
+ self._depth = 0
+ self._rename = False
+ self._utime = None
+ self.sanitize = sanitize
+ self._dirtimes = {}
+ self.peername = self.transport.getpeername()
+
+ def __enter__(self):
+ self.channel = self._open()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
+ def put(self, files, remote_path=b".", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories to remote host.
+
+ @param files: A single path, or a list of paths to be transferred.
+ recursive must be True to transfer directories.
+ @type files: string OR list of strings
+ @param remote_path: path in which to receive the files on the remote
+ host. defaults to '.'
+ @type remote_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files
+ and directories.
+ @type preserve_times: bool
+ """
+ self.preserve_times = preserve_times
+ self.channel = self._open()
+ self._pushed = 0
+ self.channel.settimeout(self.socket_timeout)
+ scp_command = (b"scp -t ", b"scp -r -t ")[recursive]
+ self.channel.exec_command(scp_command + self.sanitize(asbytes(remote_path)))
+ self._recv_confirm()
+
+ if not isinstance(files, (list, tuple)):
+ files = [files]
+
+ if recursive:
+ self._send_recursive(files)
+ else:
+ self._send_files(files)
+
+ self.close()
+
+ def putfo(self, fl, remote_path, mode="0644", size=None):
+ """
+ Transfer file-like object to remote host.
+
+ @param fl: opened file or file-like object to copy
+ @type fl: file-like object
+ @param remote_path: full destination path
+ @type remote_path: str
+ @param mode: permissions (posix-style) for the uploaded file
+ @type mode: str
+ @param size: size of the file in bytes. If ``None``, the size will be
+ computed using `seek()` and `tell()`.
+ """
+ if size is None:
+ pos = fl.tell()
+ fl.seek(0, os.SEEK_END) # Seek to end
+ size = fl.tell() - pos
+ fl.seek(pos, os.SEEK_SET) # Seek back
+
+ self.channel = self._open()
+ self.channel.settimeout(self.socket_timeout)
+ self.channel.exec_command(b"scp -t " + self.sanitize(asbytes(remote_path)))
+ self._recv_confirm()
+ self._send_file(fl, remote_path, mode, size=size)
+ self.close()
+
+ def get(self, remote_path, local_path="", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories from remote host to localhost.
+
+ @param remote_path: path to retrieve from remote host. since this is
+ evaluated by scp on the remote host, shell wildcards and
+ environment variables may be used.
+ @type remote_path: str
+ @param local_path: path in which to receive files locally
+ @type local_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files
+ and directories.
+ @type preserve_times: bool
+ """
+ if not isinstance(remote_path, (list, tuple)):
+ remote_path = [remote_path]
+ remote_path = [self.sanitize(asbytes(r)) for r in remote_path]
+ self._recv_dir = local_path or os.getcwd()
+ self._depth = 0
+ self._rename = len(remote_path) == 1 and not os.path.isdir(
+ os.path.abspath(local_path)
+ )
+ if len(remote_path) > 1:
+ if not os.path.exists(self._recv_dir):
+ raise SCPException(
+ "Local path '%s' does not exist" % asunicode(self._recv_dir)
+ )
+ elif not os.path.isdir(self._recv_dir):
+ raise SCPException(
+ "Local path '%s' is not a directory" % asunicode(self._recv_dir)
+ )
+ rcsv = (b"", b" -r")[recursive]
+ prsv = (b"", b" -p")[preserve_times]
+ self.channel = self._open()
+ self._pushed = 0
+ self.channel.settimeout(self.socket_timeout)
+ self.channel.exec_command(
+ b"scp" + rcsv + prsv + b" -f " + b" ".join(remote_path)
+ )
+ self._recv_all()
+ self.close()
+
+ def _open(self):
+ """open a scp channel"""
+ if self.channel is None or self.channel.closed:
+ self.channel = self.transport.open_session()
+
+ return self.channel
+
+ def close(self):
+ """close scp channel"""
+ if self.channel is not None:
+ self.channel.close()
+ self.channel = None
+
+ def _read_stats(self, name):
+ """return just the file stats needed for scp"""
+ if os.name == "nt":
+ name = asunicode(name)
+ stats = os.stat(name)
+ mode = oct(stats.st_mode)[-4:]
+ size = stats.st_size
+ atime = int(stats.st_atime)
+ mtime = int(stats.st_mtime)
+ return (mode, size, mtime, atime)
+
+ def _send_files(self, files):
+ for name in files:
+ (mode, size, mtime, atime) = self._read_stats(name)
+ if self.preserve_times:
+ self._send_time(mtime, atime)
+ fl = open(name, "rb")
+ self._send_file(fl, name, mode, size)
+ fl.close()
+
+ def _send_file(self, fl, name, mode, size):
+ basename = asbytes(os.path.basename(name))
+ # The protocol can't handle \n in the filename.
+ # Quote them as the control sequence \^J for now,
+ # which is how openssh handles it.
+ self.channel.sendall(
+ ("C%s %d " % (mode, size)).encode("ascii")
+ + basename.replace(b"\n", b"\\^J")
+ + b"\n"
+ )
+ self._recv_confirm()
+ file_pos = 0
+ if self._progress:
+ if size == 0:
+ # avoid divide-by-zero
+ self._progress(basename, 1, 1, self.peername)
+ else:
+ self._progress(basename, size, 0, self.peername)
+ buff_size = self.buff_size
+ chan = self.channel
+ while file_pos < size:
+ chan.sendall(fl.read(buff_size))
+ file_pos = fl.tell()
+ if self._progress:
+ self._progress(basename, size, file_pos, self.peername)
+        chan.sendall(b"\x00")
+ self._recv_confirm()
+
+ def _chdir(self, from_dir, to_dir):
+ # Pop until we're one level up from our next push.
+ # Push *once* into to_dir.
+ # This is dependent on the depth-first traversal from os.walk
+
+ # add path.sep to each when checking the prefix, so we can use
+ # path.dirname after
+ common = os.path.commonprefix([from_dir + bytes_sep, to_dir + bytes_sep])
+ # now take the dirname, since commonprefix is character based,
+ # and we either have a separator, or a partial name
+ common = os.path.dirname(common)
+ cur_dir = from_dir.rstrip(bytes_sep)
+ while cur_dir != common:
+ cur_dir = os.path.split(cur_dir)[0]
+ self._send_popd()
+ # now we're in our common base directory, so on
+ self._send_pushd(to_dir)
+
+ def _send_recursive(self, files):
+ for base in files:
+ if not os.path.isdir(base):
+ # filename mixed into the bunch
+ self._send_files([base])
+ continue
+ last_dir = asbytes(base)
+ for root, dirs, fls in os.walk(base):
+ self._chdir(last_dir, asbytes(root))
+ self._send_files([os.path.join(root, f) for f in fls])
+ last_dir = asbytes(root)
+ # back out of the directory
+ while self._pushed > 0:
+ self._send_popd()
+
+ def _send_pushd(self, directory):
+ (mode, size, mtime, atime) = self._read_stats(directory)
+ basename = asbytes(os.path.basename(directory))
+ if self.preserve_times:
+ self._send_time(mtime, atime)
+ self.channel.sendall(
+ ("D%s 0 " % mode).encode("ascii") + basename.replace(b"\n", b"\\^J") + b"\n"
+ )
+ self._recv_confirm()
+ self._pushed += 1
+
+ def _send_popd(self):
+        self.channel.sendall(b"E\n")
+ self._recv_confirm()
+ self._pushed -= 1
+
+ def _send_time(self, mtime, atime):
+ self.channel.sendall(("T%d 0 %d 0\n" % (mtime, atime)).encode("ascii"))
+ self._recv_confirm()
+
+ def _recv_confirm(self):
+ # read scp response
+ msg = b""
+ try:
+ msg = self.channel.recv(512)
+ except SocketTimeout:
+ raise SCPException("Timeout waiting for scp response")
+ # slice off the first byte, so this compare will work in py2 and py3
+ if msg and msg[0:1] == b"\x00":
+ return
+ elif msg and msg[0:1] == b"\x01":
+ raise SCPException(asunicode(msg[1:]))
+ elif self.channel.recv_stderr_ready():
+ msg = self.channel.recv_stderr(512)
+ raise SCPException(asunicode(msg))
+ elif not msg:
+ raise SCPException("No response from server")
+ else:
+ raise SCPException("Invalid response from server", msg)
+
+ def _recv_all(self):
+ # loop over scp commands, and receive as necessary
+ command = {
+ b"C": self._recv_file,
+ b"T": self._set_time,
+ b"D": self._recv_pushd,
+ b"E": self._recv_popd,
+ }
+ while not self.channel.closed:
+ # wait for command as long as we're open
+            self.channel.sendall(b"\x00")
+ msg = self.channel.recv(1024)
+ if not msg: # chan closed while recving
+ break
+ assert msg[-1:] == b"\n"
+ msg = msg[:-1]
+ code = msg[0:1]
+ if code not in command:
+ raise SCPException(asunicode(msg[1:]))
+ command[code](msg[1:])
+ # directory times can't be set until we're done writing files
+ self._set_dirtimes()
+
+ def _set_time(self, cmd):
+ try:
+ times = cmd.split(b" ")
+ mtime = int(times[0])
+ atime = int(times[2]) or mtime
+ except:
+ self.channel.send(b"\x01")
+ raise SCPException("Bad time format")
+ # save for later
+ self._utime = (atime, mtime)
+
+ def _recv_file(self, cmd):
+ chan = self.channel
+ parts = cmd.strip().split(b" ", 2)
+
+ try:
+ mode = int(parts[0], 8)
+ size = int(parts[1])
+ if self._rename:
+ path = self._recv_dir
+ self._rename = False
+ elif os.name == "nt":
+ name = parts[2].decode("utf-8")
+ assert not os.path.isabs(name)
+ path = os.path.join(asunicode_win(self._recv_dir), name)
+ else:
+ name = parts[2]
+ assert not os.path.isabs(name)
+ path = os.path.join(asbytes(self._recv_dir), name)
+ except:
+            chan.send(b"\x01")
+ chan.close()
+ raise SCPException("Bad file format")
+
+ try:
+ file_hdl = open(path, "wb")
+ except IOError as e:
+ chan.send(b"\x01" + str(e).encode("utf-8"))
+ chan.close()
+ raise
+
+ if self._progress:
+ if size == 0:
+ # avoid divide-by-zero
+ self._progress(path, 1, 1, self.peername)
+ else:
+ self._progress(path, size, 0, self.peername)
+ buff_size = self.buff_size
+ pos = 0
+ chan.send(b"\x00")
+ try:
+ while pos < size:
+ # we have to make sure we don't read the final byte
+ if size - pos <= buff_size:
+ buff_size = size - pos
+ data = chan.recv(buff_size)
+ if not data:
+ raise SCPException("Underlying channel was closed")
+ file_hdl.write(data)
+ pos = file_hdl.tell()
+ if self._progress:
+ self._progress(path, size, pos, self.peername)
+ msg = chan.recv(512)
+ if msg and msg[0:1] != b"\x00":
+ raise SCPException(asunicode(msg[1:]))
+ except SocketTimeout:
+ chan.close()
+ raise SCPException("Error receiving, socket.timeout")
+
+ file_hdl.truncate()
+ try:
+ os.utime(path, self._utime)
+ self._utime = None
+ os.chmod(path, mode)
+ # should we notify the other end?
+ finally:
+ file_hdl.close()
+ # '\x00' confirmation sent in _recv_all
+
+ def _recv_pushd(self, cmd):
+ parts = cmd.split(b" ", 2)
+ try:
+ mode = int(parts[0], 8)
+ if self._rename:
+ path = self._recv_dir
+ self._rename = False
+ elif os.name == "nt":
+ name = parts[2].decode("utf-8")
+ assert not os.path.isabs(name)
+ path = os.path.join(asunicode_win(self._recv_dir), name)
+ self._depth += 1
+ else:
+ name = parts[2]
+ assert not os.path.isabs(name)
+ path = os.path.join(asbytes(self._recv_dir), name)
+ self._depth += 1
+ except:
+ self.channel.send(b"\x01")
+ raise SCPException("Bad directory format")
+ try:
+ if not os.path.exists(path):
+ os.mkdir(path, mode)
+ elif os.path.isdir(path):
+ os.chmod(path, mode)
+ else:
+ raise SCPException("%s: Not a directory" % path)
+ self._dirtimes[path] = self._utime
+ self._utime = None
+ self._recv_dir = path
+ except (OSError, SCPException) as e:
+ self.channel.send(b"\x01" + asbytes(str(e)))
+ raise
+
+ def _recv_popd(self, *cmd):
+ if self._depth > 0:
+ self._depth -= 1
+ self._recv_dir = os.path.split(self._recv_dir)[0]
+
+ def _set_dirtimes(self):
+ try:
+ for d in self._dirtimes:
+ os.utime(d, self._dirtimes[d])
+ finally:
+ self._dirtimes = {}
+
+
+class SCPException(Exception):
+ """SCP exception class"""
+
+ pass
+
+
+def put(transport, files, remote_path=b".", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories to remote host.
+
+ This is a convenience function that creates an SCPClient from the given
+ transport and closes it at the end, useful for one-off transfers.
+
+ @param transport: a paramiko L{Transport}
+ @type transport: L{Transport}
+ @param files: A single path, or a list of paths to be transferred.
+ recursive must be True to transfer directories.
+ @type files: string OR list of strings
+ @param remote_path: path in which to receive the files on the remote host.
+ defaults to '.'
+ @type remote_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files and
+ directories.
+ @type preserve_times: bool
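+
+ Example (a minimal sketch; assumes ``transport`` is an already
+ authenticated paramiko L{Transport})::
+
+ put(transport, "local_file.txt", remote_path="/tmp")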
+ """
+ with SCPClient(transport) as client:
+ client.put(files, remote_path, recursive, preserve_times)
+
+
+def get(transport, remote_path, local_path="", recursive=False, preserve_times=False):
+ """
+ Transfer files and directories from the remote host to the local host.
+
+ This is a convenience function that creates an SCPClient from the given
+ transport and closes it at the end, useful for one-off transfers.
+
+ @param transport: a paramiko L{Transport}
+ @type transport: L{Transport}
+ @param remote_path: path to retrieve from remote host. Since this is
+ evaluated by scp on the remote host, shell wildcards and environment
+ variables may be used.
+ @type remote_path: str
+ @param local_path: path in which to receive files locally
+ @type local_path: str
+ @param recursive: transfer files and directories recursively
+ @type recursive: bool
+ @param preserve_times: preserve mtime and atime of transferred files
+ and directories.
+ @type preserve_times: bool
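+
+ Example (a minimal sketch; assumes ``transport`` is an already
+ authenticated paramiko L{Transport})::
+
+ get(transport, "/tmp/remote_file.txt", local_path=".")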
+ """
+ with SCPClient(transport) as client:
+ client.get(remote_path, local_path, recursive, preserve_times)
\ No newline at end of file
diff --git a/experiments/snar_benchmark/visualization.ipynb b/experiments/snar_benchmark/visualization.ipynb
index 4fe5d4b3..a8396819 100644
--- a/experiments/snar_benchmark/visualization.ipynb
+++ b/experiments/snar_benchmark/visualization.ipynb
@@ -37,11 +37,6 @@
"from pymoo.optimize import minimize\n",
"from pymoo.factory import get_termination\n",
"\n",
- "\n",
- "from neptune.sessions import Session, HostedNeptuneBackend\n",
- "from dotenv import load_dotenv, find_dotenv\n",
- "_ = load_dotenv(find_dotenv()) #load neptune api token\n",
- "\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib.ticker as ticker\n",
"import mpl_toolkits.mplot3d.art3d as art3d\n",
diff --git a/get_version.py b/get_version.py
new file mode 100644
index 00000000..d7191da8
--- /dev/null
+++ b/get_version.py
@@ -0,0 +1,8 @@
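+# Print the package version from pyproject.toml; the CI publish job runs
+# this script to detect version bumps before tagging and publishing.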
+import toml
+
+with open("pyproject.toml", "r") as f:
+ text = f.read()
+
+toml_dict = toml.loads(text)
+
+print(toml_dict["tool"]["poetry"]["version"])
diff --git a/poetry.lock b/poetry.lock
index a34f90c4..817042c2 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -10,10 +10,10 @@ version = "0.10.0"
six = "*"
[[package]]
-category = "dev"
+category = "main"
description = "A configurable sidebar-enabled Sphinx theme"
name = "alabaster"
-optional = false
+optional = true
python-versions = "*"
version = "0.7.12"
@@ -136,10 +136,10 @@ future = ">=0.15.2"
numpy = ">=1.12"
[[package]]
-category = "dev"
+category = "main"
description = "Internationalization utilities"
name = "babel"
-optional = false
+optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.8.0"
@@ -391,6 +391,14 @@ version = "0.10.0"
[package.dependencies]
six = "*"
+[[package]]
+category = "main"
+description = "The Cython compiler for writing C extensions for the Python language."
+name = "cython"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+version = "0.29.21"
+
[[package]]
category = "dev"
description = "A backport of the dataclasses module for Python 3.6"
@@ -417,10 +425,21 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
version = "0.6.0"
[[package]]
-category = "dev"
+category = "main"
+description = "Tree is a library for working with nested data structures."
+name = "dm-tree"
+optional = false
+python-versions = "*"
+version = "0.1.5"
+
+[package.dependencies]
+six = ">=1.12.0"
+
+[[package]]
+category = "main"
description = "Docutils -- Python Documentation Utilities"
name = "docutils"
-optional = false
+optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
version = "0.16"
@@ -606,20 +625,19 @@ protobuf = ["grpcio-tools (>=1.32.0)"]
[[package]]
category = "main"
-description = ""
+description = "Bayesian optimization for categorical variables"
name = "gryffin"
optional = false
python-versions = ">=3.6"
-version = "0.1.0"
+version = "0.1.1"
[package.dependencies]
+Cython = "*"
numpy = "*"
sqlalchemy = "*"
+tensorflow = ">=2.2.0,<3.0.0"
+tensorflow-probability = ">=0.10.1,<1.0.0"
-[package.source]
-reference = "8f928980e1625ca7563c16ec254c1758a11a531a"
-type = "git"
-url = "https://github.com/sustainable-processes/gryffin"
[[package]]
category = "main"
description = "Read and write HDF5 files from Python"
@@ -658,10 +676,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.10"
[[package]]
-category = "dev"
+category = "main"
description = "Getting image size from png/jpeg/jpeg2000/gif file"
name = "imagesize"
-optional = false
+optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "1.2.0"
@@ -1082,10 +1100,10 @@ traitlets = ">=4.1"
test = ["pytest", "pytest-cov", "testpath"]
[[package]]
-category = "dev"
+category = "main"
description = "Jupyter Notebook Tools for Sphinx"
name = "nbsphinx"
-optional = false
+optional = true
python-versions = ">=3"
version = "0.7.1"
@@ -1210,21 +1228,6 @@ optional = false
python-versions = ">=3.5"
version = "1.18.0"
-[[package]]
-category = "dev"
-description = "Sphinx extension to support docstrings in Numpy format"
-name = "numpydoc"
-optional = false
-python-versions = ">=3.5"
-version = "1.1.0"
-
-[package.dependencies]
-Jinja2 = ">=2.3"
-sphinx = ">=1.6.5"
-
-[package.extras]
-testing = ["matplotlib", "pytest", "pytest-cov"]
-
[[package]]
category = "main"
description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
@@ -1488,7 +1491,7 @@ description = "Pygments is a syntax highlighting package written in Python."
name = "pygments"
optional = false
python-versions = ">=3.5"
-version = "2.6.1"
+version = "2.7.0"
[[package]]
category = "main"
@@ -1595,17 +1598,6 @@ version = "2.8.1"
[package.dependencies]
six = ">=1.5"
-[[package]]
-category = "dev"
-description = "Add .env support to your django/flask apps in development and deployments"
-name = "python-dotenv"
-optional = false
-python-versions = "*"
-version = "0.13.0"
-
-[package.extras]
-cli = ["click (>=5.0)"]
-
[[package]]
category = "main"
description = "World timezone definitions, modern and historical"
@@ -1739,21 +1731,6 @@ version = "1.4.1"
[package.dependencies]
numpy = ">=1.13.3"
-[[package]]
-category = "main"
-description = "scp module for paramiko"
-name = "scp"
-optional = true
-python-versions = "*"
-version = "0.13.2"
-
-[package.dependencies]
-paramiko = "*"
-
-[package.source]
-reference = "549eb6060e28d6095504ddd2769e8cdc66caa170"
-type = "git"
-url = "https://github.com/jbardin/scp.py"
[[package]]
category = "main"
description = "Send file to trash natively under Mac OS X, Windows and Linux."
@@ -1787,10 +1764,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "3.0.4"
[[package]]
-category = "dev"
+category = "main"
description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms."
name = "snowballstemmer"
-optional = false
+optional = true
python-versions = "*"
version = "2.0.0"
@@ -1803,10 +1780,10 @@ python-versions = "*"
version = "1.9.6"
[[package]]
-category = "dev"
+category = "main"
description = "Python documentation generator"
name = "sphinx"
-optional = false
+optional = true
python-versions = ">=3.5"
version = "3.2.1"
@@ -1835,10 +1812,10 @@ lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-s
test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"]
[[package]]
-category = "dev"
+category = "main"
description = "Read the Docs theme for Sphinx"
name = "sphinx-rtd-theme"
-optional = false
+optional = true
python-versions = "*"
version = "0.5.0"
@@ -1849,10 +1826,10 @@ sphinx = "*"
dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"]
[[package]]
-category = "dev"
+category = "main"
description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
name = "sphinxcontrib-applehelp"
-optional = false
+optional = true
python-versions = ">=3.5"
version = "1.0.2"
@@ -1861,10 +1838,10 @@ lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]
[[package]]
-category = "dev"
+category = "main"
description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
name = "sphinxcontrib-devhelp"
-optional = false
+optional = true
python-versions = ">=3.5"
version = "1.0.2"
@@ -1873,10 +1850,10 @@ lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]
[[package]]
-category = "dev"
+category = "main"
description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
name = "sphinxcontrib-htmlhelp"
-optional = false
+optional = true
python-versions = ">=3.5"
version = "1.0.3"
@@ -1885,10 +1862,10 @@ lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest", "html5lib"]
[[package]]
-category = "dev"
+category = "main"
description = "A sphinx extension which renders display math in HTML via JavaScript"
name = "sphinxcontrib-jsmath"
-optional = false
+optional = true
python-versions = ">=3.5"
version = "1.0.1"
@@ -1896,10 +1873,10 @@ version = "1.0.1"
test = ["pytest", "flake8", "mypy"]
[[package]]
-category = "dev"
+category = "main"
description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
name = "sphinxcontrib-qthelp"
-optional = false
+optional = true
python-versions = ">=3.5"
version = "1.0.3"
@@ -1908,10 +1885,10 @@ lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]
[[package]]
-category = "dev"
+category = "main"
description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
name = "sphinxcontrib-serializinghtml"
-optional = false
+optional = true
python-versions = ">=3.5"
version = "1.1.4"
@@ -1956,7 +1933,7 @@ description = "SnobFit - Stable Noisy Optimization by Branch and FIT"
name = "sqsnobfit"
optional = false
python-versions = "*"
-version = "0.4.3"
+version = "0.4.4"
[package.dependencies]
SQCommon = "*"
@@ -1981,7 +1958,7 @@ description = "TensorBoard lets you watch Tensors Flow"
name = "tensorboard"
optional = false
python-versions = ">= 2.7, != 3.0.*, != 3.1.*"
-version = "2.2.2"
+version = "2.3.0"
[package.dependencies]
absl-py = ">=0.4"
@@ -2015,7 +1992,7 @@ description = "TensorFlow is an open source machine learning framework for every
name = "tensorflow"
optional = false
python-versions = "*"
-version = "2.2.0"
+version = "2.3.0"
[package.dependencies]
absl-py = ">=0.7.0"
@@ -2024,31 +2001,25 @@ gast = "0.3.3"
google-pasta = ">=0.1.8"
grpcio = ">=1.8.6"
h5py = ">=2.10.0,<2.11.0"
-keras-preprocessing = ">=1.1.0"
-numpy = ">=1.16.0,<2.0"
+keras-preprocessing = ">=1.1.1,<1.2"
+numpy = ">=1.16.0,<1.19.0"
opt-einsum = ">=2.3.2"
-protobuf = ">=3.8.0"
+protobuf = ">=3.9.2"
+scipy = "1.4.1"
six = ">=1.12.0"
-tensorboard = ">=2.2.0,<2.3.0"
-tensorflow-estimator = ">=2.2.0,<2.3.0"
+tensorboard = ">=2.3.0,<3"
+tensorflow-estimator = ">=2.3.0,<2.4.0"
termcolor = ">=1.1.0"
+wheel = ">=0.26"
wrapt = ">=1.11.1"
-[package.dependencies.scipy]
-python = ">=3"
-version = "1.4.1"
-
-[package.dependencies.wheel]
-python = ">=3"
-version = ">=0.26"
-
[[package]]
category = "main"
description = "TensorFlow Estimator."
name = "tensorflow-estimator"
optional = false
python-versions = "*"
-version = "2.2.0"
+version = "2.3.0"
[[package]]
category = "main"
@@ -2056,17 +2027,19 @@ description = "Probabilistic modeling and statistical inference in TensorFlow"
name = "tensorflow-probability"
optional = false
python-versions = "*"
-version = "0.10.1"
+version = "0.11.0"
[package.dependencies]
cloudpickle = "1.3"
decorator = "*"
+dm-tree = "*"
gast = ">=0.3.2"
numpy = ">=1.13.3"
six = ">=1.10.0"
[package.extras]
jax = ["jax", "jaxlib"]
+tfds = ["tensorflow-datasets (>=2.2.0)"]
[[package]]
category = "main"
@@ -2294,10 +2267,11 @@ docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
testing = ["jaraco.itertools", "func-timeout"]
[extras]
-experiments = ["neptune-client", "hiplot", "neptune-contrib", "paramiko", "scp"]
+docs = ["sphinx", "nbsphinx", "sphinx-rtd-theme"]
+experiments = ["neptune-client", "hiplot", "neptune-contrib", "paramiko"]
[metadata]
-content-hash = "95d652267f189d4eb897d0f86713da389cbaefc72d4194a7287fcb10ca38a40f"
+content-hash = "176bb04ddbce2ada8a23d18785405ec71324cb1598d9d4214987db4bc23f504b"
python-versions = "^3.6.1"
[metadata.files]
@@ -2520,6 +2494,41 @@ cycler = [
{file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"},
{file = "cycler-0.10.0.tar.gz", hash = "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"},
]
+cython = [
+ {file = "Cython-0.29.21-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c541b2b49c6638f2b5beb9316726db84a8d1c132bf31b942dae1f9c7f6ad3b92"},
+ {file = "Cython-0.29.21-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b8d8497091c1dc8705d1575c71e908a93b1f127a174b2d472020f3d84263ac28"},
+ {file = "Cython-0.29.21-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:695a6bcaf9e12b1e471dfce96bbecf22a1487adc2ac6106b15960a2b51b97f5d"},
+ {file = "Cython-0.29.21-cp27-cp27m-win32.whl", hash = "sha256:171b9f70ceafcec5852089d0f9c1e75b0d554f46c882cd4e2e4acaba9bd7d148"},
+ {file = "Cython-0.29.21-cp27-cp27m-win_amd64.whl", hash = "sha256:539e59949aab4955c143a468810123bf22d3e8556421e1ce2531ed4893914ca0"},
+ {file = "Cython-0.29.21-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:e93acd1f603a0c1786e0841f066ae7cef014cf4750e3cd06fd03cfdf46361419"},
+ {file = "Cython-0.29.21-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:2922e3031ba9ebbe7cb9200b585cc33b71d66023d78450dcb883f824f4969371"},
+ {file = "Cython-0.29.21-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:497841897942f734b0abc2dead2d4009795ee992267a70a23485fd0e937edc0b"},
+ {file = "Cython-0.29.21-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:0ac10bf476476a9f7ef61ec6e44c280ef434473124ad31d3132b720f7b0e8d2a"},
+ {file = "Cython-0.29.21-cp34-cp34m-win32.whl", hash = "sha256:31c71a615f38401b0dc1f2a5a9a6c421ffd8908c4cd5bbedc4014c1b876488e8"},
+ {file = "Cython-0.29.21-cp34-cp34m-win_amd64.whl", hash = "sha256:c4b78356074fcaac04ecb4de289f11d506e438859877670992ece11f9c90f37b"},
+ {file = "Cython-0.29.21-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:b2f9172e4d6358f33ecce6a4339b5960f9f83eab67ea244baa812737793826b7"},
+ {file = "Cython-0.29.21-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:856c7fb31d247ce713d60116375e1f8153d0291ab5e92cca7d8833a524ba9991"},
+ {file = "Cython-0.29.21-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:715294cd2246b39a8edca464a8366eb635f17213e4a6b9e74e52d8b877a8cb63"},
+ {file = "Cython-0.29.21-cp35-cp35m-win32.whl", hash = "sha256:23f3a00b843a19de8bb4468b087db5b413a903213f67188729782488d67040e0"},
+ {file = "Cython-0.29.21-cp35-cp35m-win_amd64.whl", hash = "sha256:ccb77faeaad99e99c6c444d04862c6cf604204fe0a07d4c8f9cbf2c9012d7d5a"},
+ {file = "Cython-0.29.21-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e272ed97d20b026f4f25a012b25d7d7672a60e4f72b9ca385239d693cd91b2d5"},
+ {file = "Cython-0.29.21-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:8c6e25e9cc4961bb2abb1777c6fa9d0fa2d9b014beb3276cebe69996ff162b78"},
+ {file = "Cython-0.29.21-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:57ead89128dee9609119c93d3926c7a2add451453063147900408a50144598c6"},
+ {file = "Cython-0.29.21-cp36-cp36m-win32.whl", hash = "sha256:0e25c209c75df8785480dcef85db3d36c165dbc0f4c503168e8763eb735704f2"},
+ {file = "Cython-0.29.21-cp36-cp36m-win_amd64.whl", hash = "sha256:a0674f246ad5e1571ef29d4c5ec1d6ecabe9e6c424ad0d6fee46b914d5d24d69"},
+ {file = "Cython-0.29.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5da187bebe38030325e1c0b5b8a804d489410be2d384c0ef3ba39493c67eb51e"},
+ {file = "Cython-0.29.21-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9ce5e5209f8406ffc2b058b1293cce7a954911bb7991e623564d489197c9ba30"},
+ {file = "Cython-0.29.21-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5e545a48f919e40079b0efe7b0e081c74b96f9ef25b9c1ff4cdbd95764426b58"},
+ {file = "Cython-0.29.21-cp37-cp37m-win32.whl", hash = "sha256:c8435959321cf8aec867bbad54b83b7fb8343204b530d85d9ea7a1f5329d5ac2"},
+ {file = "Cython-0.29.21-cp37-cp37m-win_amd64.whl", hash = "sha256:540b3bee0711aac2e99bda4fa0a46dbcd8c74941666bfc1ef9236b1a64eeffd9"},
+ {file = "Cython-0.29.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:93f5fed1c9445fb7afe20450cdaf94b0e0356d47cc75008105be89c6a2e417b1"},
+ {file = "Cython-0.29.21-cp38-cp38-manylinux1_i686.whl", hash = "sha256:9207fdedc7e789a3dcaca628176b80c82fbed9ae0997210738cbb12536a56699"},
+ {file = "Cython-0.29.21-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:603b9f1b8e93e8b494d3e89320c410679e21018e48b6cbc77280f5db71f17dc0"},
+ {file = "Cython-0.29.21-cp38-cp38-win32.whl", hash = "sha256:473df5d5e400444a36ed81c6596f56a5b52a3481312d0a48d68b777790f730ae"},
+ {file = "Cython-0.29.21-cp38-cp38-win_amd64.whl", hash = "sha256:b8a8a31b9e8860634adbca30fea1d0c7f08e208b3d7611f3e580e5f20992e5d7"},
+ {file = "Cython-0.29.21-py2.py3-none-any.whl", hash = "sha256:5c4276fdcbccdf1e3c1756c7aeb8395e9a36874fa4d30860e7694f43d325ae13"},
+ {file = "Cython-0.29.21.tar.gz", hash = "sha256:e57acb89bd55943c8d8bf813763d20b9099cc7165c0f16b707631a7654be9cad"},
+]
dataclasses = [
{file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"},
{file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"},
@@ -2532,6 +2541,24 @@ defusedxml = [
{file = "defusedxml-0.6.0-py2.py3-none-any.whl", hash = "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93"},
{file = "defusedxml-0.6.0.tar.gz", hash = "sha256:f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5"},
]
+dm-tree = [
+ {file = "dm-tree-0.1.5.tar.gz", hash = "sha256:a951d2239111dfcc468071bc8ff792c7b1e3192cab5a3c94d33a8b2bda3127fa"},
+ {file = "dm_tree-0.1.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:39a2c12d86b9df7bcb64c3473957744bc1af812dd7bf27c01108f17ed688644b"},
+ {file = "dm_tree-0.1.5-cp27-cp27m-win_amd64.whl", hash = "sha256:e4314cd65c5d2218fe580c265e005b334d609e51c33746fedd4014006fca7fa2"},
+ {file = "dm_tree-0.1.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9699c10c3c58bcc8d221445c7864ff9e3e738e629f1943efdb818c4fd4083326"},
+ {file = "dm_tree-0.1.5-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:3f7d29c6130f8cb41ee862c96f752dd37c35e6c4cdbf3aaa0db9a3c8516892e2"},
+ {file = "dm_tree-0.1.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:21775e6a4d5afafa65514db435dba71637edccb145ec8fd7f144355e9d14d6e2"},
+ {file = "dm_tree-0.1.5-cp35-cp35m-win_amd64.whl", hash = "sha256:4992e3517a6d5f6211b8240784ec0006927945fbf9feb56c37c8068a80e211af"},
+ {file = "dm_tree-0.1.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:90706db66bd8dd4b71c00dfd883d83bca6be238093e2db3d46331085b3a12214"},
+ {file = "dm_tree-0.1.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:12e662dadd3f0d3e729cbc5a8a8aef88606339d056c95dc83c1e467e6330ba9a"},
+ {file = "dm_tree-0.1.5-cp36-cp36m-win_amd64.whl", hash = "sha256:a13ebfdce7b47310ba02e14005971de12d7d30e082e17f3a73f3f309a53b926e"},
+ {file = "dm_tree-0.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6caa5308f139aeef22c64c55cf6bb7e6882a4119eac48c7cc572e2388d379598"},
+ {file = "dm_tree-0.1.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:be6e8173eb8e0f3d03a93abadf6eae6581219b6b408fa835e2eac04e775f0969"},
+ {file = "dm_tree-0.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:23707e6d4c2ee4567f341429b7b358c3108cf5130f0897bdee54b3aa890de220"},
+ {file = "dm_tree-0.1.5-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:669c8a7929d350db2ba7354e030d8b7318ba94d4cc5702858beed2219f0566be"},
+ {file = "dm_tree-0.1.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:134b58263e09a6e552c05c85c630f424446b75600bd38ff1838329eb0cbe0e50"},
+ {file = "dm_tree-0.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:3ed4a2c3328603fa06c2104921b31faf3e831e9828ed625bb0e0813dc2c8a17f"},
+]
docutils = [
{file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
{file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
@@ -2638,7 +2665,10 @@ grpcio = [
{file = "grpcio-1.32.0-cp38-cp38-win_amd64.whl", hash = "sha256:14c0f017bfebbc18139551111ac58ecbde11f4bc375b73a53af38927d60308b6"},
{file = "grpcio-1.32.0.tar.gz", hash = "sha256:01d3046fe980be25796d368f8fc5ff34b7cf5e1444f3789a017a7fe794465639"},
]
-gryffin = []
+gryffin = [
+ {file = "gryffin-0.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f6f57faddb9e79227f41556ce5cb4c672f0cf93b37086fa54533951c726373cf"},
+ {file = "gryffin-0.1.1.tar.gz", hash = "sha256:ef44cf79616e078406c5ba5cedfc95d1919673fc0f8f13abb18ea3567c06e47d"},
+]
h5py = [
{file = "h5py-2.10.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:ecf4d0b56ee394a0984de15bceeb97cbe1fe485f1ac205121293fc44dcf3f31f"},
{file = "h5py-2.10.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:86868dc07b9cc8cb7627372a2e6636cdc7a53b7e2854ad020c9e9d8a4d3fd0f5"},
@@ -2950,10 +2980,6 @@ numpy = [
{file = "numpy-1.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:712f0c32555132f4b641b918bdb1fd3c692909ae916a233ce7f50eac2de87e37"},
{file = "numpy-1.18.0.zip", hash = "sha256:a9d72d9abaf65628f0f31bbb573b7d9304e43b1e6bbae43149c17737a42764c4"},
]
-numpydoc = [
- {file = "numpydoc-1.1.0-py3-none-any.whl", hash = "sha256:c53d6311190b9e3b9285bc979390ba0257ba9acde5eca1a7065fc8dfca9d46e8"},
- {file = "numpydoc-1.1.0.tar.gz", hash = "sha256:c36fd6cb7ffdc9b4e165a43f67bf6271a7b024d0bb6b00ac468c9e2bfc76448e"},
-]
oauthlib = [
{file = "oauthlib-3.1.0-py2.py3-none-any.whl", hash = "sha256:df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea"},
{file = "oauthlib-3.1.0.tar.gz", hash = "sha256:bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889"},
@@ -3113,8 +3139,8 @@ pycparser = [
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
]
pygments = [
- {file = "Pygments-2.6.1-py3-none-any.whl", hash = "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"},
- {file = "Pygments-2.6.1.tar.gz", hash = "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"},
+ {file = "Pygments-2.7.0-py3-none-any.whl", hash = "sha256:2df50d16b45b977217e02cba6c8422aaddb859f3d0570a88e09b00eafae89c6e"},
+ {file = "Pygments-2.7.0.tar.gz", hash = "sha256:2594e8fdb06fef91552f86f4fd3a244d148ab24b66042036e64f29a291515048"},
]
pyjwt = [
{file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"},
@@ -3166,10 +3192,6 @@ python-dateutil = [
{file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"},
{file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"},
]
-python-dotenv = [
- {file = "python-dotenv-0.13.0.tar.gz", hash = "sha256:3b9909bc96b0edc6b01586e1eed05e71174ef4e04c71da5786370cebea53ad74"},
- {file = "python_dotenv-0.13.0-py2.py3-none-any.whl", hash = "sha256:25c0ff1a3e12f4bde8d592cc254ab075cfe734fc5dd989036716fd17ee7e5ec7"},
-]
pytz = [
{file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"},
{file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"},
@@ -3321,7 +3343,6 @@ scipy = [
{file = "scipy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:2cce3f9847a1a51019e8c5b47620da93950e58ebc611f13e0d11f4980ca5fecb"},
{file = "scipy-1.4.1.tar.gz", hash = "sha256:dee1bbf3a6c8f73b6b218cb28eed8dd13347ea2f87d572ce19b289d6fd3fbc59"},
]
-scp = []
send2trash = [
{file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"},
{file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"},
@@ -3459,37 +3480,37 @@ sqlalchemy = [
{file = "SQLAlchemy-1.3.19.tar.gz", hash = "sha256:3bba2e9fbedb0511769780fe1d63007081008c5c2d7d715e91858c94dbaa260e"},
]
sqsnobfit = [
- {file = "SQSnobFit-0.4.3.tar.gz", hash = "sha256:f5f259445db0ccf2074f1164efe127dfd121c45b976b66cf59f661a3404e56bd"},
+ {file = "SQSnobFit-0.4.4.tar.gz", hash = "sha256:22b0e880fa86d82102a512254b2d06af8def121572b465ebe66dbdbcb0a9f524"},
]
swagger-spec-validator = [
{file = "swagger-spec-validator-2.7.3.tar.gz", hash = "sha256:f4f23ee4dbd52bfcde90b1144dde22304add6260e9f29252e9fd7814c9b8fd16"},
{file = "swagger_spec_validator-2.7.3-py2.py3-none-any.whl", hash = "sha256:d1514ec7e3c058c701f27cc74f85ceb876d6418c9db57786b9c54085ed5e29eb"},
]
tensorboard = [
- {file = "tensorboard-2.2.2-py3-none-any.whl", hash = "sha256:a3feb73e1221c0a512398ad2cd08570fb082d8a2ba364aa0562543ecbd3659ef"},
+ {file = "tensorboard-2.3.0-py3-none-any.whl", hash = "sha256:d34609ed83ff01dd5b49ef81031cfc9c166bba0dabd60197024f14df5e8eae5e"},
]
tensorboard-plugin-wit = [
{file = "tensorboard_plugin_wit-1.7.0-py3-none-any.whl", hash = "sha256:ee775f04821185c90d9a0e9c56970ee43d7c41403beb6629385b39517129685b"},
]
tensorflow = [
- {file = "tensorflow-2.2.0-cp35-cp35m-macosx_10_11_x86_64.whl", hash = "sha256:7ed67b47cdf6598a79583de5b57c595493eac2b8b6b3a828f912354716cb8149"},
- {file = "tensorflow-2.2.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:267017724a49c367ca5df536e5f6d3d59643eaed946c82233d6b371e62b5ddc8"},
- {file = "tensorflow-2.2.0-cp35-cp35m-win_amd64.whl", hash = "sha256:572f69d2d0a3d3d83ebfb2c24e6d73d88b85a09f5da796974ef4a0ad83ff7cde"},
- {file = "tensorflow-2.2.0-cp36-cp36m-macosx_10_11_x86_64.whl", hash = "sha256:6735486ee9c3cb0807476e2b36ef7a4cd6c597cb24abf496e66b703360e1e54e"},
- {file = "tensorflow-2.2.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:dc5548562308acde7931f040e73d46ae31b398924cf675c3486fd3504e00a4af"},
- {file = "tensorflow-2.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3ee8819732d8594913b7d22ded7b22e48a49aa015050d8dd8464eaa010ba2e41"},
- {file = "tensorflow-2.2.0-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:c332c7fc5cfd54cb86d5da99787c9693e3a924848097c54df1b71ee595a39c93"},
- {file = "tensorflow-2.2.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f5f27528570fc0d7b90668be10c5dfd90d6ceb8fd2ed62d7d679554acb616bfe"},
- {file = "tensorflow-2.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:68ea22aee9c269a6a0c1061c141f1ec1cd1b1be7569390519c1bf4773f434a40"},
- {file = "tensorflow-2.2.0-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:bbcfb04738099bd46822db91584db74703fdddacf4cd0a76acfc5e086956b5ba"},
- {file = "tensorflow-2.2.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8f364528f70d895b96a0de36c7c6002644bf4c5df1ee3fbfa775f5cee6571ad7"},
- {file = "tensorflow-2.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:784ab8217e4b0eb4d121c28430c6cdc2ce56c02634a9720d84fb30598b338b8c"},
+ {file = "tensorflow-2.3.0-cp35-cp35m-macosx_10_11_x86_64.whl", hash = "sha256:c6fad4e944e20199e963e158fe626352e349865ea4ca71655f5456193a6d3b9d"},
+ {file = "tensorflow-2.3.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:6f74ef59dc59cf8f2002738c65dffa591e2c332e9b1b4ced33ff8d39b6fb477c"},
+ {file = "tensorflow-2.3.0-cp35-cp35m-win_amd64.whl", hash = "sha256:797d6ca09d4f69570458180b7813dc12efe9166ba60454b0df7bed531bb5e4f4"},
+ {file = "tensorflow-2.3.0-cp36-cp36m-macosx_10_11_x86_64.whl", hash = "sha256:b1699903cf3a9f41c379d79ada2279a206a071b7e05671646d7b5e7fc37e2eae"},
+ {file = "tensorflow-2.3.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:5c9f9a36d5b4d0ceb67b985486fe4cc6999a96e2bf89f3ba82ffd8317e5efadd"},
+ {file = "tensorflow-2.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:bc9d761a857839344930eef86f0d6409840b1c9ada9cbe56b92287b2077ef752"},
+ {file = "tensorflow-2.3.0-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0cfb0fbe875408cdbfc7677f12aa0b23656f3e6d8c5f568b3100450ec29262a7"},
+ {file = "tensorflow-2.3.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:92430b6e91f00f38a602c4f547bbbaca598a3a90376f90d5b2acd24bc18fa1d7"},
+ {file = "tensorflow-2.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:36a4ce9bbc9865385c1bb606fe34f0da96b0496ce3997e652d2b765a4382fe48"},
+ {file = "tensorflow-2.3.0-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:44c8d979b2d19ed56dbe6b03aef87616d6138a58fd80c43e7a758c90105e9adf"},
+ {file = "tensorflow-2.3.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:c33a423eb1f39c4c6acc44c044a138979868f0d4c91e380c191bd8fddc7c2e9b"},
+ {file = "tensorflow-2.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2d9994157d6a222d9ffd956e99af4b5e46e47338428d2d197e325362283ec835"},
]
tensorflow-estimator = [
- {file = "tensorflow_estimator-2.2.0-py2.py3-none-any.whl", hash = "sha256:d09dacdd127f2579cea8d5af21f4a918036b8ae246adc82f26b61f91cc247dc2"},
+ {file = "tensorflow_estimator-2.3.0-py2.py3-none-any.whl", hash = "sha256:b75e034300ccb169403cf2695adf3368da68863aeb0c14c3760064c713d5c486"},
]
tensorflow-probability = [
- {file = "tensorflow_probability-0.10.1-py2.py3-none-any.whl", hash = "sha256:3ae630d3e0ae12623d77ab2394291dcf0d3ca6b78a8d193372165f7c335f14d0"},
+ {file = "tensorflow_probability-0.11.0-py2.py3-none-any.whl", hash = "sha256:60e66a376f8b3602d3f61b0ac0bf844796b2cc10c104a13bced5cb65e1770eae"},
]
termcolor = [
{file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"},
diff --git a/pyproject.toml b/pyproject.toml
index e628d3d3..8a0b4efe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,13 +1,13 @@
[tool.poetry]
name = "summit"
-version = "0.5.0"
+version = "0.6.0"
description = "Tools for optimizing chemical processes"
-authors = ["Kobi Felton "]
-license = ""
+authors = ["Kobi Felton ", "Jan Rittig"]
+license = "MIT"
readme = 'README.md'
repository = "https://github.com/sustainable-processes/summit"
-homepage = "https://pypi.org/project/summit"
-keywords = []
+homepage = "https://github.com/sustainable-processes/summit"
+keywords = ["machine-learning", "chemistry", "reactions", "optimization", "neural-networks", "drug-discovery", "bayesian-optimization", "nelder-mead", "snobfit", "gryffin", "TSEMO"]
[tool.poetry.dependencies]
pandas = "^1.1.0"
@@ -24,29 +24,25 @@ gpyopt = "^1.2.6"
neptune-client = {version= "^0.4.115", optional = true}
hiplot = {version= "^0.1.12", optional = true}
neptune-contrib = {extras = ["viz"], version = "^0.22.0", optional=true}
-gryffin = {git = "https://github.com/sustainable-processes/gryffin", rev = "0.1.0"}
-tensorflow = "^2.2.0"
-tensorflow-probability = "^0.10.1"
-tensorflow-estimator = ">=2.2.0,<2.3.0"
+tensorflow = "^2.3.0"
torch = "^1.4.0"
blitz-bayesian-pytorch = "0.2.3"
paramiko = {version="^2.7.1", optional=true}
-scp = {git = "https://github.com/jbardin/scp.py", rev = "v0.13.2", optional = true}
+gryffin = "^0.1.1"
+sphinx = {version="^3.2.1", optional=true}
+nbsphinx = {version="^0.7.1", optional=true}
+sphinx-rtd-theme = {version="^0.5.0", optional=true}
[tool.poetry.extras]
-experiments = ["neptune-client", "hiplot", "neptune-contrib", "paramiko", "scp"]
+experiments = ["neptune-client", "hiplot", "neptune-contrib", "paramiko"]
+docs = ["sphinx", "nbsphinx", "sphinx-rtd-theme"]
[tool.poetry.dev-dependencies]
pytest = "^3.0"
ipdb = "^0.13.2"
rope = "^0.17.0"
-python-dotenv = "^0.13.0"
-sphinx = "^3.2.1"
-nbsphinx = "^0.7.1"
-sphinx-rtd-theme = "^0.5.0"
black = {version = "^20.8b1", allow-prereleases = true}
-numpydoc = "^1.1.0"
[build-system]
requires = ["poetry>=0.12"]
-build-backendpoetry = "poetry.masonry.api"
+build-backend = "poetry.masonry.api"
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 5ffea156..00000000
--- a/requirements.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-blitz-bayesian-pytorch==0.2.3
-fastprogress==0.*,>=0.2.3
-gpy==1.*,>=1.9.0
-gpyopt==1.*,>=1.2.6
-gryffin @ git+https://github.com/sustainable-processes/gryffin@0.1.1#egg=gryffin
-hiplot==0.*,>=0.1.12
-ipdb==0.*,>=0.13.2
-ipywidgets==7.*,>=7.5.1
-matplotlib==3.*,>=3.2.2
-nbsphinx==0.*,>=0.7.1
-neptune-client==0.*,>=0.4.115
-neptune-contrib[viz]==0.*,>=0.22.0
-numpy==1.18.0
-pandas==1.*,>=1.1.0
-paramiko==2.*,>=2.7.1
-plotly==4.*,>=4.8.2
-pymoo==0.*,>=0.4.1
-pyrff==1.*,>=1.0.1
-pytest==3.*,>=3.0.0
-python-dotenv==0.*,>=0.13.0
-rope==0.*,>=0.17.0
-scp @ git+https://github.com/jbardin/scp.py@v0.13.2#egg=scp.py
-sphinx==3.*,>=3.2.1
-sqsnobfit==0.*,>=0.4.3
-tensorflow==2.*,>=2.2.0
-tensorflow-estimator<2.3.0,>=2.2.0
-tensorflow-probability==0.*,>=0.10.1
-torch==1.*,>=1.4.0
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 67994ce9..00000000
--- a/setup.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# DO NOT EDIT THIS FILE!
-# This file has been autogenerated by dephell <3
-# https://github.com/dephell/dephell
-
-try:
- from setuptools import setup
-except ImportError:
- from distutils.core import setup
-
-import os.path
-
-readme = ""
-here = os.path.abspath(os.path.dirname(__file__))
-readme_path = os.path.join(here, "README.md")
-if os.path.exists(readme_path):
- with open(readme_path, "r", encoding="utf8") as stream:
- readme = stream.read()
-
-requirements = []
-requirements_path = os.path.join(here, "requirements.txt")
-if os.path.exists(requirements_path):
- with open(requirements_path, "r", encoding="utf8") as stream:
- requirements = stream.read().splitlines()
-
-extras_requiremetns = []
-extras_requirements = []
-requirements_path = os.path.join(here, "experiments_requirements.txt")
-if os.path.exists(requirements_path):
- with open(requirements_path, "r", encoding="utf8") as stream:
- extras_requirements = stream.read().splitlines()
-
-setup(
- long_description=readme,
- name="summit",
- version="0.4.0",
- description="Tools for optimizing chemical processes",
- python_requires="==3.*,>=3.6.1",
- project_urls={
- "homepage": "https://pypi.org/project/summit",
- "repository": "https://github.com/sustainable-processes/summit",
- },
- author="Kobi Felton, Jan Rittig",
- author_email="kobi.c.f@gmail.com",
- packages=[
- "summit",
- "summit.benchmarks",
- "summit.benchmarks.experiment_emulator",
- "summit.strategies",
- "summit.utils",
- ],
- package_data={
- "summit.benchmarks.experiment_emulator": [
- "trained_models/BNN/*.json",
- "trained_models/BNN/*.pt",
- "data/*.csv",
- ]
- },
- install_requires=requirements,
-)