diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index fa65a4eb..fa5096ee 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,9 +1,11 @@
 Changelog
 =========
 
-dev
----
+0.7.1 (2018-04-11)
+------------------
 - Implemented model selection (elfi.compare_models). See API documentation.
+- Fix threshold=0 in rejection sampling
+- Set default batch_size to 1 in ParameterInference base class
 
 0.7 (2017-11-30)
 ----------------
diff --git a/README.md b/README.md
index dc0b8d81..2be28d32 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-**Version 0.7 released!** See the CHANGELOG and [notebooks](https://github.com/elfi-dev/notebooks).
+**Version 0.7.1 released!** See the CHANGELOG and [notebooks](https://github.com/elfi-dev/notebooks).
 
 **NOTE:** For the time being NetworkX 2 is incompatible with ELFI.
 
diff --git a/elfi/__init__.py b/elfi/__init__.py
index 508bc038..fa178453 100644
--- a/elfi/__init__.py
+++ b/elfi/__init__.py
@@ -25,4 +25,4 @@ __email__ = 'elfi-support@hiit.fi'
 
 # make sure __version_ is on the last non-empty line (read by setup.py)
-__version__ = '0.7_dev'
+__version__ = '0.7.1'
diff --git a/elfi/model/tools.py b/elfi/model/tools.py
index c0aab4b7..77f3d65c 100644
--- a/elfi/model/tools.py
+++ b/elfi/model/tools.py
@@ -44,10 +44,10 @@ def run_vectorized(operation, *inputs, constants=None, dtype=None, batch_size=No
 
         # Test if a numpy array
         if is_array(inpt):
-            l = len(inpt)
+            length = len(inpt)
             if batch_size is None:
-                batch_size = l
-            elif batch_size != l:
+                batch_size = length
+            elif batch_size != length:
                 raise ValueError('Batch size {} does not match with input {} length of '
                                  '{}. Please check `constants` argument for the '
                                  'vectorize decorator for marking constant inputs.')
diff --git a/elfi/store.py b/elfi/store.py
index 02a3749d..c5aead02 100644
--- a/elfi/store.py
+++ b/elfi/store.py
@@ -208,12 +208,12 @@ def _make_store_for(self, node):
 
     def __len__(self):
         """Return the largest batch index in any of the stores."""
-        l = 0
+        largest = 0
         for output, store in self.stores.items():
             if store is None:
                 continue
-            l = max(l, len(store))
-        return l
+            largest = max(largest, len(store))
+        return largest
 
     def __getitem__(self, batch_index):
         """Return the batch."""