Skip to content

Commit

Permalink
Merge pull request #243 from elfi-dev/dev
Browse files Browse the repository at this point in the history
Release 0.6.3
  • Loading branch information
vuolleko authored Sep 28, 2017
2 parents 53effb0 + 077308b commit d13da82
Show file tree
Hide file tree
Showing 19 changed files with 250 additions and 104 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
Changelog
=========

0.6.3 (2017-09-28)
------------------

- Further performance improvements for rerunning inference using stored data via caches
- Added the general Gaussian noise example model (fixed covariance)
- Restricted NetworkX to versions < 2.0

0.6.2 (2017-09-06)
------------------

Expand Down
6 changes: 5 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
**Version 0.6.2 released!** See the CHANGELOG and [notebooks](https://github.com/elfi-dev/notebooks).
**Version 0.6.3 released!** See the CHANGELOG and [notebooks](https://github.com/elfi-dev/notebooks).

**NOTE:** For the time being NetworkX 2 is incompatible with ELFI.

ELFI - Engine for Likelihood-Free Inference
===========================================
Expand All @@ -7,6 +9,7 @@ ELFI - Engine for Likelihood-Free Inference
[![Code Health](https://landscape.io/github/elfi-dev/elfi/dev/landscape.svg?style=flat)](https://landscape.io/github/elfi-dev/elfi/dev)
[![Documentation Status](https://readthedocs.org/projects/elfi/badge/?version=latest)](http://elfi.readthedocs.io/en/latest/?badge=latest)
[![Gitter](https://badges.gitter.im/elfi-dev/elfi.svg)](https://gitter.im/elfi-dev/elfi?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
[![DOI](https://zenodo.org/badge/69855441.svg)](https://zenodo.org/badge/latestdoi/69855441)

<img src="https://cloud.githubusercontent.com/assets/1233418/20178983/6e22ee44-a75c-11e6-8345-5934b55b9dc6.png" width="15%" align="right"></img>

Expand Down Expand Up @@ -79,6 +82,7 @@ Resolving these may sometimes go wrong:
- On OS X with Anaconda virtual environment say `conda install python.app` and then use
`pythonw` instead of `python`.
- Note that ELFI requires Python 3.5 or greater so try `pip3 install elfi`.
- Make sure your Python installation meets the versions listed in `requirements.txt`.


Citation
Expand Down
3 changes: 3 additions & 0 deletions docs/installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@ Resolving these may sometimes go wrong:
* On OS X with Anaconda virtual environment say `conda install python.app` and then use `pythonw` instead of `python`.
* Note that ELFI requires Python 3.5 or greater
* In some environments ``pip`` refers to Python 2.x, and you have to use ``pip3`` to use the Python 3.x version
* Make sure your Python installation meets the versions listed in requirements_.

.. _requirements: https://github.com/elfi-dev/elfi/blob/dev/requirements.txt

Developer installation from sources
-----------------------------------
Expand Down
2 changes: 1 addition & 1 deletion elfi/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,4 @@
__email__ = 'elfi-support@hiit.fi'

# make sure __version__ is on the last non-empty line (read by setup.py)
__version__ = '0.6.2'
__version__ = '0.6.3'
3 changes: 3 additions & 0 deletions elfi/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,4 +326,7 @@ def load_data(cls, compiled_net, context, batch_index):
loaded_net = RandomStateLoader.load(context, loaded_net, batch_index)
loaded_net = PoolLoader.load(context, loaded_net, batch_index)

# Add cache from the context
loaded_net.graph['_executor_cache'] = context.caches['executor']

return loaded_net
2 changes: 1 addition & 1 deletion elfi/examples/bignk.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def BiGNK(a1, a2, b1, b2, g1, g2, k1, k2, rho, c=.8, n_obs=150, batch_size=1,
term_product_misaligned = np.swapaxes(term_product, 1, 0)
y_misaligned = np.add(a, term_product_misaligned)
y = np.swapaxes(y_misaligned, 1, 0)
# print(y.shape)

return y


Expand Down
131 changes: 107 additions & 24 deletions elfi/examples/gauss.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,78 @@
"""An example implementation of a Gaussian noise model."""
"""Example implementations of Gaussian noise models."""

from functools import partial

import numpy as np
import scipy.stats as ss

import elfi
from elfi.examples.gnk import euclidean_multidim


def Gauss(mu, sigma, n_obs=50, batch_size=1, random_state=None):
"""Sample the Gaussian distribution.
def gauss(mu, sigma, n_obs=50, batch_size=1, random_state=None):
"""Sample the 1-D Gaussian distribution.
Parameters
----------
mu : float, array_like
sigma : float, array_like
n_obs : int, optional
batch_size : int, optional
random_state : RandomState, optional
random_state : np.random.RandomState, optional
Returns
-------
y_obs : array_like
1-D observation.
"""
# Handling batching.
batches_mu = np.asanyarray(mu).reshape((-1, 1))
batches_sigma = np.asanyarray(sigma).reshape((-1, 1))

# Sampling observations.
y_obs = ss.norm.rvs(loc=batches_mu, scale=batches_sigma,
size=(batch_size, n_obs), random_state=random_state)
return y_obs


def gauss_nd_mean(*mu, cov_matrix, n_obs=15, batch_size=1, random_state=None):
"""Sample an n-D Gaussian distribution.
Parameters
----------
*mu : array_like
Mean parameters.
cov_matrix : array_like
Covariance matrix.
n_obs : int, optional
batch_size : int, optional
random_state : np.random.RandomState, optional
Returns
-------
y_obs : array_like
n-D observation.
"""
# Standardising the parameter's format.
mu = np.asanyarray(mu).reshape((-1, 1))
sigma = np.asanyarray(sigma).reshape((-1, 1))
y = ss.norm.rvs(loc=mu, scale=sigma, size=(batch_size, n_obs), random_state=random_state)
return y
n_dim = len(mu)

# Handling batching.
batches_mu = np.zeros(shape=(batch_size, n_dim))
for idx_dim, param_mu in enumerate(mu):
batches_mu[:, idx_dim] = param_mu

# Sampling the observations.
y_obs = np.zeros(shape=(batch_size, n_obs, n_dim))
for idx_batch in range(batch_size):
y_batch = ss.multivariate_normal.rvs(mean=batches_mu[idx_batch],
cov=cov_matrix,
size=n_obs,
random_state=random_state)
if n_dim == 1:
y_batch = y_batch[:, np.newaxis]
y_obs[idx_batch, :, :] = y_batch
return y_obs


def ss_mean(x):
Expand All @@ -39,36 +87,71 @@ def ss_var(x):
return ss


def get_model(n_obs=50, true_params=None, seed_obs=None):
"""Return a complete Gaussian noise model.
def get_model(n_obs=50, true_params=None, seed_obs=None, nd_mean=False, cov_matrix=None):
"""Return a Gaussian noise model.
Parameters
----------
n_obs : int, optional
the number of observations
true_params : list, optional
true_params[0] corresponds to the mean,
true_params[1] corresponds to the standard deviation
Default parameter settings.
seed_obs : int, optional
seed for the observed data generation
Seed for the observed data generation.
nd_mean : bool, optional
Option to use an n-D mean Gaussian noise model.
cov_matrix : array_like, optional
    Covariance matrix, required when nd_mean is True.
Returns
-------
m : elfi.ElfiModel
"""
# Defining the default settings.
if true_params is None:
true_params = [10, 2]
if nd_mean:
true_params = [4, 4] # 2-D mean.
else:
true_params = [4, .4] # mean and standard deviation.

y_obs = Gauss(*true_params, n_obs=n_obs, random_state=np.random.RandomState(seed_obs))
sim_fn = partial(Gauss, n_obs=n_obs)
# Choosing the simulator for both observations and simulations.
if nd_mean:
sim_fn = partial(gauss_nd_mean, cov_matrix=cov_matrix, n_obs=n_obs)
else:
sim_fn = partial(gauss, n_obs=n_obs)

# Obtaining the observations.
y_obs = sim_fn(*true_params, n_obs=n_obs, random_state=np.random.RandomState(seed_obs))

m = elfi.ElfiModel()
elfi.Prior('uniform', -10, 50, model=m, name='mu')
elfi.Prior('truncnorm', 0.01, 5, model=m, name='sigma')
elfi.Simulator(sim_fn, m['mu'], m['sigma'], observed=y_obs, name='Gauss')
elfi.Summary(ss_mean, m['Gauss'], name='S1')
elfi.Summary(ss_var, m['Gauss'], name='S2')
elfi.Distance('euclidean', m['S1'], m['S2'], name='d')

# Initialising the priors.
eps_prior = 5 # The longest distance from the median of an initialised prior's distribution.
priors = []
if nd_mean:
n_dim = len(true_params)
for i in range(n_dim):
name_prior = 'mu_{}'.format(i)
prior_mu = elfi.Prior('uniform', true_params[i] - eps_prior,
2 * eps_prior, model=m, name=name_prior)
priors.append(prior_mu)
else:
priors.append(elfi.Prior('uniform', true_params[0] - eps_prior,
2 * eps_prior, model=m, name='mu'))
priors.append(elfi.Prior('truncnorm',
np.amax([.01, true_params[1] - eps_prior]),
2 * eps_prior, model=m, name='sigma'))
elfi.Simulator(sim_fn, *priors, observed=y_obs, name='gauss')

# Initialising the summary statistics.
sumstats = []
sumstats.append(elfi.Summary(ss_mean, m['gauss'], name='ss_mean'))
sumstats.append(elfi.Summary(ss_var, m['gauss'], name='ss_var'))

# Choosing the discrepancy metric.
if nd_mean:
elfi.Discrepancy(euclidean_multidim, *sumstats, name='d')
else:
elfi.Distance('euclidean', *sumstats, name='d')

return m
17 changes: 9 additions & 8 deletions elfi/examples/gnk.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,11 +134,11 @@ def euclidean_multidim(*simulated, observed):
array_like
"""
pts_sim = np.column_stack(simulated)
pts_obs = np.column_stack(observed)
d_multidim = np.sum((pts_sim - pts_obs)**2., axis=1)
d_squared = np.sum(d_multidim, axis=1)
d = np.sqrt(d_squared)
pts_sim = np.stack(simulated, axis=1)
pts_obs = np.stack(observed, axis=1)
d_ss_merged = np.sum((pts_sim - pts_obs)**2., axis=1)
d_dim_merged = np.sum(d_ss_merged, axis=1)
d = np.sqrt(d_dim_merged)

return d

Expand Down Expand Up @@ -185,8 +185,8 @@ def ss_robust(y):
ss_g = _get_ss_g(y)
ss_k = _get_ss_k(y)

ss_robust = np.stack((ss_a, ss_b, ss_g, ss_k), axis=1)

# Combining the summary statistics by expanding the dimensionality.
ss_robust = np.hstack((ss_a, ss_b, ss_g, ss_k))
return ss_robust


Expand All @@ -209,7 +209,8 @@ def ss_octile(y):
octiles = np.linspace(12.5, 87.5, 7)
E1, E2, E3, E4, E5, E6, E7 = np.percentile(y, octiles, axis=1)

ss_octile = np.stack((E1, E2, E3, E4, E5, E6, E7), axis=1)
# Combining the summary statistics by expanding the dimensionality.
ss_octile = np.hstack((E1, E2, E3, E4, E5, E6, E7))

return ss_octile

Expand Down
57 changes: 36 additions & 21 deletions elfi/executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,27 +99,42 @@ def get_execution_order(cls, G):
nodes that require execution
"""
nodes = set()
order = nx_constant_topological_sort(G)
dep_graph = nx.DiGraph(G.edges())

for node in order:
attr = G.node[node]
if attr.keys() >= {'operation', 'output'}:
raise ValueError('Generative graph has both op and output present')

# Remove nodes from dependency graph whose outputs are present
if 'output' in attr:
dep_graph.remove_node(node)
elif 'operation' not in attr:
raise ValueError('Generative graph has no op or output present')

for output_node in G.graph['outputs']:
if dep_graph.has_node(output_node):
nodes.update([output_node])
nodes.update(nx.ancestors(dep_graph, output_node))

return [n for n in order if n in nodes]
# Get the cache dict if it exists
cache = G.graph.get('_executor_cache', {})

output_nodes = G.graph['outputs']
# Filter those output nodes who have an operation to run
needed = tuple(sorted(node for node in output_nodes if 'operation' in G.node[node]))

if needed not in cache:
# Resolve the nodes that need to be executed in the graph
nodes_to_execute = set(needed)

if 'sort_order' not in cache:
cache['sort_order'] = nx_constant_topological_sort(G)
sort_order = cache['sort_order']

# Resolve the dependencies of needed
dep_graph = nx.DiGraph(G.edges())
for node in sort_order:
attr = G.node[node]
if attr.keys() >= {'operation', 'output'}:
raise ValueError('Generative graph has both op and output present')

# Remove those nodes from the dependency graph whose outputs are present
if 'output' in attr:
dep_graph.remove_node(node)
elif 'operation' not in attr:
raise ValueError('Generative graph has no op or output present')

# Add the dependencies of the needed nodes
for needed_node in needed:
nodes_to_execute.update(nx.ancestors(dep_graph, needed_node))

# Turn into a sorted list and cache
cache[needed] = [n for n in sort_order if n in nodes_to_execute]

return cache[needed]

@staticmethod
def _run(fn, node, G):
Expand Down
5 changes: 2 additions & 3 deletions elfi/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,9 +164,8 @@ def load(cls, context, compiled_net, batch_index):
elif isinstance(seed, (int, np.int32, np.uint32)):
# TODO: In the future, we could use https://pypi.python.org/pypi/randomstate to enable
# jumps?
sub_seed, context.sub_seed_cache = get_sub_seed(seed,
batch_index,
cache=context.sub_seed_cache)
cache = context.caches.get('sub_seed', None)
sub_seed = get_sub_seed(seed, batch_index, cache=cache)
random_state = np.random.RandomState(sub_seed)
else:
raise ValueError("Seed of type {} is not supported".format(seed))
Expand Down
4 changes: 2 additions & 2 deletions elfi/methods/post_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,8 +242,8 @@ def adjust_posterior(sample, model, summary_names, parameter_names=None, adjustm
>>> import elfi
>>> from elfi.examples import gauss
>>> m = gauss.get_model()
>>> res = elfi.Rejection(m['d'], output_names=['S1', 'S2']).sample(1000)
>>> adj = adjust_posterior(res, m, ['S1', 'S2'], ['mu'], LinearAdjustment())
>>> res = elfi.Rejection(m['d'], output_names=['ss_mean', 'ss_var']).sample(1000)
>>> adj = adjust_posterior(res, m, ['ss_mean', 'ss_var'], ['mu'], LinearAdjustment())
"""
adjustment = _get_adjustment(adjustment)
Expand Down
6 changes: 4 additions & 2 deletions elfi/model/elfi_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def random_name(length=4, prefix=''):
return prefix + str(uuid.uuid4().hex[0:length])


# TODO: move to another file
# TODO: move to another file?
class ComputationContext:
"""Container object for key components for consistent computation results.
Expand Down Expand Up @@ -167,9 +167,11 @@ def __init__(self, batch_size=None, seed=None, pool=None):

self._batch_size = batch_size or 1
self._seed = random_seed() if seed is None else seed
self.sub_seed_cache = {}
self._pool = pool

# Caches will not be used if they are not found in the caches dict
self.caches = {'executor': {}, 'sub_seed': {}}

# Count the number of submissions from this context
self.num_submissions = 0

Expand Down
Loading

0 comments on commit d13da82

Please sign in to comment.