[MaxVar, Part 1] Added the general Gaussian noise model.
perdaug committed Sep 7, 2017
1 parent 54e62b2 commit 1d8900c
Showing 7 changed files with 159 additions and 14 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.rst
@@ -9,6 +9,7 @@ Changelog
- Renamed elfi.get_current_model to elfi.get_default_model
- Improved performance when rerunning inference using stored data
- Changed SMC to use ModelPrior, which immediately rejects invalid proposals
- Added the general Gaussian noise example model (fixed covariance)

0.6.1 (2017-07-21)
------------------
2 changes: 1 addition & 1 deletion elfi/examples/bignk.py
@@ -98,7 +98,7 @@ def BiGNK(a1, a2, b1, b2, g1, g2, k1, k2, rho, c=.8, n_obs=150, batch_size=1,
term_product_misaligned = np.swapaxes(term_product, 1, 0)
y_misaligned = np.add(a, term_product_misaligned)
y = np.swapaxes(y_misaligned, 1, 0)
# print(y.shape)

return y


130 changes: 130 additions & 0 deletions elfi/examples/gauss_nd.py
@@ -0,0 +1,130 @@
"""The general, n-dimensional Gaussian noise model."""

from functools import partial

import numpy as np
import scipy.stats as ss

import elfi
from elfi.examples.gnk import euclidean_multidim


def Gauss_nd(*params, cov_ii=None, cov_ij=None, n_obs=15, batch_size=1, random_state=None):
"""Sample the Gaussian distribution.
Reference
---------
The default settings replicate the experiment settings used in [1].
[1] arXiv:1704.00520 (Järvenpää et al., 2017).
Parameters
----------
*params : array_like
The array elements correspond to the mean parameters.
cov_ii : float, optional
The diagonal variance.
cov_ij : float, optional
The non-diagonal variance.
n_obs : int, optional
The number of observations.
batch_size : int, optional
The number of batches.
random_state : np.random.RandomState, optional
Returns
-------
y : array_like
"""
n_dim = len(params)
# Formatting the mean.
mu = np.zeros(shape=(batch_size, n_dim))
for idx_dim, param_mu in enumerate(params):
mu[:, idx_dim] = param_mu
# Formatting the diagonal covariance.
cov_ii = np.repeat(cov_ii, batch_size)
if batch_size == 1:
cov_ii = cov_ii[None]
# Formatting the non-diagonal covariance.
if n_dim != 1:
cov_ij = np.repeat(cov_ij, batch_size)
if batch_size == 1:
cov_ij = cov_ij[None]
# Creating the covariance matrix.
cov = np.zeros(shape=(batch_size, n_dim, n_dim))
for idx_batch in range(batch_size):
if n_dim != 1:
cov[idx_batch].fill(np.asscalar(cov_ij[idx_batch]))
np.fill_diagonal(cov[idx_batch], cov_ii[idx_batch])
# Sampling observations.
y = np.zeros(shape=(batch_size, n_obs, n_dim))
for idx_batch in range(batch_size):
y_batch = ss.multivariate_normal.rvs(mean=mu[idx_batch],
cov=cov[idx_batch],
size=n_obs,
random_state=random_state)
if n_dim == 1:
y_batch = y_batch[:, None]
y[idx_batch, :, :] = y_batch
return y


def ss_mean(x):
"""Return the summary statistic corresponding to the mean."""
ss = np.mean(x, axis=1)
return ss


def ss_var(x):
"""Return the summary statistic corresponding to the variance."""
ss = np.var(x, axis=1)
return ss


def get_model(true_params=None, cov_ii=1, cov_ij=.5, n_obs=15, seed_obs=None):
"""Return an initialised Gaussian noise model.
Parameters
----------
true_params : array_like
The array elements correspond to the mean parameters.
cov_ii : float, optional
The diagonal variance.
cov_ij : float, optional
The non-diagonal variance.
n_obs : int, optional
The number of observations.
random_state : np.random.RandomState, optional
Returns
-------
m : elfi.ElfiModel
"""
# The default settings use the 2-D Gaussian model.
if true_params is None:
true_params = [4, 4]
n_dim = len(true_params)
# Obtaining the observations.
y_obs = Gauss_nd(*true_params,
cov_ii=cov_ii,
cov_ij=cov_ij,
n_obs=n_obs,
random_state=np.random.RandomState(seed_obs))
sim_fn = partial(
Gauss_nd, cov_ii=cov_ii, cov_ij=cov_ij, n_obs=n_obs)
m = elfi.ElfiModel()
# Defining the priors.
priors = []
for i in range(n_dim):
name_prior = 'mu_{}'.format(i)
prior_mu = elfi.Prior('uniform', 0, 8, model=m, name=name_prior)
priors.append(prior_mu)

elfi.Simulator(sim_fn, *priors, observed=y_obs, name='gm')
elfi.Summary(ss_mean, m['gm'], name='ss_mean')
elfi.Summary(ss_var, m['gm'], name='ss_var')
elfi.Discrepancy(euclidean_multidim, m['ss_mean'], m['ss_var'], name='d')

return m
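
For reference, a minimal usage sketch of the new example model (not part of the commit), mirroring the unit tests added in tests/unit/test_examples.py; the parameter values and sample sizes are illustrative only:

import elfi
from elfi.examples import gauss_nd

# Build the 2-D Gaussian noise model with a fixed covariance structure.
m = gauss_nd.get_model(true_params=[4, 4], cov_ii=1, cov_ij=.5)

# Run a small rejection sampler on the Euclidean discrepancy node 'd'.
rej = elfi.Rejection(m, m['d'], batch_size=10)
result = rej.sample(20)
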
17 changes: 9 additions & 8 deletions elfi/examples/gnk.py
@@ -134,11 +134,11 @@ def euclidean_multidim(*simulated, observed):
array_like
"""
pts_sim = np.column_stack(simulated)
pts_obs = np.column_stack(observed)
d_multidim = np.sum((pts_sim - pts_obs)**2., axis=1)
d_squared = np.sum(d_multidim, axis=1)
d = np.sqrt(d_squared)
pts_sim = np.stack(simulated, axis=1)
pts_obs = np.stack(observed, axis=1)
d_ss_merged = np.sum((pts_sim - pts_obs)**2., axis=1)
d_dim_merged = np.sum(d_ss_merged, axis=1)
d = np.sqrt(d_dim_merged)

return d

@@ -185,8 +185,8 @@ def ss_robust(y):
ss_g = _get_ss_g(y)
ss_k = _get_ss_k(y)

ss_robust = np.stack((ss_a, ss_b, ss_g, ss_k), axis=1)

# Combining the summary statistics by expanding the dimensionality.
ss_robust = np.hstack((ss_a, ss_b, ss_g, ss_k))
return ss_robust


@@ -209,7 +209,8 @@ def ss_octile(y):
octiles = np.linspace(12.5, 87.5, 7)
E1, E2, E3, E4, E5, E6, E7 = np.percentile(y, octiles, axis=1)

ss_octile = np.stack((E1, E2, E3, E4, E5, E6, E7), axis=1)
# Combining the summary statistics by expanding the dimensionality.
ss_octile = np.hstack((E1, E2, E3, E4, E5, E6, E7))

return ss_octile
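
A shape sketch of the reworked euclidean_multidim distance (illustrative, not part of the commit), assuming each summary node returns an array of shape (batch_size, n_dim); the variable names and shapes below are examples only:

import numpy as np

# Two summaries, e.g. the mean and the variance, each of shape (batch_size, n_dim).
batch_size, n_dim = 3, 2
sim_mean, sim_var = np.random.rand(batch_size, n_dim), np.random.rand(batch_size, n_dim)
obs_mean, obs_var = np.random.rand(1, n_dim), np.random.rand(1, n_dim)

pts_sim = np.stack((sim_mean, sim_var), axis=1)  # (batch_size, 2, n_dim)
pts_obs = np.stack((obs_mean, obs_var), axis=1)  # (1, 2, n_dim), broadcasts over the batch
d = np.sqrt(np.sum(np.sum((pts_sim - pts_obs) ** 2., axis=1), axis=1))  # (batch_size,)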

4 changes: 2 additions & 2 deletions elfi/methods/post_processing.py
@@ -204,8 +204,8 @@ def _adjust(self, i, theta_i, regression_model):

def _input_variables(self, model, sample, summary_names):
"""Regress on the differences to the observed summaries."""
observed_summaries = np.stack([model[s].observed for s in summary_names], axis=1)
summaries = np.stack([sample.outputs[name] for name in summary_names], axis=1)
observed_summaries = np.stack([model[s].observed.ravel() for s in summary_names], axis=1)
summaries = np.stack([sample.outputs[name].ravel() for name in summary_names], axis=1)
return summaries - observed_summaries
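
A shape sketch of the .ravel() change (assumed shapes only, not part of the commit): the observed summaries carry a leading batch axis of one, so flattening both sides before stacking lets the subtraction broadcast across the samples:

import numpy as np

# Assumed shapes: each observed summary is (1, 1); each sample output is (n_samples,).
observed = [np.array([[0.3]]), np.array([[1.2]])]
outputs = {'ss_mean': np.random.rand(100), 'ss_var': np.random.rand(100)}

observed_summaries = np.stack([o.ravel() for o in observed], axis=1)               # (1, 2)
summaries = np.stack([outputs[n].ravel() for n in ('ss_mean', 'ss_var')], axis=1)  # (100, 2)
diff = summaries - observed_summaries                                              # (100, 2) via broadcasting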


1 change: 1 addition & 0 deletions tests/conftest.py
@@ -9,6 +9,7 @@
import elfi.clients.ipyparallel as eipp
import elfi.clients.multiprocessing as mp
import elfi.clients.native as native
import elfi.examples.gauss_nd
import elfi.examples.ma2

elfi.clients.native.set_as_default()
18 changes: 15 additions & 3 deletions tests/unit/test_examples.py
@@ -3,7 +3,7 @@
import pytest

import elfi
from elfi.examples import bdm, gauss, ricker, gnk, bignk
from elfi.examples import bdm, gauss, gauss_nd, ricker, gnk, bignk


def test_bdm():
@@ -41,12 +41,24 @@ def test_bdm():
if do_cleanup:
os.system('rm {}/bdm'.format(cpp_path))


def test_Gauss():
def test_gauss():
m = gauss.get_model()
rej = elfi.Rejection(m, m['d'], batch_size=10)
rej.sample(20)


def test_gauss_1d():
params_true = [4]
m = gauss_nd.get_model(true_params=params_true, cov_ii=1)
rej = elfi.Rejection(m, m['d'], batch_size=10)
rej.sample(20)


def test_gauss_2d():
params_true = [4, 4]
m = gauss_nd.get_model(true_params=params_true, cov_ii=1, cov_ij=.5)
rej = elfi.Rejection(m, m['d'], batch_size=10)
rej.sample(20)


def test_Ricker():
m = ricker.get_model()
