Stationary GMRF simulation with the Discrete Fourier Transform


In [9]:
# Load Biospytial modules and etc.
%matplotlib inline
import sys
sys.path.append('/apps')
sys.path.append('..')
#sys.path.append('../../spystats')
import django
django.setup()
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
## Use the ggplot style
plt.style.use('ggplot')

from external_plugins.spystats.spystats import tools as sptools
import scipy

For benchmarking we will perform a Gaussian field (GF) simulation, based on non-conditional (unconditional) simulation.
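For context, non-conditional simulation amounts to factorising the covariance matrix and "colouring" white noise with the factor. A minimal sketch of that idea, assuming an exponential covariance C(h) = sill * exp(-h / range_a) matching the variogram parameters used below (the spystats helper may differ in its internals):

In [ ]:
import numpy as np

def simulate_gf(coords, sill=0.3, range_a=0.4, nugget=1e-10):
    """One unconditional draw of a zero-mean Gaussian field at coords (n x 2 array). Sketch only."""
    # Pairwise distances -> exponential covariance (small nugget for numerical stability)
    d = np.linalg.norm(coords[:, None, :] - coords[None, :, :], axis=-1)
    Sigma = sill * np.exp(-d / range_a) + nugget * np.eye(len(coords))
    L = np.linalg.cholesky(Sigma)   # dense factorisation: typically the costly step for large grids
    return L.dot(np.random.normal(size=len(coords)))

The cost of building and factorising a dense covariance matrix is what motivates the DFT-based GMRF samplers below.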


In [10]:
vm = sptools.ExponentialVariogram(sill=0.3,range_a=0.4)

In [388]:
%time xx,yy,z = sptools.simulatedGaussianFieldAsPcolorMesh(vm)


INFO:external_plugins.spystats.spystats.tools:Calculating Sigma (CovMat)
CPU times: user 19.8 s, sys: 696 ms, total: 20.4 s
Wall time: 6.71 s

In [389]:
plt.imshow(z)


Out[389]:
<matplotlib.image.AxesImage at 0x7ff5dec64990>

In [13]:
x = xx[1,:]
y = yy[:,1]

Simulation of a temporal GMRF with DFT
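
Background for the cells below (informal statement of the standard circulant-matrix argument): a circulant precision matrix Q built from a base c_base is diagonalised by the DFT, with eigenvalues lambda_vec = fft(c_base). A zero-mean sample with precision Q can therefore be drawn as

    x = fft(lambda_vec ** -0.5 * z).real / sqrt(n),    with z = z_re + 1j * z_im and z_re, z_im ~ N(0, 1),

where the 1 / sqrt(n) compensates for scipy's unnormalised fft. The same diagonalisation gives the inverse: the base of C^-1 is ifft(fft(c_base) ** -1), which is checked against the dense inverse below.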


In [372]:
import scipy.fftpack as fft
# Base (first column) of a 128 x 128 circulant precision matrix:
# 2 + d on the diagonal and -1 for the two cyclic neighbours.
c_delta = lambda d: np.hstack(((2 + d), -1, np.zeros(128 - 3), -1))
c_base = c_delta(0.1)
c_base


Out[372]:
array([ 2.1, -1. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,  0. ,
        0. ,  0. ,  0. ,  0. ,  0. ,  0. , -1. ])

In [373]:
import scipy.linalg
C = scipy.linalg.circulant(c_base)   # dense circulant precision matrix
n = C.shape[1]
C_inv = np.linalg.inv(C)             # dense inverse, for comparison with the FFT route
plt.imshow(C_inv, interpolation = 'None')
C_inv.max()


Out[373]:
1.5617376188860608

In [374]:
# Inverse via the spectral identity: C has eigenvalues fft(c_base),
# so the base (first column) of C^-1 is ifft(fft(c_base) ** -1).
plop = fft.ifft(fft.fft(c_base) ** -1)
#plop = fft.irfft(fft.rfft(c_base) ** -1) / n
plop.real.max()


Out[374]:
1.5617376188860603

In [321]:
C_inv2 = scipy.linalg.circulant(plop.real)
plt.imshow(C_inv2, interpolation = 'None')


Out[321]:
<matplotlib.image.AxesImage at 0x7ff5e49a5710>

In [375]:
# Reference sample via a dense Cholesky factor of the covariance C_inv
C_chol = np.linalg.cholesky(C_inv)
z = np.random.normal(0, 1, [n])
mmm = np.matmul(C_chol, z)
plt.plot(mmm);



In [378]:
# Same sample drawn spectrally: eigenvalues of the precision matrix via the FFT,
# scaled complex white noise, then an FFT back (1/sqrt(n) for scipy's unnormalised fft).
lambda_vec = fft.fft(c_base)
Lambda_aux = np.power(lambda_vec, -0.5)
z_re = np.random.normal(0, 1, [n])
z_im = np.random.normal(0, 1, [n])
z = z_re + 1j * z_im
# z = np.random.normal(0, 1, [n])
x = fft.fft(Lambda_aux.real * z).real / np.sqrt(n)
plt.plot(x);


Now let's build the circulant matrix for the torus.

OK, for the moment I'll follow the example in the GMRF book, i.e. a stationary GMRF on a torus (64 x 64 in the cells below).
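
Concretely: for the base c_base of the precision matrix on an n x N torus (4 + delta at the origin and -1 at the neighbouring cells, wrapped periodically), the eigenvalues are lambda_mat = fft2(c_base) and a sample is fft2(lambda_mat ** -0.5 * z).real / sqrt(n * N), with z complex white noise as in the 1-D case. The next two cells implement exactly this; the first also adds the second-order neighbours along each axis.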


In [396]:
#c_delta = lambda d : np.hstack(((4 + d),-1,np.zeros(128 - 3),-1))
#c_delta = lambda d : np.hstack(((0),-1,np.zeros(128 - 3),-1))
# Base of the block-circulant precision matrix on a 64 x 64 torus:
# 4 + delta at the origin, -1 at the first and second neighbours along
# each axis (wrapped periodically via the N-1 / N-2 entries).
n = 64
N = 64
delta = 0.1
c_base = np.zeros([n, N])
c_base[0, 0] = 4 + delta
c_base[0, 1] = -1
c_base[0, 2] = -1

c_base[1, 0] = -1
c_base[2, 0] = -1

c_base[0, N-1] = -1
c_base[0, N-2] = -1

c_base[N-1, 0] = -1
c_base[N-2, 0] = -1

c_base


Out[396]:
array([[ 4.1, -1. , -1. , ...,  0. , -1. , -1. ],
       [-1. ,  0. ,  0. , ...,  0. ,  0. ,  0. ],
       [-1. ,  0. ,  0. , ...,  0. ,  0. ,  0. ],
       ...,
       [ 0. ,  0. ,  0. , ...,  0. ,  0. ,  0. ],
       [-1. ,  0. ,  0. , ...,  0. ,  0. ,  0. ],
       [-1. ,  0. ,  0. , ...,  0. ,  0. ,  0. ]])

In [397]:
%%time 
lambda_mat = fft.fft2(c_base)
z_re = np.random.normal(0, 1, [n, N])
z_im = np.random.normal(0, 1, [n, N])
z = z_re + 1j * z_im
x = fft.fft2((lambda_mat ** -0.5) * z).real / np.sqrt(n *N)
plt.imshow(x, interpolation = 'None')


CPU times: user 164 ms, sys: 0 ns, total: 164 ms
Wall time: 163 ms

In [423]:
%%time

n = 64
N = 64
delta = 0.0001
c_base = np.zeros([n, N])
c_base[0, 0] = 4 + delta
c_base[0, 1] = -1
#c_base[0, 2] = -1

c_base[1, 0] = -1
#c_base[2, 0] = -1

c_base[0, N-1] = -1
#c_base[0, N-2] = -1

c_base[N-1, 0] = -1
#c_base[N-2, 0] = -1

 
lambda_mat = fft.fft2(c_base)
z_re = np.random.normal(0, 1, [n, N])
z_im = np.random.normal(0, 1, [n, N])
z = z_re + 1j * z_im
x = fft.fft2((lambda_mat ** -0.5) * z).real / np.sqrt(n *N)
plt.imshow(x, interpolation = 'none')


CPU times: user 152 ms, sys: 16 ms, total: 168 ms
Wall time: 164 ms

Algorithm to simulate a GMRF with a block-circulant precision matrix
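
For reference, here is a compact sketch that collects the torus recipe above into one function. It assumes only the base c_base of the (block-)circulant precision matrix and scipy's unnormalised fft2; the cells that follow instead experiment with a dense matrix C left over from earlier in the session.

In [ ]:
import numpy as np
from scipy.fftpack import fft2

def sample_gmrf_torus(c_base):
    """Sketch: one sample of a zero-mean GMRF on a torus whose block-circulant
    precision matrix has base c_base; assumes all eigenvalues fft2(c_base) > 0."""
    n, N = c_base.shape
    lam = fft2(c_base)                                  # eigenvalues of the precision matrix
    z = np.random.normal(size=(n, N)) + 1j * np.random.normal(size=(n, N))
    return fft2(lam ** -0.5 * z).real / np.sqrt(n * N)  # real part has the right covariance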


In [69]:
## Simulate complex random noise: draw pairs of N(0,1) reals and
## reinterpret each (real, imag) pair as one complex number.
import scipy.stats
zr = scipy.stats.norm.rvs(size=(C.size,2),loc=0,scale=1)
zr.dtype = np.complex_
#plt.hist(zr.real)

In [70]:
from scipy.fftpack import ifft2, fft2

In [71]:
Lm = np.sqrt(C.shape[0]*C.shape[0]) * fft2(C)   # sqrt(n*N) scaling; here n == N == C.shape[0]

In [72]:
Lm.shape


Out[72]:
(10000, 10000)

In [73]:
zr.shape


Out[73]:
(100000000, 1)

In [74]:
Lm.size


Out[74]:
100000000

In [75]:
%time v = fft2(np.sqrt(Lm) * zr.reshape(Lm.shape))


CPU times: user 15.1 s, sys: 2.74 s, total: 17.9 s
Wall time: 17.9 s

In [76]:
x = v.real

In [77]:
x.shape


Out[77]:
(10000, 10000)

In [78]:
plt.imshow(x,interpolation='None')


Out[78]:
<matplotlib.image.AxesImage at 0x7f5f49d9ff90>

In [79]:
cc = scipy.linalg.inv(C)

In [84]:
plt.plot(cc[:,0])


Out[84]:
[<matplotlib.lines.Line2D at 0x7f5f52a5e150>]

In [85]:
n = x.shape[0]
mm = scipy.stats.multivariate_normal(np.zeros(n),cc)

In [86]:
mmm = mm.rvs()

In [87]:
plt.imshow(mmm.reshape(100,100))


Out[87]:
<matplotlib.image.AxesImage at 0x7f5f529e1790>

In [64]:
scipy.stats.multivariate_normal?


Signature:       scipy.stats.multivariate_normal(self, mean=None, cov=1, allow_singular=False, seed=None)
Call signature:  scipy.stats.multivariate_normal(mean=None, cov=1, allow_singular=False, seed=None)
Type:            multivariate_normal_gen
String form:     <scipy.stats._multivariate.multivariate_normal_gen object at 0x7f5f5d6b2a90>
File:            /opt/conda/envs/biospytial/lib/python2.7/site-packages/scipy/stats/_multivariate.py
Docstring:      
A multivariate normal random variable.

The `mean` keyword specifies the mean. The `cov` keyword specifies the
covariance matrix.

Methods
-------
``pdf(x, mean=None, cov=1, allow_singular=False)``
    Probability density function.
``logpdf(x, mean=None, cov=1, allow_singular=False)``
    Log of the probability density function.
``cdf(x, mean=None, cov=1, allow_singular=False, maxpts=1000000*dim, abseps=1e-5, releps=1e-5)``
    Cumulative distribution function.
``logcdf(x, mean=None, cov=1, allow_singular=False, maxpts=1000000*dim, abseps=1e-5, releps=1e-5)``
    Log of the cumulative distribution function.
``rvs(mean=None, cov=1, size=1, random_state=None)``
    Draw random samples from a multivariate normal distribution.
``entropy()``
    Compute the differential entropy of the multivariate normal.

Parameters
----------
x : array_like
    Quantiles, with the last axis of `x` denoting the components.
mean : array_like, optional
    Mean of the distribution (default zero)
cov : array_like, optional
    Covariance matrix of the distribution (default one)
allow_singular : bool, optional
    Whether to allow a singular covariance matrix.  (Default: False)
random_state : None or int or np.random.RandomState instance, optional
    If int or RandomState, use it for drawing the random variates.
    If None (or np.random), the global np.random state is used.
    Default is None.

Alternatively, the object may be called (as a function) to fix the mean
and covariance parameters, returning a "frozen" multivariate normal
random variable:

rv = multivariate_normal(mean=None, cov=1, allow_singular=False)
    - Frozen object with the same methods but holding the given
      mean and covariance fixed.

Notes
-----
Setting the parameter `mean` to `None` is equivalent to having `mean`
    be the zero-vector. The parameter `cov` can be a scalar, in which case
    the covariance matrix is the identity times that value, a vector of
    diagonal entries for the covariance matrix, or a two-dimensional
    array_like.
    

The covariance matrix `cov` must be a (symmetric) positive
semi-definite matrix. The determinant and inverse of `cov` are computed
as the pseudo-determinant and pseudo-inverse, respectively, so
that `cov` does not need to have full rank.

The probability density function for `multivariate_normal` is

.. math::

    f(x) = \frac{1}{\sqrt{(2 \pi)^k \det \Sigma}}
           \exp\left( -\frac{1}{2} (x - \mu)^T \Sigma^{-1} (x - \mu) \right),

where :math:`\mu` is the mean, :math:`\Sigma` the covariance matrix,
and :math:`k` is the dimension of the space where :math:`x` takes values.

.. versionadded:: 0.14.0

Examples
--------
>>> import matplotlib.pyplot as plt
>>> from scipy.stats import multivariate_normal

>>> x = np.linspace(0, 5, 10, endpoint=False)
>>> y = multivariate_normal.pdf(x, mean=2.5, cov=0.5); y
array([ 0.00108914,  0.01033349,  0.05946514,  0.20755375,  0.43939129,
        0.56418958,  0.43939129,  0.20755375,  0.05946514,  0.01033349])
>>> fig1 = plt.figure()
>>> ax = fig1.add_subplot(111)
>>> ax.plot(x, y)

The input quantiles can be any shape of array, as long as the last
axis labels the components.  This allows us for instance to
display the frozen pdf for a non-isotropic random variable in 2D as
follows:

>>> x, y = np.mgrid[-1:1:.01, -1:1:.01]
>>> pos = np.dstack((x, y))
>>> rv = multivariate_normal([0.5, -0.2], [[2.0, 0.3], [0.3, 0.5]])
>>> fig2 = plt.figure()
>>> ax2 = fig2.add_subplot(111)
>>> ax2.contourf(x, y, rv.pdf(pos))
Call docstring: 
Create a frozen multivariate normal distribution.

See `multivariate_normal_frozen` for more information.

In [510]:
nn = mm.rvs()


Example of performing an inverse FFT in two dimensions


In [8]:
from scipy.fftpack import ifftn
import matplotlib.pyplot as plt
import matplotlib.cm as cm
N = 30
f, ((ax1, ax2, ax3), (ax4, ax5, ax6)) = plt.subplots(2, 3, sharex='col', sharey='row')
xf = np.zeros((N,N))
xf[0, 5] = 1
xf[0, N-5] = 1
Z = ifftn(xf)
ax1.imshow(xf, cmap=cm.Reds)
ax4.imshow(np.real(Z), cmap=cm.gray)
xf = np.zeros((N, N))
xf[5, 0] = 1
xf[N-5, 0] = 1
Z = ifftn(xf)
ax2.imshow(xf, cmap=cm.Reds)
ax5.imshow(np.real(Z), cmap=cm.gray)
xf = np.zeros((N, N))
xf[5, 10] = 1
xf[N-5, N-10] = 1
Z = ifftn(xf)
ax3.imshow(xf, cmap=cm.Reds)
ax6.imshow(np.real(Z), cmap=cm.gray)
plt.show()



In [ ]: