Note: We now simulate many independent trials, using multiprocessing to run them in parallel across cores.
In [1]:
import time
import os
import shutil
from collections import namedtuple
import numpy as np
import h5py
import pickle
from multiprocessing import Pool
import pyximport; pyximport.install()
from Iterate import iterate
In [2]:
def init_grid(c):
    '''Create a (c.rows, c.cols) grid of random 0/1 cells with a zero border.

    The one-cell frame around the grid is cleared so that boundary cells
    start (and can be kept) dead.
    '''
    grid = np.random.randint(0, 2, (c.rows, c.cols))
    # Zero out the first/last row and first/last column in one shot each.
    grid[[0, -1], :] = 0
    grid[:, [0, -1]] = 0
    return grid
In [3]:
# Configure parameters for the simulation run.
Const = namedtuple('c', ['rows', 'cols', 'n_iterations', 'n_trials', 'n_cores', 'rootdir'])
c = Const(rows=150, cols=150, n_iterations=600, n_trials=64, n_cores=4,
rootdir='./results/')
# Trials are split evenly across worker processes, so the count must divide.
# Raise explicitly rather than assert: asserts are stripped under `python -O`.
if c.n_trials % c.n_cores != 0:
    raise ValueError('n_trials must be a multiple of n_cores')
# Create a fresh results directory
if os.path.exists(c.rootdir):
    shutil.rmtree(c.rootdir)
os.makedirs(c.rootdir)
# Persist the parameters next to the results; derive the path from c.rootdir
# (the original hard-coded './results/' here) and close the file deterministically.
with open(os.path.join(c.rootdir, 'c.p'), 'wb') as fh:
    pickle.dump(c._asdict(), fh)
In [4]:
def start_trial(trialnum):
    '''Run one trial: seed, build a random grid, and record every iteration.

    Parameters
    ----------
    trialnum : int
        Trial index; used as the RNG seed so each trial is reproducible.

    Returns
    -------
    ndarray of shape (c.rows, c.cols, c.n_iterations)
        Slice 0 is the initial grid; slice i (i >= 1) is the grid after
        i applications of `iterate`.
    '''
    np.random.seed(trialnum)
    Z = init_grid(c)
    # NOTE: dtype=int stores 8 bytes per 0/1 cell; np.int8 would cut memory
    # 8x, but would also change the returned dtype for downstream consumers.
    Z_history = np.empty((c.rows, c.cols, c.n_iterations), dtype=int)
    Z_history[:, :, 0] = Z  # Initial state
    # BUGFIX: start at 1 — the original `range(c.n_iterations)` overwrote
    # slice 0 (the initial state) with the result of the first iteration.
    for i in range(1, c.n_iterations):
        Z = iterate(Z, c)
        Z_history[:, :, i] = Z
    return Z_history
In [5]:
start = time.time()
# Use the pool as a context manager so worker processes are terminated and
# reaped even if a trial raises; the original pool was never closed/joined.
with Pool(c.n_cores) as p:
    all_trials = p.map(start_trial, range(c.n_trials))
print('Time elapsed: ', time.time() - start)
In [6]:
# Create new store; derive the path from c.rootdir (was hard-coded
# './results/') and use a context manager so the file is closed even if
# the bulk assignment below raises.
with h5py.File(os.path.join(c.rootdir, 'results.hdf5'), 'w') as f:
    dset = f.create_dataset("Results", (c.n_trials, c.rows, c.cols, c.n_iterations), dtype=int)
    dset[...] = all_trials
In [7]:
# Materialize the trial list as an ndarray ONCE: the original called
# np.array(all_trials) twice, copying the multi-gigabyte result a second time.
trials_arr = np.asarray(all_trials)
print(trials_arr.shape)
print(trials_arr.nbytes / 1e9, 'GB')