In [1]:
import h5py
from pearce.mocks.kittens import TrainingBox
In [2]:
fname = '/u/ki/swmclau2/des/wp_zheng07/PearceWpCosmo.hdf5'
In [3]:
with h5py.File(fname, 'r') as f:
    hod_pnames = f.attrs['hod_param_names']
    hod_pvals = f.attrs['hod_param_vals']
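A quick sanity check on those attributes (a sketch, assuming they come back as plain numpy arrays, which is how h5py returns attrs) is to confirm each row of hod_param_vals lines up with hod_param_names:

# Sketch: confirm the design shape matches the parameter name list.
with h5py.File(fname, 'r') as f:
    pnames = f.attrs['hod_param_names']
    pvals = f.attrs['hod_param_vals']
print(list(pnames))    # the HOD parameters varied in the training design
print(pvals.shape)     # (n_training_points, n_params)
assert pvals.shape[1] == len(pnames)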
In [4]:
cat = TrainingBox(0)
cat.load(1.0, HOD='zheng07')
In [18]:
param_dict = dict(zip(hod_pnames, hod_pvals[1]))
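The same zip pattern extends to the whole design if a parameter dictionary is needed for every training point; an illustrative one-liner (not run in the original notebook):

# Illustrative only: one parameter dict per row of the training design.
all_param_dicts = [dict(zip(hod_pnames, row)) for row in hod_pvals]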
In [29]:
from scipy.optimize import minimize_scalar
def _add_logMmin(hod_params, cat, nd=5e-4):
    """
    In the fixed number density case, find the logMmin value that matches nd given hod_params.
    :param hod_params:
        The other HOD parameters besides logMmin.
    :param cat:
        The catalog in question.
    :param nd:
        The target galaxy number density.
    :return:
        None. hod_params will have logMmin added to it.
    """
    hod_params['logMmin'] = 13.0  # initial guess
    #cat.populate(hod_params)  # may be overkill, but will ensure params are written everywhere

    def func(logMmin, hod_params):
        print(logMmin)
        hod_params.update({'logMmin': logMmin})
        return (cat.calc_analytic_nd(hod_params, min_ptcl=100) - nd)**2

    res = minimize_scalar(func, bounds=(12, 16), args=(hod_params,),
                          options={'maxiter': 100}, method='Bounded')
    # Record the minimizer itself, not whichever value the last evaluation happened to try.
    hod_params['logMmin'] = res.x
In [30]:
_add_logMmin(param_dict, cat)
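A cheap follow-up check (a sketch, not part of the original run) is that the fitted logMmin sits strictly inside the (12, 16) search bounds, i.e. the bounded minimizer did not pin to an endpoint:

# Sketch: the fitted value should lie strictly inside the search bounds.
assert 12.0 < param_dict['logMmin'] < 16.0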
In [31]:
param_dict
Out[31]:
In [32]:
cat.populate(param_dict, min_ptcl=100)
In [33]:
len(cat.model.mock.galaxy_table)
Out[33]:
In [34]:
5e-4*(1000**3)
Out[34]:
500000.0
In [39]:
cat.calc_analytic_nd(param_dict, min_ptcl=100)
Out[39]:
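Since the expected count above uses a 1000**3 volume, the realized number density of the populated mock can be compared to the 5e-4 target directly; a sketch, assuming that side length (presumably in Mpc/h) and using only quantities already computed in this notebook:

# Sketch: realized vs. target number density.
Lbox = 1000.0  # side length assumed from the 1000**3 volume used above
realized_nd = len(cat.model.mock.galaxy_table) / Lbox**3
target_nd = 5e-4
print(realized_nd, target_nd, (realized_nd - target_nd) / target_nd)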