First, import all the modules needed for the analysis: healpix_util and astropy for handling the catalogue, scikit-learn's BallTree for the pair counting, and the usual numerical and plotting stack.
In [1]:
import healpix_util as hu
import astropy as ap
import numpy as np
from astropy.io import fits
from astropy.table import Table
import astropy.io.ascii as ascii
from astropy.constants import c
import matplotlib.pyplot as plt
import math as m
from math import pi
#from scipy.constants import c
import scipy.special as sp
from astroML.decorators import pickle_results
from scipy import integrate
import warnings
from sklearn.neighbors import BallTree
import pickle
import multiprocessing as mp
import time
from lcdmmetric import *
from progressbar import *
from tqdm import *
from functools import partial
import pymangle
from scipy.optimize import curve_fit
#from astroML.datasets import fetch_sdss_specgals
#from astroML.correlation import bootstrap_two_point_angular
%matplotlib inline
In [4]:
dr12q=fits.open("./input/DR12Q.fits")
In [5]:
dr12q
Out[5]:
In [6]:
dr12qdat=dr12q[1].data
In [7]:
dr12qdat.columns
Out[7]:
In [8]:
z=dr12qdat['Z_PIPE']
In [9]:
ra=dr12qdat['RA']
In [10]:
dec=dr12qdat['DEC']
In [ ]:
fdata = open("./output/DR12Q.dat",'w')
fdata.write("z\t ra\t dec \n")
for i in range(len(ra)):    # write one line per quasar
    fdata.write("%f\t" %z[i])
    fdata.write("%f\t" %ra[i])
    fdata.write("%f\n" %dec[i])
fdata.close()
In [ ]:
z
In [ ]:
ra,dec
In [ ]:
dat=np.array([z,ra,dec])
In [ ]:
dat
In [ ]:
dat=dat.transpose()
In [ ]:
dat
Pickle the data array so later runs can read it back quickly, following the approach used in V01 for the SDSS-LRG sample (taken from http://cosmo.nyu.edu/~eak306/SDSS-LRG.html and converted to ascii with comoving distances etc.).
In [ ]:
# Saving the objects:
with open('datDR12Q.pkl', 'w') as f:  # Python 3: open(..., 'wb')
    pickle.dump(dat, f)
In [2]:
# Getting back the objects:
with open('datDR12Q.pkl') as f:  # Python 3: open(..., 'rb')
    dat = pickle.load(f)
dat
Out[2]:
In [3]:
# Inverse of the dimensionless Hubble parameter E(z) for flat LCDM with Om=0.3, OL=0.7
Ez = lambda x: 1/m.sqrt(0.3*(1+x)**3+0.7)
# Comoving distance of a data point from its redshift. The definition depends on the assumed
# cosmology (here flat LCDM). The c/H0 prefactor cancels in the correlation estimator and is
# therefore omitted, so distances come out in units of c/H0.
def DC_LCDM(z):
    return integrate.quad(Ez, 0, z)[0]
DC_LCDM = np.vectorize(DC_LCDM)
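For reference, DC_LCDM evaluates the line-of-sight comoving distance in units of $c/H_0$ for a flat $\Lambda$CDM model,

$$D_C(z) = \frac{c}{H_0}\int_0^z \frac{dz'}{E(z')}, \qquad E(z) = \sqrt{\Omega_m(1+z)^3 + \Omega_\Lambda},$$

with $\Omega_m = 0.3$ and $\Omega_\Lambda = 0.7$; the code computes only the integral, and the $c/H_0$ factor is restored later when converting separations to Mpc/$h$.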
In [11]:
dr12f = open("./output/DR12Qsrarf.dat",'w')
dr12f.write("z\t ra\t dec\t s\t rar\t decr \n")
for i in range(0,len(z)):
    dr12f.write("%f\t " %z[i])
    dr12f.write("%f\t %f\t " %(ra[i],dec[i]))
    dr12f.write("%f\t " %DC_LCDM(z[i]))
    dr12f.write("%f\t %f\n " %(ra[i]*pi/180.0,dec[i]*pi/180.0))
dr12f.close()
In [12]:
dr12Qdat=ascii.read("./output/DR12Qsrarf.dat")
In [13]:
s=dr12Qdat['s']
rar=dr12Qdat['rar']     # RA in radians, as written to the file above
decr=dr12Qdat['decr']   # Dec in radians
In [15]:
dat=np.array([s,rar,decr])
In [16]:
dat
Out[16]:
In [17]:
dat=dat.transpose()
In [18]:
dat
Out[18]:
As above, pickle the array of (comoving distance, RA, Dec in radians) so later runs can read it back quickly.
In [19]:
# Saving the objects:
with open('datDR12Q.pkl', 'w') as f:  # Python 3: open(..., 'wb')
    pickle.dump(dat, f)
In [20]:
# Getting back the objects:
with open('datDR12Q.pkl') as f:  # Python 3: open(..., 'rb')
    dat = pickle.load(f)
dat
Out[20]:
In [21]:
bins=np.arange(0.,0.08,0.005)
In [22]:
print bins
In [23]:
binsq=bins**2
In [24]:
binsq
Out[24]:
In [ ]:
len(dat)
In [27]:
LCDMmetricsq(dat[0],dat[1])
Out[27]:
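The cell above is a quick sanity check of the imported metric on the first two objects. The lcdmmetric module itself is not included in this notebook, so the following is only a minimal sketch, under the assumption that LCDMmetricsq returns the squared comoving separation between two (s, RA, Dec) triples with the angles in radians; the actual implementation may differ.

import numpy as np

def lcdm_metric_sq_sketch(p1, p2):
    """Hypothetical squared comoving separation between two points given as
    (comoving distance in units of c/H0, RA in radians, Dec in radians)."""
    s1, ra1, dec1 = p1
    s2, ra2, dec2 = p2
    # cosine of the angular separation between the two lines of sight
    costheta = (np.sin(dec1)*np.sin(dec2)
                + np.cos(dec1)*np.cos(dec2)*np.cos(ra1 - ra2))
    # law of cosines in flat comoving space
    return s1*s1 + s2*s2 - 2.0*s1*s2*costheta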
In [28]:
%%time
BTD = BallTree(dat,metric='pyfunc',func=LCDMmetricsq,leaf_size=5)
with open('BTDDR12QLCDM.pkl', 'w') as f:
    pickle.dump(BTD,f)
In [29]:
with open('BTDDR12QLCDM.pkl') as f:
    BTD = pickle.load(f)
BTD
Out[29]:
In [30]:
%%time
start_time=time.time()
counts_DD=BTD.two_point_correlation(dat,binsq)  # cumulative pair counts; squared bin edges to match the squared-distance metric
print counts_DD
end_time=time.time()
tottime=end_time-start_time
print "Total run time:"
print tottime
with open('BTD12QcDDLCDM.pkl', 'w') as f:
    pickle.dump(counts_DD,f)
In [31]:
with open('BTD12QcDDLCDM.pkl') as f:
    counts_DD = pickle.load(f)
counts_DD
Out[31]:
In [32]:
DD=np.diff(counts_DD)
In [33]:
DD
Out[33]:
In [34]:
plt.plot(bins[1:len(bins)],DD,'ro-')
Out[34]:
BallTree.two_point_correlation with leaf_size=5 runs almost 10 times faster, so the same settings are used for the random catalogue.
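One rough way to gauge the effect of leaf_size is to time the tree build plus pair counting for a couple of values; this is a minimal sketch that reuses the dat array, binsq edges and LCDMmetricsq metric defined above (time_two_point is a hypothetical helper, and 40 is scikit-learn's default leaf size).

import time
from sklearn.neighbors import BallTree

def time_two_point(data, edges_sq, leaf_size):
    # build a tree with the custom squared-distance metric and count cumulative pairs
    t0 = time.time()
    tree = BallTree(data, metric='pyfunc', func=LCDMmetricsq, leaf_size=leaf_size)
    counts = tree.two_point_correlation(data, edges_sq)
    return counts, time.time() - t0

#for ls in (5, 40):
#    _, dt = time_two_point(dat, binsq, ls)
#    print "leaf_size = %d: %.1f s" % (ls, dt)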
In [35]:
dataR=ascii.read("./output/rand200kDR12Q.dat")
In [36]:
dataR
Out[36]:
In [37]:
len(dataR)
Out[37]:
In [38]:
len(dat)
Out[38]:
In [39]:
rdr12f = open("./output/rDR12Qsrarf.dat",'w')   # write the randoms to their own file, keeping the data file intact
rdr12f.write("z\t ra\t dec\t s\t rar\t decr \n")
for i in range(0,len(dataR)):
    rdr12f.write("%f\t " %dataR['z'][i])
    rdr12f.write("%f\t %f\t " %(dataR['ra'][i],dataR['dec'][i]))
    rdr12f.write("%f\t " %DC_LCDM(dataR['z'][i]))
    rdr12f.write("%f\t %f\n " %((dataR['ra'][i]*pi)/180.0,(dataR['dec'][i]*pi)/180.0))
rdr12f.close()
In [40]:
datR=ascii.read("./output/rDR12Qsrarf.dat")
In [41]:
datR
Out[41]:
In [42]:
rs=np.array(datR['s'])
rrar=np.array(datR['rar'])
rdecr=np.array(datR['decr'])
In [43]:
datR=np.array([rs,rrar,rdecr])
In [44]:
datR
Out[44]:
In [45]:
datR.reshape(3,len(dataR))
Out[45]:
In [46]:
datR=datR.transpose()
In [47]:
datR
Out[47]:
In [48]:
# Saving the objects:
with open('./output/rDR12Qsrarf.pkl', 'w') as f:  # Python 3: open(..., 'wb')
    pickle.dump(datR, f)
In [49]:
# Getting back the objects:
with open('./output/rDR12Qsrarf.pkl') as f:  # Python 3: open(..., 'rb')
    datR = pickle.load(f)
datR
Out[49]:
In [50]:
%%time
BT_R2 = BallTree(datR,metric='pyfunc',func=LCDMmetricsq,leaf_size=5)
with open('./output/BTRDR12QLCDM.pkl', 'w') as f:
    pickle.dump(BT_R2,f)
In [51]:
with open('./output/BTRDR12QLCDM.pkl') as f:
    BTR = pickle.load(f)
BTR
Out[51]:
In [52]:
%%time
start_time=time.time()
counts_RR=BTR.two_point_correlation(datR,binsq)  # squared bin edges, consistent with counts_DD
print counts_RR
end_time=time.time()
tottime=end_time-start_time
print "Total run time:"
print tottime
with open('./output/BTRDR12QcRRLCDM.pkl', 'w') as f:
    pickle.dump(counts_RR,f)
In [53]:
with open('./output/BTRDR12QcRRLCDM.pkl') as f:
    counts_RR = pickle.load(f)
counts_RR
Out[53]:
In [54]:
counts_RR
Out[54]:
In [55]:
RR=np.diff(counts_RR)
In [56]:
RR
Out[56]:
In [57]:
plt.plot(bins[1:len(bins)],RR,'bo-')
Out[57]:
In [58]:
RR_zero = (RR == 0)   # flag empty random bins to avoid division by zero in the estimator
RR[RR_zero] = 1
In [59]:
%%time
start_time=time.time()
counts_DR=BTR.two_point_correlation(dat,binsq)  # squared bin edges, consistent with counts_DD and counts_RR
print counts_DR
end_time=time.time()
tottime=end_time-start_time
print "Total run time:"
print tottime
with open('./output/BTRDR12QcDRLCDM.pkl', 'w') as f:
    pickle.dump(counts_DR,f)
In [60]:
with open('./output/BTRDR12QcDRLCDM.pkl') as f:
    counts_DR = pickle.load(f)
counts_DR
Out[60]:
In [61]:
DR=np.diff(counts_DR)
In [62]:
corrells=(4.0 * DD - 4.0 * DR + RR) / RR
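The line above has the form of the Landy-Szalay estimator with the random-to-data normalization folded into the prefactors. With DD and RR as returned by two_point_correlation (every pair counted in both orders), DR counted once per data-random pair, and $f = N_R/N_D$,

$$\xi(s) = \frac{f^2\,DD(s) - 2f\,DR(s) + RR(s)}{RR(s)},$$

so the factors of 4 correspond to assuming $f = 2$ (twice as many randoms as data points); this reading of the prefactors is an assumption and should be checked against the actual sizes len(dat) and len(datR) above.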
In [63]:
DR
Out[63]:
In [64]:
corrells
Out[64]:
In [65]:
plt.plot(bins[1:len(bins)],corrells,'go-')
Out[65]:
In [66]:
plt.plot(bins[1:len(bins)],bins[1:len(bins)]*bins[1:len(bins)]*corrells*(c*1e-5)**2,'go-')
Out[66]:
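In this and the following plots, the factor c*1e-5 (with c in m/s from astropy.constants) is numerically $c/H_0 = 299792.458/100 \approx 2998$ Mpc/$h$ for $H_0 = 100\,h\ \mathrm{km\,s^{-1}\,Mpc^{-1}}$, restoring the prefactor omitted in DC_LCDM so that separations are expressed in comoving Mpc/$h$. Note that astropy's c is a Quantity carrying units; using c.value (or the scipy constant commented out in the imports) keeps the plotted arrays as plain floats.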
In [67]:
plt.plot(bins[2:len(bins)],bins[2:len(bins)]*bins[2:len(bins)]*corrells[1:len(bins)]*(c*1e-5)**2,'go-')
Out[67]:
In [68]:
plt.plot(bins[2:len(bins)],corrells[1:len(bins)],'go-')
Out[68]:
In [69]:
plt.plot(bins[2:len(bins)],corrells[1:len(bins)],'go-')
plt.savefig("correlDR12Qls.pdf")
In [70]:
plt.plot(bins[2:len(bins)]*c/1e5,corrells[1:len(bins)],'bo-')
plt.savefig("correl12Q1ls.pdf")
In [71]:
plt.yscale('log')
plt.plot(bins[1:len(bins)]*c/1e5,corrells,'bo-')
plt.savefig("correllsfiglogDR12Q.pdf")
In [72]:
plt.yscale('log')
plt.plot(bins[2:len(bins)]*c/1e5,corrells[1:len(bins)],'ro-')
plt.savefig("correllslog2xDR12Q.pdf")
In [73]:
plt.yscale('log')
plt.xscale('log')
plt.plot(bins[1:len(bins)]*c/1e5,corrells,'bo-')
plt.savefig("correllsloglogDR12Q.pdf")