In [1]:
#
# LF test with noise
# C = S + N, where noise N = sigma^2 I, where I is the identity matrix
#
# We generate 20 values of parameters x for function logLF(x)
# We chose our parameters around the value of Boltzmann code generated C_3
# CAMB generated C_3 equals 5.88275e-10
#
# 1. Set all C_l = 0 except C_3  DONE
#
# 2. LF is based on a matrix C that only has a P_3 term. DONE
#
# 3. Add to that matrix a white noise term (sigma^2 on the diagonal). DONE
#
# 4. LF now has *exactly two* free parameters, C_3 and sigma^2. DONE
#
# 5. What is LF vs C_3 (at, perhaps, a couple values of sigma^2)? PROBLEM!
#
# 6. over-plot |a_3m|^2 values
#
# You could initialize that parameter to the empirical variance!
#

In [2]:
%matplotlib inline
import math
import matplotlib.pyplot as plt 
import numpy as np
import healpy as hp
import pyfits as pf
import astropy as ap
import os
from scipy.special import eval_legendre  ##special scipy function

In [3]:
#
# Review of Likelihood Formalism:
#
# -2 loglikelihood is
# -2 ln L \propto m^T C^-1 m + lnDet C
# where C = S + N
# We are working with noiseless maps, N = 0, so C = S
#
# In real space, 
# data: the temperature map, 
# parameters: theoretical CAMB generated C_l, C^theory_l 
#
# m = array of temperature pixels
# S = S_ij
# N = diagonal noise (but here it is noiseless! N=0)
#
# In spherical harmonic space
# data: the Healpix anafast outputarray of a_lm (or similarly, \hat{C}_l )
# parameters: the theoretical C_l
#
# m = a_lm coefficients
# S is diagonal (C_2, C_3, etc.)
# N is non-sparse matrix (but here it is noiseless! N=0)
#
#
# NOTE: The correct standard is to systematically remove the monopole, dipole terms l=0,l=1
#       Also, we use in the following lmax = 2*nside

In [4]:
cd ~/Desktop/CMBintheLikeHoodz/Likelihood_Comparison


/Users/evanbiederstedt/Desktop/CMBintheLikeHoodz/Likelihood_Comparison

In [5]:
camb1 = "camb_nside16_lmax32_alms.fits"
camb2 = "camb_nside16_lmax32_map.fits"
camb3 = "camb_nside16_lmax32_scalcls.fits"
planck1 = "100GHz_nside16_lmax32_cls.fits"
planck2 = "100GHz_nside16_lmax32_cmb_alm.fits"
planck3 = "100GHz_nside16_lmax32_sky_alm.fits"
planck4 = "100GHz_nside16_lmax32_skymap.fits"

nside = 16

In [6]:
npix = 12*(nside**2) #total number of pixels, npix
LMAX = ((2*nside)) #maximum l of the power spectrum C_l
heal_npix = hp.nside2npix(nside) # Healpix calculated npix

print "The total number of pixels is " + str(npix)
print "The maximum ell of the power spectrum C_l set to lmax = 2*nside " +str(LMAX)
print "Healpix tells me total number of pixels npix is equal to " + str(heal_npix)


The total number of pixels is 3072
The maximum ell of the power spectrum C_l set to lmax = 2*nside 32
Healpix tells me total number of pixels npix is equal to 3072

In [7]:
#
# Begin with a Munich Planck-simulated map, and CAMB Boltzmann-code generated C_l values
#

In [8]:
# Theoretical scalar C_l array, CAMB
#
# open a FITS file, theoretical C_l values generated by CAMB
# type()=pyfits.hdu.hdulist.HDUList
cl_open = pf.open(camb3)
# recall camb3 = "camb_nside16_lmax32_scalcls.fits"

In [9]:
theoryCls_arr1 = cl_open[1].data
print theoryCls_arr1[:10]
# Recall there are four columns: temp, E pol, B pol, grad-temp cross terms
# first two values are zero, i.e. monopole, dipole
# XXX.field() references columns by 0-index
# field(0) is temperature values
# all Cl scalar temp values put into ndarray
# type()=numpy.ndarray


[(0.0, 0.0, 0.0, 0.0) (0.0, 0.0, 0.0, 0.0)
 (1.2639207e-09, 5.5308927e-14, 0.0, 3.6016001e-12)
 (5.8827504e-10, 4.5345086e-14, 0.0, 2.2704995e-12)
 (3.2867314e-10, 3.0680604e-14, 0.0, 1.4030458e-12)
 (2.065753e-10, 1.8198442e-14, 0.0, 8.6680196e-13)
 (1.4100289e-10, 9.6222612e-15, 0.0, 5.4035639e-13)
 (1.0233801e-10, 4.5884262e-15, 0.0, 3.3960166e-13)
 (7.7812937e-11, 2.0448748e-15, 0.0, 2.1437516e-13)
 (6.1362422e-11, 9.4503657e-16, 0.0, 1.3715881e-13)]

In [10]:
cltemps = theoryCls_arr1.field(0) 
print cltemps
print "The length of the array of theoretical Cl's is " +str(len(cltemps))
print "The array contains [C_0, C_1, C_2,..., C_" +str(len(cltemps)-1) + "]"
#print type(cltemps)=numpy.ndarray


[  0.00000000e+00   0.00000000e+00   1.26392075e-09   5.88275040e-10
   3.28673144e-10   2.06575299e-10   1.41002890e-10   1.02338013e-10
   7.78129366e-11   6.13624221e-11   4.98225766e-11   4.14123076e-11
   3.50811741e-11   3.01814025e-11   2.63133803e-11   2.32104266e-11
   2.06850804e-11   1.86004823e-11   1.68565179e-11   1.53792031e-11
   1.41130128e-11   1.30162998e-11   1.20591913e-11   1.12186076e-11
   1.04760627e-11   9.81660760e-12   9.22804801e-12   8.70035207e-12
   8.22520958e-12   7.79568684e-12   7.40596039e-12   7.05096528e-12
   6.72585642e-12]
The length of the array of theoretical Cl's is 33
The array contains [C_0, C_1, C_2,..., C_32]

In [11]:
# remove monopole l=0 and dipole l=1
theoryCl = cltemps[2:]
# len(theoryCl) = 31
print theoryCl
# theoryCl is np.ndarray of theoretical [C_2, C_3, C_4, ..., C_32]


[  1.26392075e-09   5.88275040e-10   3.28673144e-10   2.06575299e-10
   1.41002890e-10   1.02338013e-10   7.78129366e-11   6.13624221e-11
   4.98225766e-11   4.14123076e-11   3.50811741e-11   3.01814025e-11
   2.63133803e-11   2.32104266e-11   2.06850804e-11   1.86004823e-11
   1.68565179e-11   1.53792031e-11   1.41130128e-11   1.30162998e-11
   1.20591913e-11   1.12186076e-11   1.04760627e-11   9.81660760e-12
   9.22804801e-12   8.70035207e-12   8.22520958e-12   7.79568684e-12
   7.40596039e-12   7.05096528e-12   6.72585642e-12]

In [12]:
# Our input data is Gaerching generated, noiseless full-sky map
# Temperature map: here we use Planck simulated map from Munich, not CAMB map
# http://gavo.mpa-garching.mpg.de/planck/
#
# Read in with Healpy routine/function
#
# Use planck4 = "100GHz_nside16_lmax32_skymap.fits"
# This is a simulated data, 100GHz (where CMB dominates), no foregrounds
# 
mapread_camb2 = hp.read_map(camb2) # Healpix routine, input the sky map


NSIDE = 16
ORDERING = RING in fits file

In [13]:
hp.mollview(mapread_camb2) # visualization of full-sky CMB map, nside=16, lmax=32



In [14]:
# The uploaded temperature map is mapread_planck4 = hp.read_map(planck4)
print type(mapread_camb2) # type(mapread_planck4) = np.ndarray
print mapread_camb2.shape # mapread_planck4.shape = (3072, ) = (N_pix, )
#
# rename array for convenience
tempval = mapread_camb2
#print tempval


<type 'numpy.ndarray'>
(3072,)

In [15]:
# Next, we use healpy map2alm to tranform to alm values
# Our input data is CAMB generated, noiseless full-sky map
# We calculate an array of a_lm from this by using Healpix map2alm, a subroutine of anafast
#
# map2alm only outputs m >=0 values, because m = -l values are equivalent to m = +l values
#
# Using map2alm, the length of the alm array is expected to be:
# (mmax * (2 * lmax + 1 - mmax)) / 2 + lmax + 1)"
#
# For mmax = lmax, this is l(l+1)/2 + l + 1
# i.e. 
# l = 0, there is 1
# l = 1, there is 3
# l = 2, there is 6
# l = 3, there is 10
# l = 4, there is 15
# etc.
almarr = hp.map2alm(mapread_camb2)                # This is an array of a_lm values
print "The array of spherical harmonic coefficients a_lm is"
#print almarr
print "The arr.shape is " + str(almarr.shape)
print "The length of a_lm array is " + str(len(almarr))
#
print "For l=3, map2alm gives (a_00, a_10, a_11, a_20, a_21, a_22, a_30, a_31, a_32, a_33)"
print "However, this is NOT the order of the output! See below"
# In the Fortran F90 subroutines, complex alm are stored in an array that has 
# two dimensions to contain coefficients for positive and negative m values.
# Healpy doesn't do this....I think

print "============================="
print "============================="
print "Check indices with healpy.sphtfunc.Alm.getidx(lmax, l, m)"
print "Default ordering of healpy.map2alm() output is "
print "(0,0), (1,0), ..., (lmax, 0),"
print "(1,1), (2,1), ...., (lmax, 1),"
print "(2,2), .... (lmax, 2),(3,3), ...., (lmax, 3), etc. , .... (lmax, lmax)."


The array of spherical harmonic coefficients a_lm is
The arr.shape is (1176,)
The length of a_lm array is 1176
For l=3, map2alm gives (a_00, a_10, a_11, a_20, a_21, a_22, a_30, a_31, a_32, a_33)
However, this is NOT the order of the output! See below
=============================
=============================
Check indices with healpy.sphtfunc.Alm.getidx(lmax, l, m)
Default ordering of healpy.map2alm() output is 
(0,0), (1,0), ..., (lmax, 0),
(1,1), (2,1), ...., (lmax, 1),
(2,2), .... (lmax, 2),(3,3), ...., (lmax, 3), etc. , .... (lmax, lmax).

In [16]:
# ==========================
# DEMONSTRATION
# Notice how a_lm is indexed
# ==========================

mmm = np.arange(12) # define a map, i.e. an array of 12 "pixels"
lmaxxx = 4
alm = hp.map2alm(mmm, lmax=lmaxxx) # spherical harmonic transform
lm = hp.map2alm(mmm, lmax=lmaxxx) # spherical harmonic transform
print(alm)
print(alm.shape)

# So alm is actually a 1D vector.
# How is alm indexed?


l, m = hp.Alm.getlm(lmax=lmaxxx)
print(l)

print(m)




print "The l values are "+str(l)
print "The m values are "+str(m)
print " (l,m) is in order " +str(list(zip(l,m)))
#
# l, m = hp.Alm.getlm(lmax=lmax)
# print(l)
# [0 1 2 1 2 2]
# print(m)
# [0 0 0 1 1 2]
#
#
# So, for l = 2, m is [0, 1, 2].
#
# ==========================
# Notice how a_lm is indexed
# ==========================
#
#
#


[  1.43186913e+01 +0.00000000e+00j  -9.06158785e+00 +0.00000000e+00j
  -7.57672587e+00 +0.00000000e+00j   5.38292381e+00 +0.00000000e+00j
   2.49127749e+00 +0.00000000e+00j   6.02204636e-01 -2.01587603e+00j
   4.55476051e-16 +1.48937220e-15j   2.43950547e-01 -3.28507898e-01j
  -2.47937026e-17 +3.49331635e-16j   3.65595455e-01 -7.92402132e+00j
   5.92284905e-17 -4.53852723e-15j  -3.16614952e-01 -1.44872968e+01j
  -2.28780515e-03 +4.33449165e-01j  -5.66350430e-16 +1.59192782e-15j
   1.08501635e+01 -3.35394518e-16j]
(15,)
[0 1 2 3 4 1 2 3 4 2 3 4 3 4 4]
[0 0 0 0 0 1 1 1 1 2 2 2 3 3 4]
The l values are [0 1 2 3 4 1 2 3 4 2 3 4 3 4 4]
The m values are [0 0 0 0 0 1 1 1 1 2 2 2 3 3 4]
 (l,m) is in order [(0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (1, 1), (2, 1), (3, 1), (4, 1), (2, 2), (3, 2), (4, 2), (3, 3), (4, 3), (4, 4)]

In [17]:
# Check with healpy.sphtfunc.Alm.getidx(lmax, l, m)
# Returns index corresponding to (l,m) in an array describing alm up to lmax.
#
ell, emm = hp.Alm.getlm(lmax=32)

print "len(ell) is " +str(len(ell))
print "len(emm) is "+str(len(emm))
print "l values are "+str(ell[:10])
print "m values are "+str(emm[:10])
pairs = list(zip(ell, emm)) # put values together in pairs, zip()
ellemm = np.vstack((ell,emm)).T  # equivalent to list(zip(ell,emm)), but uses numpy throughout
print "Indices for a_lm for lmax (l, m) are:" 
print str(pairs[:50]) # The expected output


len(ell) is 561
len(emm) is 561
l values are [0 1 2 3 4 5 6 7 8 9]
m values are [0 0 0 0 0 0 0 0 0 0]
Indices for a_lm for lmax (l, m) are:
[(0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0), (13, 0), (14, 0), (15, 0), (16, 0), (17, 0), (18, 0), (19, 0), (20, 0), (21, 0), (22, 0), (23, 0), (24, 0), (25, 0), (26, 0), (27, 0), (28, 0), (29, 0), (30, 0), (31, 0), (32, 0), (1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1), (8, 1), (9, 1), (10, 1), (11, 1), (12, 1), (13, 1), (14, 1), (15, 1), (16, 1), (17, 1)]

In [18]:
print ellemm[:10]


[[0 0]
 [1 0]
 [2 0]
 [3 0]
 [4 0]
 [5 0]
 [6 0]
 [7 0]
 [8 0]
 [9 0]]

In [19]:
#
# For our first test, mode l = 3, we need to access a_lm coefficients a_30, a_31, a_32, a_33
# To find this for lmax = 32, we use
# healpy.sphtfunc.Alm.getidx(lmax, l, m)
# Returns index corresponding to (l,m) in an array describing alm up to lmax.
#
# Find the indices
index_a30 = hp.Alm.getidx(lmax=32, l=3, m=0)
index_a31 = hp.Alm.getidx(lmax=32, l=3, m=1)
index_a32 = hp.Alm.getidx(lmax=32, l=3, m=2)
index_a33 = hp.Alm.getidx(lmax=32, l=3, m=3)

In [20]:
print "Index a_30 is " +str(index_a30)
print "Index a_31 is "+str(index_a31)
print "Index a_32 is "+str(index_a32)
print "Index a_33 is "+str(index_a33)


Index a_30 is 3
Index a_31 is 35
Index a_32 is 66
Index a_33 is 96

In [21]:
#
# Create an array with only the values a_3m, i.e. a_30, a_31, a_32, a_33
#
# First convert the array of alm coefficients into a real
#
realalm = almarr.real
#
print realalm[:36]


[  4.27365741e-10   2.29584030e-09  -3.99058124e-05  -3.54786292e-05
   2.10319852e-05   9.87750024e-06   7.97092193e-06  -1.18858780e-05
  -7.27840112e-06   3.61680988e-07  -4.90787138e-06   3.47579016e-06
  -4.30349062e-06  -2.65684938e-07  -1.00274382e-06  -1.12408678e-06
   4.89651663e-06   7.38091856e-07   2.10038079e-07   3.19967529e-06
  -2.82808712e-06   3.98389365e-07   9.92859235e-07   1.12017252e-07
   1.85264741e-06  -1.88066911e-06  -5.47687726e-07  -4.03524074e-08
   6.09934135e-07   6.46948184e-07  -2.27504639e-07   6.00326431e-07
  -1.16138884e-07   6.93299985e-09   3.15959247e-09   6.92273981e-09]

In [22]:
empty_almlist = []
#
a30 = realalm[3]
a31 = realalm[35]
a32 = realalm[66]
a33 = realalm[96]
#
print "a30 is " + str(a30)
print "a31 is " + str(a31)
print "a32 is " + str(a32)
print "a33 is " + str(a33)
#
print str(pairs[3]) # Check with our output above
print str(pairs[35])
print str(pairs[66])
print str(pairs[96])
#
empty_almlist.append(a30)
empty_almlist.append(a31)
empty_almlist.append(a32)
empty_almlist.append(a33)
#
print empty_almlist


a30 is -3.54786291692e-05
a31 is 6.92273981371e-09
a32 is -7.65807282676e-07
a33 is 2.69713402623e-05
(3, 0)
(3, 1)
(3, 2)
(3, 3)
[-3.5478629169177805e-05, 6.9227398137093788e-09, -7.6580728267559685e-07, 2.697134026229829e-05]

In [23]:
# create array of real-valued alm coefficients, a30 a31 a32 a33
realalm3 = np.asarray(empty_almlist) # np.asarray() converts input into an array
print realalm3


[ -3.54786292e-05   6.92273981e-09  -7.65807283e-07   2.69713403e-05]

In [24]:
# Repeat the above procedure for mode l = 4, i.e. a40 a41 a42 a43 a44
# Find the indices
index_a40 = hp.Alm.getidx(lmax=32, l=4, m=0)
index_a41 = hp.Alm.getidx(lmax=32, l=4, m=1)
index_a42 = hp.Alm.getidx(lmax=32, l=4, m=2)
index_a43 = hp.Alm.getidx(lmax=32, l=4, m=3)
index_a44 = hp.Alm.getidx(lmax=32, l=4, m=4)
#
print "Index a_40 is " +str(index_a40)
print "Index a_41 is "+str(index_a41)
print "Index a_42 is "+str(index_a42)
print "Index a_43 is "+str(index_a43)
print "Index a_44 is "+str(index_a44)
#
# Check with the above ouput
print str(pairs[4]) 
print str(pairs[36])
print str(pairs[67])
print str(pairs[97])
print str(pairs[126])
#
emptylistalm2 = []
#
#print realalm
#
a40 = realalm[4]
a41 = realalm[36]
a42 = realalm[67]
a43 = realalm[97]
a44 = realalm[127]
#
print "a40 is " + str(a40)
print "a41 is " + str(a41)
print "a42 is " + str(a42)
print "a43 is " + str(a43)
print "a44 is " + str(a44)
#
emptylistalm2.append(a40)
emptylistalm2.append(a41)
emptylistalm2.append(a42)
emptylistalm2.append(a43)
emptylistalm2.append(a44)
#
#print emptylistalm2


Index a_40 is 4
Index a_41 is 36
Index a_42 is 67
Index a_43 is 97
Index a_44 is 126
(4, 0)
(4, 1)
(4, 2)
(4, 3)
(4, 4)
a40 is 2.10319852146e-05
a41 is 3.20500644244e-09
a42 is 3.94936640444e-07
a43 is 3.29951851271e-06
a44 is -1.19861955538e-09

In [25]:
# create array of real-valued alm coefficients, a40 a41 a42 a43 a44
realalm4 = np.asarray(emptylistalm2) # np.asarray() converts input into an array
print realalm4


[  2.10319852e-05   3.20500644e-09   3.94936640e-07   3.29951851e-06
  -1.19861956e-09]

In [26]:
# Calculate (abs(alm))**2 i.e. |alm|^2
abs_alm3 = np.absolute(realalm3)
abs_alm4 = np.absolute(realalm4)
print abs_alm3
print abs_alm4
# Now calculate the squares element-wise, x**2
alm3_squared = abs_alm3**2
alm4_squared = abs_alm4**2
print alm3_squared
print alm4_squared


[  3.54786292e-05   6.92273981e-09   7.65807283e-07   2.69713403e-05]
[  2.10319852e-05   3.20500644e-09   3.94936640e-07   3.29951851e-06
   1.19861956e-09]
[  1.25873313e-09   4.79243265e-17   5.86460794e-13   7.27453196e-10]
[  4.42344402e-10   1.02720663e-17   1.55974950e-13   1.08868224e-11
   1.43668884e-18]

In [27]:
# For l = 3 test, we need theoretical value of C_3; ditto for l = 4
print theoryCl
C3 = theoryCl[1]
print "theory C_3 is " +str(C3)
C4 = theoryCl[2]
print "theory C_4 is "+str(C4)


[  1.26392075e-09   5.88275040e-10   3.28673144e-10   2.06575299e-10
   1.41002890e-10   1.02338013e-10   7.78129366e-11   6.13624221e-11
   4.98225766e-11   4.14123076e-11   3.50811741e-11   3.01814025e-11
   2.63133803e-11   2.32104266e-11   2.06850804e-11   1.86004823e-11
   1.68565179e-11   1.53792031e-11   1.41130128e-11   1.30162998e-11
   1.20591913e-11   1.12186076e-11   1.04760627e-11   9.81660760e-12
   9.22804801e-12   8.70035207e-12   8.22520958e-12   7.79568684e-12
   7.40596039e-12   7.05096528e-12   6.72585642e-12]
theory C_3 is 5.88275e-10
theory C_4 is 3.28673e-10

In [28]:
# For lmax = 32, we must create an array of ell values, i.e. [0 1 2 3....31 32]
ell = np.arange(33)
print ell
# 
# Subtract the monopole and dipole, l=0, l=1
ellval = ell[2:]
print ellval


[ 0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
 25 26 27 28 29 30 31 32]
[ 2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26
 27 28 29 30 31 32]

In [29]:
# Calculate an array of (2*l + 1)C_l 
# i.e. 5*C_2, 7*C_3, 9*C_4, 11*C_5, 13*C_6, ...
print theoryCl

for i in ellval:
    paramsCl = (2*ellval + 1)*theoryCl # define array (2*l + 1)C_l 
    
print paramsCl


[  1.26392075e-09   5.88275040e-10   3.28673144e-10   2.06575299e-10
   1.41002890e-10   1.02338013e-10   7.78129366e-11   6.13624221e-11
   4.98225766e-11   4.14123076e-11   3.50811741e-11   3.01814025e-11
   2.63133803e-11   2.32104266e-11   2.06850804e-11   1.86004823e-11
   1.68565179e-11   1.53792031e-11   1.41130128e-11   1.30162998e-11
   1.20591913e-11   1.12186076e-11   1.04760627e-11   9.81660760e-12
   9.22804801e-12   8.70035207e-12   8.22520958e-12   7.79568684e-12
   7.40596039e-12   7.05096528e-12   6.72585642e-12]
[  6.31960373e-09   4.11792528e-09   2.95805830e-09   2.27232828e-09
   1.83303757e-09   1.53507020e-09   1.32281992e-09   1.16588602e-09
   1.04627411e-09   9.52483075e-10   8.77029352e-10   8.14897868e-10
   7.63088029e-10   7.19523226e-10   6.82607653e-10   6.51016879e-10
   6.23691163e-10   5.99788922e-10   5.78633527e-10   5.59700893e-10
   5.42663609e-10   5.27274559e-10   5.13327074e-10   5.00646987e-10
   4.89086545e-10   4.78519364e-10   4.68836946e-10   4.59945524e-10
   4.51763584e-10   4.44210813e-10   4.37180667e-10]

In [30]:
norm = ((2*ellval + 1))/(4*math.pi)
print norm


[ 0.39788736  0.5570423   0.71619724  0.87535219  1.03450713  1.19366207
  1.35281702  1.51197196  1.6711269   1.83028185  1.98943679  2.14859173
  2.30774667  2.46690162  2.62605656  2.7852115   2.94436645  3.10352139
  3.26267633  3.42183128  3.58098622  3.74014116  3.89929611  4.05845105
  4.21760599  4.37676094  4.53591588  4.69507082  4.85422576  5.01338071
  5.17253565]

In [31]:
anafastCl = hp.anafast(mapread_camb2, lmax=32)
#len(anafastCl) = 33
# remove monopole and dipole values, l=0, l=1
hatCl = anafastCl[2:] #len() = 31, type() = np.ndarray
hatC3 = hatCl[1]   # index 0 = C2, 1 = C3, etc.
hatC4 = hatCl[2]
print hatC3
print hatC4


6.49352544114e-10
3.16267210302e-10

In [32]:
# 
# Add a_lm squared, |a_lm|^2
#
print "The values for |a_lm|^2 are : "
print "For |a_3m|**2 such that a_30, a_31, a_32, a_33: "
print str(alm3_squared)
print "And for |a_4m|**2 such that a_40, a_41, a_42, a_43, a_44: "
print str(alm4_squared)


The values for |a_lm|^2 are : 
For |a_3m|**2 such that a_30, a_31, a_32, a_33: 
[  1.25873313e-09   4.79243265e-17   5.86460794e-13   7.27453196e-10]
And for |a_4m|**2 such that a_40, a_41, a_42, a_43, a_44: 
[  4.42344402e-10   1.02720663e-17   1.55974950e-13   1.08868224e-11
   1.43668884e-18]

In [33]:
# =========================================================================
#
# =========================================================================
#
# Data:
#     tempval      # the array of pixel values, (3072,)
#     realalm3     # array of alm values, a30, a31, a32, a33 
#     realalm4     # array of alm values, a40, a41, a42, a43, a44
#     alm3_squared # array of |alm|^2, (abs(a3m))**2
#     alm4_squared # array of |alm|^2, (abs(a4m))**2
#     hatCl        # array of anafast-calculated \hat{C}_l values, l=2 to l=32
#     hatC3        # \hat{C}_3 value
#     hatC4        # \hat{C}_4 value
#
# Parameters:
#     theoryCl     # array of Boltzmann code generated C_l, i.e. C^{theory}_l
#     paramsCl     # array of (2*l + 1)C_l from l=2 to l=lmax
#     C3           # array of C_3 value
#     C4           # array of C_4 value
#    
# Array of ell's: 
#     ellval       # array of l = 2 to l=lmax
#                  # [2 3 4 ... 31 32]
#     norm         # array of (2*l+1)/4pi
#                  # [5/4pi 7/4pi 9/4pi 11/4pi ... 63/4pi 65/4pi]
# =========================================================================
#
# =========================================================================

In [34]:
#
# Next, create the matrix, n_i /cdot n_j
# solely using Healpy routines, i.e. taking the dot product of the vectors
# The result is "dotproductmatrix"
#
# npix = 3072

In [35]:
totalpix = np.arange(npix) # An array indexing the total number of pixels

In [36]:
## healpy.pixelfunc.pix2vec(nside, ipix, nest=False)
## 
## will give three arrays
## arrays of all x values, all y values, all z values
## RING scheme default
# len()=3
# type()=tuple
#
#
vecval = hp.pix2vec(nside, totalpix) #Nside = 16, type()=tuple

In [37]:
vecvalx = vecval[0] #shape (3072,)
vecvaly = vecval[1]
vecvalz = vecval[2]

In [38]:
# First arrange arrays vertically
# numpy.vstack = Stack arrays in sequence vertically (row wise), input sequence of arrays
totalvecval = np.vstack((vecvalx, vecvaly, vecvalz)) #type()=numpy.ndarray

In [39]:
trans = totalvecval.T #transpose

In [40]:
dotproductmatrix = trans.dot(totalvecval) #take the dot product
# dotproductmatrix.shape = (npix, npix) = (3072, 3072)
# type(dotproductmatrix) = np.ndarray

In [41]:
# =========================================================
# =========================================================
#
# \Sum_l (2*l + 1)/4pi C^th_l P_l (dotproductmatrix)
# sum from l=2 to l=lmax
#
# arrays l = [2 3 4 .... lmax]
#        C_l = [C_2 C_3 .... C_lmax]
#
# The correct way to do the summation:
# 
# Step 1: calculate the matrix
#            M = dotproductmatrix
#
# Step 2: evaluate the function P_l(x) for each entry of the matrix
#         OUTPUT: [P_2(M) P_3(M) P_4(M) .... P_lmax(M) ]
#
# Step 3: (2*l +1)/4pi from l=2 to l=lmax
#          [5/4pi 7/4pi 9/4pi 11/4pi .... 65/4pi ]
#
# Step 4: multiply 
#         [5/4pi*P_2(M) + 7/4pi*P_3(M) +...... + 65/4pi*P_32(M)]
#
#
# Step 5: multiply by theoretical CAMB values, [C_2 C_3    C_31 C_32]
#         [5/4pi**C_2* P_2(M) + 7/4pi*C_3* P_3(M) +...... + 65/4pi*C_32* P_32(M)]
#
# Step 6: This is an array of S_ij for each theory C_l, l=2 to l=32
#         
#
#
# =========================================================
# =========================================================

In [42]:
# =========================================================
# =========================================================
#
# Now calculate the likelihood 
# -2lnL \propto m^T C^-1 m + ln det C + N ln (2pi)
#
# First term, m^T C^-1 m is the "model fit term"
# Second term, lndetC is the "complexity penalty"
# Third term, N ln 2pi, a constant
#
# m = tempval
# C = Sij
#
# STEP 1: do inverse of Sij
#        invSij = np.linalg.inv(Sij)
#
# STEP 2: do matrix mulplication, m.T*inv(C)*m
#       
# lnL_modelfit_terms = np.array([np.dot(tempval.T , np.dot(invSij[i] , tempval) ) for i in range(invSij.shape[0])])
#
# STEP 3: do logdet Sij
#
# logdetC = np.linalg.slogdet(Sij) #computes sign and log det C
# logdetC[1]
#
# STEP 4: compute N_pix * 2pi
#
# Npix2pi = (npix)*2*math.pi
#
# Step 5: -2loglikelihood = m.T*inv(C)*m + logdetC[1] + Npix2pi
# 
# =========================================================
# =========================================================

In [43]:
# CODE BOTTLENECK!
# 
# Evaluate Legendre from l=2 to l=lmax for each matrix entry
# [P_2(M) P_3(M) P_4(M) .... P_lmax(M) ]
# 
# WITHOUT BROADCASTING, one would do something like 
# PlMat = []
# for i in ellval:
#    PlMat.append( eval_legendre(i, dotproductmatrix) )
#
#
# With broadcasting, we use
PlMat = eval_legendre(ellval[:, None, None], dotproductmatrix)
# PlMat = [P_2(M) P_3(M) P_4(M) .... P_lmax(M) ]
# PlMat is an array, len()=31 of 31 3072 by 3072 matrices
# PlMat.shape = (31, 3072, 3072)

In [44]:
# multiply PlMat by (2*l+1)/4pi, i.e. norm
norm_matrix = norm[:, None, None] * PlMat
# [5/4pi * P_2(M)  7/4pi * P_3(M) ....   65/4pi * P_32(M)]

In [45]:
# JULY 1, 2015
#
#
# Here we define the LF 
#
# Next we should writen the REAL-space log-likelihood, -2 lnLikefunction
# this is a function of parameters
# plot against parameters, theory C_l
#
# Likelihood function should use -2lnL /propto m^T C^-1 M + log det M
#

In [46]:
# TEST C_3 Likelihood
#
# The covariance matrix is a function of variable "x" where "x" is "C_3", an unknown parameter. 
# 
# Our covariance matrix is therefore S_ij  = 7/4pi * x * P_3(matrix)   
# (We set l=3, i.e. l=3, and P_3)
# 
# The LF is then a function of x, LF(x). This is the only parameter we vary. 
#
# LF = -2loglikelihood /propto T^T inv(S_ij) T + log det (Sij) + N log (2pi)
# 
# We then plot LF(x) vs. parameters x. 
#

In [47]:
# define pixel-value arrays
mT = np.matrix(tempval)     # mT.shape = (1, 3072)
m = np.matrix(tempval).T    # m.shape = (3072, 1)
Npix2pi = (npix)*2*math.pi  # LF constant

In [48]:
print C3 # Boltzmann code CAMB output


5.88275e-10

In [49]:
# generate a number of samples to plot for x
# Boltzmann code value for C3 is 5.88275e-10
# start at 1e-10, end at 9e-10
# default samples generated is 50

In [50]:
vary_x_samples1 = np.linspace(1e-10, 9e-10, num=20 ) #set default num = 20

In [51]:
print vary_x_samples1


[  1.00000000e-10   1.42105263e-10   1.84210526e-10   2.26315789e-10
   2.68421053e-10   3.10526316e-10   3.52631579e-10   3.94736842e-10
   4.36842105e-10   4.78947368e-10   5.21052632e-10   5.63157895e-10
   6.05263158e-10   6.47368421e-10   6.89473684e-10   7.31578947e-10
   7.73684211e-10   8.15789474e-10   8.57894737e-10   9.00000000e-10]

In [52]:
# create Sij array, len()=31, l=2 to l=32
# Sij = norm_matrix * theoryCl[:, None, None]
# [5/4pi*C_2*P_2(M)  7/4pi*C_3*P_3(M) ....   65/4pi*C_32*P_32(M)]

In [53]:
print hatC3  # anafast generated C_l for l = 3 from sky map
print C3     # CAMB generated C_3, Boltzmann code output


6.49352544114e-10
5.88275e-10

In [54]:
print alm3_squared  # a_3m extracted from sky map, absolute value, squared


[  1.25873313e-09   4.79243265e-17   5.86460794e-13   7.27453196e-10]

In [55]:
np.set_printoptions(threshold=100000)  # Default is threshold=1000
## Use this to print all values, disables corner printing

In [56]:
# print out all matrices and parts of the logLF
print vary_x_samples1  # generated from 1e-10 to 9e-10


[  1.00000000e-10   1.42105263e-10   1.84210526e-10   2.26315789e-10
   2.68421053e-10   3.10526316e-10   3.52631579e-10   3.94736842e-10
   4.36842105e-10   4.78947368e-10   5.21052632e-10   5.63157895e-10
   6.05263158e-10   6.47368421e-10   6.89473684e-10   7.31578947e-10
   7.73684211e-10   8.15789474e-10   8.57894737e-10   9.00000000e-10]

In [57]:
print Npix2pi # LogLF constant


19301.9452637

In [58]:
sigma2 = np.logspace(-12, -14, num=30 ) #set default num = 30
print sigma2


[  1.00000000e-12   8.53167852e-13   7.27895384e-13   6.21016942e-13
   5.29831691e-13   4.52035366e-13   3.85662042e-13   3.29034456e-13
   2.80721620e-13   2.39502662e-13   2.04335972e-13   1.74332882e-13
   1.48735211e-13   1.26896100e-13   1.08263673e-13   9.23670857e-14
   7.88046282e-14   6.72335754e-14   5.73615251e-14   4.89390092e-14
   4.17531894e-14   3.56224789e-14   3.03919538e-14   2.59294380e-14
   2.21221629e-14   1.88739182e-14   1.61026203e-14   1.37382380e-14
   1.17210230e-14   1.00000000e-14]

In [59]:
print 7*hatC3
print hatC3


4.5454678088e-09
6.49352544114e-10

In [60]:
# Second test with rescaled values
vary_x_samples3 = np.logspace(-8, -12, num=30) #num = 30 
sigma3 = np.logspace(-12, -14, num=30)

In [61]:
print vary_x_samples3
print sigma3


[  1.00000000e-08   7.27895384e-09   5.29831691e-09   3.85662042e-09
   2.80721620e-09   2.04335972e-09   1.48735211e-09   1.08263673e-09
   7.88046282e-10   5.73615251e-10   4.17531894e-10   3.03919538e-10
   2.21221629e-10   1.61026203e-10   1.17210230e-10   8.53167852e-11
   6.21016942e-11   4.52035366e-11   3.29034456e-11   2.39502662e-11
   1.74332882e-11   1.26896100e-11   9.23670857e-12   6.72335754e-12
   4.89390092e-12   3.56224789e-12   2.59294380e-12   1.88739182e-12
   1.37382380e-12   1.00000000e-12]
[  1.00000000e-12   8.53167852e-13   7.27895384e-13   6.21016942e-13
   5.29831691e-13   4.52035366e-13   3.85662042e-13   3.29034456e-13
   2.80721620e-13   2.39502662e-13   2.04335972e-13   1.74332882e-13
   1.48735211e-13   1.26896100e-13   1.08263673e-13   9.23670857e-14
   7.88046282e-14   6.72335754e-14   5.73615251e-14   4.89390092e-14
   4.17531894e-14   3.56224789e-14   3.03919538e-14   2.59294380e-14
   2.21221629e-14   1.88739182e-14   1.61026203e-14   1.37382380e-14
   1.17210230e-14   1.00000000e-14]

In [62]:
# For N matrix, set the identity
id_mat = np.identity(3072)
# print id_mat    # This is a (3072, 3072) matrix

In [63]:
noiseresult = sigma2[:, None, None] * id_mat[None, :, :]
#print noiseresult

In [64]:
# Now, rescale
#
# Multiply the CMB maps by 1e6 and the C matrix entries by 1e12 and try again.
#
tempp = (1e6)*tempval # multiply CMB maps by 1e6

def LogLikehood_wNoise_1e12(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e12)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e12)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi

In [ ]:


In [65]:
print sigma3
print "*************"
print vary_x_samples3


[  1.00000000e-12   8.53167852e-13   7.27895384e-13   6.21016942e-13
   5.29831691e-13   4.52035366e-13   3.85662042e-13   3.29034456e-13
   2.80721620e-13   2.39502662e-13   2.04335972e-13   1.74332882e-13
   1.48735211e-13   1.26896100e-13   1.08263673e-13   9.23670857e-14
   7.88046282e-14   6.72335754e-14   5.73615251e-14   4.89390092e-14
   4.17531894e-14   3.56224789e-14   3.03919538e-14   2.59294380e-14
   2.21221629e-14   1.88739182e-14   1.61026203e-14   1.37382380e-14
   1.17210230e-14   1.00000000e-14]
*************
[  1.00000000e-08   7.27895384e-09   5.29831691e-09   3.85662042e-09
   2.80721620e-09   2.04335972e-09   1.48735211e-09   1.08263673e-09
   7.88046282e-10   5.73615251e-10   4.17531894e-10   3.03919538e-10
   2.21221629e-10   1.61026203e-10   1.17210230e-10   8.53167852e-11
   6.21016942e-11   4.52035366e-11   3.29034456e-11   2.39502662e-11
   1.74332882e-11   1.26896100e-11   9.23670857e-12   6.72335754e-12
   4.89390092e-12   3.56224789e-12   2.59294380e-12   1.88739182e-12
   1.37382380e-12   1.00000000e-12]

In [66]:
print vary_x_samples3
print sigma3


[  1.00000000e-08   7.27895384e-09   5.29831691e-09   3.85662042e-09
   2.80721620e-09   2.04335972e-09   1.48735211e-09   1.08263673e-09
   7.88046282e-10   5.73615251e-10   4.17531894e-10   3.03919538e-10
   2.21221629e-10   1.61026203e-10   1.17210230e-10   8.53167852e-11
   6.21016942e-11   4.52035366e-11   3.29034456e-11   2.39502662e-11
   1.74332882e-11   1.26896100e-11   9.23670857e-12   6.72335754e-12
   4.89390092e-12   3.56224789e-12   2.59294380e-12   1.88739182e-12
   1.37382380e-12   1.00000000e-12]
[  1.00000000e-12   8.53167852e-13   7.27895384e-13   6.21016942e-13
   5.29831691e-13   4.52035366e-13   3.85662042e-13   3.29034456e-13
   2.80721620e-13   2.39502662e-13   2.04335972e-13   1.74332882e-13
   1.48735211e-13   1.26896100e-13   1.08263673e-13   9.23670857e-14
   7.88046282e-14   6.72335754e-14   5.73615251e-14   4.89390092e-14
   4.17531894e-14   3.56224789e-14   3.03919538e-14   2.59294380e-14
   2.21221629e-14   1.88739182e-14   1.61026203e-14   1.37382380e-14
   1.17210230e-14   1.00000000e-14]

In [67]:
xaxis3 = vary_x_samples3
yaxis3 = LogLikehood_wNoise_1e12(vary_x_samples3, sigma3)


fig3 = plt.figure()
plt.plot(xaxis3, yaxis3)
plt.xscale("log")
fig3.suptitle('Real-space LF vs C_3 values')
plt.xlabel('C_3 values, num=30, logspace from 1e-8 to 1e-12')
plt.ylabel('-2 ln likelihood L(T|C_3)')
plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()

print "vary_x_samples3 = np.logspace(-8, -12, num=30) #num = 30"
print "sigma3 = np.logspace(-12, -14, num=30)"
print "LogLikehood_wNoise_1e12(vary_x_samples3, sigma3)"
print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


vary_x_samples3 = np.logspace(-8, -12, num=30) #num = 30
sigma3 = np.logspace(-12, -14, num=30)
LogLikehood_wNoise_1e12(vary_x_samples3, sigma3)
|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [68]:
print vary_x_samples3
print LogLikehood_wNoise_1e12(vary_x_samples3, sigma3)


[  1.00000000e-08   7.27895384e-09   5.29831691e-09   3.85662042e-09
   2.80721620e-09   2.04335972e-09   1.48735211e-09   1.08263673e-09
   7.88046282e-10   5.73615251e-10   4.17531894e-10   3.03919538e-10
   2.21221629e-10   1.61026203e-10   1.17210230e-10   8.53167852e-11
   6.21016942e-11   4.52035366e-11   3.29034456e-11   2.39502662e-11
   1.74332882e-11   1.26896100e-11   9.23670857e-12   6.72335754e-12
   4.89390092e-12   3.56224789e-12   2.59294380e-12   1.88739182e-12
   1.37382380e-12   1.00000000e-12]
[  7.13033005e+06   8.35364775e+06   9.78758542e+06   1.14683913e+07
   1.34385519e+07   1.57478658e+07   1.84547024e+07   2.16274760e+07
   2.53463756e+07   2.97053908e+07   3.48146872e+07   4.08033900e+07
   4.78228472e+07   5.60504544e+07   6.56941374e+07   7.69976068e+07
   9.02465162e+07   1.05775682e+08   1.23977542e+08   1.45312075e+08
   1.70318426e+08   1.99628525e+08   2.33983064e+08   2.74250209e+08
   3.21447543e+08   3.76767782e+08   4.41608915e+08   5.17609537e+08
   6.06690257e+08   7.11102239e+08]

In [69]:
#
# Plot noise sigma from e-20 to e-22
#
tempp = (1e6)*tempval # multiply CMB maps by 1e6

def LogLikehood_wNoise_1e20(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e20)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e20)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi

#
# Plot noise sigma from e-20 to e-22
#
vary_x_samples12 = np.logspace(-8, -12, num=40) #num = 40
sigma12 = np.logspace(-20, -22, num=40)

xaxis12 = vary_x_samples12
yaxis12 = LogLikehood_wNoise_1e20(vary_x_samples12, sigma12)

fig12 = plt.figure()
plt.plot(xaxis12, yaxis12)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-8 to 1e-12')
plt.ylabel('-2 ln likelihood L(T|C_3)')
plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()

print "vary_x_samples12 = np.logspace(-8, -12, num=40) #num = 40"
print "sigma12 = np.logspace(-20, -22, num=40)"
print "LogLikehood_wNoise_1e20(vary_x_samples12, sigma12)"
print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


vary_x_samples12 = np.logspace(-8, -12, num=40) #num = 40
sigma12 = np.logspace(-20, -22, num=40)
LogLikehood_wNoise_1e20(vary_x_samples12, sigma12)
|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [70]:
print vary_x_samples12
print LogLikehood_wNoise_1e20(vary_x_samples12, sigma12)


[  1.00000000e-08   7.89652287e-09   6.23550734e-09   4.92388263e-09
   3.88815518e-09   3.07029063e-09   2.42446202e-09   1.91448198e-09
   1.51177507e-09   1.19377664e-09   9.42668455e-10   7.44380301e-10
   5.87801607e-10   4.64158883e-10   3.66524124e-10   2.89426612e-10
   2.28546386e-10   1.80472177e-10   1.42510267e-10   1.12533558e-10
   8.88623816e-11   7.01703829e-11   5.54102033e-11   4.37547938e-11
   3.45510729e-11   2.72833338e-11   2.15443469e-11   1.70125428e-11
   1.34339933e-11   1.06081836e-11   8.37677640e-12   6.61474064e-12
   5.22334507e-12   4.12462638e-12   3.25702066e-12   2.57191381e-12
   2.03091762e-12   1.60371874e-12   1.26638017e-12   1.00000000e-12]
[  7.27691114e+06   8.16795180e+06   9.16922170e+06   1.02974838e+07
   1.15663165e+07   1.29939553e+07   1.46026222e+07   1.64116138e+07
   1.84488568e+07   2.07409440e+07   2.33187451e+07   2.62201503e+07
   2.94881759e+07   3.31626926e+07   3.72983460e+07   4.19535599e+07
   4.71921318e+07   5.30870411e+07   5.97203796e+07   6.71863775e+07
   7.55881233e+07   8.50415290e+07   9.56802979e+07   1.07653711e+08
   1.21125729e+08   1.36287768e+08   1.53350921e+08   1.72551199e+08
   1.94158955e+08   2.18475972e+08   2.45840175e+08   2.76633026e+08
   3.11286032e+08   3.50281019e+08   3.94166775e+08   4.43550256e+08
   4.99122925e+08   5.61662545e+08   6.32040136e+08   7.11239239e+08]

In [71]:
#
# Plot noise sigma from e-21 to e-23
#
tempp = (1e6)*tempval # multiply CMB maps by 1e6

def LogLikehood_wNoise_1e21(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e21)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e21)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi

#
# Plot noise sigma from e-21 to e-23
#
vary_x_samples125 = np.logspace(-8, -12, num=40) #num = 40
sigma125 = np.logspace(-21, -23, num=40)

xaxis125 = vary_x_samples125
yaxis125 = LogLikehood_wNoise_1e21(vary_x_samples125, sigma125)

fig125 = plt.figure()
plt.plot(xaxis125, yaxis125)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-8 to 1e-12')
plt.ylabel('-2 ln likelihood L(T|C_3)')
plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()

print "vary_x_samples12 = np.logspace(-8, -12, num=40) #num = 40"
print "sigma12 = np.logspace(-21, -23, num=40)"
print "LogLikehood_wNoise_1e20(vary_x_samples12, sigma12)"
print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


vary_x_samples12 = np.logspace(-8, -12, num=40) #num = 40
sigma12 = np.logspace(-21, -23, num=40)
LogLikehood_wNoise_1e20(vary_x_samples12, sigma12)
|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [72]:
vary_x_samples125 = np.logspace(-8, -12, num=40) #num = 40
sigma125 = np.logspace(-21, -23, num=40)

Sij = vary_x_samples125[:, None, None] * norm_matrix[1][None, :, :]
newSij = (1e21)*Sij   # multiply S_ij by 1e12
Nij = sigma125[:, None, None] * id_mat[None, :, :]
newNij = (1e21)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[  9.45804805e+06   1.01483017e+07   1.10268075e+07   1.20673659e+07
   1.32689374e+07   1.46133094e+07   1.61825120e+07   1.79600153e+07
   1.99559126e+07   2.22256705e+07   2.47646632e+07   2.76424156e+07
   3.08855750e+07   3.45339903e+07   3.86587049e+07   4.33120503e+07
   4.85288998e+07   5.44131447e+07   6.10398347e+07   6.84991954e+07
   7.68939251e+07   8.63276615e+07   9.69909206e+07   1.08947713e+08
   1.22424263e+08   1.37576408e+08   1.54634394e+08   1.73828119e+08
   1.95434208e+08   2.19747753e+08   2.47115351e+08   2.77910405e+08
   3.12543652e+08   3.51552581e+08   3.95426692e+08   4.44811316e+08
   5.00390442e+08   5.62931595e+08   6.33312474e+08   7.12509516e+08]
******************
logdetC terms are
[   234.83595269   -125.48871852   -486.73629436   -848.73844692
  -1211.04772189  -1573.36162977  -1936.79205007  -2300.35381787
  -2662.82613924  -3026.67180777  -3389.29604324  -3752.32106286
  -4115.94072189  -4479.40823086  -4842.79499263  -5205.87784784
  -5569.53277853  -5933.18379399  -6296.59554129  -6660.37091899
  -7023.75150763  -7387.0944891   -7750.90452509  -8114.35002254
  -8477.84033831  -8841.49715615  -9205.03206405  -9568.51393667
  -9932.08545533 -10295.68073606 -10659.20465258 -11022.79171886
 -11386.33372681 -11749.98089605 -12113.47607432 -12477.02136508
 -12840.61843452 -13204.16794796 -13567.75960538 -13931.32768536]
******************
Npix2pi term is
19301.9452637

In [97]:
#
# Hold sigma^2 constant, vary C3
#
vary_x_samples125 = np.logspace(-8, -12, num=40) # C3 parameter, vary from e-08 to e-12
sigma125 = 5e-22 # chose this sigma^2 parameter, hold constant

Sij = vary_x_samples125[:, None, None] * norm_matrix[1][None, :, :]
newSij = (1e21)*Sij   # multiply S_ij by 1e12
Nij = sigma125 * id_mat[None, :, :]
newNij = (1e21)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[ 17424944.59432721  25431124.82535299  20988175.41728179  18788639.9688897
  17380549.27187007  16487860.85815524  15909766.35792065
  15497517.16395973  15189182.91137147  14972008.39197645
  14793336.73669815  14664017.69568876  14568322.70447167  14492458.7918589
  14433833.23682215  14389138.7429928   14352751.6784684   14325011.10904521
  14302605.58943325  14285747.28930357  14272097.84990945
  14261432.05420008  14253089.98221404  14246570.26856775
  14241465.54843466  14237157.87713204  14234018.59879179
  14231438.17048982  14229450.48704928  14227802.58328702
  14226586.63568589  14225606.63455151  14224772.51924103
  14224178.22232257  14223689.86871848  14223295.19405429
  14222996.47245571  14222746.69923505  14222557.92339739
  14222409.72199761]
******************
logdetC terms are
[-1910.79269502 -1899.49346143 -1894.31400072 -1893.9865907  -1890.0321336
 -1889.55785882 -1889.79498435 -1890.52858638 -1891.04930553 -1892.56437866
 -1893.64297778 -1895.06109716 -1896.61902812 -1898.36601353 -1899.78459156
 -1901.39518245 -1903.05163531 -1904.67733691 -1906.32176722 -1907.95386497
 -1909.59522293 -1911.21625436 -1912.86998408 -1914.50988852 -1916.15690315
 -1917.81120861 -1919.462296   -1921.10832586 -1922.76238417 -1924.41349353
 -1926.06744684 -1927.71856858 -1929.37104233 -1931.02466563 -1932.67642731
 -1934.32805631 -1935.98147704 -1937.63429343 -1939.28755537 -1940.94054106]
******************
Npix2pi term is
19301.9452637

In [ ]:


In [ ]:


In [73]:
print "vary_x_samples125 = np.logspace(-8, -12, num=40) #num = 40"
print "sigma125 = np.logspace(-21, -23, num=40)"
print "LogLikehood_wNoise_1e21(vary_x_samples125, sigma125)"
print vary_x_samples125
print LogLikehood_wNoise_1e21(vary_x_samples125, sigma125)


vary_x_samples125 = np.logspace(-8, -12, num=40) #num = 40
sigma125 = np.logspace(-21, -23, num=40)
LogLikehood_wNoise_1e21(vary_x_samples125, sigma125)
[  1.00000000e-08   7.89652287e-09   6.23550734e-09   4.92388263e-09
   3.88815518e-09   3.07029063e-09   2.42446202e-09   1.91448198e-09
   1.51177507e-09   1.19377664e-09   9.42668455e-10   7.44380301e-10
   5.87801607e-10   4.64158883e-10   3.66524124e-10   2.89426612e-10
   2.28546386e-10   1.80472177e-10   1.42510267e-10   1.12533558e-10
   8.88623816e-11   7.01703829e-11   5.54102033e-11   4.37547938e-11
   3.45510729e-11   2.72833338e-11   2.15443469e-11   1.70125428e-11
   1.34339933e-11   1.06081836e-11   8.37677640e-12   6.61474064e-12
   5.22334507e-12   4.12462638e-12   3.25702066e-12   2.57191381e-12
   2.03091762e-12   1.60371874e-12   1.26638017e-12   1.00000000e-12]
[  9.47758483e+06   1.01674782e+07   1.10456227e+07   1.20858191e+07
   1.32870283e+07   1.46310379e+07   1.61998771e+07   1.79770169e+07
   1.99725517e+07   2.22419458e+07   2.47805759e+07   2.76579653e+07
   3.09007610e+07   3.45488128e+07   3.86731641e+07   4.33261464e+07
   4.85426322e+07   5.44265135e+07   6.10528400e+07   6.85118369e+07
   7.69062033e+07   8.63395763e+07   9.70024716e+07   1.08958900e+08
   1.22435088e+08   1.37586869e+08   1.54644491e+08   1.73837852e+08
   1.95443578e+08   2.19756759e+08   2.47123994e+08   2.77918684e+08
   3.12551568e+08   3.51560133e+08   3.95433881e+08   4.44818141e+08
   5.00396903e+08   5.62937693e+08   6.33318209e+08   7.12514886e+08]

In [74]:
#
# Plot noise sigma from e-22 to e-24
#

def LogLikehood_wNoise_1e22(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e22)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e22)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (40, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi


vary_x_samples123 = np.logspace(-8, -12, num=40) #num = 40
sigma123 = np.logspace(-22, -24, num=40)

xaxis123 = vary_x_samples123
yaxis123 = LogLikehood_wNoise_1e22(vary_x_samples123, sigma123)

fig123 = plt.figure()
plt.plot(xaxis123, yaxis123)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-8 to 1e-12')
plt.ylabel('-2 ln likelihood L(T|C_3)')
plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()

print "vary_x_samples123 = np.logspace(-8, -12, num=40) #num = 40"
print "sigma123 = np.logspace(-22, -24, num=40)"
print "LogLikehood_wNoise_1e22(vary_x_samples123, sigma123)"
print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


vary_x_samples123 = np.logspace(-8, -12, num=40) #num = 40
sigma123 = np.logspace(-22, -24, num=40)
LogLikehood_wNoise_1e22(vary_x_samples123, sigma123)
|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [75]:
vary_x_samples123 = np.logspace(-8, -12, num=40) #num = 40
sigma123 = np.logspace(-22, -24, num=40)

#
# Plot noise sigma from e-22 to e-24
#


Sij = vary_x_samples123 [:, None, None] * norm_matrix[1][None, :, :]
newSij = (1e22)*Sij   # multiply S_ij by 1e12
Nij = sigma123[:, None, None] * id_mat[None, :, :]
newNij = (1e22)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 

print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[  1.16534465e+06   5.03576814e+06   1.18842066e+06   3.78456972e+06
   2.07230699e+06   1.16453569e+06  -7.41348447e+05  -3.92254528e+06
  -9.93526825e+06  -2.29984553e+07  -5.63527744e+07  -3.44097793e+08
   2.13719373e+08   1.10203181e+08   9.28265870e+07   8.26463737e+07
   8.06037248e+07   8.18050050e+07   8.54633954e+07   8.98256378e+07
   9.79018046e+07   1.05962829e+08   1.15393283e+08   1.26566259e+08
   1.39139646e+08   1.53686968e+08   1.70516270e+08   1.89068539e+08
   2.10547663e+08   2.34462101e+08   2.61421501e+08   2.92204648e+08
   3.26641827e+08   3.65300144e+08   4.09114198e+08   4.58308672e+08
   5.13744893e+08   5.76396127e+08   6.46599075e+08   7.25782771e+08]
******************
logdetC terms are
[  -410.62898964   -672.91913446   -926.22503662  -1169.34164618
  -1445.18261905  -1738.08384536  -2079.4620371   -2399.02656433
  -2735.89491794  -3080.90524843  -3433.09248454  -3787.30516381
  -4142.67275289  -4502.70581932  -4854.37183367  -5211.64589642
  -5574.580628    -5931.36044619  -6295.41523156  -6660.63863576
  -7019.91756434  -7381.27969271  -7740.57086979  -8102.90872247
  -8466.51398412  -8828.15412569  -9191.80550759  -9554.54294936
  -9917.99595215 -10281.00471672 -10644.32030792 -11007.88053013
 -11371.45867768 -11734.62772477 -12098.20386412 -12461.47921611
 -12824.82116055 -13188.76508825 -13552.04849419 -13915.45159908]
******************
Npix2pi term is
19301.9452637

In [99]:
#
# Hold sigma^2 constant, vary C3
#
vary_x_samples123 = np.logspace(-8, -12, num=40) # C3 parameter, vary from e-08 to e-12
sigma123 = 5e-22 # chose this sigma^2 parameter, hold constant
#
# Plot noise sigma from e-22 to e-24
#


Sij = vary_x_samples123 [:, None, None] * norm_matrix[1][None, :, :]
newSij = (1e22)*Sij   # multiply S_ij by 1e12
Nij = sigma123 * id_mat[None, :, :]
newNij = (1e22)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 

print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[ 2931868.99027531  2561423.09736563  2092876.99077943  1876397.00384942
  1740382.76555146  1650113.62054166  1589940.751404    1549446.41704495
  1519557.92056381  1496555.37291566  1479682.09032262  1466653.76019673
  1456837.51067493  1449220.46772181  1443528.73129329  1438909.39635894
  1435211.46326468  1432468.39780895  1430277.65089106  1428567.73979521
  1427238.59379346  1426165.74999515  1425328.54426065  1424671.52510419
  1424138.61076047  1423723.85218339  1423394.01406752  1423137.00097182
  1422944.56767479  1422779.35144192  1422656.21763844  1422559.03605542
  1422480.3001671   1422414.48572627  1422367.35435307  1422328.77614777
  1422298.52139999  1422275.64094104  1422255.88586292  1422241.38234343]
******************
logdetC terms are
[ 5162.11916985  5174.89668376  5179.54501215  5178.57071109  5182.81051551
  5184.46873299  5183.19398907  5183.19703352  5182.37013867  5181.01067721
  5179.97913256  5178.52226416  5176.80653401  5175.16623507  5173.76193185
  5172.11691893  5170.45643259  5168.87473124  5167.23262542  5165.6055479
  5163.95696043  5162.3116691   5160.67301514  5159.02720484  5157.37926926
  5155.73010752  5154.07978174  5152.43292945  5150.77874406  5149.12856074
  5147.47438028  5145.82296635  5144.17003878  5142.51772009  5140.86502619
  5139.21290009  5137.56004832  5135.9069266   5134.25393896  5132.60101615]
******************
Npix2pi term is
19301.9452637

In [ ]:


In [ ]:


In [76]:
print vary_x_samples123
print LogLikehood_wNoise_1e22(vary_x_samples123, sigma123)


[  1.00000000e-08   7.89652287e-09   6.23550734e-09   4.92388263e-09
   3.88815518e-09   3.07029063e-09   2.42446202e-09   1.91448198e-09
   1.51177507e-09   1.19377664e-09   9.42668455e-10   7.44380301e-10
   5.87801607e-10   4.64158883e-10   3.66524124e-10   2.89426612e-10
   2.28546386e-10   1.80472177e-10   1.42510267e-10   1.12533558e-10
   8.88623816e-11   7.01703829e-11   5.54102033e-11   4.37547938e-11
   3.45510729e-11   2.72833338e-11   2.15443469e-11   1.70125428e-11
   1.34339933e-11   1.06081836e-11   8.37677640e-12   6.61474064e-12
   5.22334507e-12   4.12462638e-12   3.25702066e-12   2.57191381e-12
   2.03091762e-12   1.60371874e-12   1.26638017e-12   1.00000000e-12]
[  1.18423596e+06   5.05439716e+06   1.20679638e+06   3.80270233e+06
   2.09016375e+06   1.18209955e+06  -7.24125963e+05  -3.90564236e+06
  -9.91870220e+06  -2.29822343e+07  -5.63369055e+07  -3.44082279e+08
   2.13734532e+08   1.10217980e+08   9.28410346e+07   8.26604640e+07
   8.06174522e+07   8.18183755e+07   8.54764019e+07   8.98382791e+07
   9.79140866e+07   1.05974750e+08   1.15404844e+08   1.26577458e+08
   1.39150482e+08   1.53697442e+08   1.70526380e+08   1.89078287e+08
   2.10557047e+08   2.34471122e+08   2.61430158e+08   2.92212942e+08
   3.26649758e+08   3.65307711e+08   4.09121401e+08   4.58315512e+08
   5.13751370e+08   5.76402240e+08   6.46604825e+08   7.25788157e+08]

In [77]:
#
# Plot noise sigma from e-23 to e-25
#

def LogLikehood_wNoise_1e23(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e23)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e23)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi


vary_x_samples13 = np.logspace(-8, -12, num=40) #num = 40
sigma13 = np.logspace(-23, -25, num=40)

xaxis13 = vary_x_samples13
yaxis13 = LogLikehood_wNoise_1e23(vary_x_samples13, sigma13)

fig13 = plt.figure()
plt.plot(xaxis13, yaxis13)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-8 to 1e-12')
plt.ylabel('-2 ln likelihood L(T|C_3)')
plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()

print "vary_x_samples13 = np.logspace(-8, -12, num=40) #num = 40"
print "sigma13 = np.logspace(-23, -25, num=40)"
print "LogLikehood_wNoise_1e23(vary_x_samples13, sigma13)"
print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


vary_x_samples13 = np.logspace(-8, -12, num=40) #num = 40
sigma13 = np.logspace(-23, -25, num=40)
LogLikehood_wNoise_1e23(vary_x_samples13, sigma13)
|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [78]:
print "vary_x_samples13 = np.logspace(-8, -12, num=40) #num = 40"
print "sigma13 = np.logspace(-23, -25, num=40)"
print "LogLikehood_wNoise_1e23(vary_x_samples13, sigma13)"
print vary_x_samples13
print LogLikehood_wNoise_1e23(vary_x_samples13, sigma13)


vary_x_samples13 = np.logspace(-8, -12, num=40) #num = 40
sigma13 = np.logspace(-23, -25, num=40)
LogLikehood_wNoise_1e23(vary_x_samples13, sigma13)
[  1.00000000e-08   7.89652287e-09   6.23550734e-09   4.92388263e-09
   3.88815518e-09   3.07029063e-09   2.42446202e-09   1.91448198e-09
   1.51177507e-09   1.19377664e-09   9.42668455e-10   7.44380301e-10
   5.87801607e-10   4.64158883e-10   3.66524124e-10   2.89426612e-10
   2.28546386e-10   1.80472177e-10   1.42510267e-10   1.12533558e-10
   8.88623816e-11   7.01703829e-11   5.54102033e-11   4.37547938e-11
   3.45510729e-11   2.72833338e-11   2.15443469e-11   1.70125428e-11
   1.34339933e-11   1.06081836e-11   8.37677640e-12   6.61474064e-12
   5.22334507e-12   4.12462638e-12   3.25702066e-12   2.57191381e-12
   2.03091762e-12   1.60371874e-12   1.26638017e-12   1.00000000e-12]
[  3.77426185e+04  -4.71884682e+04   2.19525717e+06  -1.83104245e+06
   1.44077742e+06  -9.30100768e+05  -7.76671075e+05  -1.26936525e+05
   1.28253827e+07   9.68504741e+06  -8.76538959e+05  -1.47451062e+07
   5.16853388e+06   1.03687383e+07   2.63644475e+07  -5.73003279e+07
  -5.65150956e+06   4.77383307e+06  -1.02381080e+07   1.69372916e+07
  -2.72128212e+06   3.31160649e+07   3.90890261e+07   2.98301784e+07
   1.85441770e+07   3.69278683e+06  -2.01159266e+07  -6.85478208e+07
  -1.52581669e+08  -3.75382278e+08  -1.10801369e+09   6.65028850e+09
   1.37094621e+09   8.56374267e+08   8.52803804e+08   8.11743764e+08
   8.07777142e+08   8.37763508e+08   8.79143712e+08   9.46798929e+08]

In [79]:
#
# Plot noise sigma from e-24 to e-26
#

def LogLikehood_wNoise_1e24(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e24)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e24)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi



vary_x_samples14 = np.logspace(-8, -12, num=40) #num = 40
sigma14 = np.logspace(-24, -26, num=40)

xaxis14 = vary_x_samples14
yaxis14 = LogLikehood_wNoise_1e24(vary_x_samples14, sigma14)



fig14 = plt.figure()
plt.plot(xaxis14, yaxis14)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-8 to 1e-12')
plt.ylabel('-2 ln likelihood L(T|C_3)')
plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()


print "vary_x_samples14 = np.logspace(-8, -12, num=40) #num = 40"
print "sigma14 = np.logspace(-24, -26, num=40)"
print "LogLikehood_wNoise(vary_x_samples14, sigma14)"
print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


vary_x_samples14 = np.logspace(-8, -12, num=40) #num = 40
sigma14 = np.logspace(-24, -26, num=40)
LogLikehood_wNoise(vary_x_samples14, sigma14)
|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [80]:
print vary_x_samples14
print LogLikehood_wNoise_1e24(vary_x_samples14, sigma14)


[  1.00000000e-08   7.89652287e-09   6.23550734e-09   4.92388263e-09
   3.88815518e-09   3.07029063e-09   2.42446202e-09   1.91448198e-09
   1.51177507e-09   1.19377664e-09   9.42668455e-10   7.44380301e-10
   5.87801607e-10   4.64158883e-10   3.66524124e-10   2.89426612e-10
   2.28546386e-10   1.80472177e-10   1.42510267e-10   1.12533558e-10
   8.88623816e-11   7.01703829e-11   5.54102033e-11   4.37547938e-11
   3.45510729e-11   2.72833338e-11   2.15443469e-11   1.70125428e-11
   1.34339933e-11   1.06081836e-11   8.37677640e-12   6.61474064e-12
   5.22334507e-12   4.12462638e-12   3.25702066e-12   2.57191381e-12
   2.03091762e-12   1.60371874e-12   1.26638017e-12   1.00000000e-12]
[  4.14423224e+04  -2.68221082e+04  -4.24429210e+05  -2.10578450e+06
   2.47135612e+05  -6.06707744e+04   3.91217048e+05  -1.29760275e+06
   1.25524330e+05  -1.23160175e+05  -4.78897027e+05  -2.65982811e+06
   8.85507312e+05  -6.25246332e+04  -2.29375944e+06   2.61646465e+06
  -2.22803779e+06   6.65701789e+05  -2.13840424e+07  -2.77597765e+06
   2.69505537e+07   3.57189263e+06   2.94454370e+07  -9.01073786e+05
   1.92229256e+07  -7.72763940e+06  -9.09225749e+05  -3.74768615e+06
   6.73825231e+07  -5.52113110e+07   1.31605305e+08   3.62730958e+06
  -1.00644883e+06  -2.28428434e+07   3.15372407e+09  -1.91160005e+08
  -1.14152165e+08  -1.95995668e+08   1.54450820e+08   9.85681092e+08]

In [81]:
print hatC3  # anafast generated C_l for l = 3 from sky map
print 7*hatC3
print C3     # CAMB generated C_3, Boltzmann code output
print alm3_squared


6.49352544114e-10
4.5454678088e-09
5.88275e-10
[  1.25873313e-09   4.79243265e-17   5.86460794e-13   7.27453196e-10]

In [82]:
print tempval.shape


(3072,)

In [83]:
np.logspace(-8, -10, num=40)


Out[83]:
array([  1.00000000e-08,   8.88623816e-09,   7.89652287e-09,
         7.01703829e-09,   6.23550734e-09,   5.54102033e-09,
         4.92388263e-09,   4.37547938e-09,   3.88815518e-09,
         3.45510729e-09,   3.07029063e-09,   2.72833338e-09,
         2.42446202e-09,   2.15443469e-09,   1.91448198e-09,
         1.70125428e-09,   1.51177507e-09,   1.34339933e-09,
         1.19377664e-09,   1.06081836e-09,   9.42668455e-10,
         8.37677640e-10,   7.44380301e-10,   6.61474064e-10,
         5.87801607e-10,   5.22334507e-10,   4.64158883e-10,
         4.12462638e-10,   3.66524124e-10,   3.25702066e-10,
         2.89426612e-10,   2.57191381e-10,   2.28546386e-10,
         2.03091762e-10,   1.80472177e-10,   1.60371874e-10,
         1.42510267e-10,   1.26638017e-10,   1.12533558e-10,
         1.00000000e-10])

In [84]:
# Plot C_3 from e-08 to e-10
# Plot noise sigma from e-22 to e-23
#

def LogLikehood_wNoise_1e22(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e22)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e22)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi


vary_x_samples15 = np.logspace(-8, -10, num=40) #num = 40
sigma15 = np.logspace(-22, -23, num=40)

xaxis15 = vary_x_samples15
yaxis15 = LogLikehood_wNoise_1e22(vary_x_samples15, sigma15)

fig15 = plt.figure()
plt.plot(xaxis15, yaxis15)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-9 to 1e-10')
plt.ylabel('-2 ln likelihood L(T|C_3)')
#plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
#plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
#plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
#plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
#plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()


print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [85]:
print vary_x_samples15
print LogLikehood_wNoise_1e22(vary_x_samples15, sigma15)


[  1.00000000e-08   8.88623816e-09   7.89652287e-09   7.01703829e-09
   6.23550734e-09   5.54102033e-09   4.92388263e-09   4.37547938e-09
   3.88815518e-09   3.45510729e-09   3.07029063e-09   2.72833338e-09
   2.42446202e-09   2.15443469e-09   1.91448198e-09   1.70125428e-09
   1.51177507e-09   1.34339933e-09   1.19377664e-09   1.06081836e-09
   9.42668455e-10   8.37677640e-10   7.44380301e-10   6.61474064e-10
   5.87801607e-10   5.22334507e-10   4.64158883e-10   4.12462638e-10
   3.66524124e-10   3.25702066e-10   2.89426612e-10   2.57191381e-10
   2.28546386e-10   2.03091762e-10   1.80472177e-10   1.60371874e-10
   1.42510267e-10   1.26638017e-10   1.12533558e-10   1.00000000e-10]
[  1.18423596e+06  -1.24653693e+04   5.05439716e+06   3.16566061e+06
   1.20679638e+06   8.44245200e+06   3.80270233e+06   3.26303568e+06
   2.09016375e+06   2.01331259e+06   1.18209955e+06   3.29437040e+05
  -7.24125963e+05  -1.92847638e+06  -3.90564236e+06  -6.64634227e+06
  -9.91870220e+06  -1.49515512e+07  -2.29822343e+07  -3.43676154e+07
  -5.63369055e+07  -1.16912692e+08  -3.44082279e+08   1.63685829e+08
   2.13734532e+08   1.38908195e+08   1.10217980e+08   8.53769658e+07
   9.28410346e+07   8.53345465e+07   8.26604640e+07   8.06413908e+07
   8.06174522e+07   8.06604458e+07   8.18183755e+07   8.33685071e+07
   8.54764019e+07   8.76337626e+07   8.98382791e+07   9.44712776e+07]

In [86]:
np.logspace(-8.3, -10, num=40)


Out[86]:
array([  5.01187234e-09,   4.53325612e-09,   4.10034607e-09,
         3.70877741e-09,   3.35460219e-09,   3.03424945e-09,
         2.74448928e-09,   2.48240018e-09,   2.24533968e-09,
         2.03091762e-09,   1.83697212e-09,   1.66154773e-09,
         1.50287575e-09,   1.35935639e-09,   1.22954263e-09,
         1.11212562e-09,   1.00592153e-09,   9.09859552e-10,
         8.22971159e-10,   7.44380301e-10,   6.73294595e-10,
         6.08997324e-10,   5.50840217e-10,   4.98236910e-10,
         4.50657034e-10,   4.07620869e-10,   3.68694506e-10,
         3.33485475e-10,   3.01638783e-10,   2.72833338e-10,
         2.46778711e-10,   2.23212211e-10,   2.01896228e-10,
         1.82615847e-10,   1.65176674e-10,   1.49402882e-10,
         1.35135431e-10,   1.22230472e-10,   1.10557891e-10,
         1.00000000e-10])

In [87]:
# Plot C_3 from 7.07945784e-09 to 1.00000000e-10
# Plot noise sigma from e-22 to e-23
#

def LogLikehood_wNoise_1e22(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e22)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e22)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi


vary_x_samples16 = np.logspace(-8.3, -10, num=40) #num = 40
sigma16 = np.logspace(-22, -24, num=40)

xaxis16 = vary_x_samples16
yaxis16 = LogLikehood_wNoise_1e22(vary_x_samples16, sigma16)

fig16 = plt.figure()
plt.plot(xaxis16, yaxis16)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-9 to 1e-10')
plt.ylabel('-2 ln likelihood L(T|C_3)')
#plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
#plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
#plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
#plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
#plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()


print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [88]:
# Plot C_3 from 7.07945784e-09 to 1.00000000e-10
# Plot noise sigma from e-22 to e-23
#

def LogLikehood_wNoise_1e22(param, sig):
    # param is our parameter, C_3
    Sij = param[:, None, None] * norm_matrix[1][None, :, :]
    newSij = (1e22)*Sij   # multiply S_ij by 1e12
    Nij = sig[:, None, None] * id_mat[None, :, :]
    newNij = (1e22)*Nij
    # Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
    # Sij.shape = (20, 3072, 3072)
    Cij = newSij + newNij
    #invCij = np.linalg.inv(Cij)
    logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
    # model_fit_terms = m^T C^-1 m
    #
    # model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
    # for i in range(invCij.shape[0])])
    #
    model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
    return model_fit_terms + logdetC[1] + Npix2pi


vary_x_samples17 = np.logspace(-8.1, -10, num=40) #num = 40
sigma17 = np.logspace(-22, -23, num=40)

xaxis17 = vary_x_samples17
yaxis17 = LogLikehood_wNoise_1e22(vary_x_samples17, sigma17)

fig16 = plt.figure()
plt.plot(xaxis17, yaxis17)
plt.xscale("log")
plt.xlabel('C_3 values, num=40, logspace from 1e-9 to 1e-10')
plt.ylabel('-2 ln likelihood L(T|C_3)')
#plt.axvline(x =1.25873313e-09, color = 'g')  # a30 = 1.25873313e-09
# plt.plt.axvline(x = 4.79243265e-17 ) # a31 = 4.79243265e-17 
#plt.axvline(x =5.86460794e-13, color = 'r')  # a32 = 5.86460794e-13
#plt.axvline(x =7.27453196e-10, color = 'm')  # a33 = 7.27453196e-10
#plt.axvline(x =4.5454678088e-09, color = 'y') # 7*\hat{C}_3 = 4.5454678088e-09
#plt.axvline(x =6.49352544114e-10, color = 'k')  # \hat{C}_3 = 6.49352544114e-10
plt.grid()


print "|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black"


|a30|^2 is green, |a32|^2 is red, |a33|^2 is magenta, 7*\hat{C}_3 is yellow, \hat{C}_3 is black

In [89]:
print vary_x_samples17


[  7.94328235e-09   7.10038751e-09   6.34693576e-09   5.67343592e-09
   5.07140396e-09   4.53325612e-09   4.05221339e-09   3.62221612e-09
   3.23784765e-09   2.89426612e-09   2.58714347e-09   2.31261088e-09
   2.06721009e-09   1.84784980e-09   1.65176674e-09   1.47649088e-09
   1.31981427e-09   1.17976327e-09   1.05457367e-09   9.42668455e-10
   8.42637971e-10   7.53222140e-10   6.73294595e-10   6.01848496e-10
   5.37983840e-10   4.80896130e-10   4.29866235e-10   3.84251335e-10
   3.43476822e-10   3.07029063e-10   2.74448928e-10   2.45326007e-10
   2.19293440e-10   1.96023298e-10   1.75222448e-10   1.56628863e-10
   1.40008321e-10   1.25151453e-10   1.11871110e-10   1.00000000e-10]

In [90]:
print tempp


[ -4.00660210e+01  -3.31482152e+01  -3.18903330e+01  -3.86252832e+01
  -4.09070599e+01  -3.80054043e+01  -5.15054235e+01  -2.78335774e+01
  -3.64571897e+01  -3.56088458e+01  -3.29712057e+01  -4.72086685e+01
  -3.46885499e+01  -8.62572870e+00  -4.25201470e+01  -8.23425726e+01
  -4.90070634e+01  -2.15932214e+01  -3.91514659e+01  -4.57219685e+01
  -3.55881348e+01  -2.73288642e+01  -3.24044413e+01  -4.32329507e+01
  -2.70442742e+01   3.40311999e+00   1.13238739e+01  -5.34020619e+01
  -1.08011831e+02  -8.47720148e+01  -3.91867688e+01  -1.61740827e+01
  -4.15835530e+01  -4.80873314e+01  -3.81000536e+01  -2.74326358e+01
  -1.81533815e+01  -1.12887055e+01  -2.12625200e+01  -2.85981914e+01
  -2.38236444e+01  -4.44361831e+00   2.22557555e+01   2.35198036e+00
  -6.05186542e+01  -1.11739580e+02  -1.05175597e+02  -7.29729145e+01
  -3.15950347e+01  -1.23235595e+01  -5.00592687e+01  -5.76983766e+01
  -3.34505785e+01  -2.42166570e+01  -8.64667891e+00  -1.49720074e+00
   1.29289656e+01   1.46104048e+01  -5.89111050e+00  -1.28025640e+01
  -2.16043391e+01  -1.55031466e+01   8.84273868e+00   3.07842356e+00
  -2.00584036e+01  -5.73395919e+01  -9.28959271e+01  -1.01636106e+02
  -8.66872506e+01  -6.35890246e+01  -1.96615620e+01  -9.04534045e+00
  -6.01215979e+01  -7.89859623e+01  -4.31245971e+01  -2.62405702e+01
  -9.08846232e+00   1.21840303e+01   1.54445406e+01   3.16251098e+01
   5.29979188e+01   3.44072905e+01   8.55548660e+00   4.42880349e-01
  -1.12912348e+01  -1.54818372e+01   2.65464053e-01  -9.42022325e+00
  -2.54185506e+01  -3.69834233e+01  -5.12482547e+01  -6.85945633e+01
  -8.60303480e+01  -8.09932317e+01  -6.97121432e+01  -4.73896325e+01
  -8.12451799e+00  -8.03337116e+00  -6.09610070e+01  -9.19763188e+01
  -6.67450076e+01  -4.18261952e+01  -2.74367849e+01   1.87521982e+00
   1.86844227e+01   2.21777118e+01   3.90221285e+01   6.85307095e+01
   7.64282959e+01   4.18950367e+01   2.01384664e+01   1.20103432e+01
   8.67501058e+00  -1.12812154e+00  -2.28799468e+00  -1.42622930e+01
  -3.28114584e+01  -3.65986380e+01  -4.99814487e+01  -5.68089308e+01
  -5.91452008e+01  -7.54078210e+01  -7.63361677e+01  -5.88093426e+01
  -5.12078805e+01  -3.56501878e+01  -9.98743326e+00  -1.31831803e+01
  -5.19908608e+01  -8.24165618e+01  -7.86351884e+01  -6.57245837e+01
  -4.92960717e+01  -2.50517169e+01  -1.76947788e+00   6.86265275e+00
   2.22292656e+01   4.35631118e+01   6.29062779e+01   8.56545375e+01
   7.43103010e+01   3.81363388e+01   2.44507846e+01   2.03013060e+01
   2.96366361e+01   1.83159282e+01  -5.78461459e+00  -2.19299673e+01
  -3.82700964e+01  -3.72756585e+01  -4.20574142e+01  -7.00885066e+01
  -7.70175538e+01  -6.97888900e+01  -7.61100600e+01  -8.31789075e+01
  -6.33089512e+01  -4.20280157e+01  -4.46372032e+01  -4.07155967e+01
  -2.52350837e+01  -2.35150019e+01  -4.38583920e+01  -6.38021229e+01
  -6.81678939e+01  -7.65084551e+01  -6.69500878e+01  -4.16186158e+01
  -2.66311054e+01  -1.96860892e+01  -6.46424633e+00   3.01306245e+01
   5.64681250e+01   5.73688703e+01   6.98325894e+01   8.12377184e+01
   5.50866680e+01   2.84975340e+01   2.15312230e+01   2.46816162e+01
   4.35340116e+01   3.07805240e+01  -1.16271258e+01  -3.47908790e+01
  -4.27680679e+01  -4.56485650e+01  -3.79898229e+01  -6.08705304e+01
  -9.27229266e+01  -9.59233876e+01  -8.68503339e+01  -8.10610727e+01
  -9.06661080e+01  -8.82611421e+01  -5.52905985e+01  -4.11415058e+01
  -5.39737157e+01  -5.43476053e+01  -4.00606950e+01  -3.23679014e+01
  -4.36945738e+01  -5.85525413e+01  -5.53587379e+01  -6.57041091e+01
  -7.07782019e+01  -4.21782570e+01  -3.14350618e+01  -3.86133106e+01
  -3.45734079e+01  -4.12706822e+00   4.98100853e+01   7.50145045e+01
   6.00026870e+01   5.33802049e+01   7.09494343e+01   6.55874537e+01
   3.64792941e+01   2.27385262e+01   1.95296489e+01   2.98638570e+01
   4.86580866e+01   3.33148018e+01  -1.44384194e+01  -4.31640874e+01
  -4.06223990e+01  -4.46815357e+01  -4.30719883e+01  -5.18135130e+01
  -8.59283755e+01  -1.01425510e+02  -9.70139808e+01  -9.39978345e+01
  -8.28106276e+01  -8.65598995e+01  -1.05887681e+02  -8.95323974e+01
  -5.59295368e+01  -5.42095622e+01  -6.69467918e+01  -6.24654640e+01
  -4.52790155e+01  -3.52761526e+01  -4.74034277e+01  -6.83377511e+01
  -5.95732417e+01  -5.10170685e+01  -5.93846817e+01  -3.45251683e+01
  -1.40119500e+01  -3.49051443e+01  -4.85223864e+01  -3.35173718e+01
   1.05913768e+01   6.89448279e+01   8.83977264e+01   6.58873541e+01
   4.71899766e+01   5.54317630e+01   6.93985858e+01   5.07396289e+01
   2.97953611e+01   2.28180361e+01   2.19861104e+01   3.74074152e+01
   4.34314352e+01   2.91698580e+01  -9.29680846e+00  -3.65580163e+01
  -3.10704163e+01  -3.02127792e+01  -3.78022851e+01  -4.58120812e+01
  -7.34082714e+01  -1.00335768e+02  -9.60351827e+01  -8.61010121e+01
  -9.00912419e+01  -8.36951440e+01  -7.84622898e+01  -9.98279720e+01
  -1.10287125e+02  -8.65026668e+01  -6.69158035e+01  -6.82063619e+01
  -7.22281111e+01  -6.23760425e+01  -4.26980041e+01  -3.25609253e+01
  -4.58280811e+01  -7.32715052e+01  -7.03361147e+01  -4.71800267e+01
  -4.61822601e+01  -2.99834683e+01   2.40828558e+00  -9.44181284e+00
  -4.28874555e+01  -4.78857219e+01  -2.36828182e+01   2.38146768e+01
   7.79013862e+01   9.45657739e+01   7.44374338e+01   5.56957457e+01
   4.71866733e+01   5.88749390e+01   6.46399931e+01   4.17753363e+01
   2.75427064e+01   1.99030073e+01   2.14418469e+01   3.80826787e+01
   2.47236931e+01   1.76301965e+01  -3.24505368e+00  -1.77406000e+01
  -1.56539136e+01  -1.69225059e+01  -2.61764144e+01  -3.89581692e+01
  -6.17606929e+01  -9.04090484e+01  -1.04431543e+02  -9.62289778e+01
  -8.42180598e+01  -8.61034787e+01  -8.82402746e+01  -8.11694190e+01
  -8.47603733e+01  -1.01453879e+02  -1.03670398e+02  -8.85527770e+01
  -7.48805105e+01  -6.94803093e+01  -7.04600388e+01  -5.82457469e+01
  -3.46589259e+01  -2.38265293e+01  -3.34003926e+01  -5.69581243e+01
  -6.22337466e+01  -4.30008731e+01  -3.74322808e+01  -3.20021209e+01
  -1.56255226e+00   9.05627621e+00  -1.87467849e+01  -4.53051325e+01
  -4.59091134e+01  -1.63245131e+01   3.24598805e+01   7.99961854e+01
   9.79876204e+01   8.64250469e+01   7.58811366e+01   5.98643419e+01
   4.88857149e+01   6.49009089e+01   6.14966048e+01   3.26831796e+01
   1.99863716e+01   1.27297999e+01   1.34477996e+01   2.27243308e+01
  -2.20674974e+00  -7.92933179e-01  -8.55358030e+00  -5.28886039e+00
  -2.09931727e+00  -1.50377064e+01  -2.58340751e+01  -3.62684223e+01
  -5.83426299e+01  -8.11322316e+01  -9.52526389e+01  -1.06475709e+02
  -1.10070738e+02  -9.78316966e+01  -8.80081570e+01  -9.27432920e+01
  -9.23317712e+01  -7.78062895e+01  -7.67462625e+01  -9.37955265e+01
  -9.55817595e+01  -7.92214996e+01  -6.32287629e+01  -6.22696170e+01
  -6.81654856e+01  -4.94119740e+01  -1.90844712e+01  -7.48160346e+00
  -1.19404604e+01  -2.47873359e+01  -3.13463934e+01  -2.37241420e+01
  -2.41064135e+01  -3.13155469e+01  -1.71070806e+01   2.92351774e+00
  -1.39556266e+00  -2.17580073e+01  -4.45959195e+01  -4.74450208e+01
  -9.39376241e+00   4.09474378e+01   7.77598907e+01   9.54233037e+01
   9.44410567e+01   9.39339370e+01   8.46781186e+01   6.18772610e+01
   6.54838877e+01   8.28667762e+01   5.73358702e+01   2.00562408e+01
   1.37353800e+01   1.25983952e+01   4.96893108e+00  -2.96650387e+00
  -2.19471385e+01  -1.38048936e+01  -1.97310128e+01  -8.96902202e+00
  -1.06467007e+00  -2.53348080e+01  -4.49573381e+01  -4.78944967e+01
  -6.02464715e+01  -7.75005174e+01  -8.33823287e+01  -8.71549782e+01
  -1.04292805e+02  -1.20917088e+02  -1.12202171e+02  -9.24821579e+01
  -9.12832838e+01  -9.76113442e+01  -7.99189438e+01  -5.80395536e+01
  -6.97284995e+01  -8.29715209e+01  -6.79583682e+01  -4.99780253e+01
  -4.69437073e+01  -5.85655616e+01  -5.95893471e+01  -2.85471397e+01
   4.05891478e+00   1.42429790e+01   1.25718725e+01   6.14655482e+00
   7.91614468e-01   1.58753380e+00  -4.58556133e+00  -2.22718336e+01
  -2.79066899e+01  -1.51879331e+01  -4.71378962e+00  -4.13086627e+00
  -1.57780796e+01  -4.14879214e+01  -4.35370966e+01   5.58370459e-01
   4.42357923e+01   6.55810436e+01   8.06999888e+01   9.23637344e+01
   1.03665348e+02   1.03070830e+02   8.61660956e+01   8.21204449e+01
   1.01608573e+02   9.69662506e+01   4.60946685e+01   1.12958387e+01
   1.68436618e+01   2.10097842e+01   1.40018744e+00  -2.37383611e+01
  -2.49624572e+01  -9.68654240e+00  -1.74772431e+01  -1.43550378e+01
  -9.19884951e+00  -3.81902682e+01  -6.98772346e+01  -7.52421911e+01
  -7.37914088e+01  -7.48335224e+01  -7.18042065e+01  -6.98841686e+01
  -7.67940583e+01  -9.60084217e+01  -1.17363459e+02  -1.17043841e+02
  -9.70257024e+01  -8.58757921e+01  -9.17995712e+01  -8.62718371e+01
  -6.28192574e+01  -6.00597050e+01  -7.04263948e+01  -5.45942239e+01
  -3.35074619e+01  -3.34349970e+01  -4.23601668e+01  -4.58330323e+01
  -2.74622635e+01   6.55320218e+00   2.86899267e+01   3.71005772e+01
   3.84931081e+01   3.11560143e+01   2.12348077e+01   1.69333125e+01
   9.68823224e+00  -1.24904591e+01  -3.37083402e+01  -3.39332619e+01
  -1.91557992e+01  -4.79848813e+00  -2.19557933e-01  -1.26219011e+01
  -3.59970363e+01  -3.34646465e+01   4.59347984e+00   3.38818063e+01
   4.58409595e+01   6.28022244e+01   8.66118280e+01   1.12273192e+02
   1.17157571e+02   1.00901307e+02   9.71263726e+01   1.12204783e+02
   1.20716897e+02   8.88457726e+01   3.42228741e+01   1.15378907e+01
   1.96191359e+01   2.29810048e+01  -6.45083389e-01  -3.09462303e+01
  -2.46495711e+01  -2.22158064e+00   3.31917340e+00  -7.93201525e+00
  -1.17808740e+01  -3.44036162e+01  -7.20212556e+01  -9.28209120e+01
  -9.28176887e+01  -7.83209034e+01  -5.87530158e+01  -5.45039366e+01
  -6.78807555e+01  -8.14172090e+01  -9.90447297e+01  -1.16559902e+02
  -1.10536363e+02  -8.80602893e+01  -7.84798103e+01  -8.83944303e+01
  -8.99339575e+01  -8.15328094e+01  -7.88879843e+01  -5.98236838e+01
  -3.33692733e+01  -3.31972806e+01  -3.89094312e+01  -2.73063051e+01
  -9.24242215e-01   2.87055918e+01   4.18147247e+01   5.07801633e+01
   6.50979491e+01   6.40203143e+01   4.71360945e+01   3.01433574e+01
   1.96827841e+01   2.47777820e-01  -3.12312695e+01  -4.71949897e+01
  -3.52239585e+01  -9.43527903e+00   2.30960381e+00  -8.37201969e+00
  -2.60301858e+01  -3.61475250e+01  -1.97885347e+01   8.91761556e+00
   2.69106331e+01   4.98059999e+01   7.68249738e+01   1.08705986e+02
   1.31203502e+02   1.13216935e+02   9.44324929e+01   1.02483486e+02
   1.14516894e+02   1.06535539e+02   6.54267933e+01   2.55842842e+01
   1.02864842e+01   1.14210670e+01   1.24536036e+01  -1.17725840e+01
   2.73705496e+00   1.82418207e+01   8.71380689e+00  -7.04835975e+00
  -2.49835466e+01  -5.27584962e+01  -8.28354459e+01  -9.73279239e+01
  -7.97531393e+01  -4.71305684e+01  -3.43956599e+01  -5.50046170e+01
  -7.97270695e+01  -9.03366163e+01  -1.05541978e+02  -1.11113317e+02
  -9.36911019e+01  -7.36092043e+01  -7.52617198e+01  -9.80137847e+01
  -1.10585381e+02  -1.02687860e+02  -7.62067648e+01  -4.59257935e+01
  -4.46861159e+01  -5.12216538e+01  -2.74636568e+01   1.19807619e+01
   4.45647529e+01   5.15334577e+01   5.06889373e+01   7.57338712e+01
   9.20826496e+01   8.02798968e+01   5.64926195e+01   3.31850169e+01
   1.65101283e+01  -1.04031760e+01  -3.67414614e+01  -3.85665517e+01
  -1.20441555e+01   1.39397389e+01  -9.39532129e-01  -2.95090431e+01
  -3.87886794e+01  -3.31633601e+01  -9.50145568e+00   1.69556715e+01
   4.81424868e+01   8.11060818e+01   9.87906096e+01   1.22593570e+02
   1.21684396e+02   9.02412066e+01   8.07468605e+01   8.87518108e+01
   9.90760818e+01   8.63389869e+01   4.93102962e+01   1.61360858e+01
  -5.36802281e+00   6.25182474e+00   1.72170476e+01  -1.16165029e+00
   2.96727849e+01   3.36439662e+01   2.43995655e+01   2.86829277e+00
  -1.48287190e+01  -3.08293129e+01  -5.92591496e+01  -8.88123323e+01
  -7.65409350e+01  -3.46633351e+01  -1.13778697e+01  -2.37083386e+01
  -6.00226194e+01  -8.11829377e+01  -8.97241553e+01  -9.57549055e+01
  -8.36142717e+01  -7.06436840e+01  -6.89750887e+01  -8.61640219e+01
  -1.12470843e+02  -1.15246767e+02  -8.63026507e+01  -5.06228134e+01
  -4.68562612e+01  -6.28524576e+01  -4.49467334e+01  -8.18465224e-02
   3.53705327e+01   4.94345149e+01   4.41800257e+01   6.60315709e+01
   9.79047181e+01   9.49622190e+01   8.10630299e+01   5.49108081e+01
   3.25609763e+01   2.06659879e+01   1.90865489e+00  -1.14727927e+01
  -3.94036852e+00   2.82000292e+01   2.89181135e+01  -1.68974966e+01
  -4.01756442e+01  -3.46524321e+01  -1.53976525e+01   1.63024051e+01
   4.71708227e+01   8.58368367e+01   1.00666242e+02   9.94494112e+01
   1.06651882e+02   8.66250703e+01   6.65624902e+01   6.46420667e+01
   7.82140851e+01   9.31549948e+01   7.00303208e+01   3.17342237e+01
  -5.89408501e+00  -9.17544639e+00   3.30094263e+01   4.22341545e+01
   6.65266052e+01   4.98492118e+01   1.84427481e+01  -1.17367733e+00
  -8.53942038e+00  -3.10957912e+01  -7.42949851e+01  -7.68214013e+01
  -3.07642331e+01   2.90513572e+00   9.65738764e+00  -1.48428890e+01
  -5.29748759e+01  -6.66950946e+01  -7.05260463e+01  -6.16364778e+01
  -5.52256679e+01  -6.81090605e+01  -8.26176329e+01  -1.00555793e+02
  -1.07895270e+02  -8.47000774e+01  -4.67245591e+01  -3.32365380e+01
  -5.20418216e+01  -5.60954250e+01  -2.14328848e+01   1.17392610e+01
   3.21430962e+01   3.99241617e+01   5.22551418e+01   8.69123469e+01
   8.67355848e+01   7.81433264e+01   7.40867472e+01   4.83024487e+01
   4.05928477e+01   4.15516115e+01   3.50851915e+01   2.52888112e+01
   3.88063454e+01   5.76662751e+01   1.88897902e+01  -2.64896280e+01
  -2.63616002e+01  -9.68281165e+00   2.02737356e+01   4.83613730e+01
   7.62993441e+01   1.00463840e+02   8.77326966e+01   7.89595942e+01
   7.01879762e+01   5.48476673e+01   5.48366406e+01   6.08491246e+01
   8.58279745e+01   8.49199350e+01   4.92915751e+01   1.17436211e+01
  -1.24638245e+01   2.75790226e+01   7.35752401e+01   7.08619118e+01
   9.57021693e+01   8.56149418e+01   4.24278187e+01   1.24398666e+01
   1.08365921e+01   2.94075630e+00  -4.50199441e+01  -7.02367397e+01
  -3.42467647e+01   3.40982865e-01   1.98023372e+01   2.72748293e+01
  -8.22437414e+00  -4.08645465e+01  -4.57871611e+01  -4.16993862e+01
  -3.63928666e+01  -5.80629676e+01  -8.44667084e+01  -9.73114948e+01
  -9.94476795e+01  -7.96177483e+01  -4.52913191e+01  -2.39784040e+01
  -2.84554208e+01  -4.27116502e+01  -3.13009768e+01  -3.74136380e+00
   1.20672012e+01   3.39198741e+01   4.92504259e+01   7.40429095e+01
   7.87459503e+01   5.66697927e+01   7.36259608e+01   6.99707598e+01
   5.05561220e+01   5.50080149e+01   6.43662934e+01   6.20540523e+01
   5.56768100e+01   7.12941619e+01   5.19330606e+01  -1.34105085e+00
  -1.30653825e+01   3.71031774e+00   2.50664289e+01   4.88177138e+01
   6.40831495e+01   8.51231962e+01   7.86269375e+01   5.62342248e+01
   4.95256463e+01   3.50817209e+01   4.21839031e+01   5.56210907e+01
   7.13857735e+01   8.52350204e+01   6.25163739e+01   3.68546098e+01
   1.13759424e+01   2.15306463e+01   7.69378894e+01   9.46551590e+01
   1.06076557e+02   6.97171272e+01   2.32165221e+01   1.57937029e+01
   2.56426101e+01  -3.54390613e+00  -4.41676966e+01  -2.49282875e+01
  -2.18125066e-01   1.01411269e+01   3.88271183e+01   3.34620345e+01
  -1.36316739e+01  -3.25767542e+01  -3.06187721e+01  -2.63521761e+01
  -4.28740423e+01  -7.62486379e+01  -9.38970988e+01  -9.59566387e+01
  -7.88722973e+01  -4.85862256e+01  -2.77325871e+01  -1.98925100e+01
  -1.96470501e+01  -2.14338834e+01  -4.66786560e+00   4.39637233e+00
   1.83809861e+01   4.71768144e+01   6.39942737e+01   7.75106892e+01
   5.02414914e+01   5.37813976e+01   8.55766339e+01   6.92859467e+01
   5.19947062e+01   5.66004783e+01   7.49099854e+01   7.55073270e+01
   8.10725687e+01   7.45809448e+01   2.43562099e+01   1.19620074e+00
   1.70859021e+01   3.39019462e+01   4.68161925e+01   5.46881747e+01
   7.01014651e+01   6.79813020e+01   3.27559792e+01   2.73731221e+01
   2.20611910e+01   1.67015751e+01   4.30154323e+01   6.05110727e+01
   7.11036191e+01   6.23384403e+01   4.90610073e+01   4.56413436e+01
   3.83704937e+01   6.82835744e+01   9.59951140e+01   9.89300461e+01
   9.91281777e+01   8.72510573e+01   3.50272312e+01   1.23829577e+01
   2.23886982e+01   2.22792205e+01  -1.16263291e+01  -6.07264201e+00
   1.82901149e+01   1.22744777e+01   2.98096984e+01   5.22370210e+01
   1.71815900e+01  -2.51559250e+01  -2.94506117e+01  -2.21519040e+01
  -2.78594871e+01  -5.76664424e+01  -8.17351756e+01  -8.73686440e+01
  -7.51532280e+01  -5.01131617e+01  -3.13560668e+01  -2.47749067e+01
  -1.29539976e+01  -4.92577328e+00   3.33989806e+00   9.29686666e+00
   9.11245309e-01   2.71922454e+01   5.16749460e+01   6.77296921e+01
   6.38673228e+01   4.18757845e+01   7.54615321e+01   8.81597662e+01
   5.88415351e+01   3.43887768e+01   5.11131402e+01   7.85044467e+01
   8.70623990e+01   9.37472287e+01   5.46373049e+01   2.06334535e+01
   3.08322014e+01   4.48268984e+01   5.10083592e+01   4.87297875e+01
   5.97363833e+01   6.93041366e+01   2.26344673e+01  -5.56271334e+00
   1.47391493e+01   6.08777736e+00   1.43951338e+01   4.34852045e+01
   5.33324492e+01   5.10829377e+01   4.51259766e+01   5.62293499e+01
   6.40034195e+01   6.72283786e+01   8.69595824e+01   8.97508580e+01
   8.82745589e+01   4.92456675e+01   1.51982886e+01   1.48841655e+01
   1.96909750e+01   5.20034928e+00  -1.25398984e-01   3.21725056e+01
   3.34655779e+01   3.03438010e+01   5.07900040e+01   4.20867182e+01
  -7.73397187e+00  -3.11871081e+01  -1.92909974e+01  -1.09034354e+01
  -2.89144518e+01  -6.00929743e+01  -7.62616546e+01  -6.48778368e+01
  -4.13904017e+01  -2.52947302e+01  -2.15723285e+01  -1.72830460e+01
  -2.11581619e+00   1.14290588e+01   1.76909689e+01  -5.91921435e+00
  -6.52328572e+00   3.14562749e+01   4.89993181e+01   6.90012021e+01
   5.47507589e+01   5.39891298e+01   8.03638468e+01   7.00657474e+01
   3.12840748e+01   1.64044905e+01   5.46569390e+01   7.81926065e+01
   9.77399177e+01   8.99440638e+01   5.15599277e+01   5.23052659e+01
   5.86772658e+01   5.93208824e+01   5.43794413e+01   5.30124817e+01
   7.16167269e+01   4.17657175e+01  -2.34473300e+01  -8.92559547e+00
   1.63139466e+01   2.86855220e+00   1.63804689e+01   3.32840827e+01
   3.50034315e+01   3.99647652e+01   4.91789433e+01   6.71262678e+01
   7.13544141e+01   7.30894099e+01   8.14075247e+01   8.40394860e+01
   7.90649938e+01   6.03937660e+01   2.24643918e+01   2.14312022e+01
   1.73040589e+01   3.06662491e+00  -8.77994080e+00   1.73595672e+01
   4.24628233e+01   3.97601470e+01   4.62092976e+01   5.03964548e+01
   1.90625906e+01  -2.28817353e+01  -1.74023153e+01   4.20721517e+00
   3.35671052e+00  -2.26366537e+01  -5.84128866e+01  -5.92063443e+01
  -2.67557389e+01  -9.44196927e+00  -7.37206301e+00  -1.16169376e+01
  -1.06029383e+01   7.01470481e+00   2.36824362e+01  -3.56905787e-01
  -3.33549942e+01   2.02634328e+00   3.66615714e+01   5.73318175e+01
   7.13020490e+01   5.19516034e+01   5.47250529e+01   6.19100319e+01
   4.34069661e+01   6.95814515e+00   2.03773125e+01   5.39543362e+01
   7.53425338e+01   1.05792285e+02   9.04694461e+01   7.84193835e+01
   8.02369832e+01   6.71073212e+01   6.41076113e+01   5.91737335e+01
   6.61914964e+01   6.10210191e+01  -2.68301528e+00  -3.27154521e+01
   1.20553104e+01   1.99727747e+01   5.33133516e+00   1.37052084e+01
   1.81820378e+01   3.23874774e+01   5.27572083e+01   6.00507956e+01
   6.80676239e+01   6.01095780e+01   6.60875230e+01   7.55898291e+01
   6.14216478e+01   2.78628468e+01   2.24421819e+01   3.17754893e+01
   4.03972717e+00  -1.96310230e+01  -1.09218026e+01   2.46450345e+01
   3.50365808e+01   3.68535730e+01   4.28202839e+01   3.81725877e+01
   4.83205156e-01  -1.27185922e+01   1.26977066e+01   2.26390803e+01
   1.66532573e+01  -1.71115898e+01  -4.88405749e+01  -2.01908970e+01
   9.77109539e+00   9.33726733e+00   2.60715478e+00  -1.19545921e+01
  -1.15295279e+01   1.77253278e+01   1.23783684e+01  -3.96511750e+01
  -3.20702675e+01   2.62410613e+01   5.07553159e+01   7.21968463e+01
   6.61710001e+01   4.46442828e+01   4.22654739e+01   4.57405695e+01
   2.24240412e+01   3.90889227e+00   2.54688639e+01   4.20087708e+01
   8.29460041e+01   1.15504896e+02   1.02180595e+02   9.89514665e+01
   7.97873290e+01   6.67589338e+01   6.99585871e+01   6.85688574e+01
   6.03734006e+01   2.37280037e+01  -2.65499475e+01  -1.13058004e+01
   2.76436203e+01   1.85455065e+01   7.09527103e+00   8.88471095e+00
   1.88756785e+01   5.54099825e+01   7.13569316e+01   6.72160895e+01
   5.58406537e+01   4.61001291e+01   6.43353851e+01   6.95011404e+01
   5.31942351e+01   3.07508017e+01   8.85535610e+00   3.16042133e+01
   1.95025714e+01  -2.15817363e+01  -3.20743638e+01  -2.67050791e+00
   1.85138415e+01   1.47371820e+01   2.01936728e+01   3.57069512e+01
   2.66761399e+01   1.84728003e+00   1.70258372e+01   2.78019561e+01
   3.03392026e+01   2.98308478e+01  -9.58742385e+00  -1.34685397e+01
   2.23500792e+01   2.58690288e+01   1.47487763e+01  -3.21909147e+00
  -2.64400751e+01  -6.93646371e+00   1.80963925e+01  -2.47836779e+01
  -5.50139848e+01  -7.95318158e-01   4.99714188e+01   6.95247509e+01
   7.50926265e+01   5.38953791e+01   3.63068139e+01   3.99474957e+01
   3.67700741e+01   1.02641952e+01   5.82598932e+00   1.77223828e+01
   4.31259468e+01   1.02826853e+02   1.17615949e+02   1.02787984e+02
   9.11185707e+01   6.97747091e+01   7.19318559e+01   7.69812905e+01
   6.20355931e+01   2.91445540e+01  -7.57782345e+00  -2.26914235e+01
   9.92245441e+00   2.64013925e+01   1.20929017e+01   7.58337183e+00
   1.25391662e+01   4.01881844e+01   7.91433413e+01   8.13576917e+01
   6.39393038e+01   3.88798617e+01   4.48777828e+01   5.91590979e+01
   2.87066996e+01   1.60624654e+00   4.49173922e+00   2.16480767e+01
  -1.06070729e+01  -4.00730496e+01  -2.85082697e+01   2.49559571e+00
  -3.24397070e+00  -1.23962036e+01   9.53517610e+00   3.38226528e+01
   2.37139629e+01   2.35822517e+01   3.10978394e+01   2.38278990e+01
   4.43799945e+01   4.13139242e+01   1.43566067e+01   3.08678973e+01
   3.96178912e+01   2.62470676e+01   7.93483650e+00  -2.40912341e+01
  -3.32907839e+01   2.64370965e+00  -5.67773486e+00  -4.99897287e+01
  -3.26548179e+01   3.15010802e+01   7.05103739e+01   7.94874941e+01
   6.37559715e+01   4.21785116e+01   3.99660930e+01   4.41361008e+01
   2.23103507e+01  -4.30919926e-01   2.95790710e+00   2.03633463e+01
   6.37193079e+01   1.11048808e+02   9.73310307e+01   8.61965600e+01
   7.51234547e+01   7.19941963e+01   7.83808500e+01   7.16146023e+01
   3.63458457e+01   8.28402563e-01  -1.78519749e+01  -6.05857485e+00
   1.75774985e+01   1.28164156e+01   4.18289210e+00   1.12446914e+01
   2.62351605e+01   5.80808337e+01   8.42880399e+01   7.70468978e+01
   4.72809515e+01   3.15106881e+01   4.15200957e+01   4.06826730e+01
   1.87907790e+01   3.81535847e+00  -1.39819231e+01  -3.04289438e+00
  -5.18947672e+00  -3.24306238e+01  -4.59860239e+01  -2.15739819e+01
  -6.41943006e+00  -3.09325042e+01  -2.79835367e+01   1.09960747e+01
   3.39130966e+01   3.43607644e+01   3.70280832e+01   2.23176703e+01
   3.10917058e+01   6.23285741e+01   5.18111228e+01   4.55140616e+01
   4.94640663e+01   3.87997097e+01   2.20009351e+01  -8.22598849e+00
  -3.72493159e+01  -2.09292302e+01  -3.25384440e+00  -2.76411411e+01
  -3.55670054e+01   3.53580367e+00   6.12296790e+01   8.76514969e+01
   7.19131931e+01   4.44807556e+01   3.77625365e+01   4.96885550e+01
   3.35565201e+01   2.96883786e+00  -8.28060911e+00   1.38699716e+01
   3.67581415e+01   7.98872425e+01   8.94374025e+01   6.75801202e+01
   7.04456615e+01   7.22998520e+01   7.84972653e+01   7.25273785e+01
   5.41119989e+01   1.35917771e+01  -1.21300227e+01  -8.07975084e+00
   1.38312362e+01   9.61503520e+00  -2.62875665e+00   1.77274228e+00
   1.98999569e+01   3.45766493e+01   5.59766304e+01   7.20974203e+01
   5.52334932e+01   3.13435048e+01   2.33762967e+01   2.04732241e+01
  -1.73425269e+00  -1.50112428e+01  -2.19900467e+01  -1.92497937e+01
  -2.31136637e+01  -4.25792023e+01  -4.84431075e+01  -1.70789517e+01
  -2.15407908e+01  -4.88439509e+01  -2.85499063e+01   1.69948016e+01
   3.96719479e+01   4.32522356e+01   3.06507136e+01   2.13540916e+01
   5.18133820e+01   6.99305019e+01   6.12859076e+01   5.72410900e+01
   4.90092534e+01   3.93277369e+01   1.37988382e+01  -1.86072139e+01
  -2.58069031e+01  -1.07293836e+01  -1.51326794e+01  -1.17772597e+01
   3.30513467e+00   4.49424078e+01   9.28288064e+01   8.57324703e+01
   4.50789048e+01   2.35877087e+01   4.19110438e+01   4.38550669e+01
   1.26756004e+01  -1.12950984e+01   2.52625910e+00   3.34227261e+01
   4.88422083e+01   7.76171873e+01   5.41772133e+01   5.19716232e+01
   6.30121067e+01   7.59667091e+01   6.85097766e+01   5.87523245e+01
   3.84099440e+01  -1.42145450e+00  -9.13541317e+00   2.05439101e+01
   1.84809342e+01  -4.57499664e+00  -5.39804614e+00   8.86587623e+00
   2.63088914e+01   2.94670353e+01   4.24016725e+01   4.65972553e+01
   3.13837481e+01   1.69681462e+01  -4.83988651e+00  -3.57321073e+00
  -2.13596850e+01  -2.51869460e+01  -2.64678856e+01  -3.16075311e+01
  -2.94941237e+01  -3.08186063e+01  -5.26581716e+01  -3.98921184e+01
  -8.55570397e+00  -3.39505532e+01  -5.37197702e+01  -1.75482455e+01
   2.84106209e+01   4.50224979e+01   4.03853264e+01   2.47895914e+01
   3.55964767e+01   6.63258252e+01   6.89721928e+01   6.09373092e+01
   5.46713636e+01   5.38105778e+01   3.65371525e+01   6.24480572e+00
  -1.00281741e+01  -7.10325139e+00  -1.25977622e+01   3.05230174e+00
   3.22525302e+01   4.69138904e+01   9.00573505e+01   1.01167629e+02
   5.22928203e+01   8.38142114e+00   1.50361720e+01   4.07504958e+01
   2.14452939e+01  -3.46726006e+00  -1.22127312e+01   2.72509769e+01
   3.93348128e+01   6.17077312e+01   6.01024622e+01   3.38756436e+01
   4.53429602e+01   5.76126804e+01   6.39357895e+01   4.65266130e+01
   5.04795353e+01   2.07590219e+01  -1.08779641e+01   1.84747787e+01
   3.79747908e+01   2.79484448e+00  -7.52086805e-01   7.65835102e+00
   1.93680298e+01   2.69879511e+01   2.24431897e+01   2.41615344e+01
   2.03302752e+01   1.58283001e+01  -1.67421531e+01  -3.72420000e+01
  -4.48971805e+01  -4.43933423e+01  -3.34689612e+01  -3.81262798e+01
  -3.39930048e+01  -4.31685730e+01  -4.82493706e+01  -1.16039237e+01
  -3.41278701e+00  -4.17588381e+01  -4.16134499e+01   4.85110877e+00
   3.66467575e+01   4.39566065e+01   3.55335033e+01   2.64314458e+01
   5.27596858e+01   7.15221249e+01   6.17662008e+01   5.41720983e+01
   5.98166043e+01   5.39583998e+01   2.51263245e+01   8.37780226e+00
   5.93201503e+00  -3.62365859e+00  -4.52459574e-01   5.09743077e+01
   7.13381087e+01   8.73907920e+01   1.08719643e+02   6.49396534e+01
   4.31224089e+00  -1.19445140e+01   1.91538729e+01   1.96490910e+01
  -7.33813636e-01  -1.63365276e+01   3.18084881e+00   3.90291061e+01
   4.69638180e+01   7.34523201e+01   3.73007861e+01   2.90413791e+01
   3.27363741e+01   4.52908534e+01   3.34903125e+01   3.56494347e+01
   3.89082270e+01  -1.92851962e+00   3.34594165e+00   4.61217023e+01
   1.75538553e+01   1.20740901e+00   2.40705722e+01   2.21827322e+01
   2.69063075e+01   3.09467105e+01   1.62938686e+01   3.35113214e+00
   6.20130822e+00  -1.20948325e+01  -6.05637106e+01  -5.19069945e+01
  -6.07088659e+01  -6.84749102e+01  -4.72969805e+01  -3.48467038e+01
  -3.96643118e+01  -4.55208647e+01  -4.54335095e+01  -1.61798198e+01
   1.07222868e+01  -9.15687997e+00  -3.38806713e+01  -1.34599886e+01
   1.92435346e+01   3.33518146e+01   4.29774809e+01   2.89011859e+01
   3.49520269e+01   6.78821598e+01   6.49301146e+01   5.01797913e+01
   5.49884244e+01   6.08201299e+01   3.55312877e+01   1.72657092e+01
   1.58170242e+01   7.99308236e+00  -3.83024781e+00   4.00617355e+01
   9.15555211e+01   9.32004768e+01   1.04449988e+02   7.67891324e+01
   1.10177480e+01  -2.05005963e+01  -3.66743552e+00   1.08687755e+01
  -1.28512229e+01  -2.12011983e+01  -2.22329327e+01   2.23983552e+01
   3.77217548e+01   6.70922309e+01   5.89158990e+01   2.22508097e+01
   2.02132178e+01   1.73084209e+01   2.00085360e+01   1.10982928e+01
   3.36004996e+01   1.61654152e+01  -5.16313048e+00   3.31211850e+01
   3.13615856e+01  -4.07477864e+00   2.87796065e+01   4.19087155e+01
   2.61347559e+01   3.62070168e+01   3.67185785e+01   1.01193223e+00
  -1.00568932e+01  -1.00145498e+01  -5.74748628e+01  -8.03519215e+01
  -7.27139050e+01  -6.78930883e+01  -3.37896490e+01  -2.82005076e+01
  -4.34576395e+01  -5.18560082e+01  -1.91721392e+01   1.05332274e+01
   5.91209664e+00  -7.97198481e+00  -1.07552796e+01   2.39753922e+00
   9.65828531e+00   2.96774615e+01   3.69116751e+01   2.35450480e+01
   5.20424110e+01   6.48759451e+01   4.70372033e+01   4.47904094e+01
   5.45799849e+01   4.01797224e+01   1.72892287e+01   2.04310600e+01
   1.65831607e+01  -7.72425381e-01   1.71368993e+01   8.49443968e+01
   1.04927436e+02   9.55896830e+01   8.02678405e+01   2.38374359e+01
  -1.21223320e+01  -8.93066044e+00   5.60314402e+00  -2.01044295e+01
  -4.15371615e+01  -4.03244194e+01  -1.03960301e+01   2.63698639e+01
   3.99385572e+01   6.29856077e+01   2.50492740e+01   1.58522544e+01
   4.63879269e+00   1.70382657e+00  -4.90327102e+00   9.57999328e+00
   2.36096330e+01   4.19019580e+00   1.54552472e+01   3.32975869e+01
  -3.38949008e+00   1.13366832e+01   4.88789301e+01   3.64492225e+01
   2.36058477e+01   4.90497259e+01   2.71125791e+01  -1.64752346e+01
  -1.75601708e+01  -4.30241416e+01  -8.73387980e+01  -7.57100570e+01
  -6.86255444e+01  -6.97953001e+01  -4.03002123e+01  -1.17026320e+01
  -1.72462042e+01  -4.62278767e+01  -3.29454633e+01   8.97036443e+00
   1.92577590e+00  -1.52792836e+00   5.83736892e+00  -1.58631394e+00
  -1.21552430e+01  -4.54681094e+00   2.64909959e+01   2.20761267e+01
   3.13352357e+01   5.36980006e+01   4.10446264e+01   3.79871999e+01
   4.40204349e+01   4.00068529e+01   1.85548724e+01   2.03749223e+01
   3.01724267e+01   1.19734186e+01   4.40285703e+00   5.67255047e+01
   1.06281412e+02   9.44776475e+01   7.36187721e+01   3.57008612e+01
   1.13023486e+00   3.02321178e+00   7.06005630e+00  -1.08557415e+01
  -5.50028963e+01  -6.22631051e+01  -4.08123560e+01   3.12395468e+00
   1.30204480e+01   3.37001838e+01   2.31953491e+01   5.12216366e+00
   7.21527294e+00  -9.80588538e+00  -1.40256761e+01  -9.60926809e+00
   1.08265631e+01   1.37765601e+01   1.01805817e+01   2.21203754e+01
   3.35932077e+00  -4.75739716e+00   3.20812615e+01   3.92013171e+01
   1.71236661e+01   2.52187638e+01   5.19370078e+01   3.56569853e+00
  -2.30142778e+01  -3.30200601e+01  -7.31813416e+01  -8.58628700e+01
  -6.06435824e+01  -4.41559896e+01  -5.35360005e+00   1.15624789e+01
  -1.05262880e+01  -3.83377919e+01  -1.66544532e+00   6.66397455e+00
  -1.55752714e+01   6.56993416e+00   7.98526344e+00  -1.60874552e+01
  -3.15148027e+01  -7.99262489e+00   1.25672059e+01   1.42032422e+01
   3.56442324e+01   3.11233052e+01   3.29164541e+01   4.47784914e+01
   4.01578654e+01   2.80213862e+01   2.10679918e+01   4.30275431e+01
   4.26677943e+01   1.73692169e+01   3.43736683e+01   8.62294182e+01
   9.44542044e+01   6.56136544e+01   3.92263028e+01   1.10994324e+01
   1.47273640e+01   1.49312236e+01  -4.63376546e+00  -4.44682773e+01
  -8.07009055e+01  -6.67537097e+01  -2.47947992e+01  -7.34586001e+00
  -3.66042150e+00   8.22695074e+00  -1.01881697e+01   3.08087067e-01
  -8.13958650e+00  -2.18264868e+01  -1.54771351e+01  -2.12284272e+00
   6.05153991e+00   9.38510038e+00   6.46987655e+00   2.79629558e+00
  -1.02899603e+01   1.28541515e+01   2.38666271e+01   1.78371974e+01
  -1.90423873e+00   3.27573143e+01   3.55999291e+01  -1.34819857e+01
  -2.56539824e+01  -5.25481846e+01  -8.01101050e+01  -7.10319800e+01
  -5.55984479e+01  -4.59953517e+01  -1.19352544e+01   2.21190221e+01
   2.34385461e+01  -1.62907781e+01  -1.96699038e+01   1.56773131e+01
  -1.07062324e+01  -1.21863777e+01   1.17605905e+01  -1.47840672e+00
  -2.68039166e+01  -3.45580738e+01  -1.29403143e+01  -3.53848213e+00
   1.46669900e+01   2.39085784e+01   2.72567631e+01   5.42572016e+01
   5.29377212e+01   4.28799067e+01   3.32104100e+01   4.36575610e+01
   7.21622055e+01   5.73657453e+01   4.31492335e+01   6.63405372e+01
   7.91335769e+01   5.64976726e+01   3.40992410e+01   1.83317698e+01
   1.56835176e+01   2.27022156e+01  -8.20435707e+00  -3.91076755e+01
  -8.04439551e+01  -9.34068739e+01  -5.13657615e+01  -2.36183641e+01
  -2.83908612e+01  -1.02399090e+01  -1.46210714e+01  -1.83932352e+01
  -9.44061867e+00  -2.47845128e+01  -2.03721193e+01   1.27518035e+00
  -1.35746996e+00  -1.02064075e+00  -8.23326900e+00  -1.17398522e+01
  -1.45198519e+01  -5.72085764e+00   6.25099119e+00   7.40599035e+00
  -1.91825711e+00  -9.03197633e+00   3.38644531e+01   7.95498363e+00
  -2.00338709e+01  -3.36142766e+01  -6.25508037e+01  -6.98116346e+01
  -4.21290060e+01  -2.88488500e+01   1.39810272e+01   3.30631301e+01
   8.49533626e+00  -2.67697851e+01   4.48530182e+00   1.55002326e+01
  -1.39382055e+01   1.41493797e+00   1.32751165e+01   2.37573227e+00
  -2.50462854e+01  -3.33856769e+01  -2.23641164e+01  -8.24922699e+00
   1.53449619e+01   2.69608354e+01   5.67214702e+01   7.31757827e+01
   6.11286887e+01   5.40905639e+01   4.37647723e+01   7.14125563e+01
   9.30777605e+01   7.64841971e+01   7.52445922e+01   6.58492936e+01
   3.89422676e+01   2.14262218e+01   2.33955252e+01   1.91261424e+01
   2.24358100e+01  -4.73619639e+00  -5.28837336e+01  -7.99229456e+01
  -1.11501948e+02  -8.54779137e+01  -3.84038904e+01  -3.94819217e+01
  -2.56613966e+01  -5.61460138e+00  -2.20081129e+01  -2.18995665e+01
  -2.42299775e+01  -2.70507207e+01   5.94654921e+00   1.42771842e+01
  -7.60151806e+00  -1.96497458e+01  -3.40199258e+01  -2.48779979e+01
  -2.79921660e+01  -1.72700966e+01  -1.17830868e+01   2.82693304e+00
  -1.83653938e+01  -5.66467043e+00   1.21691246e+01  -1.95580196e+01
  -2.61915266e+01  -4.42712153e+01  -6.18030899e+01  -5.03292395e+01
  -2.70348646e+01  -3.39650469e+01  -8.74754551e+00   2.91066244e+01
   1.85635290e+01  -2.08755228e+01  -1.76014055e+01   2.36372489e+01
   9.83848986e+00  -1.31102274e+00   1.64099638e+01   2.77948329e+01
   9.80809727e+00  -2.81896610e+01  -3.40972401e+01  -2.44599178e+01
  -2.22303561e+00   2.61085497e+01   5.08599296e+01   7.61492702e+01
   7.62614873e+01   7.11017274e+01   5.58091488e+01   5.34386963e+01
   9.21281826e+01   1.00062374e+02   9.79360193e+01   8.23324008e+01
   3.16546211e+01   4.40591702e+00   1.90530500e+01   3.37097663e+01
   2.54530951e+01   7.56470399e+00  -5.20079666e+01  -9.22732070e+01
  -1.17509699e+02  -1.21041783e+02  -6.53448005e+01  -4.25225589e+01
  -3.66245731e+01  -4.50820806e-01  -2.66378129e+00  -2.58344517e+01
  -2.68722852e+01  -2.62348003e+01  -3.85128288e-01   3.60214617e+01
   1.07626502e+01  -1.92913340e+01  -4.74076442e+01  -4.51202504e+01
  -4.44864199e+01  -5.34001083e+01  -3.59235200e+01  -1.07205933e+01
  -1.59648050e+00  -2.80578097e+01  -1.57410159e+01  -2.94518122e+01
  -3.89171182e+01  -3.65818560e+01  -5.44182803e+01  -4.80256567e+01
  -1.25341312e+01  -2.22385625e+01   1.69359973e+01   2.79940523e+01
  -6.62717412e+00  -2.24939231e+01   1.12922544e+01   2.48864526e+01
   1.02184003e+01   1.88460999e+01   3.58461621e+01   3.74167612e+01
  -1.71087197e+00  -3.18574312e+01  -2.88196188e+01  -1.95545745e+01
   9.74277464e+00   3.99599121e+01   5.67450625e+01   7.07148502e+01
   7.72230196e+01   7.06848659e+01   5.03583178e+01   6.99031370e+01
   9.78696189e+01   1.00775113e+02   1.05025531e+02   5.70416923e+01
   2.57914007e+00   5.35447225e+00   3.85911808e+01   4.01255820e+01
   1.86094112e+01  -2.56923049e+01  -8.78033607e+01  -1.16341886e+02
  -1.35405397e+02  -1.03602550e+02  -5.45855983e+01  -4.13718335e+01
  -6.84812721e+00   1.53139863e+01  -1.01470750e+01  -2.76220217e+01
  -2.00127015e+01  -4.16147577e+00   3.79728990e+01   4.05889623e+01
  -1.69765758e+00  -3.98680459e+01  -6.42880477e+01  -5.87668110e+01
  -8.14346131e+01  -6.77635180e+01  -3.20366853e+01  -2.86085151e+00
  -1.63507502e+01  -4.07700136e+01  -5.05735188e+01  -6.33220843e+01
  -4.89586419e+01  -5.10947939e+01  -5.31843980e+01  -1.89247748e+01
   8.72455166e+00  -4.99950784e+00   6.36737104e+00   3.76821044e+01
   1.81239739e+01  -4.35223546e+00   1.22879292e+01   2.60208581e+01
   1.44629339e+01   2.22116087e+01   3.81661557e+01   4.40082986e+01
   2.39031688e+01  -1.22350166e+01  -2.18521727e+01  -2.43958184e+01
  -1.65401689e+01   1.66815626e+01   3.29291179e+01   4.37984127e+01
   6.80769226e+01   7.77339737e+01   6.31719813e+01   6.09959425e+01
   8.87352726e+01   9.10268136e+01   9.70340043e+01   8.70879012e+01
   2.97376628e+01   1.60275522e+00   2.10459730e+01   4.12454865e+01
   2.27984019e+01  -8.24356084e+00  -5.84541594e+01  -1.00678801e+02
  -1.23665493e+02  -1.24759026e+02  -8.26362084e+01  -5.18696324e+01
  -1.89738639e+01   1.74235829e+01   8.84211295e+00  -1.92607858e+01
  -1.77033562e+01   6.88142507e-01   2.78662919e+01   5.28897872e+01
   2.28463723e+01  -1.99251626e+01  -6.30659124e+01  -7.72262574e+01
  -9.34129130e+01  -9.60182952e+01  -4.92061263e+01  -2.18305267e+01
  -1.53536839e+01  -4.00117060e+01  -6.47262059e+01  -8.14221930e+01
  -7.29559106e+01  -5.74764599e+01  -5.61448687e+01  -2.64439495e+01
   2.13619569e+01   1.42680101e+01   4.01919824e+01   4.22606754e+01
   2.06578388e+01   2.92590230e+01   3.76991229e+01   1.38977002e+01
   1.23969794e+01   3.74776646e+01   4.41509910e+01   3.45657463e+01
   1.49702346e+01   1.18395656e+00  -1.18336893e+01  -2.98679897e+01
  -1.49532070e+01   1.20772247e+01   1.97136924e+01   4.72643478e+01
   7.61766059e+01   7.52215856e+01   6.70785084e+01   8.55559556e+01
   9.28639711e+01   7.70295010e+01   8.32243313e+01   6.07010334e+01
   1.92701755e+01   5.32506510e+00   1.52398416e+01   1.04055134e+01
  -1.86022662e+01  -4.63574033e+01  -7.71405394e+01  -1.00452111e+02
  -1.17072996e+02  -1.04584120e+02  -7.63348507e+01  -4.30581531e+01
   4.19181379e+00   1.82402073e+01  -5.57871499e+00  -1.90936425e+01
  -8.00156727e-01   1.95993998e+01   4.45359547e+01   4.02950463e+01
  -9.70673909e-01  -4.58044997e+01  -8.45447285e+01  -1.02496408e+02
  -1.11495421e+02  -6.69040819e+01  -3.37688398e+01  -3.96350697e+01
  -4.63494362e+01  -6.23538363e+01  -8.03881485e+01  -8.54451791e+01
  -7.24217825e+01  -5.82302127e+01  -3.05226968e+01   1.22538004e+01
   3.46538327e+01   3.02578646e+01   4.13648049e+01   5.14599160e+01
   3.47396308e+01   3.53774922e+01   5.06367942e+01   2.76053870e+01
   9.50556682e-01   2.24430933e+01   4.51829110e+01   3.99370292e+01
   3.12078409e+01   3.01630280e+01   1.87751248e+01  -1.41658620e+01
  -2.94353104e+01  -5.71569899e+00   1.11448262e+01   3.08169656e+01
   6.89685112e+01   8.20106216e+01   7.26128856e+01   7.80875416e+01
   9.41133912e+01   7.39758325e+01   5.98397455e+01   6.44077954e+01
   4.03012054e+01   1.00624366e+01  -5.62053447e+00  -1.22656638e+01
  -3.86574211e+01  -6.68639623e+01  -7.68054524e+01  -8.56378028e+01
  -1.01262645e+02  -1.07804597e+02  -9.82922866e+01  -8.05162272e+01
  -3.28165952e+01   1.06248981e+01   7.89198384e+00  -1.46174498e+01
  -1.23204154e+01   6.26423935e+00   2.53546805e+01   4.38006500e+01
   1.87268379e+01  -2.87369076e+01  -7.45503057e+01  -1.06242427e+02
  -1.18364151e+02  -8.83609391e+01  -4.36975461e+01  -5.76096609e+01
  -7.59311588e+01  -6.89499211e+01  -6.69962756e+01  -7.51048428e+01
  -7.82264688e+01  -6.70438094e+01  -3.16704754e+01   1.40345983e+01
   3.78566256e+01   4.02005790e+01   4.79666924e+01   3.51635827e+01
   2.44067505e+01   3.93929768e+01   4.04365601e+01   1.10851806e+01
   7.23088988e+00   3.69076333e+01   4.87943580e+01   3.84854029e+01
   4.01196849e+01   4.61186028e+01   2.36638225e+01  -8.60834825e+00
  -7.42357361e+00   1.12339449e+01   2.61068963e+01   6.02605978e+01
   8.62371235e+01   7.77439709e+01   6.55387848e+01   7.56744048e+01
   7.20500466e+01   4.48166720e+01   4.56856105e+01   4.41065713e+01
   1.90135488e+01  -2.76024957e+00  -1.67275084e+01  -4.11835463e+01
  -8.07477190e+01  -9.74816649e+01  -9.27141227e+01  -9.76213269e+01
  -1.07741042e+02  -1.08420689e+02  -1.10223074e+02  -8.66819028e+01
  -2.73486767e+01   7.89487785e+00  -1.32840546e+00  -1.52928587e+01
  -1.03283555e+01  -8.37429013e-01   2.62814247e+01   3.55571647e+01
  -8.18386798e+00  -5.99533159e+01  -9.88519532e+01  -1.17769392e+02
  -1.03678118e+02  -6.18833510e+01  -5.77695537e+01  -9.12241740e+01
  -9.25084605e+01  -6.70049994e+01  -5.70652010e+01  -6.52256858e+01
  -6.92423637e+01  -4.34231370e+01   8.98226335e+00   3.95691495e+01
   3.68694091e+01   3.34796459e+01   3.74228512e+01   3.13527526e+01
   1.61200351e+01   1.71565753e+01   3.12153716e+01   2.95734571e+01
   1.77264519e+01   2.80032509e+01   5.01201612e+01   4.78371439e+01
   3.52589341e+01   4.45430269e+01   5.09099664e+01   3.16658152e+01
   1.48337549e+01   1.77443235e+01   2.66913394e+01   5.17869885e+01
   8.52521698e+01   8.61660665e+01   6.25311150e+01   5.46172741e+01
   5.94718658e+01   3.94016206e+01   2.50022331e+01   3.09955612e+01
   1.64219837e+01  -7.67442543e-01  -5.48221351e+00  -2.13886306e+01
  -6.13675729e+01  -9.77866512e+01  -1.05542087e+02  -1.07003645e+02
  -1.16074669e+02  -1.18438795e+02  -1.23338032e+02  -1.25141771e+02
  -8.22323345e+01  -2.14983847e+01   5.36510356e-01  -6.74963803e+00
  -1.13847154e+01  -1.76179201e+01  -7.41307804e+00   2.69308912e+01
   1.50244869e+01  -4.01512807e+01  -8.58941057e+01  -1.10529465e+02
  -1.05311366e+02  -7.40179530e+01  -5.12853840e+01  -6.81679085e+01
  -9.29053931e+01  -8.18650224e+01  -5.57270068e+01  -4.97060828e+01
  -5.49121905e+01  -4.94293563e+01  -1.51560107e+01   2.63670572e+01
   2.59804328e+01   2.56743879e+01   2.76360879e+01   2.23441893e+01
   1.46057237e+01   1.75672212e+01   3.22230626e+01   4.20532560e+01
   4.06824074e+01   4.43419849e+01   4.80197959e+01   3.46782654e+01
   3.04157675e+01   4.97456895e+01   5.83406618e+01   4.39945652e+01
   3.01624641e+01   2.95768550e+01   4.62520620e+01   7.65977748e+01
   8.72159580e+01   6.61160884e+01   4.81796451e+01   5.35319887e+01
   4.70564046e+01   2.08262991e+01   1.49867992e+01   9.49784044e+00
  -8.08783079e+00  -7.72711792e+00  -6.64638355e+00  -3.19115534e+01
  -6.94081609e+01  -9.50658068e+01  -1.11404537e+02  -1.27156207e+02
  -1.32638976e+02  -1.31299181e+02  -1.35345239e+02  -1.15421899e+02
  -6.23893648e+01  -2.55533123e+01  -1.69650357e+01  -8.21394588e+00
  -9.78874505e+00  -2.49277437e+01  -6.50266929e+00   1.64597168e+01
  -1.35043420e+01  -6.34322278e+01  -9.73886490e+01  -9.98102842e+01
  -7.28743180e+01  -4.35035363e+01  -3.19823303e+01  -5.10546961e+01
  -7.50891923e+01  -6.69464498e+01  -4.84646771e+01  -4.04941748e+01
  -3.72699178e+01  -3.26198751e+01  -8.75115256e+00   1.94326185e+01
   1.35004229e+01   1.88353952e+01   2.24294254e+01   3.06213879e+01
   2.95962127e+01   2.08295460e+01   2.53389953e+01   5.08172780e+01
   6.69085275e+01   5.42542184e+01   3.72704744e+01   2.90614516e+01
   2.76838491e+01   4.46190134e+01   6.66757405e+01   6.35251126e+01
   4.21432815e+01   3.41623054e+01   4.90126513e+01   7.06086721e+01
   7.54350040e+01   5.64112779e+01   3.87163091e+01   4.82549513e+01
   6.22938314e+01   4.19066237e+01   1.52363746e+01   9.33001320e+00
  -1.78565745e+00  -1.15486637e+01  -9.52140999e+00  -2.02103893e+01
  -4.52415152e+01  -6.87537104e+01  -9.51063266e+01  -1.26414801e+02
  -1.43057900e+02  -1.37895171e+02  -1.29292981e+02  -1.14330498e+02
  -7.93337240e+01  -5.39636421e+01  -5.29722347e+01  -3.70777125e+01
  -8.06112348e+00  -1.24568287e+01  -2.48086544e+01  -5.36539392e+00
   1.49741663e+00  -3.00136417e+01  -7.18314332e+01  -9.07868016e+01
  -7.24781858e+01  -4.45568548e+01  -2.21886530e+01  -1.34771153e+01
  -3.83718252e+01  -6.06775538e+01  -5.07048062e+01  -3.52786374e+01
  -2.62670092e+01  -2.93616431e+01  -3.38139471e+01  -1.21690437e+01
   1.24267326e+01   1.51687454e+01   2.37797976e+01   3.11396179e+01
   2.47671542e+01   1.95193516e+01   4.13211419e+01   7.88755278e+01
   8.27082695e+01   4.68486141e+01   2.06887707e+01   2.64029677e+01
   4.94148007e+01   7.33421912e+01   7.74289074e+01   5.57933963e+01
   4.03867125e+01   5.59686850e+01   7.72120256e+01   6.99087614e+01
   3.94027884e+01   1.60921863e+01   2.31616941e+01   5.23098242e+01
   5.83749352e+01   3.00413813e+01   1.36825629e+01   1.56947790e+01
   8.49890694e+00  -2.28681120e+00  -1.34001175e+01  -3.28540846e+01
  -5.18611632e+01  -7.18566880e+01  -1.06900930e+02  -1.40764983e+02
  -1.43164027e+02  -1.20838973e+02  -9.61277983e+01  -6.99519660e+01
  -5.55590268e+01  -7.10697277e+01  -7.93425934e+01  -4.48373430e+01
  -1.26727537e+01  -1.81854957e+01  -1.98114267e+01  -2.99660678e+00
  -6.25509574e+00  -4.12359295e+01  -7.66975791e+01  -7.84062140e+01
  -6.20687861e+01  -5.05580974e+01  -3.00043939e+01  -2.02468982e+01
  -3.63436302e+01  -3.96073265e+01  -2.76431056e+01  -2.31559661e+01
  -2.70882974e+01  -3.80117599e+01  -3.33983953e+01  -5.32219156e+00
   7.91042112e+00   7.67856909e+00   7.92290393e+00   5.86927990e+00
   1.99208966e+00   1.64432931e+01   6.06568865e+01   9.59453755e+01
   7.53592685e+01   3.45995868e+01   3.23466411e+01   5.71576129e+01
   7.62795826e+01   7.66319645e+01   5.79617481e+01   5.06198703e+01
   7.43381534e+01   8.73350218e+01   5.79787520e+01   1.75464866e+01
   2.02465390e+00   1.88606446e+01   4.14012029e+01   3.00201955e+01
   8.77723232e+00   1.49453663e+01   1.97198115e+01   8.13485985e+00
  -4.89779222e+00  -2.57828342e+01  -5.08712692e+01  -7.40858086e+01
  -1.11507652e+02  -1.45641112e+02  -1.35374328e+02  -9.48379675e+01
  -6.21672152e+01  -4.63247707e+01  -5.65003684e+01  -8.08674304e+01
  -7.27439910e+01  -3.90339010e+01  -2.65308336e+01  -2.38028842e+01
  -1.02651447e+01  -1.52882294e+01  -5.31078767e+01  -8.39545464e+01
  -8.32416190e+01  -8.40728171e+01  -8.17194959e+01  -5.04467644e+01
  -2.99064468e+01  -2.88216488e+01  -2.22110411e+01  -2.27340533e+01
  -3.24233224e+01  -4.06535546e+01  -3.38171740e+01  -8.07895958e+00
   5.44032673e+00  -4.08536692e-01  -1.58375351e+01  -2.73637215e+01
  -2.01892435e+01   1.46031834e+01   6.25848115e+01   7.80534247e+01
   5.48243661e+01   4.61281088e+01   5.73398320e+01   5.90306918e+01
   5.54567196e+01   5.12953011e+01   5.71657365e+01   8.25279712e+01
   9.03129112e+01   6.08873415e+01   2.56460153e+01   1.01265005e+01
   1.85003428e+01   2.09936024e+01   1.51709685e+00  -3.52668712e+00
   1.25312874e+00  -5.14613566e+00  -7.48690491e+00  -2.19875965e+01
  -5.48837015e+01  -8.30962599e+01  -1.20162200e+02  -1.50870575e+02
  -1.28478539e+02  -8.14205268e+01  -5.45651055e+01  -4.47189122e+01
  -5.38802633e+01  -6.93074107e+01  -6.28409398e+01  -4.71954918e+01
  -3.61656930e+01  -2.73853439e+01  -4.11372348e+01  -7.79255570e+01
  -9.43241903e+01  -9.11063398e+01  -1.01754275e+02  -9.03700784e+01
  -5.16207510e+01  -3.83506049e+01  -3.90618625e+01  -3.51903836e+01
  -3.62230821e+01  -3.89105662e+01  -3.14565623e+01  -9.30121587e+00
   3.55400812e+00  -4.75708976e+00  -2.93422327e+01  -4.32205416e+01
  -3.18626990e+01  -3.76827643e-01   3.07819682e+01   4.18894160e+01
   5.04155469e+01   5.71356068e+01   4.01225407e+01   2.85281858e+01
   4.05284263e+01   6.11837895e+01   8.75830665e+01   9.77720119e+01
   7.44881545e+01   3.58444777e+01   1.43184188e+01   1.80293791e+01
   1.27486055e+01  -3.06924630e+00  -1.67250746e+01  -3.11894255e+01
  -2.64018290e+01  -2.96025137e+01  -6.24544919e+01  -8.98500148e+01
  -1.21436322e+02  -1.44714097e+02  -1.18458105e+02  -8.00521593e+01
  -5.65242335e+01  -4.09190798e+01  -4.99474700e+01  -6.50930742e+01
  -5.87212999e+01  -4.62636417e+01  -4.59121802e+01  -6.66650740e+01
  -9.27044894e+01  -9.18793885e+01  -8.72858436e+01  -9.34408818e+01
  -7.30639149e+01  -5.61228699e+01  -6.60055157e+01  -6.14376549e+01
  -4.32361630e+01  -3.54219483e+01  -3.09282186e+01  -1.28619449e+01
  -2.88955198e+00  -6.26016663e+00  -2.43952400e+01  -3.62234023e+01
  -3.48890535e+01  -1.91513300e+01   5.95837628e+00   3.68207075e+01
   5.82661378e+01   4.25545331e+01   2.67922806e+01   4.65832782e+01
   7.44817735e+01   9.83162463e+01   1.05323888e+02   7.30556712e+01
   2.47883017e+01   1.25342749e+01   2.78440093e+01   2.55180767e+01
  -1.73866033e+00  -3.74189294e+01  -4.37734379e+01  -4.29462307e+01
  -6.84814440e+01  -8.56093102e+01  -1.03438411e+02  -1.16571027e+02
  -9.69463799e+01  -7.07125437e+01  -4.65730773e+01  -3.86502179e+01
  -5.31210717e+01  -5.35942163e+01  -4.54768197e+01  -5.55411134e+01
  -7.92610590e+01  -9.21959654e+01  -8.13801598e+01  -7.42550183e+01
  -6.94629052e+01  -5.83323817e+01  -6.97799551e+01  -7.52142805e+01
  -5.25599753e+01  -3.82862199e+01  -3.67845314e+01  -2.16941244e+01
  -1.10997580e+01  -3.89889919e+00  -5.23579956e+00  -1.46155480e+01
  -2.16870303e+01  -7.98297060e+00   2.42532424e+01   5.67798925e+01
   6.07065049e+01   5.52278470e+01   7.49933679e+01   9.61510596e+01
   1.08451124e+02   1.02021157e+02   5.58106440e+01   1.38853911e+01
   2.41461075e+01   4.51715532e+01   2.67777214e+01  -2.25835010e+01
  -4.92078143e+01  -5.35549589e+01  -6.93492038e+01  -7.09468368e+01
  -7.21722827e+01  -7.89498445e+01  -6.96819116e+01  -4.95829299e+01
  -3.44660839e+01  -4.24295868e+01  -4.81403731e+01  -4.61727541e+01
  -6.25748507e+01  -8.60858272e+01  -9.09373266e+01  -7.38930394e+01
  -5.54045946e+01  -4.10915745e+01  -4.26600891e+01  -5.41425907e+01
  -4.80675481e+01  -4.34101712e+01  -4.52265776e+01  -3.06915390e+01
  -1.28307438e+01   2.84726320e+00   1.42314047e+01   7.19394302e+00
   5.04169020e+00   2.35456755e+01   4.84621742e+01   6.67582426e+01
   8.16740285e+01   1.02265600e+02   1.10013832e+02   1.08284497e+02
   9.13059121e+01   4.69298466e+01   2.56822186e+01   4.20775214e+01
   3.72776485e+01  -9.41726557e+00  -4.67897771e+01  -5.43819842e+01
  -5.94902485e+01  -4.92383551e+01  -4.35547590e+01  -5.23912495e+01
  -4.76611822e+01  -3.20495055e+01  -3.69799563e+01  -5.42507514e+01
  -6.19756611e+01  -7.71490159e+01  -9.60208999e+01  -9.44243438e+01
  -6.43592066e+01  -2.80603090e+01  -1.08176237e+01  -1.55184825e+01
  -2.42311253e+01  -3.68411020e+01  -4.40001168e+01  -2.99623225e+01
  -1.07021706e+01   4.17113006e+00   1.63765544e+01   1.39641515e+01
   2.16487097e+01   3.49394177e+01   5.05236821e+01   7.86558085e+01
   1.04962426e+02   1.06571875e+02   9.86105588e+01   8.12170838e+01
   5.16528962e+01   4.37592134e+01   3.81360442e+01  -7.87776003e-01
  -3.73719668e+01  -4.04633392e+01  -3.39158505e+01  -2.08103756e+01
  -2.27149667e+01  -3.70838534e+01  -3.20112631e+01  -3.24147841e+01
  -6.02884174e+01  -8.23008668e+01  -9.36573779e+01  -1.03925122e+02
  -9.19210797e+01  -4.48417231e+01  -2.04865046e+00   7.52955611e+00
  -3.75434070e-01  -1.72075233e+01  -2.80140594e+01  -1.95445409e+01
  -1.35621376e+01  -7.53414724e+00   1.83781253e+00   1.14559534e+01
   2.64204045e+01   3.59483456e+01   5.93523218e+01   8.80964944e+01
   9.23774787e+01   8.33288577e+01   6.74215844e+01   5.31423211e+01
   4.68847393e+01   1.76158374e+01  -1.72781147e+01  -1.75377463e+01
  -1.98096382e+00   8.19600155e+00  -5.53104383e+00  -2.15676173e+01
  -2.40399077e+01  -5.04364580e+01  -8.45450559e+01  -9.73995120e+01
  -9.97961542e+01  -7.50922554e+01  -2.13499297e+01   7.92297942e+00
   8.05681793e+00   2.20678658e+00  -4.89231024e+00  -7.81898234e+00
  -1.61717580e+01  -1.87172991e+01  -3.23879817e+00   2.48817014e+01
   4.34131180e+01   5.44305076e+01   7.30031607e+01   7.61570991e+01
   6.08093032e+01   4.53510293e+01   4.76392779e+01   4.04863895e+01
   9.23762127e+00   4.44916708e-01   1.65295096e+01   2.39655692e+01
   5.95827305e+00  -9.84926282e+00  -2.95745394e+01  -6.51565497e+01
  -8.39116838e+01  -8.11615610e+01  -4.62813332e+01  -1.73605213e+00
   9.70855217e+00   1.34340662e+01   1.62887372e+01   4.36470509e+00
  -4.45201385e+00  -1.04392175e+01   1.66522095e+01   5.31864716e+01
   6.47475026e+01   6.70379013e+01   5.81222994e+01   3.19269675e+01
   2.54496317e+01   4.24670361e+01   2.93361536e+01   8.72091005e+00
   1.60476447e+01   2.09036389e+01   4.29472948e+00  -1.47565843e+01
  -4.22218363e+01  -6.16481557e+01  -5.26936747e+01  -1.38692694e+01
   1.27593021e+01   1.86832349e+01   2.99397561e+01   2.16860208e+01
   1.91823819e+01   1.20122877e+01   4.01286416e+01   6.35268516e+01
   5.98296392e+01   3.69431582e+01   7.14429279e+00   1.75716759e+01
   3.14442586e+01   1.40224620e+01   1.09125140e+01   9.01932799e+00
  -8.89502371e+00  -2.86798859e+01  -4.00207136e+01  -2.35550797e+01
   8.04419142e+00   1.76259546e+01   2.99129715e+01   3.77738688e+01
   3.61899474e+01   2.82897527e+01   4.44317411e+01   4.56481903e+01
   1.80611660e+01  -3.25716178e+00   1.55109938e+01   1.80098432e+01
   1.19701626e+01   2.63643415e-01  -1.82511048e+01  -2.28408953e+01
  -5.71117471e+00   7.78298272e+00   1.51502045e+01   3.86731481e+01
   3.76560783e+01   3.31466836e+01   3.30543808e+01   8.53646725e+00
  -1.30959805e-01   1.49474845e+01   1.37389197e+01  -2.53974372e+00
  -1.07650940e+01  -4.10934172e+00  -2.11029737e+00   2.37050372e+01
   3.08214985e+01   2.80533732e+01   7.66647463e+00   7.56921554e+00
   9.32328385e+00  -3.12755105e+00  -7.61094407e+00   7.35310414e+00
   2.05054675e+01   7.95958931e+00   2.41235625e+00   5.07977290e-01]

In [91]:
print tempp.shape


(3072,)

In [92]:
print np.median(tempp)


2.81161430848

In [93]:
print np.average(tempp) #weighted average


0.000739081832773

In [94]:
print np.mean(tempp)


0.000739081832773

In [95]:
print np.var(tempp)


2676.42822051

In [100]:
#
# Hold C3 fixed, then do test again with 10% less C3 value
# We use C3 = 5e-10
#
vary_x_samples125 = 5e-10
sigma125 = np.logspace(-21, -23, num=40)

Sij = vary_x_samples125 * norm_matrix[1][None, :, :]
newSij = (1e21)*Sij   # multiply S_ij by 1e12
Nij = sigma125[:, None, None] * id_mat[None, :, :]
newNij = (1e21)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[  7.18293292e+06   8.09386470e+06   9.12113596e+06   1.02809343e+07
   1.15907061e+07   1.30724929e+07   1.47468537e+07   1.66377572e+07
   1.87792946e+07   2.12083991e+07   2.39623566e+07   2.70868218e+07
   3.06414381e+07   3.46776845e+07   3.92815846e+07   4.45663590e+07
   5.05957546e+07   5.75247398e+07   6.54552543e+07   7.47090796e+07
   8.54799215e+07   9.81015899e+07   1.12663772e+08   1.30393234e+08
   1.51636111e+08   1.77587337e+08   2.08278721e+08   2.50099439e+08
   3.03916267e+08   3.79636875e+08   4.98623636e+08   7.21005423e+08
   1.10892079e+09   3.23917412e+09  -3.41985605e+09  -9.80515049e+08
  -5.10193129e+08  -2.76047856e+08  -1.17087469e+08  -1.90858052e+07]
******************
logdetC terms are
[   226.9052794    -135.07131727   -496.90121724   -858.84168741
  -1220.95683913  -1583.00135661  -1944.915235    -2306.69141473
  -2668.93133159  -3030.70976015  -3393.04701702  -3754.86449285
  -4117.08237295  -4478.81252165  -4840.70266103  -5203.60698497
  -5565.30740991  -5927.80221985  -6289.9127427   -6652.62034049
  -7016.06626191  -7378.97770017  -7739.66767859  -8103.60482614
  -8468.13642146  -8832.06627575  -9201.75852318  -9558.79338163
  -9920.82765381 -10286.47587538 -10655.88535624 -11026.26666575
 -11395.66670283 -11762.37585804 -12132.20128441 -12499.66763487
 -12883.12465843 -13266.56868972 -13654.89410426 -14040.44010189]
******************
Npix2pi term is
19301.9452637

In [101]:
#
# Hold C3 fixed, then do test again with 10% less C3 value
# Now use C3 = 4.5e-10
#
vary_x_samples125 = 5e-10
sigma125 = np.logspace(-21, -23, num=40)

Sij = vary_x_samples125 * norm_matrix[1][None, :, :]
newSij = (1e21)*Sij   # multiply S_ij by 1e12
Nij = sigma125[:, None, None] * id_mat[None, :, :]
newNij = (1e21)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 
print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[  7.18293292e+06   8.09386470e+06   9.12113596e+06   1.02809343e+07
   1.15907061e+07   1.30724929e+07   1.47468537e+07   1.66377572e+07
   1.87792946e+07   2.12083991e+07   2.39623566e+07   2.70868218e+07
   3.06414381e+07   3.46776845e+07   3.92815846e+07   4.45663590e+07
   5.05957546e+07   5.75247398e+07   6.54552543e+07   7.47090796e+07
   8.54799215e+07   9.81015899e+07   1.12663772e+08   1.30393234e+08
   1.51636111e+08   1.77587337e+08   2.08278721e+08   2.50099439e+08
   3.03916267e+08   3.79636875e+08   4.98623636e+08   7.21005423e+08
   1.10892079e+09   3.23917412e+09  -3.41985605e+09  -9.80515049e+08
  -5.10193129e+08  -2.76047856e+08  -1.17087469e+08  -1.90858052e+07]
******************
logdetC terms are
[   226.9052794    -135.07131727   -496.90121724   -858.84168741
  -1220.95683913  -1583.00135661  -1944.915235    -2306.69141473
  -2668.93133159  -3030.70976015  -3393.04701702  -3754.86449285
  -4117.08237295  -4478.81252165  -4840.70266103  -5203.60698497
  -5565.30740991  -5927.80221985  -6289.9127427   -6652.62034049
  -7016.06626191  -7378.97770017  -7739.66767859  -8103.60482614
  -8468.13642146  -8832.06627575  -9201.75852318  -9558.79338163
  -9920.82765381 -10286.47587538 -10655.88535624 -11026.26666575
 -11395.66670283 -11762.37585804 -12132.20128441 -12499.66763487
 -12883.12465843 -13266.56868972 -13654.89410426 -14040.44010189]
******************
Npix2pi term is
19301.9452637

In [102]:
#
# Hold C3 fixed, then do test again with 10% less C3 value
# Use C3=5e-10
#
# Noise is now varied from e-22 to e-24
#
vary_x_samples123 = 5e-10
sigma123 = np.logspace(-22, -24, num=40)

Sij = vary_x_samples123 * norm_matrix[1][None, :, :]
newSij = (1e22)*Sij   # multiply S_ij by 1e12
Nij = sigma123[:, None, None] * id_mat[None, :, :]
newNij = (1e22)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 

print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[  7.99112774e+06   9.15862622e+06   1.05174464e+07   1.21218540e+07
   1.40481199e+07   1.63849383e+07   1.94345938e+07   2.28107290e+07
   2.75426116e+07   3.39869501e+07   4.30589812e+07   5.86874289e+07
   8.17059720e+07   1.64401397e+08  -3.15809360e+10  -1.55530576e+08
  -7.00025652e+07  -3.69097741e+07  -2.01136741e+07   5.33705542e+06
   4.54644402e+06   1.50969154e+07   3.64295994e+07   7.59843057e+07
  -5.50891593e+07   1.77650620e+07   5.68860609e+07   1.24610626e+08
  -1.18748462e+08   1.00196018e+08   3.26506311e+07  -5.96827008e+06
  -9.43865379e+07  -2.91770949e+07   1.05304583e+08  -4.74799471e+07
  -1.85701131e+07   1.86812899e+07  -4.27639327e+07  -5.13285598e+07]
******************
logdetC terms are
[   239.71699157   -122.71079135   -486.33852385   -849.25504606
  -1212.4921126   -1576.26351061  -1944.85327619  -2305.753459
  -2669.13822474  -3030.65420367  -3396.41771088  -3763.82736171
  -4131.60575246  -4506.35071982  -4889.11466545  -5248.4037388
  -5612.4490359   -5995.67825433  -6385.48032885  -6779.9332622
  -7172.39370076  -7581.395549    -8041.60387949  -8517.11067861
  -9002.83276846  -9446.49442793  -9839.83647962 -10172.92306214
 -10420.76829252 -10623.50283171 -10792.24792309 -10928.10844176
 -11027.19559602 -11100.06733525 -11170.65063404 -11215.66540551
 -11261.93661065 -11284.43614935 -11306.60681018 -11326.08454794]
******************
Npix2pi term is
19301.9452637

In [103]:
#
# Hold C3 fixed, then do test again with 10% less C3 value
# Now use C3=4.5e-10
#
# Noise is now varied from e-22 to e-24
#
vary_x_samples123 = 4.5e-10
sigma123 = np.logspace(-22, -24, num=40)

Sij = vary_x_samples123 * norm_matrix[1][None, :, :]
newSij = (1e22)*Sij   # multiply S_ij by 1e12
Nij = sigma123[:, None, None] * id_mat[None, :, :]
newNij = (1e22)*Nij
# Format 7/4pi * param * P_3(M) where param is the parameter we vary, C_l
# Sij.shape = (40, 3072, 3072)
Cij = newSij + newNij
#invCij = np.linalg.inv(Cij)
logdetC = np.linalg.slogdet(Cij)  # returns sign and determinant; use logdetC[1]
# model_fit_terms = m^T C^-1 m
#
# model_fit_terms = np.array([np.dot(tempval.T , np.dot(invCij[i] , tempval) ) 
# for i in range(invCij.shape[0])])
#
model_fit_terms = np.array([np.dot(tempp.T , np.linalg.solve(Cij[i], tempp) ) for i in range(Cij.shape[0]) ]) 

print "m^T c^-1 m terms are"
print model_fit_terms 
print "******************"
print "logdetC terms are"
print logdetC[1] 
print "******************"
print "Npix2pi term is"
print Npix2pi


m^T c^-1 m terms are
[  7.88230937e+06   9.00401772e+06   1.03091948e+07   1.18496346e+07
   1.36733594e+07   1.58717504e+07   1.85223083e+07   2.29277071e+07
   2.58774395e+07   3.11601583e+07   3.85718641e+07   4.95209561e+07
   6.71862827e+07   9.81421763e+07   2.00515338e+08   1.79149266e+08
  -1.57174217e+08  -7.01207170e+07  -3.93179729e+07  -2.01955132e+07
  -6.98201421e+06   5.88648498e+06   1.83612716e+07   3.94038299e+07
   5.39473080e+07   3.01503865e+07  -3.34339516e+07  -1.31643012e+08
  -1.18775863e+08  -8.63387028e+06   2.02255477e+08  -5.89407096e+07
  -1.31132724e+08   2.82823527e+07   8.34855351e+06   1.41266857e+08
   1.68945314e+08  -8.34350175e+07  -4.03057940e+07   1.36690513e+09]
******************
logdetC terms are
[   238.87906146   -123.59978549   -486.97635053   -849.54919459
  -1212.63010956  -1576.01604717  -1942.33073337  -2309.73321034
  -2670.91431763  -3031.09627919  -3396.32588831  -3762.14204865
  -4128.07960518  -4502.34341668  -4872.33980536  -5255.02597355
  -5607.79232715  -5986.94061374  -6370.62068917  -6753.73665052
  -7147.26442618  -7537.878678    -7981.39859695  -8433.45165572
  -8907.83987127  -9384.80250567  -9848.92410084 -10252.021386
 -10558.32732388 -10804.17249772 -11029.95815536 -11179.59272393
 -11324.19494314 -11416.3809462  -11503.90735819 -11568.77486146
 -11622.48104156 -11656.23132841 -11687.85082231 -11713.95931238]
******************
Npix2pi term is
19301.9452637

In [ ]: