In [1]:
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

In [2]:
import wisps
import wisps.simulations as wispsim

In [3]:
import astropy.units as u
from astropy.coordinates import SkyCoord

In [4]:
import matplotlib as mpl
from scipy import stats

In [5]:
import splat
from scipy import integrate

In [6]:
#import pymc stuff 
import numba
import pymc3 as pm

In [7]:
spgrid=np.arange(17, 42)

In [8]:
hs =  [100, 250, 275, 300, 325 , 350, 1000]

In [9]:
import splat.simulate as spsim

In [10]:
from pymc3.distributions import Interpolated, DiscreteUniform

In [11]:
import pandas as pd

In [12]:
pnts=pd.read_pickle(wisps.OUTPUT_FILES+'/bayesian_observed_pointings.pkl')

In [13]:
def from_posterior(param, samples):
    smin, smax = np.min(samples), np.max(samples)
    width = smax - smin
    x = np.linspace(smin, smax, 100)
    y = stats.gaussian_kde(samples)(x)

    # what was never sampled should have a small probability but not 0,
    # so we'll extend the domain and use linear approximation of density on it
    x = np.concatenate([[x[0] - 3 * width], x, [x[-1] + 3 * width]])
    y = np.concatenate([[0], y, [0]])
    return Interpolated(param, x, y)

In [14]:
from wisps.simulations import logp

In [15]:
import numpy
import theano
import theano.tensor as T
import itertools
import multiprocess as mp
from concurrent.futures import ThreadPoolExecutor, wait , ALL_COMPLETED
from  functools import partial

In [16]:
import numpy as npy

In [17]:
def conv_to_galactic(l, b, d):

    '''
    Function to convert l, b in radians and distances into 
    Galactocentric coordinates R, theta, Z.
    
    
    '''

    Rsun=8000.
    Tsun=0.
    Zsun=15.
    r2d   = 180. / numpy.pi # radians to degrees

    """
    # The SLOOOOOOOOOOW Astropy way
    c_icrs = SkyCoord(ra = ra*u.degree, dec = dec*u.degree, frame = 'icrs')  
    l, b = c_icrs.galactic.l.radian, c_icrs.galactic.b.radian
    """

    
    r    = np.sqrt( (d * np.cos( b ) )**2 + Rsun * (Rsun - 2 * d * np.cos( b ) * np.cos( l ) ) )
    t    = np.rad2deg( np.arcsin(d * np.sin( l ) * np.cos( b ) / r) )
    z    = Zsun + d * np.sin( b - np.arctan( Zsun / Rsun) )
    
    return r, t, z

In [18]:
conv_to_galactic(np.pi/2, np.pi/2, 1000.)


Out[18]:
(8000.0, 4.3854433115842975e-16, 1014.9982421921349)

In [19]:
import theano

In [20]:
import theano.tensor as T
from theano import function

In [21]:
dmin=0.
dmax=20000
Rsun=8000.
Tsun=0.
Zsun=15.
r2d   = 180. / numpy.pi # radians to degrees

In [22]:
COORDS=SkyCoord([p.coord for p in wisps.OBSERVED_POINTINGS if p.name.lower().startswith('par')])

In [23]:
galc=COORDS.transform_to('galactic')

In [24]:
fig = plt.figure(figsize=(8,6))
ax = fig.add_subplot(111, projection="mollweide")
ax.scatter(galc.l.wrap_at(180*u.degree).radian,galc.b.wrap_at(90*u.degree).radian, marker='+')


Out[24]:
<matplotlib.collections.PathCollection at 0x1c552d1438>

In [25]:
LS=galc.l.wrap_at(360*u.degree).radian

In [26]:
BS=galc.b.wrap_at(90*u.degree).radian

In [27]:
import scipy.stats as stats
gaussian_kde = stats.gaussian_kde

In [28]:
plt.scatter(LS, BS)


Out[28]:
<matplotlib.collections.PathCollection at 0x1c855bbba8>

In [29]:
#wisps.OBSERVED_POINTINGS

In [30]:
#/earth_centric_likelihood

In [63]:
traces=[]
for h in hs:
    with pm.Model() as model:
        
        l=pm.Uniform('l', lower=np.nanmin(LS), upper=np.nanmax(LS),  observed=LS)
        b=pm.Uniform('b', lower=np.nanmin(BS), upper=np.nanmax(LS),  observed=BS)
        
        d=pm.Uniform('d', upper=Rsun+0, lower=0.)
        
        r=pm.Deterministic('r', np.sqrt( (d * np.cos( b ) )**2 + Rsun * (Rsun - 2 * d * np.cos( b ) * np.cos( l ) ) ))
        z=pm.Deterministic('z', Zsun+ d * np.sin( b - np.arctan( Zsun / Rsun) ))
        
        #d_3d=pm.Deterministic('d3d', (r**2+z**2)**0.5)
        
        #likelihood
        like = pm.Potential('lnlike', logp(r, z,h)) #earth-centric likelihood
    
        trace = pm.sample(tune=100, draws=int(1e3), cores=2)
        
        traces.append(trace)


Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [d]
Sampling 2 chains: 100%|██████████| 2200/2200 [00:02<00:00, 804.28draws/s]
The acceptance probability does not match the target. It is 0.9100092621288748, but should be close to 0.8. Try to increase the number of tuning steps.
There were 3 divergences after tuning. Increase `target_accept` or reparameterize.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [d]
Sampling 2 chains: 100%|██████████| 2200/2200 [00:03<00:00, 690.17draws/s]
There was 1 divergence after tuning. Increase `target_accept` or reparameterize.
The acceptance probability does not match the target. It is 0.889148113507654, but should be close to 0.8. Try to increase the number of tuning steps.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [d]
Sampling 2 chains: 100%|██████████| 2200/2200 [00:02<00:00, 750.31draws/s]
There were 6 divergences after tuning. Increase `target_accept` or reparameterize.
The acceptance probability does not match the target. It is 0.8864245701859678, but should be close to 0.8. Try to increase the number of tuning steps.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [d]
Sampling 2 chains: 100%|██████████| 2200/2200 [00:03<00:00, 670.62draws/s]
There were 4 divergences after tuning. Increase `target_accept` or reparameterize.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [d]
Sampling 2 chains: 100%|██████████| 2200/2200 [00:03<00:00, 686.24draws/s]
The acceptance probability does not match the target. It is 0.9499543417661246, but should be close to 0.8. Try to increase the number of tuning steps.
There were 6 divergences after tuning. Increase `target_accept` or reparameterize.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [d]
Sampling 2 chains: 100%|██████████| 2200/2200 [00:03<00:00, 666.62draws/s]
There were 3 divergences after tuning. Increase `target_accept` or reparameterize.
There were 2 divergences after tuning. Increase `target_accept` or reparameterize.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [d]
Sampling 2 chains: 100%|██████████| 2200/2200 [00:03<00:00, 600.18draws/s]
There were 4 divergences after tuning. Increase `target_accept` or reparameterize.
The acceptance probability does not match the target. It is 0.880275431632376, but should be close to 0.8. Try to increase the number of tuning steps.

In [64]:
for RV in model.basic_RVs:
    print(RV.name, RV.logp(model.test_point))


d_interval__ -1.3862943611198906
l -706.7965672800917
b -787.295138222742

In [65]:
pm.model_to_graphviz(model)


Out[65]:
%3 cluster385 385 b b ~ Uniform z z ~ Deterministic b->z r r ~ Deterministic b->r lnlike lnlike ~ Deterministic z->lnlike r->lnlike l l ~ Uniform l->r d d ~ Uniform d->z d->r

In [66]:
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
%matplotlib inline

In [67]:
from matplotlib.colors import Normalize

In [68]:
cmap=sns.diverging_palette(124, 256, n=10, as_cmap=True)

In [69]:
cnorm=Normalize(hs[1], hs[-2])

In [55]:
#for idx, t in enumerate(traces):
#    h=plt.scatter(np.log(t['r']), t['z'], color=cmap(cnorm(hs[idx])), alpha=.01, s=1, marker='.' )

#plt.xlabel('log r (pc)', fontsize=18)
#plt.ylabel('z (pc)', fontsize=18)

In [70]:
t=traces[0]

In [71]:
dists=np.array([t['d'] for t in traces])

In [72]:
dd=((t['r'])**2+(t['z'])**2)**0.5

In [73]:
dd.max(), dd.min()


Out[73]:
(8010.8620881827155, 7989.807068301993)

In [74]:
dists.max(), dists.min()


Out[74]:
(141.29292534549128, 0.0015818880253541262)

In [ ]:
#h=plt.hist(dd, bins='auto')

In [62]:
for idx, t in enumerate(traces):
    h=plt.hist(dists[idx], bins='auto', histtype='step',  color=cmap(cnorm(hs[idx])))



In [ ]:
hgvbjnkml

In [ ]:
import wisps

In [ ]:
pnts=pd.read_pickle(wisps.OUTPUT_FILES+'/bayesian_observed_pointings.pkl')

In [ ]:
@numba.jit
def custom_volume_correction(coordinate,dmin, dmax, h):
    nsamp=1000
    ds = np.linspace(dmin,dmax,nsamp)
    r, z=wispsim.convert_to_rz(coordinate.ra, coordinate.dec, ds)
    rh0=wispsim.density_function(r, z,h )
    num=integrate.trapz(rh0*(ds**2), x=ds)
    den=((dmax-dmin)**3)
    return  num/den

def computer_volume(pnt):
        """
        given area calculate the volume
        """
        volumes={}
        for k in spgrid:
            vcs=[]
            for h in hs:
                vc=custom_volume_correction(pnt.coord,  pnt.dist_limits[k][1], pnt.dist_limits[k][0], h)
                vcs.append(vc)
            volumes['vc_{}'.format(str(k))]=vcs
            volumes[k]= np.array(vcs)*0.33333333333*(pnt.dist_limits[k][0]**3-pnt.dist_limits[k][1]**3)

        return volumes

In [ ]:
volumes=[computer_volume(pnt) for pnt in pnts]

In [ ]:
#p=plt.hist(np.concatenate(dists).flatten())
#plt.show()

In [ ]:
dists=np.array(dists)

In [ ]:
len(spgrid)

In [ ]:
dist_dict=dict(zip(hs, dists))

In [ ]:
#dist_dict

In [ ]:
full_dict={'volumes': volumes, 'distances': dist_dict}

In [ ]:
import pickle
with open(wisps.OUTPUT_FILES+'/bayesian_pointings.pkl', 'wb') as file:
           pickle.dump(full_dict,file)

In [ ]:


In [ ]: