In [1]:
from __future__ import print_function

from astropy.io import fits
import math
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import os
import subprocess
from sklearn.cluster import DBSCAN
from analyzeImage import analyzeImage
import keras
from keras.models import load_model
%matplotlib inline
%load_ext autoreload
%autoreload 2


Using TensorFlow backend.

In [2]:
gpu_code_path = "../code/gpu/"
real_image_name = "chip_7"
real_image_path = "../../HITS/trimmed_chip_03/Blind15A_02/night_1/" #gpu_code_path+"images/"+real_image_name
psi_image_path = gpu_code_path+"output-images/psi"
phi_image_path = gpu_code_path+"output-images/phi"

In [3]:
def execute(cmd):
    """Run cmd as a subprocess and yield its stdout line by line."""
    popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    for stdout_line in iter(popen.stdout.readline, ""):
        yield stdout_line
    popen.stdout.close()
    return_code = popen.wait()
    if return_code:
        raise subprocess.CalledProcessError(return_code, cmd)

In [38]:
paramsFile = open('../code/gpu/debug/parameters.config', 'w')
paramsFile.write(
"""Debug ................ : 1
Image Count .......... : 5
Generate Images ...... : 0
Image Width .......... : 500
Image Height ......... : 500
PSF Sigma ............ : 1.0
Object Brightness .... : 360.0
Object Initial x ..... : 320.0
Object Initial y ..... : 180.0
Velocity x ........... : -1.3
Velocity y ........... : -0.2
Background Level ..... : 1024.0
Background Sigma ..... : 32.0
Mask Threshold ....... : 40.0
Mask Penalty ......... : 15.0
Angles to Search ..... : 80
Velocities to Search . : 20
Minimum Velocity ..... : 300
Maximum Velocity ..... : 450
Write to file ........ : 1
Source Images Path ... : ../../{source}/
Psi Images Path ...... : ../../{psi}/
Phi Images Path ...... : ../../{phi}/
""".format( source=real_image_path, psi=psi_image_path, phi=phi_image_path ))
paramsFile.close()
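
The GPU code reads this plain "Key ..... : value" text format. As a quick sanity check of what was just written, the file can be parsed back into a dict (a minimal sketch; this helper is not part of the GPU code):

In [ ]:
# Minimal sketch (hypothetical helper): read parameters.config back and
# split each line on the first ':' into a key/value pair.
params = {}
with open('../code/gpu/debug/parameters.config') as cfg:
    for line in cfg:
        if ':' not in line:
            continue
        key, value = line.split(':', 1)
        params[key.rstrip(' .')] = value.strip()
print(params['Image Count'], params['Minimum Velocity'], params['Maximum Velocity'])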

In [5]:
#popen = subprocess.Popen( "./clearImages.sh", stdout=subprocess.PIPE, 
#                         stderr=subprocess.PIPE)
#popen.wait()
#output = popen.stderr.read()
#output += popen.stdout.read()
#print( output)




In [40]:
#for path in execute("./search.sh"):
#    print(path, end="")

In [3]:
raw_results = np.genfromtxt('../data/results/HITS1.txt', names=True)

In [4]:
# Collect the MJD of each exposure from its header and convert to elapsed
# time in days since the first frame.
image_mjd = []

for filename in sorted(os.listdir(real_image_path)):
    hdulist = fits.open(os.path.join(real_image_path, filename))
    image_mjd.append(hdulist[0].header['MJD'])

image_mjd = np.array(image_mjd)
image_times = image_mjd - image_mjd[0]
#image_times*=24.  # uncomment to work in hours instead of days

Load Psi Images


In [5]:
# Use the first psi image to set the array dimensions, then load every
# frame (in time order) into a single (num_images, ny, nx) stack.
hdulist = fits.open(os.path.join(psi_image_path, os.listdir(psi_image_path)[0]))
num_images = len(os.listdir(psi_image_path))
image_shape = np.shape(hdulist[0].data)
im_psi_array = np.zeros((num_images, image_shape[0], image_shape[1]))

for idx, filename in enumerate(sorted(os.listdir(psi_image_path))):

    #print (str('Loaded ' + filename))

    image_file = os.path.join(psi_image_path, filename)
    hdulist = fits.open(image_file)
    im_psi_array[idx] = hdulist[0].data#*mask

Load Phi Images (for potentially making psi/phi stamps)


In [6]:
# Same pattern as the psi images: use the first file for the array shape,
# then load every phi frame in time order.
hdulist = fits.open(os.path.join(phi_image_path, os.listdir(phi_image_path)[0]))
num_images = len(os.listdir(phi_image_path))
image_shape = np.shape(hdulist[0].data)
im_phi_array = np.zeros((num_images, image_shape[0], image_shape[1]))

for idx, filename in enumerate(sorted(os.listdir(phi_image_path))):

    #print (str('Loaded ' + filename))

    image_file = os.path.join(phi_image_path, filename)
    hdulist = fits.open(image_file)
    im_phi_array[idx] = hdulist[0].data#*mask

In [7]:
# Load the real (science) images; their data live in FITS extension 1 rather
# than the primary HDU used by the psi and phi images.
hdulist = fits.open(os.path.join(real_image_path, os.listdir(real_image_path)[0]))
num_images = len(os.listdir(real_image_path))
image_shape = np.shape(hdulist[1].data)
im_array = np.zeros((num_images, image_shape[0], image_shape[1]))

for idx, filename in enumerate(sorted(os.listdir(real_image_path))):

    #print( str('Loaded ' + filename))

    image_file = os.path.join(real_image_path, filename)
    hdulist = fits.open(image_file)
    im_array[idx] = hdulist[1].data#*mask

In [8]:
ai = analyzeImage()

In [9]:
model = load_model('../data/kbmod_model.h5')
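
The standard Keras model.summary() call can be used to inspect the layer stack of the loaded network before it is applied to the candidate stamps:

In [ ]:
# Optional: print the architecture of the CNN used for stamp filtering.
model.summary()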

In [13]:
print(np.shape(raw_results))
results = raw_results[np.where(raw_results['likelihood'] >= 6.0)]
print(np.shape(results))
print(results)


(979582,)
(156006,)
[(102.0, 2713.0, 0.0, 0.0, -240.264, 194.591, 35.679, 7215.087)
 (102.0, 2714.0, 0.0, 0.0, -239.355, 193.854, 35.664, 7216.561)
 (102.0, 2718.0, 0.0, 0.0, -239.707, 174.184, 35.332, 7084.095) ...,
 (682.0, 2723.0, 0.0, 0.0, 374.523, 79.605, 6.0, 144.98)
 (1598.0, 2783.0, 0.0, 0.0, -42.891, 52.973, 6.0, 123.703)
 (1498.0, 2857.0, 0.0, 0.0, 23.695, -21.342, 6.0, 110.845)]

In [14]:
plt.hist(raw_results['likelihood'], bins=np.arange(20))


Out[14]:
(array([  0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
          0.00000000e+00,   4.66919000e+05,   3.56657000e+05,
          8.69120000e+04,   2.28540000e+04,   1.64080000e+04,
          2.13480000e+04,   3.95800000e+03,   1.97000000e+03,
          8.79000000e+02,   7.19000000e+02,   2.11000000e+02,
          1.79000000e+02,   1.26000000e+02,   1.29000000e+02,
          8.20000000e+01]),
 array([ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15, 16,
        17, 18, 19]),
 <a list of 19 Patch objects>)

In [15]:
filtered_results = ai.filter_results(im_array, results, image_times, model, chunk_size=5000)


Finished chunk 1 of 32
Finished chunk 2 of 32
Finished chunk 3 of 32
Finished chunk 4 of 32
Finished chunk 5 of 32
Finished chunk 6 of 32
Finished chunk 7 of 32
Finished chunk 8 of 32
Finished chunk 9 of 32
Finished chunk 10 of 32
Finished chunk 11 of 32
Finished chunk 12 of 32
Finished chunk 13 of 32
Finished chunk 14 of 32
Finished chunk 15 of 32
Finished chunk 16 of 32
Finished chunk 17 of 32
Finished chunk 18 of 32
Finished chunk 19 of 32
Finished chunk 20 of 32
Finished chunk 21 of 32
Finished chunk 22 of 32
Finished chunk 23 of 32
Finished chunk 24 of 32
Finished chunk 25 of 32
Finished chunk 26 of 32
Finished chunk 27 of 32
Finished chunk 28 of 32
Finished chunk 29 of 32
Finished chunk 30 of 32
Finished chunk 31 of 32
Finished chunk 32 of 32
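
filter_results scores the candidates with the Keras CNN in chunks (156006 candidates at chunk_size=5000 gives the 32 chunks reported above). A rough sketch of that kind of chunked prediction loop, assuming the model returns a per-stamp probability; the actual implementation lives in analyzeImage.filter_results:

In [ ]:
# Rough sketch only (assumed behaviour; see analyzeImage.filter_results).
# `stamps` would be the postage-stamp array for each candidate trajectory and
# `threshold` a hypothetical probability cut.
def filter_in_chunks(candidates, stamps, model, chunk_size=5000, threshold=0.5):
    keep = []
    num_chunks = int(np.ceil(len(candidates) / float(chunk_size)))
    for i in range(num_chunks):
        scores = model.predict(stamps[i*chunk_size:(i+1)*chunk_size])
        keep.append(scores[:, -1] > threshold)  # assumed "real object" column
        print('Finished chunk %i of %i' % (i+1, num_chunks))
    return candidates[np.concatenate(keep)]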

In [16]:
print( len(filtered_results) )
filtered_results


8738
Out[16]:
array([(1930.0, 1028.0, 0.0, 0.0, 0.004, 89.22, 10.554, 299.828),
       (1930.0, 1024.0, 0.0, 0.0, 0.004, 90.39, 10.258, 280.609),
       (1930.0, 1031.0, 0.0, 0.0, 0.004, 88.05, 10.133, 280.108), ...,
       (1432.0, 3015.0, 0.0, 0.0, 33.928, -52.263, 6.0, 115.667),
       (1451.0, 3019.0, 0.0, 0.0, 24.384, -54.789, 6.0, 110.528),
       (1338.0, 3153.0, 0.0, 0.0, 75.338, -116.05, 6.0, 113.957)], 
      dtype=[('t0_x', '<f8'), ('t0_y', '<f8'), ('theta_par', '<f8'), ('theta_perp', '<f8'), ('v_x', '<f8'), ('v_y', '<f8'), ('likelihood', '<f8'), ('est_flux', '<f8')])

In [17]:
# Collapse near-duplicate detections: clusterResults runs DBSCAN over the
# trajectories and returns one representative index per cluster.
results_to_cluster = filtered_results
arg = dict(eps=0.03, min_samples=1, n_jobs=-1)
clustered_results = ai.clusterResults(results_to_cluster, dbscan_args=arg)#, im_array, image_times)
clustered_results = results_to_cluster[np.array(clustered_results[1], dtype=int)]
#best_targets = range(stamp_count)
#best_targets
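
clusterResults collapses the 8738 filtered detections to one representative per DBSCAN cluster. A rough sketch of the idea, assuming DBSCAN is run on scaled starting positions and velocities and the first (highest-likelihood) member of each cluster is kept; the real feature choice and scaling live in analyzeImage.clusterResults:

In [ ]:
# Rough sketch only (assumed behaviour; see analyzeImage.clusterResults).
# scale_x/scale_y/scale_v are hypothetical normalisations so that eps=0.03
# is unitless.
from sklearn.cluster import DBSCAN

def cluster_indices(results, scale_x, scale_y, scale_v, dbscan_args):
    features = np.column_stack([results['t0_x'] / scale_x,
                                results['t0_y'] / scale_y,
                                results['v_x'] / scale_v,
                                results['v_y'] / scale_v])
    labels = DBSCAN(**dbscan_args).fit_predict(features)
    # np.unique returns the first occurrence of each cluster label, which is
    # the highest-likelihood member if the results are sorted by likelihood.
    _, first_idx = np.unique(labels, return_index=True)
    return np.sort(first_idx)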

In [18]:
print( len(clustered_results) )
clustered_results


29
Out[18]:
array([(1930.0, 1028.0, 0.0, 0.0, 0.004, 89.22, 10.554, 299.828),
       (1434.0, 2994.0, 0.0, 0.0, 31.804, -43.789, 9.943, 195.572),
       (1653.0, 2818.0, 0.0, 0.0, -19.942, -3.156, 9.008, 247.178),
       (1185.0, 2595.0, 0.0, 0.0, 90.964, 46.347, 8.544, 163.518),
       (1884.0, 3769.0, 0.0, 0.0, -74.24, -228.387, 6.982, 130.887),
       (1299.0, 3868.0, 0.0, 0.0, 93.652, -440.914, 6.402, 132.053),
       (1781.0, 2989.0, 0.0, 0.0, -126.029, -40.935, 6.365, 122.758),
       (1795.0, 2637.0, 0.0, 0.0, -132.375, 119.208, 6.359, 122.941),
       (1596.0, 3333.0, 0.0, 0.0, 64.704, -17.35, 6.359, 119.836),
       (1835.0, 3797.0, 0.0, 0.0, -51.422, -241.762, 6.323, 124.547),
       (1743.0, 2633.0, 0.0, 0.0, -109.013, 121.088, 6.248, 118.313),
       (1558.0, 2379.0, 0.0, 0.0, -24.846, 236.508, 6.248, 125.773),
       (1479.0, 3390.0, 0.0, 0.0, 11.68, -223.465, 6.196, 116.324),
       (1366.0, 2228.0, 0.0, 0.0, 10.925, 208.274, 6.184, 113.662),
       (1851.0, 2786.0, 0.0, 0.0, -158.29, 51.446, 6.181, 116.593),
       (1743.0, 2361.0, 0.0, 0.0, -109.085, 245.047, 6.174, 120.235),
       (1205.0, 3032.0, 0.0, 0.0, 136.007, -60.583, 6.165, 114.482),
       (1452.0, 3392.0, 0.0, 0.0, 23.603, -224.875, 6.153, 116.45),
       (1716.0, 2246.0, 0.0, 0.0, -96.61, 297.391, 6.105, 115.461),
       (1338.0, 2191.0, 0.0, 0.0, 23.645, 224.87, 6.103, 114.49),
       (1203.0, 3234.0, 0.0, 0.0, 137.181, -152.404, 6.075, 114.072),
       (1803.0, 3764.0, 0.0, 0.0, -35.768, -225.633, 6.072, 117.286),
       (1256.0, 3240.0, 0.0, 0.0, 112.936, -155.495, 6.055, 114.723),
       (1250.0, 3012.0, 0.0, 0.0, 115.701, -51.537, 6.05, 111.863),
       (1648.0, 2454.0, 0.0, 0.0, -65.883, 202.807, 6.05, 124.39),
       (1784.0, 2168.0, 0.0, 0.0, -127.553, 332.343, 6.048, 117.908),
       (1503.0, 3491.0, 0.0, 0.0, -0.038, -269.4, 6.042, 115.64),
       (1783.0, 3643.0, 0.0, 0.0, -26.609, -167.854, 6.037, 111.38),
       (1932.0, 2989.0, 0.0, 0.0, -194.851, -41.397, 6.007, 118.12)], 
      dtype=[('t0_x', '<f8'), ('t0_y', '<f8'), ('theta_par', '<f8'), ('theta_perp', '<f8'), ('v_x', '<f8'), ('v_y', '<f8'), ('likelihood', '<f8'), ('est_flux', '<f8')])

In [19]:
#im_psi_array

In [20]:
f_results = clustered_results#filtered_results
rejected = 0
for imNum in range(min(len(f_results), 40)):
    current = imNum#best_targets[imNum]
    fig = plt.figure(figsize=(8,3))
    # Left panel: coadded postage stamp along the candidate trajectory.
    fig.add_subplot(121)
    plt.imshow(ai.createPostageStamp(im_array,
                                     list(f_results[['t0_x', 't0_y']][current]),
                                     np.array(list(f_results[['v_x', 'v_y']][current])),
                                     image_times,
                                     [25., 25.])[0],
               origin='lower',
               #cmap=plt.cm.Greys_r,
               interpolation='None')
    plt.title(str('#' + str(imNum+1) + ' [x,y] = ' + str(np.round(list(f_results[['t0_x', 't0_y']][current]), 2))
                  + ' v = ' + str(np.round(list(f_results[['v_x', 'v_y']][current]), 2)) + ' ' +
                  str(f_results['likelihood'][current])))
    # Right panel: flux at the predicted trajectory position in each frame.
    fig.add_subplot(122)
    ai.plotLightCurves(im_array, f_results[current], image_times)
    plt.tight_layout()
    # Count candidates whose light curve is dominated by a single bright frame
    # (maximum flux more than 50x the median), a sign of an artifact rather
    # than steady flux from a moving object.
    traj_coords = ai.calc_traj_coords(f_results[current], image_times)
    light_curve = [im_array[x, traj_coords[x,1], traj_coords[x,0]] for x in range(len(im_array))]
    if np.max(light_curve) > 50*np.median(light_curve):
        rejected += 1
    #plt.savefig("stamps/stamp"+str(imNum+1)+".png")
    plt.show()



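The rejection counter above flags any candidate whose light curve is dominated by a single frame (maximum flux more than 50 times the median along the trajectory), presumably to catch single-frame artifacts rather than steady moving-object flux. A toy check of the criterion on synthetic light curves:

In [ ]:
# Toy example of the 50x-median spike cut: a flat light curve passes,
# one with a single very bright frame is flagged.
steady = np.full(21, 200.)
spiked = steady.copy()
spiked[7] = 200. * 60.
for lc in (steady, spiked):
    print(np.max(lc) > 50 * np.median(lc))
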
In [62]:
image_times


Out[62]:
array([ 0.       ,  0.0680697,  0.153044 ,  0.2217088,  0.996763 ,
        1.0646263,  1.2708684,  1.9935337,  2.0612634,  2.1987387,
        2.2678587,  2.9910217,  3.0587132,  3.9889088,  4.2517774,
        4.2530772,  4.9861608,  5.0537649,  5.1210723,  5.1897426,
        5.2584592])

In [21]:
rejected


Out[21]:
29

In [ ]: