In [1]:
# This changes the current directory to the base saga directory - make sure to run this first!
# This is necessary to be able to import the py files and use the right directories,
# while keeping all the notebooks in their own directory.
import os
import sys
if 'saga_base_dir' not in locals():
    saga_base_dir = os.path.abspath('..')
if saga_base_dir not in sys.path:
    os.chdir(saga_base_dir)
In [2]:
from __future__ import print_function
import hosts
import targeting
import mmthecto
import numpy as np
from astropy import units as u
from astropy.coordinates import *
from astropy import table
from astropy.table import Table
from astropy.io import fits
In [3]:
%matplotlib inline
from matplotlib import pyplot as plt
from matplotlib import rcParams
rcParams['image.interpolation'] = 'none'
rcParams['figure.figsize'] = (16, 10)
In [4]:
hostlst = hosts.get_saga_hosts_from_google() #'named' hosts
In [5]:
candidates = """
NSAID RA DEC Name
150887 23.2455 4.53406 N/A
150578. 23.0824 12.3229 N/A
149781. 22.3942 -3.43167 OBrother
150307. 22.9283 -5.49535 N/A
61945.0 23.6087 0.297265 AnaK
149977. 22.6248 10.5315 N/A
153017. 23.9904 20.7498 N/A
126115. 0.0663717 20.7524 N/A
127226. 0.579656 -8.39644 N/A
129387. 1.40109 12.9175 N/A
129237. 1.36327 17.5922 N/A
132339. 2.63655 -1.31876 Narnia
130133. 1.65181 -7.51266 N/A
131531. 2.25767 6.00248 N/A
130625. 1.88703 4.19576 N/A
"""
candidates = Table.read(candidates,format='ascii')
hostdct = dict([(h.name, h) for h in hostlst])
candhosts = [hostdct[name] if name in hostdct else hosts.NSAHost(int(nsaid))
             for nsaid, name in zip(candidates['NSAID'], candidates['Name'])]
candhosts
Out[5]:
In [6]:
# now set to the latest base catalogs
for h in candhosts:
    h.fnsdss = 'catalogs/base_sql_nsa{0}.fits.gz'.format(h.nsaid)
    h._cached_sdss = None
In [7]:
# actually make sure they're loaded here
for h in candhosts:
    h.get_sdss_catalog()
h.get_sdss_catalog() # just to see
Out[7]:
In [8]:
mlpred = Table.read('catalogs/SAGA.ALL.objid_rescaledrobs_pred.Oct28_SDSS_nopreclean.csv.fits.gz')
mlpred['objID'] = mlpred['OBJID'] # used for matching
mlpred
Out[8]:
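A note on the left joins used below (a minimal sketch with hypothetical toy tables, not SAGA data): astropy's table.join with join_type='left' keeps every row of the first table and masks the ML columns where no match was found, which is what makes the later .mask and .filled(0) calls work.

# hypothetical toy tables, just to show the masking behavior of a left join
t1 = Table({'objID': [1, 2, 3], 'r': [19.0, 20.5, 21.0]})
t2 = Table({'objID': [1, 3], 'PROBABILITY_CLASS_1': [0.8, 0.02]})
j = table.join(t1, t2, join_type='left', keys='objID')
print(j['PROBABILITY_CLASS_1'].mask)       # True only for objID 2, which has no ML prediction
print(j['PROBABILITY_CLASS_1'].filled(0))  # the masked entry becomes 0, as in the ranking below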
In [9]:
p_column_for_ranking = 'PROBABILITY_CLASS_1' #this is *just* SDSS
In [9]:
n150887 = candhosts[0]
hosts_to_target = [n150887]
In [12]:
joined_catalogs = {}
for h in hosts_to_target:
    #filter on manual remove lists *and* REMOVE!=-1
    cat = targeting.remove_targets_with_remlist(h.get_sdss_catalog(), h, 'catalogs/TargetRemoveAug28_2015.csv')
    pre_joined = table.join(cat, mlpred, join_type='left', keys='objID')
    tokeep = pre_joined['REMOVE']==-1
    notobserved = pre_joined['ZQUALITY']<3
    print('removed', np.sum(~tokeep),'for remove list and', np.sum(~notobserved),'b/c observed')
    joined_catalogs[h] = pre_joined[tokeep&notobserved]
    highp = pre_joined[p_column_for_ranking]>0.01
    print('n w/ ML pred>0.01 and observed:', np.sum(highp&~notobserved), 'not yet obs:', np.sum(highp&notobserved))
In [13]:
# compute ranks, which includes filtering on mag
rankdct = {}
for h in hosts_to_target:
    cat = joined_catalogs[h]
    highprob = cat[p_column_for_ranking].filled(0) > 0.5
    medprob = (cat[p_column_for_ranking].filled(0) > 0.05) & ~highprob
    lowprob = (cat[p_column_for_ranking].filled(0) > 0.01) & ~highprob & ~medprob
    inside = cat['RHOST_KPC']<300
    rankdct[h] = ranks = np.ones(len(cat), dtype=int)*-1 #the -1 will be re-set at the end
    # rank 1 reserved for flux stars
    ranks[inside&highprob] = 2
    ranks[~inside&highprob] = 3
    ranks[inside&medprob] = 4
    ranks[~inside&medprob] = 5
    ranks[inside&lowprob] = 6
    ranks[~inside&lowprob] = 7
    validtarget = (cat['r'] < 21.5) & (cat['fibermag_r']<23) & (cat['phot_sg']=='GALAXY')
    other = ranks < 0
    colorcutmsk = targeting.colorcut_mask(cat, targeting.bossanova_color_cuts)
    ranks[validtarget&other&colorcutmsk] = 8
    ranks[validtarget&other&~colorcutmsk] = 9
#informational
for h, ranks in rankdct.items():
    cat = joined_catalogs[h]
    hranks = dict(enumerate(np.bincount(ranks[ranks>-1])))
    del hranks[0]
    del hranks[1]
    hranks[-1] = np.sum(ranks==-1)
    print(h.name,'rank counts:', hranks)
    nother = np.sum(ranks==8)
    nprob = np.sum((ranks<8)&(ranks>0))
    nother_obs = 270-np.sum((ranks<8)&(ranks>0)&(cat['RHOST_ARCM']<30))
    print('fraction not "other": {0:.1%}, nprob_obs={1}\n'.format(nprob/float(nprob+nother), nother_obs))
In [14]:
for h in hosts_to_target:
    cat = joined_catalogs[h]
    ranks = rankdct[h]
    plt.figure()
    msk = (ranks>0)&(ranks<8)
    plt.subplot(1,2,1)
    plt.hist(cat[msk]['r'],bins=25, histtype='step')
    plt.axvline(21,c='r')
    plt.xlabel('r')
    plt.title(h.name, fontsize=24)
    plt.subplot(1,2,2)
    plt.hist(cat[msk]['fibermag_r'],bins=25, histtype='step')
    plt.axvline(23,c='r')
    plt.xlabel('fibermag_r')
    plt.title(h.name, fontsize=24)
    plt.tight_layout()
In [17]:
bins = np.linspace(15,25, 100)
notinml = cat['OBJID'].mask
plt.hist(cat['r'], color='b', bins=bins, histtype='step', label='all')
plt.hist(cat['r'][notinml], color='r', bins=bins, histtype='step', label='insdssnotML')
plt.legend(loc=0)
None
In [18]:
mlpred_old = Table.read('catalogs/SAGAobjid_PredictionAug28_SDSS_SDSSwise_v4removed_combinedPs.fits.gz')
mlpred_old['OBJID'].name = 'objID'
In [19]:
#
joined_catalogs_old = {}
for h in hosts_to_target:
    #filter on manual remove lists *and* REMOVE!=-1
    cat = targeting.remove_targets_with_remlist(h.get_sdss_catalog(), h, 'catalogs/TargetRemoveAug28_2015.csv')
    pre_joined = table.join(cat, mlpred_old, join_type='left', keys='objID')
    tokeep = pre_joined['REMOVE']==-1
    notobserved = pre_joined['ZQUALITY']<3
    print('removed', np.sum(~tokeep),'for remove list and', np.sum(~notobserved),'b/c observed')
    joined_catalogs_old[h] = pre_joined[tokeep&notobserved]
    highp = pre_joined[p_column_for_ranking]>0.01
    print('n w/ ML pred>0.01 and observed:', np.sum(highp&~notobserved), 'not yet obs:', np.sum(highp&notobserved))
In [20]:
cat = joined_catalogs_old[n150887]
bins = np.linspace(15,25, 100)
notinml = cat['PROBABILITY_CLASS_1'].mask
plt.hist(cat['r'], color='b', bins=bins, histtype='step', label='all')
plt.hist(cat['r'][notinml], color='r', bins=bins, histtype='step', label='insdssnotML')
plt.legend(loc=0)
None
In [22]:
generated_cats = {}
for h in hosts_to_target:
    print('On host', h.name)
    sys.stdout.flush()
    cat = joined_catalogs[h]
    ranks = rankdct[h]
    fnout = 'mmthecto/{0}_nov2015.cat'.format(h.name)
    fluxfnout = fnout.replace('.cat', '.fluxstars')
    print('Going to write', fnout)
    msk = (cat['RHOST_ARCM']<40) & (ranks>0) & (ranks<9)
    generated_cats[h] = mmthecto.generate_catalog(h, cat[msk], ranks[msk],
                                                  repeatflux=3, removefluxdistance=1*u.arcmin,
                                                  fnout=fnout, fluxfnout=fluxfnout)
In [68]:
gcat = generated_cats[h]
In [77]:
ranknum = gcat['rank'].copy()
ranknum[gcat['rank']==''] = '-1'
ranknum = ranknum.astype(int)
{r: np.sum(r==ranknum) for r in set(ranknum)}
Out[77]:
In [122]:
msk = (ranknum>1)&(ranknum<6)
gcatm = gcat[msk]
targeting.sampled_imagelist(gcatm[np.argsort(gcatm['ra'])], None, None, names=gcat['rank'][msk])
None
In [155]:
#identify clumps of nearby objects
gcatmsc = SkyCoord(gcatm['ra'], gcatm['dec'], unit=u.deg)
seps = u.Quantity([c.separation(gcatmsc) for c in gcatmsc])
pairs = zip(*np.where((seps>0.1*u.arcsec)&(seps<10*u.arcsec)))
pairs = [pair for pair in pairs if pair[0]<pair[1]]
clumps = []
for p1, p2 in pairs:
    for clump in clumps:
        if p1 in clump:
            clump.append(p2)
            break
        elif p2 in clump:
            clump.append(p1)
            break
    else:
        clumps.append([p1, p2])
for cl in clumps:
    targeting.sampled_imagelist(gcatm[np.array(cl)], None, None, names=cl)
clumps
Out[155]:
In [161]:
np.sum(joined_catalogs[n150887]['objID']==long(1237678777399509169))
Out[161]:
In [163]:
cat = joined_catalogs[n150887]
clumpcats = []
for clump in clumps:
    msk = np.zeros(len(cat), dtype=bool)
    for c in clump:
        msk[cat['objID']==long(gcatm[c]['object'])] = True
    clumpcats.append(cat[msk])
clumpcats[0]
Out[163]:
In [167]:
clumped = []
for clump in clumps:
    for i in clump:
        clumped.append(i)
nmult = len(clumped) - len(clumps)
len(gcatm), len(clumped), nmult, len(clumped)/float(len(gcatm)), nmult/float(len(gcatm))
Out[167]:
In [63]:
coords, targets, ranks, fields = mmthecto.parse_cfg_file('mmthecto/NSA150887_nov2015.cfg')
In [65]:
msk = ranks == 2
targeting.sampled_imagelist(coords[msk], None, None, names=targets[msk])
Out[65]:
In [39]:
hostnames_to_target = [150887, 150578, 'OBrother', 150307, 'AnaK', 149977, 153017, 129387, 129237, 'Narnia', 126115]
hosts_to_target = []
for hostname in hostnames_to_target:
    if isinstance(hostname, basestring):
        for host in candhosts:
            if host.name == hostname:
                hosts_to_target.append(host)
                break
        else:
            raise ValueError('could not find host named {0}'.format(hostname))
    else:
        for host in candhosts:
            if host.nsaid == hostname:
                hosts_to_target.append(host)
                break
        else:
            raise ValueError('could not find host with nsa number {0}'.format(hostname))
hosts_to_target
Out[39]:
In [40]:
joined_catalogs = {}
for h in hosts_to_target:
    #filter on manual remove lists *and* REMOVE!=-1
    #cat = targeting.remove_targets_with_remlist(h.get_sdss_catalog(), h, 'catalogs/TargetRemoveNov8_2015.csv')
    cat = targeting.remove_targets_with_remlist(h.get_sdss_catalog(), h, 'catalogs/TargetRemoveNov11_2015.csv')
    pre_joined = table.join(cat, mlpred, join_type='left', keys='objID')
    tokeep = pre_joined['REMOVE']==-1
    notobserved = pre_joined['ZQUALITY']<3
    print('removed', np.sum(~tokeep),'for remove list and', np.sum(~notobserved),'b/c observed')
    joined_catalogs[h] = pre_joined[tokeep&notobserved]
    highp = pre_joined[p_column_for_ranking]>0.01
    print('n w/ ML pred>0.01 and observed:', np.sum(highp&~notobserved), 'not yet obs:', np.sum(highp&notobserved))
In [41]:
redmappertargs = table.Table.read('catalogs/redmagic_targets_0828.fit')
redmappertargs
Out[41]:
In [42]:
# compute ranks, which includes filtering on mag
rankdct = {}
for h in hosts_to_target:
    cat = joined_catalogs[h]
    highprob = cat[p_column_for_ranking].filled(0) > 0.5
    medprob = (cat[p_column_for_ranking].filled(0) > 0.05) & ~highprob
    lowprob = (cat[p_column_for_ranking].filled(0) > 0.01) & ~highprob & ~medprob
    inside = cat['RHOST_KPC']<300
    rankdct[h] = ranks = np.ones(len(cat), dtype=int)*-1 #the -1 will be re-set at the end
    # rank 1 reserved for flux stars
    ranks[inside&highprob] = 2
    ranks[~inside&highprob] = 3
    ranks[inside&medprob] = 4
    ranks[~inside&medprob] = 5
    ranks[inside&lowprob] = 6
    ranks[~inside&lowprob] = 7
    validtarget = (cat['r'] < 21.5) & (cat['fibermag_r']<23) & (cat['phot_sg']=='GALAXY')
    other = ranks < 0
    colorcutmsk = targeting.colorcut_mask(cat, targeting.bossanova_color_cuts)
    ranks[validtarget&other&colorcutmsk] = 9 #other incl. color cuts
    ranks[validtarget&other&~colorcutmsk] = 10 #other not incl. color cuts
    redmappertarg = np.in1d(cat['objID'], redmappertargs['OBJID'])
    rg7 = ranks>7
    ranks[validtarget&redmappertarg&rg7] = 8 #redmapper
#informational
for h, ranks in rankdct.items():
    cat = joined_catalogs[h]
    hranks = dict(enumerate(np.bincount(ranks[ranks>-1])))
    del hranks[0]
    del hranks[1]
    hranks[-1] = np.sum(ranks==-1)
    print(h.name,'rank counts:', hranks)
    nother = np.sum(ranks==8)
    nprob = np.sum((ranks<8)&(ranks>0))
    nother_obs = 270-np.sum((ranks<8)&(ranks>0)&(cat['RHOST_ARCM']<30))
    print('fraction not "other": {0:.1%}, nprob_obs={1}\n'.format(nprob/float(nprob+nother), nother_obs))
In [28]:
def clump_finder(sc, clumpsep=10*u.arcsec):
    """
    Find clumps within a certain distance in a SkyCoord catalog
    """
    sc1 = SkyCoord(sc.ra.reshape(1, len(sc)), sc.dec.reshape(1, len(sc)))
    sc2 = SkyCoord(sc.ra.reshape(len(sc), 1), sc.dec.reshape(len(sc), 1))
    seps = sc1.separation(sc2)
    #seps = u.Quantity([c.separation(sc) for c in sc])
    pairs = zip(*np.where((seps>0.1*u.arcsec)&(seps<clumpsep)))
    pairs = [pair for pair in pairs if pair[0]<pair[1]]
    clumps = []
    for p1, p2 in pairs:
        for clump in clumps:
            if p1 in clump:
                clump.append(p2)
                break
            elif p2 in clump:
                clump.append(p1)
                break
        else:
            clumps.append([p1, p2])
    return clumps

clumpdct = {}
for h in hosts_to_target:
    cat = joined_catalogs[h]
    ranks = rankdct[h]
    #msk = (1<ranks)&(ranks<8)
    msk = (ranks==2)|(ranks==4)|(ranks==6)
    catm = cat[msk]
    highp_scs = SkyCoord(catm['ra'], catm['dec'], unit=u.deg)
    clumpdct[h] = (clump_finder(highp_scs), msk)
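As a quick, hypothetical sanity check of clump_finder (the coordinates below are made up, not drawn from any catalog): two tight pairs that are far from each other should come back as two clumps, where each clump is a list of indices into the input SkyCoord array.

# made-up demo coordinates: indices 0/1 and 2/3 each lie ~3.6 arcsec apart
demo_scs = SkyCoord([10.0, 10.001, 50.0, 50.001]*u.deg, [0.0, 0.0, 0.0, 0.0]*u.deg)
clump_finder(demo_scs, clumpsep=10*u.arcsec)  # expect [[0, 1], [2, 3]]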
In [30]:
#now we go through each and show the clumps
for h in hosts_to_target:
    cat = joined_catalogs[h]
    clumps, clumpmsk = clumpdct[h]
    clumpnum = []
    clumpra = []
    clumpdec = []
    clumpobjid = []
    for i, c in enumerate(clumps):
        print('Clump#', i)
        for idx in c:
            clumpnum.append(i)
            clumpra.append(cat[clumpmsk][idx]['ra'])
            clumpdec.append(cat[clumpmsk][idx]['dec'])
            clumpobjid.append(cat[clumpmsk][idx]['objID'])
            if h.name.startswith('NSA'):
                nmstr = ''
            else:
                nmstr = h.name
            print('{0}\t{1}\t{2}\t{3}\t{4}\t{5}'.format(nmstr, h.nsaid, clumpobjid[-1], clumpra[-1], clumpdec[-1],'shred galaxy <semiauto-found>'))
    targeting.sampled_imagelist(clumpra, clumpdec, n=np.inf, names=clumpnum)
    res = raw_input('{} <enter to continue>'.format(h.name, h.nsaid))
    if res == 'x':
        break
In [61]:
#now we go through each and show the clumps
#NOTE: the output should now be much smaller because the rem list is updated
for h in hosts_to_target:
    cat = joined_catalogs[h]
    clumps, clumpmsk = clumpdct[h]
    clumpnum = []
    clumpra = []
    clumpdec = []
    clumpobjid = []
    for i, c in enumerate(clumps):
        print('Clump#', i)
        for idx in c:
            clumpnum.append(i)
            clumpra.append(cat[clumpmsk][idx]['ra'])
            clumpdec.append(cat[clumpmsk][idx]['dec'])
            clumpobjid.append(cat[clumpmsk][idx]['objID'])
            if h.name.startswith('NSA'):
                nmstr = ''
            else:
                nmstr = h.name
            print('{0}\t{1}\t{2}\t{3}\t{4}\t{5}'.format(nmstr, h.nsaid, clumpobjid[-1], clumpra[-1], clumpdec[-1],'shred galaxy <semiauto-found>'))
    targeting.sampled_imagelist(clumpra, clumpdec, n=np.inf, names=clumpnum)
    res = raw_input('{} <enter to continue>'.format(h.name, h.nsaid))
    if res == 'x':
        break
In [43]:
generated_cats = {}
for h in hosts_to_target:
    print('On host', h.name)
    sys.stdout.flush()
    cat = joined_catalogs[h]
    ranks = rankdct[h]
    fnout = 'mmthecto/{0}_nov2015.cat'.format(h.name)
    fluxfnout = fnout.replace('.cat', '.fluxstars')
    print('Going to write', fnout, fluxfnout)
    msk = (cat['RHOST_ARCM']<40) & (ranks>0) & (ranks<10)
    generated_cats[h] = gcat = mmthecto.generate_catalog(h, cat[msk], ranks[msk],
                                                         repeatflux=3, removefluxdistance=20*u.arcsec,
                                                         fnout=fnout, fluxfnout=fluxfnout)
In [79]:
fncat = 'mmthecto/NSA153017_nov2015.cat'
fnflux = 'mmthecto/NSA153017_nov2015.fluxstars'
cat153017 = Table.read(fncat, format='ascii', data_start=2)
flux153017 = Table.read(fnflux, format='ascii')
catra = Angle(cat153017['ra'], u.hour).to(u.deg)
catdec = Angle(cat153017['dec'], u.deg)
fluxra = Angle(flux153017['RA'], u.deg)
fluxdec = Angle(flux153017['DEC'], u.deg)
msk = cat153017['type']=='guide'
plt.scatter(catra[msk], catdec[msk], alpha=.5,lw=0)
plt.scatter(catra[~msk], catdec[~msk], alpha=.5,lw=0, c='g')
plt.scatter(fluxra, fluxdec, alpha=.5,lw=0, c='r')
Out[79]:
In [89]:
msk = cat153017['type']=='guide'
plt.scatter(catra[msk].wrap_at(180*u.deg), catdec[msk].wrap_at(180*u.deg), alpha=.5,lw=0)
plt.scatter(catra[~msk].wrap_at(180*u.deg), catdec[~msk].wrap_at(180*u.deg), alpha=.5,lw=0, c='g')
plt.scatter(fluxra.wrap_at(180*u.deg), fluxdec.wrap_at(180*u.deg), alpha=.5,lw=0, c='r')
Out[89]:
Whoopsie - the problem is in RHOST_ARCM/RHOST_KPC - need to fix that up for the 0-crossing
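To make the 0-crossing issue concrete, here is a small illustration with made-up coordinates near RA=0 (anything computed naively from raw RA values jumps by nearly 360 degrees across the wrap, while SkyCoord.separation, or wrapping RA at 180 deg, behaves sensibly):

# made-up coordinates straddling RA=0 at the dec of NSA153017's neighborhood
a = SkyCoord(359.9*u.deg, 20.75*u.deg)
b = SkyCoord(0.1*u.deg, 20.75*u.deg)
print(abs(a.ra - b.ra))            # naive RA difference: 359.8 deg across the wrap
print(a.separation(b))             # true on-sky separation: ~0.19 deg
print(abs(a.ra.wrap_at(180*u.deg) - b.ra.wrap_at(180*u.deg)))  # 0.2 deg in RA once wrapped at 180 deg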
In [93]:
hostnames_to_target = [153017]
hosts_to_target = []
for hostname in hostnames_to_target:
    if isinstance(hostname, basestring):
        for host in candhosts:
            if host.name == hostname:
                hosts_to_target.append(host)
                break
        else:
            raise ValueError('could not find host named {0}'.format(hostname))
    else:
        for host in candhosts:
            if host.nsaid == hostname:
                hosts_to_target.append(host)
                break
        else:
            raise ValueError('could not find host with nsa number {0}'.format(hostname))
hosts_to_target
Out[93]:
In [94]:
joined_catalogs = {}
for h in hosts_to_target:
    #filter on manual remove lists *and* REMOVE!=-1
    #cat = targeting.remove_targets_with_remlist(h.get_sdss_catalog(), h, 'catalogs/TargetRemoveNov8_2015.csv')
    cat = targeting.remove_targets_with_remlist(h.get_sdss_catalog(), h, 'catalogs/TargetRemoveNov11_2015.csv')
    pre_joined = table.join(cat, mlpred, join_type='left', keys='objID')
    tokeep = pre_joined['REMOVE']==-1
    notobserved = pre_joined['ZQUALITY']<3
    print('removed', np.sum(~tokeep),'for remove list and', np.sum(~notobserved),'b/c observed')
    joined_catalogs[h] = pre_joined[tokeep&notobserved]
    highp = pre_joined[p_column_for_ranking]>0.01
    print('n w/ ML pred>0.01 and observed:', np.sum(highp&~notobserved), 'not yet obs:', np.sum(highp&notobserved))
In [96]:
redmappertargs = table.Table.read('catalogs/redmagic_targets_0828.fit')
In [118]:
# compute ranks, which includes filtering on mag
rankdct = {}
for h in hosts_to_target:
    cat = joined_catalogs[h]
    # need to fix up distance for 0-crossing
    hc3d = SkyCoord(h.coords.ra, h.coords.dec, h.dist)
    sc = SkyCoord(cat['ra']*u.deg, cat['dec']*u.deg, distance=h.dist)
    cat['RHOST_ARCM'] = sc.separation(hc3d).to(u.arcmin).value
    cat['RHOST_KPC'] = sc.separation_3d(hc3d).to(u.kpc).value
    highprob = cat[p_column_for_ranking].filled(0) > 0.5
    medprob = (cat[p_column_for_ranking].filled(0) > 0.05) & ~highprob
    lowprob = (cat[p_column_for_ranking].filled(0) > 0.01) & ~highprob & ~medprob
    inside = cat['RHOST_KPC']<300
    rankdct[h] = ranks = np.ones(len(cat), dtype=int)*-1 #the -1 will be re-set at the end
    # rank 1 reserved for flux stars
    ranks[inside&highprob] = 2
    ranks[~inside&highprob] = 3
    ranks[inside&medprob] = 4
    ranks[~inside&medprob] = 5
    ranks[inside&lowprob] = 6
    ranks[~inside&lowprob] = 7
    validtarget = (cat['r'] < 21.5) & (cat['fibermag_r']<23) & (cat['phot_sg']=='GALAXY')
    other = ranks < 0
    colorcutmsk = targeting.colorcut_mask(cat, targeting.bossanova_color_cuts)
    ranks[validtarget&other&colorcutmsk] = 9 #other incl. color cuts
    ranks[validtarget&other&~colorcutmsk] = 10 #other not incl. color cuts
    redmappertarg = np.in1d(cat['objID'], redmappertargs['OBJID'])
    rg7 = ranks>7
    ranks[validtarget&redmappertarg&rg7] = 8 #redmapper
#informational
for h, ranks in rankdct.items():
    cat = joined_catalogs[h]
    hranks = dict(enumerate(np.bincount(ranks[ranks>-1])))
    del hranks[0]
    del hranks[1]
    hranks[-1] = np.sum(ranks==-1)
    print(h.name,'rank counts:', hranks)
    nother = np.sum(ranks==8)
    nprob = np.sum((ranks<8)&(ranks>0))
    nother_obs = 270-np.sum((ranks<8)&(ranks>0)&(cat['RHOST_ARCM']<30))
    print('fraction not "other": {0:.1%}, nprob_obs={1}\n'.format(nprob/float(nprob+nother), nother_obs))
In [130]:
reload(mmthecto)
#write out the catalog
generated_cats = {}
for h in hosts_to_target:
    print('On host', h.name)
    sys.stdout.flush()
    cat = joined_catalogs[h]
    ranks = rankdct[h]
    fnout = 'mmthecto/{0}_nov2015_fixed.cat'.format(h.name)
    fluxfnout = fnout.replace('.cat', '.fluxstars')
    print('Going to write', fnout, fluxfnout)
    msk = (cat['RHOST_ARCM']<40) & (ranks>0) & (ranks<10)
    generated_cats[h] = gcat = mmthecto.generate_catalog(h, cat[msk], ranks[msk],
                                                         repeatflux=3, removefluxdistance=20*u.arcsec,
                                                         fnout=fnout, fluxfnout=fluxfnout)
In [131]:
reload(mmthecto)
#write out the catalog
generated_cats = {}
for h in hosts_to_target:
    print('On host', h.name)
    sys.stdout.flush()
    cat = joined_catalogs[h]
    ranks = rankdct[h]
    fnout = 'mmthecto/{0}_nov2015_fixed180.cat'.format(h.name)
    fluxfnout = fnout.replace('.cat', '.fluxstars')
    print('Going to write', fnout, fluxfnout)
    msk = (cat['RHOST_ARCM']<40) & (ranks>0) & (ranks<10)
    generated_cats[h] = gcat = mmthecto.generate_catalog(h, cat[msk], ranks[msk],
                                                         repeatflux=3, removefluxdistance=20*u.arcsec,
                                                         fnout=fnout, fluxfnout=fluxfnout, wrapraat=180*u.deg)
In [132]:
fncat = 'mmthecto/NSA153017_nov2015_fixed180.cat'
fnflux = 'mmthecto/NSA153017_nov2015_fixed180.fluxstars'
cat153017 = Table.read(fncat, format='ascii', data_start=2)
flux153017 = Table.read(fnflux, format='ascii')
catra = Angle(cat153017['ra'], u.hour).to(u.deg)
catdec = Angle(cat153017['dec'], u.deg)
fluxra = Angle(flux153017['RA'], u.deg)
fluxdec = Angle(flux153017['DEC'], u.deg)
msk = cat153017['type']=='guide'
plt.scatter(catra[msk], catdec[msk], alpha=.5,lw=0)
plt.scatter(catra[~msk], catdec[~msk], alpha=.5,lw=0, c='g')
plt.scatter(fluxra, fluxdec, alpha=.5,lw=0, c='r')
Out[132]: