In [1]:
# This changes the current directory to the base saga directory - make sure to run this first!
# This is necessary to be able to import the py files and use the right directories,
# while keeping all the notebooks in their own directory.
import os
import sys
if 'saga_base_dir' not in locals():
    saga_base_dir = os.path.abspath('..')
if saga_base_dir not in sys.path:
    os.chdir(saga_base_dir)
In [2]:
from __future__ import print_function, division
from collections import Counter, OrderedDict
import hosts
import targeting
import mmthecto
import numpy as np
from astropy import units as u
from astropy.coordinates import *
from astropy import table
from astropy.visualization import hist as ahist
In [3]:
%matplotlib inline
from matplotlib import pyplot as plt
from matplotlib import rcParams
from matplotlib import patches
rcParams['image.interpolation'] = 'none'
rcParams['figure.figsize'] = (16, 10)
plt.rcParams['image.cmap'] = 'viridis'
In [19]:
DATESUFFIX = 'oct2016'
In [104]:
proposed_targets = [126115, 150578, 135879, 132339, 129237, 161174]
In [105]:
hostlst = hosts.get_saga_hosts_from_google() #'named' hosts
In [106]:
hosts_to_target = []
for nsanum in proposed_targets:
    for h in hostlst:
        if h.nsaid == nsanum:
            hosts_to_target.append(h)
            break
    else:
        # no break above: not one of the named hosts, so create a new NSAHost for it
        hosts_to_target.append(hosts.NSAHost(nsanum))
hosts_to_target
Out[106]:
In [107]:
# now set to the latest base catalogs
for h in hosts_to_target:
    h.fnsdss = 'SAGADropbox/base_catalogs/base_sql_nsa{0}.fits.gz'.format(h.nsaid)
    h._cached_sdss = None
In [108]:
# actually make sure they're loaded
torem = []
for h in hosts_to_target:
    try:
        h.get_sdss_catalog()
    except IOError:
        torem.append(h)
        print('Failed to load base catalog for', h, 'so removing from hosts_to_target')

hosts_to_target_orig = hosts_to_target[:]
for h in torem:
    hosts_to_target.remove(h)
In [109]:
# preview the catalog
hosts_to_target[0].get_sdss_catalog()
Out[109]:
In [110]:
# these are the already-observed objects
spectra = h.load_and_reprocess_sdss_catalog('SAGADropbox/data/saga_spectra_clean_jan15.fits.gz')
The ranks assigned below are:

rank | description
---|---
1 | flux stars
2 | special targets
3 | r<300 kpc, ugri cuts, r-band mag < 20
4 | r<300 kpc, ugri cuts, r-band mag < 20.75
5 | r<300 kpc, gri cuts, r-band mag < 20.75
6 | 300<r<450 kpc, ugri cuts, r-band mag < 20.75
7 | 300<r<450 kpc, gri cuts, r-band mag < 20.75
8 | other
The ugri selection additionally requires $u-g > 1.5(g-r) - 0.25$; in the code below (with ugoffset=-0.25) the corresponding *cut* removes objects with $u-g < 1.5(g-r) - 0.25$, so the cut is the complement of the accept condition.
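To make the sign convention concrete, here is a minimal sketch of the ugri cut applied to a few made-up extinction-corrected colors (the values are illustrative only, not drawn from any catalog):

# hypothetical extinction-corrected colors for three objects (illustrative values only)
umg_demo = np.array([1.2, 0.3, 1.5])   # u-g
gmr_demo = np.array([0.5, 0.6, 0.9])   # g-r
ug_cut_demo = umg_demo < 1.5*gmr_demo - 0.25   # True means the object is removed from the ugri ranks
print(ug_cut_demo)   # [False  True False] -- only the second object fails the ugri selection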
In [111]:
for h in hosts_to_target:
    fix = False
    cat = h.get_sdss_catalog()
    if np.median(cat['RHOST_KPC']) > 600:
        print('RHOST distances wrong for', h)
        fix = True
    if np.abs(np.median(cat['HOST_DIST']) - h.distmpc)/h.distmpc > 0.05:
        print('HOST_DIST wrong on', h)
        fix = True
    if fix:
        scs = SkyCoord(cat['RA'], cat['DEC'], unit=u.deg)
        cat['RHOST_ARCM'] = scs.separation(h.coords).to(u.arcmin).value
        cat['RHOST_KPC'] = scs.separation(h.coords).to(u.radian).value * h.distmpc*1000
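The RHOST_KPC recomputation above is just the small-angle relation $d_{\rm proj}\,[{\rm kpc}] = \theta\,[{\rm rad}] \times D\,[{\rm Mpc}] \times 1000$. As a quick sanity check (using a made-up 30 Mpc distance, not one of the actual hosts), the 300 kpc rank boundary subtends roughly half a degree:

theta = (300. / (30.*1000)) * u.radian   # 300 kpc at an assumed 30 Mpc, in radians
print(theta.to(u.arcmin))   # ~34.4 arcmin, inside the 40-arcmin selection radius used further below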
In [112]:
def rank_targets(cat, ugoffset=-0.25):
    corrmag = {band: cat[band] - cat['A'+band] for band in 'ugri'}
    phot_good = (cat['r'] < 21.25) & (cat['fibermag_r'] < 23) & (cat['phot_sg'] == 'GALAXY')

    # colors minus a 2-sigma photometric-uncertainty allowance
    gmr_wunc = corrmag['g'] - corrmag['r'] - 2*np.hypot(cat['g_err'], cat['r_err'])
    rmi_wunc = corrmag['r'] - corrmag['i'] - 2*np.hypot(cat['r_err'], cat['i_err'])
    gmr_accept = gmr_wunc < 0.85
    rmi_accept = rmi_wunc < 0.55

    if ugoffset is None:
        # this is a *cut*, meaning True means get rid of it
        ug_cut = np.zeros_like(gmr_accept)
    else:
        ug_cut = corrmag['u'] - corrmag['g'] < (1.5*(corrmag['g'] - corrmag['r']) + ugoffset)
    gri_accept = gmr_accept & rmi_accept
    ugri_accept = gri_accept & ~ug_cut

    r_bright = corrmag['r'] < 20.
    r_ok = corrmag['r'] < 20.75
    r_faint = r_ok & ~r_bright

    near = cat['RHOST_KPC'] < 300
    nearish = (cat['RHOST_KPC'] < 450) & ~near

    ranks = np.zeros(len(cat), dtype=int)
    ranks[r_ok & gri_accept & nearish & phot_good] = 7
    ranks[r_ok & ugri_accept & nearish & phot_good] = 6
    ranks[r_ok & gri_accept & near & phot_good] = 5
    ranks[r_ok & ugri_accept & near & phot_good] = 4
    ranks[r_bright & ugri_accept & near & phot_good] = 3
    ranks[(ranks == 0) & phot_good] = 8  # other

    do_checks(cat, ranks)
    return ranks

def do_checks(cat, ranks):
    # note: modifies `ranks` in place and uses `spectra` and the current host `h` from the enclosing notebook scope
    zq = cat['ZQUALITY'].copy()

    # ranks of ~-100 mean the object is removed because it was already observed
    havespec = zq >= 3
    ranks[havespec] = -100 - zq[havespec]

    # This *shouldn't* be necessary, as ZQUALITY should be in the base catalog.
    # But as a sanity check we look to see if anything in the spectral catalog is still being included.
    spec_this_host = spectra[spectra['HOST_NSAID'] == h.nsaid]
    spec_this_host = spec_this_host[np.in1d(spec_this_host['OBJID'], cat['OBJID'])]
    for i, zqi in zip(spec_this_host['OBJID'], spec_this_host['ZQUALITY']):
        zq[cat['OBJID'] == i] = zqi
    if np.any(ranks[zq > 2] >= 0):
        print('POSSIBLE PROBLEM: Found some objects in spectrum list that are *not* claimed '
              'as having spectra in the base catalogs. Setting them to -11x:', dict(Counter(ranks[ranks < -110])))
        ranks[zq > 2] = -110 - zq[zq > 2]

    # remove list
    tokeep = cat['REMOVE'] == -1
    ranks[~tokeep] = -cat['REMOVE'][~tokeep]  # sets the REMOVE objects to -(their remove value)

    # remove anything in the online remove list that is not flagged as REMOVE in the catalog
    remmskval = np.min(ranks) - 1
    ranks[~targeting.remove_targets_with_remlist(cat, h, maskonly=True, verbose='warning') & (ranks > -1)] = remmskval
    if np.sum(ranks == remmskval) > 0:
        print('Removed', np.sum(ranks == remmskval), 'due to online remove list. Remmsk val:', remmskval)

    # de-duplicate
    if len(np.unique(cat['OBJID'])) != len(cat):
        _, idxs = np.unique(cat['OBJID'], return_index=True)
        msk = np.ones_like(cat, dtype=bool)
        msk[idxs] = 0
        ranks[msk] = -1000
        print('WARNING: some duplicate objid found. Setting', np.sum(ranks == -1000), 'dupes to pri=-1000')
In [113]:
for h in hosts_to_target:
    print('On host', h.name)
    cat = h.get_sdss_catalog()
    cat['mmt_ranks'] = rank_targets(cat)
    print(Counter(cat['mmt_ranks']), end='\n\n')
In [114]:
for h in hosts_to_target:
    cat = h.get_sdss_catalog()
    ranks = cat['mmt_ranks']

    plt.figure()

    msk_good = (ranks > 1) & (ranks < 8)
    msk_bad = ranks > 7

    ras = Angle(cat['ra'], u.deg).wrap_at(180*u.deg)
    decs = cat['dec']

    plt.scatter(ras[msk_bad], decs[msk_bad],
                lw=0, alpha=.3, c=ranks[msk_bad], s=8, vmin=2, vmax=np.max(ranks))
    plt.scatter(ras[msk_good], decs[msk_good],
                lw=0, alpha=.9, c=ranks[msk_good], s=20, vmin=2, vmax=np.max(ranks))
    plt.colorbar()

    ell = patches.Ellipse((Angle(h.ra, u.deg).wrap_at(180*u.deg).value, h.dec),
                          1./np.cos(np.radians(h.dec)), 1,
                          fc='none', ec='k', lw=3, ls='--')
    plt.gca().add_patch(ell)

    plt.title('{}, {:.1f} {}'.format(h.name, h.dist, np.unique(cat['HOST_DIST'])[0]), fontsize=24)
    plt.tight_layout()
In [115]:
needmorefluxstars = [129237, 150578]
generated_cats = {}
for h in hosts_to_target:
    if generated_cats:
        print('')  # makes a newline except for on the first one
    print('On host', h.name)
    sys.stdout.flush()

    cat = h.get_sdss_catalog()
    ranks = cat['mmt_ranks']

    fnout = 'mmthecto/{0}_{1}.cat'.format(h.name, DATESUFFIX)
    fluxfnout = fnout.replace('.cat', '.fluxstars')
    print('Going to write', fnout)

    msk = (cat['RHOST_ARCM'] < 40) & (ranks > 0) & (ranks < 8)

    if h.nsaid in needmorefluxstars:
        fluxrng = (17., 18.)
        removefluxdistance = (20*u.arcsec, ranks[msk] < 6)
    else:
        fluxrng = (17., 17.7)
        removefluxdistance = (40*u.arcsec, ranks[msk] < 6)

    generated_cats[h] = mmthecto.generate_catalog(h, cat[msk], ranks[msk],
                                                  fnout=fnout, fluxfnout=fluxfnout,
                                                  repeatflux=4, fluxrng=fluxrng,
                                                  removefluxdistance=removefluxdistance)
In [84]:
for h in [h for h in hosts_to_target if h.nsaid in needmorefluxstars]:
    cat = h.get_sdss_catalog()
    gcat = generated_cats[h]
    ranks = cat['mmt_ranks']
    # rank '1' entries are the flux stars; [::4] presumably drops the repeatflux=4 duplicates
    fstars = gcat[gcat['rank'] == '1'][::4]

    plt.figure()

    msk_good = (ranks > 1) & (ranks < 8)
    msk_bad = ranks > 7

    ras = Angle(cat['ra'], u.deg).wrap_at(180*u.deg)
    decs = cat['dec']

    plt.scatter(ras[msk_bad], decs[msk_bad],
                lw=0, alpha=.3, c=ranks[msk_bad], s=8, vmin=2, vmax=np.max(ranks))
    plt.scatter(ras[msk_good], decs[msk_good],
                lw=0, alpha=.9, c=ranks[msk_good], s=20, vmin=2, vmax=np.max(ranks))

    fra = Angle(fstars['ra'], u.deg).wrap_at(180*u.deg)
    fdec = Angle(fstars['dec'], u.deg)
    plt.scatter(fra, fdec, color='r', s=60, marker='*')

    ell = patches.Ellipse((Angle(h.ra, u.deg).wrap_at(180*u.deg).value, h.dec),
                          1./np.cos(np.radians(h.dec)), 1,
                          fc='none', ec='k', lw=3, ls='--')
    plt.gca().add_patch(ell)

    plt.title('{}, {:.1f} {}'.format(h.name, h.dist, np.unique(cat['HOST_DIST'])[0]), fontsize=24)
    plt.tight_layout()
In [18]:
def compute_percentage(matched, targets):
    """
    Inputs should come from the "By rank" table of xfitfibs, for ranks 3/4/5
    """
    perc_ugri = (matched[0] + matched[1]) / (targets[0] + targets[1])
    perc_gri = (matched[0] + matched[1] + matched[2]) / (targets[0] + targets[1] + targets[2])
    return '{:.1%} | {:.1%}'.format(perc_ugri, perc_gri)

compute_percentage([350, 374, 60], [398, 514, 474])
Out[18]:
The most distant of the new ones
nconfigs | ugri completeness | gri completeness |
---|---|---|
1 | 45.8% | 29.8% |
2 | 85.7% | 60.2% |
3 | 98.6% | 85.9% |
This one covers the whole rank 3/4/5 region with a single configuration, but it needs multiple configurations to get the completeness up.
Closest of the new ones (29.2 Mpc)
nconfigs | ugri completeness | gri completeness |
---|---|---|
1 | 26.3% | 17.7% |
2 | 52.7% | 35.3% |
3 | 79.4% | 56.6% |
This one takes 3 configurations to cover the area; even with all 3, the lower percentages here are probably still due to fiber collisions and the need to revisit each area at least once.
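For reference, the example compute_percentage call above evaluates to the 3-configuration row of this table: $(350+374)/(398+514) \approx 79.4\%$ for ugri and $(350+374+60)/(398+514+474) \approx 56.6\%$ for gri (the matched/target counts come from the xfitfibs "By rank" table; the association with this particular host is inferred from the matching percentages).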
In [19]:
lowzmsk = spectra['SATS']>0
corrmag = {band: spectra[band] - spectra['A'+band] for band in 'ugri'}
gmr_wunc = corrmag['g'] - corrmag['r'] - 2*np.hypot(spectra['g_err'], spectra['r_err'])
rmi_wunc = corrmag['r'] - corrmag['i'] - 2*np.hypot(spectra['r_err'], spectra['i_err'])
gmr_accept = gmr_wunc < 0.85
rmi_accept = rmi_wunc < 0.55
ug_cut = corrmag['u'] - corrmag['g'] < (1.5*(corrmag['g'] - corrmag['r']) - 0.25)
cuts = gmr_accept & rmi_accept & ~ug_cut
umg = corrmag['u'] - corrmag['g']
gmr = corrmag['g'] - corrmag['r']
rmi = corrmag['r'] - corrmag['i']
fig, (ax1, ax2) = plt.subplots(2,1, figsize=(8, 12))
ax1.scatter(gmr[lowzmsk&cuts], umg[lowzmsk&cuts], alpha=.75, s=3, lw=0, c='b')
ax1.scatter(gmr[~lowzmsk&cuts], umg[~lowzmsk&cuts], alpha=.1, s=2, lw=0, c='k')
ax1.scatter(gmr[lowzmsk&~cuts], umg[lowzmsk&~cuts], alpha=.75, s=3, lw=0, c='c')
ax1.scatter(gmr[~lowzmsk&~cuts], umg[~lowzmsk&~cuts], alpha=.1, s=2, lw=0, c='r')
ax1.set_xlim(-0.2, 2)
ax1.set_ylim(-0.5, 3)
ax1.set_xlabel('g-r')
ax1.set_ylabel('u-g')
ax2.scatter(gmr[lowzmsk&cuts], rmi[lowzmsk&cuts], alpha=.75, s=3, lw=0, c='b')
ax2.scatter(gmr[~lowzmsk&cuts], rmi[~lowzmsk&cuts], alpha=.1, s=2, lw=0, c='k')
ax2.scatter(gmr[lowzmsk&~cuts], rmi[lowzmsk&~cuts], alpha=.75, s=3, lw=0, c='c')
ax2.scatter(gmr[~lowzmsk&~cuts], rmi[~lowzmsk&~cuts], alpha=.1, s=2, lw=0, c='r')
ax2.set_xlim(-0.2, 2)
ax2.set_ylim(-0.5, 1.5)
ax2.set_xlabel('g-r')
ax2.set_ylabel('r-i')
Out[19]: