Input preparation


In [ ]:
from hirise_tools.downloads import download_RED_product
from hirise_tools.products import RED_PRODUCT_ID

In [ ]:
import logging
from nbtools.logging import setup_live_logging

In [ ]:
from planet4 import io, region_data

In [ ]:
db = io.DBManager()

In [ ]:
roi = region_data.Potsdam()
obsids = roi.all_obsids
obsids

In [ ]:
obsids = db.obsids
len(obsids)

In [ ]:
setup_live_logging('planet4', logging.DEBUG)

Create mosaics


In [ ]:
from planet4.projection import create_RED45_mosaic
from planet4 import io

In [ ]:
from nbtools import execute_in_parallel

In [ ]:
len(obsids)

In [ ]:
results = execute_in_parallel(create_RED45_mosaic, obsids)

In [ ]:
for res in results:
    print(res)

Parallel production


In [ ]:
from ipyparallel import Client
c = Client()

lbview = c.load_balanced_view()
dview = c.direct_view()

In [ ]:
with c[:].sync_imports():
    from planet4.projection import create_RED45_mosaic

In [ ]:
results = lbview.map_async(create_RED45_mosaic, obsids)

In [ ]:
from nbtools import display_multi_progress

In [ ]:
display_multi_progress(results, obsids)

In [ ]:
for res in results:
    print(res)

xy2latlon


In [ ]:
from pysis.isis import campt
from pysis.exceptions import ProcessError
import pandas as pd
import pvl
from planet4 import io, catalog_production as cp

In [ ]:
from pathlib import Path

# clusterpath = io.analysis_folder() / 'p4_catalog'
rm = cp.ReleaseManager('v1.0')

In [ ]:
rm.calc_tile_coordinates()

In [ ]:
rm.calc_metadata()

In [ ]:
rm.merge_all()

In [ ]:
meta = pd.read_csv(rm.metadata_path)
meta.query('map_scale==0.25')

In [ ]:
obsids = rm.obsids

In [ ]:
with dview.sync_imports():
    from pysis.isis import campt
    from pysis.exceptions import ProcessError
    from pathlib import Path
    from ipyparallel import CompositeError

In [ ]:
def do_campt(mosaicname, savepath, temppath):
    """Run ISIS campt on a coordinate list file and write a flat CSV to savepath."""
    try:
        campt(from_=mosaicname, to=savepath, format='flat', append='no',
              coordlist=temppath, coordtype='image')
    except ProcessError as e:
        print(e.stderr)
        return mosaicname, False

def obsid_marking_from_clusterpath(inpath):
    """Find obsid and marking kind from HiRISE cluster path.
    
    For example, a cluster path like this:
    '/Users/klay6683/Dropbox/data/planet4/p4_analysis/p4_catalog/ESP_011350_0945_blotches.csv'
    will return:
    ('ESP_011350_0945', 'blotches')
    """
    s = inpath.stem  # stem is 'name' (without folders) minus extension
    # s = ESP_xxxxxx_yyyy_blotches (or ..._fans)
    # obsid = s[:until last '_']
    sep = s.rfind('_')
    return s[:sep], s[sep+1:]
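
A quick check of the path-parsing helper above, using the example path from its own docstring (the file does not need to exist; only the name is parsed):


In [ ]:
obsid_marking_from_clusterpath(Path('/Users/klay6683/Dropbox/data/planet4/p4_analysis/p4_catalog/ESP_011350_0945_blotches.csv'))
# -> ('ESP_011350_0945', 'blotches'), as stated in the docstring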

In [ ]:
class XY2LATLON:
    edrpath = io.get_ground_projection_root()
    def __init__(self, df, inpath, overwrite=False, obsid=None):
        self.df = df
        self.inpath = inpath
        self.overwrite = overwrite
        self._obsid = obsid  # an explicit obsid wins over the one derived from the dataframe
            
    @property
    def obsid(self):
        return self.df.image_name.iloc[0] if self._obsid is None else self._obsid

    @obsid.setter
    def obsid(self, value):
        self._obsid = value
            
    @property
    def mosaicname(self):
        return f"{self.obsid}_mosaic_RED45.cub"
    
    @property
    def mosaicpath(self):
        return self.edrpath / self.obsid / self.mosaicname
        
    @property
    def savepath(self):
        return self.inpath / f"{self.obsid}_campt_out.csv"
        
    @property
    def savepath_blotch(self):
        return self.inpath / f"{self.obsid}_blotch_campt_out.csv"
    
    @property
    def savepath_fan(self):
        return self.inpath / f"{self.obsid}_fan_campt_out.csv"
    
    @property
    def temppath(self):
        return self.inpath / f"{self.obsid}.tocampt"
    
    def process_inpath(self):
        df = self.df
        tempcoords = ['image_x', 'image_y']
        df[tempcoords].to_csv(str(self.temppath), header=False, index=False)
        if self.savepath.exists() and self.overwrite is False:
            return
        try:
            do_campt(self.mosaicpath, self.savepath, self.temppath)
        except Exception as e:
            print(e)
            return False
        
    def combine_marking_files(self):
        try:
            f = pd.read_csv(self.savepath_fan)
        except FileNotFoundError:
            f = None
        try:
            b = pd.read_csv(self.savepath_blotch)
        except FileNotFoundError:
            b = None
        pd.concat([f, b]).to_csv(self.savepath, index=False)

In [ ]:
from pysis.isis import campt
from pysis.exceptions import ProcessError
import pvl
from astropy import units as u
from astropy.coordinates import CartesianRepresentation

def vec_unit_to_astro(pvlunit):
    rep = CartesianRepresentation(pvlunit.value, unit=pvlunit.units)
    return rep
    
class CAMPTER:
    edrpath = io.get_ground_projection_root()
    def __init__(self, obsid):
        self.obsid = obsid
        
    @property
    def mosaicpath(self):
        mosaicname = f"{self.obsid}_mosaic_RED45.cub"
        return self.edrpath / self.obsid / mosaicname
    
    def execute(self, sample, line):
        try:
            return campt(from_=self.mosaicpath, SAMPLE=sample, LINE=line, type_='image')
        except ProcessError as e:
            print(e.stderr)
            raise ValueError("CAMPT failed.")
            
    def calc_point(self, sample, line):
        obj = pvl.loads(self.execute(sample, line))
        self.dic = obj['GroundPoint']
        return self.dic

    @property
    def body_vector(self):
        vec = self.dic['BodyFixedCoordinate']
        return vec_unit_to_astro(vec)
    
    @property
    def lat(self):
        return self.dic['PlanetographicLatitude'].value*u.deg
    
    def get_two_results(self, p1, p2):
        dic1 = self.calc_point(*p1)
        dic2 = self.calc_point(*p2)
        return dic1, dic2
    
    def calc_pixel_diff_km(self, p1, p2):
        """Calculate BodyFixed Vector difference for two pixel coordinates.
        
        Parameters:
        p1, p2 : tuple
            Tuples of (SAMPLE, LINE) coordinates
        """
        dic1, dic2 = self.get_two_results(p1, p2)
        key = 'BodyFixedCoordinate'
        v1 = vec_unit_to_astro(dic1[key])
        v2 = vec_unit_to_astro(dic2[key])
        return (v1 - v2)
    
    def calc_pixel_diff_latlon(self, p1, p2, key='lat'):
        """Calculate Lat/Lon difference for two pixel coordinates.
        
        Parameters:
        p1, p2 : tuple
            Tuples of (SAMPLE, LINE) coordinates
        """
        switch = dict(lat='PlanetographicLatitude',
                      lon='PositiveEast360Longitude')
        dic1, dic2 = self.get_two_results(p1, p2)
        key = switch[key]
        lat1 = dic1[key].value * u.deg
        lat2 = dic2[key].value * u.deg
        return (lat1 - lat2)
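
As a side note, vec_unit_to_astro above only needs an object exposing .value (a 3-vector) and .units, so it can be exercised without running campt; a minimal sketch with made-up numbers standing in for a pvl quantity:


In [ ]:
from types import SimpleNamespace

fake_quantity = SimpleNamespace(value=[-288.4, -3.2, -1693.9], units='km')  # hypothetical stand-in
vec_unit_to_astro(fake_quantity)  # -> CartesianRepresentation of (x, y, z) in km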

In [ ]:
campter = CAMPTER('ESP_011680_1055')

In [ ]:
campter.calc_point(1, 1)   # populate campter.dic first
campter.dic['BodyFixedCoordinate']

In [ ]:
campter.calc_pixel_diff_km((1,1), (1.5,1))

In [ ]:
campter.calc_pixel_diff_latlon((1,1), (2,1), key='lat').value

In [ ]:
0.0000031
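
The per-pixel latitude step noted above can be cross-checked against the map scale: converted to a ground distance it should be of the order of the 0.25 m pixel scale queried earlier (a rough sketch, assuming a mean Mars radius of about 3390 km):


In [ ]:
from astropy import units as u

mars_radius = 3390 * u.km    # rough mean radius, good enough for an order-of-magnitude check
dlat = 0.0000031 * u.deg     # per-pixel latitude step from the cells above
(dlat.to(u.rad).value * mars_radius).to(u.m)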

In [ ]:
campter.dic.keys()

In [ ]:


In [ ]:
from astropy import units as u
from astropy.coordinates import CartesianRepresentation

# body_vector already returns a CartesianRepresentation (built by vec_unit_to_astro above)
rep = campter.body_vector

In [ ]:
rep

In [ ]:


In [ ]:


In [ ]:
def xy2latlon(df, inpath):
    """Run campt for one marking dataframe; record the inpath and the process_inpath result."""
    d = dict(inpath=inpath)
    xy = XY2LATLON(df, inpath)
    ok = xy.process_inpath()
    d['ok'] = ok
    return d

In [ ]:


In [ ]:
blotches = rm.read_blotch_file().drop_duplicates()
fans = rm.read_fan_file().drop_duplicates()
combined = pd.concat([blotches, fans])

In [ ]:
obsids = combined.obsid.unique()

In [ ]:
len(obsids)

In [ ]:
for obsid in obsids:
    xy = XY2LATLON(combined, rm.savefolder, obsid=obsid)
    xy.combine_marking_files()

Execute campt for all obsids


In [ ]:
from tqdm import tqdm

for obsid in tqdm(obsids):
    data = combined[combined.image_name==obsid]
    xy = XY2LATLON(data, rm.savefolder)
    xy.process_inpath()

In [ ]:
pd.set_option('display.max_columns', 100)

In [ ]:
cols_to_merge = ("image_name image_x image_y PlanetocentricLatitude PlanetographicLatitude "
                 "PositiveEast360Longitude BodyFixedCoordinateX BodyFixedCoordinateY "
                 "BodyFixedCoordinateZ".split())
index = ['image_name', 'image_x', 'image_y']

def combine_campt_into_catalog():
    for marking in ['fans', 'blotches']:
        print(marking)
        data = fans if marking == 'fans' else blotches  # pick up the global catalog DataFrames
        bucket = []
        for obsid in obsids:
            df = data.query('image_name==@obsid')
            if df.shape[0] == 0:
                continue
            xy = XY2LATLON(df, rm.savefolder, obsid=obsid)
            savepath = xy.savepath_blotch if marking=='blotches' else xy.savepath_fan
            bucket.append(pd.read_csv(savepath).assign(image_name=obsid))

        ground = pd.concat(bucket).drop_duplicates()
        ground.rename(dict(Sample='image_x', Line='image_y'), axis=1, inplace=True)

        data.sort_values(by=index, inplace=True)
        ground.sort_values(by=index, inplace=True)

        merged = data.merge(ground[cols_to_merge], on=index)
        savepath = rm.blotch_merged if marking == 'blotches' else rm.fan_merged
        if marking == 'fans':
            merged.version = merged.version.astype('int')
        merged.to_csv(savepath, index=False)

In [ ]:
cols_to_merge = ("obsid image_x image_y PlanetocentricLatitude PlanetographicLatitude "
                 "PositiveEast360Longitude BodyFixedCoordinateX BodyFixedCoordinateY "
                 "BodyFixedCoordinateZ".split())

index = ['obsid', 'image_x', 'image_y']

def get_all_campt_results():
    bucket = []
    for obsid in obsids:
        xy = XY2LATLON(None, rm.savefolder, obsid=obsid)
        bucket.append(pd.read_csv(xy.savepath).assign(obsid=obsid))

    ground = pd.concat(bucket).drop_duplicates()
    ground.rename(dict(Sample='image_x', Line='image_y'), axis=1, inplace=True)
    return ground
    
def combine_campt_into_catalog(fans, blotches):
    ground = get_all_campt_results()

    fans = fans.merge(ground[cols_to_merge], on=index)
    blotches = blotches.merge(ground[cols_to_merge], on=index)
    return fans, blotches
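
For reference, the helper just defined can be called in one go; the cells below perform the same merge step by step to check the resulting shapes:


In [ ]:
fans_merged, blotches_merged = combine_campt_into_catalog(fans, blotches)
fans_merged.shape, blotches_merged.shape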

In [ ]:
ground = get_all_campt_results()

In [ ]:
fans.shape

In [ ]:
fans.merge(ground[cols_to_merge], on=index).shape

In [ ]:
blotches.shape

In [ ]:
blotches.merge(ground[cols_to_merge], on=index).shape

In [ ]:
fans.columns

Tile coordinates


In [ ]:
from planet4.projection import TileCalculator, xy_to_hirise
from planet4 import projection as proj

In [ ]:
edrpath = io.get_ground_projection_root()
edrpath

In [ ]:
obsids.shape

In [ ]:
cubepaths = [edrpath / obsid / f"{obsid}_mosaic_RED45.cub" for obsid in obsids]

In [ ]:
cubepaths[:3]

In [ ]:
# testing
tc = TileCalculator(cubepaths[0])

tc.calc_tile_coords()

df = pd.read_csv(tc.campt_results_path)

pd.set_option('display.max_columns', 60)

%matplotlib inline

Check for presence of campt files


In [ ]:
todo=[]
for cubepath in cubepaths:
    tilecalc = proj.TileCalculator(cubepath, read_data=False)
    if not tilecalc.campt_results_path.exists():
        todo.append(cubepath)
print(f"{len(todo)} still todo.")

In [ ]:
def get_tile_coords(cubepath):
    from planet4.projection import TileCalculator
    tilecalc = TileCalculator(cubepath)
    tilecalc.calc_tile_coords()

In [ ]:
results = execute_in_parallel(get_tile_coords, todo)

In [ ]:
len(cubepaths)

In [ ]:
bucket = []
from tqdm import tqdm
for cubepath in tqdm(cubepaths):
    tilecalc = proj.TileCalculator(cubepath, read_data=False)
    bucket.append(tilecalc.tile_coords_df)

coords = pd.concat(bucket, ignore_index=True)

In [ ]:
coords.shape

In [ ]:
coords.head()

In [ ]:
catalog = 'catalog_1.0b3'

In [ ]:
savefolder = io.analysis_folder() / catalog / f"{roi.name.lower()}"
savefolder.mkdir(exist_ok=True)
savename = savefolder / f"{roi.name.lower()}_tile_coords.csv"

In [ ]:
savefolder = io.data_root / catalog

In [ ]:
savename = savefolder / "all_images_tile_coords.csv"

In [ ]:
coords.to_csv(savename, index=False)

In [ ]:
coords = pd.read_csv(savename)

In [ ]:
coords.shape

In [ ]:
coords.tail()

In [ ]:
%matplotlib nbagg

In [ ]:
coords.plot(kind='scatter', marker='.', x='BodyFixedCoordinateY', y='BodyFixedCoordinateX')

In [ ]:
import matplotlib.pyplot as plt

ax = plt.gca()
ax.invert_xaxis()
ax.invert_yaxis()
ax.set_title(f"{roi.name} tile center coordinates")

fig = plt.gcf()
fig.savefig(f'/Users/klay6683/Dropbox/src/p4_paper1/figures/{roi.name.lower()}_tile_center_coordinates.png', dpi=200)

In [ ]:
from matplotlib.colors import LogNorm

In [ ]:
xycoords = coords['BodyFixedCoordinateX BodyFixedCoordinateY'.split()]

In [ ]:
xrange = []
yrange = []
for col,bucket in zip(xycoords.columns, [xrange, yrange]):
    coord = xycoords[col]
    bucket.append(int(coord.min()))
    bucket.append(int(coord.max()))

In [ ]:
xrange, yrange

In [ ]:
import numpy as np

gridres = 0.5  # grid cell size in km, i.e. 0.25 km² per bin
xedges = np.arange(xrange[0], xrange[1]+1, gridres)
yedges = np.arange(yrange[0], yrange[1]+1, gridres)

In [ ]:
plt.figure()
plt.scatter(xycoords.BodyFixedCoordinateY, xycoords.BodyFixedCoordinateX,
            marker='.', alpha=0.1, color='red')

counts, y_ret, x_ret, _ = plt.hist2d(xycoords.BodyFixedCoordinateY, xycoords.BodyFixedCoordinateX,
           bins=[yedges, xedges], cmin=10)
plt.colorbar()

plt.hlines(yedges, *xrange, lw=0.5)
plt.vlines(xedges, *yrange, lw=0.5)
plt.gca().axes.set_aspect('equal', 'datalim')
plt.gca().invert_xaxis()
plt.gca().invert_yaxis()
# plt.savefig('ithaca_coverage.png', dpi=200)

In [ ]:
H, y_ret, x_ret = np.histogram2d(xycoords.BodyFixedCoordinateY, xycoords.BodyFixedCoordinateX,
           bins=[yedges, xedges])

In [ ]:
yindices, xindices = np.where(H>11)

In [ ]:
x1 = -288.402
y1 = -3.17798
x2 = -283.19
y2 = -6.21769
m = (y1-y2)/(x1-x2)
b = y1 - m*x1
b

In [ ]:
def f(x):
    return m*x + b
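
A quick sanity check that the separating line reproduces the two anchor points used to define it:


In [ ]:
f(x1) - y1, f(x2) - y2  # both differences should be ~0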

In [ ]:
f(-288)

In [ ]:
box1 = []
box2 = []
for xind, yind in zip(xindices, yindices):
    xlow = x_ret[xind]
    xhigh = x_ret[xind+1]
    xmean = (xlow+xhigh)/2
    ylow = y_ret[yind]
    yhigh = y_ret[yind+1]
    ymean = (ylow+yhigh)/2
    x_query = '@xlow < BodyFixedCoordinateX < @xhigh'
    y_query = '@ylow < BodyFixedCoordinateY < @yhigh'
    boxtmp = coords.query(f"{x_query} and {y_query}")
    if f(xmean) > ymean:  # if the separating line is above the current y-value
        box1.append(boxtmp)
    elif f(xmean) < ymean:
        if xmean > -281.5:
            box1.append(boxtmp)
        else:
            box2.append(boxtmp)
box1 = pd.concat(box1, ignore_index=True)
box2 = pd.concat(box2, ignore_index=True)

In [ ]:
import seaborn as sns
sns.set_context('poster')

In [ ]:
ax = box1.plot.scatter(x='BodyFixedCoordinateX', y='BodyFixedCoordinateY', color='green')
box2.plot.scatter(x='BodyFixedCoordinateX', y='BodyFixedCoordinateY', color='blue', ax=ax)
x = np.linspace(-290, -278, 100)
ax.plot(x, m*x+b, c='red')
ax.hlines(yedges, *xrange, lw=0.5)
ax.vlines(xedges, *yrange, lw=0.5)
ax.set_xlim(-293, -276)
ax.set_ylim(-11, 0)
ax.set_title("Ithaca P4 tile coords with > 11 entries in 0.25 sqkm cell")
fig = ax.get_figure()
fig.savefig("/Users/klay6683/Dropbox/p4_4Chase/ithaca/box_selection.png", dpi=200)

In [ ]:
box1.image_id.to_csv('box1_image_ids.csv', index=False)
box2.image_id.to_csv('box2_image_ids.csv', index=False)

In [ ]:

Metadata


In [ ]:
meta = pd.read_hdf('/Users/klay6683/Dropbox/SternchenAndMe/python_stuff/hirise_rdr_index.hdf')

In [ ]:
colorimgs = meta[meta.PRODUCT_ID.str.endswith('_COLOR')]

In [ ]:
coords = coords.merge(colorimgs, right_on='OBSERVATION_ID', left_on='image_name')

In [ ]:
coords.SOLAR_LONGITUDE.max()

In [ ]:
coords.columns

In [ ]:
coords.groupby('image_name').IMAGE_LINES.mean()
