In [1]:
# Core imports: holoviews for plotting, qa_explorer functors for computing
# per-source quantities, and pandas/fastparquet/dask for parquet-backed data.
# NOTE(review): hv, pd, fastparquet, and dd are not used anywhere in this
# visible chunk — confirm they are needed before keeping them.
import holoviews as hv
from explorer.functors import Column, CustomFunctor, StarGalaxyLabeller, CompositeFunctor, RAColumn, DecColumn
import pandas as pd
import fastparquet
import dask.dataframe as dd
In [2]:
# Connect to a distributed Dask cluster.  The commented-out LocalCluster
# alternative below is kept for running without a pre-started scheduler.
from distributed import Client, LocalCluster
# cluster = LocalCluster(n_workers=32)
# client = Client(cluster)
In [3]:
# NOTE(review): hardcoded absolute path — assumes a Dask scheduler was
# started out-of-band and wrote this scheduler file; confirm before
# re-running on another machine, or fall back to the LocalCluster above.
client = Client(scheduler_file='/scratch/tmorton/dask/scheduler.json')
In [4]:
from explorer.catalog import ParquetCatalog
import glob
# Build a catalog from the first 32 forced-photometry parquet files.
# NOTE(review): hardcoded absolute glob path and magic slice [:32] —
# presumably sized to match the 32-worker cluster commented out in cell 2;
# confirm, and consider hoisting both into named config constants.
files = glob.glob('/scratch/tmorton/qa_explorer_data/forced_big_fake*')[:32]
cat = ParquetCatalog(files)
In [5]:
# Functors used in the cells below.
# NOTE(review): CustomFunctor and Column are already imported in cell 1;
# the duplicates are harmless but could be consolidated into one import cell.
from explorer.functors import (Mag, CustomFunctor, DeconvolvedMoments, Column,
SdssTraceSize, PsfSdssTraceSizeDiff, HsmTraceSize,
PsfHsmTraceSizeDiff)
# Example functor dict for a multi-quantity computation, kept for reference
# (not executed; none of these names are used below except via cells 6-8):
# fdict = {'x': Mag('base_PsfFlux'), 'y1': DeconvolvedMoments(),
# 'y2': CustomFunctor('mag(modelfit_CModel) - mag(base_PsfFlux)'),
# 'ra': RAColumn(), 'dec':DecColumn()}
In [6]:
f = DeconvolvedMoments()
f.columns
Out[6]:
In [7]:
# Functors for the two plotted quantities: PSF magnitude on x, and the
# CModel-minus-PSF magnitude difference on y (presumably an extendedness
# proxy — confirm against qa_explorer conventions).  These names are
# referenced again in cell 8, so do not rename them.
xfn = CustomFunctor('mag(base_PsfFlux)')
yfn = CustomFunctor('mag(modelfit_CModel) - mag(base_PsfFlux)')
In [8]:
# Apply each functor to the catalog and persist the results on the cluster;
# %time reports the wall time of each persist call.
%time x = client.persist(xfn(cat))
%time y = client.persist(yfn(cat))
In [13]:
# Inspect the type of the persisted results.
# NOTE(review): execution counts (13 then 11, after 8) are non-sequential —
# these cells were run out of order.  Restart the kernel and Run All before
# sharing so the notebook is reproducible top-to-bottom.
type(x)
Out[13]:
In [11]:
type(y)
Out[11]:
In [ ]: