In [8]:
import os
import glob
import numpy as np
import pandas as pd

In [2]:
os.getenv('datadir')


Out[2]:
'/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF'

In [3]:
datadir = os.getenv('datadir')

In [13]:
fields = glob.glob(datadir+'/field*')

In [15]:
field = fields[0]

In [20]:
fieldid = int(field.split('_')[-1])

In [21]:
fieldid


Out[21]:
1309
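
As an aside, the same parsing can be applied to every field directory at once. A hedged sketch, assuming all directories matched by 'field*' follow the fieldid_<number> naming seen above:

In [ ]:
# Sketch: map each field directory to its integer field id
# (assumes every matched directory name ends in '_<fieldid>')
fieldids = {f: int(f.split('_')[-1]) for f in fields}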

In [26]:
healpixels = glob.glob(field + '/*.dat')

In [28]:
df = pd.read_csv(healpixels[0], skiprows=2, delimiter=r"\s+", names=['time', 'filters'])

In [29]:
df


Out[29]:
time filters
0 89299 6
1 607963 6
2 879799 1
3 1212853 5
4 1214435 5
5 1644286 5
6 1645914 5
7 1739917 6
8 1742349 6
9 1817151 6
10 1818007 6
11 1820606 6
12 1827937 6
13 2086216 5
14 2087218 5
15 2087684 5
16 2248873 5
17 2250526 5
18 2335275 5
19 2336863 5
20 2768159 5
21 5014827 6
22 5015906 6
23 5016214 6
24 5017292 6
25 22317839 5
26 23695461 1
27 23695888 1
28 23953713 1
29 23956549 1
... ... ...
4764 249009717 2
4765 249009753 2
4766 249009789 2
4767 249009825 2
4768 249009861 2
4769 249009897 2
4770 249009933 2
4771 249009969 2
4772 249268583 2
4773 249268619 2
4774 249268655 2
4775 249268691 2
4776 249268727 2
4777 249268763 2
4778 249268799 2
4779 249268835 2
4780 249268871 2
4781 249268907 2
4782 250392511 2
4783 250392547 2
4784 250392583 2
4785 250392619 2
4786 250392655 2
4787 250392691 2
4788 250392727 2
4789 250392763 2
4790 250392799 2
4791 250392835 2
4792 250651856 2
4793 250651892 2

4794 rows × 2 columns


In [27]:
healpixels


Out[27]:
['/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/144205.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/144717.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/144718.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/145229.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/145230.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/145231.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/145741.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/145742.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/145743.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/145744.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146252.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146253.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146254.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146255.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146764.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146765.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146766.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/146767.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147275.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147276.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147277.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147278.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147787.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147788.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147789.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/147790.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/148299.dat',
 '/Users/rbiswas/data/LSST/RutgersFullSkyPixels/fullskypix/OpSim2.168_DithDDF/fieldid_1309/148300.dat']
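
Each .dat file holds the visit times and filter indices for one healpixel. A hedged sketch for reading all of them into a dict keyed by pixel id, reusing the header-skipping and column names from the single-file read above:

In [ ]:
# Sketch: read every healpixel file in this field into a DataFrame,
# keyed by the pixel id taken from the file name (e.g. 144205.dat -> 144205)
pixelData = {}
for fname in healpixels:
    pixid = int(os.path.splitext(os.path.basename(fname))[0])
    pixelData[pixid] = pd.read_csv(fname, skiprows=2, delimiter=r"\s+",
                                   names=['time', 'filters'])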

In [ ]:
# `field` is a directory (e.g. .../fieldid_1309), not a single file, so it
# cannot be read directly; point read_csv at one of the .dat files inside it.
df = pd.read_csv(healpixels[0], skiprows=2, delimiter=r"\s+", names=['time', 'filters'])

In [ ]:
opsimData = '/Users/rbiswas/data/LSST/OpSimData/output_opsim2_168.dat'

In [30]:
import sqlalchemy

In [32]:
engine = sqlalchemy.create_engine('sqlite:////Users/rbiswas/data/LSST/OpSimData/enigma_1189_sqlite.db')

In [33]:
summary = pd.read_sql_table('Summary', engine)

In [40]:
proposal = pd.read_sql_table('Proposal', engine)
seeing = pd.read_sql_table('Seeing', engine)
Field = pd.read_sql_table('Field', engine)

In [34]:
!pwd


/Users/rbiswas/doc/projects/LSST/CadenceRutgers

In [35]:
store = pd.HDFStore('storage.h5')

In [41]:
proposal.to_hdf('storage.h5', 'proposal')
seeing.to_hdf('storage.h5', 'seeing')
Field.to_hdf('storage.h5', 'field')


/usr/local/manual/anaconda/lib/python2.7/site-packages/pandas/io/pytables.py:2558: PerformanceWarning: 
your performance may suffer as PyTables will pickle object types that it cannot
map directly to c-types [inferred_type->unicode,key->block1_values] [items->[u'propConf', u'propName', u'objectHost']]

  warnings.warn(ws, PerformanceWarning)

In [36]:
summary.to_hdf('storage.h5', 'summary')


/usr/local/manual/anaconda/lib/python2.7/site-packages/pandas/io/pytables.py:2558: PerformanceWarning: 
your performance may suffer as PyTables will pickle object types that it cannot
map directly to c-types [inferred_type->unicode,key->axis0] [items->None]

  warnings.warn(ws, PerformanceWarning)
/usr/local/manual/anaconda/lib/python2.7/site-packages/pandas/io/pytables.py:2558: PerformanceWarning: 
your performance may suffer as PyTables will pickle object types that it cannot
map directly to c-types [inferred_type->unicode,key->block0_items] [items->None]

  warnings.warn(ws, PerformanceWarning)
/usr/local/manual/anaconda/lib/python2.7/site-packages/pandas/io/pytables.py:2558: PerformanceWarning: 
your performance may suffer as PyTables will pickle object types that it cannot
map directly to c-types [inferred_type->unicode,key->block1_items] [items->None]

  warnings.warn(ws, PerformanceWarning)
/usr/local/manual/anaconda/lib/python2.7/site-packages/pandas/io/pytables.py:2558: PerformanceWarning: 
your performance may suffer as PyTables will pickle object types that it cannot
map directly to c-types [inferred_type->unicode,key->block2_values] [items->[u'filter']]

  warnings.warn(ws, PerformanceWarning)
/usr/local/manual/anaconda/lib/python2.7/site-packages/pandas/io/pytables.py:2558: PerformanceWarning: 
your performance may suffer as PyTables will pickle object types that it cannot
map directly to c-types [inferred_type->unicode,key->block2_items] [items->None]

  warnings.warn(ws, PerformanceWarning)
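
These PerformanceWarnings arise because some column labels and string columns (e.g. filter, propName) are unicode objects that PyTables can only store by pickling. A hedged workaround, assuming those columns hold plain strings, is to cast them to byte strings before writing (Python 2):

In [ ]:
# Sketch: cast unicode column labels and the 'filter' column to plain strings
# so PyTables does not need to pickle them (column names taken from the
# warnings above)
summary.columns = [str(c) for c in summary.columns]
summary['filter'] = summary['filter'].astype(str)
summary.to_hdf('storage.h5', 'summary')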

In [42]:
!du -h storage.h5


885M	storage.h5

In [43]:
print store


<class 'pandas.io.pytables.HDFStore'>
File path: storage.h5
/field               frame        (shape->[5292,2])   
/proposal            frame        (shape->[5,2])      
/seeing              frame        (shape->[210384,2]) 
/summary             frame        (shape->[2473105,3])
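
The stored tables can be pulled back out of the HDF5 file later without touching the sqlite database again. A minimal sketch:

In [ ]:
# Sketch: reload a stored table from the HDF5 file
summary2 = pd.read_hdf('storage.h5', 'summary')
# or, through the open store object:
# summary2 = store['summary']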

In [45]:
from sqlalchemy import inspect
inspector = inspect(engine)

for table_name in inspector.get_table_names():
    print table_name
    # for column in inspector.get_columns(table_name):
    #     print("Column: %s" % column['name'])


Cloud
Config
Config_File
Field
Log
MissedHistory
ObsHistory
ObsHistory_Proposal
Proposal
Proposal_Field
Seeing
SeqHistory
SeqHistory_MissedHistory
SeqHistory_ObsHistory
Session
SlewActivities
SlewHistory
SlewMaxSpeeds
SlewState
Summary
TimeHistory
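
The commented-out loop above would list the columns of every table; a hedged sketch of the same thing restricted to the Summary table:

In [ ]:
# Sketch: list the columns of a single table with the same inspector
for column in inspector.get_columns('Summary'):
    print column['name']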

In [38]:
summary.to_sql('summaryTable.sql')


---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-38-694436f3d11f> in <module>()
----> 1 summary.to_sql('summaryTable.sql')

TypeError: to_sql() takes at least 3 arguments (2 given)

In [39]:
help(summary.to_sql)


Help on method to_sql in module pandas.core.generic:

to_sql(self, name, con, flavor='sqlite', schema=None, if_exists='fail', index=True, index_label=None, chunksize=None, dtype=None) method of pandas.core.frame.DataFrame instance
    Write records stored in a DataFrame to a SQL database.
    
    Parameters
    ----------
    name : string
        Name of SQL table
    con : SQLAlchemy engine or DBAPI2 connection (legacy mode)
        Using SQLAlchemy makes it possible to use any DB supported by that
        library.
        If a DBAPI2 object, only sqlite3 is supported.
    flavor : {'sqlite', 'mysql'}, default 'sqlite'
        The flavor of SQL to use. Ignored when using SQLAlchemy engine.
        'mysql' is deprecated and will be removed in future versions, but it
        will be further supported through SQLAlchemy engines.
    schema : string, default None
        Specify the schema (if database flavor supports this). If None, use
        default schema.
    if_exists : {'fail', 'replace', 'append'}, default 'fail'
        - fail: If table exists, do nothing.
        - replace: If table exists, drop it, recreate it, and insert data.
        - append: If table exists, insert data. Create if does not exist.
    index : boolean, default True
        Write DataFrame index as a column.
    index_label : string or sequence, default None
        Column label for index column(s). If None is given (default) and
        `index` is True, then the index names are used.
        A sequence should be given if the DataFrame uses MultiIndex.
    chunksize : int, default None
        If not None, then rows will be written in batches of this size at a
        time.  If None, all rows will be written at once.
    dtype : dict of column name to SQL type, default None
        Optional specifying the datatype for columns. The SQL type should
        be a SQLAlchemy type, or a string for sqlite3 fallback connection.


In [ ]: