In [13]:
import sqlite3
import os.path
import csv
import json
import numpy as np
import requests
from bs4 import BeautifulSoup
db_name = 'sample.db'
In [14]:
if os.path.isfile(db_name):
    os.remove(db_name)
conn = sqlite3.connect(db_name)
c = conn.cursor()
# create ooi instruments
c.execute('''CREATE TABLE ooi_instruments
(
ref_code text NOT NULL,
array_code text NOT NULL,
site_prefix text NOT NULL,
site_suffix text NOT NULL,
node_type_code text NOT NULL,
node_site_seq text NOT NULL,
port_number text NOT NULL,
instrument_class text NOT NULL,
instrument_series text NOT NULL,
instrument_seq text NOT NULL,
latitude real,
longitude real
)''')
Out[14]:
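As an optional sanity check (not part of the original session), PRAGMA table_info lists the columns that were just created, so the schema can be confirmed before any rows are inserted:

for col in c.execute("PRAGMA table_info(ooi_instruments)"):
    print col[1], col[2]   # column name and declared type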
In [15]:
lat_lon_dict = {}
with open('platform_ports.csv', 'rb') as csvfile:
    csvreader = csv.reader(csvfile, delimiter=';', quotechar='|')
    for row in csvreader:
        # example reference designator: CE01ISSM-MF004-01-DOSTAD999
        array = row[0]
        site_prefix = "0" + row[1]
        site_suffix = row[2]
        node_type_code = row[3]
        try:
            lat = float(row[5])
            lon = float(row[6])
        except (ValueError, IndexError):
            # sentinel for rows with missing or non-numeric coordinates
            lat = -999.0
            lon = -999.0
        loc = "".join([array, site_prefix, site_suffix])
        if loc not in lat_lon_dict:
            lat_lon_dict[loc] = {"lat": lat, "lon": lon}
        print loc, lat, lon
In [16]:
with open('ref_list.json') as json_data:
    data = json.load(json_data)
for r in data:
    # slice the reference designator into its components,
    # skipping the '-' separators at positions 8, 14, and 17
    array_code = r[0:2]
    site_prefix = r[2:4]
    site_suffix = r[4:8]
    node_type_code = r[9:10]
    node_site_seq = r[10:14]
    port_number = r[15:17]
    instrument_class = r[18:23]
    instrument_series = r[23:24]
    instrument_seq = r[24:27]
    loc = "".join([array_code, site_prefix, site_suffix])
    lat = lat_lon_dict[loc]["lat"]
    lon = lat_lon_dict[loc]["lon"]
    print r
    # parameterized insert: sqlite3 handles quoting of each value
    c.execute("INSERT INTO ooi_instruments VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
              (r, array_code, site_prefix, site_suffix, node_type_code,
               node_site_seq, port_number, instrument_class,
               instrument_series, instrument_seq, lat, lon))
conn.commit()
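To make the fixed-position slicing concrete, here is how the example designator from the CSV comment above breaks apart (note that these slices split the five-character node code 'MF004' into 'M' and 'F004'):

r = 'CE01ISSM-MF004-01-DOSTAD999'
print r[0:2], r[2:4], r[4:8]        # CE 01 ISSM  (array, site prefix, site suffix)
print r[9:10], r[10:14], r[15:17]   # M F004 01   (node type, node site seq, port)
print r[18:23], r[23:24], r[24:27]  # DOSTA D 999 (class, series, instrument seq)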
In [17]:
avail_plats = []
try:
    req = requests.get("http://ray-srv.ooi.rutgers.edu:12570/sensor/user/inv/null")
    soup = BeautifulSoup(req.content)
    aa = soup.find_all('a')
    for a in aa:
        if a.text != "All Sensors":
            avail_plats.append(str(a.text))
except Exception:
    # fall back to the hard-coded list below if the service is unreachable
    pass
if len(avail_plats) < 1:
    avail_plats = ['CP05MOAS-GL001-00-ENG000000', 'CP05MOAS-GL002-00-CTDGV0000', 'CP05MOAS-GL002-00-DOSTA0000', 'CP05MOAS-GL002-00-ENG000000', 'CP05MOAS-GL001-00-DOSTA0000', 'CP05MOAS-GL001-00-PARAD0000', 'CP05MOAS-GL002-00-PARAD0000', 'CP05MOAS-GL001-00-FLORT0000', 'CP05MOAS-GL001-00-CTDGV0000']
print avail_plats
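If the endpoint is slow rather than down, the request above can hang; one option (not in the original notebook) is to bound it and treat HTTP errors like a connection failure so the fallback list is still used:

req = requests.get("http://ray-srv.ooi.rutgers.edu:12570/sensor/user/inv/null", timeout=10)
req.raise_for_status()   # raise on 4xx/5xx so the except branch handles it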
In [18]:
c.execute("SELECT * FROM ooi_instruments")
rows = c.fetchall()
data_dict = {}
print "rows",len(rows)
st_count = 0
for row in rows:
ref = row[0]
array = row[1]
site_id = row[2] #site prefix
site = row[3] #site suffix
node = row[4] #note type
port = row[4]
lat = row[-2]
lon = row[-1]
node_seq = row[-1] #node seq?
if array not in data_dict:
data_dict[array] = {}
if site not in data_dict[array]:
if ref in avail_plats:
status = "on"
else:
status = "na"
data_dict[array][site] = {"status":status,"instruments":[],"lat":lat,"lon":lon,"site_id":site_id}
st_count +=1
if ref not in data_dict[array][site]["instruments"]:
data_dict[array][site]["instruments"].append(ref)
with open("stations.json", "w") as outfile:
json.dump(data_dict, outfile, indent=4)
print "platforms",st_count
In [19]:
conn.close()
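If the finished database needs to be queried again later, a minimal sketch (table and column names as defined above; the array code 'CP' is just an example filter):

conn = sqlite3.connect(db_name)
c = conn.cursor()
c.execute("SELECT ref_code, latitude, longitude FROM ooi_instruments WHERE array_code = ?", ("CP",))
for row in c.fetchall():
    print row
conn.close()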