In [149]:
from __future__ import division

import ast
import os
import time as tm

import googlemaps
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pyomo.environ import *
from pyomo.opt import SolverFactory
from sqlalchemy import create_engine, text
In [169]:
def connection(user,passwd,dbname):
str1 = ('postgresql+pg8000://' + user +':' + passw + '@switch-db2.erg.berkeley.edu:5432/'
+ dbname + '?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory')
engine = create_engine(str1)
return engine
In [170]:
user = 'jdlara'
passw = 'Amadeus-2010'
dbname = 'apl_cec'
engine_apl = connection(user,passw,dbname)
In [4]:
# SQL: pair every biosum condition with every PG&E feeder within 160 km.
# Geometries are reprojected to EPSG:5070 (equal-area CONUS, metres) so
# ST_Distance / ST_DWithin operate in metres; /1000 converts to km.
query = (
    'SELECT biosum.latlon.biosum_cond_id as biosum_id, '
    '"PGE".feeders.feeder_no as feeder_id, '
    'biosum.latlon.lon as biosum_lon, biosum.latlon.lat as biosum_lat, '
    '"PGE".feeders.lon as feeders_lon, "PGE".feeders.lat as feeders_lat, '
    'ST_Distance(st_transform(biosum.latlon.geom,5070),'
    'st_transform("PGE".feeders.geom,5070))/1000 as distance_km '
    'FROM biosum.latlon, "PGE".feeders '
    'WHERE ST_DWithin(st_transform(biosum.latlon.geom,5070),'
    'st_transform("PGE".feeders.geom,5070), 160000);'
)
In [5]:
# Pull every biosum-condition / feeder pair within 160 km into a DataFrame.
df_routes = pd.read_sql_query(query,engine_apl)
In [6]:
# Build "lat,lon" strings (the format the Google Maps Distance Matrix API
# expects as origin/destination) for each biosum condition and each feeder,
# and add placeholder columns that the collection loop below fills in.
biosum_coord = df_routes.biosum_lat.astype(str).str.cat(df_routes.biosum_lon.astype(str), sep=',')
biosum_coord = biosum_coord.values.tolist()
df_routes['biosum_coord_str'] = biosum_coord
feeder_coord = df_routes.feeders_lat.astype(str).str.cat(df_routes.feeders_lon.astype(str), sep=',')
feeder_coord = feeder_coord.values.tolist()
df_routes['feeder_coord_str'] = feeder_coord
# Placeholder values only; overwritten once real route data is fetched.
df_routes['distance_google'] = pd.Series(np.random.randn(len(df_routes['biosum_id'])), index=df_routes.index)
df_routes['time_google'] = pd.Series(np.random.randn(len(df_routes['biosum_id'])), index=df_routes.index)
# Processing state: 0 = not yet queried, 1 = route fetched, -1 = lookup failed
# (values assigned by the collection loop below).
# BUG FIX: np.int was a deprecated alias for the builtin int and was removed
# in NumPy 1.24 — use int directly.
df_routes['data_state'] = pd.Series(np.zeros(len(df_routes['biosum_id'])), index=df_routes.index, dtype=int)
In [7]:
# Drop the raw coordinate columns (the "lat,lon" strings carry the same
# information) and persist the frame to the biosum schema.
# FIX: pass axis as a keyword — the positional form of DataFrame.drop's
# axis argument is deprecated and removed in pandas 2.0.
df_routes = df_routes.drop(['biosum_lon', 'biosum_lat', 'feeders_lon', 'feeders_lat'], axis=1)
df_routes.to_sql('biosum_pge', engine_apl, schema='biosum', if_exists='replace', chunksize=100)
In [154]:
# Google Maps client plus bookkeeping containers.
# SECURITY: the API key was previously hardcoded in this cell and is leaked in
# the notebook history — revoke it and supply a replacement through the
# GOOGLE_MAPS_API_KEY environment variable.
gmaps = googlemaps.Client(key=os.environ['GOOGLE_MAPS_API_KEY'])
# NOTE(review): these containers are never populated anywhere in the visible
# notebook — presumably leftovers from an earlier version; candidates for
# removal after confirming nothing else depends on them.
distance_table = {}
time_table = {}
biomass_list = []
substation_list = []
avoid_table = {}
fail_table = {}
In [167]:
# Restore progress from the local CSV checkpoint written by the collection
# loop; 'Unnamed: 0' is the index column that to_csv persisted.
# FIX: pass axis as a keyword — the positional form of DataFrame.drop's
# axis argument is deprecated and removed in pandas 2.0.
df_routes = pd.read_csv('~/Desktop/backup.csv')
df_routes = df_routes.drop(['Unnamed: 0'], axis=1)
In [171]:
# Push the restored frame back to Postgres, replacing the server-side copy.
df_routes.to_sql('biosum_pge', engine_apl, schema='biosum', if_exists='replace',chunksize=30000)
In [172]:
# Main collection loop: for each not-yet-processed biosum/feeder pair past the
# resume point, query the Google Distance Matrix API for road distance and
# travel time, record them in df_routes, and checkpoint every 1000 requests.
count = 0
for index, row in df_routes.iterrows():
    if index > 1048500:  # resume point from the previous (interrupted) run
        sql_cmd = engine_apl.connect()
        # FIX: parameterized query instead of building SQL by string
        # concatenation (injection-safe, quoting-safe).
        query = text('select data_state from biosum.biosum_pge where '
                     'biosum_pge.biosum_coord_str = :b and '
                     'biosum_pge.feeder_coord_str = :f')
        state = sql_cmd.execute(query, {'b': row['biosum_coord_str'],
                                        'f': row['feeder_coord_str']})
        # FIXME(review): the DB result above is immediately discarded — the
        # in-memory value is used instead, so the round-trip is dead weight.
        # Kept to preserve the original behavior; confirm intent and remove.
        state = row['data_state']
        sql_cmd.close()
        if state == 0:
            tm.sleep(0.81)  # throttle to stay under the API rate limit
            matrx_distance = gmaps.distance_matrix(row['biosum_coord_str'], row['feeder_coord_str'],
                                                   mode="driving", departure_time="now",
                                                   traffic_model="pessimistic")
            count = count + 1
            status = matrx_distance['rows'][0]['elements'][0]['status']
            if status != 'OK':
                # No route available: mark the row failed and log the pair.
                # FIX: DataFrame.set_value was deprecated/removed; use .at.
                df_routes.at[index, 'data_state'] = -1
                f = open('fail_table.dat', 'a')
                f.write(('Route data unavailable for ' + str(row['biosum_id']) + "," + str(row['feeder_id']) + "\n"))
                f.close()
            else:
                try:
                    df_routes.at[index, 'data_state'] = 1
                    # metres -> km
                    df_routes.at[index, 'distance_google'] = 0.001 * matrx_distance['rows'][0]['elements'][0]['distance']['value']
                    # seconds -> hours
                    df_routes.at[index, 'time_google'] = (1 / 3600) * matrx_distance['rows'][0]['elements'][0]['duration_in_traffic']['value']
                except Exception:
                    # Response lacked 'distance'/'duration_in_traffic' keys;
                    # flag the row as failed rather than silently skipping.
                    df_routes.at[index, 'data_state'] = -1
        else:
            continue
        if count == 1000:
            # Periodic checkpoint so a crash does not lose collected results.
            df_routes.to_csv('~/Desktop/backup.csv')
            print(count)
            count = 0
    else:
        continue
# Final persist of the full result set.
df_routes.to_sql('biosum_pge', engine_apl, schema='biosum', if_exists='replace', chunksize=10000)
In [85]:
# NOTE(review): leftover scratch cell — re-issues a single request using the
# `row` variable left over from the loop above. It depends on hidden kernel
# state and should be deleted from the final notebook.
matrx_distance = (gmaps.distance_matrix(row['biosum_coord_str'], row['feeder_coord_str'],
mode="driving", departure_time="now", traffic_model="pessimistic"))
In [165]:
# Manual checkpoint of the in-memory results to the local backup CSV.
df_routes.to_csv('~/Desktop/backup.csv')
In [157]:
# NOTE(review): scratch cell inspecting the last API response; relies on loop
# leftovers — remove from the final notebook.
matrx_distance
Out[157]:
In [173]:
# NOTE(review): scratch cell showing where the loop stopped (last `index`
# value); relies on loop leftovers — remove from the final notebook.
index
Out[173]:
In [ ]: