In [1]:
import pandas as pd, json, numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
Load airports of each country
In [2]:
L=json.loads(open('../json/L.json','r').read())
M=json.loads(open('../json/M.json','r').read())
N=json.loads(open('../json/N.json','r').read())
In [3]:
import requests
In [4]:
AP={}
#map each country's airport names to their airportia links
for c in M:
    if c not in AP: AP[c]={}
    for i in range(len(L[c])):
        AP[c][N[c][i]]=L[c][i]
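The cell above assumes M lists the countries, and that for each country N holds the airport names while L holds the matching airportia links, index by index. A toy illustration of those shapes (made-up values, not real data):
In [ ]:
#toy illustration of the assumed L/M/N shapes (made-up values, not real data)
L_demo={'Romania':['cluj-napoca-airport-clj/','bucharest-otopeni-airport-otp/']}
N_demo={'Romania':['Cluj-Napoca CLJ','Bucharest OTP']}
M_demo=['Romania']
AP_demo={c:{N_demo[c][i]:L_demo[c][i] for i in range(len(L_demo[c]))} for c in M_demo}
print AP_demo['Romania']['Cluj-Napoca CLJ']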
Record schedules for 2 weeks, then augment the counts with weekly flight numbers. Seasonal and seasonal charter flights count as once per week for 3 months, i.e. 12/52 per week. TGM is handled separately, since its history lies in the past.
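As a sketch of the weighting rule above (a hypothetical helper, not used by the scrape itself): regular services keep their weekly frequency, while seasonal and seasonal charter services are weighted as one flight per week over roughly 3 months of the year, i.e. 12/52 ≈ 0.23 per week.
In [ ]:
#hypothetical weighting helper illustrating the counting rule above
def weekly_weight(flight_type,weekly_frequency=1):
    if flight_type in ['seasonal','seasonal charter']:
        #once a week for ~3 months out of 52 weeks
        return 12.0/52
    return float(weekly_frequency)
print weekly_weight('scheduled',3), weekly_weight('seasonal charter')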
Parse arrivals
In [5]:
baseurl='https://www.airportia.com/'
import requests, urllib2
In [6]:
def urlgetter(url):
    #open a session so that cookies set by the first request are reused
    s = requests.Session()
    cookiesopen = s.get(url)
    cookies=str(s.cookies)
    fcookies=[[k[:k.find('=')],k[k.find('=')+1:k.find(' for ')]] for k in cookies[cookies.find('Cookie '):].split('Cookie ')[1:]]
    #push token
    opener = urllib2.build_opener()
    for k in fcookies:
        opener.addheaders.append(('Cookie', k[0]+'='+k[1]))
    #read html with the session, which already carries the cookies
    return s.get(url).content
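A quick sanity check of urlgetter, assuming the AP mapping built above is non-empty: fetch the first airport's page with a live request and count the HTML tables pandas finds on it.
In [ ]:
#usage sketch: fetch one airport page and count the tables on it
#(pd.read_html raises ValueError if the page has no tables)
c=list(AP)[0]
i=list(AP[c])[0]
html=urlgetter(baseurl+AP[c][i])
print i, len(pd.read_html(html))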
In [7]:
SD={}
In [ ]:
for c in AP:
    #country not parsed yet
    if c not in SD:
        print c
        airportialinks=AP[c]
        sch={}
        #all airports of the country
        for i in airportialinks:
            print i,
            if i not in sch: sch[i]={}
            try:
                url=baseurl+airportialinks[i]
                m=urlgetter(url)
                #if there are flights at all
                if len(pd.read_html(m))>0:
                    good=False
                    w=pd.read_html(m)[0]
                    #if there are daily flights
                    if 'Date' not in w.columns: good=True
                    #if there are flights at least in the last two months
                    elif 5>int(w.loc[0]['Date'][-1])>2: good=True
                    if good:
                        try:
                            #then parse March 4-31 = 4 weeks
                            for d in range(4,32):
                                #date not parsed yet
                                if d not in sch[i]:
                                    try:
                                        url=baseurl+airportialinks[i]+'arrivals/201703'+str(d)
                                        m=urlgetter(url)
                                        sch[i][d]=pd.read_html(m)[0]
                                    except: print '--W-,'+str(d)
                        except: print '--2W-X,',
                    else: print '--2W-S,',
                else: print '--NO-S,',
            except: print '--NO-X,',
        print
        SD[c]=sch
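Before saving, a quick summary of what was collected, assuming the loop above finished for at least one country: the number of parsed days per airport.
In [ ]:
#summary sketch: number of parsed days per airport for each country
for c in SD:
    print c, {i: len(SD[c][i]) for i in SD[c]}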
Save
In [ ]:
cnc_path='../../universal/countries/'
cnc=pd.read_excel(cnc_path+'cnc.xlsx').set_index('Name')
In [ ]:
for c in SD:
    sch=SD[c]
    mdf=pd.DataFrame()
    for i in sch:
        for d in sch[i]:
            #keep only the 2nd and 3rd columns (these include the 'From' field)
            df=sch[i][d].drop(sch[i][d].columns[3:],axis=1).drop(sch[i][d].columns[0],axis=1)
            df['To']=i
            df['Date']=d
            mdf=pd.concat([mdf,df])
    #unify Frankfurt Hahn with Frankfurt
    mdf=mdf.replace('Hahn','Frankfurt')
    mdf=mdf.replace('Hahn HHN','Frankfurt HHN')
    #split the 'From' field into city name and airport code
    mdf['City']=[i[:i.rfind(' ')] for i in mdf['From']]
    mdf['Airport']=[i[i.rfind(' ')+1:] for i in mdf['From']]
    open('../countries/'+cnc.T.loc[c]['ISO2'].lower()+"/json/mdf_arrv.json",'w').write(json.dumps(mdf.reset_index().to_json()))
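Reading one of the saved files back, following the same double encoding used when writing (json.dumps of a to_json string), as a quick check:
In [ ]:
#read-back sketch for one country: undo json.dumps, then parse the to_json string
c=list(SD)[0]
path='../countries/'+cnc.T.loc[c]['ISO2'].lower()+'/json/mdf_arrv.json'
mdf_back=pd.read_json(json.loads(open(path,'r').read()))
mdf_back.head()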