In [69]:
import pandas as pd
import bs4 as bs
In [ ]:
# Scrape every table from the Antarctic research-stations article (list of DataFrames).
dfs=pd.read_html('https://en.wikipedia.org/wiki/Research_stations_in_Antarctica#List_of_research_stations')
In [ ]:
# Scrape the field-camps article as well.
# NOTE(review): dfr is never used later in this notebook -- candidate for removal.
dfr=pd.read_html('https://en.wikipedia.org/wiki/Antarctic_field_camps')
In [ ]:
# Table 1 holds the station list; drop its first row (it contains the header values).
df=dfs[1][1:]
In [ ]:
# Promote the first row's values to column names.
df.columns=dfs[1].loc[0].values
In [ ]:
# Persist the cleaned station table for later use.
df.to_excel('bases.xlsx')
In [ ]:
import requests
In [ ]:
# Re-fetch the stations page and parse the second <table> manually with
# BeautifulSoup so that each cell keeps its text PLUS (when present) the first
# link target, glued together as "text#/wiki/Href" for later splitting.
url='https://en.wikipedia.org/wiki/Research_stations_in_Antarctica'
f=requests.get(url).content
soup = bs.BeautifulSoup(f, 'lxml')
# Table index is positional and fragile -- TODO confirm if the page layout changes.
parsed_table = soup.find_all('table')[1]
# Use .get('href', '') instead of ['href']: an <a> without an href attribute
# passed the td.find('a') check but then raised KeyError on the subscript.
data = [[''.join(td.strings)+'#'+td.a.get('href', '') if td.find('a') else
         ''.join(td.strings)
         for td in row.find_all('td')]
        for row in parsed_table.find_all('tr')]
headers=[''.join(row.strings)
         for row in parsed_table.find_all('th')]
# First data row is the header row; skip it.
df = pd.DataFrame(data[1:], columns=headers)
In [ ]:
# Flatten the scraped wikitable into a list of per-station dicts.
# Column access is positional (.iloc) because the scraped header strings vary.
stations=[]
# df.iterrows() replaces df.T.iteritems(), which was removed in pandas 2.0.
for _, row in df.iterrows():
    helper={}

    # Column 0: "Name#/wiki/Link" glued together during scraping.
    parts = row.iloc[0].split('#')
    # Strip footnote markers ('[...]') and newlines from the display name.
    helper['name'] = parts[0].split('[')[0].replace('\n', ' ')
    helper['link'] = 'https://en.wikipedia.org' + parts[1]

    # Column 2: station type; normalise the many summer-only wordings.
    stype = row.iloc[2].replace('\n', ' ')
    if 'ummer since' in stype: stype = 'Summer'
    stype = stype.split('[')[0]
    if 'emporary summer' in stype: stype = 'Summer'
    if 'intermittently Summer' in stype: stype = 'Summer'
    helper['type'] = stype

    # Column 3: operating country/countries, pipe-separated.
    country = row.iloc[3].split('#')[0].replace('\n', ' |').replace(']', '').replace('| |', '|')[1:]
    if '' == country: country = 'Greenpeace'  # the one row with an empty country cell
    helper['country'] = country

    # Column 4: opening year (first whitespace token).
    opened = row.iloc[4].replace('\n', ' ').split(' ')[0]
    if 'eteo' in opened: opened = '1958'  # special-cased row whose first token is not a year
    helper['opened'] = opened

    # Column 5: operating program.
    helper['program'] = row.iloc[5].split('#')[0].replace('\n', ' | ').replace('| and |', '|').split('[')[0].replace('.', '')

    # Column 6: location description.
    helper['location'] = row.iloc[6].split('#')[0].replace('\n', ', ').replace('| and |', '|').split('[')[0].replace('.', '')

    # Column 7: status text -> active flag plus closing year ('9999' == still open).
    status = row.iloc[7].replace('\n', ' ')
    if ' ' in status:
        if 'Active' in status: status = 'Active'
        elif 'Relocated to Union Glacier' in status: status = '2014'
        elif 'Unmanned activity' in status: status = 'Active'
        elif 'Abandoned and lost' in status: status = '1999'
        elif 'Dismantled 1992' in status: status = '1992'
        elif 'Temporary abandoned since March 2017' in status: status = '2017'
        elif 'Reopened 23 November 2017' in status: status = 'Active'
        # (a duplicate, unreachable 'Abandoned and lost' branch was removed here)
        else: status = status.split(' ')[1]
    if status == 'Active':
        helper['active'] = True
        helper['closed'] = '9999'
    else:
        helper['active'] = False
        helper['closed'] = status
    if status == 'Closed':
        # NOTE(review): marking 'Closed' rows as active=True looks inverted -- confirm intent.
        helper['active'] = True
        helper['closed'] = '9999'

    # Column 8: coordinates; keep the decimal "lat; lon" chunk after the 2nd '/'.
    coords = row.iloc[8].replace('\n', ', ').split('/')[2].split('(')[0].split('#')[0].split(',')[0].split('Coor')[0].split(u'\ufeff')[0].split(';')
    helper['latitude'] = coords[0][1:]
    # ' 0' -> ' 0.001' keeps a zero longitude from being parsed as exactly 0 downstream.
    helper['longitude'] = coords[1].replace(' 0', ' 0.001')[1:]
    stations.append(helper)
In [ ]:
# Inspect the parsed station records.
stations
In [141]:
# Persist the parsed station records.
pd.DataFrame(stations).to_excel('stations.xlsx')
In [ ]:
import folium
from IPython.display import HTML
def display(m, height=300):
    """Embed a folium map in the notebook as an inline iframe.

    Parameters
    ----------
    m : folium.Map
        Map instance. NOTE(review): ``_build_map``/``.HTML`` belong to the
        pre-0.2 folium API -- confirm the installed folium version supports them.
    height : int, optional
        Iframe height in pixels (default 300).

    Returns
    -------
    IPython.display.HTML
        The embeddable iframe.
    """
    m._build_map()
    # Escape double quotes so the document can live inside srcdoc="..." --
    # the original replace('"', '"') was a no-op (mojibake of '&quot;').
    srcdoc = m.HTML.replace('"', '&quot;')
    embed = HTML('<iframe srcdoc="{0}" '
                 'style="width: 100%; height: {1}px; '
                 'border: none"></iframe>'.format(srcdoc, height))
    return embed
In [2]:
# Quick sanity-check map centered on San Francisco.
# Renamed `map` -> `fmap`: the original name shadowed the `map` builtin.
fmap = folium.Map(location=[37.76, -122.45])
# NOTE(review): simple_marker() was removed in modern folium --
# use folium.Marker([37.76, -122.45]).add_to(fmap) there.
fmap.simple_marker([37.76, -122.45])
display(fmap)
In [4]:
# Earth Engine + folium setup.
import ee
import folium
# NOTE: requires prior `earthengine authenticate`; raises without credentials.
ee.Initialize()
print(folium.__version__)
In [4]:
from googleearthplot.googleearthplot import googleearthplot
In [5]:
# Plot one point (Cape Town area) and export it as a KML file.
lon=18.333868  # degrees
lat=-34.038274  # degrees
point_plot = googleearthplot()
point_plot.PlotPoints(lat, lon, "point")
point_plot.GenerateKMLFile(filepath="sample9.kml")
In [6]:
# Plot a short track of points and export to KML.
gep10 = googleearthplot()
lat = [ -77.6192, -77.6195, -77.6198, -77.6208, -77.6216]
# Fixed: lon originally had SIX entries (a trailing duplicate 43.1719) versus
# five lats; zip() silently dropped the extra value, so output is unchanged.
lon = [43.1725, 43.1728, 43.173, 43.1725, 43.1719]
for (ilat, ilon) in zip(lat, lon):
    gep10.PlotPoints(ilat, ilon, "point")
gep10.GenerateKMLFile(filepath="sample10.kml")
In [20]:
import cesiumpy
In [42]:
# Render a Cesium globe widget and fly the camera to (lon 0, lat -90) at
# 7,000 km altitude -- i.e. looking down on Antarctica.
v = cesiumpy.CesiumWidget()
v.camera.flyTo((0, -90, 7000000))
v
Out[42]:
In [44]:
import pandas as pd
In [45]:
# Scrape the list of US national parks (first table on the page, header in row 0).
url = "https://en.wikipedia.org/wiki/List_of_national_parks_of_the_United_States"
df = pd.read_html(url, header=0)[0]
df.head()
Out[45]:
In [46]:
# Parse "State 36°36′N 118°33′W ..." Location strings into numeric lat/lon.
# The pattern is now a raw string: '\D'/'\d' inside a plain literal are
# invalid escape sequences (SyntaxWarning / future SyntaxError on modern Python).
locations = df['Location'].str.extract(r'(\D+) (\d+°\d+′[NS]) (\d+°\d+′[WE]).*')
locations.columns = ['State', 'lat', 'lon']
# Degrees°minutes -> pseudo-decimal "deg.min" (crude, but monotonic enough for plotting).
locations['lat'] = locations['lat'].str.replace(u'°', '.')
locations['lon'] = locations['lon'].str.replace(u'°', '.')
# Southern/western hemispheres get a leading minus sign.
locations.loc[locations['lat'].str.endswith('S'), 'lat'] = '-' + locations['lat']
locations.loc[locations['lon'].str.endswith('W'), 'lon'] = '-' + locations['lon']
# Drop the trailing minute mark + hemisphere letter (e.g. "′N").
locations['lat'] = locations['lat'].str.slice_replace(start=-2)
locations['lon'] = locations['lon'].str.slice_replace(start=-2)
locations[['lat', 'lon']] = locations[['lat', 'lon']].astype(float)
locations.head()
Out[46]:
In [47]:
# Append the parsed State/lat/lon columns to the parks frame.
df = pd.concat([df, locations], axis=1)
In [48]:
import cesiumpy
In [57]:
# One aqua cylinder per park; cylinder height encodes the 2017 visitor count
# (position z = half the length so the base sits on the ground).
v = cesiumpy.Viewer()
for _, park in df.iterrows():
    visitors = park[u'Recreation visitors (2017)[6]']
    column = cesiumpy.Cylinder(
        position=[park['lon'], park['lat'], visitors / 2.],
        length=float(visitors),
        topRadius=1e4,
        bottomRadius=10e4,
        material='aqua',
    )
    v.entities.add(column)
v
Out[57]:
In [58]:
# Same visitor-count cylinders as above, re-rendered in royal blue.
v = cesiumpy.Viewer()
for _, park in df.iterrows():
    visitors = park[u'Recreation visitors (2017)[6]']
    column = cesiumpy.Cylinder(
        position=[park['lon'], park['lat'], visitors / 2.],
        length=float(visitors),
        topRadius=1e4,
        bottomRadius=10e4,
        material='royalBlue',
    )
    v.entities.add(column)
v
Out[58]:
In [62]:
import numpy as np
In [63]:
# One point per park, sized by sqrt(visitors) so marker AREA tracks the count.
v = cesiumpy.Viewer()
for _, park in df.iterrows():
    visitors = park[u'Recreation visitors (2017)[6]']
    marker = cesiumpy.Point(
        position=[park['lon'], park['lat'], 0],
        pixelSize=np.sqrt(visitors / 10000),
        color='blue',
    )
    v.entities.add(marker)
v
Out[63]:
In [65]:
# Drop one default pin billboard on each park location.
v = cesiumpy.Viewer()
pin = cesiumpy.Pin()
for i, row in df.iterrows():
    # Removed `l = row['Recreation visitors (2017)[6]']` -- it was computed
    # here but never used by the Billboard below.
    b = cesiumpy.Billboard(position=[row['lon'], row['lat'], 0], image=pin, scale=0.4)
    v.entities.add(b)
v
Out[65]:
In [66]:
# Quick scatter of three (lon, lat) pairs via the cesiumpy plot helper.
v = cesiumpy.Viewer()
v.plot.scatter([130, 140, 150], [30, 40, 50])
Out[66]:
In [67]:
# Red labelled pins at three (lon, lat) positions.
v = cesiumpy.Viewer()
v.plot.pin([130, 140, 150], [30, 40, 50], color=cesiumpy.color.RED, text=['!', '?', '!?'])
Out[67]: