In [1]:
import datetime
import json
import os
from time import sleep

import pandas as pd
import requests
In [266]:
# Good to know..
# Status Description
# 200 “OK”: nonspecific success
# 201 “Created”: successful resource creation
# 202 “Accepted”: successful start of an asynchronous action
# 204 “No Content”: success without any content
# 400 “Bad Request”: nonspecific failure
# 401 “Unauthorized”: failed authentication or authorization
# 404 “Not Found”: resource not found at the target URI
# 500 “Internal Server Error”: API malfunction
In [2]:
# Credentials for the PAIRS API.
# SECURITY FIX: live credentials (IBM id, password, client id/secret) were
# hardcoded here -- they would leak through version control and shared
# notebook outputs. They are now read from the environment instead; export
# PAIRS_IBM_ID, PAIRS_IBM_PASSWORD, PAIRS_CLIENT_ID and PAIRS_CLIENT_SECRET
# before running the notebook.
myIbmId = os.environ.get('PAIRS_IBM_ID', '')
myIbmIdPassword = os.environ.get('PAIRS_IBM_PASSWORD', '')
myClientId = os.environ.get('PAIRS_CLIENT_ID', '')
myClientSecret = os.environ.get('PAIRS_CLIENT_SECRET', '')
In [3]:
# Base URL and headers for every PAIRS API call.
# BUG FIX: the userinfo part of a URL must be percent-encoded. The password
# used in this notebook contains an '@', which -- embedded raw -- makes URL
# parsers treat everything after the first '@' as part of the password/host
# boundary incorrectly. Encode both id and password before building the URL.
try:
    from urllib.parse import quote  # Python 3
except ImportError:
    from urllib import quote        # Python 2
apiBase = ('https://' + quote(myIbmId, safe='') + ':'
           + quote(myIbmIdPassword, safe='') + '@api.ibm.com/pairs/run/v1/')
apiHeaders = {
    'Content-Type': 'application/json',  # Required for POST and PUT operations
    'x-ibm-client-id': myClientId,
    'x-ibm-client-secret': myClientSecret
}
In [4]:
# Ping the "noop" endpoint: a side-effect-free call used to verify the API
# credentials and confirm the service is reachable.
apiName = 'noop'
response = requests.get(apiBase + apiName, headers=apiHeaders)
print('HTTP status code: ' + str(response.status_code))
# 204 "No Content" is the expected success; any other status carries a body.
if response.status_code != 204:
    print(response.json())
In [284]:
# Remove a previously submitted query by id.
queryId = '41f3afebb1799875da9ba07a3d5d840a'
apiName = 'queries/' + queryId
response = requests.delete(apiBase + apiName, headers=apiHeaders)
print('HTTP status code: ' + str(response.status_code))
# A successful delete returns 204; otherwise show the error payload.
if response.status_code != 204:
    print(response.json())
In [555]:
# Load and shuffle sample geocodes, keeping 10 unique (lat, long) points.
# BUG FIX: this cell used `pd` although `import pandas as pd` only appeared
# much later in the notebook (cell In[591]) -- it failed on a fresh
# Restart & Run All. pandas is now imported in the top import cell.
# TODO: replace the absolute local path with a configurable data directory.
df_geocodes = pd.read_csv('/Users/charilaostsarouchas/Downloads/extended.csv')
df_geocodes = df_geocodes.sample(frac=1)  # shuffle rows
# The query API expects string coordinates keyed 'lat'/'long' (source column
# in the CSV is named 'lon').
df_geocodes['lat'] = df_geocodes['lat'].astype('str')
df_geocodes['long'] = df_geocodes['lon'].astype('str')
points_examples = df_geocodes[['lat', 'long']].drop_duplicates()[:10].to_dict('records')
In [568]:
# Preview the sampled points (a list of {'lat': ..., 'long': ...} dicts).
points_examples
Out[568]:
In [586]:
# Submit a point query for data layer 25001 over Feb 2016 at three points.
apiName = 'queries'
apiBody = {
    "name": "point query",
    "spatial": {
        "type": "point",
        "point": [{"lat": "39.6247215271", "long": "-0.595210015774"},
                  {"lat": "42.3436415", "long": "-7.8674242"},
                  {"lat": "37.2575874", "long": "-6.9484945"}]
    },
    "datalayer": [
        {"id": "25001",
         "temporal": [["2016-02-01", "2016-03-01"]]}
    ]
}
query_response = requests.post(apiBase + apiName, headers=apiHeaders, data=json.dumps(apiBody))
# FIX: Python-2-only `print` statement replaced with a call form that runs on
# both Python 2 and 3, matching the style of the earlier cells.
print('the query id is: ' + str(query_response.json()['query'][0]['id']))
In [588]:
# Poll the submitted query's status, then look up its backing job's status.
# FIX: Python-2-only `print` statements replaced with portable print() calls.
query_id = query_response.json()['query'][0]['id']
print('Query ID: ' + str(query_id))
apiName = 'queries/' + query_id + '?done=true'
query_response = requests.get(apiBase + apiName, headers=apiHeaders)
print('Query Status Response: ' + str(query_response.status_code))
# Each query is executed by a job; fetch that job's status as well.
job_id = query_response.json()['query'][0]['job']['id']
print('Job ID: ' + str(job_id))
apiName = 'jobs/' + job_id
job_response = requests.get(apiBase + apiName, headers=apiHeaders)
print('Job Status Response: ' + str(job_response.status_code))
In [589]:
# If the query is done, download the result payload referenced by the job.
# FIXES: (1) the original issued the identical GET request a second time inside
# the `if` block -- one request is enough; (2) `.json()` was re-parsed on every
# access -- parse once; (3) Python-2-only prints made portable.
query_id = query_response.json()['query'][0]['id']
print(query_id)
apiName = 'queries/' + query_id + '?done=true'
response = requests.get(apiBase + apiName, headers=apiHeaders)
print('Query status: ' + str(response.status_code))
result = None
if response.status_code == 200:
    query_doc = response.json()['query'][0]
    job_status = query_doc['job']['status']
    print('Query - Job status: ' + str(job_status))
    # NOTE(review): the job status is compared against the *string* '201';
    # presumably the API reports status as a string code -- confirm.
    if job_status == '201':
        downloadUrl = query_doc['result']['refs'][0]['url']
        result = requests.get(downloadUrl)
In [590]:
# Number of data points returned by the query.
# FIX: Python-2-only `print` statement made portable.
print('N datapoints: ' + str(len(json.loads(result.content)['data'])))
In [591]:
import datetime
import pandas as pd

# Parse the downloaded payload into a tidy DataFrame.
# PERFORMANCE FIX: the original called json.loads(result.content) five times
# on every loop iteration, re-parsing the whole payload each time; it is now
# parsed exactly once.
payload = json.loads(result.content)['data']
records = []
for entry in payload:
    ts = entry['timestamp']  # epoch time; divided by 1000 below, so presumably milliseconds
    dt = datetime.datetime.fromtimestamp(int(ts / 1000)).strftime('%Y-%m-%d %H:%M:%S')
    date = dt[:10]  # 'YYYY-MM-DD' prefix of the formatted datetime
    temp_C = entry['value'] - 273.15  # values arrive in Kelvin; convert to Celsius
    records.append([entry['lat'], entry['lon'], ts, dt, date, temp_C])
df = pd.DataFrame(data=records, columns=['lat', 'lon', 'timestamp', 'datetime', 'date', 'temperature'])
In [592]:
# First rows of the parsed result.
df.head()
Out[592]:
In [593]:
import os
import numpy as np
import folium
from folium.plugins import HeatMap
In [594]:
# Temperature sum per (lat, lon) pair -- quick sanity check of the grouping keys.
df.groupby(['lat','lon'])['temperature'].agg('sum')
Out[594]:
In [595]:
# Static folium heat map of the temperature points, saved to HTML and shown inline.
# NOTE(review): hardcoded absolute output path -- parameterize before sharing.
m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=7)
data = df[['lat', 'lon', 'temperature']].values.tolist()
HeatMap(data).add_to(m)
m.save(os.path.join('/Users/charilaostsarouchas/Downloads/', 'Heatmap.html'))
m
Out[595]:
In [531]:
import folium
import folium.plugins as plugins
import numpy as np
# Synthetic drifting point cloud for the HeatMapWithTime demo: 100 base points
# scattered around (48, 5), each frame shifted by i times a small random offset.
np.random.seed(3141592)  # fixed seed -> reproducible demo data
initial_data = np.random.normal(size=(100, 2)) + np.array([[48.0, 5.0]])
move_data = 0.01 * np.random.normal(size=(100, 2))
# One frame per step: frame i = base points displaced by i * offset.
data = [(initial_data + i * move_data).tolist() for i in range(100)]
In [ ]:
# FIXME: incomplete scratch cell -- df.groupby('') raises a KeyError at run
# time; remove this cell or finish the intended grouping.
df.groupby('')
In [538]:
# Animated heat map of the temperature points over time.
# NOTE(review): HeatMapWithTime presumably expects one list of points per time
# step (a list of lists of [lat, lon, weight]), but this passes a single flat
# list of rows -- confirm the intended frame structure before trusting output.
data = df.sort_values(by='timestamp')[['lat', 'lon', 'temperature']].values.tolist()
m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=6)
hm = plugins.HeatMapWithTime(data)
hm.add_to(m)
m
Out[538]:
In [ ]:
In [ ]:
In [ ]:
# curl \
# -X POST "https://ibmpairs-mvp2-api.mybluemix.net/queryjobs?spatialLimitsType=area" \
# -H "Content-Type: application/json" \
# -H "X-Access-Token: 4usk9y29lzsj2vuwb0rwvad1rg9ogdmbijcp4b6a" \
# -d '{"name":"Agriculture in Midwestern U.S.","description":"We compare the US Department of Agriculture'\''s crop data with PRISM (historical weather observation in USA) temperature data during the beginning of June 2017. Since the crop data is associated with January 1st, the query uses different temporal intervals for each layer.","isPublic":true,"layers":[{"dataLayerId":"48522","startingDateTime":"2017-01-01T00:00:00.000Z","endingDateTime":"2017-01-01T00:00:00.000Z","aggregationOperator":"None","dimensions":[]},{"dataLayerId":"92","startingDateTime":"2017-06-01T00:00:00.000Z","endingDateTime":"2017-06-07T00:00:00.000Z","aggregationOperator":"None","dimensions":[]}],"filters":[],"space":{"northEastCoordinate":{"latitude":42.391008609205045,"longitude":-88.83544921875},"southWestCoordinate":{"latitude":36.13787471840729,"longitude":-100.61279296875}}}'
# curl "https://ibmpairs-mvp2-api.mybluemix.net/queryjobs/00000000-2137-8c84-0000-00002155c70a" \
# -H "X-Access-Token: 4usk9y29lzsj2vuwb0rwvad1rg9ogdmbijcp4b6a"
In [55]:
# Fetch a query job from the mvp2 API.
# FIXES: (1) the original shelled out to curl via os.popen -- replaced with the
# requests client already used throughout this notebook (no shell involved);
# (2) SECURITY: the access token was inlined in the command string -- it is now
# read from the environment.
import os

accessToken = os.environ.get('PAIRS_MVP2_TOKEN', '')  # never hardcode tokens
jobUrl = 'https://ibmpairs-mvp2-api.mybluemix.net/queryjobs/00000000-0cd6-1fa3-0000-000002f71050'
result = requests.get(jobUrl, headers={'X-Access-Token': accessToken}).text
# move str result to dictionary
d = json.loads(result)
In [56]:
# move d to json
# Pretty-print the job document for inspection.
print(json.dumps(d, indent=2))
In [57]:
# Top-level keys of the job document.
# FIX: Python-2-only `print` statement; the parenthesized call prints the same
# list on both Python 2 and Python 3.
print([str(i) for i in d.keys()])
In [61]:
# Inspect the raw query definition embedded in the job document.
d['query']
Out[61]:
In [30]:
import pandas as pd
# Tabular view of the job document.
# NOTE(review): pd.DataFrame(d) requires the dict's values to be alignable
# (e.g. equal-length list-likes) -- confirm the job document shape supports this.
pd.DataFrame(d)
Out[30]:
In [ ]: