In [1]:
# Reload when code changed:
%load_ext autoreload
%reload_ext autoreload
%autoreload 2
%pwd
import sys
import os
path = "../"
sys.path.append(path)
#os.path.abspath("../")
print(os.path.abspath(path))
In [2]:
import os
import core
import logging
import importlib

# Pick up any edits made to the local ``core`` package since the last run.
importlib.reload(core)
try:
    # Reset logging so reloaded modules do not keep stale handlers.
    logging.shutdown()
    importlib.reload(logging)
except Exception:
    # Best effort only: a failed logging reset should not stop the notebook.
    # (Was a bare ``except:``, which would also swallow KeyboardInterrupt.)
    pass

import pandas as pd
import numpy as np
import json
import time

from event_handler import EventHandler
from event_handler import get_list_from_interval

# Confirm which ``core`` is in use; show pandas version as the cell output.
print(core.__file__)
pd.__version__
Out[2]:
In [ ]:
In [3]:
# Test users and a workspace alias used throughout this notebook.
user_id_1 = 'user_1'
user_id_2 = 'user_2'
user_1_ws_1 = 'mw1'
print(path)
# Directory layout handed to every EventHandler below.
# NOTE(review): 'workspace_directory' is a hardcoded absolute Windows path,
# unlike the other entries which are relative to ``path`` — confirm this is
# intended before running on another machine.
paths = {'user_id': user_id_1,
'workspace_directory': 'D:/git/ekostat_calculator/workspaces',
'resource_directory': path + '/resources',
'log_directory': path + '/log',
'test_data_directory': path + '/test_data',
'temp_directory': path + '/temp',
'cache_directory': path + '/cache'}
# Create a handler and run its built-in timer self-test.
ekos = EventHandler(**paths)
ekos.test_timer()
In [4]:
# List the indicators that belong to the 'secchi' quality element.
ekos.mapping_objects['quality_element'].get_indicator_list_for_quality_element('secchi')
Out[4]:
In [5]:
def update_workspace_uuid_in_test_requests(workspace_alias='New test workspace'):
    """Resolve ``workspace_alias`` to its uuid and patch the saved test requests.

    Uses the notebook-global ``paths`` and ``user_id_1``.
    """
    handler = EventHandler(**paths)
    workspace_uuid = handler.get_unique_id_for_alias(workspace_alias=workspace_alias)
    if not workspace_uuid:
        print('No workspaces for user: {}'.format(user_id_1))
        return
    print('Updating user {} with uuid: {}'.format(user_id_1, workspace_uuid))
    print('-'*70)
    handler.update_workspace_uuid_in_test_requests(workspace_uuid)
def update_subset_uuid_in_test_requests(workspace_alias='New test workspace',
                                        subset_alias=False):
    """Resolve workspace and subset aliases to uuids and patch the test requests.

    Uses the notebook-global ``paths`` and ``user_id_1``.
    """
    handler = EventHandler(**paths)
    workspace_uuid = handler.get_unique_id_for_alias(workspace_alias=workspace_alias)
    if not workspace_uuid:
        print('No workspaces for user: {}'.format(user_id_1))
        return
    handler.load_workspace(workspace_uuid)
    subset_uuid = handler.get_unique_id_for_alias(
        workspace_alias=workspace_alias, subset_alias=subset_alias)
    print('Updating user {} with workspace_uuid {} and subset_uuid {}'.format(
        user_id_1, workspace_uuid, subset_uuid))
    print(workspace_uuid, subset_uuid)
    print('-'*70)
    handler.update_subset_uuid_in_test_requests(subset_uuid=subset_uuid)
def print_boolean_structure(workspace_uuid):
    """Print the boolean filter keys of the given workspace's index handler.

    Uses the notebook-global ``ekos`` handler.
    """
    workspace = ekos.get_workspace(unique_id=workspace_uuid)
    workspace.index_handler.print_boolean_keys()
In [6]:
# update_workspace_uuid_in_test_requests()
In [7]:
# Add a workspace via the saved test request and time the round trip.
start_time = time.time()
ekos = EventHandler(**paths)

request = ekos.test_requests['request_workspace_add_1']
response_workspace_add = ekos.request_workspace_add(request)
ekos.write_test_response('request_workspace_add_1', response_workspace_add)

# A second workspace can be added by repeating the three lines above with
# 'request_workspace_add_2'.

print('-'*50)
print('Time for request: {}'.format(time.time()-start_time))
In [8]:
# Patch the saved test requests with the uuid of the workspace added above.
update_workspace_uuid_in_test_requests()
In [9]:
# ekos = EventHandler(**paths)
# # When copying data the first time all sources has status=0, i.e. no data will be loaded.
# request = ekos.test_requests['request_workspace_import_default_data']
# response_import_data = ekos.request_workspace_import_default_data(request)
# ekos.write_test_response('request_workspace_import_default_data', response_import_data)
In [10]:
# Import data from SHARKweb into the workspace via the saved test request.
ekos = EventHandler(**paths)
request = ekos.test_requests['request_sharkweb_import']
response_sharkweb_import = ekos.request_sharkweb_import(request)
ekos.write_test_response('request_sharkweb_import', response_sharkweb_import)
In [11]:
# Inspect the handler's data_params (presumably the sharkweb import
# parameters populated by the previous request — verify against EventHandler).
ekos.data_params
Out[11]:
In [12]:
# Inspect the selection dicts built during the sharkweb import.
ekos.selection_dicts
Out[12]:
In [13]:
# ekos = EventHandler(**paths)
# ekos.mapping_objects['sharkweb_mapping'].df
In [23]:
ekos = EventHandler(**paths)

# Fetch the current list of data sources for the workspace.
request = ekos.test_requests['request_workspace_data_sources_list']
response = ekos.request_workspace_data_sources_list(request)
ekos.write_test_response('request_workspace_data_sources_list', response)

# The edit request reuses the list response, toggling the status flags.
# (Replaces four copy-pasted index assignments; the slice is also safe
# when fewer than four sources exist.)
request = response
for source in request['data_sources'][:4]:
    source['status'] = False
# request['data_sources'][4]['status'] = True

# Edit data source
response = ekos.request_workspace_data_sources_edit(request)
ekos.write_test_response('request_workspace_data_sources_edit', response)
In [ ]:
In [ ]:
In [ ]:
In [12]:
# Add a subset to the workspace via the saved test request.
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_add_1']
response_subset_add = ekos.request_subset_add(request)
ekos.write_test_response('request_subset_add_1', response_subset_add)
In [13]:
# Patch the saved test requests with the uuid of the 'mw_subset' subset.
update_subset_uuid_in_test_requests(subset_alias='mw_subset')
In [ ]:
In [ ]:
In [22]:
# Refresh the subset uuid in the saved requests, then fetch the subset's
# data filter and store the response for the test suite.
ekos = EventHandler(**paths)
update_subset_uuid_in_test_requests(subset_alias='mw_subset')
request = ekos.test_requests['request_subset_get_data_filter']
response_subset_get_data_filter = ekos.request_subset_get_data_filter(request)
ekos.write_test_response('request_subset_get_data_filter', response_subset_get_data_filter)
In [20]:
# import re
# string = """{
# "workspace_uuid": "52725df4-b4a0-431c-a186-5e542fc6a3a4",
# "data_sources": [
# {
# "status": true,
# "loaded": false,
# "filename": "physicalchemical_sharkweb_data_all_2013-2014_20180916.txt",
# "datatype": "physicalchemical"
# }
# ]
# }"""
# r = re.sub('"workspace_uuid": ".{36}"', '"workspace_uuid": "new"', string)
In [20]:
# Refresh the subset uuid in the saved requests, then apply the subset's
# data filter and store the response for the test suite.
ekos = EventHandler(**paths)
update_subset_uuid_in_test_requests(subset_alias='mw_subset')
request = ekos.test_requests['request_subset_set_data_filter']
response_subset_set_data_filter = ekos.request_subset_set_data_filter(request)
ekos.write_test_response('request_subset_set_data_filter', response_subset_set_data_filter)
In [ ]:
In [ ]:
In [27]:
# Fetch the subset's indicator settings and store the response.
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_get_indicator_settings']
# The commented lines below exercise the error path by corrupting the
# subset_uuid before sending the request.
# request = ekos.test_requests['request_subset_get_indicator_settings_no_areas']
# print(request['subset']['subset_uuid'])
# request['subset']['subset_uuid'] = 'fel'
# print(request['subset']['subset_uuid'])
response_subset_get_indicator_settings = ekos.request_subset_get_indicator_settings(request)
ekos.write_test_response('request_subset_get_indicator_settings', response_subset_get_indicator_settings)
In [ ]:
In [ ]:
In [22]:
# Apply indicator settings to the subset and store the response.
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_set_indicator_settings']
response_subset_set_indicator_settings = ekos.request_subset_set_indicator_settings(request)
ekos.write_test_response('request_subset_set_indicator_settings', response_subset_set_indicator_settings)
In [14]:
# Run the status calculation for the subset and store the response.
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_calculate_status']
response = ekos.request_subset_calculate_status(request)
ekos.write_test_response('request_subset_calculate_status', response)
In [ ]:
In [ ]:
In [21]:
# Fetch the aggregated workspace result and store the response.
ekos = EventHandler(**paths)
request = ekos.test_requests['request_workspace_result']
response_workspace_result = ekos.request_workspace_result(request)
ekos.write_test_response('request_workspace_result', response_workspace_result)
In [158]:
# Drill into one indicator's data in the result tree.
# NOTE(review): the subset and water-body uuids are hardcoded from a previous
# run — they must match the current workspace or this lookup raises KeyError.
response_workspace_result['subset']['a4e53080-2c68-40d5-957f-8cc4dbf77815']['result']['SE552170-130626']['result']['indicator_din_winter']['data']
Out[158]:
In [136]:
# Build a time-series dict for one indicator in one water body.
# NOTE(review): both uuids are hardcoded from a previous run — update them
# after recreating the workspace/subset.
workspace_uuid = 'fccc7645-8501-4541-975b-bdcfb40a5092'
subset_uuid = 'a4e53080-2c68-40d5-957f-8cc4dbf77815'
result = ekos.dict_data_timeseries(workspace_uuid=workspace_uuid,
subset_uuid=subset_uuid,
viss_eu_cd='SE575150-162700',
element_id='indicator_din_winter')
In [138]:
# Show the first dataset's x values and the shared y axis of the result.
print(result['datasets'][0]['x'])
print()
print(result['y'])
In [143]:
# Print the x values of every dataset in the result.
for dataset in result['datasets']:
    print(dataset['x'])
In [116]:
import datetime

# Build the first-of-month dates that fall inside the span of ``all_dates``.
first_date, last_date = all_dates[0], all_dates[-1]
month_starts = []
for year in range(first_date.year, last_date.year + 2):
    month_starts.extend(datetime.datetime(year, month, 1) for month in range(1, 13))
date_intervall = [d for d in month_starts if first_date <= d <= last_date]

# Merge the month starts into the observed dates, keeping them sorted/unique.
extended_dates = sorted(set(all_dates + date_intervall))

# x axis: month starts are labelled '%y-%b', plain sample dates get ''.
# y axis: one list per series, None where a series has no value for a date.
new_x = []
new_y = {series: [] for series in date_to_y}
for date in extended_dates:
    new_x.append(date.strftime('%y-%b') if date in date_intervall else '')
    for series in new_y:
        new_y[series].append(date_to_y[series].get(date, None))
In [117]:
# Inspect the rebuilt y-series for key 0.
new_y[0]
Out[117]:
In [110]:
import datetime

# First day of every month from January 2011 through December 2013.
year_list = range(2011, 2013+1)
month_list = range(1, 13)
date_list = [datetime.datetime(year, month, 1)
             for year in year_list
             for month in month_list]
In [111]:
# Display the generated month-start dates.
date_list
Out[111]:
In [104]:
# NOTE(review): leftover debugging cell — ``a`` is not defined anywhere in
# this notebook, so this fails on a fresh Restart & Run All.
a
Out[104]:
In [81]:
# NOTE(review): leftover debugging cell — ``y`` and ``i`` are not defined in
# this notebook, so this fails on a fresh Restart & Run All.
y[3][i]
Out[81]:
In [29]:
# Sorted sample dates from the 'SDATE' column.
# NOTE(review): ``df`` is not defined in this notebook — fails on a fresh
# kernel run; presumably a frame loaded in an earlier session.
sorted(pd.to_datetime(df['SDATE']))
Out[29]:
In [ ]: