In [1]:
# Reload when code changed:
%load_ext autoreload
%reload_ext autoreload
%autoreload 2
%pwd
import sys
import os
path = "../"
sys.path.append(path)
#os.path.abspath("../")
print(os.path.abspath(path))


D:\git\ekostat_calculator

In [2]:
import os 
import core
import logging
import importlib
importlib.reload(core) 
try:
    logging.shutdown()
    importlib.reload(logging)
except:
    pass
import pandas as pd
import numpy as np
import json
import time

from event_handler import EventHandler
from event_handler import get_list_from_interval
print(core.__file__)
pd.__version__


..\core\__init__.py
Out[2]:
'0.20.3'

In [ ]:


In [3]:
user_id_1 = 'user_1'
user_id_2 = 'user_2'
user_1_ws_1 = 'mw1'
print(path)
paths = {'user_id': user_id_1, 
         'workspace_directory': 'D:/git/ekostat_calculator/workspaces', 
         'resource_directory': path + '/resources', 
         'log_directory': path + '/log', 
         'test_data_directory': path + '/test_data', 
         'temp_directory': path + '/temp', 
         'cache_directory': path + '/cache'}

ekos = EventHandler(**paths)
ekos.test_timer()


2018-09-20 19:02:51,219	logger.py	85	add_log	DEBUG	
2018-09-20 19:02:51,223	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-20 19:02:51,227	logger.py	87	add_log	DEBUG	### Log added for log_id "event_handler" at locaton: ..\log\main_event_handler.log
2018-09-20 19:02:51,231	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-20 19:02:51,235	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:02:51,239	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
../
====================================================================================================
event_handler
..//log
main
----------------------------------------------------------------------------------------------------
2018-09-20 19:02:52,077	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.839047908782959
2018-09-20 19:02:52,110	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.8930511474609375
2018-09-20 19:02:52,115	event_handler.py	50	f	DEBUG	Start: "test_timer"
2018-09-20 19:02:52,991	event_handler.py	54	f	DEBUG	Stop: "test_timer". Time for running method was 0.8720498085021973
test

In [4]:
ekos.mapping_objects['quality_element'].get_indicator_list_for_quality_element('secchi')


Out[4]:
['indicator_secchi']

In [5]:
def update_workspace_uuid_in_test_requests(workspace_alias='New test workspace'):
    ekos = EventHandler(**paths)

    workspace_uuid = ekos.get_unique_id_for_alias(workspace_alias=workspace_alias)
   
    if workspace_uuid: 
        print('Updating user {} with uuid: {}'.format(user_id_1, workspace_uuid))
        print('-'*70)
        ekos.update_workspace_uuid_in_test_requests(workspace_uuid)
    else:
        print('No workspaces for user: {}'.format(user_id_1))
        

        
def update_subset_uuid_in_test_requests(workspace_alias='New test workspace', 
                                        subset_alias=False):
    ekos = EventHandler(**paths)

    workspace_uuid = ekos.get_unique_id_for_alias(workspace_alias=workspace_alias)
    
    if workspace_uuid:              
        ekos.load_workspace(workspace_uuid)
        subset_uuid = ekos.get_unique_id_for_alias(workspace_alias=workspace_alias, subset_alias=subset_alias)
        print('Updating user {} with workspace_uuid {} and subset_uuid {}'.format(user_id_1, workspace_uuid, subset_uuid))
        print(workspace_uuid, subset_uuid)
        print('-'*70)
        ekos.update_subset_uuid_in_test_requests(subset_uuid=subset_uuid)
    else:
        print('No workspaces for user: {}'.format(user_id_1))
        

        
def print_boolean_structure(workspace_uuid): 
    workspace_object = ekos.get_workspace(unique_id=workspace_uuid) 
    workspace_object.index_handler.print_boolean_keys()

In [6]:
# update_workspace_uuid_in_test_requests()

Request workspace add


In [7]:
t0 = time.time()
ekos = EventHandler(**paths)
request = ekos.test_requests['request_workspace_add_1']
response_workspace_add = ekos.request_workspace_add(request)
ekos.write_test_response('request_workspace_add_1', response_workspace_add)

# request = ekos.test_requests['request_workspace_add_2']
# response_workspace_add = ekos.request_workspace_add(request)
# ekos.write_test_response('request_workspace_add_2', response_workspace_add)
print('-'*50)
print('Time for request: {}'.format(time.time()-t0))


2018-09-20 19:02:54,811	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:02:54,814	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:02:55,637	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8230469226837158
2018-09-20 19:02:55,671	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.8610491752624512
2018-09-20 19:02:55,684	event_handler.py	50	f	DEBUG	Start: "request_workspace_add"
2018-09-20 19:02:55,689	event_handler.py	4257	request_workspace_add	DEBUG	Start: request_workspace_add
2018-09-20 19:02:55,713	event_handler.py	422	copy_workspace	DEBUG	Trying to copy workspace "default_workspace". Copy has alias "New test workspace"
2018-09-20 19:02:55,844	event_handler.py	2984	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
¤ New test workspace
2018-09-20 19:02:55,951	logger.py	85	add_log	DEBUG	
2018-09-20 19:02:55,956	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-20 19:02:55,961	logger.py	87	add_log	DEBUG	### Log added for log_id "default_subset" at locaton: D:\git\ekostat_calculator\workspaces\a377ee26-cd2d-411b-999c-073cd7a3dbd4\log\subset_default_subset.log
2018-09-20 19:02:55,967	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
====================================================================================================
default_subset
D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/log
subset
----------------------------------------------------------------------------------------------------
2018-09-20 19:02:56,333	logger.py	85	add_log	DEBUG	
2018-09-20 19:02:56,339	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-20 19:02:56,343	logger.py	87	add_log	DEBUG	### Log added for log_id "a377ee26-cd2d-411b-999c-073cd7a3dbd4" at locaton: D:\git\ekostat_calculator\workspaces\a377ee26-cd2d-411b-999c-073cd7a3dbd4\log\workspace_a377ee26-cd2d-411b-999c-073cd7a3dbd4.log
2018-09-20 19:02:56,349	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-20 19:02:56,357	event_handler.py	3002	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
2018-09-20 19:02:56,362	event_handler.py	54	f	DEBUG	Stop: "request_workspace_add". Time for running method was 0.6730387210845947
====================================================================================================
a377ee26-cd2d-411b-999c-073cd7a3dbd4
D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/log
workspace
----------------------------------------------------------------------------------------------------
--------------------------------------------------
Time for request: 1.561089277267456

Update workspace uuid in test requests


In [8]:
update_workspace_uuid_in_test_requests()


2018-09-20 19:02:56,883	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:02:56,886	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:02:57,796	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.9100518226623535
2018-09-20 19:02:57,854	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.9720554351806641
Updating user user_1 with uuid: a377ee26-cd2d-411b-999c-073cd7a3dbd4
----------------------------------------------------------------------

Request workspace import default data


In [9]:
# ekos = EventHandler(**paths)
# # When copying data the first time all sources has status=0, i.e. no data will be loaded. 
# request = ekos.test_requests['request_workspace_import_default_data']
# response_import_data = ekos.request_workspace_import_default_data(request)
# ekos.write_test_response('request_workspace_import_default_data', response_import_data)

Import data from sharkweb


In [10]:
ekos = EventHandler(**paths)
request = ekos.test_requests['request_sharkweb_import']
response_sharkweb_import = ekos.request_sharkweb_import(request)
ekos.write_test_response('request_sharkweb_import', response_sharkweb_import)


2018-09-20 19:02:59,042	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:02:59,045	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:02:59,952	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.9070520401000977
2018-09-20 19:02:59,991	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.950054407119751
2018-09-20 19:02:59,996	event_handler.py	50	f	DEBUG	Start: "request_sharkweb_import"
2018-09-20 19:03:00,054	event_handler.py	2984	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
=== a377ee26-cd2d-411b-999c-073cd7a3dbd4
{'year_from': 2007, 'year_to': 2012, 'datatype': 'Phytoplankton', 'water_district_list': [], 'type_area_list': [], 'svar_sea_area_list': ['Krabbfjärden', 'Gullmarn centralbassäng', 'Gussöfjärden', 'Örefjärden', 'Gaviksfjärden'], 'encoding': 'utf8', 'lineend': 'windows', 'delimiters': 'point-tab', 'sample_table_view': 'sample_col_phytoplankton', 'parameter': 'Biovolume concentration', 'headerlang': 'internal'}
2018-09-20 19:03:00,511	event_handler.py	3002	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
2018-09-20 19:03:13,587	event_handler.py	2995	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
DEBUG: Header:  application/octet-stream; charset=utf-8
DEBUG: Encoding:  utf-8
datatype Phytoplankton
Done
=== a377ee26-cd2d-411b-999c-073cd7a3dbd4
{'year_from': 2007, 'year_to': 2012, 'datatype': 'Chlorophyll', 'water_district_list': [], 'type_area_list': [], 'svar_sea_area_list': ['Krabbfjärden', 'Gullmarn centralbassäng', 'Gussöfjärden', 'Örefjärden', 'Gaviksfjärden'], 'encoding': 'utf8', 'lineend': 'windows', 'delimiters': 'point-tab', 'sample_table_view': 'sample_col_chlorophyll', 'parameter': 'Chlorophyll-a', 'headerlang': 'internal'}
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
2018-09-20 19:03:14,809	event_handler.py	2995	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
DEBUG: Status:  200
DEBUG: Header:  application/octet-stream; charset=utf-8
DEBUG: Encoding:  utf-8
datatype Chlorophyll
Done
=== a377ee26-cd2d-411b-999c-073cd7a3dbd4
{'year_from': 2007, 'year_to': 2012, 'datatype': 'Zoobenthos', 'water_district_list': [], 'type_area_list': [], 'svar_sea_area_list': ['Krabbfjärden', 'Gullmarn centralbassäng', 'Gussöfjärden', 'Örefjärden', 'Gaviksfjärden'], 'encoding': 'utf8', 'lineend': 'windows', 'delimiters': 'point-tab', 'sample_table_view': 'sample_col_zoobenthos', 'parameter': 'BQIm', 'headerlang': 'internal'}
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
2018-09-20 19:03:16,175	event_handler.py	2995	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
DEBUG: Status:  200
DEBUG: Header:  application/octet-stream; charset=utf-8
DEBUG: Encoding:  utf-8
datatype Zoobenthos
Done
=== a377ee26-cd2d-411b-999c-073cd7a3dbd4
{'year_from': 2007, 'year_to': 2012, 'datatype': 'Physical and Chemical', 'water_district_list': [], 'type_area_list': [], 'svar_sea_area_list': ['Krabbfjärden', 'Gullmarn centralbassäng', 'Gussöfjärden', 'Örefjärden', 'Gaviksfjärden'], 'encoding': 'utf8', 'lineend': 'windows', 'delimiters': 'point-tab', 'sample_table_view': 'sample_col_physicalchemical_columnparams', 'parameter': None, 'headerlang': 'internal'}
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
DEBUG: Status:  200
DEBUG: Header:  application/json; charset=utf-8
DEBUG: Encoding:  utf-8
2018-09-20 19:03:20,216	event_handler.py	54	f	DEBUG	Stop: "request_sharkweb_import". Time for running method was 20.215156316757202
DEBUG: Status:  200
DEBUG: Header:  application/octet-stream; charset=utf-8
DEBUG: Encoding:  utf-8
datatype Physical and Chemical
Done

In [11]:
ekos.data_params


Out[11]:
{'adv_check_status': '',
 'adv_checked_by_list': '',
 'adv_dataset_name': '',
 'adv_dataset_name_option': '',
 'adv_datatype_list': '',
 'adv_deliverer_list': '',
 'adv_max_depth': '',
 'adv_min_depth': '',
 'adv_orderer_list': '',
 'adv_parameter_list': '',
 'adv_project_list': '',
 'adv_quality_flag_list': '',
 'adv_red_list_category': '',
 'bounds': '',
 'county_list': '',
 'datatype': 'Physical and Chemical',
 'delimiters': 'point-tab',
 'economic_zone': '',
 'encoding': 'utf8',
 'headerlang': 'internal',
 'helcom_ospar': '',
 'lineend': 'windows',
 'month_list': '',
 'municipality_list': '',
 'parameter': None,
 'sample_table_view': 'sample_col_physicalchemical_columnparams',
 'sea_basin': '',
 'station_name': '',
 'station_name_option': '',
 'svar_sea_area_list': '',
 'taxon_name': '',
 'taxon_name_option': '',
 'type_area_list': '',
 'water_category': '',
 'water_district_list': 'S%C3%B6dra%20%C3%96stersj%C3%B6ns%20vattendistrikt',
 'year_from': 2011,
 'year_to': 2016}

In [12]:
ekos.selection_dicts


Out[12]:
{'Physical and Chemical': {'datatype': 'Physical and Chemical',
  'delimiters': 'point-tab',
  'encoding': 'utf8',
  'headerlang': 'internal',
  'lineend': 'windows',
  'parameter': None,
  'sample_table_view': 'sample_col_physicalchemical_columnparams',
  'svar_sea_area_list': [],
  'type_area_list': [],
  'water_district_list': ['Södra Östersjöns vattendistrikt'],
  'year_from': 2011,
  'year_to': 2016}}

In [13]:
# ekos = EventHandler(**paths)
# ekos.mapping_objects['sharkweb_mapping'].df

Request data source list/edit


In [23]:
ekos = EventHandler(**paths)
request = ekos.test_requests['request_workspace_data_sources_list']
response = ekos.request_workspace_data_sources_list(request) 
ekos.write_test_response('request_workspace_data_sources_list', response) 

request = response
request['data_sources'][0]['status'] = False 
request['data_sources'][1]['status'] = False 
request['data_sources'][2]['status'] = False 
request['data_sources'][3]['status'] = False 
# request['data_sources'][4]['status'] = True 


# Edit data source 
response = ekos.request_workspace_data_sources_edit(request) 
ekos.write_test_response('request_workspace_data_sources_edit', response)


2018-09-20 19:31:23,369	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:31:23,373	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:31:24,259	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8860509395599365
2018-09-20 19:31:24,295	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.9250528812408447
2018-09-20 19:31:24,301	event_handler.py	50	f	DEBUG	Start: "request_workspace_data_sources_list"
2018-09-20 19:31:24,305	event_handler.py	4480	request_workspace_data_sources_list	DEBUG	Start: request_workspace_data_sources_list
2018-09-20 19:31:24,342	event_handler.py	2991	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
2018-09-20 19:31:24,845	event_handler.py	3009	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
2018-09-20 19:31:24,858	event_handler.py	54	f	DEBUG	Stop: "request_workspace_data_sources_list". Time for running method was 0.5530316829681396
2018-09-20 19:31:24,864	event_handler.py	50	f	DEBUG	Start: "request_workspace_data_sources_edit"
2018-09-20 19:31:24,869	event_handler.py	4438	request_workspace_data_sources_edit	DEBUG	Start: request_workspace_data_sources_list
2018-09-20 19:31:24,910	event_handler.py	3002	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 19:31:25,055	workspaces.py	1842	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
2018-09-20 19:31:25,058	event_handler.py	50	f	DEBUG	Start: "request_workspace_data_sources_list"
2018-09-20 19:31:25,063	event_handler.py	4480	request_workspace_data_sources_list	DEBUG	Start: request_workspace_data_sources_list
2018-09-20 19:31:25,099	event_handler.py	3002	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
REQUEST True phytoplankton_sharkweb_data_Biovolume concentration_2007-2012_201809201903.txt
REQUEST True chlorophyll_sharkweb_data_Chlorophyll-a_2007-2012_201809201903.txt
REQUEST False zoobenthos_sharkweb_data_BQIm_2007-2012_201809201903.txt
REQUEST True physicalchemical_sharkweb_data_all_2007-2012_201809201903.txt
self.all_data 0
2018-09-20 19:31:25,115	event_handler.py	54	f	DEBUG	Stop: "request_workspace_data_sources_list". Time for running method was 0.052002906799316406
2018-09-20 19:31:25,122	event_handler.py	54	f	DEBUG	Stop: "request_workspace_data_sources_edit". Time for running method was 0.25301432609558105

In [ ]:


In [ ]:


In [ ]:

Request subset add


In [12]:
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_add_1']
response_subset_add = ekos.request_subset_add(request)
ekos.write_test_response('request_subset_add_1', response_subset_add)


2018-09-20 19:05:14,633	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:05:14,636	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:05:15,527	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8900508880615234
2018-09-20 19:05:15,566	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.9330534934997559
2018-09-20 19:05:15,572	event_handler.py	50	f	DEBUG	Start: "request_subset_add"
2018-09-20 19:05:15,576	event_handler.py	3256	request_subset_add	DEBUG	Start: request_subset_add
2018-09-20 19:05:15,617	event_handler.py	2984	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
2018-09-20 19:05:16,040	event_handler.py	3002	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
2018-09-20 19:05:16,043	event_handler.py	375	copy_subset	DEBUG	Trying to copy subset "default_subset"
2018-09-20 19:05:16,211	logger.py	85	add_log	DEBUG	
2018-09-20 19:05:16,216	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-20 19:05:16,219	logger.py	87	add_log	DEBUG	### Log added for log_id "59365b60-aadd-4974-8df0-ba7c0a3d98ef" at locaton: D:\git\ekostat_calculator\workspaces\a377ee26-cd2d-411b-999c-073cd7a3dbd4\log\subset_59365b60-aadd-4974-8df0-ba7c0a3d98ef.log
2018-09-20 19:05:16,224	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-20 19:05:16,237	event_handler.py	54	f	DEBUG	Stop: "request_subset_add". Time for running method was 0.6600375175476074
¤ mw_subset
====================================================================================================
59365b60-aadd-4974-8df0-ba7c0a3d98ef
D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/log
subset
----------------------------------------------------------------------------------------------------

In [13]:
update_subset_uuid_in_test_requests(subset_alias='mw_subset')


2018-09-20 19:05:16,853	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:05:16,857	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:05:17,716	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8590488433837891
2018-09-20 19:05:17,754	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.9010515213012695
2018-09-20 19:05:17,789	event_handler.py	2984	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
2018-09-20 19:05:18,278	event_handler.py	3002	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
Updating user user_1 with workspace_uuid a377ee26-cd2d-411b-999c-073cd7a3dbd4 and subset_uuid 59365b60-aadd-4974-8df0-ba7c0a3d98ef
a377ee26-cd2d-411b-999c-073cd7a3dbd4 59365b60-aadd-4974-8df0-ba7c0a3d98ef
----------------------------------------------------------------------

In [ ]:


In [ ]:

Request subset get data filter


In [22]:
ekos = EventHandler(**paths)
update_subset_uuid_in_test_requests(subset_alias='mw_subset')
request = ekos.test_requests['request_subset_get_data_filter']
response_subset_get_data_filter = ekos.request_subset_get_data_filter(request)
ekos.write_test_response('request_subset_get_data_filter', response_subset_get_data_filter)


2018-09-20 19:21:34,611	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:21:34,614	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:21:35,519	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.9050517082214355
2018-09-20 19:21:35,555	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.9450538158416748
2018-09-20 19:21:35,562	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:21:35,567	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:21:36,485	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.9180526733398438
2018-09-20 19:21:36,519	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.958054780960083
2018-09-20 19:21:36,554	event_handler.py	2991	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
2018-09-20 19:21:37,058	event_handler.py	3009	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
2018-09-20 19:21:37,273	event_handler.py	50	f	DEBUG	Start: "request_subset_get_data_filter"
2018-09-20 19:21:37,276	event_handler.py	3452	request_subset_get_data_filter	DEBUG	Start: request_subset_get_data_filter
Updating user user_1 with workspace_uuid a377ee26-cd2d-411b-999c-073cd7a3dbd4 and subset_uuid 59365b60-aadd-4974-8df0-ba7c0a3d98ef
a377ee26-cd2d-411b-999c-073cd7a3dbd4 59365b60-aadd-4974-8df0-ba7c0a3d98ef
----------------------------------------------------------------------
2018-09-20 19:21:37,315	event_handler.py	2991	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
2018-09-20 19:21:37,803	event_handler.py	3009	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
2018-09-20 19:21:37,893	workspaces.py	1842	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
self.all_data 0
2018-09-20 19:21:38,088	event_handler.py	54	f	DEBUG	Stop: "request_subset_get_data_filter". Time for running method was 0.8120465278625488
TYPE AREA 14
TYPE AREA 18
TYPE AREA 2
TYPE AREA 20
TYPE AREA 22
TYPE AREA 14
TYPE AREA 18
TYPE AREA 2
TYPE AREA 20
TYPE AREA 22
TYPE AREA 14
TYPE AREA 18
TYPE AREA 2
TYPE AREA 20
TYPE AREA 22
TYPE AREA 14
TYPE AREA 18
TYPE AREA 2
TYPE AREA 20
TYPE AREA 22

In [20]:
# import re
# string = """{
#     "workspace_uuid": "52725df4-b4a0-431c-a186-5e542fc6a3a4",
#     "data_sources": [
#         {
#             "status": true,
#             "loaded": false,
#             "filename": "physicalchemical_sharkweb_data_all_2013-2014_20180916.txt",
#             "datatype": "physicalchemical"
#         }
#     ]
# }"""

# r = re.sub('"workspace_uuid": ".{36}"', '"workspace_uuid": "new"', string)

Request subset set data filter


In [20]:
ekos = EventHandler(**paths)
update_subset_uuid_in_test_requests(subset_alias='mw_subset')
request = ekos.test_requests['request_subset_set_data_filter']
response_subset_set_data_filter = ekos.request_subset_set_data_filter(request)
ekos.write_test_response('request_subset_set_data_filter', response_subset_set_data_filter)


2018-09-20 13:54:00,112	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 13:54:00,112	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 13:54:00,912	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8000011444091797
2018-09-20 13:54:00,942	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.8300011157989502
2018-09-20 13:54:00,942	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 13:54:00,952	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 13:54:01,762	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8100011348724365
2018-09-20 13:54:01,792	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.8500008583068848
2018-09-20 13:54:01,822	event_handler.py	2887	load_workspace	DEBUG	Trying to load new workspace "fccc7645-8501-4541-975b-bdcfb40a5092" with alias "New test workspace"
2018-09-20 13:54:02,262	event_handler.py	2905	load_workspace	INFO	Workspace "fccc7645-8501-4541-975b-bdcfb40a5092" with alias "New test workspace loaded."
2018-09-20 13:54:02,452	event_handler.py	50	f	DEBUG	Start: "request_subset_set_data_filter"
2018-09-20 13:54:02,452	event_handler.py	3249	request_subset_set_data_filter	DEBUG	Start: request_subset_get_indicator_settings
Updating user user_1 with workspace_uuid fccc7645-8501-4541-975b-bdcfb40a5092 and subset_uuid a4e53080-2c68-40d5-957f-8cc4dbf77815
fccc7645-8501-4541-975b-bdcfb40a5092 a4e53080-2c68-40d5-957f-8cc4dbf77815
----------------------------------------------------------------------
2018-09-20 13:54:02,482	event_handler.py	2887	load_workspace	DEBUG	Trying to load new workspace "fccc7645-8501-4541-975b-bdcfb40a5092" with alias "New test workspace"
2018-09-20 13:54:02,942	event_handler.py	2905	load_workspace	INFO	Workspace "fccc7645-8501-4541-975b-bdcfb40a5092" with alias "New test workspace loaded."
2018-09-20 13:54:03,032	workspaces.py	1842	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
2018-09-20 13:54:03,062	event_handler.py	54	f	DEBUG	Stop: "request_subset_set_data_filter". Time for running method was 0.6100008487701416
self.all_data 0

In [ ]:


In [ ]:

Request subset get indicator settings


In [27]:
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_get_indicator_settings']
# request = ekos.test_requests['request_subset_get_indicator_settings_no_areas']
# print(request['subset']['subset_uuid'])
# request['subset']['subset_uuid'] = 'fel'
# print(request['subset']['subset_uuid'])

response_subset_get_indicator_settings = ekos.request_subset_get_indicator_settings(request)
ekos.write_test_response('request_subset_get_indicator_settings', response_subset_get_indicator_settings)


2018-09-20 06:50:41,643	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 06:50:41,643	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 06:50:42,330	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.6864011287689209
2018-09-20 06:50:42,361	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.7176012992858887
2018-09-20 06:50:42,361	event_handler.py	50	f	DEBUG	Start: "request_subset_get_indicator_settings"
2018-09-20 06:50:42,361	event_handler.py	3416	request_subset_get_indicator_settings	DEBUG	Start: request_subset_get_indicator_settings
2018-09-20 06:50:42,392	event_handler.py	2887	load_workspace	DEBUG	Trying to load new workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace"
2018-09-20 06:50:42,798	event_handler.py	2905	load_workspace	INFO	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace loaded."
2018-09-20 06:50:42,829	workspaces.py	1842	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
self.all_data 0
2018-09-20 06:50:43,047	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,141	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,219	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,344	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,468	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,562	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,640	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,765	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,858	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:43,968	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,061	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,139	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,217	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,295	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,389	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,482	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,592	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,670	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,748	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,826	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:44,919	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:45,013	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:45,122	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:45,216	event_handler.py	2898	load_workspace	DEBUG	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 06:50:45,278	event_handler.py	54	f	DEBUG	Stop: "request_subset_get_indicator_settings". Time for running method was 2.9172048568725586

In [ ]:


In [ ]:

Request subset set indicator settings


In [22]:
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_set_indicator_settings']
response_subset_set_indicator_settings = ekos.request_subset_set_indicator_settings(request)
ekos.write_test_response('request_subset_set_indicator_settings', response_subset_set_indicator_settings)


2018-09-20 12:09:08,454	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 12:09:08,454	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 12:09:09,234	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.780001163482666
2018-09-20 12:09:09,264	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.8100008964538574
2018-09-20 12:09:09,284	event_handler.py	50	f	DEBUG	Start: "request_subset_set_indicator_settings"
2018-09-20 12:09:09,294	event_handler.py	3627	request_subset_set_indicator_settings	DEBUG	Start: request_subset_set_indicator_settings
2018-09-20 12:09:09,324	event_handler.py	2887	load_workspace	DEBUG	Trying to load new workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace"
2018-09-20 12:09:09,444	logger.py	85	add_log	DEBUG	
2018-09-20 12:09:09,444	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-20 12:09:09,454	logger.py	87	add_log	DEBUG	### Log added for log_id "cc264e56-f958-4ec4-932d-bc0cc1d2caf8" at locaton: D:\git\ekostat_calculator\workspaces\1a349dfd-5e08-4617-85a8-5bdde050a4ee\log\subset_cc264e56-f958-4ec4-932d-bc0cc1d2caf8.log
2018-09-20 12:09:09,454	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-20 12:09:09,544	logger.py	85	add_log	DEBUG	
2018-09-20 12:09:09,554	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-20 12:09:09,554	logger.py	87	add_log	DEBUG	### Log added for log_id "default_subset" at locaton: D:\git\ekostat_calculator\workspaces\1a349dfd-5e08-4617-85a8-5bdde050a4ee\log\subset_default_subset.log
2018-09-20 12:09:09,564	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
====================================================================================================
cc264e56-f958-4ec4-932d-bc0cc1d2caf8
D:/git/ekostat_calculator/workspaces/1a349dfd-5e08-4617-85a8-5bdde050a4ee/log
subset
----------------------------------------------------------------------------------------------------
====================================================================================================
default_subset
D:/git/ekostat_calculator/workspaces/1a349dfd-5e08-4617-85a8-5bdde050a4ee/log
subset
----------------------------------------------------------------------------------------------------
2018-09-20 12:09:09,874	logger.py	85	add_log	DEBUG	
2018-09-20 12:09:09,874	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-20 12:09:09,874	logger.py	87	add_log	DEBUG	### Log added for log_id "1a349dfd-5e08-4617-85a8-5bdde050a4ee" at locaton: D:\git\ekostat_calculator\workspaces\1a349dfd-5e08-4617-85a8-5bdde050a4ee\log\workspace_1a349dfd-5e08-4617-85a8-5bdde050a4ee.log
2018-09-20 12:09:09,884	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-20 12:09:09,884	event_handler.py	2905	load_workspace	INFO	Workspace "1a349dfd-5e08-4617-85a8-5bdde050a4ee" with alias "New test workspace loaded."
2018-09-20 12:09:09,924	workspaces.py	1842	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
2018-09-20 12:09:09,934	event_handler.py	50	f	DEBUG	Start: "set_settings_filter"
2018-09-20 12:09:09,954	event_handler.py	54	f	DEBUG	Stop: "set_settings_filter". Time for running method was 0.009999752044677734
2018-09-20 12:09:09,954	event_handler.py	50	f	DEBUG	Start: "set_settings_filter"
2018-09-20 12:09:09,974	event_handler.py	54	f	DEBUG	Stop: "set_settings_filter". Time for running method was 0.019999980926513672
2018-09-20 12:09:09,974	event_handler.py	50	f	DEBUG	Start: "set_settings_filter"
2018-09-20 12:09:09,994	event_handler.py	54	f	DEBUG	Stop: "set_settings_filter". Time for running method was 0.010000228881835938
2018-09-20 12:09:09,994	event_handler.py	50	f	DEBUG	Start: "set_settings_filter"
2018-09-20 12:09:10,014	event_handler.py	54	f	DEBUG	Stop: "set_settings_filter". Time for running method was 0.009999752044677734
2018-09-20 12:09:10,024	event_handler.py	54	f	DEBUG	Stop: "request_subset_set_indicator_settings". Time for running method was 0.7300012111663818
====================================================================================================
1a349dfd-5e08-4617-85a8-5bdde050a4ee
D:/git/ekostat_calculator/workspaces/1a349dfd-5e08-4617-85a8-5bdde050a4ee/log
workspace
----------------------------------------------------------------------------------------------------
self.all_data 0
Tolerance column
Value to set for type_area "12n" and variable "MIN_NR_YEARS": ['9']
List column (6, 7, 8)
¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤
¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤
(6, 7, 8) <class 'tuple'> MONTH_LIST
Value to set for type_area "12n" and variable "MONTH_LIST": ['6;7;8']
Tolerance column
Value to set for type_area "1n" and variable "MIN_NR_YEARS": ['8']
List column (1, 2)
¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤
¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤¤
(1, 2) <class 'tuple'> MONTH_LIST
Value to set for type_area "1n" and variable "MONTH_LIST": ['1;2']

Request subset calculate status


In [14]:
ekos = EventHandler(**paths)
request = ekos.test_requests['request_subset_calculate_status']
response = ekos.request_subset_calculate_status(request)
ekos.write_test_response('request_subset_calculate_status', response)


2018-09-20 19:05:31,914	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:05:31,917	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:05:32,790	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8740499019622803
2018-09-20 19:05:32,826	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.9130520820617676
2018-09-20 19:05:32,831	event_handler.py	50	f	DEBUG	Start: "request_subset_calculate_status"
2018-09-20 19:05:32,837	event_handler.py	3296	request_subset_calculate_status	DEBUG	Start: request_subset_calculate_status
2018-09-20 19:05:32,871	event_handler.py	2984	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
2018-09-20 19:05:33,359	event_handler.py	3002	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
2018-09-20 19:05:33,453	workspaces.py	1842	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
2018-09-20 19:05:33,493	event_handler.py	2995	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
2018-09-20 19:05:33,530	event_handler.py	2995	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
self.all_data 0
=== dummy_file.txt
2018-09-20 19:05:33,630	event_handler.py	2995	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
--------------------------------------------------
Total time to apply data filters for indicator indicator_din_winter: 0.10400605201721191
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_ntot_summer: 0.08800482749938965
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_ntot_winter: 0.08200478553771973
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_dip_winter: 0.09100508689880371
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_ptot_summer: 0.09100532531738281
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_ptot_winter: 0.09700560569763184
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_bqi: 0.06500363349914551
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_oxygen: 0.03300213813781738
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_biov: 0.05800342559814453
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_chl: 0.05700325965881348
--------------------------------------------------
--------------------------------------------------
Total time to apply data filters for indicator indicator_secchi: 0.05700325965881348
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_din_winter
"_set_water_body_indicator_df". Time for running method was 0.7780444622039795
--------------------------------------------------
Total time to set up indicator object indicator indicator_din_winter: 0.7910451889038086
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_ntot_summer
"_set_water_body_indicator_df". Time for running method was 2.4691412448883057
--------------------------------------------------
Total time to set up indicator object indicator indicator_ntot_summer: 2.480142116546631
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_ntot_winter
"_set_water_body_indicator_df". Time for running method was 0.6210355758666992
--------------------------------------------------
Total time to set up indicator object indicator indicator_ntot_winter: 0.6330358982086182
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_dip_winter
"_set_water_body_indicator_df". Time for running method was 0.6750385761260986
--------------------------------------------------
Total time to set up indicator object indicator indicator_dip_winter: 0.6860394477844238
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_ptot_summer
"_set_water_body_indicator_df". Time for running method was 2.3531346321105957
--------------------------------------------------
Total time to set up indicator object indicator indicator_ptot_summer: 2.3641350269317627
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_ptot_winter
"_set_water_body_indicator_df". Time for running method was 0.5730326175689697
--------------------------------------------------
Total time to set up indicator object indicator indicator_ptot_winter: 0.5830333232879639
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_bqi
"_set_water_body_indicator_df". Time for running method was 0.12700724601745605
--------------------------------------------------
Total time to set up indicator object indicator indicator_bqi: 0.1370079517364502
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_oxygen
"_set_water_body_indicator_df". Time for running method was 0.05600309371948242
--------------------------------------------------
Total time to set up indicator object indicator indicator_oxygen: 0.06600379943847656
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_biov
"_set_water_body_indicator_df". Time for running method was 0.7830448150634766
--------------------------------------------------
Total time to set up indicator object indicator indicator_biov: 0.7960455417633057
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_chl
"_set_water_body_indicator_df". Time for running method was 0.9020516872406006
--------------------------------------------------
Total time to set up indicator object indicator indicator_chl: 0.9130520820617676
--------------------------------------------------
****INITIATING INDICATOR OBJECT FOR****
indicator_secchi
"_set_water_body_indicator_df". Time for running method was 0.8600492477416992
--------------------------------------------------
Total time to set up indicator object indicator indicator_secchi: 0.8720500469207764
--------------------------------------------------
indicator_din_winter
--------------------------------------------------
Total time to calculate status for indicator indicator_din_winter: 0.7950456142425537
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_din_winter-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_din_winter-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_din_winter-by_period.txt
indicator_ntot_summer
--------------------------------------------------
Total time to calculate status for indicator indicator_ntot_summer: 0.970055341720581
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ntot_summer-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ntot_summer-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ntot_summer-by_period.txt
indicator_ntot_winter
--------------------------------------------------
Total time to calculate status for indicator indicator_ntot_winter: 0.6610379219055176
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ntot_winter-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ntot_winter-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ntot_winter-by_period.txt
indicator_dip_winter
--------------------------------------------------
Total time to calculate status for indicator indicator_dip_winter: 0.7040402889251709
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_dip_winter-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_dip_winter-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_dip_winter-by_period.txt
indicator_ptot_summer
--------------------------------------------------
Total time to calculate status for indicator indicator_ptot_summer: 0.9590549468994141
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ptot_summer-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ptot_summer-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ptot_summer-by_period.txt
indicator_ptot_winter
--------------------------------------------------
Total time to calculate status for indicator indicator_ptot_winter: 0.7010397911071777
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ptot_winter-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ptot_winter-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_ptot_winter-by_period.txt
indicator_bqi
--------------------------------------------------
Total time to calculate status for indicator indicator_bqi: 0.07900452613830566
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_bqi-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_bqi-by_year.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_bqi-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_bqi-by_period.txt
indicator_oxygen
..\core\indicators.py:1434: RuntimeWarning: Mean of empty slice
  value = np.nanmean(q)
..\core\indicators.py:1406: RuntimeWarning: invalid value encountered in less
  if minimum_deficiency_depth > self.maxD:
--------------------------------------------------
Total time to calculate status for indicator indicator_oxygen: 0.10100579261779785
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_oxygen-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_oxygen-by_period.txt
indicator_biov
--------------------------------------------------
Total time to calculate status for indicator indicator_biov: 1.3230760097503662
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_biov-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_biov-by_year.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_biov-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_biov-by_period.txt
indicator_chl
--------------------------------------------------
Total time to calculate status for indicator indicator_chl: 1.1100633144378662
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_chl-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_chl-by_year.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_chl-by_year_pos.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_chl-by_period.txt
indicator_secchi
--------------------------------------------------
Total time to calculate status for indicator indicator_secchi: 0.9720554351806641
--------------------------------------------------
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_secchi-by_date.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_secchi-by_period.txt
QualityElementBase
********
nutrients
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_p_winter-by_period.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_n_winter-by_period.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_p_summer-by_period.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_n_summer-by_period.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_p-by_period.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/indicator_n-by_period.txt
2018-09-20 19:05:53,944	event_handler.py	54	f	DEBUG	Stop: "request_subset_calculate_status". Time for running method was 21.10720729827881
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/qe_nutrients-by_period.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/nutrients_all_results.txt
QualityElementBase
********
phytoplankton
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/qe_phytoplankton-by_period.txt
Saving data to: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/subsets/59365b60-aadd-4974-8df0-ba7c0a3d98ef/step_3/output/results/phytoplankton_all_results.txt

In [ ]:


In [ ]:

Request subset result get


In [21]:
ekos = EventHandler(**paths) 
request = ekos.test_requests['request_workspace_result']
response_workspace_result = ekos.request_workspace_result(request)
ekos.write_test_response('request_workspace_result', response_workspace_result)


2018-09-20 19:13:01,347	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-09-20 19:13:01,351	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
2018-09-20 19:13:02,227	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.8760499954223633
2018-09-20 19:13:02,262	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.9140522480010986
2018-09-20 19:13:02,266	event_handler.py	50	f	DEBUG	Start: "request_workspace_result"
2018-09-20 19:13:02,306	event_handler.py	2991	load_workspace	DEBUG	Trying to load new workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace"
2018-09-20 19:13:02,812	event_handler.py	3009	load_workspace	INFO	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace loaded."
2018-09-20 19:13:02,872	event_handler.py	3002	load_workspace	DEBUG	Workspace "a377ee26-cd2d-411b-999c-073cd7a3dbd4" with alias "New test workspace" is already loaded. Set reload=True if you want to reload the workspace.
==============================
qe_phytoplankton
------------------------------
==============================
indicator_oxygen
------------------------------
==============================
indicator_p_summer
------------------------------
==============================
indicator_p
------------------------------
==============================
indicator_p_winter
------------------------------
==============================
indicator_n_summer
------------------------------
==============================
indicator_n
------------------------------
==============================
indicator_n_winter
------------------------------
==============================
indicator_p_summer
------------------------------
==============================
indicator_p
------------------------------
==============================
indicator_p_winter
------------------------------
==============================
indicator_n_summer
------------------------------
==============================
indicator_n
------------------------------
==============================
indicator_n_winter
------------------------------
==============================
qe_phytoplankton
------------------------------
==============================
indicator_p_summer
------------------------------
==============================
indicator_p
------------------------------
==============================
indicator_p_winter
------------------------------
==============================
indicator_n_summer
------------------------------
==============================
indicator_n
------------------------------
==============================
indicator_n_winter
------------------------------
==============================
qe_phytoplankton
------------------------------
==============================
indicator_p_summer
------------------------------
==============================
indicator_p
------------------------------
==============================
indicator_p_winter
------------------------------
==============================
indicator_n_summer
------------------------------
==============================
indicator_n
------------------------------
==============================
indicator_n_winter
------------------------------
==============================
qe_phytoplankton
------------------------------
==============================
indicator_oxygen
------------------------------
==============================
indicator_p_summer
------------------------------
==============================
indicator_n_summer
------------------------------
2018-09-20 19:13:04,635	event_handler.py	54	f	DEBUG	Stop: "request_workspace_result". Time for running method was 2.3631348609924316
Cache file saved: D:/git/ekostat_calculator/workspaces/a377ee26-cd2d-411b-999c-073cd7a3dbd4/cache/59365b60-aadd-4974-8df0-ba7c0a3d98ef_result_dict.pkl

In [158]:
response_workspace_result['subset']['a4e53080-2c68-40d5-957f-8cc4dbf77815']['result']['SE552170-130626']['result']['indicator_din_winter']['data']


Out[158]:
[[datetime.datetime(2011, 1, 19, 0, 0),
  datetime.datetime(2011, 2, 9, 0, 0),
  datetime.datetime(2012, 1, 10, 0, 0),
  datetime.datetime(2013, 1, 8, 0, 0),
  datetime.datetime(2014, 1, 15, 0, 0),
  datetime.datetime(2014, 2, 10, 0, 0),
  datetime.datetime(2015, 1, 7, 0, 0),
  datetime.datetime(2015, 2, 4, 0, 0),
  datetime.datetime(2016, 1, 13, 0, 0),
  datetime.datetime(2016, 2, 11, 0, 0)]]

In [136]:
workspace_uuid = 'fccc7645-8501-4541-975b-bdcfb40a5092'
subset_uuid = 'a4e53080-2c68-40d5-957f-8cc4dbf77815'
result = ekos.dict_data_timeseries(workspace_uuid=workspace_uuid, 
                                  subset_uuid=subset_uuid, 
                                  viss_eu_cd='SE575150-162700',
                                  element_id='indicator_din_winter')

In [138]:
print(result['datasets'][0]['x'])
print()
print(result['y'])


[9.2100000000000009, None, 11.92, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 9.7699999999999996, None, None, None, None, None, 11.09]

['', '12-Jan', '', '12-Mar', '12-May', '12-Jul', '12-Sep', '12-Nov', '13-Jan', '', '', '13-Mar', '13-May', '13-Jul', '13-Sep', '13-Nov', '', '14-Jan', '14-Mar', '14-May', '14-Jul', '14-Sep', '14-Nov', '15-Jan', '', '15-Mar', '15-May', '15-Jul', '15-Sep', '15-Nov', '', '16-Jan', '', '16-Mar', '16-May', '16-Jul', '16-Sep', '16-Nov', '']

In [143]:
for k in range(len(result['datasets'])):
    print(result['datasets'][k]['x'])


[9.2100000000000009, None, 11.92, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 9.7699999999999996, None, None, None, None, None, 11.09]
[None, None, None, None, None, None, None, None, None, None, 18.41, None, None, None, None, None, 11.69, None, None, None, None, None, None, None, 12.41, None, None, None, None, None, 9.6500000000000004, None, None, None, None, None, None, None, None]
[10.640000000000001, None, 11.210000000000001, None, None, None, None, None, None, 13.69, None, None, None, None, None, None, 10.210000000000001, None, None, None, None, None, None, None, 13.07, None, None, None, None, None, 9.5800000000000001, None, 9.8499999999999996, None, None, None, None, None, 11.0]
[2.9951999999999988, None, 2.4985999999999979, None, None, None, None, None, None, 4.7332999999999981, 3.4917999999999978, None, None, None, None, None, 2.7468999999999983, None, None, None, None, None, None, None, 3.9139099999999978, None, None, None, None, None, 2.2502999999999975, None, 2.0019999999999989, None, None, None, None, None, 2.4985999999999979]
[3.7439999999999984, None, 3.1232499999999974, None, None, None, None, None, None, 5.9166249999999971, 4.3647499999999972, None, None, None, None, None, 3.4336249999999979, None, None, None, None, None, None, None, 4.8923874999999972, None, None, None, None, None, 2.8128749999999969, None, 2.5024999999999986, None, None, None, None, None, 3.1232499999999974]
[4.4704477611940279, None, 3.7292537313432801, None, None, None, None, None, None, 7.0646268656716389, 5.2116417910447721, None, None, None, None, None, 4.099850746268654, None, None, None, None, None, None, None, 5.8416567164179067, None, None, None, None, None, 3.3586567164179066, None, 2.9880597014925354, None, None, None, None, None, 3.7292537313432801]
[6.807272727272724, None, 5.6786363636363593, None, None, None, None, None, None, 10.757499999999995, 7.9359090909090861, None, None, None, None, None, 6.2429545454545421, None, None, None, None, None, None, None, 8.8952499999999954, None, None, None, None, None, 5.1143181818181764, None, 4.5499999999999972, None, None, None, None, None, 5.6786363636363593]
[10.328275862068962, None, 8.6158620689655105, None, None, None, None, None, None, 16.321724137931028, 12.040689655172407, None, None, None, None, None, 9.4720689655172361, None, None, None, None, None, None, None, 13.496241379310337, None, None, None, None, None, 7.759655172413785, None, 6.9034482758620657, None, None, None, None, None, 8.6158620689655105]

In [116]:
import datetime 

# Extend date list 
start_year = all_dates[0].year
end_year = all_dates[-1].year+1


date_intervall = []
for year in range(start_year, end_year+1):
    for month in range(1, 13):
        d = datetime.datetime(year, month, 1) 
        if d >= all_dates[0] and d <= all_dates[-1]:
            date_intervall.append(d)

extended_dates = sorted(set(all_dates + date_intervall))


# Loop dates and add/remove values 
new_x = [] 
new_y = dict((item, []) for item in date_to_y)
for date in extended_dates: 
    if date in date_intervall:
        new_x.append(date.strftime('%y-%b'))
    else:
        new_x.append('')
        
    for i in new_y:
        new_y[i].append(date_to_y[i].get(date, None))
        
            


# new_y = {}
# for i in date_to_y:
#     new_y[i] = []
#     for date in all_dates:
#         d = date_to_y[i].get(date)
#         if d:
#             new_y[i].append(d)
#         else:
#             new_y[i].append(None)

In [117]:
new_y[0]


Out[117]:
[9.2100000000000009,
 None,
 11.92,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 9.7699999999999996,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 None,
 11.09]

In [110]:
import datetime 
year_list = range(2011, 2013+1) 
month_list = range(1, 13)

date_list = []
for year in year_list:
    for month in month_list:
        date_list.append(datetime.datetime(year, month, 1))

In [111]:
date_list


Out[111]:
[datetime.datetime(2011, 1, 1, 0, 0),
 datetime.datetime(2011, 2, 1, 0, 0),
 datetime.datetime(2011, 3, 1, 0, 0),
 datetime.datetime(2011, 4, 1, 0, 0),
 datetime.datetime(2011, 5, 1, 0, 0),
 datetime.datetime(2011, 6, 1, 0, 0),
 datetime.datetime(2011, 7, 1, 0, 0),
 datetime.datetime(2011, 8, 1, 0, 0),
 datetime.datetime(2011, 9, 1, 0, 0),
 datetime.datetime(2011, 10, 1, 0, 0),
 datetime.datetime(2011, 11, 1, 0, 0),
 datetime.datetime(2011, 12, 1, 0, 0),
 datetime.datetime(2012, 1, 1, 0, 0),
 datetime.datetime(2012, 2, 1, 0, 0),
 datetime.datetime(2012, 3, 1, 0, 0),
 datetime.datetime(2012, 4, 1, 0, 0),
 datetime.datetime(2012, 5, 1, 0, 0),
 datetime.datetime(2012, 6, 1, 0, 0),
 datetime.datetime(2012, 7, 1, 0, 0),
 datetime.datetime(2012, 8, 1, 0, 0),
 datetime.datetime(2012, 9, 1, 0, 0),
 datetime.datetime(2012, 10, 1, 0, 0),
 datetime.datetime(2012, 11, 1, 0, 0),
 datetime.datetime(2012, 12, 1, 0, 0),
 datetime.datetime(2013, 1, 1, 0, 0),
 datetime.datetime(2013, 2, 1, 0, 0),
 datetime.datetime(2013, 3, 1, 0, 0),
 datetime.datetime(2013, 4, 1, 0, 0),
 datetime.datetime(2013, 5, 1, 0, 0),
 datetime.datetime(2013, 6, 1, 0, 0),
 datetime.datetime(2013, 7, 1, 0, 0),
 datetime.datetime(2013, 8, 1, 0, 0),
 datetime.datetime(2013, 9, 1, 0, 0),
 datetime.datetime(2013, 10, 1, 0, 0),
 datetime.datetime(2013, 11, 1, 0, 0),
 datetime.datetime(2013, 12, 1, 0, 0)]

In [104]:
a


Out[104]:
{2, 4}

In [81]:
y[3][i]


Out[81]:
2.9951999999999988

In [29]:
sorted(pd.to_datetime(df['SDATE']))


Out[29]:
[Timestamp('2011-01-19 00:00:00'),
 Timestamp('2011-02-09 00:00:00'),
 Timestamp('2012-01-10 00:00:00'),
 Timestamp('2013-01-08 00:00:00')]

In [ ]: