In [1]:
import os
import sys

# Path to the ekostat_calculator repository (adjust to your local checkout, e.g. "../").
path = "D:/github/w_vattenstatus/ekostat_calculator"
sys.path.append(path)
print(os.path.abspath(path))

import pandas as pd
import numpy as np
import json
import timeit
import time

import core
import importlib
importlib.reload(core)

import logging
try:
    # Shut down and reload logging to clear any handlers left from a previous run.
    logging.shutdown()
    importlib.reload(logging)
except Exception:
    pass

from event_handler import EventHandler
print(core.__file__)
pd.__version__


D:\github\w_vattenstatus\ekostat_calculator
D:/github/w_vattenstatus/ekostat_calculator\core\__init__.py
Out[1]:
'0.19.2'
# Load directories


In [3]:
root_directory = 'D:/github/w_vattenstatus/ekostat_calculator'  # "../" or os.getcwd()
workspace_directory = root_directory + '/workspaces'
resource_directory = root_directory + '/resources'
user_id = 'test_user'
print(root_directory)

# ## Initiate EventHandler
paths = {'user_id': user_id,
         'workspace_directory': workspace_directory,
         'resource_directory': resource_directory,
         'log_directory': 'D:/github' + '/log',
         'test_data_directory': 'D:/github' + '/test_data',
         'cache_directory': 'D:/github/w_vattenstatus/cache'}

t0 = time.time()
ekos = EventHandler(**paths)
#request = ekos.test_requests['request_workspace_list']
#response = ekos.request_workspace_list(request) 
#ekos.write_test_response('request_workspace_list', response)
print('-'*50)
print('Time for request: {}'.format(time.time()-t0))


2018-11-22 15:28:11,410	logger.py	85	add_log	DEBUG	
2018-11-22 15:28:11,413	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-11-22 15:28:11,415	logger.py	87	add_log	DEBUG	### Log added for log_id "event_handler" at locaton: D:\github\log\main_event_handler.log
2018-11-22 15:28:11,421	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-11-22 15:28:11,423	event_handler.py	117	__init__	DEBUG	Start EventHandler: event_handler
2018-11-22 15:28:11,429	event_handler.py	152	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
D:/github/w_vattenstatus/ekostat_calculator
====================================================================================================
event_handler
D:/github/log
main
----------------------------------------------------------------------------------------------------
2018-11-22 15:28:11,880	event_handler.py	128	__init__	DEBUG	Time for mapping: 0.4508187770843506
2018-11-22 15:28:11,880	event_handler.py	133	__init__	DEBUG	Time for initiating EventHandler: 0.4718210697174072
--------------------------------------------------
Time for request: 0.48742127418518066
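
Since the EventHandler writes logs and caches into these directories, a quick existence check can catch path mistakes early (a sketch; nothing here is part of the EventHandler API):

In [ ]:
# Sanity check: confirm that every configured directory actually exists.
for key, directory in paths.items():
    if key == 'user_id':
        continue
    print('{:25s} {:6s} {}'.format(key, str(os.path.isdir(directory)), directory))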

In [4]:
###############################################################################################################################
# ### Make a new workspace

In [5]:
# ekos.copy_workspace(source_uuid='default_workspace', target_alias='kustzonsmodellen_3daydata')

In [6]:
# ### See existing workspaces and choose workspace name to load
ekos.print_workspaces()


====================================================================================================
Current workspaces for user are:

uuid                                    alias                         status                        
----------------------------------------------------------------------------------------------------
default_workspace                       default_workspace             readable                      
e86ae1c5-d241-46a4-9236-59524b44e500    lena_indicator                editable                      
2c27da69-6035-418b-8f5e-bc8ef8e6320b    kuszonsmodellen               editable                      
78bd7584-5de1-45ca-9176-09a998a7e734    kustzonsmodellen_3daydata     editable                      
6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c    waters_export                 editable                      
c876d9b9-e68c-476c-88c6-dee685d70334    satellit                      editable                      
====================================================================================================

In [7]:
#alias = 'lena'
# workspace_alias = 'lena_indicator' # kustzonsmodellen_3daydata
workspace_alias = 'waters_export'

In [8]:
workspace_uuid = ekos.get_unique_id_for_alias(workspace_alias = workspace_alias) #'kuszonsmodellen' lena_indicator 
print(workspace_uuid)

workspace_alias = ekos.get_alias_for_unique_id(workspace_uuid = workspace_uuid)


6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c

In [9]:
###############################################################################################################################
# ### Load existing workspace
ekos.load_workspace(unique_id = workspace_uuid)


2018-11-22 15:28:24,505	event_handler.py	3071	load_workspace	DEBUG	Trying to load new workspace "6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c" with alias "waters_export"
2018-11-22 15:28:24,701	logger.py	85	add_log	DEBUG	
2018-11-22 15:28:24,701	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-11-22 15:28:24,701	logger.py	87	add_log	DEBUG	### Log added for log_id "19ac8dc5-80be-4304-9ed2-3267bf59425b" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c\log\subset_19ac8dc5-80be-4304-9ed2-3267bf59425b.log
2018-11-22 15:28:24,701	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-11-22 15:28:24,837	logger.py	85	add_log	DEBUG	
2018-11-22 15:28:24,837	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-11-22 15:28:24,837	logger.py	87	add_log	DEBUG	### Log added for log_id "default_subset" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c\log\subset_default_subset.log
2018-11-22 15:28:24,853	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
====================================================================================================
19ac8dc5-80be-4304-9ed2-3267bf59425b
D:/github/w_vattenstatus/ekostat_calculator/workspaces/6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c/log
subset
----------------------------------------------------------------------------------------------------
====================================================================================================
default_subset
D:/github/w_vattenstatus/ekostat_calculator/workspaces/6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c/log
subset
----------------------------------------------------------------------------------------------------
2018-11-22 15:28:25,003	logger.py	85	add_log	DEBUG	
2018-11-22 15:28:25,005	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-11-22 15:28:25,007	logger.py	87	add_log	DEBUG	### Log added for log_id "6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c\log\workspace_6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c.log
2018-11-22 15:28:25,038	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-11-22 15:28:25,038	event_handler.py	3089	load_workspace	INFO	Workspace "6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c" with alias "waters_export loaded."
====================================================================================================
6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c
D:/github/w_vattenstatus/ekostat_calculator/workspaces/6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c/log
workspace
----------------------------------------------------------------------------------------------------
Out[9]:
True

In [10]:
###############################################################################################################################
# ### import data
# ekos.import_default_data(workspace_alias = workspace_alias)

In [11]:
###############################################################################################################################
# ### Load all data in workspace
# #### if there is old data that you want to remove
#ekos.get_workspace(workspace_uuid = workspace_uuid).delete_alldata_export()
#ekos.get_workspace(workspace_uuid = workspace_uuid).delete_all_export_data()

In [12]:
###############################################################################################################################
# #### to just load existing data in workspace
ekos.load_data(workspace_uuid = workspace_uuid)


2018-11-22 15:28:29,798	workspaces.py	1887	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
all_data loaded from pickle
Out[12]:
True

In [13]:
############################################################################################################################### 
# ### check workspace data length
w = ekos.get_workspace(workspace_uuid = workspace_uuid)
len(w.data_handler.get_all_column_data_df())


Out[13]:
120681

In [14]:
###############################################################################################################################  
# ### see subsets in data  
for subset_uuid in w.get_subset_list():
    print('uuid {} alias {}'.format(subset_uuid, w.uuid_mapping.get_alias(unique_id=subset_uuid)))


uuid 19ac8dc5-80be-4304-9ed2-3267bf59425b alias waters_export
uuid default_subset alias default_subset

In [15]:
###############################################################################################################################  
# # Step 0 
print(w.data_handler.all_data.columns)


Index(['AMON', 'BIOV_CONC_ALL', 'BQIm', 'CPHL_BTL', 'CPHL_INTEG', 'DEPH',
       'DIN', 'DOXY_BTL', 'DOXY_CTD', 'LATIT_DD', 'LONGI_DD', 'MNDEP', 'MXDEP',
       'MYEAR', 'NTOT', 'NTRA', 'NTRI', 'NTRZ', 'PHOS', 'PTOT', 'Q_AMON',
       'Q_BIOV_CONC_ALL', 'Q_BQIm', 'Q_CPHL_BTL', 'Q_CPHL_INTEG', 'Q_DOXY_BTL',
       'Q_DOXY_CTD', 'Q_NTOT', 'Q_NTRA', 'Q_NTRI', 'Q_NTRZ', 'Q_PHOS',
       'Q_PTOT', 'Q_SALT_BTL', 'Q_SALT_CTD', 'Q_SECCHI', 'Q_TEMP_BTL',
       'Q_TEMP_CTD', 'RLABO', 'SALT_BTL', 'SALT_CTD', 'SDATE', 'SEA_BASIN',
       'SECCHI', 'SERNO', 'SHARKID_MD5', 'SHIPC', 'SLABO', 'STATN', 'STIME',
       'TEMP_BTL', 'TEMP_CTD', 'VISS_EU_CD', 'WADEP', 'WATER_BODY_NAME',
       'WATER_DISTRICT', 'WATER_TYPE_AREA', 'WLTYP', 'origin_dtype',
       'origin_file_path', 'MONTH', 'YEAR', 'POSITION', 'visit_id_str', 'date',
       'SALT', 'Q_SALT', 'source_SALT', 'TEMP', 'Q_TEMP', 'source_TEMP',
       'DOXY', 'Q_DOXY', 'source_DOXY', 'CPHL_INTEG_CALC',
       'CPHL_INTEG_CALC_depths', 'CPHL_INTEG_CALC_values'],
      dtype='object')
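
A quick pandas summary of the loaded dataframe (a sketch; MYEAR and WATER_BODY_NAME are columns from the Index above) shows how the rows are distributed before any filters are applied:

In [ ]:
df = w.data_handler.get_all_column_data_df()
# Rows per monitoring year, to see the temporal coverage of the loaded data.
print(df['MYEAR'].value_counts().sort_index())
# Number of distinct water bodies represented in the data.
print(df['WATER_BODY_NAME'].nunique(), 'water bodies')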

In [16]:
###############################################################################################################################    
# ### Apply first data filter 
w.apply_data_filter(step = 0) # This sets the first level of data filter in the IndexHandler

In [17]:
###############################################################################################################################  
# # Step 1 
# ### make new subset
# w.copy_subset(source_uuid='default_subset', target_alias='test_kustzon')

In [18]:
###############################################################################################################################
# ### Choose subset name to load
subset_alias = 'waters_export'
# subset_alias = 'period_2007-2012_refvalues_2013'
# subset_alias = 'test_subset'
subset_uuid = ekos.get_unique_id_for_alias(workspace_alias = workspace_alias, subset_alias = subset_alias)
print('subset_alias', subset_alias, 'subset_uuid', subset_uuid)


subset_alias waters_export subset_uuid 19ac8dc5-80be-4304-9ed2-3267bf59425b
# Set subset filters


In [ ]:
# #### year filter
w.set_data_filter(subset = subset_uuid, step=1, 
                         filter_type='include_list', 
                         filter_name='MYEAR', 
                         data=[2007,2008,2009,2010,2011,2012])#['2011', '2012', '2013']) #, 2014, 2015, 2016

In [ ]:
###############################################################################################################################
# #### waterbody filter
w.set_data_filter(subset = subset_uuid, step=1, 
                         filter_type='include_list', 
                         filter_name='viss_eu_cd', data = []) #'SE584340-174401', 'SE581700-113000', 'SE654470-222700', 'SE633000-195000', 'SE625180-181655'
#                          data=['SE584340-174401', 'SE581700-113000', 'SE654470-222700', 'SE633000-195000', 'SE625180-181655']) 
#                          wb with no data for din 'SE591400-182320'
  
f1 = w.get_data_filter_object(subset = subset_uuid, step=1)
print(f1.include_list_filter)

print('subset_alias:', subset_alias, '\nsubset uuid:', subset_uuid)

In [ ]:
###############################################################################################################################    
# ## Apply step 1 datafilter to subset
w.apply_data_filter(subset = subset_uuid, step = 1)
filtered_data = w.get_filtered_data(step = 1, subset = subset_uuid)
print(filtered_data['VISS_EU_CD'].unique())

In [ ]:
filtered_data[['AMON','NTRA','DIN','CPHL_INTEG_CALC','DEPH']].head()
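
Before moving on, a non-null count per parameter gives a feel for how complete the step 1 filtered data is (a sketch using the same filtered_data frame):

In [ ]:
# Number of non-missing observations per parameter after the step 1 filter.
filtered_data[['AMON', 'NTRA', 'DIN', 'CPHL_INTEG_CALC', 'DEPH']].notna().sum()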
# Step 2


In [ ]:
### Load indicator settings filter 
w.get_step_object(step = 2, subset = subset_uuid).load_indicator_settings_filters()

In [ ]:
############################################################################################################################### 
### set available indicators  
w.get_available_indicators(subset= subset_uuid, step=2)

In [ ]:
###############################################################################################################################
# ### choose indicators
#list(zip(typeA_list, df_step1.WATER_TYPE_AREA.unique()))
# indicator_list = ['oxygen','din_winter','ntot_summer', 'ntot_winter', 'dip_winter', 'ptot_summer', 'ptot_winter','bqi', 'biov', 'chl', 'secchi']
# indicator_list = ['din_winter','ntot_summer', 'ntot_winter', 'dip_winter', 'ptot_summer', 'ptot_winter']
#indicator_list = ['biov', 'chl']
# indicator_list = ['bqi', 'biov', 'chl', 'secchi']
#indicator_list = ['bqi', 'secchi'] + ['biov', 'chl'] + ['din_winter']
# indicator_list = ['din_winter','ntot_summer']
# indicator_list = ['indicator_' + indicator for indicator in indicator_list]
indicator_list = w.available_indicators
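
As an alternative to running everything in w.available_indicators, the commented pattern above can be reused with a membership check so only indicators that actually exist are requested (a sketch; the short names are examples from the commented lists, and it assumes available_indicators uses the same 'indicator_' prefix):

In [ ]:
# Example: pick out a few nutrient indicators, keeping only those that are available.
wanted = ['indicator_' + name for name in ['din_winter', 'ntot_summer', 'ntot_winter']]
nutrient_indicators = [ind for ind in wanted if ind in w.available_indicators]
print(nutrient_indicators)
# Assign this to indicator_list instead if the rest of the notebook should use it.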

In [ ]:
###############################################################################################################################  
# ### Apply indicator data filter
print('apply indicator data filter to {}'.format(indicator_list))
for indicator in indicator_list:
    w.apply_indicator_data_filter(step = 2, 
                          subset = subset_uuid, 
                          indicator = indicator)#,
#                         water_body_list = test_wb)
    #print(w.mapping_objects['water_body'][wb])
    #print('*************************************')

#df = w.get_filtered_data(subset = subset_uuid, step = 'step_2', water_body = 'SE625180-181655', indicator = 'indicator_din_winter').dropna(subset = ['DIN'])
# Step 3


In [ ]:
# ### Set up indicator objects
print('indicator set up to {}'.format(indicator_list))
w.get_step_object(step = 3, subset = subset_uuid).indicator_setup(indicator_list = indicator_list)

In [ ]:
###############################################################################################################################
# ### CALCULATE STATUS
print('CALCULATE STATUS to {}'.format(indicator_list))
w.get_step_object(step = 3, subset = subset_uuid).calculate_status(indicator_list = indicator_list)

In [ ]:
###############################################################################################################################  
# ### CALCULATE QUALITY ELEMENTS
w.get_step_object(step = 3, subset = subset_uuid).calculate_quality_element(quality_element = 'nutrients')
# w.get_step_object(step = 3, subset = subset_uuid).calculate_quality_element(quality_element = 'phytoplankton')
# w.get_step_object(step = 3, subset = subset_uuid).calculate_quality_element(quality_element = 'bottomfauna')
# w.get_step_object(step = 3, subset = subset_uuid).calculate_quality_element(quality_element = 'oxygen')
# w.get_step_object(step = 3, subset = subset_uuid).calculate_quality_element(quality_element = 'secchi')
 
# w.get_step_object(step = 3, subset = subset_uuid).calculate_quality_element(subset_unique_id = subset_uuid, quality_element = 'Phytoplankton')
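
To run all of the quality elements mentioned above in one go, the same call can simply be looped (a sketch; the element names are taken from the commented lines):

In [ ]:
step3 = w.get_step_object(step = 3, subset = subset_uuid)
for quality_element in ['nutrients', 'phytoplankton', 'bottomfauna', 'oxygen', 'secchi']:
    # Same call as above, once per quality element.
    step3.calculate_quality_element(quality_element = quality_element)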

In [20]:
w.get_data_for_waterstool(step = 3, subset = subset_uuid)


Saving data to: D:/github/w_vattenstatus/ekostat_calculator/workspaces/6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c/subsets/19ac8dc5-80be-4304-9ed2-3267bf59425b/step_3/output/results/WATERS_export.txt
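
To inspect the exported file, it can be read back with pandas (a sketch; the tab separator is an assumption about the WATERS export format, not something confirmed by the output above):

In [ ]:
export_path = paths['workspace_directory'] + '/6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c/subsets/19ac8dc5-80be-4304-9ed2-3267bf59425b/step_3/output/results/WATERS_export.txt'
# sep='\t' is an assumption about how the export is written.
waters_export = pd.read_csv(export_path, sep='\t')
print(waters_export.shape)
waters_export.head()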


In [ ]:
p = paths['workspace_directory'] + '/6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c/subsets/19ac8dc5-80be-4304-9ed2-3267bf59425b/step_3/output/results/'
# List the per-indicator result pickles written by step 3.
[f for f in os.listdir(p) if f.endswith('-by_date.pkl')]
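
Each of these result pickles can be loaded directly with pandas for inspection (a sketch; it assumes the pickles hold pandas objects, which the .pkl extension suggests but the notebook does not confirm):

In [ ]:
by_date_files = [f for f in os.listdir(p) if f.endswith('-by_date.pkl')]
if by_date_files:
    # Load the first per-date result table and peek at it.
    result_df = pd.read_pickle(os.path.join(p, by_date_files[0]))
    print(by_date_files[0])
    print(result_df.head())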

In [ ]:
paths['workspace_directory']+'/6f85f2fc-dcce-4bd4-9fc4-26fc14f9ad0c/subsets/19ac8dc5-80be-4304-9ed2-3267bf59425b/step_3/output/results/'
