In [1]:
# Notebook setup: auto-reload edited modules and make the project root importable.
# Reload when code changed:
%load_ext autoreload
%autoreload 2
# NOTE(review): %pwd result is discarded here (it only displays when last in a cell).
%pwd
%matplotlib inline
import os 
import sys
path = "../"
# Make the repo root importable so `import core` / `from event_handler import ...` work.
sys.path.append(path)
#os.path.abspath("../")
print(os.path.abspath(path))


D:\github\w_vattenstatus\ekostat_calculator

In [2]:
import pandas as pd
import numpy as np
import json
import pickle
import core
import importlib
# Reload core so local edits are picked up without restarting the kernel.
# (The original cell called importlib.reload(core) twice; once is enough.)
importlib.reload(core)
import logging
try:
    # Reset the logging module so repeated notebook runs do not keep
    # piling duplicate handlers onto the same log files.
    logging.shutdown()
    importlib.reload(logging)
except Exception:
    # Best effort only: a failed logging reset must not stop the notebook.
    # (Was a bare `except:`, which would also swallow KeyboardInterrupt.)
    pass
from event_handler import EventHandler
print(core.__file__)
pd.__version__


..\core\__init__.py
Out[2]:
'0.19.2'

Load directories


In [3]:
# Root of the ekostat_calculator checkout.
# NOTE(review): hardcoded absolute Windows path — consider the relative "../"
# (already appended to sys.path above) so the notebook runs on other machines.
root_directory = 'D:/github/w_vattenstatus/ekostat_calculator'#"../" #os.getcwd()
workspace_directory = root_directory + '/workspaces' 
resource_directory = root_directory + '/resources'
#alias = 'lena'
user_id = 'test_user' # maybe this should be the off_line user?
workspace_alias = 'lena_indicator'

Initiate EventHandler


In [4]:
print(root_directory)
# Keyword arguments for EventHandler. Reuse the directory constants defined
# in the configuration cell above instead of re-concatenating root_directory,
# so the two cells cannot drift apart.
paths = {'user_id': user_id, 
         'workspace_directory': workspace_directory, 
         'resource_directory': resource_directory, 
         'log_directory': 'D:/github' + '/log', 
         'test_data_directory': 'D:/github' + '/test_data'}


D:/github/w_vattenstatus/ekostat_calculator

In [5]:
# Create the EventHandler that manages workspaces, subsets and logging.
# Directory layout comes from the `paths` dict defined above.
ekos = EventHandler(**paths)
#request = ekos.test_requests['request_workspace_list']
#response = ekos.request_workspace_list(request) 
#ekos.write_test_response('request_workspace_list', response)
# OLD: ekos = EventHandler(root_directory)


2018-09-07 10:16:46,899	logger.py	85	add_log	DEBUG	
2018-09-07 10:16:46,914	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-07 10:16:46,914	logger.py	87	add_log	DEBUG	### Log added for log_id "event_handler" at locaton: D:\github\log\main_event_handler.log
2018-09-07 10:16:46,914	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-07 10:16:46,914	event_handler.py	106	__init__	DEBUG	Start EventHandler: event_handler
2018-09-07 10:16:46,915	event_handler.py	141	_load_mapping_objects	DEBUG	Loading mapping files from pickle file.
====================================================================================================
event_handler
D:/github/log
main
----------------------------------------------------------------------------------------------------
2018-09-07 10:16:47,291	event_handler.py	117	__init__	DEBUG	Time for mapping: 0.37560009956359863
2018-09-07 10:16:47,291	event_handler.py	122	__init__	DEBUG	Time for initiating EventHandler: 0.3921999931335449

Load existing workspace


In [6]:
##### BEHÖVS BARA FÖRSTA GÅNGEN FÖR ATT SKAPA WORKSPACE #######
#ekos.copy_workspace(source_uuid='default_workspace', target_alias=workspace_alias)

In [7]:
# List the available workspaces (uuid / alias / status) for the current user.
ekos.print_workspaces()


====================================================================================================
Current workspaces for user are:

uuid                                    alias                         status                        
----------------------------------------------------------------------------------------------------
default_workspace                       default_workspace             readable                      
e86ae1c5-d241-46a4-9236-59524b44e500    lena_indicator                editable                      
====================================================================================================

In [8]:
# Resolve the workspace alias to its uuid; subsequent API calls use the uuid.
workspace_uuid = ekos.get_unique_id_for_alias(workspace_alias = workspace_alias)
print(workspace_uuid)


e86ae1c5-d241-46a4-9236-59524b44e500

In [9]:
# Round-trip check: look the alias back up from the uuid.
workspace_alias = ekos.get_alias_for_unique_id(workspace_uuid = workspace_uuid)

In [10]:
# Load the workspace into memory (returns True on success, per Out[10] below).
ekos.load_workspace(unique_id = workspace_uuid)


2018-09-07 10:16:54,315	event_handler.py	2962	load_workspace	DEBUG	Trying to load new workspace "e86ae1c5-d241-46a4-9236-59524b44e500" with alias "lena_indicator"
2018-09-07 10:16:54,353	logger.py	85	add_log	DEBUG	
2018-09-07 10:16:54,356	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-07 10:16:54,358	logger.py	87	add_log	DEBUG	### Log added for log_id "7f201e5d-4483-4e2f-a1ea-4d1665df23dc" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\e86ae1c5-d241-46a4-9236-59524b44e500\log\subset_7f201e5d-4483-4e2f-a1ea-4d1665df23dc.log
2018-09-07 10:16:54,361	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-07 10:16:54,424	logger.py	85	add_log	DEBUG	
2018-09-07 10:16:54,426	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-07 10:16:54,428	logger.py	87	add_log	DEBUG	### Log added for log_id "81978047-1a7a-4b9c-8acb-4a09a70f6c7d" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\e86ae1c5-d241-46a4-9236-59524b44e500\log\subset_81978047-1a7a-4b9c-8acb-4a09a70f6c7d.log
2018-09-07 10:16:54,430	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-07 10:16:54,494	logger.py	85	add_log	DEBUG	
2018-09-07 10:16:54,496	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-07 10:16:54,498	logger.py	87	add_log	DEBUG	### Log added for log_id "889e11e4-1887-4dc6-8dc5-687e56494dc7" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\e86ae1c5-d241-46a4-9236-59524b44e500\log\subset_889e11e4-1887-4dc6-8dc5-687e56494dc7.log
2018-09-07 10:16:54,500	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
====================================================================================================
7f201e5d-4483-4e2f-a1ea-4d1665df23dc
D:/github/w_vattenstatus/ekostat_calculator/workspaces/e86ae1c5-d241-46a4-9236-59524b44e500/log
subset
----------------------------------------------------------------------------------------------------
====================================================================================================
81978047-1a7a-4b9c-8acb-4a09a70f6c7d
D:/github/w_vattenstatus/ekostat_calculator/workspaces/e86ae1c5-d241-46a4-9236-59524b44e500/log
subset
----------------------------------------------------------------------------------------------------
====================================================================================================
889e11e4-1887-4dc6-8dc5-687e56494dc7
D:/github/w_vattenstatus/ekostat_calculator/workspaces/e86ae1c5-d241-46a4-9236-59524b44e500/log
subset
----------------------------------------------------------------------------------------------------
====================================================================================================
2018-09-07 10:16:54,564	logger.py	85	add_log	DEBUG	
2018-09-07 10:16:54,566	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-07 10:16:54,569	logger.py	87	add_log	DEBUG	### Log added for log_id "88a513f6-9754-44b1-8b53-1d6dd0929891" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\e86ae1c5-d241-46a4-9236-59524b44e500\log\subset_88a513f6-9754-44b1-8b53-1d6dd0929891.log
2018-09-07 10:16:54,572	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-07 10:16:54,642	logger.py	85	add_log	DEBUG	
2018-09-07 10:16:54,643	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-07 10:16:54,645	logger.py	87	add_log	DEBUG	### Log added for log_id "default_subset" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\e86ae1c5-d241-46a4-9236-59524b44e500\log\subset_default_subset.log
2018-09-07 10:16:54,648	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-07 10:16:54,836	logger.py	85	add_log	DEBUG	
2018-09-07 10:16:54,836	logger.py	86	add_log	DEBUG	========================================================================================================================
2018-09-07 10:16:54,836	logger.py	87	add_log	DEBUG	### Log added for log_id "e86ae1c5-d241-46a4-9236-59524b44e500" at locaton: D:\github\w_vattenstatus\ekostat_calculator\workspaces\e86ae1c5-d241-46a4-9236-59524b44e500\log\workspace_e86ae1c5-d241-46a4-9236-59524b44e500.log
2018-09-07 10:16:54,836	logger.py	88	add_log	DEBUG	------------------------------------------------------------------------------------------------------------------------
2018-09-07 10:16:54,837	event_handler.py	2980	load_workspace	INFO	Workspace "e86ae1c5-d241-46a4-9236-59524b44e500" with alias "lena_indicator loaded."
88a513f6-9754-44b1-8b53-1d6dd0929891
D:/github/w_vattenstatus/ekostat_calculator/workspaces/e86ae1c5-d241-46a4-9236-59524b44e500/log
subset
----------------------------------------------------------------------------------------------------
====================================================================================================
default_subset
D:/github/w_vattenstatus/ekostat_calculator/workspaces/e86ae1c5-d241-46a4-9236-59524b44e500/log
subset
----------------------------------------------------------------------------------------------------
====================================================================================================
e86ae1c5-d241-46a4-9236-59524b44e500
D:/github/w_vattenstatus/ekostat_calculator/workspaces/e86ae1c5-d241-46a4-9236-59524b44e500/log
workspace
----------------------------------------------------------------------------------------------------
Out[10]:
True

In [ ]:


In [11]:
##### BEHÖVS BARA VID NYTT WORKSPACE ELLER NYA DATAFILER ######
#ekos.import_default_data(workspace_alias = workspace_alias)

Load all data in workspace


In [12]:
#ekos.get_workspace(unique_id = workspace_uuid, alias = workspace_alias).delete_alldata_export()

INNAN DU LADDAR DATA FÖRSTA GÅNGEN BEHÖVER DU SÄTTA STATUS 1 PÅ DE FILER SOM SKA LADDAS I FILEN:

workspaces/my_workspace/input_data/datatype_settings.txt


In [30]:
# Load all input data for the workspace (the recorded log shows it may be
# served from an existing all_data.pickle cache).
ekos.load_data(workspace_uuid = workspace_uuid)


self.all_data 0
2018-09-06 08:47:24,601	workspaces.py	1824	load_all_data	DEBUG	Data has been loaded from existing all_data.pickle file.
Out[30]:
True

In [13]:
# Grab the workspace object and sanity-check the size of the loaded data.
w = ekos.get_workspace(workspace_uuid = workspace_uuid)
len(w.data_handler.get_all_column_data_df())
### With the "right" DATA the len should be 10694


Out[13]:
0

In [54]:
# Print every subset uuid in the workspace together with its alias.
print('subsetlist', w.get_subset_list())
for subset_uuid in w.get_subset_list():
    print('uuid {} alias {}'.format(subset_uuid, w.uuid_mapping.get_alias(unique_id=subset_uuid)))


subsetlist ['7f201e5d-4483-4e2f-a1ea-4d1665df23dc', '81978047-1a7a-4b9c-8acb-4a09a70f6c7d', '889e11e4-1887-4dc6-8dc5-687e56494dc7', '88a513f6-9754-44b1-8b53-1d6dd0929891', 'default_subset']
uuid 7f201e5d-4483-4e2f-a1ea-4d1665df23dc alias test_subset
uuid 81978047-1a7a-4b9c-8acb-4a09a70f6c7d alias period_2007-2012_refvalues_2017
uuid 889e11e4-1887-4dc6-8dc5-687e56494dc7 alias period_2007-2012_refvalues_2013
uuid 88a513f6-9754-44b1-8b53-1d6dd0929891 alias period_2007-2012_refvalues_2006
uuid default_subset alias default_subset

In [ ]:

LOAD AND DISPLAY RESULT FILES

STATUS SHOULD ALREADY BE CALCULATED


In [12]:
def load_and_fix(subset_alias, indicator):
    """Load the pickled by-period result table for one indicator in a subset.

    Parameters
    ----------
    subset_alias : str
        Alias of the subset; resolved to a uuid via the event handler.
    indicator : str
        Indicator name, e.g. 'ntot_winter'; selects '<indicator>_by_period.pkl'
        in the subset's step-3 results directory.

    Returns
    -------
    pandas.DataFrame
        The result table with 'VISS_EU_CD' renamed to 'EU_CD'.
    """
    subset_uuid = ekos.get_unique_id_for_alias(workspace_alias = workspace_alias, subset_alias = subset_alias)
    result_path = w.get_step_object(step = 3, subset = subset_uuid).paths['directory_paths']['results']
    # Context manager closes the handle even if unpickling fails
    # (the original `pickle.load(open(...))` leaked the file handle).
    with open(result_path + '/' + indicator + '_by_period.pkl', 'rb') as fid:
        df = pickle.load(fid)
    df.rename(columns = {'VISS_EU_CD': 'EU_CD'}, inplace = True)
    return df

In [13]:
def merge_and_compare(df1, df2, suffixes):
    """Merge two indicator result tables per water body and classify the change.

    The returned frame gets a 'change' column:
      -1  global EQR dropped across the 0.6 (GES) boundary,
       1  global EQR rose across the 0.6 boundary,
       0  status class unchanged,
       2  any other difference (default).
    """
    cols = ['STATUS', 'EU_CD', 'YEAR_count', 'global_EQR', 'WATER_TYPE_AREA']
    merged = pd.merge(df1[cols], df2[cols],
                      on=['EU_CD', 'WATER_TYPE_AREA'], suffixes=suffixes)
    eqr_before = merged['global_EQR' + suffixes[0]]
    eqr_after = merged['global_EQR' + suffixes[1]]
    same_status = merged['STATUS' + suffixes[0]] == merged['STATUS' + suffixes[1]]
    merged['change'] = np.select(
        [(eqr_before >= 0.6) & (eqr_after < 0.6),
         (eqr_before < 0.6) & (eqr_after >= 0.6),
         same_status],
        [-1, 1, 0],
        default=2)
    return merged

In [15]:
# Compare winter total-N status between the 2006 and 2017 reference-value
# subsets for the same 2007-2012 period. (Same pattern is repeated below
# for din_winter and ntot_summer — candidate for a small helper/loop.)
ntot_winter2006 = load_and_fix('period_2007-2012_refvalues_2006', 'ntot_winter')
ntot_winter2017 = load_and_fix('period_2007-2012_refvalues_2017', 'ntot_winter')
ntot_winter = merge_and_compare(ntot_winter2006, ntot_winter2017, ['2006','2017'])

In [17]:
# Same comparison for winter dissolved inorganic nitrogen (DIN).
din_winter2006 = load_and_fix('period_2007-2012_refvalues_2006', 'din_winter')
din_winter2017 = load_and_fix('period_2007-2012_refvalues_2017', 'din_winter')
din_winter = merge_and_compare(din_winter2006, din_winter2017, ['2006','2017'])

In [18]:
# Same comparison for summer total nitrogen.
ntot_summer2006 = load_and_fix('period_2007-2012_refvalues_2006', 'ntot_summer')
ntot_summer2017 = load_and_fix('period_2007-2012_refvalues_2017', 'ntot_summer')
ntot_summer = merge_and_compare(ntot_summer2006, ntot_summer2017, ['2006','2017'])

In [19]:
# Combine the winter DIN, winter ntot and summer ntot comparisons into one
# table per water body.
df = pd.merge(din_winter, ntot_winter, on = ['EU_CD','WATER_TYPE_AREA'], suffixes = ['din_winter','ntot_winter'])
# BUG FIX: add_suffix() and rename() return new frames — the original cell
# discarded both results, so ntot_summer was merged without suffixes. Also
# fixed the rename key typo 'WATER_TYPE_AREAntotsummer' (missing underscore),
# which left the merge key suffixed and would break the join on that column.
ntot_summer_sfx = (ntot_summer
                   .add_suffix('ntot_summer')
                   .rename(columns = {'EU_CDntot_summer': 'EU_CD',
                                      'WATER_TYPE_AREAntot_summer': 'WATER_TYPE_AREA'}))
df = pd.merge(df, ntot_summer_sfx, on = ['EU_CD','WATER_TYPE_AREA'], suffixes = ['',''])

In [20]:
# Export the combined nitrogen comparison as a tab-separated file.
df.to_csv('D:/comparisonN20062017.txt', header = True, index = None, sep = '\t')

In [ ]:


In [ ]:
# NOTE(review): unexecuted scratch cell — ntot_winter_df_2006ref /
# ntot_winter_df_2017ref are not defined anywhere in this notebook, so this
# raises NameError. Superseded by merge_and_compare(); consider deleting.
par_list = ['STATUS','EU_CD','YEAR_count','global_EQR','WATER_TYPE_AREA']
ntot_winter = pd.merge(ntot_winter_df_2006ref[par_list], ntot_winter_df_2017ref[par_list], on = ['EU_CD','WATER_TYPE_AREA'], suffixes = ['2006','2017'])
ntot_winter.columns

In [ ]:
# NOTE(review): unexecuted scratch cell — overwrites the 'change' column
# produced by merge_and_compare() with a boolean; superseded by change().
conditions = [(din_winter['global_EQR2006'] >= 0.6) & (din_winter['global_EQR2017'] < 0.6)]
choices = [True]
din_winter['change'] = np.select(conditions, choices, default=False)

In [ ]:
# NOTE(review): unexecuted scratch cell — ntot_winter has a 'change' column,
# not 'change_ntot'; this would raise KeyError as written.
ntot_winter.loc[(ntot_winter['WATER_TYPE_AREA'].str.contains('Botten')) & (ntot_winter['change_ntot'] == True)][['EU_CD','STATUS2006','global_EQR2006','STATUS2017','global_EQR2017','change_ntot']].dropna(subset = ['global_EQR2006'])

In [ ]:
# NOTE(review): unexecuted scratch cell — din_winter has no 'change_din'
# column (merge_and_compare produces 'change'); KeyError as written.
din_winter.loc[(din_winter['WATER_TYPE_AREA'].str.contains('Botten')) & (din_winter['change'] == True)][['EU_CD','STATUS2006','global_EQR2006','STATUS2017','global_EQR2017','change_din']].dropna(subset = ['global_EQR2006'])

In [ ]:
# NOTE(review): unexecuted scratch variant of the merge done in In[19] above.
df = pd.merge(din_winter, ntot_winter, on = ['EU_CD','WATER_TYPE_AREA'], suffixes = ['din_winter','ntot_winter'])
df.loc[df['WATER_TYPE_AREA'].str.contains('Botten')]

In [ ]:
# NOTE(review): unexecuted scratch cell — 'WATER_TYPE_AREA2006' does not exist
# (WATER_TYPE_AREA is a merge key, so it is never suffixed); KeyError as written.
din_winter.loc[din_winter['WATER_TYPE_AREA2006'].str.contains('Botten')].plot(kind = 'bar',x = 'EU_CD', y = ['global_EQR2006','global_EQR2017'])

Quality Element


In [14]:
# Recalculate the Nutrients quality element for the 2017-reference subset.
subset_uuid = ekos.get_unique_id_for_alias(workspace_alias = workspace_alias, subset_alias = 'period_2007-2012_refvalues_2017')
print(subset_uuid)
print(w.get_subset_list())
w.get_step_object(step = 3, subset = subset_uuid).calculate_quality_element(subset_unique_id = subset_uuid, quality_element = 'Nutrients', class_name = 'QualityElementNutrients')


81978047-1a7a-4b9c-8acb-4a09a70f6c7d
['7f201e5d-4483-4e2f-a1ea-4d1665df23dc', '81978047-1a7a-4b9c-8acb-4a09a70f6c7d', '889e11e4-1887-4dc6-8dc5-687e56494dc7', '88a513f6-9754-44b1-8b53-1d6dd0929891', 'default_subset']
********
nutrients
P_winter columns 1 Index(['STATIONS_USED_indicator_dip_winter', 'STATUS_indicator_dip_winter',
       'VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA',
       'YEAR_count_indicator_dip_winter', 'global_EQR_indicator_dip_winter',
       'local_EQR_indicator_dip_winter', 'new_index_indicator_dip_winter',
       'ok_indicator_dip_winter', 'p_ges_indicator_dip_winter',
       'variance_indicator_dip_winter', 'index_column_indicator_dip_winter',
       'STATIONS_USED_indicator_ptot_winter', 'STATUS_indicator_ptot_winter',
       'YEAR_count_indicator_ptot_winter', 'global_EQR_indicator_ptot_winter',
       'local_EQR_indicator_ptot_winter', 'new_index_indicator_ptot_winter',
       'ok_indicator_ptot_winter', 'p_ges_indicator_ptot_winter',
       'variance_indicator_ptot_winter', 'index_column_indicator_ptot_winter'],
      dtype='object')

In [26]:
# old
def old_get_QF_results(subset_alias):
    """Recompute the Nutrients quality element for a subset and return its results.

    Unlike get_QF_results(), which reads the pickled result file, this triggers
    a fresh calculation via the step-3 object before returning the results frame.
    """
    uid = ekos.get_unique_id_for_alias(workspace_alias=workspace_alias,
                                       subset_alias=subset_alias)
    w.get_step_object(step=3, subset=uid).calculate_quality_element(
        subset_unique_id=uid,
        quality_element='Nutrients',
        class_name='QualityElementNutrients')
    return w.get_step_object(step=3, subset=uid).quality_element['Nutrients'].results

In [38]:
def get_QF_results(subset_alias):
    """Load the pickled Nutrients quality-element results for a subset.

    Parameters
    ----------
    subset_alias : str
        Alias of the subset whose step-3 results should be read.

    Returns
    -------
    pandas.DataFrame
        Contents of 'nutrients_all_results.pkl' from the subset's results dir.
    """
    subset_uuid = ekos.get_unique_id_for_alias(workspace_alias = workspace_alias, subset_alias = subset_alias)
    result_path = w.get_step_object(step = 3, subset = subset_uuid).paths['directory_paths']['results']
    # Context manager closes the handle; the original `pickle.load(open(...))`
    # leaked the file handle.
    with open(result_path + '/' + 'nutrients_all_results.pkl', 'rb') as fid:
        df = pickle.load(fid)
    return df

In [34]:
# Recompute quality-element results for all three reference-value subsets.
QF_2006 = old_get_QF_results('period_2007-2012_refvalues_2006')
QF_2013 = old_get_QF_results('period_2007-2012_refvalues_2013')
QF_2017 = old_get_QF_results('period_2007-2012_refvalues_2017')


********
nutrients
********
nutrients
********
nutrients

In [ ]:


In [16]:
# NOTE(review): the recorded traceback below shows this cell was executed
# before get_QF_results was defined (out-of-order execution, In[16] vs
# In[38]). Re-run after the definition cell for a clean Restart & Run All.
QF_2006 = get_QF_results('period_2007-2012_refvalues_2006')
QF_2013 = get_QF_results('period_2007-2012_refvalues_2013')
QF_2017 = get_QF_results('period_2007-2012_refvalues_2017')


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-16-21a4a0d902fd> in <module>()
----> 1 QF_2006 = get_QF_results('period_2007-2012_refvalues_2006')
      2 QF_2013 = get_QF_results('period_2007-2012_refvalues_2013')
      3 QF_2017 = get_QF_results('period_2007-2012_refvalues_2017')

NameError: name 'get_QF_results' is not defined

In [20]:
def merge_df(df1, df2, suffixes):
    """Merge two result tables on the water-body identity columns.

    Parameters
    ----------
    df1, df2 : pandas.DataFrame
        Tables that both contain 'EU_CD', 'WATER_TYPE_AREA' and
        'WATER_BODY_NAME'.
    suffixes : sequence of two str
        Suffixes applied to overlapping non-key columns.

    Returns
    -------
    pandas.DataFrame
        The merged table. (BUG FIX: the original function built the merge
        but had no return statement, so it always returned None.)
    """
    #par_list = ['STATUS','EU_CD','YEAR_count','global_EQR','WATER_TYPE_AREA']
    df = pd.merge(df1, df2, on = ['EU_CD','WATER_TYPE_AREA','WATER_BODY_NAME'], suffixes = suffixes)
    return df

In [35]:
# Merge the 2006 and 2017 quality-element tables on water-body identity
# and inspect the resulting column set.
df = pd.merge(QF_2006, QF_2017, on = ['VISS_EU_CD','WATER_TYPE_AREA','WATER_BODY_NAME'], suffixes = ['_2006','_2017'])
list(df.columns)


Out[35]:
['STATIONS_USED_indicator_dip_winter_2006',
 'STATUS_indicator_dip_winter_2006',
 'VISS_EU_CD',
 'WATER_BODY_NAME',
 'WATER_TYPE_AREA',
 'YEAR_count_indicator_dip_winter_2006',
 'global_EQR_indicator_dip_winter_2006',
 'local_EQR_indicator_dip_winter_2006',
 'new_index_indicator_dip_winter_2006',
 'ok_indicator_dip_winter_2006',
 'p_ges_indicator_dip_winter_2006',
 'variance_indicator_dip_winter_2006',
 'index_column_indicator_dip_winter_2006',
 'STATIONS_USED_indicator_ptot_winter_2006',
 'STATUS_indicator_ptot_winter_2006',
 'YEAR_count_indicator_ptot_winter_2006',
 'global_EQR_indicator_ptot_winter_2006',
 'local_EQR_indicator_ptot_winter_2006',
 'new_index_indicator_ptot_winter_2006',
 'ok_indicator_ptot_winter_2006',
 'p_ges_indicator_ptot_winter_2006',
 'variance_indicator_ptot_winter_2006',
 'index_column_indicator_ptot_winter_2006',
 'EQR_P_winter_mean_2006',
 'STATUS_P_winter_2006',
 'STATIONS_USED_indicator_ptot_summer_2006',
 'STATUS_indicator_ptot_summer_2006',
 'YEAR_count_indicator_ptot_summer_2006',
 'global_EQR_indicator_ptot_summer_2006',
 'local_EQR_indicator_ptot_summer_2006',
 'new_index_indicator_ptot_summer_2006',
 'ok_indicator_ptot_summer_2006',
 'p_ges_indicator_ptot_summer_2006',
 'variance_indicator_ptot_summer_2006',
 'index_column_indicator_ptot_summer_2006',
 'MEAN_P_EQR_2006',
 'STATUS_P_2006',
 'STATIONS_USED_indicator_din_winter_2006',
 'STATUS_indicator_din_winter_2006',
 'YEAR_count_indicator_din_winter_2006',
 'global_EQR_indicator_din_winter_2006',
 'local_EQR_indicator_din_winter_2006',
 'new_index_indicator_din_winter_2006',
 'ok_indicator_din_winter_2006',
 'p_ges_indicator_din_winter_2006',
 'variance_indicator_din_winter_2006',
 'index_column_indicator_din_winter_2006',
 'STATIONS_USED_indicator_ntot_winter_2006',
 'STATUS_indicator_ntot_winter_2006',
 'YEAR_count_indicator_ntot_winter_2006',
 'global_EQR_indicator_ntot_winter_2006',
 'local_EQR_indicator_ntot_winter_2006',
 'new_index_indicator_ntot_winter_2006',
 'ok_indicator_ntot_winter_2006',
 'p_ges_indicator_ntot_winter_2006',
 'variance_indicator_ntot_winter_2006',
 'index_column_indicator_ntot_winter_2006',
 'EQR_N_winter_mean_2006',
 'STATUS_N_winter_2006',
 'STATIONS_USED_indicator_ntot_summer_2006',
 'STATUS_indicator_ntot_summer_2006',
 'YEAR_count_indicator_ntot_summer_2006',
 'global_EQR_indicator_ntot_summer_2006',
 'local_EQR_indicator_ntot_summer_2006',
 'new_index_indicator_ntot_summer_2006',
 'ok_indicator_ntot_summer_2006',
 'p_ges_indicator_ntot_summer_2006',
 'variance_indicator_ntot_summer_2006',
 'index_column_indicator_ntot_summer_2006',
 'MEAN_N_EQR_2006',
 'STATUS_N_2006',
 'mean_EQR_2006',
 'STATUS_NUTRIENTS_2006',
 'STATIONS_USED_indicator_dip_winter_2017',
 'STATUS_indicator_dip_winter_2017',
 'YEAR_count_indicator_dip_winter_2017',
 'global_EQR_indicator_dip_winter_2017',
 'local_EQR_indicator_dip_winter_2017',
 'new_index_indicator_dip_winter_2017',
 'ok_indicator_dip_winter_2017',
 'p_ges_indicator_dip_winter_2017',
 'variance_indicator_dip_winter_2017',
 'index_column_indicator_dip_winter_2017',
 'STATIONS_USED_indicator_ptot_winter_2017',
 'STATUS_indicator_ptot_winter_2017',
 'YEAR_count_indicator_ptot_winter_2017',
 'global_EQR_indicator_ptot_winter_2017',
 'local_EQR_indicator_ptot_winter_2017',
 'new_index_indicator_ptot_winter_2017',
 'ok_indicator_ptot_winter_2017',
 'p_ges_indicator_ptot_winter_2017',
 'variance_indicator_ptot_winter_2017',
 'index_column_indicator_ptot_winter_2017',
 'EQR_P_winter_mean_2017',
 'STATUS_P_winter_2017',
 'STATIONS_USED_indicator_ptot_summer_2017',
 'STATUS_indicator_ptot_summer_2017',
 'YEAR_count_indicator_ptot_summer_2017',
 'global_EQR_indicator_ptot_summer_2017',
 'local_EQR_indicator_ptot_summer_2017',
 'new_index_indicator_ptot_summer_2017',
 'ok_indicator_ptot_summer_2017',
 'p_ges_indicator_ptot_summer_2017',
 'variance_indicator_ptot_summer_2017',
 'index_column_indicator_ptot_summer_2017',
 'MEAN_P_EQR_2017',
 'STATUS_P_2017',
 'STATIONS_USED_indicator_din_winter_2017',
 'STATUS_indicator_din_winter_2017',
 'YEAR_count_indicator_din_winter_2017',
 'global_EQR_indicator_din_winter_2017',
 'local_EQR_indicator_din_winter_2017',
 'new_index_indicator_din_winter_2017',
 'ok_indicator_din_winter_2017',
 'p_ges_indicator_din_winter_2017',
 'variance_indicator_din_winter_2017',
 'index_column_indicator_din_winter_2017',
 'STATIONS_USED_indicator_ntot_winter_2017',
 'STATUS_indicator_ntot_winter_2017',
 'YEAR_count_indicator_ntot_winter_2017',
 'global_EQR_indicator_ntot_winter_2017',
 'local_EQR_indicator_ntot_winter_2017',
 'new_index_indicator_ntot_winter_2017',
 'ok_indicator_ntot_winter_2017',
 'p_ges_indicator_ntot_winter_2017',
 'variance_indicator_ntot_winter_2017',
 'index_column_indicator_ntot_winter_2017',
 'EQR_N_winter_mean_2017',
 'STATUS_N_winter_2017',
 'STATIONS_USED_indicator_ntot_summer_2017',
 'STATUS_indicator_ntot_summer_2017',
 'YEAR_count_indicator_ntot_summer_2017',
 'global_EQR_indicator_ntot_summer_2017',
 'local_EQR_indicator_ntot_summer_2017',
 'new_index_indicator_ntot_summer_2017',
 'ok_indicator_ntot_summer_2017',
 'p_ges_indicator_ntot_summer_2017',
 'variance_indicator_ntot_summer_2017',
 'index_column_indicator_ntot_summer_2017',
 'MEAN_N_EQR_2017',
 'STATUS_N_2017',
 'mean_EQR_2017',
 'STATUS_NUTRIENTS_2017']

In [22]:
def change(df, col, status_col, suffixes):
    """Classify the change in status classification between two runs, in place.

    Adds the column 'change_<status_col>' to df:
      -2  EQR dropped across the 0.6 (GES) boundary
       2  EQR rose across the 0.6 boundary (also the default for any
          difference not matched below)
       0  status class unchanged
      -1  dropped one class without crossing the GES boundary
       1  rose one class without crossing the GES boundary

    Parameters
    ----------
    df : pandas.DataFrame
        Modified in place.
    col : str
        Base name of the EQR column, e.g. 'mean_EQR'.
    status_col : str
        Base name of the status column, e.g. 'STATUS_NUTRIENTS'.
    suffixes : sequence of two str
        Column suffixes for the 'before' and 'after' classification.
    """
    before, after = suffixes[0], suffixes[1]
    conditions = [(df[col+before] >= 0.6) & (df[col+after] < 0.6),
                  (df[col+before] < 0.6) & (df[col+after] >= 0.6),
                  (df[status_col+before] == df[status_col+after]),
                  (df[status_col+before] == 'HIGH') & (df[status_col+after] == 'GOOD'),
                  (df[status_col+before] == 'MODERATE') & (df[status_col+after] == 'POOR'),
                  (df[status_col+before] == 'POOR') & (df[status_col+after] == 'BAD'),
                  # BUG FIX: was 'GODD', so GOOD -> HIGH improvements fell
                  # through to the default value 2 instead of 1.
                  (df[status_col+before] == 'GOOD') & (df[status_col+after] == 'HIGH'),
                  (df[status_col+before] == 'POOR') & (df[status_col+after] == 'MODERATE'),
                  (df[status_col+before] == 'BAD') & (df[status_col+after] == 'POOR')]
    choices = [-2, 2, 0, -1, -1, -1, 1, 1, 1]
    df['change_'+status_col] = np.select(conditions, choices, default=2)

In [36]:
# old
# Apply change() to every EQR/status column pair of the merged 2006/2017
# table (superseded by the change_list loop in the cell further below).
suffixes = ['_2006','_2017'] 
change(df, 'mean_EQR', 'STATUS_NUTRIENTS', suffixes) 
change(df, 'MEAN_N_EQR', 'STATUS_N', suffixes) 
change(df, 'global_EQR_indicator_ntot_summer', 'STATUS_indicator_ntot_summer', suffixes) 
change(df, 'EQR_N_winter_mean', 'STATUS_N_winter', suffixes) 
change(df, 'global_EQR_indicator_ntot_winter', 'STATUS_indicator_ntot_winter', suffixes) 
change(df, 'global_EQR_indicator_din_winter', 'STATUS_indicator_din_winter', suffixes)
change(df, 'MEAN_P_EQR', 'STATUS_P', suffixes) 
change(df, 'global_EQR_indicator_ptot_summer', 'STATUS_indicator_ptot_summer', suffixes) 
change(df, 'EQR_P_winter_mean', 'STATUS_P_winter', suffixes) 
change(df, 'global_EQR_indicator_ptot_winter', 'STATUS_indicator_ptot_winter', suffixes) 
change(df, 'global_EQR_indicator_dip_winter', 'STATUS_indicator_dip_winter', suffixes)

In [42]:
# Build the export column list: the identity columns followed by, for every
# status column, its 2006 value, its 2017 value and the change class.
status_cols = ['STATUS_NUTRIENTS',
               'STATUS_N', 'STATUS_indicator_ntot_summer', 'STATUS_N_winter',
               'STATUS_indicator_ntot_winter', 'STATUS_indicator_din_winter',
               'STATUS_P', 'STATUS_indicator_ptot_summer', 'STATUS_P_winter',
               'STATUS_indicator_ptot_winter', 'STATUS_indicator_dip_winter']
suffixes = ['_2006','_2017']
expanded = []
for name in status_cols:
    expanded.extend([name + suffixes[0], name + suffixes[1], 'change_' + name])
col_list = ['VISS_EU_CD','WATER_BODY_NAME','WATER_TYPE_AREA'] + expanded
print(col_list)


['VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA', 'STATUS_NUTRIENTS_2006', 'STATUS_NUTRIENTS_2017', 'change_STATUS_NUTRIENTS', 'STATUS_N_2006', 'STATUS_N_2017', 'change_STATUS_N', 'STATUS_indicator_ntot_summer_2006', 'STATUS_indicator_ntot_summer_2017', 'change_STATUS_indicator_ntot_summer', 'STATUS_N_winter_2006', 'STATUS_N_winter_2017', 'change_STATUS_N_winter', 'STATUS_indicator_ntot_winter_2006', 'STATUS_indicator_ntot_winter_2017', 'change_STATUS_indicator_ntot_winter', 'STATUS_indicator_din_winter_2006', 'STATUS_indicator_din_winter_2017', 'change_STATUS_indicator_din_winter', 'STATUS_P_2006', 'STATUS_P_2017', 'change_STATUS_P', 'STATUS_indicator_ptot_summer_2006', 'STATUS_indicator_ptot_summer_2017', 'change_STATUS_indicator_ptot_summer', 'STATUS_P_winter_2006', 'STATUS_P_winter_2017', 'change_STATUS_P_winter', 'STATUS_indicator_ptot_winter_2006', 'STATUS_indicator_ptot_winter_2017', 'change_STATUS_indicator_ptot_winter', 'STATUS_indicator_dip_winter_2006', 'STATUS_indicator_dip_winter_2017', 'change_STATUS_indicator_dip_winter']

In [44]:
# Export the old-style comparison table (tab-separated, 3 decimals).
df[col_list].to_csv('//winfs-proj/proj/havgem/LenaV/Projekt/Bedömningsgrunder/Revidering 2017/Utvärdering förändring av refvärden/Nutrients_2006_2017_gammal.txt', columns = col_list, float_format='%.3f', header = True, index = None, sep = '\t')

old

suffixes = ['_2006','_2017'] change(df, 'mean_EQR', 'STATUS_NUTRIENTS', suffixes) change(df, 'MEAN_N_EQR', 'STATUS_N', suffixes) change(df, 'global_EQR_indicator_ntot_summer', 'STATUS_indicator_ntot_summer', suffixes) change(df, 'EQR_N_winter_mean', 'STATUS_N_winter', suffixes) change(df, 'global_EQR_indicator_ntot_winter', 'STATUS_indicator_ntot_winter', suffixes) change(df, 'global_EQR_indicator_din_winter', 'STATUS_indicator_din_winter', suffixes)


In [153]:
# Build the change columns and the export column list for every EQR/status
# pair in change_list. Names whose 'ok_' variant appears in the hardcoded
# membership list additionally get their 2017 YEAR_count column exported.
suffixes = ['_2006','_2017']
change_list = ['qe_nutrients',
               'indicator_n','indicator_n_summer','indicator_n_winter','indicator_ntot_winter','indicator_din_winter',
               'indicator_p','indicator_p_summer','indicator_p_winter','indicator_ptot_winter','indicator_dip_winter']
col_list = ['VISS_EU_CD','WATER_TYPE_AREA','WATER_BODY_NAME']
for ind in change_list:
    change(df, 'global_EQR_' + ind, 'STATUS_' + ind, suffixes)
    # NOTE(review): this is a membership test of 'ok_<ind>' against a fixed
    # list of indicator names, not a check against df's actual columns.
    if 'ok_'+ind in ['ok_indicator_n_summer','ok_indicator_ntot_winter','ok_indicator_din_winter',
               'ok_indicator_p_summer','ok_indicator_ptot_winter','ok_indicator_dip_winter']:
        print(ind)
        col_list = col_list + ['STATUS_' + ind + suffixes[0], 
               'STATUS_' + ind + suffixes[1], 'YEAR_count_' + ind + suffixes[1], 
               'change_'+'STATUS_' + ind]
    else:
        col_list = col_list + ['STATUS_' + ind + suffixes[0], 
               'STATUS_' + ind + suffixes[1],
               'change_'+'STATUS_' + ind]

#col_list = col_list + \
#            [L+suffixes[0] for L in ['ok_indicator_n_summer','ok_indicator_ntot_winter','ok_indicator_din_winter',
#               'ok_indicator_p_summer','ok_indicator_ptot_winter','ok_indicator_dip_winter']] + \
#            [L+suffixes[1] for L in ['ok_indicator_n_summer','ok_indicator_ntot_winter','ok_indicator_din_winter',
#               'ok_indicator_p_summer','ok_indicator_ptot_winter','ok_indicator_dip_winter']]


indicator_n_summer
indicator_ntot_winter
indicator_din_winter
indicator_p_summer
indicator_ptot_winter
indicator_dip_winter

In [154]:
# Export the revised nutrient change summary (columns built in the loop
# above) as tab-separated text to the project share.
df[col_list].to_csv('//winfs-proj/proj/havgem/LenaV/Projekt/Bedömningsgrunder/Revidering 2017/Utvärdering förändring av refvärden/Nutrients_2006_2017.txt', columns = col_list, float_format='%.3f', header = True, index = None, sep = '\t')

In [137]:
# Build the ok_* flag column names for both periods (all _2013 first, then
# all _2017); displayed as the cell output.
[flag + year
 for year in ('_2013', '_2017')
 for flag in ('ok_indicator_n_summer', 'ok_indicator_ntot_winter', 'ok_indicator_din_winter',
              'ok_indicator_p_summer', 'ok_indicator_ptot_winter', 'ok_indicator_dip_winter')]


Out[137]:
['ok_indicator_n_summer_2013',
 'ok_indicator_ntot_winter_2013',
 'ok_indicator_din_winter_2013',
 'ok_indicator_p_summer_2013',
 'ok_indicator_ptot_winter_2013',
 'ok_indicator_dip_winter_2013',
 'ok_indicator_n_summer_2017',
 'ok_indicator_ntot_winter_2017',
 'ok_indicator_din_winter_2017',
 'ok_indicator_p_summer_2017',
 'ok_indicator_ptot_winter_2017',
 'ok_indicator_dip_winter_2017']

In [69]:
# Preview the change summary columns for the first period-suffix.
# NOTE(review): the recorded run failed with a KeyError (traceback below)
# because `suffixes[0]` was '_2013' at that time and df held no
# STATUS_*_2013 columns — re-run after the cell that sets
# suffixes = ['_2006','_2017'] and builds the STATUS_* columns; also confirm
# these names match the 'STATUS_indicator_*' naming used in the loop above.
col_list = ['VISS_EU_CD','WATER_TYPE_AREA','WATER_BODY_NAME', 
            'STATUS_NUTRIENTS'+suffixes[0],'STATUS_NUTRIENTS_2017','change_STATUS_NUTRIENTS',
            'STATUS_N'+suffixes[0],'STATUS_N_2017','change_STATUS_N',
            'STATUS_ntot_summer'+suffixes[0],'STATUS_ntot_summer_2017','change_STATUS_ntot_summer',
            'STATUS_N_winter'+suffixes[0],'STATUS_N_winter_2017','change_STATUS_N_winter',
            'STATUS_ntot_winter'+suffixes[0],'STATUS_ntot_winter_2017','change_STATUS_ntot_winter',
            'STATUS_din_winter'+suffixes[0],'STATUS_din_winter_2017','change_STATUS_din_winter',
           ]
df[col_list].head()


---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
<ipython-input-69-1b0e22654584> in <module>()
      7             'STATUS_din_winter'+suffixes[0],'STATUS_din_winter_2017','change_STATUS_din_winter',
      8            ]
----> 9 df[col_list].head()

C:\Anaconda3\envs\LenaEnv\lib\site-packages\pandas\core\frame.py in __getitem__(self, key)
   2051         if isinstance(key, (Series, np.ndarray, Index, list)):
   2052             # either boolean or fancy integer index
-> 2053             return self._getitem_array(key)
   2054         elif isinstance(key, DataFrame):
   2055             return self._getitem_frame(key)

C:\Anaconda3\envs\LenaEnv\lib\site-packages\pandas\core\frame.py in _getitem_array(self, key)
   2095             return self.take(indexer, axis=0, convert=False)
   2096         else:
-> 2097             indexer = self.ix._convert_to_indexer(key, axis=1)
   2098             return self.take(indexer, axis=1, convert=True)
   2099 

C:\Anaconda3\envs\LenaEnv\lib\site-packages\pandas\core\indexing.py in _convert_to_indexer(self, obj, axis, is_setter)
   1228                 mask = check == -1
   1229                 if mask.any():
-> 1230                     raise KeyError('%s not in index' % objarr[mask])
   1231 
   1232                 return _values_from_object(indexer)

KeyError: "['STATUS_NUTRIENTS_2013' 'STATUS_NUTRIENTS_2017' 'change_STATUS_NUTRIENTS'\n 'STATUS_N_2013' 'STATUS_N_2017' 'change_STATUS_N'\n 'STATUS_ntot_summer_2013' 'STATUS_ntot_summer_2017'\n 'change_STATUS_ntot_summer' 'STATUS_N_winter_2013' 'STATUS_N_winter_2017'\n 'change_STATUS_N_winter' 'STATUS_ntot_winter_2013'\n 'STATUS_ntot_winter_2017' 'change_STATUS_ntot_winter'\n 'STATUS_din_winter_2013' 'STATUS_din_winter_2017'\n 'change_STATUS_din_winter'] not in index"

In [53]:
# Export the 2007-2012 period comparison (2013 vs 2017 reference values)
# as tab-separated text to a local drive.
df[col_list].to_csv('D:/QF_Nutrients_period2007-2012_ref2013_2017'+'.txt', float_format='%.3f', header = True, index = None, sep = '\t')

In [ ]:
# Code the change in overall nutrient status between the two periods:
#   -1 : mean_EQR dropped across the 0.6 (good/moderate) boundary
#    1 : mean_EQR rose across the boundary
#    0 : status class unchanged
#    2 : anything else (class changed without crossing 0.6)
# np.select returns the choice of the FIRST matching condition, so the
# boundary-crossing cases take precedence over the "unchanged" test.
conditions = [(df['mean_EQR'+suffixes[0]] >= 0.6) & (df['mean_EQR'+suffixes[1]] < 0.6),
             (df['mean_EQR'+suffixes[0]] < 0.6) & (df['mean_EQR'+suffixes[1]] >= 0.6),
             (df['STATUS_NUTRIENTS'+suffixes[0]] == df['STATUS_NUTRIENTS'+suffixes[1]])]
choices = [-1,1,0]
df['change_STATUS_NUTRIENTS'] = np.select(conditions, choices, default=2)

In [25]:
#.to_csv('D:/Nutrients'+subset_alias+'.txt', float_format='%.3f', header = True, index = None, sep = '\t')


********
nutrients
['AMON', 'DIN', 'NTOT', 'NTRA', 'NTRI', 'NTRZ', 'SALT', 'STATIONS_USED', 'STATUS', 'VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA', 'YEAR_count', 'global_EQR', 'index_column', 'local_EQR', 'new_index']
['PHOS', 'PTOT', 'SALT', 'STATIONS_USED', 'STATUS', 'VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA', 'YEAR_count', 'global_EQR', 'index_column', 'local_EQR', 'new_index']
['DIN', 'NTOT', 'SALT', 'STATIONS_USED', 'STATUS', 'VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA', 'YEAR_count', 'global_EQR', 'index_column', 'local_EQR', 'new_index']
['DIN', 'NTOT', 'SALT', 'STATIONS_USED', 'STATUS', 'VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA', 'YEAR_count', 'global_EQR', 'index_column', 'local_EQR', 'new_index']
['PHOS', 'PTOT', 'SALT', 'STATIONS_USED', 'STATUS', 'VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA', 'YEAR_count', 'global_EQR', 'index_column', 'local_EQR', 'new_index']
['PHOS', 'PTOT', 'SALT', 'STATIONS_USED', 'STATUS', 'VISS_EU_CD', 'WATER_BODY_NAME', 'WATER_TYPE_AREA', 'YEAR_count', 'global_EQR', 'index_column', 'local_EQR', 'new_index']
Out[25]:
VISS_EU_CD WATER_BODY_NAME WATER_TYPE_AREA STATUS_NUTRIENTS mean_EQR MEAN_N_EQR EQR_N_winter_mean global_EQR_ntot_winter global_EQR_din_winter global_EQR_ntot_summer MEAN_P_EQR EQR_P_winter_mean global_EQR_ptot_winter global_EQR_dip_winter global_EQR_ptot_summer
0 SE592000-184700 Kanholmsfjärden 12n - Östergötlands och Stockholms skärgård. M... MODERATE 0.468828 0.475279 0.363056 0.433652 0.292461 0.587503 0.462376 0.384970 0.425035 0.344905 0.539781
1 SE581700-113000 Gullmarn centralbassäng 02 - Västkustens fjordar GOOD 0.751219 0.772121 0.780627 0.804719 0.756535 0.763615 0.730318 0.714104 0.609090 0.819118 0.746531
2 SE561400-161201 S Kalmarsunds utsjövatten 09 - Blekinge skärgård och Kalmarsund. Yttre k... MODERATE 0.417731 0.553922 0.550852 0.478683 0.623021 0.556993 0.281540 0.279751 0.327740 0.231763 0.283328
3 SE562000-123800 Skälderviken 05 - Södra Hallands och norra Öresunds kustvatten MODERATE 0.539633 0.566166 0.560260 0.574496 0.546024 0.572072 0.513099 0.531137 0.476450 0.585824 0.495061
4 SE633000-195000 Örefjärden 20 - Norra Kvarkens inre kustvatten HIGH 0.808635 0.886175 0.790892 0.767844 0.813940 0.981458 0.731096 0.670761 0.728313 0.613209 0.791430

In [ ]:
# Code the change in status between the two periods for the single-indicator
# frame (same scheme as the nutrient-status cell above: -1 worse across the
# 0.6 boundary, 1 better across it, 0 unchanged, 2 other change).
# FIX: the last two statements were indented as if inside a function, which
# raises IndentationError when the cell runs at top level — dedented.
conditions = [(df['global_EQR'+suffixes[0]] >= 0.6) & (df['global_EQR'+suffixes[1]] < 0.6),
              (df['global_EQR'+suffixes[0]] < 0.6) & (df['global_EQR'+suffixes[1]] >= 0.6),
              (df['STATUS'+suffixes[0]] == df['STATUS'+suffixes[1]])]
choices = [-1, 1, 0]
df['change'] = np.select(conditions, choices, default=2)

In [ ]:
import folium
import geopandas as gpd

In [ ]:
# Read the two water-body shapefile layers (SVAR version and H. Olsson's
# coastal-water layer).
# FIX: use raw strings for the Windows paths — sequences like '\m' and '\K'
# are invalid escape sequences (DeprecationWarning since Python 3.6, a
# SyntaxError from 3.12), even though none of them altered the value here.
gdf = gpd.read_file(r'D:\LenaV\maps\SVAR\O_Back\moddad med ny typ, fast har blivit lite felaktig\havsomr_y_2012_2.shp')
gdf_HOlsson = gpd.read_file(r'D:\LenaV\maps\SVAR\H_Olsson\Kustvatten_20180213\Kustvatten_2016_4.shp')

In [ ]:
gdf_merged = gdf.merge(din_winter, on='EU_CD')

In [ ]:
gdf_merged.loc[(gdf_merged['WATER_TYPE_AREA2006'].str.contains('Botten')) & (gdf_merged['change'] == True)][['EU_CD','STATUS2006','global_EQR2006','STATUS2017','global_EQR2017','change']]

In [ ]:
# Interactive folium map: one diamond marker per water body, coloured by the
# 2017 global EQR class (red = worst ... blue = best). Water bodies whose
# status class changed between the periods get a second magenta marker drawn
# on top, with an arrow in the popup text.
m = folium.Map(location=[65, 17], zoom_start=6)
for i, row in gdf_merged.to_crs(epsg = 4326).iterrows():#, stations.PROJ.values):
    # Bin global_EQR2017 into the five status-class colours
    # (same class edges as the BoundaryNorm colormap cell below).
    if row.global_EQR2017 < 0.2:
        c = 'red'
    elif row.global_EQR2017 < 0.4:
        c = 'orange'
    elif row.global_EQR2017 < 0.6:
        c = 'yellow'
    elif row.global_EQR2017 < 0.8:
        c = 'green'
    elif row.global_EQR2017 <= 1:
        c = 'blue'
    else:
        c = None  # NaN or EQR > 1 -> no fill
    # Place the marker at the polygon centroid.
    lon = row.geometry.centroid.x
    lat = row.geometry.centroid.y
    # The description column is used for popup messages.
    marker = folium.RegularPolygonMarker([lat, lon], number_of_sides=4, radius = 5, fill_color = c, color = c, 
                                 popup = "STATUS 2006 {}".format(row.STATUS2006) +'__ '+ "STATUS 2017 {}".format(row.STATUS2017) +' ______________ '+ row.NAMN +' '+ row.EU_CD).add_to(m)
    if row.change:
        #print(row.change, row.STATUS2006, row.STATUS2017, str(lat), str(lon))
        # Highlight changed water bodies with a magenta marker on top.
        marker = folium.RegularPolygonMarker([lat, lon],  number_of_sides=4, radius = 5, fill_color = '#bf35a7', color = '#bf35a7',
                                             popup = "STATUS 2006 {}".format(row.STATUS2006) +'---> '+ "STATUS 2017 {}".format(row.STATUS2017) +' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _  '+ row.NAMN +' '+ row.EU_CD).add_to(m)
                                 
m

In [ ]:
gdf_HOlsson.columns

In [ ]:
# Static matplotlib map of 2006 status over the Bothnian coast: all water
# bodies as background, the merged (assessed) ones coloured by STATUS2006.
# NOTE(review): `mpl`, `cmap` and `norm` are defined in cells further down in
# this notebook — run those first on a fresh kernel, or this cell fails.
fig, ax = mpl.pyplot.subplots()
gdf.to_crs(epsg = 4326).plot(ax = ax)
gdf_merged.to_crs(epsg = 4326).plot(ax = ax, column = 'STATUS2006', cmap =cmap, norm = norm)
# Zoom to the Gulf of Bothnia.
ax.set_ylim([60, 67])
ax.set_xlim([16, 26])
#ax.set_ylim([4000000, 4900000])
#ax.set_xlim([4700000, 5050000])
#mpl.pyplot.savefig(result_path+'din_winter2006.pdf')

ax.get_xlim()


In [ ]:
# Choropleth of global_EQR2006 over the H. Olsson coastal layer, with the
# same per-water-body markers as the cell above; saved to test.html.
# NOTE(review): the marker loop below is a byte-for-byte copy of the earlier
# marker cell — a candidate to extract into a shared helper function.
m = folium.Map(location=[65, 17], zoom_start=6)
m.choropleth(
    geo_data=gdf_HOlsson,
    name='choropleth',
    data=din_winter,
    columns=['EU_CD', 'global_EQR2006'],
    # Join key in the shapefile's properties matched against din_winter.EU_CD.
    key_on='feature.properties.VISS_MS_CD',
    fill_color='YlGn',
    fill_opacity=0.7,
    line_opacity=0.2,
    legend_name='EQR 2006'
)


folium.LayerControl().add_to(m)

for i, row in gdf_merged.to_crs(epsg = 4326).iterrows():#, stations.PROJ.values):
    # Bin global_EQR2017 into the five status-class colours.
    if row.global_EQR2017 < 0.2:
        c = 'red'
    elif row.global_EQR2017 < 0.4:
        c = 'orange'
    elif row.global_EQR2017 < 0.6:
        c = 'yellow'
    elif row.global_EQR2017 < 0.8:
        c = 'green'
    elif row.global_EQR2017 <= 1:
        c = 'blue'
    else:
        c = None  # NaN or EQR > 1 -> no fill
    lon = row.geometry.centroid.x
    lat = row.geometry.centroid.y
    # The description column is used for popup messages.
    marker = folium.RegularPolygonMarker([lat, lon], number_of_sides=4, radius = 5, fill_color = c, color = c, 
                                 popup = "STATUS 2006 {}".format(row.STATUS2006) +'__ '+ "STATUS 2017 {}".format(row.STATUS2017) +' ______________ '+ row.NAMN +' '+ row.EU_CD).add_to(m)
    if row.change:
        #print(row.change, row.STATUS2006, row.STATUS2017, str(lat), str(lon))
        # Highlight changed water bodies with a magenta marker on top.
        marker = folium.RegularPolygonMarker([lat, lon],  number_of_sides=4, radius = 5, fill_color = '#bf35a7', color = '#bf35a7',
                                             popup = "STATUS 2006 {}".format(row.STATUS2006) +'---> '+ "STATUS 2017 {}".format(row.STATUS2017) +' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _  '+ row.NAMN +' '+ row.EU_CD).add_to(m)
   
m.save('test.html')

In [ ]:
gdf.set_index('EU_CD')['geometry']

In [ ]:
gdf_merged.columns

In [ ]:
import matplotlib as mpl
# Discrete colormap for the five status classes, matching the folium marker
# colours above (red = worst ... blue = best).
cmap = mpl.colors.ListedColormap(['red', 'orange','yellow', 'green', 'blue'])
# FIX: BoundaryNorm needs len(boundaries) == ncolors + 1 for a one-to-one
# class->colour mapping. The original [0.2, 0.4, 0.6, 0.8] defined only 3
# bins, which matplotlib interpolates across 5 colours, skipping classes.
# Adding the outer edges 0 and 1 gives the intended five 0.2-wide bins.
boundaries = [0.0, 0.2, 0.4, 0.6, 0.8, 1.0]
norm = mpl.colors.BoundaryNorm(boundaries, cmap.N, clip=True)

In [ ]:
# Plot the area-type outlines and the EQR classes, then save to PDF.
# NOTE(review): `ax = fig` passes the Figure from the subplots cell above
# where an Axes is expected — presumably meant `ax = ax`; confirm before
# re-running. NOTE(review): `result_path` is not defined anywhere in this
# notebook chunk.
fig = gdf_merged.plot(ax = fig, column = 'OMRTYP', color = None, edgecolor='black', linewidth = 0.01)
gdf_merged.plot(column = 'global_EQR', cmap = cmap, norm = norm, edgecolor='black', linewidth = 0.01)

mpl.pyplot.savefig(result_path+'din_winter2006.pdf')

In [ ]:
# Serialise the coastal-water layer to GeoJSON in WGS84 (EPSG:4326) for folium.
gjson = gdf_HOlsson.to_crs(epsg='4326').to_json()

In [ ]:
def my_color_function(feature):
    """Maps BAD to red, POOR to orange, MODERATE to yellow, GOOD to green and HIGH to blue.

    Looks the feature's VISS_MS_CD up in the global `din_winter` frame.
    Water bodies with no matching row are drawn light blue ('#CADCEA');
    unrecognised status strings fall back to black.
    """
    viss_code = feature['properties']['VISS_MS_CD']
    matches = din_winter.loc[din_winter['EU_CD'] == viss_code, 'STATUS2006'].values
    if len(matches) == 0:
        # No assessment for this water body.
        return '#CADCEA'
    colour_by_status = {'BAD': 'red',
                        'POOR': 'orange',
                        'MODERATE': 'yellow',
                        'GOOD': 'green',
                        'HIGH': 'blue'}
    return colour_by_status.get(matches[0], 'black')

In [ ]:
# GeoJSON map styled with a 5-step EQR colour scale (same palette as above).
step = folium.StepColormap(['red','orange','yellow','green','blue'], vmin=0, vmax=1., caption='step')
step
m = folium.Map(location=[60, 17], tiles='cartodbpositron', zoom_start=5)
folium.GeoJson(
    gdf_HOlsson,
    style_function=lambda feature: {
        # NOTE(review): `.values` is a numpy array, not a scalar — `step(...)`
        # presumably expects a number, and features with no match in
        # din_winter yield an empty array; verify this renders as intended
        # (my_color_function above handles the missing case explicitly).
        'fillColor': step(din_winter.dropna(subset = ['global_EQR2006']).loc[din_winter['EU_CD'] == feature['properties']['VISS_MS_CD'], 'global_EQR2006'].values),
        #'fillColor': my_color_function(feature),
        'color' : 'black',
        'weight' : 2,
        'dashArray' : '5, 5'
        }
    ).add_to(m)
m.save('test2.html')

In [ ]:
# Index global_EQR by water-body code for direct lookup.
# NOTE: only the last expression of the cell is displayed — the .head() line
# here computes a preview that is never shown.
din_winter_df_ix = din_winter_df.set_index('EU_CD')['global_EQR']
din_winter_df_ix.head()
din_winter_df_ix['SE584340-174401']

In [ ]:
def my_longlat(longlat):
    """Convert a DDMMmm-style coordinate (2-digit degrees, 2-digit minutes,
    remaining digits = hundredths of a minute) to decimal degrees."""
    text = str(longlat)
    degrees = float(text[:2])
    minutes = float(text[2:4]) + float(text[4:]) / 100
    return degrees + minutes / 60

In [ ]:
my_longlat(654125)

In [ ]:
m

In [ ]:
gjson#['features'][0]['properties']['EU_CD']

In [ ]:
# Set center for the map.
center_lat = 60
center_long = 17
# Create map object.
m = folium.Map(location=[center_lat, center_long], zoom_start=5)
# NOTE(review): key_on references feature.properties.EU_CD, while the other
# choropleth cell uses VISS_MS_CD for the same layer — confirm which property
# the GeoJSON actually carries.
m.choropleth(geo_data = gjson, key_on = 'feature.properties.EU_CD', fill_color = 'YlGnBu')
#m = folium.Map([43,-100], tiles='cartodbpositron', zoom_start=4)
m

In [ ]:

Step 3: Load indicator objects