develop-individual-uncertainty



In [1]:
"""
This NB is used to develop the .py script that makes csv files with
individual uncertainty in ../output/indiv_uncert
"""
import os
import pandas as pd
import scipy.stats as stats
import scipy.optimize as opt
import numpy as np

In [42]:
# --- Select the survey vintage to process --------------------------------
input_dir = "../clean_data/"
series = 'HICP'  # alternatives: 'UN', 'HICP'
year = '2014'
quarter = 'Q4'

# File name pattern is <series><year><quarter>.csv, e.g. HICP2014Q4.csv
input_file = '{}{}{}.csv'.format(series, year, quarter)
fname = input_dir + input_file
print(input_file)


HICP2014Q4.csv

In [43]:
# Sanity check: confirm the chosen vintage file is present before loading it.
if os.path.isfile(fname):
    print("it's all good!")
else:
    print(input_file + ' does not exist in ' + input_dir)


it's all good!

In [44]:
# Load the cleaned SPF vintage selected above into a DataFrame.
df = pd.read_csv(fname,)
# Peek at the first rows (notebook cell output).
df.head()


Out[44]:
source target newTarget id point [-2.0,-1.1] [-1.0,-0.6] [-0.5,-0.1] [0.0,0.4] [0.5,0.9] [1.0,1.4] [1.5,1.9] [2.0,2.4] [2.5,2.9] [3.0,3.4] [3.5,3.9] [4.0,5.0]
0 2014-Q4 2014 t 1 0.4 0 0.0 0 50 50 0.0 0 0 0 0 0 0
1 2014-Q4 2014 t 2 0.5 0 0.0 0 40 60 0.0 0 0 0 0 0 0
2 2014-Q4 2014 t 3 0.5 0 2.5 5 45 45 2.5 0 0 0 0 0 0
3 2014-Q4 2014 t 6 0.5 0 0.0 0 10 90 0.0 0 0 0 0 0 0
4 2014-Q4 2014 t 7 0.8 0 0.0 0 0 0 0.0 0 0 0 0 0 0

In [45]:
# Columns from position 5 onward are the probability-bin columns.
df.columns[5:]


Out[45]:
Index(['[-2.0,-1.1]', '[-1.0,-0.6]', '[-0.5,-0.1]', '[0.0,0.4]', '[0.5,0.9]',
       '[1.0,1.4]', '[1.5,1.9]', '[2.0,2.4]', '[2.5,2.9]', '[3.0,3.4]',
       '[3.5,3.9]', '[4.0,5.0]'],
      dtype='object')
# NOTE(review): the head of this cell was lost in the notebook export — the
# fragment below starts mid-block (an indented body followed by `elif`), so it
# is not runnable as-is. The missing head is presumably
# `if df.shape[1] == 17:  # HICP` (17 columns = 5 id columns + 12 bins, per
# the Out[44] header) — confirm against the original .ipynb before running.
    df = df.rename(columns={'TARGET_PERIOD':'target','FCT_SOURCE':'id','POINT':'point',
                            'TN1_0':'[-2.0,-1.1]',
                            'FN1_0TN0_6':'[-1.0,-0.6]',
                            'FN0_5TN0_1':'[-0.5,-0.1]',
                            'F0_0T0_4':'[0.0,0.4]',
                            'F0_5T0_9':'[0.5,0.9]',
                            'F1_0T1_4':'[1.0,1.4]',
                            'F1_5T1_9':'[1.5,1.9]',
                            'F2_0T2_4':'[2.0,2.4]',
                            'F2_5T2_9':'[2.5,2.9]',
                            'F3_0T3_4':'[3.0,3.4]',
                            'F3_5T3_9':'[3.5,3.9]',
                            'F4_0':'[4.0,5.0]'})
elif df.shape[1]==23: # UNEMP
    df = df.rename(columns={'TARGET_PERIOD':'target','FCT_SOURCE':'id','POINT':'point',
                            'T6_5':'[5.5,6.4]',
                            'F6_5T6_9':'[6.5,6.9]',
                            'F7_0T7_4':'[7.0,7.4]',
                            'F7_5T7_9':'[7.5,7.9]',
                            'F8_0T8_4':'[8.0,8.4]',
                            'F8_5T8_9':'[8.5,8.9]',
                            'F9_0T9_4':'[9.0,9.4]',
                            'F9_5T9_9':'[9.5,9.9]',
                            'F10_0T10_4':'[10.0,10.4]',
                            'F10_5T10_9':'[10.5,10.9]',
                            'F11_0T11_4':'[11.0,11.4]',
                            'F11_5T11_9':'[11.5,11.9]',
                            'F12_0T12_4':'[12.0,12.4]',
                            'F12_5T12_9':'[12.5,12.9]',
                            'F13_0T13_4':'[13.0,13.4]',
                            'F13_5T13_9':'[13.5,13.9]',
                            'F14_0T14_4':'[14.0,14.4]',
                            'F14_5T14_9':'[14.5,14.9]',
                            'F15_0':'[15.0,15.9]',})

In [46]:
def fit_normal_to_hist(h):
    """Fit a normal pdf to a survey probability histogram; return the fitted std.

    Parameters
    ----------
    h : array-like of float
        Bin probabilities (in percent) over the 12 HICP or the 19
        unemployment survey bins.

    Returns
    -------
    float
        Standard deviation of the fitted normal, or NaN when the histogram
        is all zeros (respondent skipped the density question).

    Raises
    ------
    ValueError
        If ``h`` does not have 12 or 19 bins.  (The earlier draft fell
        through with ``bins`` undefined and raised a NameError instead.)
    """
    # Respondent left the density question blank -> no uncertainty estimate.
    if all(h == 0):
        return float('nan')

    if len(h) == 12:
        # HICP bin edges; the open-ended tail bins are collapsed to [-2, 5].
        bins = np.array([-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5,
                         2.0, 2.5, 3.0, 3.5, 4.0, 5.0])
    elif len(h) == 19:
        # Unemployment bin edges; tails collapsed to [5.5, 16].
        bins = np.array([5.5, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0, 10.5,
                         11.0, 11.5, 12.0, 12.5, 13.0, 13.5, 14.0, 14.5,
                         15.0, 16.0])
    else:
        raise ValueError('expected 12 (HICP) or 19 (UNEMP) bins, got %d' % len(h))

    orig_hist = np.array(h).astype(float)
    # Normalise so the bin masses sum to 1 (input is in percent).
    norm_hist = orig_hist / float(np.sum(orig_hist))
    mid_points = (bins[1:] + bins[:-1]) / 2
    # Least-squares fit of a normal pdf evaluated at the bin mid-points
    # (default initial guess mu=1, sig=1, as in the original).
    popt, pcov = opt.curve_fit(lambda x, mu, sig: stats.norm.pdf(x, mu, sig),
                               mid_points, norm_hist)
    return popt[1]

In [47]:
def ZL_std(h):
    """Zarnowitz-Lambros standard deviation of a survey histogram.

    Treats the probability mass of each bin as uniformly distributed over
    its interval and computes std = sqrt(E[x^2] - E[x]^2) analytically.

    Bug fixes vs. the earlier draft: the HICP interval list contained a
    duplicated [1.0, 1.4] entry (13 intervals for 12 bins) and the loop
    started at index 1 — together this dropped the first bin's mass and
    paired every bin from [1.5, 1.9] onward with the wrong interval.

    Parameters
    ----------
    h : array-like of float
        Bin probabilities (in percent), 12 bins (HICP) or 19 (UNEMP).

    Returns
    -------
    float
        The ZL standard deviation, or NaN for an all-zero histogram.

    Raises
    ------
    ValueError
        If ``h`` does not have 12 or 19 bins.
    """
    if all(h == 0):
        return float('nan')

    if len(h) == 12:  # HICP
        intervals = [[-2.0, -1.1], [-1.0, -0.6], [-0.5, -0.1], [0.0, 0.4],
                     [0.5, 0.9], [1.0, 1.4], [1.5, 1.9], [2.0, 2.4],
                     [2.5, 2.9], [3.0, 3.4], [3.5, 3.9], [4.0, 5.0]]
    elif len(h) == 19:  # UNEMP
        intervals = [[5.5, 6.4], [6.5, 6.9], [7.0, 7.4], [7.5, 7.9],
                     [8.0, 8.4], [8.5, 8.9], [9.0, 9.4], [9.5, 9.9],
                     [10.0, 10.4], [10.5, 10.9], [11.0, 11.4], [11.5, 11.9],
                     [12.0, 12.4], [12.5, 12.9], [13.0, 13.4], [13.5, 13.9],
                     [14.0, 14.4], [14.5, 14.9], [15.0, 15.9]]
    else:
        raise ValueError('expected 12 (HICP) or 19 (UNEMP) bins, got %d' % len(h))

    sum_i1 = 0.0  # accumulates E[x^2]
    sum_i2 = 0.0  # accumulates E[x]
    for p_pct, (v1, v2) in zip(h, intervals):
        p = p_pct / 100
        # Uniform-on-[v1,v2] moments, weighted by the bin mass p.
        sum_i1 += p * (v2**3 - v1**3) / (3 * (v2 - v1))
        sum_i2 += p * (v2**2 - v1**2) / (2 * (v2 - v1))
    return np.sqrt(sum_i1 - sum_i2**2)

In [48]:
def Hist_std(h):
    """Standard deviation of a survey histogram with Sheppard's correction.

    Places each bin's mass at the bin mid-point, computes the crude
    variance, then subtracts Sheppard's grouping correction (width^2 / 12).
    Falls back to the crude (uncorrected) std when the correction would
    make the variance non-positive.

    Parameters
    ----------
    h : array-like of float
        Bin probabilities (in percent), 12 bins (HICP) or 19 (UNEMP).

    Returns
    -------
    float
        The (corrected) histogram standard deviation, or NaN for an
        all-zero histogram.

    Raises
    ------
    ValueError
        If ``h`` does not have 12 or 19 bins.  (The earlier draft raised a
        NameError via an undefined ``bins`` in that case.)
    """
    if all(h == 0):
        return float('nan')

    if len(h) == 12:
        bins = np.array([-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5,
                         2.0, 2.5, 3.0, 3.5, 4.0, 5.0])
    elif len(h) == 19:
        bins = np.array([5.5, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0, 10.5,
                         11.0, 11.5, 12.0, 12.5, 13.0, 13.5, 14.0, 14.5,
                         15.0, 16.0])
    else:
        raise ValueError('expected 12 (HICP) or 19 (UNEMP) bins, got %d' % len(h))

    orig_hist = np.array(h).astype(float)
    norm_hist = orig_hist / float(np.sum(orig_hist))  # masses sum to 1
    mid_points = (bins[1:] + bins[:-1]) / 2
    mean_crude = np.dot(norm_hist, mid_points)
    var_crude = np.dot(norm_hist, (mid_points - mean_crude) ** 2)
    # NOTE(review): Sheppard's correction uses the *mean* bin width even
    # though the outer bins are wider than the inner ones — kept as in the
    # original; confirm this approximation is intended.
    bin_width = np.diff(bins).mean()
    var_sheppard = var_crude - (bin_width ** 2) / 12
    if var_sheppard > 0:
        return np.sqrt(var_sheppard)
    return np.sqrt(var_crude)

In [49]:
# First respondent's probability histogram (bin columns start at position 5).
h = df.iloc[0,5:].values
h


Out[49]:
array([0.0, 0.0, 0.0, 50.0, 50.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], dtype=object)

In [28]:
# Smoke test: fitted-normal std for the sample histogram.
yy = fit_normal_to_hist(h)

In [ ]:


In [50]:
# Smoke test: Zarnowitz-Lambros std for the sample histogram.
tt = ZL_std(h)
tt


Out[50]:
0.27537852736430496

In [30]:
# Smoke test: Sheppard-corrected histogram std for the sample histogram.
ss = Hist_std(h)
ss


Out[30]:
0.65349052135118602

In [ ]:
# Inspect just the probability-bin columns.
df.iloc[:,5:].head()

In [ ]:
# Inspect the first respondent's bin probabilities as a Series.
df.iloc[0,5:]

In [53]:
# Boolean column mask: True for the probability-bin columns, whose names
# all contain a comma (e.g. '[0.0,0.4]'); id/point columns do not.
mask = df.columns.str.contains(',')
mask


Out[53]:
array([False, False, False, False, False,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True], dtype=bool)

In [9]:
# NOTE(review): row-wise np.mean over the raw bin percentages — this is the
# average bin probability, not the forecast mean; confirm intent (stale cell?).
df['mean'] = df.iloc[:,mask].apply(np.mean,axis=1)

In [54]:
# Per-respondent uncertainty: std of a fitted normal, one value per row.
df['GA_std'] = df.iloc[:,mask].apply(fit_normal_to_hist,axis=1)

In [55]:
# Show the augmented DataFrame (notebook cell output).
df


Out[55]:
source target newTarget id point [-2.0,-1.1] [-1.0,-0.6] [-0.5,-0.1] [0.0,0.4] [0.5,0.9] [1.0,1.4] [1.5,1.9] [2.0,2.4] [2.5,2.9] [3.0,3.4] [3.5,3.9] [4.0,5.0] GA_std
0 2014-Q4 2014 t 1 0.400000 0.000000 0.000000 0.000000 50.000000 50.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.541179
1 2014-Q4 2014 t 2 0.500000 0.000000 0.000000 0.000000 40.000000 60.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.524163
2 2014-Q4 2014 t 3 0.500000 0.000000 2.500000 5.000000 45.000000 45.000000 2.500000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.687504
3 2014-Q4 2014 t 6 0.500000 0.000000 0.000000 0.000000 10.000000 90.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.337515
4 2014-Q4 2014 t 7 0.800000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 NaN
5 2014-Q4 2014 t 14 0.400000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 NaN
6 2014-Q4 2014 t 15 0.500000 0.000000 0.000000 0.000000 10.000000 90.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.337515
7 2014-Q4 2014 t 16 0.600000 1.000000 2.000000 3.000000 25.000000 45.000000 16.000000 5.000000 2.000000 1.000000 0.000000 0.000000 0.000000 1.109188
8 2014-Q4 2014 t 20 0.500000 0.000000 0.000000 0.000000 35.000000 50.000000 15.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.693979
9 2014-Q4 2014 t 23 0.500000 0.000000 0.000000 0.000000 25.000000 70.000000 5.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.490474
10 2014-Q4 2014 t 24 0.500000 0.000000 5.000000 15.000000 25.000000 30.000000 15.000000 7.500000 2.500000 0.000000 0.000000 0.000000 0.000000 1.838406
11 2014-Q4 2014 t 26 0.500000 0.000000 0.000000 5.000000 45.000000 45.000000 5.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.672097
12 2014-Q4 2014 t 29 0.500000 0.000000 0.000000 0.000000 30.000000 60.000000 10.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.598159
13 2014-Q4 2014 t 30 0.500000 0.000000 0.000000 0.000000 10.000000 90.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.337515
14 2014-Q4 2014 t 31 0.500000 0.000000 0.000000 0.000000 25.000000 75.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.427899
15 2014-Q4 2014 t 32 0.600000 0.000000 0.000000 3.000000 34.000000 50.000000 12.000000 1.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.732385
16 2014-Q4 2014 t 35 0.500000 0.000000 0.000000 0.000000 40.000000 60.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.524163
17 2014-Q4 2014 t 36 0.300000 0.000000 0.000000 0.000000 80.000000 20.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.388353
18 2014-Q4 2014 t 37 0.500000 0.000000 0.000000 0.000000 5.000000 95.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.320563
19 2014-Q4 2014 t 38 0.600000 0.000000 0.000000 0.000000 10.000000 80.000000 10.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.395641
20 2014-Q4 2014 t 39 0.600000 0.000000 0.000000 0.000000 5.000000 95.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.320563
21 2014-Q4 2014 t 41 0.500000 0.000000 0.000000 0.000000 20.000000 70.000000 10.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.505708
22 2014-Q4 2014 t 45 0.550000 0.000000 0.000000 0.000000 30.000000 70.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.469434
23 2014-Q4 2014 t 47 0.500000 0.000000 0.000000 0.000000 20.000000 80.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.388349
24 2014-Q4 2014 t 48 0.500000 0.000000 0.000000 0.000000 50.000000 50.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.541179
25 2014-Q4 2014 t 49 0.500000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 NaN
26 2014-Q4 2014 t 52 0.500000 0.000000 0.000000 3.000000 38.000000 45.000000 10.000000 3.000000 1.000000 0.000000 0.000000 0.000000 0.000000 0.804799
27 2014-Q4 2014 t 54 0.700000 0.000000 0.000000 5.000000 20.000000 65.000000 10.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.617174
28 2014-Q4 2014 t 57 0.700000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 NaN
29 2014-Q4 2014 t 58 0.500000 0.000000 0.000000 0.000000 40.000000 60.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.524163
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
284 2014-Q4 2019 t+4 48 1.400000 0.000000 0.000000 0.000000 0.000000 0.000000 50.000000 50.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.540994
285 2014-Q4 2019 t+4 52 2.000000 0.000000 0.000000 2.000000 5.000000 11.000000 15.000000 20.000000 20.000000 18.000000 7.000000 2.000000 0.000000 2.799572
286 2014-Q4 2019 t+4 54 2.000000 0.000000 0.000000 0.000000 0.000000 5.000000 20.000000 40.000000 30.000000 5.000000 0.000000 0.000000 0.000000 0.942441
287 2014-Q4 2019 t+4 57 2.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 NaN
288 2014-Q4 2019 t+4 58 1.700000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 NaN
289 2014-Q4 2019 t+4 59 1.800000 0.000000 0.000000 0.000000 0.000000 0.000000 30.000000 60.000000 10.000000 0.000000 0.000000 0.000000 0.000000 0.597856
290 2014-Q4 2019 t+4 63 1.800000 0.000000 0.000000 0.000000 5.000000 15.000000 25.000000 40.000000 10.000000 5.000000 0.000000 0.000000 0.000000 1.163471
291 2014-Q4 2019 t+4 68 1.700000 0.000000 0.000000 0.000000 4.000000 9.000000 20.000000 30.000000 17.000000 10.000000 10.000000 0.000000 0.000000 2.176542
292 2014-Q4 2019 t+4 70 1.800000 0.000000 0.000000 0.000000 0.000000 0.000000 35.000000 65.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.502020
293 2014-Q4 2019 t+4 80 1.916421 0.000000 0.000000 1.960784 5.490196 9.019608 16.862745 18.431373 21.568627 12.156863 7.450980 5.882353 1.176471 2.989366
294 2014-Q4 2019 t+4 82 1.500000 1.000000 1.000000 1.000000 5.000000 10.000000 50.000000 20.000000 5.000000 4.000000 1.000000 1.000000 1.000000 1.176878
295 2014-Q4 2019 t+4 84 1.900000 1.000000 1.000000 1.000000 2.000000 7.000000 16.000000 24.000000 23.000000 17.000000 5.000000 2.000000 1.000000 2.694683
296 2014-Q4 2019 t+4 85 2.000000 0.096760 0.369358 1.320323 3.693487 8.086677 13.858706 18.591904 19.524958 16.051813 10.330347 5.204010 2.871656 2.903667
297 2014-Q4 2019 t+4 88 1.850000 0.000000 0.000000 2.500000 8.000000 12.000000 19.000000 24.000000 16.000000 9.000000 7.000000 2.500000 0.000000 2.953270
298 2014-Q4 2019 t+4 89 2.000000 0.000000 0.000000 0.000000 2.500000 10.000000 15.000000 25.000000 20.000000 15.000000 7.500000 2.500000 2.500000 2.723372
299 2014-Q4 2019 t+4 90 1.500000 0.000000 0.000000 0.000000 10.000000 20.000000 20.000000 30.000000 20.000000 0.000000 0.000000 0.000000 0.000000 1.386330
300 2014-Q4 2019 t+4 91 1.700000 0.000000 0.000000 0.000000 5.000000 15.000000 25.000000 37.500000 15.000000 2.500000 0.000000 0.000000 0.000000 1.153900
301 2014-Q4 2019 t+4 93 2.000000 0.000000 0.000000 0.000000 0.000000 0.000000 10.000000 25.000000 55.000000 10.000000 0.000000 0.000000 0.000000 0.773651
302 2014-Q4 2019 t+4 94 1.700000 0.500000 1.000000 3.000000 4.500000 8.000000 16.000000 32.000000 16.000000 8.000000 6.000000 3.000000 2.000000 3.053127
303 2014-Q4 2019 t+4 95 1.775500 0.000000 0.000000 0.000000 0.000000 7.000000 16.600000 42.500000 22.100000 11.800000 0.000000 0.000000 0.000000 1.065619
304 2014-Q4 2019 t+4 96 1.600000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 NaN
305 2014-Q4 2019 t+4 98 2.000000 0.000000 0.000000 0.000000 0.000000 0.000000 15.000000 35.000000 35.000000 15.000000 0.000000 0.000000 0.000000 0.951233
306 2014-Q4 2019 t+4 101 1.800000 0.000000 0.000000 0.000000 0.000000 10.000000 25.000000 50.000000 15.000000 0.000000 0.000000 0.000000 0.000000 0.834096
307 2014-Q4 2019 t+4 102 1.400000 0.000000 0.000000 0.000000 0.000000 10.000000 45.000000 35.000000 10.000000 0.000000 0.000000 0.000000 0.000000 0.803505
308 2014-Q4 2019 t+4 104 1.800000 0.000000 0.000000 0.000000 0.000000 10.000000 20.000000 30.000000 25.000000 15.000000 0.000000 0.000000 0.000000 1.280160
309 2014-Q4 2019 t+4 108 1.714953 0.000000 0.000000 0.000000 0.000000 7.130000 21.980000 34.770000 25.530000 10.340000 0.250000 0.000000 0.000000 1.119451
310 2014-Q4 2019 t+4 110 2.000000 0.066431 0.203893 0.962294 3.113570 7.615604 14.083656 19.694365 20.826293 16.654443 10.071117 4.604826 2.103508 2.793891
311 2014-Q4 2019 t+4 112 2.000000 0.000000 0.000000 0.000000 0.000000 5.000000 10.000000 20.000000 30.000000 25.000000 10.000000 0.000000 0.000000 1.523344
312 2014-Q4 2019 t+4 114 1.700000 0.000000 0.000000 3.000000 5.000000 5.000000 10.000000 55.000000 10.000000 5.000000 5.000000 2.000000 0.000000 1.526989
313 2014-Q4 2019 t+4 115 1.900000 0.056307 0.235269 1.357551 4.935207 12.286346 20.955076 24.491513 19.617106 10.767138 4.048441 1.042331 0.207716 2.474654

314 rows × 18 columns


In [56]:
# Per-respondent uncertainty: Zarnowitz-Lambros std, one value per row.
df['ZL_std'] = df.iloc[:,mask].apply(ZL_std,axis=1)

In [57]:
# Per-respondent uncertainty: Sheppard-corrected histogram std, per row.
df['Hist_std'] = df.iloc[:,mask].apply(Hist_std,axis=1)

In [58]:
# Quick look at the three new uncertainty columns.
df.head()


Out[58]:
source target newTarget id point [-2.0,-1.1] [-1.0,-0.6] [-0.5,-0.1] [0.0,0.4] [0.5,0.9] [1.0,1.4] [1.5,1.9] [2.0,2.4] [2.5,2.9] [3.0,3.4] [3.5,3.9] [4.0,5.0] GA_std ZL_std Hist_std
0 2014-Q4 2014 t 1 0.4 0 0.0 0 50 50 0.0 0 0 0 0 0 0 0.541179 0.275379 0.184780
1 2014-Q4 2014 t 2 0.5 0 0.0 0 40 60 0.0 0 0 0 0 0 0 0.524163 0.270801 0.177886
2 2014-Q4 2014 t 3 0.5 0 2.5 5 45 45 2.5 0 0 0 0 0 0 0.687504 0.385141 0.326563
3 2014-Q4 2014 t 6 0.5 0 0.0 0 10 90 0.0 0 0 0 0 0 0 0.337515 0.189297 0.150000
4 2014-Q4 2014 t 7 0.8 0 0.0 0 0 0 0.0 0 0 0 0 0 0 NaN NaN NaN

In [ ]:
# Dump the augmented frame to a scratch CSV for inspection.
df.to_csv('temp.csv')

In [37]:
# Define the aggregation calculations: cross-forecaster summary statistics
# of the point forecasts and of the three uncertainty measures.
# NOTE(review): nested dicts that rename aggregated columns (e.g.
# {'point': {'1.mean': 'mean', ...}}) were deprecated in pandas 0.20 and
# removed in 1.0 — on modern pandas rewrite this with named aggregation.
aggregations = {
    'point': { #
        '1.mean': 'mean',  #
         '2.std': 'std', #
         '3.IQR': lambda x: x.quantile(.75) - x.quantile(.25)
    },
    'GA_std': {#
       # 'mean': 'mean',
      'median': 'median'
    },
    'ZL_std': {#
      #  'mean': 'mean',
      'median': 'median'
    },
    'Hist_std': {#
     #   'mean': 'mean',
      'median': 'median'
    }
}

In [39]:
# Aggregate per (survey vintage, forecast horizon), then relabel the
# top-level columns so they sort into the desired a./b./c./d. order.
grouped = df.groupby(['source', 'newTarget']).agg(aggregations)
column_labels = {
    'point': 'a. point',
    'ZL_std': 'b. ZL_std',
    'GA_std': 'c. GA_std',
    'Hist_std': 'd. Hist_std',
}
grouped = grouped.rename(columns=column_labels)

In [40]:
# df itself is not mutated by the groupby above; peek at it again.
df.head()


Out[40]:
source target newTarget id point [5.5,6.4] [6.5,6.9] [7.0,7.4] [7.5,7.9] [8.0,8.4] ... [12.0,12.4] [12.5,12.9] [13.0,13.4] [13.5,13.9] [14.0,14.4] [14.5,14.9] [15.0,15.9] GA_std ZL_std Hist_std
0 2016-Q2 2016 t 4 10.1 0 0 0 0 0 ... 0 0 0 0 0 0 0 1.033592 0.682520 0.653491
1 2016-Q2 2016 t 5 10.1 0 0 0 0 0 ... 0 0 0 0 0 0 0 1.033592 0.495185 0.454340
2 2016-Q2 2016 t 6 10.1 0 0 0 0 0 ... 0 0 0 0 0 0 0 1.033592 0.368556 0.311528
3 2016-Q2 2016 t 8 10.1 0 0 0 0 0 ... 0 0 0 0 0 0 0 1.033592 0.360844 0.302365
4 2016-Q2 2016 t 15 10.0 0 0 0 0 0 ... 0 0 0 0 0 0 0 1.033592 0.313581 0.244028

5 rows × 27 columns


In [ ]:
# NOTE(review): `out_data` is never defined in this notebook — set it to the
# output directory string before running, or this cell raises NameError.
writer = pd.ExcelWriter(out_data + 'SPF_time_series.xlsx')

In [ ]:
# Write one Excel sheet per forecast horizon (t, t+1, ..., t+4).
# Fix: the horizon column is named 'newTarget' (see the groupby above and
# the loaded CSV's header) — 'targetNew' raised a KeyError.
for h in df.loc[:,'newTarget'].unique():
    # Slice the aggregated table at this horizon and sort the columns so the
    # a./b./c./d. labels come out in order.
    # NOTE(review): DataFrame.sortlevel was removed in pandas 1.0 — on modern
    # pandas use .sort_index(level=0, axis=1) instead.
    df_h = grouped.xs(h,level=1).sortlevel(0,axis=1)
#    print(df_h)
    df_h.to_excel(writer,h)
writer.save()