In [322]:
#!/Tsan/bin/python
# -*- coding: utf-8 -*-

In [323]:
# Libraries To Use
from __future__ import division 
import numpy as np
import pandas as pd
import statsmodels.api as sm
import matplotlib.pyplot as plt
import seaborn as sns

In [324]:
%matplotlib inline

In [325]:
%load_ext line_profiler


The line_profiler extension is already loaded. To reload it, use:
  %reload_ext line_profiler

In [326]:
datapath = 'C:/Users/LZJF_02/Desktop/myjob/FOFNetWorth/'

In [327]:
# Daily networth data
filenameleigen = 'leigen.csv'
filenameqilin = 'qilin.csv'
filenameyinnuo = 'yinnuo.csv'
filenamelanse = 'lanse.csv'
filenameminghong = 'minghong.csv'

In [328]:
# Weekly networth data
filenameaiye = 'aiye.csv'
filenameqianshi = 'qianshi.csv'

In [382]:
from os import walk

f = []
for (dirpath, dirnames, filenames) in walk(datapath ):
    f.extend(filenames)

In [383]:
f


Out[383]:
['aiye.csv',
 'CTA\xb2\xdf\xc2\xd4-\xbe\xc5\xc0\xa4\xb2\xdf\xc2\xd4\xbe\xab\xd1\xa1A\xc6\xda.xls',
 'CTA\xb2\xdf\xc2\xd4-\xbe\xc5\xc0\xa4\xc1\xbf\xbb\xaf\xbb\xec\xba\xcf\xcb\xbd\xc4\xbc1\xba\xc5.xls',
 'lanse.csv',
 'leigen.csv',
 'minghong.csv',
 'qianshi.csv',
 'qilin.csv',
 'yinnuo.csv',
 '\xbe\xc5\xc0\xa4\xbe\xbb\xd6\xb5.rar',
 '\xc7\xa7\xca\xaf\xd7\xca\xb1\xbe\xc1\xbf\xb5\xc0\xb6\xd4\xb3\xe51\xba\xc5.xlsx',
 '\xbb\xaa\xd0\xc5\xcd\xf2\xb4\xef\xc6\xf4\xc1\xd61\xba\xc5\xbe\xbb\xd6\xb5.xlsx',
 '\xd2\xf2\xc5\xb5\xc6\xf4\xba\xbd1\xba\xc5\xc8\xd5\xbe\xbb\xd6\xb50428.xlsx',
 '\xd6\xb8\xca\xfd\xd4\xf6\xc7\xbf\xb2\xdf\xc2\xd4-\xbe\xc5\xc0\xa4\xba\xec\xc1\xeb\xd6\xb8\xca\xfd\xd4\xf6\xc7\xbf\xcb\xbd\xc4\xbc\xcd\xb6\xd7\xca\xbb\xf9\xbd\xf0.xls',
 '\xd6\xb8\xca\xfd\xd4\xf6\xc7\xbf\xb2\xdf\xc2\xd4-\xbd\xf0\xb8\xab\xd7\xd3\xbe\xc5\xc0\xa4\xbb\xa6\xc9\xee300\xd6\xb8\xca\xfd\xd4\xf6\xc7\xbf1\xba\xc5\xbb\xf9\xbd\xf0.xls',
 '\xc3\xf7\x9bK\xcd\xb6\xd7\xca\xb6\xe0\xb2\xdf\xc2\xd4\xb6\xd4\xb3\xe5\xd2\xbb\xba\xc5\xc8\xd5\xbe\xbb\xd6\xb5.xls',
 '\xb0\xac\xd2\xb6\xcd\xb6\xd7\xca\xc3\xbf\xd6\xdc\xbe\xbb\xd6\xb5\xb8\xfa\xd7\xd90505.xlsx',
 '\xc0\xb6\xc9\xab\xcc\xec\xbc\xca-\xbf\xed\xba\xea1\xba\xc5 \xbf\xed\xba\xea2\xba\xc5 \xbb\xf9\xbd\xf0\xbe\xbb\xd6\xb5\xb8\xfc\xd0\xc2\xbd\xd8\xd6\xc120170505.xlsx',
 '\xc1\xbf\xbb\xaf\xb6\xd4\xb3\xe5\xb2\xdf\xc2\xd4-\xbe\xc5\xc0\xa4\xc1\xbf\xbb\xaf\xbb\xec\xba\xcf\xcb\xbd\xc4\xbc2\xba\xc5\xcb\xbd\xc4\xbc\xbb\xf9\xbd\xf0.xls',
 '\xc1\xbf\xbb\xaf\xb6\xd4\xb3\xe5\xb2\xdf\xc2\xd4-\xc7\xa7\xca\xaf\xd7\xca\xb1\xbe\xbe\xc5\xc0\xa4\xc1\xbf\xbb\xaf\xb6\xd4\xb3\xe52\xba\xc5.xls',
 '\xc0\xd7\xb8\xf9\xb6\xe0\xb2\xdf\xc2\xd4&\xd0\xcb\xd2\xb5\xbe\xbb\xd6\xb5.xlsx']

In [331]:
# 雷根
dataleigen = pd.read_csv(datapath+filenameleigen,infer_datetime_format=True,parse_dates=[0],index_col=0)
dataleigen.columns =['time','leigen_NW']
dataleigen.reset_index( drop=True,inplace =True)
dataleigen.set_index('time',inplace=True)
dataleigen


Out[331]:
leigen_NW
time
2016/5/19 1.000
2016/5/20 1.000
2016/5/23 1.000
2016/5/24 1.000
2016/5/25 1.000
2016/5/26 1.000
2016/5/27 1.000
2016/5/30 1.000
2016/5/31 1.000
2016/6/1 1.000
2016/6/2 0.999
2016/6/3 0.999
2016/6/6 0.999
2016/6/7 0.999
2016/6/8 0.999
2016/6/13 0.999
2016/6/14 0.999
2016/6/15 0.999
2016/6/16 0.999
2016/6/17 0.999
2016/6/20 0.999
2016/6/21 0.999
2016/6/22 0.999
2016/6/23 0.999
2016/6/24 0.999
2016/6/27 0.999
2016/6/28 0.999
2016/6/29 0.999
2016/6/30 0.999
2016/7/1 0.999
... ...
2017/3/15 1.005
2017/3/16 1.004
2017-03-17 1.003
2017-03-20 1.003
2017-03-21 0.999
2017-03-22 1.000
2017-03-23 1.000
2017-03-24 1.001
2017-03-27 1.008
2017-03-28 1.008
2017-03-29 1.002
2017-03-30 1.004
2017-03-31 1.006
2017-04-05 1.008
2017-04-06 1.007
2017-04-07 1.003
2017-04-10 1.004
2017-04-11 1.009
2017-04-12 1.008
2017-04-13 1.010
2017-04-14 1.014
2017-04-17 1.008
2017-04-18 1.016
2017-04-19 1.031
2017-04-20 1.026
2017-04-21 1.008
2017-04-24 1.024
2017-04-25 1.022
2017-04-26 1.004
2017-04-27 1.000

228 rows × 1 columns


In [332]:
# 千石
dataqianshi = pd.read_csv(datapath+filenameqianshi,infer_datetime_format=True,parse_dates=[0],index_col=0)
#dataqianshi.columns =['time','Lei_NW']
#dataqianshi.reset_index( drop=True,inplace =True)
#dataqianshi
dataqianshi.drop('return',axis=1,inplace =True)
dataqianshi


Out[332]:
qianshi_NW
time
2014-10-31 1.000
2014-11-07 1.006
2014-11-14 1.005
2014-11-21 1.009
2014-11-28 1.015
2014-12-05 1.023
2014-12-12 1.048
2014-12-19 1.047
2014-12-26 1.054
2014-12-31 1.061
2015-01-09 1.069
2015-01-16 1.073
2015-01-23 1.073
2015-01-30 1.074
2015-02-06 1.085
2015-02-13 1.076
2015-02-27 1.082
2015-03-06 1.074
2015-03-13 1.084
2015-03-20 1.090
2015-03-27 1.093
2015-04-03 1.112
2015-04-10 1.107
2015-04-17 1.099
2015-04-24 1.108
2015-04-30 1.105
2015-05-08 1.126
2015-05-15 1.126
2015-05-22 1.141
2015-05-29 1.172
... ...
2016-09-30 1.480
2016-10-14 1.500
2016-10-21 1.502
2016-10-28 1.509
2016-11-04 1.511
2016-11-11 1.544
2016-11-18 1.534
2016-11-25 1.556
2016-12-02 1.533
2016-12-09 1.530
2016-12-16 1.529
2016-12-23 1.519
2016-12-30 1.534
2017-01-06 1.543
2017-01-13 1.522
2017-01-20 1.522
2017-01-26 1.526
2017-02-03 1.524
2017-02-10 1.529
2017-02-17 1.528
2017-02-24 1.533
2017-03-03 1.525
2017-03-10 1.526
2017-03-17 1.513
2017-03-24 1.515
2017-03-31 1.513
2017-04-07 1.521
2017-04-14 1.524
2017-04-21 1.511
2017-04-28 1.502

129 rows × 1 columns


In [333]:
# 启林
dataqilin = pd.read_csv(datapath+filenameqilin,infer_datetime_format=True,parse_dates=[0],index_col=0)
#dataqianshi.columns =['time','Lei_NW']
#dataqianshi.reset_index( drop=True,inplace =True)
#dataqianshi
dataqilin


Out[333]:
qilin_NW
time
2016-03-28 1.0000
2016-03-29 0.9991
2016-03-30 1.0008
2016-03-31 1.0002
2016-04-01 0.9991
2016-04-05 1.0038
2016-04-06 1.0087
2016-04-07 1.0073
2016-04-08 1.0053
2016-04-11 1.0096
2016-04-12 1.0067
2016-04-13 1.0058
2016-04-14 1.0053
2016-04-15 1.0062
2016-04-18 1.0106
2016-04-19 1.0108
2016-04-20 1.0075
2016-04-21 0.9957
2016-04-22 1.0011
2016-04-25 1.0001
2016-04-26 1.0051
2016-04-27 1.0056
2016-04-28 1.0056
2016-04-29 1.0050
2016-05-03 1.0127
2016-05-04 1.0138
2016-05-05 1.0116
2016-05-06 1.0057
2016-05-09 0.9959
2016-05-10 0.9967
... ...
2017-02-06 1.0613
2017-02-07 1.0699
2017-02-08 1.0701
2017-02-09 1.0800
2017-02-10 1.0800
2017-02-13 1.0832
2017-02-14 1.0825
2017-02-15 1.0841
2017-02-16 1.0822
2017-02-17 1.0815
2017-02-20 1.0791
2017-02-21 1.0866
2017-02-22 1.0906
2017-02-23 1.0917
2017-02-24 1.0947
2017-02-27 1.0998
2017-02-28 1.1027
2017-03-01 1.1049
2017-03-02 1.1039
2017-03-03 1.1048
2017-03-06 1.1076
2017-03-07 1.1086
2017-03-08 1.1102
2017-03-09 1.1104
2017-03-10 1.1137
2017-03-13 1.1129
2017-03-14 1.1168
2017-03-15 1.1177
2017-03-16 1.1165
2017-03-17 1.1164

238 rows × 1 columns


In [334]:
# 因诺
datayinnuo = pd.read_csv(datapath+filenameyinnuo,infer_datetime_format=True,parse_dates=[0],index_col=0)
#dataqianshi.columns =['time','Lei_NW']
#dataqianshi.reset_index( drop=True,inplace =True)
#dataqianshi
datayinnuo


Out[334]:
yinnuo_NW
time
2015-03-03 1.0000
2015-03-04 1.0000
2015-03-05 1.0000
2015-03-06 1.0000
2015-03-09 1.0000
2015-03-10 1.0000
2015-03-11 1.0000
2015-03-12 1.0000
2015-03-13 1.0000
2015-03-16 1.0000
2015-03-17 1.0001
2015-03-18 1.0007
2015-03-19 0.9986
2015-03-20 0.9999
2015-03-23 1.0004
2015-03-24 1.0006
2015-03-25 1.0048
2015-03-26 1.0052
2015-03-27 1.0048
2015-03-30 1.0014
2015-03-31 1.0044
2015-04-01 1.0051
2015-04-02 1.0060
2015-04-03 1.0094
2015-04-07 1.0099
2015-04-08 1.0104
2015-04-09 0.9879
2015-04-10 0.9902
2015-04-13 0.9922
2015-04-14 0.9957
... ...
2017-03-16 1.9908
2017-03-17 1.9891
2017-03-20 1.9923
2017-03-21 1.9924
2017-03-22 1.9900
2017-03-23 1.9901
2017-03-24 1.9882
2017-03-27 1.9885
2017-03-28 1.9879
2017-03-29 1.9857
2017-03-30 1.9823
2017-03-31 1.9861
2017-04-05 1.9874
2017-04-06 1.9903
2017-04-07 1.9925
2017-04-10 1.9932
2017-04-11 2.0011
2017-04-12 2.0010
2017-04-13 2.0005
2017-04-14 1.9956
2017-04-17 1.9935
2017-04-18 1.9876
2017-04-19 1.9854
2017-04-20 1.9850
2017-04-21 1.9864
2017-04-24 1.9760
2017-04-25 1.9795
2017-04-26 1.9778
2017-04-27 1.9826
2017-04-28 1.9860

523 rows × 1 columns


In [335]:
# 蓝色天际一号
datalanse = pd.read_csv(datapath+filenamelanse,infer_datetime_format=True,parse_dates=[0],index_col=0)
#dataqianshi.columns =['time','Lei_NW']
#dataqianshi.reset_index( drop=True,inplace =True)
#dataqianshi
datalanse.dropna(axis=1,inplace =True)
datalanse


Out[335]:
lanse_NW
time
2016-04-21 1.0000
2016-04-22 0.9999
2016-04-25 0.9998
2016-04-26 0.9997
2016-04-27 0.9997
2016-04-28 0.9996
2016-04-29 0.9996
2016-05-03 0.9993
2016-05-04 0.9993
2016-05-05 0.9992
2016-05-06 0.9992
2016-05-09 0.9990
2016-05-10 0.9990
2016-05-11 0.9989
2016-05-12 0.9989
2016-05-13 0.9988
2016-05-16 0.9986
2016-05-17 0.9986
2016-05-18 0.9985
2016-05-19 0.9985
2016-05-20 0.9984
2016-05-23 0.9983
2016-05-24 0.9982
2016-05-25 0.9981
2016-05-26 0.9981
2016-05-27 0.9980
2016-05-30 0.9979
2016-05-31 0.9978
2016-06-01 0.9978
2016-06-02 0.9977
... ...
2017-03-22 1.0745
2017-03-23 1.0747
2017-03-24 1.0766
2017-03-27 1.0800
2017-03-28 1.0839
2017-03-29 1.0622
2017-03-30 1.0628
2017-03-31 1.0669
2017-04-05 1.0652
2017-04-06 1.0655
2017-04-07 1.0664
2017-04-10 1.0661
2017-04-11 1.0646
2017-04-12 1.0659
2017-04-13 1.0597
2017-04-14 1.0602
2017-04-17 1.0499
2017-04-18 1.0459
2017-04-19 1.0463
2017-04-20 1.0454
2017-04-21 1.0402
2017-04-24 1.0427
2017-04-25 1.0393
2017-04-26 1.0377
2017-04-27 1.0344
2017-04-28 1.0356
2017-05-02 1.0357
2017-05-03 1.0359
2017-05-04 1.0353
2017-05-05 1.0384

250 rows × 1 columns


In [336]:
datalanse.merge(datayinnuo,left_index=True, right_index=True,how ='inner')


Out[336]:
lanse_NW yinnuo_NW
time
2016-04-21 1.0000 1.6076
2016-04-22 0.9999 1.6120
2016-04-25 0.9998 1.6127
2016-04-26 0.9997 1.6162
2016-04-27 0.9997 1.6119
2016-04-28 0.9996 1.6077
2016-04-29 0.9996 1.6045
2016-05-03 0.9993 1.6017
2016-05-04 0.9993 1.6004
2016-05-05 0.9992 1.6005
2016-05-06 0.9992 1.5914
2016-05-09 0.9990 1.5864
2016-05-10 0.9990 1.5852
2016-05-11 0.9989 1.5912
2016-05-12 0.9989 1.5895
2016-05-13 0.9988 1.5896
2016-05-16 0.9986 1.5916
2016-05-17 0.9986 1.5888
2016-05-18 0.9985 1.5892
2016-05-19 0.9985 1.5888
2016-05-20 0.9984 1.5880
2016-05-23 0.9983 1.5880
2016-05-24 0.9982 1.5882
2016-05-25 0.9981 1.5901
2016-05-26 0.9981 1.5888
2016-05-27 0.9980 1.5866
2016-05-30 0.9979 1.5830
2016-05-31 0.9978 1.5886
2016-06-01 0.9978 1.5906
2016-06-02 0.9977 1.5951
... ... ...
2017-03-16 1.0633 1.9908
2017-03-17 1.0646 1.9891
2017-03-20 1.0654 1.9923
2017-03-21 1.0677 1.9924
2017-03-22 1.0745 1.9900
2017-03-23 1.0747 1.9901
2017-03-24 1.0766 1.9882
2017-03-27 1.0800 1.9885
2017-03-28 1.0839 1.9879
2017-03-29 1.0622 1.9857
2017-03-30 1.0628 1.9823
2017-03-31 1.0669 1.9861
2017-04-05 1.0652 1.9874
2017-04-06 1.0655 1.9903
2017-04-07 1.0664 1.9925
2017-04-10 1.0661 1.9932
2017-04-11 1.0646 2.0011
2017-04-12 1.0659 2.0010
2017-04-13 1.0597 2.0005
2017-04-14 1.0602 1.9956
2017-04-17 1.0499 1.9935
2017-04-18 1.0459 1.9876
2017-04-19 1.0463 1.9854
2017-04-20 1.0454 1.9850
2017-04-21 1.0402 1.9864
2017-04-24 1.0427 1.9760
2017-04-25 1.0393 1.9795
2017-04-26 1.0377 1.9778
2017-04-27 1.0344 1.9826
2017-04-28 1.0356 1.9860

239 rows × 2 columns


In [337]:
# 明泓多策略对冲一号
dataminghong = pd.read_csv(datapath+filenameminghong,infer_datetime_format=True,parse_dates=[0],index_col=0)
dataminghong.dropna(axis=1,inplace= True)
dataminghong


Out[337]:
minghong_NW
time
2015-05-18 1.0000
2015-05-19 1.0000
2015-05-20 1.0000
2015-05-21 1.0000
2015-05-22 1.0000
2015-05-25 1.0000
2015-05-26 1.0000
2015-05-27 1.0000
2015-05-28 1.0000
2015-05-29 1.0000
2015-06-01 1.0000
2015-06-02 1.0000
2015-06-03 1.0000
2015-06-04 1.0032
2015-06-05 1.0021
2015-06-08 1.0033
2015-06-09 1.0142
2015-06-10 1.0067
2015-06-11 1.0105
2015-06-12 0.9951
2015-06-15 0.9949
2015-06-16 1.0117
2015-06-17 1.0343
2015-06-18 1.0296
2015-06-19 1.0333
2015-06-22 1.0321
2015-06-23 1.0310
2015-06-24 1.0342
2015-06-25 1.0557
2015-06-26 1.0528
... ...
2017-03-22 1.4720
2017-03-23 1.4660
2017-03-24 1.4720
2017-03-27 1.4800
2017-03-28 1.4760
2017-03-29 1.4730
2017-03-30 1.4620
2017-03-31 1.4590
2017-04-05 1.4680
2017-04-06 1.4730
2017-04-07 1.4780
2017-04-10 1.4760
2017-04-11 1.4770
2017-04-12 1.4820
2017-04-13 1.4820
2017-04-14 1.4810
2017-04-17 1.4720
2017-04-18 1.4680
2017-04-19 1.4650
2017-04-20 1.4640
2017-04-21 1.4640
2017-04-24 1.4600
2017-04-25 1.4580
2017-04-26 1.4540
2017-04-27 1.4550
2017-04-28 1.4580
2017-05-02 1.4580
2017-05-03 1.4590
2017-05-04 1.4660
2017-05-05 1.4640

489 rows × 1 columns


In [338]:
dataList = [dataleigen,datayinnuo,datalanse,dataqilin,dataminghong]

In [339]:
mergedNW = reduce(lambda x,y: x.merge(y,left_index=True, right_index=True,how ='inner'),dataList)

In [340]:
# daily return 
returndf = mergedNW.pct_change().iloc[1:]
returndf


Out[340]:
leigen_NW yinnuo_NW lanse_NW qilin_NW minghong_NW
time
2016-05-20 0.000000 -0.000504 -0.000100 0.001223 0.000746
2016-05-23 0.000000 0.000000 -0.000100 0.005090 0.005216
2016-05-24 0.000000 0.000126 -0.000100 -0.001317 0.000000
2016-05-25 0.000000 0.001196 -0.000100 -0.002231 -0.000741
2016-05-26 0.000000 -0.000818 0.000000 -0.001728 -0.002967
2016-05-27 0.000000 -0.001385 -0.000100 -0.006109 -0.001488
2016-05-30 0.000000 -0.002269 -0.000100 -0.011782 -0.001490
2016-05-31 0.000000 0.003538 -0.000100 -0.004458 0.002239
2016-06-01 0.000000 0.001259 0.000000 0.000729 0.002234
2016-06-02 -0.001000 0.002829 -0.000100 0.000728 0.002229
2016-06-03 0.000000 -0.000752 0.000000 0.000520 0.001483
2016-06-06 0.000000 -0.000188 -0.000200 0.000520 0.001480
2016-06-07 0.000000 0.001381 -0.000100 0.000935 0.002956
2016-06-08 0.000000 0.001692 0.000000 -0.000415 -0.000737
2016-06-13 0.000000 -0.001251 -0.000301 -0.001038 0.002950
2016-06-14 0.000000 -0.001378 0.000000 0.000208 -0.004412
2016-06-15 0.000000 0.002133 -0.000100 0.000935 0.000739
2016-06-16 0.000000 -0.000688 -0.000100 0.001246 -0.004428
2016-06-17 0.000000 0.001127 0.000000 0.001244 -0.000741
2016-06-20 0.000000 0.001877 -0.000201 0.002485 0.001484
2016-06-21 0.000000 -0.000250 0.001906 0.000826 0.001481
2016-06-22 0.000000 0.002873 0.000000 -0.001342 0.002219
2016-06-23 0.000000 -0.000997 -0.000100 0.001033 0.002214
2016-06-24 0.000000 -0.000125 0.000000 -0.001858 -0.000736
2016-06-27 0.000000 0.003803 -0.000200 0.001758 0.008843
2016-06-28 0.000000 0.003044 -0.000100 -0.001446 0.007305
2016-06-29 0.000000 0.007617 0.000000 0.002068 0.000000
2016-06-30 0.000000 0.010325 -0.000100 -0.000413 0.005076
2016-07-01 0.000000 0.011375 0.000000 0.000413 0.002165
2016-07-04 0.000000 0.019488 -0.000200 -0.001445 0.010799
... ... ... ... ... ...
2017-01-20 0.000000 0.000912 -0.001738 0.003404 0.003551
2017-01-23 0.000000 0.000861 0.001451 0.010662 0.001415
2017-01-24 0.000000 -0.000253 0.000869 -0.002110 0.005654
2017-02-03 0.000000 0.000202 -0.008975 0.010572 0.004216
2017-02-10 -0.001002 0.001821 0.010225 0.027104 0.003499
2017-02-13 0.001003 0.000556 0.010411 0.002963 0.009763
2017-02-14 0.000000 0.000505 -0.000191 -0.000646 0.002072
2017-02-15 -0.001002 -0.000454 -0.004771 0.001478 -0.003446
2017-02-16 0.000000 0.000050 -0.003452 -0.001753 -0.001383
2017-02-17 0.000000 -0.000303 0.000000 -0.000647 -0.002078
2017-02-20 0.000000 -0.000101 0.001155 -0.002219 0.002776
2017-02-21 -0.001003 0.000757 0.008745 0.006950 0.004844
2017-02-22 0.002008 0.000454 -0.003239 0.003681 -0.000689
2017-02-23 0.002004 0.000403 -0.002389 0.001009 -0.003446
2017-02-24 0.000000 0.000302 0.000671 0.002748 -0.002766
2017-02-27 -0.002000 -0.000302 0.004596 0.004659 0.005548
2017-02-28 0.001002 0.000555 -0.000095 0.002637 0.000000
2017-03-01 -0.002002 -0.000252 0.000858 0.001995 -0.000690
2017-03-02 0.000000 -0.000403 -0.000476 -0.000905 0.000000
2017-03-03 -0.001003 0.000655 -0.004764 0.000815 -0.002070
2017-03-06 0.002008 0.001209 0.002776 0.002534 0.003458
2017-03-07 0.002004 0.000503 -0.000191 0.000903 0.006203
2017-03-08 0.002000 -0.000503 0.003342 0.001443 0.001370
2017-03-09 0.004990 -0.000604 0.004378 0.000180 0.000000
2017-03-10 0.000000 0.000554 -0.000758 0.002972 0.000000
2017-03-13 -0.003972 0.000906 0.006448 -0.000718 0.001368
2017-03-14 0.000997 -0.000151 0.004993 0.003504 0.004781
2017-03-15 0.000996 -0.000151 -0.000469 0.000806 0.001360
2017-03-16 -0.000995 0.001257 -0.002720 -0.001074 -0.002716
2017-03-17 -0.000996 -0.000854 0.001223 -0.000090 -0.001361

190 rows × 5 columns


In [341]:
#  annualized downsideRisk
downsideRisk = returndf[returndf < returndf.mean()].std(skipna = True) * np.sqrt(252)
downsideRisk


Out[341]:
leigen_NW      0.007650
yinnuo_NW      0.018120
lanse_NW       0.029260
qilin_NW       0.047360
minghong_NW    0.040952
dtype: float64

In [342]:
mergedNW = mergedNW/mergedNW.iloc[0]
mergedNW


Out[342]:
leigen_NW yinnuo_NW lanse_NW qilin_NW minghong_NW
time
2016-05-19 1.000 1.000000 1.000000 1.000000 1.000000
2016-05-20 1.000 0.999496 0.999900 1.001223 1.000746
2016-05-23 1.000 0.999496 0.999800 1.006319 1.005966
2016-05-24 1.000 0.999622 0.999700 1.004994 1.005966
2016-05-25 1.000 1.000818 0.999599 1.002752 1.005220
2016-05-26 1.000 1.000000 0.999599 1.001019 1.002237
2016-05-27 1.000 0.998615 0.999499 0.994904 1.000746
2016-05-30 1.000 0.996349 0.999399 0.983182 0.999254
2016-05-31 1.000 0.999874 0.999299 0.978799 1.001491
2016-06-01 1.000 1.001133 0.999299 0.979513 1.003729
2016-06-02 0.999 1.003965 0.999199 0.980226 1.005966
2016-06-03 0.999 1.003210 0.999199 0.980736 1.007457
2016-06-06 0.999 1.003021 0.998998 0.981246 1.008949
2016-06-07 0.999 1.004406 0.998898 0.982163 1.011931
2016-06-08 0.999 1.006105 0.998898 0.981755 1.011186
2016-06-13 0.999 1.004846 0.998598 0.980736 1.014169
2016-06-14 0.999 1.003462 0.998598 0.980940 1.009694
2016-06-15 0.999 1.005602 0.998498 0.981857 1.010440
2016-06-16 0.999 1.004909 0.998398 0.983080 1.005966
2016-06-17 0.999 1.006042 0.998398 0.984303 1.005220
2016-06-20 0.999 1.007931 0.998197 0.986750 1.006711
2016-06-21 0.999 1.007679 1.000100 0.987565 1.008203
2016-06-22 0.999 1.010574 1.000100 0.986240 1.010440
2016-06-23 0.999 1.009567 1.000000 0.987259 1.012677
2016-06-24 0.999 1.009441 1.000000 0.985425 1.011931
2016-06-27 0.999 1.013280 0.999800 0.987157 1.020880
2016-06-28 0.999 1.016365 0.999700 0.985730 1.028337
2016-06-29 0.999 1.024106 0.999700 0.987769 1.028337
2016-06-30 0.999 1.034680 0.999599 0.987361 1.033557
2016-07-01 0.999 1.046450 0.999599 0.987769 1.035794
... ... ... ... ... ...
2017-01-20 0.998 1.242951 1.035353 1.051575 1.053691
2017-01-23 0.998 1.244021 1.036855 1.062787 1.055183
2017-01-24 0.998 1.243706 1.037757 1.060544 1.061148
2017-02-03 0.998 1.243958 1.028443 1.071756 1.065623
2017-02-10 0.997 1.246224 1.038958 1.100805 1.069351
2017-02-13 0.998 1.246916 1.049775 1.104067 1.079791
2017-02-14 0.998 1.247545 1.049574 1.103353 1.082028
2017-02-15 0.997 1.246979 1.044567 1.104984 1.078300
2017-02-16 0.997 1.247042 1.040961 1.103048 1.076808
2017-02-17 0.997 1.246664 1.040961 1.102334 1.074571
2017-02-20 0.997 1.246538 1.042163 1.099888 1.077554
2017-02-21 0.996 1.247482 1.051277 1.107532 1.082774
2017-02-22 0.998 1.248049 1.047872 1.111609 1.082028
2017-02-23 1.000 1.248552 1.045368 1.112731 1.078300
2017-02-24 1.000 1.248930 1.046069 1.115788 1.075317
2017-02-27 0.998 1.248552 1.050876 1.120987 1.081283
2017-02-28 0.999 1.249245 1.050776 1.123943 1.081283
2017-03-01 0.997 1.248930 1.051678 1.126185 1.080537
2017-03-02 0.997 1.248426 1.051177 1.125166 1.080537
2017-03-03 0.996 1.249245 1.046169 1.126083 1.078300
2017-03-06 0.998 1.250755 1.049074 1.128937 1.082028
2017-03-07 1.000 1.251385 1.048873 1.129956 1.088740
2017-03-08 1.002 1.250755 1.052379 1.131587 1.090231
2017-03-09 1.007 1.250000 1.056985 1.131791 1.090231
2017-03-10 1.007 1.250692 1.056184 1.135154 1.090231
2017-03-13 1.003 1.251825 1.062994 1.134339 1.091723
2017-03-14 1.004 1.251636 1.068302 1.138314 1.096943
2017-03-15 1.005 1.251448 1.067802 1.139231 1.098434
2017-03-16 1.004 1.253021 1.064897 1.138008 1.095451
2017-03-17 1.003 1.251951 1.066199 1.137906 1.093960

191 rows × 5 columns


In [386]:
# get the last day of each month
endOfMonthList = mergedNW .iloc[mergedNW .resample('M').size().cumsum().sub(1)].index

In [388]:
# Monthly return
mergedNW.loc[endOfMonthList].pct_change().mean()


Out[388]:
leigen_NW      0.000301
yinnuo_NW      0.023233
lanse_NW       0.006553
qilin_NW       0.015419
minghong_NW    0.008990
dtype: float64

In [394]:
# get the last day of each week
endOfWeekList = set(mergedNW .iloc[mergedNW .resample('W').size().cumsum().sub(1)].index)

# Weekly return
mergedNW.loc[endOfWeekList].pct_change().mean()


Out[394]:
leigen_NW     -0.000043
yinnuo_NW      0.009548
lanse_NW       0.001114
qilin_NW       0.003244
minghong_NW    0.002008
dtype: float64

In [343]:
# annualized Return
annualizedReturn = (1+returndf.mean())**252 - 1
annualizedReturn


Out[343]:
leigen_NW      0.004055
yinnuo_NW      0.348977
lanse_NW       0.089784
qilin_NW       0.189884
minghong_NW    0.128604
dtype: float64

In [344]:
# annualized  Volatility
annualizedVol = (returndf.std() * np.sqrt(252))
annualizedVol


Out[344]:
leigen_NW      0.012163
yinnuo_NW      0.051696
lanse_NW       0.044007
qilin_NW       0.071146
minghong_NW    0.061389
dtype: float64

In [345]:
# Shape Ratio
shapeRatio = annualizedReturn/ annualizedVol
shapeRatio


Out[345]:
leigen_NW      0.333359
yinnuo_NW      6.750548
lanse_NW       2.040204
qilin_NW       2.668927
minghong_NW    2.094896
dtype: float64

In [346]:
# Sortino Ratio
sortinoRatio = annualizedReturn/downsideRisk
sortinoRatio


Out[346]:
leigen_NW       0.530023
yinnuo_NW      19.259371
lanse_NW        3.068476
qilin_NW        4.009408
minghong_NW     3.140385
dtype: float64

In [367]:
# Max draw down
maxdd = mergedNW.copy()
maxdd.iloc[0] = 0
for date in mergedNW.index[1:]:
    maxdd.loc[date] = 1-mergedNW.loc[date]/mergedNW.loc[:date].max()
maxddInfo = pd.concat([maxdd.max(),maxdd.idxmax()],axis=1)
maxddInfo.columns = ['Max_drawdown','Time']
maxddInfo


Out[367]:
Max_drawdown Time
leigen_NW 0.006000 2016-12-12
yinnuo_NW 0.014825 2016-08-03
lanse_NW 0.025896 2017-02-03
qilin_NW 0.045814 2017-01-16
minghong_NW 0.047361 2017-01-19

In [348]:
# calmar Ratio
calmarRatio = annualizedReturn/ maxddInfo['Max_drawdown']
calmarRatio


Out[348]:
leigen_NW       0.675788
yinnuo_NW      23.539957
lanse_NW        3.467034
qilin_NW        4.144642
minghong_NW     2.715372
dtype: float64

In [379]:
# information integration 
infodf  = pd.concat([annualizedReturn, annualizedVol,downsideRisk,maxddInfo['Max_drawdown'],shapeRatio,sortinoRatio,calmarRatio], axis=1)
infodf.columns = ['Return','Volatility','Downside_Risk','Max_DD','Shape_Ratio','Sortino_Ratio','Calmar_Ratio']
rankdict = dict(zip(infodf.columns,[1,-1,-1,-1,1,1,1]))
#infodf = np.round(infodf)
adjInfodf = infodf.apply(lambda x: x * rankdict[x.name],axis=0)
#infodf*rankdict.rank(axis=0)
adjInfodf.rank()


Out[379]:
Return Volatility Downside_Risk Max_DD Shape_Ratio Sortino_Ratio Calmar_Ratio
leigen_NW 1.0 5.0 5.0 5.0 1.0 1.0 1.0
yinnuo_NW 5.0 3.0 4.0 4.0 5.0 5.0 5.0
lanse_NW 2.0 4.0 3.0 3.0 2.0 2.0 3.0
qilin_NW 4.0 1.0 1.0 2.0 4.0 4.0 4.0
minghong_NW 3.0 2.0 2.0 1.0 3.0 3.0 2.0

In [378]:
infodf


Out[378]:
Return Volatility Downside_Risk Max_DD Shape_Ratio Sortino_Ratio Calmar_Ratio
leigen_NW 0.004055 0.012163 0.007650 0.006000 0.333359 0.530023 0.675788
yinnuo_NW 0.348977 0.051696 0.018120 0.014825 6.750548 19.259371 23.539957
lanse_NW 0.089784 0.044007 0.029260 0.025896 2.040204 3.068476 3.467034
qilin_NW 0.189884 0.071146 0.047360 0.045814 2.668927 4.009408 4.144642
minghong_NW 0.128604 0.061389 0.040952 0.047361 2.094896 3.140385 2.715372

In [381]:
# define risk preference vector
riskPref = np.array([0.2,0.1,0.1,0.2,0.1,0.1,0.2])
(adjInfodf.rank()*riskPref).sum(axis=1).rank()


Out[381]:
leigen_NW      2.0
yinnuo_NW      5.0
lanse_NW       3.0
qilin_NW       4.0
minghong_NW    1.0
dtype: float64

In [350]:
# correlation matrix
np.round(mergedNW.corr(method='pearson'),4)


Out[350]:
leigen_NW yinnuo_NW lanse_NW qilin_NW minghong_NW
leigen_NW 1.0000 -0.2152 -0.0332 0.0110 -0.0395
yinnuo_NW -0.2152 1.0000 0.6655 0.7805 0.8977
lanse_NW -0.0332 0.6655 1.0000 0.9095 0.7189
qilin_NW 0.0110 0.7805 0.9095 1.0000 0.8077
minghong_NW -0.0395 0.8977 0.7189 0.8077 1.0000

In [351]:
np.round(mergedNW.corr(method='spearman'),4)


Out[351]:
leigen_NW yinnuo_NW lanse_NW qilin_NW minghong_NW
leigen_NW 1.0000 -0.3912 -0.0836 -0.2484 -0.1923
yinnuo_NW -0.3912 1.0000 0.7542 0.9292 0.8583
lanse_NW -0.0836 0.7542 1.0000 0.7652 0.7157
qilin_NW -0.2484 0.9292 0.7652 1.0000 0.8514
minghong_NW -0.1923 0.8583 0.7157 0.8514 1.0000