By: 顾 瞻 GU Zhan (Sam)

July 2017

[2] Data pre-processing

Explore and visualize data


In [1]:
# from __future__ import print_function, division
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns; sns.set()
import pandas as pd
import operator
from scipy import interp
from itertools import cycle
from sklearn import svm
from sklearn.utils.validation import check_random_state
from sklearn.model_selection import StratifiedKFold, cross_val_score
from sklearn.preprocessing import StandardScaler

from sklearn.ensemble import GradientBoostingRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import AdaBoostRegressor
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.ensemble import BaggingRegressor
from sklearn.linear_model import LinearRegression
from sklearn.neighbors import KNeighborsRegressor
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.ensemble import BaggingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier

from sklearn.metrics import roc_curve, auc
from statsmodels.graphics.mosaicplot import mosaic
print(__doc__)


Automatically created module for IPython interactive environment

Read raw data


In [2]:
df_history_ts = pd.read_csv('data/history_ts.csv') 
df_history_ts_process = df_history_ts.copy()
df_history_ts_process.tail()


Out[2]:
ccyy-mm time bid-price
1886 2017-07 11:29:56 92100
1887 2017-07 11:29:57 92100
1888 2017-07 11:29:58 92100
1889 2017-07 11:29:59 92200
1890 2017-07 11:30:00 92200

In [3]:
df_history_table = pd.read_csv('data/history_table.csv') 
df_history_table_process = df_history_table.copy()
df_history_table_process.tail()


Out[3]:
ccyy-mm volume-plate deal-price-low deal-price-avg deal-early-second volume-bidder
26 2017-03 10356 87800 87916 55 262010
27 2017-04 12196 89800 89850 59 252273
28 2017-05 10316 90100 90209 55 270197
29 2017-06 10312 89400 89532 45 244349
30 2017-07 10325 92200 92250 57 269189

Parameters


In [4]:
parm_ts_cycle = 61 # seconds/records per month
print('parm_ts_cycle : %d seconds' % parm_ts_cycle)
parm_ts_month = int(len(df_history_ts) / parm_ts_cycle)
print('parm_ts_month : %d months' %  parm_ts_month)

parm_calculate_base_price_second = 15 # Use the current month's bid-price at this second as the base price; later used to derive the increment price
parm_calculate_prev_bp = 15 # Number of previous price/increment lags to include, i.e. previous 1 sec, 2 sec, ... 15 sec
parm_calculate_mv = 15 # Largest previous price/increment moving-average window to calculate, i.e. previous 2 sec, 3 sec, ... 15 sec
parm_calculate_target_second = 7 # How many seconds into the future to predict: the target variable
parm_calculate_prev_month = 3 # Number of previous months to include (the earliest months must be removed from the training data)

parm_record_cut_row_head = max(parm_calculate_base_price_second, parm_calculate_prev_bp, parm_calculate_mv)
parm_record_cut_row_tail = parm_calculate_target_second
parm_record_cut_month_head = parm_calculate_prev_month + 1

parm_ts_valid_cycle = parm_ts_cycle - parm_record_cut_row_head - parm_record_cut_row_tail
print('parm_ts_valid_cycle : %d seconds' % parm_ts_valid_cycle)
parm_ts_valid_month = parm_ts_month - parm_record_cut_month_head
print('parm_ts_valid_month : %d months' % parm_ts_valid_month)

if parm_record_cut_month_head < 10:
    parm_record_cut_ccyy = pd.to_datetime('2015-0'+str(parm_record_cut_month_head))
else:
    parm_record_cut_ccyy = pd.to_datetime('2015-'+str(parm_record_cut_month_head))

print('parm_record_cut_ccyy : %s' % parm_record_cut_ccyy)

print('parm_record_cut_month_head : %d months' % parm_record_cut_month_head)
print('parm_record_cut_row_head :  %d seconds' % parm_record_cut_row_head)
print('parm_record_cut_row_tail :  %d seconds' % parm_record_cut_row_tail)


parm_ts_cycle : 61 seconds
parm_ts_month : 31 months
parm_ts_valid_cycle : 39 seconds
parm_ts_valid_month : 27 months
parm_record_cut_ccyy : 2015-04-01 00:00:00
parm_record_cut_month_head : 4 months
parm_record_cut_row_head :  15 seconds
parm_record_cut_row_tail :  7 seconds
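
A more compact way to build the same cut-off timestamp (a sketch, assuming the fixed 2015 start year used above) is to zero-pad the month with str.zfill instead of branching:


In [ ]:
# str.zfill(2) pads '4' to '04', so one expression covers both branches above
parm_record_cut_ccyy = pd.to_datetime('2015-' + str(parm_record_cut_month_head).zfill(2))
print('parm_record_cut_ccyy : %s' % parm_record_cut_ccyy)
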

In [5]:
df_history_ts_process.head()


Out[5]:
ccyy-mm time bid-price
0 2015-01 11:29:00 74000
1 2015-01 11:29:01 74000
2 2015-01 11:29:02 74000
3 2015-01 11:29:03 74000
4 2015-01 11:29:04 74000

In [ ]:

Prepare derived features

Process: df_history_ts_process


In [6]:
# date of current month
df_history_ts_process['date-curr'] = df_history_ts_process.apply(lambda row: pd.to_datetime(row['ccyy-mm']), axis=1)

# date of previous month
df_history_ts_process['date-prev'] = df_history_ts_process.apply(lambda row: row['date-curr'] - pd.offsets.MonthBegin(1), axis=1)


# Year
df_history_ts_process['year'] = df_history_ts_process.apply(lambda row: row['ccyy-mm'][0:4], axis=1)

# Month
df_history_ts_process['month'] = df_history_ts_process.apply(lambda row: row['ccyy-mm'][5:7], axis=1)

# Hour
df_history_ts_process['hour'] = df_history_ts_process.apply(lambda row: row['time'][0:2], axis=1)

# Minute
df_history_ts_process['minute'] = df_history_ts_process.apply(lambda row: row['time'][3:5], axis=1)

# Second
df_history_ts_process['second'] = df_history_ts_process.apply(lambda row: row['time'][6:8], axis=1)


# datetime of current month
df_history_ts_process['datetime-curr'] = df_history_ts_process.apply(lambda row: str(row['date-curr']) + ' ' + row['time'], axis=1)

# datetime of previous month
df_history_ts_process['datetime-prev'] = df_history_ts_process.apply(lambda row: str(row['date-prev']) + ' ' + row['time'], axis=1)

In [7]:
df_history_ts_process.tail()


Out[7]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second datetime-curr datetime-prev
1886 2017-07 11:29:56 92100 2017-07-01 2017-06-01 2017 07 11 29 56 2017-07-01 00:00:00 11:29:56 2017-06-01 00:00:00 11:29:56
1887 2017-07 11:29:57 92100 2017-07-01 2017-06-01 2017 07 11 29 57 2017-07-01 00:00:00 11:29:57 2017-06-01 00:00:00 11:29:57
1888 2017-07 11:29:58 92100 2017-07-01 2017-06-01 2017 07 11 29 58 2017-07-01 00:00:00 11:29:58 2017-06-01 00:00:00 11:29:58
1889 2017-07 11:29:59 92200 2017-07-01 2017-06-01 2017 07 11 29 59 2017-07-01 00:00:00 11:29:59 2017-06-01 00:00:00 11:29:59
1890 2017-07 11:30:00 92200 2017-07-01 2017-06-01 2017 07 11 30 00 2017-07-01 00:00:00 11:30:00 2017-06-01 00:00:00 11:30:00
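
The row-wise apply calls above can be vectorized with the pandas .str and .dt accessors; a minimal sketch of the same derived columns, assuming the raw columns shown above:


In [ ]:
# Vectorized equivalent of the row-wise apply calls above (sketch)
tmp = df_history_ts.copy()
tmp['date-curr'] = pd.to_datetime(tmp['ccyy-mm'])
tmp['date-prev'] = tmp['date-curr'] - pd.offsets.MonthBegin(1)
tmp['year']   = tmp['ccyy-mm'].str[0:4]
tmp['month']  = tmp['ccyy-mm'].str[5:7]
tmp['hour']   = tmp['time'].str[0:2]
tmp['minute'] = tmp['time'].str[3:5]
tmp['second'] = tmp['time'].str[6:8]
tmp['datetime-curr'] = tmp['date-curr'].dt.strftime('%Y-%m-%d %H:%M:%S') + ' ' + tmp['time']
tmp['datetime-prev'] = tmp['date-prev'].dt.strftime('%Y-%m-%d %H:%M:%S') + ' ' + tmp['time']
tmp.tail()
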

In [8]:
# df_history_ts_process
# df_history_ts_process[1768:]

In [9]:
# new ['base-price']
gap = 1 # only one new feature/column

for gap in range(1, gap+1):
    col_name = 'base-price'+str(parm_calculate_base_price_second)+'sec'
    col_name_base_price = col_name
    col_data = pd.DataFrame(columns=[col_name])
    print('Creating : ', col_name)  

    for month in range(0, parm_ts_month):
        for i in range(0, parm_ts_cycle):
            col_data.loc[month*parm_ts_cycle+i] = df_history_ts_process['bid-price'][month*parm_ts_cycle+parm_calculate_base_price_second]
  
    df_history_ts_process[col_name] = col_data

print('Total records processed : ', len(col_data))


Creating :  base-price15sec
Total records processed :  1891
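
The nested loops above build the column cell by cell; an equivalent vectorized sketch (assuming 61 rows per month, indexed as above) broadcasts the bid-price observed at second parm_calculate_base_price_second to every row of its month:


In [ ]:
# Vectorized equivalent (sketch): per month, take the bid-price at the base-price second and broadcast it
base = df_history_ts_process.groupby('ccyy-mm')['bid-price'] \
                            .transform(lambda s: s.iloc[parm_calculate_base_price_second])
# base.equals(df_history_ts_process['base-price15sec'].astype(base.dtype))  # should be True
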

In [10]:
# df_history_ts_process
# df_history_ts_process[1768:]

In [11]:
# new ['increment-price'] = ['bid-price'] - ['base-price']

df_history_ts_process['increment-price'] = df_history_ts_process.apply(lambda row: row['bid-price'] - row[col_name_base_price], axis=1)

In [12]:
# df_history_ts_process
# df_history_ts_process[1768:]

In [13]:
plt.figure()
plt.plot(df_history_ts_process['bid-price'])
plt.plot(df_history_ts_process[col_name_base_price])
plt.plot()
plt.figure()
plt.plot(df_history_ts_process['increment-price'])
plt.plot()


Out[13]:
[Figures: bid-price with base-price15sec overlay; increment-price]

['increment-price-target']


In [14]:
# ['increment-price-target']: the increment-price 'parm_calculate_target_second' seconds ahead (0 for the last rows of each month)

for gap in range(1, 2):
    col_name = 'increment-price-target'
    col_data = pd.DataFrame(columns=[col_name])
    print('Creating : ', col_name)  

    for month in range(0, parm_ts_month):
    #     print('month : ', month)
        for i in range(0, (parm_ts_cycle - parm_calculate_target_second)):
            col_data.loc[month*parm_ts_cycle+i] = df_history_ts_process['increment-price'][month*parm_ts_cycle+i+parm_calculate_target_second]
        for i in range((parm_ts_cycle - parm_calculate_target_second), parm_ts_cycle):
            col_data.loc[month*parm_ts_cycle+i] = 0
  
    df_history_ts_process[col_name] = col_data

print('Total records processed : ', len(col_data))


Creating :  increment-price-target
Total records processed :  1891
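
The target column is simply the increment-price parm_calculate_target_second rows ahead within each month, with the final rows zero-filled; a vectorized sketch:


In [ ]:
# Vectorized equivalent (sketch): shift the increment-price 7 seconds back within each month, zero-fill the tail
target = df_history_ts_process.groupby('ccyy-mm')['increment-price'] \
                               .shift(-parm_calculate_target_second).fillna(0)
# target should match df_history_ts_process['increment-price-target'] up to dtype
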

In [15]:
plt.figure()
plt.plot(df_history_ts_process['increment-price'])
plt.plot(df_history_ts_process['increment-price-target'])
plt.plot()

plt.figure()
plt.plot(df_history_ts_process['increment-price'][1768:])
plt.plot(df_history_ts_process['increment-price-target'][1768:])
plt.plot()


Out[15]:
[Figures: increment-price vs. increment-price-target, full series and last month]

In [ ]:


In [16]:
# previous 'parm_calculate_prev_bp' sec ['increment-price']
gap = parm_calculate_prev_bp

for gap in range(1, gap+1):
    col_name = 'increment-price-prev'+str(gap)+'sec'
    col_data = pd.DataFrame(columns=[col_name])
#     col_data_zeros = pd.DataFrame({col_name: np.zeros(gap)})
    print('Creating : ', col_name)  

    for month in range(0, parm_ts_month):
    #     print('month : ', month)
#         col_data.append(col_data_zeros)
        for i in range(0, gap):
            col_data.loc[month*parm_ts_cycle+i] = 0
        for i in range(gap, parm_ts_cycle):
            col_data.loc[month*parm_ts_cycle+i] = df_history_ts_process['increment-price'][month*parm_ts_cycle+i-gap]
  
    df_history_ts_process[col_name] = col_data

print('Total records processed : ', len(col_data))


Creating :  increment-price-prev1sec
Creating :  increment-price-prev2sec
Creating :  increment-price-prev3sec
Creating :  increment-price-prev4sec
Creating :  increment-price-prev5sec
Creating :  increment-price-prev6sec
Creating :  increment-price-prev7sec
Creating :  increment-price-prev8sec
Creating :  increment-price-prev9sec
Creating :  increment-price-prev10sec
Creating :  increment-price-prev11sec
Creating :  increment-price-prev12sec
Creating :  increment-price-prev13sec
Creating :  increment-price-prev14sec
Creating :  increment-price-prev15sec
Total records processed :  1891
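
The same groupby-shift pattern covers all fifteen lag columns; a sketch:


In [ ]:
# Vectorized equivalent (sketch) of the fifteen lag columns above
for g in range(1, parm_calculate_prev_bp + 1):
    lag_col = df_history_ts_process.groupby('ccyy-mm')['increment-price'].shift(g).fillna(0)
    # lag_col should match df_history_ts_process['increment-price-prev%dsec' % g]
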

In [17]:
# previous 'parm_calculate_mv' sec Moving Average ['increment-price']

gap = parm_calculate_mv

for gap in range(2, gap+1): # MV starts from 2 seconds, till parm_calculate_mv
    col_name = 'increment-price-mv'+str(gap)+'sec'
    col_data = pd.DataFrame(columns=[col_name])
    print('Creating : ', col_name)  

    for month in range(0, parm_ts_month):
    #     print('month : ', month)
        for i in range(0, gap):
            col_data.loc[month*parm_ts_cycle+i] = 0
        for i in range(gap, parm_ts_cycle):
            col_data.loc[month*parm_ts_cycle+i] = \
            np.mean(df_history_ts_process['increment-price'][month*parm_ts_cycle+i-gap:month*parm_ts_cycle+i])
  
    df_history_ts_process[col_name] = col_data

print('Total records processed : ', len(col_data))


Creating :  increment-price-mv2sec
Creating :  increment-price-mv3sec
Creating :  increment-price-mv4sec
Creating :  increment-price-mv5sec
Creating :  increment-price-mv6sec
Creating :  increment-price-mv7sec
Creating :  increment-price-mv8sec
Creating :  increment-price-mv9sec
Creating :  increment-price-mv10sec
Creating :  increment-price-mv11sec
Creating :  increment-price-mv12sec
Creating :  increment-price-mv13sec
Creating :  increment-price-mv14sec
Creating :  increment-price-mv15sec
Total records processed :  1891
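
The moving averages are the mean of the previous g increments, excluding the current row, so they correspond to a per-month rolling mean applied after a one-step shift; a sketch (the exact groupby/rolling helpers available depend on the pandas version):


In [ ]:
# Vectorized equivalent (sketch): mean of the previous g increments (excluding the current second), per month
for g in range(2, parm_calculate_mv + 1):
    mv_col = df_history_ts_process.groupby('ccyy-mm')['increment-price'] \
                                  .transform(lambda s: s.shift(1).rolling(g).mean()).fillna(0)
    # mv_col should match df_history_ts_process['increment-price-mv%dsec' % g] up to float rounding
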

In [18]:
# df_history_ts_process[1768:]

In [19]:
plt.figure()
plt.plot(df_history_ts_process['increment-price'][1768:])
plt.plot(df_history_ts_process['increment-price-prev3sec'][1768:])
plt.plot(df_history_ts_process['increment-price-prev7sec'][1768:])
plt.plot(df_history_ts_process['increment-price-prev11sec'][1768:])
plt.plot(df_history_ts_process['increment-price-prev15sec'][1768:])
plt.plot()


Out[19]:
[Figure: increment-price with 3/7/11/15-second lag overlays, last month]

In [20]:
plt.figure()
plt.plot(df_history_ts_process['increment-price'][1768:])
plt.plot(df_history_ts_process['increment-price-mv3sec'][1768:])
plt.plot(df_history_ts_process['increment-price-mv7sec'][1768:])
plt.plot(df_history_ts_process['increment-price-mv11sec'][1768:])
plt.plot(df_history_ts_process['increment-price-mv15sec'][1768:])
plt.plot()


Out[20]:
[Figure: increment-price with 3/7/11/15-second moving-average overlays, last month]

In [ ]:

Process: df_history_table_process


In [21]:
df_history_table_process.tail()


Out[21]:
ccyy-mm volume-plate deal-price-low deal-price-avg deal-early-second volume-bidder
26 2017-03 10356 87800 87916 55 262010
27 2017-04 12196 89800 89850 59 252273
28 2017-05 10316 90100 90209 55 270197
29 2017-06 10312 89400 89532 45 244349
30 2017-07 10325 92200 92250 57 269189

In [22]:
# date of current month
df_history_table_process['date-curr'] = df_history_table_process.apply(lambda row: pd.to_datetime(row['ccyy-mm']), axis=1)
df_history_table_process['d-avg-low-price'] = df_history_table_process.apply(lambda row: row['deal-price-avg'] - row['deal-price-low'], axis=1)
df_history_table_process['ratio-bid'] = df_history_table_process.apply(lambda row: row['volume-plate'] / row['volume-bidder'], axis=1)

Merge dataframe


In [23]:
df_history_ts_process_tmp2 = df_history_ts_process.copy()

In [41]:
df_history_ts_process = df_history_ts_process_tmp2.copy()

In [42]:
df_history_ts_process = pd.merge(df_history_ts_process, df_history_table_process[['date-curr', 'volume-plate', 'ratio-bid']], how = 'left', left_on = 'date-curr', right_on = 'date-curr', suffixes=['', '_table'])

In [43]:
df_history_ts_process.columns


Out[43]:
Index(['ccyy-mm', 'time', 'bid-price', 'date-curr', 'date-prev', 'year',
       'month', 'hour', 'minute', 'second', 'datetime-curr', 'datetime-prev',
       'base-price15sec', 'increment-price', 'increment-price-target',
       'increment-price-prev1sec', 'increment-price-prev2sec',
       'increment-price-prev3sec', 'increment-price-prev4sec',
       'increment-price-prev5sec', 'increment-price-prev6sec',
       'increment-price-prev7sec', 'increment-price-prev8sec',
       'increment-price-prev9sec', 'increment-price-prev10sec',
       'increment-price-prev11sec', 'increment-price-prev12sec',
       'increment-price-prev13sec', 'increment-price-prev14sec',
       'increment-price-prev15sec', 'increment-price-mv2sec',
       'increment-price-mv3sec', 'increment-price-mv4sec',
       'increment-price-mv5sec', 'increment-price-mv6sec',
       'increment-price-mv7sec', 'increment-price-mv8sec',
       'increment-price-mv9sec', 'increment-price-mv10sec',
       'increment-price-mv11sec', 'increment-price-mv12sec',
       'increment-price-mv13sec', 'increment-price-mv14sec',
       'increment-price-mv15sec', 'volume-plate', 'ratio-bid'],
      dtype='object')

In [44]:
df_history_ts_process = pd.merge(df_history_ts_process, df_history_table_process[['date-curr', 'volume-plate', 'ratio-bid', 'deal-early-second', 'deal-price-avg']], how = 'left', left_on = 'date-prev', right_on = 'date-curr', suffixes=['', '_m0'])

In [45]:
df_history_ts_process.columns


Out[45]:
Index(['ccyy-mm', 'time', 'bid-price', 'date-curr', 'date-prev', 'year',
       'month', 'hour', 'minute', 'second', 'datetime-curr', 'datetime-prev',
       'base-price15sec', 'increment-price', 'increment-price-target',
       'increment-price-prev1sec', 'increment-price-prev2sec',
       'increment-price-prev3sec', 'increment-price-prev4sec',
       'increment-price-prev5sec', 'increment-price-prev6sec',
       'increment-price-prev7sec', 'increment-price-prev8sec',
       'increment-price-prev9sec', 'increment-price-prev10sec',
       'increment-price-prev11sec', 'increment-price-prev12sec',
       'increment-price-prev13sec', 'increment-price-prev14sec',
       'increment-price-prev15sec', 'increment-price-mv2sec',
       'increment-price-mv3sec', 'increment-price-mv4sec',
       'increment-price-mv5sec', 'increment-price-mv6sec',
       'increment-price-mv7sec', 'increment-price-mv8sec',
       'increment-price-mv9sec', 'increment-price-mv10sec',
       'increment-price-mv11sec', 'increment-price-mv12sec',
       'increment-price-mv13sec', 'increment-price-mv14sec',
       'increment-price-mv15sec', 'volume-plate', 'ratio-bid', 'date-curr_m0',
       'volume-plate_m0', 'ratio-bid_m0', 'deal-early-second',
       'deal-price-avg'],
      dtype='object')

Shift to copy the previous 'parm_calculate_prev_month' months' data into the current row


In [81]:
# df_history_ts_process = df_history_ts_process_lookup.copy()

In [82]:
df_history_ts_process_lookup = df_history_ts_process.copy()
df_history_ts_process_lookup.tail()


Out[82]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second ... increment-price-mv13sec increment-price-mv14sec increment-price-mv15sec volume-plate ratio-bid date-curr_m0 volume-plate_m0 ratio-bid_m0 deal-early-second deal-price-avg
1886 2017-07 11:29:56 92100 2017-07-01 2017-06-01 2017 07 11 29 56 ... 861.538 828.571 800 10325 0.038356 2017-06-01 10312.0 0.042202 45.0 89532.0
1887 2017-07 11:29:57 92100 2017-07-01 2017-06-01 2017 07 11 29 57 ... 946.154 907.143 873.333 10325 0.038356 2017-06-01 10312.0 0.042202 45.0 89532.0
1888 2017-07 11:29:58 92100 2017-07-01 2017-06-01 2017 07 11 29 58 ... 1023.08 985.714 946.667 10325 0.038356 2017-06-01 10312.0 0.042202 45.0 89532.0
1889 2017-07 11:29:59 92200 2017-07-01 2017-06-01 2017 07 11 29 59 ... 1100 1057.14 1020 10325 0.038356 2017-06-01 10312.0 0.042202 45.0 89532.0
1890 2017-07 11:30:00 92200 2017-07-01 2017-06-01 2017 07 11 30 00 ... 1176.92 1135.71 1093.33 10325 0.038356 2017-06-01 10312.0 0.042202 45.0 89532.0

5 rows × 51 columns


In [83]:
# _m1
df_history_ts_process = pd.merge(df_history_ts_process, df_history_ts_process_lookup[[ \
        'datetime-curr', 'datetime-prev', 
        'base-price15sec', 'increment-price', 'increment-price-target',
        'increment-price-prev1sec', 'increment-price-prev2sec',
        'increment-price-prev3sec', 'increment-price-prev4sec',
        'increment-price-prev5sec', 'increment-price-prev6sec',
        'increment-price-prev7sec', 'increment-price-prev8sec',
        'increment-price-prev9sec', 'increment-price-prev10sec',
        'increment-price-prev11sec', 'increment-price-prev12sec',
        'increment-price-prev13sec', 'increment-price-prev14sec',
        'increment-price-prev15sec', 
        'increment-price-mv2sec',
        'increment-price-mv3sec', 'increment-price-mv4sec',
        'increment-price-mv5sec', 'increment-price-mv6sec',
        'increment-price-mv7sec', 'increment-price-mv8sec',
        'increment-price-mv9sec', 'increment-price-mv10sec',
        'increment-price-mv11sec', 'increment-price-mv12sec',
        'increment-price-mv13sec', 'increment-price-mv14sec',
        'increment-price-mv15sec', 
        'volume-plate_m0', 
        'ratio-bid_m0', 
        'deal-early-second',
        'deal-price-avg'                                                                                      
        ]], how = 'left', left_on = 'datetime-prev', right_on = 'datetime-curr', suffixes=['', '_m1'])
df_history_ts_process.tail()


Out[83]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second ... increment-price-mv10sec_m1 increment-price-mv11sec_m1 increment-price-mv12sec_m1 increment-price-mv13sec_m1 increment-price-mv14sec_m1 increment-price-mv15sec_m1 volume-plate_m0_m1 ratio-bid_m0_m1 deal-early-second_m1 deal-price-avg_m1
1886 2017-07 11:29:56 92100 2017-07-01 2017-06-01 2017 07 11 29 56 ... 630 627.273 616.667 607.692 592.857 580 10316.0 0.03818 55.0 90209.0
1887 2017-07 11:29:57 92100 2017-07-01 2017-06-01 2017 07 11 29 57 ... 640 636.364 633.333 623.077 614.286 600 10316.0 0.03818 55.0 90209.0
1888 2017-07 11:29:58 92100 2017-07-01 2017-06-01 2017 07 11 29 58 ... 660 654.545 650 646.154 635.714 626.667 10316.0 0.03818 55.0 90209.0
1889 2017-07 11:29:59 92200 2017-07-01 2017-06-01 2017 07 11 29 59 ... 680 672.727 666.667 661.538 657.143 646.667 10316.0 0.03818 55.0 90209.0
1890 2017-07 11:30:00 92200 2017-07-01 2017-06-01 2017 07 11 30 00 ... 700 690.909 683.333 676.923 671.429 666.667 10316.0 0.03818 55.0 90209.0

5 rows × 89 columns


In [84]:
# _m2
df_history_ts_process = pd.merge(df_history_ts_process, df_history_ts_process_lookup[[ \
        'datetime-curr', 'datetime-prev', 
        'base-price15sec', 'increment-price', 'increment-price-target',
        'increment-price-prev1sec', 'increment-price-prev2sec',
        'increment-price-prev3sec', 'increment-price-prev4sec',
        'increment-price-prev5sec', 'increment-price-prev6sec',
        'increment-price-prev7sec', 'increment-price-prev8sec',
        'increment-price-prev9sec', 'increment-price-prev10sec',
        'increment-price-prev11sec', 'increment-price-prev12sec',
        'increment-price-prev13sec', 'increment-price-prev14sec',
        'increment-price-prev15sec', 
        'increment-price-mv2sec',
        'increment-price-mv3sec', 'increment-price-mv4sec',
        'increment-price-mv5sec', 'increment-price-mv6sec',
        'increment-price-mv7sec', 'increment-price-mv8sec',
        'increment-price-mv9sec', 'increment-price-mv10sec',
        'increment-price-mv11sec', 'increment-price-mv12sec',
        'increment-price-mv13sec', 'increment-price-mv14sec',
        'increment-price-mv15sec', 
        'volume-plate_m0', 
        'ratio-bid_m0', 
        'deal-early-second',
        'deal-price-avg'                                                                                      
        ]], how = 'left', left_on = 'datetime-prev_m1', right_on = 'datetime-curr', suffixes=['', '_m2'])
df_history_ts_process.tail()


Out[84]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second ... increment-price-mv10sec_m2 increment-price-mv11sec_m2 increment-price-mv12sec_m2 increment-price-mv13sec_m2 increment-price-mv14sec_m2 increment-price-mv15sec_m2 volume-plate_m0_m2 ratio-bid_m0_m2 deal-early-second_m2 deal-price-avg_m2
1886 2017-07 11:29:56 92100 2017-07-01 2017-06-01 2017 07 11 29 56 ... 710 681.818 658.333 630.769 607.143 586.667 12196.0 0.048344 59.0 89850.0
1887 2017-07 11:29:57 92100 2017-07-01 2017-06-01 2017 07 11 29 57 ... 770 745.455 716.667 692.308 664.286 640 12196.0 0.048344 59.0 89850.0
1888 2017-07 11:29:58 92100 2017-07-01 2017-06-01 2017 07 11 29 58 ... 840 809.091 783.333 753.846 728.571 700 12196.0 0.048344 59.0 89850.0
1889 2017-07 11:29:59 92200 2017-07-01 2017-06-01 2017 07 11 29 59 ... 910 881.818 850 823.077 792.857 766.667 12196.0 0.048344 59.0 89850.0
1890 2017-07 11:30:00 92200 2017-07-01 2017-06-01 2017 07 11 30 00 ... 980 945.455 916.667 884.615 857.143 826.667 12196.0 0.048344 59.0 89850.0

5 rows × 127 columns


In [85]:
# _m3
df_history_ts_process = pd.merge(df_history_ts_process, df_history_ts_process_lookup[[ \
        'datetime-curr', 'datetime-prev', 
        'base-price15sec', 'increment-price', 'increment-price-target',
        'increment-price-prev1sec', 'increment-price-prev2sec',
        'increment-price-prev3sec', 'increment-price-prev4sec',
        'increment-price-prev5sec', 'increment-price-prev6sec',
        'increment-price-prev7sec', 'increment-price-prev8sec',
        'increment-price-prev9sec', 'increment-price-prev10sec',
        'increment-price-prev11sec', 'increment-price-prev12sec',
        'increment-price-prev13sec', 'increment-price-prev14sec',
        'increment-price-prev15sec', 
        'increment-price-mv2sec',
        'increment-price-mv3sec', 'increment-price-mv4sec',
        'increment-price-mv5sec', 'increment-price-mv6sec',
        'increment-price-mv7sec', 'increment-price-mv8sec',
        'increment-price-mv9sec', 'increment-price-mv10sec',
        'increment-price-mv11sec', 'increment-price-mv12sec',
        'increment-price-mv13sec', 'increment-price-mv14sec',
        'increment-price-mv15sec', 
        'volume-plate_m0', 
        'ratio-bid_m0', 
        'deal-early-second',
        'deal-price-avg'                                                                                      
        ]], how = 'left', left_on = 'datetime-prev_m2', right_on = 'datetime-curr', suffixes=['', '_m3'])
df_history_ts_process.tail()


Out[85]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second ... increment-price-mv10sec_m3 increment-price-mv11sec_m3 increment-price-mv12sec_m3 increment-price-mv13sec_m3 increment-price-mv14sec_m3 increment-price-mv15sec_m3 volume-plate_m0_m3 ratio-bid_m0_m3 deal-early-second_m3 deal-price-avg_m3
1886 2017-07 11:29:56 92100 2017-07-01 2017-06-01 2017 07 11 29 56 ... 720 709.091 691.667 669.231 650 626.667 10356.0 0.039525 55.0 87916.0
1887 2017-07 11:29:57 92100 2017-07-01 2017-06-01 2017 07 11 29 57 ... 750 736.364 725 707.692 685.714 666.667 10356.0 0.039525 55.0 87916.0
1888 2017-07 11:29:58 92100 2017-07-01 2017-06-01 2017 07 11 29 58 ... 790 772.727 758.333 746.154 728.571 706.667 10356.0 0.039525 55.0 87916.0
1889 2017-07 11:29:59 92200 2017-07-01 2017-06-01 2017 07 11 29 59 ... 820 809.091 791.667 776.923 764.286 746.667 10356.0 0.039525 55.0 87916.0
1890 2017-07 11:30:00 92200 2017-07-01 2017-06-01 2017 07 11 30 00 ... 860 845.455 833.333 815.385 800 786.667 10356.0 0.039525 55.0 87916.0

5 rows × 165 columns
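
The three look-back merges above differ only in the join key and the suffix; a sketch of the same joins as a single loop (a hypothetical consolidation; lookup_cols stands for the full column list used in the cells above):


In [ ]:
# Hypothetical consolidation (sketch) of the _m1/_m2/_m3 merges above
lookup_cols = ['datetime-curr', 'datetime-prev',
               'base-price15sec', 'increment-price', 'increment-price-target',
               'volume-plate_m0', 'ratio-bid_m0', 'deal-early-second', 'deal-price-avg']
# ... plus the increment-price-prev*/increment-price-mv* columns listed in the cells above

df_m = df_history_ts_process_lookup.copy()
left_key = 'datetime-prev'
for m in range(1, parm_calculate_prev_month + 1):
    df_m = pd.merge(df_m, df_history_ts_process_lookup[lookup_cols],
                    how='left', left_on=left_key, right_on='datetime-curr',
                    suffixes=['', '_m%d' % m])
    left_key = 'datetime-prev_m%d' % m
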


In [86]:
plt.figure()
plt.plot(df_history_ts_process['increment-price-mv10sec'][1768:])
plt.plot(df_history_ts_process['increment-price-mv10sec_m1'][1768:])
plt.plot(df_history_ts_process['increment-price-mv10sec_m2'][1768:])
plt.plot(df_history_ts_process['increment-price-mv10sec_m3'][1768:])
plt.figure()
plt.plot(df_history_ts_process['increment-price-prev10sec'][1768:])
plt.plot(df_history_ts_process['increment-price-prev10sec_m1'][1768:])
plt.plot(df_history_ts_process['increment-price-prev10sec_m2'][1768:])
plt.plot(df_history_ts_process['increment-price-prev10sec_m3'][1768:])
plt.figure()
plt.plot(df_history_ts_process['increment-price'][1768:])
plt.plot(df_history_ts_process['increment-price_m1'][1768:])
plt.plot(df_history_ts_process['increment-price_m2'][1768:])
plt.plot(df_history_ts_process['increment-price_m3'][1768:])
plt.figure()
plt.plot(df_history_ts_process['increment-price-target'][1768:])
plt.plot(df_history_ts_process['increment-price-target_m1'][1768:])
plt.plot(df_history_ts_process['increment-price-target_m2'][1768:])
plt.plot(df_history_ts_process['increment-price-target_m3'][1768:])

plt.plot()


Out[86]:
[Figures: increment-price-mv10sec, increment-price-prev10sec, increment-price and increment-price-target vs. their _m1/_m2/_m3 look-backs, last month]

In [ ]:

Housekeeping to remove some invalid data created during pre-processing


In [87]:
for i in range(0, len(df_history_ts_process.columns)): print(df_history_ts_process.columns[i])


ccyy-mm
time
bid-price
date-curr
date-prev
year
month
hour
minute
second
datetime-curr
datetime-prev
base-price15sec
increment-price
increment-price-target
increment-price-prev1sec
increment-price-prev2sec
increment-price-prev3sec
increment-price-prev4sec
increment-price-prev5sec
increment-price-prev6sec
increment-price-prev7sec
increment-price-prev8sec
increment-price-prev9sec
increment-price-prev10sec
increment-price-prev11sec
increment-price-prev12sec
increment-price-prev13sec
increment-price-prev14sec
increment-price-prev15sec
increment-price-mv2sec
increment-price-mv3sec
increment-price-mv4sec
increment-price-mv5sec
increment-price-mv6sec
increment-price-mv7sec
increment-price-mv8sec
increment-price-mv9sec
increment-price-mv10sec
increment-price-mv11sec
increment-price-mv12sec
increment-price-mv13sec
increment-price-mv14sec
increment-price-mv15sec
volume-plate
ratio-bid
date-curr_m0
volume-plate_m0
ratio-bid_m0
deal-early-second
deal-price-avg
datetime-curr_m1
datetime-prev_m1
base-price15sec_m1
increment-price_m1
increment-price-target_m1
increment-price-prev1sec_m1
increment-price-prev2sec_m1
increment-price-prev3sec_m1
increment-price-prev4sec_m1
increment-price-prev5sec_m1
increment-price-prev6sec_m1
increment-price-prev7sec_m1
increment-price-prev8sec_m1
increment-price-prev9sec_m1
increment-price-prev10sec_m1
increment-price-prev11sec_m1
increment-price-prev12sec_m1
increment-price-prev13sec_m1
increment-price-prev14sec_m1
increment-price-prev15sec_m1
increment-price-mv2sec_m1
increment-price-mv3sec_m1
increment-price-mv4sec_m1
increment-price-mv5sec_m1
increment-price-mv6sec_m1
increment-price-mv7sec_m1
increment-price-mv8sec_m1
increment-price-mv9sec_m1
increment-price-mv10sec_m1
increment-price-mv11sec_m1
increment-price-mv12sec_m1
increment-price-mv13sec_m1
increment-price-mv14sec_m1
increment-price-mv15sec_m1
volume-plate_m0_m1
ratio-bid_m0_m1
deal-early-second_m1
deal-price-avg_m1
datetime-curr_m2
datetime-prev_m2
base-price15sec_m2
increment-price_m2
increment-price-target_m2
increment-price-prev1sec_m2
increment-price-prev2sec_m2
increment-price-prev3sec_m2
increment-price-prev4sec_m2
increment-price-prev5sec_m2
increment-price-prev6sec_m2
increment-price-prev7sec_m2
increment-price-prev8sec_m2
increment-price-prev9sec_m2
increment-price-prev10sec_m2
increment-price-prev11sec_m2
increment-price-prev12sec_m2
increment-price-prev13sec_m2
increment-price-prev14sec_m2
increment-price-prev15sec_m2
increment-price-mv2sec_m2
increment-price-mv3sec_m2
increment-price-mv4sec_m2
increment-price-mv5sec_m2
increment-price-mv6sec_m2
increment-price-mv7sec_m2
increment-price-mv8sec_m2
increment-price-mv9sec_m2
increment-price-mv10sec_m2
increment-price-mv11sec_m2
increment-price-mv12sec_m2
increment-price-mv13sec_m2
increment-price-mv14sec_m2
increment-price-mv15sec_m2
volume-plate_m0_m2
ratio-bid_m0_m2
deal-early-second_m2
deal-price-avg_m2
datetime-curr_m3
datetime-prev_m3
base-price15sec_m3
increment-price_m3
increment-price-target_m3
increment-price-prev1sec_m3
increment-price-prev2sec_m3
increment-price-prev3sec_m3
increment-price-prev4sec_m3
increment-price-prev5sec_m3
increment-price-prev6sec_m3
increment-price-prev7sec_m3
increment-price-prev8sec_m3
increment-price-prev9sec_m3
increment-price-prev10sec_m3
increment-price-prev11sec_m3
increment-price-prev12sec_m3
increment-price-prev13sec_m3
increment-price-prev14sec_m3
increment-price-prev15sec_m3
increment-price-mv2sec_m3
increment-price-mv3sec_m3
increment-price-mv4sec_m3
increment-price-mv5sec_m3
increment-price-mv6sec_m3
increment-price-mv7sec_m3
increment-price-mv8sec_m3
increment-price-mv9sec_m3
increment-price-mv10sec_m3
increment-price-mv11sec_m3
increment-price-mv12sec_m3
increment-price-mv13sec_m3
increment-price-mv14sec_m3
increment-price-mv15sec_m3
volume-plate_m0_m3
ratio-bid_m0_m3
deal-early-second_m3
deal-price-avg_m3

In [88]:
# housekeeping: delete some columns
# df_history_ts_process.drop('date-curr_m0', axis=1, inplace=True)

In [89]:
parm_record_cut_ccyy


Out[89]:
Timestamp('2015-04-01 00:00:00')

In [90]:
# remove months up to and including 'parm_record_cut_ccyy' from the dataset
df_history_ts_process = df_history_ts_process[df_history_ts_process['date-curr'] > parm_record_cut_ccyy]

In [91]:
# total 61 seconds/rows per month:
# remove the first 'parm_record_cut_row_head' records
# remove the last 'parm_record_cut_row_tail' records
# ('second' is a zero-padded two-character string, so string comparison works for these two-digit bounds)
df_history_ts_process = df_history_ts_process[df_history_ts_process['second'] >= str(parm_record_cut_row_head) ]
df_history_ts_process = df_history_ts_process[df_history_ts_process['second'] <= str(60 - parm_record_cut_row_tail) ]
# df_history_ts_process = df_history_ts_process[df_history_ts_process['second'] > parm_record_cut_row_head ]

In [92]:
# Reset index after housekeeping
df_history_ts_process = df_history_ts_process.reset_index(drop=True)
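
The same row filter can also be written on integer seconds, which does not rely on the zero-padded string ordering; a sketch:


In [ ]:
# Equivalent filter on integer seconds (sketch)
sec = df_history_ts_process['second'].astype(int)
df_history_ts_filtered = df_history_ts_process[(sec >= parm_record_cut_row_head) &
                                               (sec <= 60 - parm_record_cut_row_tail)].reset_index(drop=True)
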

In [93]:
df_history_ts_process.head()


Out[93]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second ... increment-price-mv10sec_m3 increment-price-mv11sec_m3 increment-price-mv12sec_m3 increment-price-mv13sec_m3 increment-price-mv14sec_m3 increment-price-mv15sec_m3 volume-plate_m0_m3 ratio-bid_m0_m3 deal-early-second_m3 deal-price-avg_m3
0 2015-05 11:29:15 78400 2015-05-01 2015-04-01 2015 05 11 29 15 ... 0 0 0 0 0 -6.66667 7990.0 0.081362 48.0 74216.0
1 2015-05 11:29:16 78400 2015-05-01 2015-04-01 2015 05 11 29 16 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0
2 2015-05 11:29:17 78400 2015-05-01 2015-04-01 2015 05 11 29 17 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0
3 2015-05 11:29:18 78400 2015-05-01 2015-04-01 2015 05 11 29 18 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0
4 2015-05 11:29:19 78500 2015-05-01 2015-04-01 2015 05 11 29 19 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0

5 rows × 165 columns


In [94]:
df_history_ts_process.tail()


Out[94]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second ... increment-price-mv10sec_m3 increment-price-mv11sec_m3 increment-price-mv12sec_m3 increment-price-mv13sec_m3 increment-price-mv14sec_m3 increment-price-mv15sec_m3 volume-plate_m0_m3 ratio-bid_m0_m3 deal-early-second_m3 deal-price-avg_m3
1048 2017-07 11:29:49 91400 2017-07-01 2017-06-01 2017 07 11 29 49 ... 470 454.545 441.667 430.769 421.429 413.333 10356.0 0.039525 55.0 87916.0
1049 2017-07 11:29:50 91500 2017-07-01 2017-06-01 2017 07 11 29 50 ... 510 490.909 475 461.538 450 440 10356.0 0.039525 55.0 87916.0
1050 2017-07 11:29:51 91600 2017-07-01 2017-06-01 2017 07 11 29 51 ... 550 527.273 508.333 492.308 478.571 466.667 10356.0 0.039525 55.0 87916.0
1051 2017-07 11:29:52 91700 2017-07-01 2017-06-01 2017 07 11 29 52 ... 590 563.636 541.667 523.077 507.143 493.333 10356.0 0.039525 55.0 87916.0
1052 2017-07 11:29:53 91800 2017-07-01 2017-06-01 2017 07 11 29 53 ... 620 600 575 553.846 535.714 520 10356.0 0.039525 55.0 87916.0

5 rows × 165 columns


In [96]:
plt.figure()
plt.plot(df_history_ts_process['increment-price'][974:])
plt.plot(df_history_ts_process['increment-price-mv3sec'][974:])
plt.plot(df_history_ts_process['increment-price-mv7sec'][974:])
plt.plot(df_history_ts_process['increment-price-mv11sec'][974:])
plt.plot(df_history_ts_process['increment-price-mv15sec'][974:])
plt.figure()
plt.plot(df_history_ts_process['increment-price-mv15sec'][974:])
plt.plot(df_history_ts_process['increment-price-mv15sec_m1'][974:])
plt.plot(df_history_ts_process['increment-price-mv15sec_m2'][974:])
plt.plot(df_history_ts_process['increment-price-mv15sec_m3'][974:])
plt.plot()


Out[96]:
[Figures: increment-price with 3/7/11/15-second moving-average overlays; increment-price-mv15sec vs. its _m1/_m2/_m3 look-backs, last month]

In [ ]:

[3] Modeling Part 2: Python scikit-learn

Models to use:

  • GradientBoostingRegressor
  • RandomForestRegressor
  • AdaBoostRegressor
  • ExtraTreesRegressor
  • BaggingRegressor
  • LinearRegression
  • SVR, RBF kernel
  • SVR, linear kernel
  • KNeighborsRegressor

Import pre-processed data


In [97]:
df_history_ts_process.head()


Out[97]:
ccyy-mm time bid-price date-curr date-prev year month hour minute second ... increment-price-mv10sec_m3 increment-price-mv11sec_m3 increment-price-mv12sec_m3 increment-price-mv13sec_m3 increment-price-mv14sec_m3 increment-price-mv15sec_m3 volume-plate_m0_m3 ratio-bid_m0_m3 deal-early-second_m3 deal-price-avg_m3
0 2015-05 11:29:15 78400 2015-05-01 2015-04-01 2015 05 11 29 15 ... 0 0 0 0 0 -6.66667 7990.0 0.081362 48.0 74216.0
1 2015-05 11:29:16 78400 2015-05-01 2015-04-01 2015 05 11 29 16 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0
2 2015-05 11:29:17 78400 2015-05-01 2015-04-01 2015 05 11 29 17 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0
3 2015-05 11:29:18 78400 2015-05-01 2015-04-01 2015 05 11 29 18 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0
4 2015-05 11:29:19 78500 2015-05-01 2015-04-01 2015 05 11 29 19 ... 0 0 0 0 0 0 7990.0 0.081362 48.0 74216.0

5 rows × 165 columns

Include relevant features


In [ ]:
X = df_history_ts_process[[
# 'ccyy-mm', 'time', 'bid-price', 'date-curr', 'date-prev', 'year',
       'month', 
#     'hour', 'minute', 
    'second', 'base-price15sec',
       'increment-price', 
#     'increment-price-target', 
    'increment-price-prev1sec',
       'increment-price-prev2sec', 'increment-price-prev3sec',
       'increment-price-prev4sec', 'increment-price-prev5sec',
       'increment-price-prev6sec', 'increment-price-prev7sec',
       'increment-price-prev8sec', 'increment-price-prev9sec',
       'increment-price-prev10sec', 'increment-price-prev11sec',
       'increment-price-prev12sec', 'increment-price-prev13sec',
       'increment-price-prev14sec', 'increment-price-prev15sec',
       'increment-price-mv2sec', 'increment-price-mv3sec',
       'increment-price-mv4sec', 'increment-price-mv5sec',
       'increment-price-mv6sec', 'increment-price-mv7sec',
       'increment-price-mv8sec', 'increment-price-mv9sec',
       'increment-price-mv10sec', 'increment-price-mv11sec',
       'increment-price-mv12sec', 'increment-price-mv13sec',
       'increment-price-mv14sec', 'increment-price-mv15sec', 'volume-plate',
       'ratio-bid', 'volume-plate_m0', 'ratio-bid_m0', 'deal-early-second',
       'deal-price-avg'    
        ]]

X_col = X.columns # get the column list

# X = StandardScaler().fit_transform(X.as_matrix())
X = X.as_matrix()  # note: .values is the non-deprecated equivalent in newer pandas

# y = StandardScaler().fit_transform(df_history_ts_process[['increment-price-target']].as_matrix()).reshape(len(df_history_ts_process),)
y = df_history_ts_process[['increment-price-target']].as_matrix().reshape(len(df_history_ts_process),)
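
The look-back columns come from left joins, so it is worth confirming that the housekeeping step left no missing values before fitting; a quick check (sketch):


In [ ]:
# All counts should be zero after the earlier housekeeping step
print(df_history_ts_process[list(X_col)].isnull().sum().sum())
print(df_history_ts_process['increment-price-target'].isnull().sum())
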

In [ ]:
X_col

In [ ]:
plt.figure()
plt.plot(X)
plt.figure()
plt.plot(y)

In [ ]:

[4] Evaluation

K-fold Cross-Validation


In [ ]:
rng = check_random_state(0)

In [ ]:
# GB
classifier_GB = GradientBoostingRegressor(n_estimators=1500, # score: 0.94608 (AUC 0.81419), learning_rate=0.001, max_features=8 <<< Best
#                                    loss='deviance',
#                                    subsample=1,
#                                    max_depth=5,
#                                    min_samples_split=20,
                                   learning_rate=0.002,
#                                    max_features=10,
                                   random_state=rng)

In [ ]:
# AB
classifier_AB = AdaBoostRegressor(n_estimators=1500, # score: 0.93948 (AUC 0.88339), learning_rate=0.004 <<< Best
                                   learning_rate=0.002,
                                   random_state=rng)

In [ ]:
# RF
classifier_RF = RandomForestRegressor(n_estimators=1500, # score: 0.94207 (AUC 0.81870), max_depth=3, min_samples_split=20, <<< Best
#                                     max_features=10,
#                                     max_depth=3,
#                                     min_samples_split=20,
                                    random_state=rng)

In [ ]:
# ET
classifier_ET = ExtraTreesRegressor(n_estimators=1000, # score: 0.94655 (AUC 0.84364), max_depth=3, min_samples_split=20, max_features=10 <<< Best
#                                     max_depth=3,
#                                     min_samples_split=20,
#                                     max_features=10,
                                    random_state=rng)

In [ ]:
# BG
classifier_BG = BaggingRegressor(n_estimators=500, # score: 0.70725 (AUC 0.63729) <<< Best
#                                     max_features=10,
                                    random_state=rng)

LR


In [ ]:
classifier_LR = LinearRegression() # score: 0.90199 (AUC 0.80569)

SVM Linear


In [ ]:
# classifier_SVCL = svm.SVC(kernel='linear', probability=True, random_state=rng) # score: 0.89976 (AUC 0.70524)
classifier_SVRL = svm.SVR(kernel='linear') # score: 0.89976 (AUC 0.70524)

SVM


In [ ]:
classifier_SVRR = svm.SVR(kernel='rbf') # score: 0.80188 (AUC 0.50050)
# classifier_SVRR = svm.SVR(kernel='poly') # score: 0.80188 (AUC 0.50050)

KNN


In [ ]:
classifier_KNN = KNeighborsRegressor(n_neighbors=2) # score: 0.94018 (AUC 0.72792)
# note: StratifiedKFold expects discrete class labels; for this continuous regression
# target a plain (K)Fold splitter is usually the appropriate choice
cv = cross_val_score(classifier_KNN,
                            X,
                            y,
                            cv=StratifiedKFold(parm_ts_valid_month))
print('KNN CV score: {0:.5f}'.format(cv.mean()))

In [ ]:

Select Model


In [ ]:
# classifier = classifier_GB   # 324.632308296
classifier = classifier_AB   # 429.646733221
# classifier = classifier_RF   # 175.504322802
# classifier = classifier_ET   # 172.097916817, 0.0724812030075
# classifier = classifier_BG   # 175.451381872
# classifier = classifier_LR     # 128.465059749, 0.11
# classifier = classifier_SVRL # 3789.82169312
# classifier = classifier_SVRR # 3789.82169312, 0.10754224349

Split Data


In [ ]:
n_splits = parm_ts_valid_cycle
print(n_splits)
# n_splits=54 # 19 seconds/records for each bidding month
# n_splits=19 # 19 seconds/records for each bidding month
n_fold = parm_ts_valid_month
print(n_fold)


# X_train_1 = X[0:(len(X)-batch*n_splits)]
# y_train_1 = y[0:(len(X)-batch*n_splits)]

# X_test_1 = X[(len(X)-batch*n_splits):((len(X)-batch*n_splits)+n_splits)]
# y_test_1 = y[(len(X)-batch*n_splits):((len(X)-batch*n_splits)+n_splits)]

CV


In [ ]:
n_fold=5

In [ ]:
y_pred = {}
y_test = {}

y_pred_org = {}
y_test_org = {}

i = 0
for batch in range(1, n_fold):
    X_train_1 = X[0:(len(X)-batch*n_splits)]
    y_train_1 = y[0:(len(X)-batch*n_splits)]
    X_test_1  = X[(len(X)-batch*n_splits):((len(X)-batch*n_splits)+n_splits)]
    y_test_1  = y[(len(X)-batch*n_splits):((len(X)-batch*n_splits)+n_splits)]
    print(len(X_train_1))
    
    # ReScale
    ScalerX = StandardScaler()
    ScalerX.fit(X_train_1)
    X_train_1 = ScalerX.transform(X_train_1)
    X_test_1  = ScalerX.transform(X_test_1)
    
    ScalerY = StandardScaler()
    ScalerY.fit(y_train_1.reshape(-1, 1))
    # keep the targets 1-D so the regressors and the error cells below broadcast correctly
    y_train_1 = ScalerY.transform(y_train_1.reshape(-1, 1)).ravel()
    y_test_1  = ScalerY.transform(y_test_1.reshape(-1, 1)).ravel()
    
    y_pred[i] = classifier.fit(X_train_1, y_train_1).predict(X_test_1)
    y_test[i] = y_test_1

    y_pred_org[i] = ScalerY.inverse_transform(y_pred[i].reshape(-1, 1)).ravel()
    y_test_org[i] = ScalerY.inverse_transform(y_test[i].reshape(-1, 1)).ravel()
    
    plt.figure()
    plt.plot(y_train_1)
    plt.plot()
    plt.figure()
    plt.plot(y_test[i])
    plt.plot(y_pred[i])
    plt.plot()
    i += 1
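
The loop above is a walk-forward (rolling-origin) evaluation: each fold trains on all earlier rows and tests on the next block of n_splits rows. scikit-learn's TimeSeriesSplit expresses the same idea; a minimal sketch, assuming X and y as built above:


In [ ]:
from sklearn.model_selection import TimeSeriesSplit

# Walk-forward splits: every test block comes after all of its training rows in time
tscv = TimeSeriesSplit(n_splits=n_fold - 1)
for train_idx, test_idx in tscv.split(X):
    print(len(train_idx), len(test_idx))
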

No inverse-scale (errors in standardized units)


In [ ]:
k = []
for i in range(0, len(y_test)):
    # sqrt(square(x)) == abs(x): this is the mean absolute error per fold, in standardized units
    k.append(np.mean(np.sqrt(np.square(y_test[i] - y_pred[i]))))

k_mean = np.mean(k)

print(k_mean)
print()
print(k)
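
Since sqrt(square(x)) is simply |x|, the cells in this block all report a mean absolute error over (a slice of) each fold; a small helper makes that explicit (sketch):


In [ ]:
def fold_mae(y_true, y_hat, sl=slice(None)):
    """Mean absolute error over an optional slice of one fold."""
    return np.mean(np.abs(np.asarray(y_true)[sl] - np.asarray(y_hat)[sl]))

# e.g. the per-fold errors on standardized values, as in the cell above:
# [fold_mae(y_test[i], y_pred[i]) for i in range(len(y_test))]
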

In [ ]:
k = []
for i in range(0, len(y_test)):
    k.append(np.mean(np.sqrt(np.square(y_test[i][35:37] - y_pred[i][35:37]))))

k_mean = np.mean(k)

print(k_mean)
print()
print(k)

Inverse-scale (errors in original price units)


In [ ]:
k = []
for i in range(0, len(y_test)):
    k.append(np.mean(np.sqrt(np.square(y_test_org[i] - y_pred_org[i]))))

k_mean = np.mean(k)

print(k_mean)
print()
print(k)

In [ ]:
k = []
for i in range(0, len(y_test)):
    k.append(np.mean(np.sqrt(np.square(y_test_org[i][35:37] - y_pred_org[i][35:37]))))

k_mean = np.mean(k)

print(k_mean)
print()
print(k)

In [ ]:
# the value at second 50 predicts second 57 (parm_calculate_target_second = 7 seconds ahead)
k = []
for i in range(0, len(y_test)):
    k.append(np.mean(np.sqrt(np.square(y_test_org[i][35:36] - y_pred_org[i][35:36]))))

k_mean = np.mean(k)

print(k_mean)
print()
print(k)

In [ ]:
plt.plot(y_test_org[0])
plt.plot(y_pred_org[0])

In [ ]:
plt.plot(k)

In [ ]:
y_test[1][13:]

In [ ]:
y_pred[1][13:]

In [ ]:
np.mean(np.sqrt(np.square(y_test[4] - y_pred[4])))

In [ ]:
np.mean(np.sqrt(np.square(y_test[4][13:16] - y_pred[4][13:16])))

In [ ]:


In [ ]:
y_pred_df = pd.DataFrame.from_dict(y_pred)

In [ ]:
y_pred_df.columns=['month 7','month 6','month 5','month 4','month 3','month 2','month 1']

In [ ]:
y_pred_df.to_csv('bid_results_v001.csv', index=False)

In [ ]:
y_pred_df

In [ ]:


In [ ]:
# previous N sec ['bid-price']
gap = parm_calculate_prev_bp

for gap in range(1, gap+1):
    col_name = 'bid-price-prev'+str(gap)+'sec'
    col_data = pd.DataFrame(columns=[col_name])
    col_data_zeros = pd.DataFrame({col_name: np.zeros(gap)})
    print('Creating : ', col_name)  

    for month in range(0, parm_ts_month):
    #     print('month : ', month)
        # col_data.append(col_data_zeros)  # no-op: DataFrame.append is not in-place; the zero head rows are set in the loop below
        for i in range(0, gap):
            col_data.loc[month*parm_ts_cycle+i] = 0
        for i in range(gap, parm_ts_cycle):
            col_data.loc[month*parm_ts_cycle+i] = df_history_ts_process['bid-price'][month*parm_ts_cycle+i-gap]
  
    df_history_ts_process[col_name] = col_data

print('Total records processed : ', len(col_data))

In [ ]:
# previous 'parm_calculate_mv' sec Moving Average ['bid-price']

gap = parm_calculate_mv

for gap in range(2, gap+1): # MV starts from 2 seconds, till parm_calculate_mv
    col_name = 'bid-price-mv'+str(gap)+'sec'
    col_data = pd.DataFrame(columns=[col_name])
    col_data_zeros = pd.DataFrame({col_name: np.zeros(gap)})
    print('Creating : ', col_name)  

    for month in range(0, parm_ts_month):
    #     print('month : ', month)
        # col_data.append(col_data_zeros)  # no-op: DataFrame.append is not in-place; the zero head rows are set in the loop below
        for i in range(0, gap):
            col_data.loc[month*parm_ts_cycle+i] = 0
        for i in range(gap, parm_ts_cycle):
            col_data.loc[month*parm_ts_cycle+i] = \
            np.mean(df_history_ts_process['bid-price'][month*parm_ts_cycle+i-gap:month*parm_ts_cycle+i])
  
    df_history_ts_process[col_name] = col_data

print('Total records processed : ', len(col_data))

In [ ]:
df_history_ts_process[1768:]

In [ ]:
# previous 2 sec Moving Average ['bid-price']

gap = parm_calculate_mv

for gap in range(1, gap+1):
    col_name = 'bid-price-mv'+str(gap)+'sec'
    col_data = pd.DataFrame(columns=[col_name])
    print('Creating : ', col_name)  

    for month in range(0, parm_ts_month):
    #     print('month : ', month)
        # col_data.append(col_data_zeros)  # no-op (and col_data_zeros is not defined in this cell); the zero head rows are set below
        for i in range(0, gap):
            col_data.loc[month*parm_ts_cycle+i] = 0
        for i in range(gap, parm_ts_cycle):
            col_data.loc[month*parm_ts_cycle+i] = df_history_ts_process['bid-price'][month*parm_ts_cycle+i-gap]
  
    df_history_ts_process[col_name] = col_data

print('len : ', len(col_data))

In [ ]:
# previous N sec
gap = 1
gap = 2
gap = 3
gap = 4
gap = 5
gap = 6
gap = 7
gap = 8
gap = 9
gap = 10

col_name = 'bid-price-prev'+str(gap)+'sec'
col_data = pd.DataFrame(columns=[col_name])

for month in range(0, parm_ts_month):
#     print('month : ', month)
    # col_data.append(col_data_zeros)  # no-op: DataFrame.append is not in-place
    for i in range(0, gap):
        col_data.loc[month*parm_ts_cycle+i] = 0
    for i in range(gap, parm_ts_cycle):
        col_data.loc[month*parm_ts_cycle+i] = df_history_ts_process['bid-price'][month*parm_ts_cycle+i-gap]
    
print('len : ', len(col_data))    
df_history_ts_process[col_name] = col_data

In [ ]:
len(col_data)

In [ ]:
# previous 1 sec
gap = 10

col_data = pd.DataFrame({'bid-price-prev'+str(gap)+'sec': np.zeros(gap)})

# for i in range(gap, len(df_history_ts)-1768):
for i in range(gap, parm_ts_cycle):
#     print(df_history_ts['bid-price'][i])
    col_data.loc[i] = df_history_ts['bid-price'][i]

print(len(col_data))

In [ ]:


In [ ]:
df_history_ts_process = df_history_ts.copy()

In [ ]:
df_history_table_process['tmp'] = col_data['bid-price-prev'+str(gap)+'sec']

In [ ]:
df_history_table_process.tail()

In [ ]:


In [ ]:


In [ ]:
col_data

The End