Modified Ornstein-Uhlenbeck Process for Simulating Temperature Paths

David Islip


In [180]:
import pandas as pd 
import matplotlib.pyplot as plt
import matplotlib
import datetime as dt
import scipy.stats as stats
from scipy.stats import norm
import numpy as np
import math
import seaborn as sns
# local helper modules for the invariance (i.i.d.) diagnostics
from InvarianceTestEllipsoid import InvarianceTestEllipsoid
from autocorrelation import autocorrelation
import statsmodels.api as sm
from statsmodels.sandbox.regression.predstd import wls_prediction_std
import statsmodels.tsa.ar_model as ar_model
import pickle
%matplotlib inline

0. The model:

The model used to simulate temperature paths follows Benth et al. Essentially it is a discretization of an Ornstein-Uhlenbeck SDE with a linear-plus-periodic mean function and a periodic volatility function:

$dT(t) = ds(t) - \kappa(T(t) - s(t))dt + \sigma(t)dB(t)$

where

$s(t) = a + bt + \sum_i a_i \sin(2\pi i t/365) + \sum_j b_j \cos(2\pi j t/365)$

and

$\sigma^2(t) = c_0 + \sum_i c_i \sin(2\pi i t/365) + \sum_j d_j \cos(2\pi j t/365)$
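
With a daily step ($\Delta t = 1$), the discretized model is straightforward to simulate. The sketch below is illustrative only (the function and argument names are mine, not from Benth et al.); it assumes s and sigma are callables returning the seasonal mean and volatility on day t:

In []:
def simulate_path(s, sigma, kappa, T0, n_days):
    """One path of the discretized OU model with dt = 1 day:
    T[t+1] = s(t+1) + (1 - kappa)*(T[t] - s(t)) + sigma(t)*eps[t]"""
    eps = np.random.normal(0, 1, n_days)
    T = np.empty(n_days)
    T[0] = T0
    for t in range(n_days - 1):
        # the deviation from the seasonal mean decays at rate kappa each day
        T[t + 1] = s(t + 1) + (1 - kappa) * (T[t] - s(t)) + sigma(t) * eps[t]
    return T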


In [90]:
T = pd.read_csv("CleanedTemperature.csv")
T.rename(columns={T.columns[0]: 'Date'}, inplace=True)
T.index = pd.to_datetime(T["Date"])
# drop the 366th day of leap years so every year has 365 days,
# matching the 365-day period of the Fourier terms used below
T.drop(T.index[T.index.dayofyear == 366], axis=0, inplace=True)
T.drop("Date", axis=1, inplace=True)
T.plot(subplots=True, fontsize=8, layout=(4, 3))
T


Out[90]:
[Output: cleaned daily temperature DataFrame, 10831 rows × 12 columns. Stations: Buttonville A, Hamilton A, Kingsville Moe, North Bay A, Orillia, Oshawa WPCP, Ottawa CDA, Sault St Marie A, Sudbury A, Trent U, Wiarton A, Windsor A. Date index runs 1987-05-01 to 2017-01-01, with the 366th day of each leap year removed (e.g. 2016-12-31 is absent). The subplot grid of the 12 station series is not shown.]

1. Regression to obtain $s(t)$


In [91]:
Y = np.array(T)
#generating the factor space for the mean
a = np.ones(len(Y))
t = np.linspace(1,len(Y),len(Y))
N = 4 #number of sine and cosine functions to include
n = np.linspace(1,N,N)
Sines = np.sin(2*np.pi*(np.outer(t,n))/365)
Cosines = np.cos(2*np.pi*(np.outer(t,n))/365)
X = np.stack((a, t), axis=1)
X = np.concatenate((X,Sines,Cosines),axis=1)

1.1 Using Lasso Regression to shrink factors to zero

The plot below varies the strength of the regularization to see which coefficients are driven to zero. (The code actually uses statsmodels' elastic net with L1_wt=0.5, an equal mix of L1 and L2 penalties; L1_wt=1.0 would give a pure lasso.)

Training data: $(x_t, y_t)$

Model specification: $y_t = \beta x_t + C$

Lasso objective: $\underset{\beta}{\operatorname{argmin}} \sum_t (y_t - (\beta x_t + C))^2 + \lambda\|\beta\|_1$

As $\lambda$ increases, more of the coefficients in $\beta$ shrink to zero.


In [92]:
Y = np.transpose(Y)
y = Y[0][:]  # series for the first station (column 0 of T: Buttonville A)
L = []
model = sm.OLS(y, X)
# sweep the regularization strength; L1_wt=0.5 is an equal L1/L2 mix
for i in range(10):
    results = model.fit_regularized(method='elastic_net', alpha=i/10, L1_wt=0.5)
    L.append(results.params)
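
As a cross-check, scikit-learn (if installed) computes the full lasso coefficient path in one call; a minimal sketch using the X and y defined above:

In []:
# coefficient paths over an automatically chosen penalty grid;
# coefs has shape (n_features, n_alphas)
from sklearn.linear_model import lasso_path

alphas, coefs, _ = lasso_path(X, y, n_alphas=50)
plt.plot(alphas, coefs.T)
plt.xlabel("lambda")
plt.ylabel("coefficient value")
plt.title("Lasso coefficient paths")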

In [93]:
L = pd.DataFrame(L)
# normalize each coefficient path by its maximum so the paths are comparable
L = L/L.max(axis=0)
L.plot()
L


Out[93]:
(rows: regularization index i, where alpha = i/10; columns: coefficient index; each column is normalized by its maximum)

          0         1         2         3     4     5         6         7         8     9
0  1.000000  0.160784  1.000000  1.000000  -inf  -inf  1.000000  1.000000  1.000000  -inf
1  0.812745  0.377072  0.902925  0.602671  -inf   NaN  0.855326  0.586144  0.518852  -inf
2  0.678679  0.532015  0.821889  0.269122  -inf   NaN  0.733345  0.250372  0.121704   NaN
3  0.577889  0.648549  0.753255  0.000000  -inf   NaN  0.629158  0.000000  0.000000   NaN
4  0.499327  0.739326  0.694433  0.000000   NaN   NaN  0.539177  0.000000  0.000000   NaN
5  0.436380  0.812065  0.643425  0.000000   NaN   NaN  0.460863  0.000000  0.000000   NaN
6  0.384778  0.871713  0.598767  0.000000   NaN   NaN  0.392003  0.000000  0.000000   NaN
7  0.341700  0.921522  0.559346  0.000000   NaN   NaN  0.330994  0.000000  0.000000   NaN
8  0.305190  0.963746  0.524292  0.000000   NaN   NaN  0.276575  0.000000  0.000000   NaN
9  0.273851  1.000000  0.492919  0.000000   NaN   NaN  0.227737  0.000000  0.000000   NaN

In [94]:
# keep only the factors whose coefficients survive the strongest regularization
cols = L.columns[L.iloc[-1] > 0.001]
Xs = X[:, cols]

1.2 Mean Regression Results (p-values, coefficients, ...)


In [161]:
model = sm.OLS(y,Xs)
results = model.fit()
print(results.summary())


                            OLS Regression Results                            
==============================================================================
Dep. Variable:                      y   R-squared:                       0.825
Model:                            OLS   Adj. R-squared:                  0.825
Method:                 Least Squares   F-statistic:                 1.703e+04
Date:                Thu, 02 Nov 2017   Prob (F-statistic):               0.00
Time:                        16:39:05   Log-Likelihood:                -31659.
No. Observations:               10831   AIC:                         6.333e+04
Df Residuals:                   10827   BIC:                         6.336e+04
Df Model:                           3                                         
Covariance Type:            nonrobust                                         
==============================================================================
                 coef    std err          t      P>|t|      [0.025      0.975]
------------------------------------------------------------------------------
const          7.1862      0.087     83.067      0.000       7.017       7.356
x1             0.0001   1.38e-05     10.150      0.000       0.000       0.000
x2            13.7446      0.061    224.463      0.000      13.625      13.865
x3             1.4861      0.061     24.321      0.000       1.366       1.606
==============================================================================
Omnibus:                       60.539   Durbin-Watson:                   0.626
Prob(Omnibus):                  0.000   Jarque-Bera (JB):               87.992
Skew:                          -0.011   Prob(JB):                     7.81e-20
Kurtosis:                       3.441   Cond. No.                     1.25e+04
==============================================================================

Warnings:
[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
[2] The condition number is large, 1.25e+04. This might indicate that there are
strong multicollinearity or other numerical problems.

In [162]:
Comparison = pd.DataFrame(results.predict(Xs))
Comparison["Actual"] = y
Comparison.rename(columns={Comparison.columns[0]: 'Predicted'}, inplace=True)
# plot the final year of observations against the fitted seasonal mean
Comparison.iloc[len(y)-365:len(y)].plot(title="Buttonville A Temperature")


Out[162]:
<matplotlib.axes._subplots.AxesSubplot at 0x132908d0>

2. AR(1) Process for the Residuals

Discretizing the SDE implies that the mean-removed temperature series follows an AR(1) process:

$X_{t+1} = \alpha X_t + \sigma(t)\epsilon_t$
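
Concretely, an Euler step with $\Delta t = 1$ day, writing $X(t) = T(t) - s(t)$ for the deviation from the seasonal mean, gives $X_{t+1} = (1-\kappa)X_t + \sigma(t)\epsilon_t$, so the AR(1) coefficient identifies the mean-reversion speed via $\kappa = 1 - \alpha$.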

2.1 The code below motivates an AR(1) model for the residuals obtained above: the ellipsoid test shows significant autocorrelation in the residuals of the mean regression.


In [118]:
epsi = Comparison['Actual'] - Comparison['Predicted']
epsi = np.array(epsi)
epsi = np.expand_dims(epsi, axis=0)

lag_ = 10  # number of lags (for auto correlation test)
acf = autocorrelation(epsi, lag_)

lag = 10 # lag to be printed
ell_scale = 2  # ellipsoid radius coefficient
fit = 0  # normal fitting
InvarianceTestEllipsoid(epsi, acf[0,1:], lag, fit, ell_scale);
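
The autocorrelation and InvarianceTestEllipsoid helpers are local modules; if they are unavailable, a standard ACF plot conveys the same diagnostic. A minimal sketch using statsmodels:

In []:
# equivalent autocorrelation diagnostic with statsmodels' built-in ACF plot
from statsmodels.graphics.tsaplots import plot_acf

plot_acf(epsi[0], lags=10)
plt.show()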



In [163]:
epsi = Comparison['Actual'] - Comparison['Predicted']
epsi = np.array(epsi)
model = sm.tsa.AR(epsi)
AResults = model.fit(maxlag=30, ic="bic", method='cmle')
print("The number of lags selected for the residuals by the Bayesian information criterion is:")
model.select_order(maxlag=10, ic='bic', method='cmle')


The number of lags selected for the residuals by the Bayesian information criterion is:
Out[163]:
3
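
Although the BIC admits up to 3 lags, the discretized SDE implies an AR(1) structure, so a single lag is retained below.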

In [164]:
# AR(1) coefficient alpha estimated by regressing the residual on its first lag (no intercept)
ar_mod = sm.OLS(epsi[1:], epsi[:-1])
ar_res = ar_mod.fit()
print(ar_res.summary())

ep = ar_res.predict()
print(len(ep), len(epsi))
z = ep - epsi[1:]

plt.plot(epsi[1:], color='black', label='residuals')
plt.plot(ep, color='blue', linewidth=3, label='AR(1) prediction')
plt.title('Residuals AR(1) Process')
plt.ylabel("Residual")
plt.xlabel("Days")
plt.legend()


                            OLS Regression Results                            
==============================================================================
Dep. Variable:                      y   R-squared:                       0.472
Model:                            OLS   Adj. R-squared:                  0.472
Method:                 Least Squares   F-statistic:                     9671.
Date:                Thu, 02 Nov 2017   Prob (F-statistic):               0.00
Time:                        16:41:11   Log-Likelihood:                -28201.
No. Observations:               10830   AIC:                         5.640e+04
Df Residuals:                   10829   BIC:                         5.641e+04
Df Model:                           1                                         
Covariance Type:            nonrobust                                         
==============================================================================
                 coef    std err          t      P>|t|      [0.025      0.975]
------------------------------------------------------------------------------
x1             0.6869      0.007     98.343      0.000       0.673       0.701
==============================================================================
Omnibus:                      145.304   Durbin-Watson:                   1.799
Prob(Omnibus):                  0.000   Jarque-Bera (JB):              265.647
Skew:                          -0.035   Prob(JB):                     2.07e-58
Kurtosis:                       3.764   Cond. No.                         1.00
==============================================================================

Warnings:
[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
10830 10831
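
With $\hat{\alpha} = 0.6869$, the implied mean-reversion speed is $\hat{\kappa} = 1 - \hat{\alpha} \approx 0.313$ per day.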

2.2 Invariance check for the residuals of the AR(1) process


In [165]:
z = np.expand_dims(z, axis=0)

lag_ = 10  # number of lags (for auto correlation test)
acf = autocorrelation(z, lag_)

lag = 10  # lag to be printed
ell_scale = 2  # ellipsoid radius coefficient
fit = 0  # normal fitting
InvarianceTestEllipsoid(z, acf[0,1:], lag, fit, ell_scale);


2.3 Following Benth et al., examine the squared residuals of the AR(1) process: if the volatility is seasonal, the squared residuals should show an annual pattern.


In [166]:
z = ep - epsi[1:]
plt.plot(z**2)  # seasonality in the squared residuals motivates the periodic variance model below


Out[166]:
[<matplotlib.lines.Line2D at 0x15b29d68>]

3. Modelling the Volatility Term: $\sigma^2(t)$


In [167]:
# regress the squared AR(1) residuals on the Fourier basis;
# X[1:] aligns the basis with z, which starts at the second observation
sigma = z**2
L = []
volmodel = sm.OLS(sigma, X[1:])
for i in range(10):
    volresults = volmodel.fit_regularized(method='elastic_net', alpha=i/10, L1_wt=0.5)
    L.append(volresults.params)

In [168]:
L = pd.DataFrame(L)
L = L/L.max(axis=0)
L.plot()
L


Out[168]:
(rows: regularization index i, where alpha = i/10; columns: coefficient index; each column is normalized by its maximum)

          0         1         2     3         4     5     6         7    8     9
0  1.000000 -0.220320  2.268152  -inf  1.000000  -inf  -inf  6.085140  1.0  -inf
1  0.818510  0.097131  2.020814   NaN  0.847967  -inf   NaN  5.110620  0.0  -inf
2  0.689085  0.323363  1.816455   NaN  0.719607   NaN   NaN  4.297068  0.0  -inf
3  0.592107  0.492802  1.644561   NaN  0.609640   NaN   NaN  3.606971  0.0   NaN
4  0.516724  0.624472  1.497894   NaN  0.514896   NaN   NaN  3.015915  0.0   NaN
5  0.456447  0.729728  1.371232   NaN  0.432493   NaN   NaN  2.504200  0.0   NaN
6  0.407151  0.815788  1.260714   NaN  0.360204   NaN   NaN  2.056957  0.0   NaN
7  0.366087  0.887460  1.163422   NaN  0.296298   NaN   NaN  1.662784  0.0   NaN
8  0.331353  0.948072  1.077106   NaN  0.239411   NaN   NaN  1.312803  0.0   NaN
9  0.301590  1.000000  1.000000   NaN  0.188454   NaN   NaN  1.000000  0.0   NaN

In [169]:
volcols = L.columns[L.iloc[-1] > 0.001]
Xvol = X[1:, volcols]
volmodel = sm.OLS(sigma, Xvol)
VolResults = volmodel.fit()
print(VolResults.summary())


                            OLS Regression Results                            
==============================================================================
Dep. Variable:                      y   R-squared:                       0.065
Model:                            OLS   Adj. R-squared:                  0.065
Method:                 Least Squares   F-statistic:                     188.7
Date:                Thu, 02 Nov 2017   Prob (F-statistic):          1.35e-156
Time:                        16:41:29   Log-Likelihood:                -46176.
No. Observations:               10830   AIC:                         9.236e+04
Df Residuals:                   10825   BIC:                         9.240e+04
Df Model:                           4                                         
Covariance Type:            nonrobust                                         
==============================================================================
                 coef    std err          t      P>|t|      [0.025      0.975]
------------------------------------------------------------------------------
const         11.8645      0.331     35.879      0.000      11.216      12.513
x1            -0.0002   5.29e-05     -3.898      0.000      -0.000      -0.000
x2            -6.0909      0.234    -26.028      0.000      -6.550      -5.632
x3             1.3675      0.234      5.849      0.000       0.909       1.826
x4            -1.3356      0.234     -5.709      0.000      -1.794      -0.877
==============================================================================
Omnibus:                     8128.079   Durbin-Watson:                   1.911
Prob(Omnibus):                  0.000   Jarque-Bera (JB):           183313.518
Skew:                           3.434   Prob(JB):                         0.00
Kurtosis:                      21.949   Cond. No.                     1.25e+04
==============================================================================

Warnings:
[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
[2] The condition number is large, 1.25e+04. This might indicate that there are
strong multicollinearity or other numerical problems.

In [170]:
VolComparison = pd.DataFrame(VolResults.predict())
VolComparison["Actual"] = sigma
VolComparison.rename(columns={VolComparison.columns[0]: 'Predicted'}, inplace=True)
# plot the first two years of squared residuals against the fitted seasonal variance
VolComparison.iloc[0:720]['Actual'].plot(title="Buttonville A Volatility Model")
VolComparison.iloc[0:720]['Predicted'].plot(title="Buttonville A Volatility Model")


Out[170]:
<matplotlib.axes._subplots.AxesSubplot at 0x15b61588>

In [171]:
# standardize the AR(1) residuals by the fitted seasonal volatility
epsi = z/(VolResults.predict())**0.5
epsi = np.expand_dims(epsi, axis=0)
lag_ = 10  # number of lags (for auto correlation test)
acf = autocorrelation(epsi, lag_)

lag = 10  # lag to be printed
ell_scale = 2  # ellipsoid radius coefficient
fit = 0  # normal fitting
InvarianceTestEllipsoid(epsi, acf[0,1:], lag, fit, ell_scale);



In [172]:
# Fit a normal distribution to the standardized residuals:
mu, std = norm.fit(epsi[0])

# Plot the normalized histogram (density replaces the deprecated normed argument).
plt.hist(epsi[0], bins=25, density=True, alpha=0.6, color='g')

# Plot the fitted normal PDF.
xmin, xmax = plt.xlim()
x = np.linspace(xmin, xmax, 100)
p = norm.pdf(x, mu, std)
plt.plot(x, p, 'k', linewidth=2)
title = "Fit results: mu = %.2f,  std = %.2f" % (mu, std)
plt.title(title)

plt.show()
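
As a quantitative complement to the histogram (an aside, not part of the original analysis), a Kolmogorov-Smirnov test against the fitted normal can be run with the scipy.stats module already imported above:

In []:
# KS test of the standardized residuals against the fitted normal;
# the p-value is approximate since mu and std were estimated from the same data
ks_stat, p_value = stats.kstest(epsi[0], 'norm', args=(mu, std))
print("KS statistic = %.4f, p-value = %.4f" % (ks_stat, p_value))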


4. Monte Carlo Simulation


In [214]:
# tau is the risk horizon (in days)
tau = 365*2

a = np.ones(tau)
# integer day index continuing the in-sample count, starting one day past
# the end of the sample (unit spacing matters for the 365-day periodicity)
t = np.arange(len(y) + 1, len(y) + tau + 1)
N = 4  # number of sine and cosine functions to include
n = np.linspace(1, N, N)
Sines = np.sin(2*np.pi*(np.outer(t, n))/365)
Cosines = np.cos(2*np.pi*(np.outer(t, n))/365)
X_proj = np.stack((a, t), axis=1)
X_proj = np.concatenate((X_proj, Sines, Cosines), axis=1)

In [215]:
# M is the number of Monte Carlo paths to simulate
M = 1000
invariants = np.random.normal(0, 1, (tau, M))
# projected seasonal volatility over the horizon
vol_proj = (VolResults.predict(X_proj[:, volcols]))**0.5
sig_hat = np.expand_dims(vol_proj, axis=1)*invariants
# propagate the AR(1) recursion X[t+1] = alpha*X[t] + sigma(t)*eps[t]
AR = np.zeros(sig_hat.shape)
for i in range(sig_hat.shape[0]-1):
    AR[i+1] = ar_res.params[0]*AR[i] + sig_hat[i]

# add the simulated deviations back onto the projected seasonal mean
Mean_Temp = np.expand_dims(results.predict(X_proj[:, cols]), axis=1)
Temp_Paths = np.repeat(Mean_Temp, M, axis=1) + AR
plt.plot(Temp_Paths[:, 0:3])  # plot the first three simulated paths


Out[215]:
[<matplotlib.lines.Line2D at 0x15a56e48>,
 <matplotlib.lines.Line2D at 0x15a56b38>,
 <matplotlib.lines.Line2D at 0x15a56198>]

In [216]:
T_out = pd.DataFrame(Temp_Paths)
# daily DatetimeIndex continuing from the end of the observed sample
T_out.index = pd.date_range(start=T.index[-1] + dt.timedelta(days=1), periods=tau, freq='D')
T_out.to_pickle("C:\\Users\\islipd\\Documents\\Thesis Notebooks\\Tout.pkl")
T_out.mean(axis=1).plot()


Out[216]:
<matplotlib.axes._subplots.AxesSubplot at 0x1457a9b0>