In [1]:
import quandl;
import pandas as pd;

import pickle;

import matplotlib.pyplot as plt;
from matplotlib import style;
style.use("ggplot");

In [2]:
api_key = open("quandlapikey.txt", "r").read();

def state_list():
    """Scrape U.S. state abbreviations from simple.wikipedia.org.

    Returns:
        pandas.Series: the first column of the first HTML table on the page,
        with row 0 dropped (it is the table's header row).
    """
    # read_html returns a list of DataFrames, one per <table> on the page.
    tables = pd.read_html("https://simple.wikipedia.org/wiki/List_of_U.S._states")
    # NOTE(review): assumes the first table's first column holds the two-letter
    # abbreviations — verify against the page if the layout changes.
    return tables[0].iloc[1:, 0]

def grap_initial_state_data_start_pct():
    """Download each state's Freddie Mac HPI series and pickle the result.

    Every state's series is rescaled to percent change relative to its first
    observation, so all states start at 0 and are directly comparable.
    The combined frame (one column per state, joined on date index) is
    written to ./data/fiddy_states.pickle as a side effect.
    """
    states = state_list()
    main_df = pd.DataFrame()
    for ab in states:
        query = "FMAC/HPI_" + ab
        df = quandl.get(query, authtoken=api_key)
        df.columns = [ab]
        # .iloc[0] replaces the chained df[ab][0]: integer [] access on a
        # date-indexed Series relied on a positional fallback that modern
        # pandas has removed.
        first = df[ab].iloc[0]
        df[ab] = (df[ab] - first) / first * 100.0
        if main_df.empty:
            main_df = df
        else:
            main_df = main_df.join(df)

    # Context manager closes the file even if pickle.dump raises.
    with open("./data/fiddy_states.pickle", "wb") as pickle_out:
        pickle.dump(main_df, pickle_out)
    
def HPI_Benchmark():
    """Return the national (USA) HPI as percent change from its first value.

    Returns:
        pandas.DataFrame: single column "US", date-indexed, rescaled the same
        way as the per-state series so it can serve as a benchmark overlay.
    """
    df = quandl.get("FMAC/HPI_USA", authtoken=api_key)
    df.columns = ["US"]
    # .iloc[0] avoids the deprecated positional fallback of df["US"][0]
    # on a DatetimeIndex.
    first = df["US"].iloc[0]
    df["US"] = (df["US"] - first) / first * 100.0
    return df

In [17]:
#grap_initial_state_data_start_pct()
HPI_data = pd.read_pickle("./data/fiddy_states.pickle")

# Sanity check: compute the first three 12-month means by hand; they should
# match the first non-NaN entries of the rolling mean below. .iloc is used
# because integer [] access on a date-indexed Series (HPI_data["TX"][i])
# raises a KeyError in pandas >= 2.0.
for shift in range(0, 3):
    window = HPI_data["TX"].iloc[shift:shift + 12]
    print(window.mean())

# 12-month rolling statistics for Texas.
HPI_data["TX12MA"] = HPI_data["TX"].rolling(window=12).mean()
HPI_data["TX12STD"] = HPI_data["TX"].rolling(window=12).std()  # sample std within each window
print(HPI_data[["TX", "TX12MA", "TX12STD"]])

HPI_data[["TX", "TX12MA", "TX12STD"]].plot()
plt.legend()
plt.show()


4.00183593433
4.52699828329
5.04935449285
                    TX      TX12MA    TX12STD
Date                                         
1975-01-31    0.000000         NaN        NaN
1975-02-28    0.951488         NaN        NaN
1975-03-31    2.701039         NaN        NaN
1975-04-30    5.276570         NaN        NaN
1975-05-31    6.099286         NaN        NaN
1975-06-30    4.882747         NaN        NaN
1975-07-31    4.067261         NaN        NaN
1975-08-31    4.011509         NaN        NaN
1975-09-30    4.272719         NaN        NaN
1975-10-31    4.606024         NaN        NaN
1975-11-30    5.261535         NaN        NaN
1975-12-31    5.891854    4.001836   1.892892
1976-01-31    6.301948    4.526998   1.518962
1976-02-29    7.219763    5.049354   1.227420
1976-03-31    9.422551    5.609480   1.549707
1976-04-30   12.572041    6.217436   2.528899
1976-05-31   13.553547    6.838625   3.296316
1976-06-30   12.362672    7.461952   3.587223
1976-07-31   11.779576    8.104645   3.614506
1976-08-31   11.780492    8.752060   3.508943
1976-09-30   12.048459    9.400038   3.319402
1976-10-31   12.698906   10.074445   3.069569
1976-11-30   13.620130   10.770995   2.816032
1976-12-31   14.218217   11.464859   2.514148
1977-01-31   14.742006   12.168197   2.081918
1977-02-28   16.188283   12.915573   1.722826
1977-03-31   19.550528   13.759571   2.254749
1977-04-30   24.724240   14.772255   3.842705
1977-05-31   25.583865   15.774781   4.915423
1977-06-30   22.434240   16.614078   5.134802
...                ...         ...        ...
2014-04-30  358.514195  342.257257   7.888746
2014-05-31  363.844133  345.177881   8.877960
2014-06-30  368.312081  348.022269  10.375402
2014-07-31  371.399166  350.863255  11.751030
2014-08-31  373.668731  353.743881  12.796029
2014-09-30  375.378778  356.688351  13.407922
2014-10-31  376.310155  359.650588  13.496149
2014-11-30  377.452510  362.638858  13.098547
2014-12-31  378.784349  365.626445  12.256913
2015-01-31  380.324786  368.626528  10.899471
2015-02-28  383.847295  371.689942   9.351035
2015-03-31  389.570965  374.783929   8.508170
2015-04-30  395.528010  377.868413   8.778707
2015-05-31  401.108652  380.973790   9.887690
2015-06-30  406.310999  384.140366  11.428669
2015-07-31  409.911550  387.349732  12.845143
2015-08-31  410.795401  390.443621  13.693524
2015-09-30  411.107680  393.421029  14.001016
2015-10-31  412.400021  396.428518  13.866877
2015-11-30  413.319099  399.417401  13.256885
2015-12-31  413.591667  402.318010  12.088385
2016-01-31  415.471255  405.246883  10.417581
2016-02-29  420.221176  408.278040   8.789550
2016-03-31  425.242406  411.250660   7.871788
2016-04-30  430.403954  414.156988   7.976680
2016-05-31  436.738910  417.126177   9.213590
2016-06-30  441.620726  420.068654  10.924990
2016-07-31  444.858668  422.980914  12.513674
2016-08-31  447.098030  426.006133  13.637641
2016-09-30  447.869345  429.069605  14.107559

[501 rows x 3 columns]

In [22]:
fig = plt.figure()
# subplot2grid((rows, cols), (row_start, col_start)): two stacked panels.
ax1 = plt.subplot2grid((2, 1), (0, 0))
# sharex links the panels' x-axes — consistent with the correlation cell
# below, which already does this; without it the two panels can drift apart
# when zooming/panning.
ax2 = plt.subplot2grid((2, 1), (1, 0), sharex=ax1)

HPI_data = pd.read_pickle("./data/fiddy_states.pickle")

HPI_data["TX12MA"] = HPI_data["TX"].rolling(window=12).mean()
HPI_data["TX12STD"] = HPI_data["TX"].rolling(window=12).std()  # rolling sample standard deviation
HPI_data[["TX", "TX12MA"]].plot(ax=ax1)
HPI_data["TX12STD"].plot(ax=ax2)

plt.legend(loc=2)
plt.show()



In [30]:
# Plot the TX and AK index levels on top, their rolling correlation below.
HPI_data = pd.read_pickle("./data/fiddy_states.pickle")

fig = plt.figure()
ax_levels = plt.subplot2grid((2, 1), (0, 0))
ax_corr = plt.subplot2grid((2, 1), (1, 0), sharex=ax_levels)

# 12-month rolling correlation between the Texas and Alaska series.
tx_series = HPI_data["TX"]
ak_series = HPI_data["AK"]
rolling_corr = tx_series.rolling(window=12).corr(ak_series)

HPI_data[["TX", "AK"]].plot(ax=ax_levels)
rolling_corr.plot(ax=ax_corr, label="Correlation")

plt.legend(loc=4)
plt.show()



In [ ]: