In [1]:
%matplotlib inline

from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import precision_recall_fscore_support
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from pandas.plotting import scatter_matrix

In [11]:
import json
from pprint import pprint

rows = []

# Each line of the input file is a standalone JSON document; collect the
# fields of interest and build the DataFrame in one pass rather than
# concatenating inside the loop.
with open('/Users/danielkershaw/Downloads/regression-test') as data_file:
    for line in data_file:
        data = json.loads(line)
        rows.append({"r_a_l_results_1": data["r_a_l_results"][0],
                     "r_a_l_results_2": data["r_a_l_results"][1],
                     "observation_level": data["observation_level"],
                     "combination": data["combination"]})

df = pd.DataFrame(rows, index=[r["observation_level"] for r in rows])
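For reference, a minimal self-contained sketch of the line format the loader above assumes: each line is a JSON object carrying an "r_a_l_results" list (only the first two entries are read), an "observation_level", and a "combination" label. The sample values below are made up to roughly match the head of the frame shown in the next cell.

import json
import pandas as pd

# Hypothetical sample lines mirroring the assumed schema of the input file.
sample_lines = [
    '{"r_a_l_results": [7.25e5, 2.84e-25], "observation_level": 1006, "combination": "all"}',
    '{"r_a_l_results": [6.53e7, 3.72e3], "observation_level": 1006, "combination": "basic"}',
]

rows = []
for line in sample_lines:
    data = json.loads(line)
    rows.append({"r_a_l_results_1": data["r_a_l_results"][0],
                 "r_a_l_results_2": data["r_a_l_results"][1],
                 "observation_level": data["observation_level"],
                 "combination": data["combination"]})

print(pd.DataFrame(rows, index=[r["observation_level"] for r in rows]))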

In [12]:
df.head()


Out[12]:
combination observation_level r_a_l_results_1 r_a_l_results_2
1006 all 1006 7.253471e+05 2.843433e-25
1006 basic 1006 6.526907e+07 3.718749e+03
1006 exposure 1006 8.603907e+05 2.120230e+04
1006 community 1006 1.787914e+07 2.688337e-24
1006 time 1006 3.398878e+05 2.170691e+04

In [14]:
df.pivot(index='observation_level', columns='combination', values='r_a_l_results_1')


Out[14]:
combination all basic community exposure time
observation_level
50 8023.668413 9319.264947 63360.915027 19349.121309 59656.365477
51 10046.518743 13829.724012 26142.351340 12619.206307 23623.903971
52 8009.474864 11910.405278 22645.002757 14257.354538 20007.330490
53 23284.141313 24101.619633 85343.466454 34523.745795 78004.436297
54 4938.117265 9144.615682 7014.472553 4639.286932 8664.277441
55 13581.506510 19015.949323 23940.442155 12468.032124 28272.753864
56 3484.941951 4178.671690 5842.340056 2943.607412 5877.817285
57 4026.073532 4879.464509 14491.079816 6891.453077 15323.479235
58 9443.066222 10219.467357 96229.222764 35338.804195 86668.538987
59 12807.359501 17573.012018 37500.339691 22263.501859 32541.112695
60 14645.659572 19796.997164 37936.596210 14745.191834 39601.100681
61 8034.170300 12224.732726 24402.641912 16849.881864 21360.249392
62 5073.135506 6436.243375 10775.293859 3870.997565 8973.717637
63 17143.600012 23093.254512 31302.222273 16728.376811 34010.191464
64 2619.143237 3838.837109 82947.596107 24123.324810 75254.579551
65 9567.250416 12955.077689 23901.735696 10730.299352 20424.860449
66 14621.802612 19513.158220 33111.793698 20910.041092 28830.041588
67 5547.208920 5648.728297 10802.509069 4025.628404 11957.179523
68 3363.340386 5448.240502 6418.657824 3208.565871 7465.041525
69 14188.925009 20457.086147 28953.817649 19587.646020 24953.023848
70 5142.479648 4807.367907 10349.144070 6292.385924 11984.895071
71 23037.172088 22147.712957 32927.064305 22117.931338 22067.303818
72 8366.046349 10443.317852 84805.096443 23356.547161 76411.248420
73 7672.911744 7611.383103 24705.829575 12931.233741 19211.421706
74 7625.857304 8588.820250 18991.452414 7360.741208 13294.542033
75 22162.051787 29253.216620 56933.018021 18463.842364 50293.509417
76 5514.545409 6234.912071 11145.818123 7084.111804 12806.293843
77 6884.199769 7803.528247 123322.279703 39018.660022 103803.518166
78 24240.549337 30707.129258 45997.258607 15436.600059 45818.059900
79 30342.985334 25547.560442 48326.713616 31914.112736 36797.783963
... ... ... ... ... ...
1625 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1626 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1627 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1628 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1629 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1630 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1631 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1632 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1633 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1634 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1635 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1636 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1637 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1638 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1639 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1640 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1641 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1642 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1643 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1644 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1645 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1646 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1647 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1648 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1649 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1650 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1651 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1652 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1653 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000
1654 408321.000000 408321.000000 408321.000000 408321.000000 408321.000000

1605 rows × 5 columns
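A side note on the reshape: DataFrame.pivot requires each (observation_level, combination) pair to appear at most once and raises a ValueError on duplicates. If the input ever contained duplicate pairs, pivot_table with an explicit aggregation is the usual fallback; the mean used below is an assumption for illustration, not something this notebook relies on.

# Fallback sketch if (observation_level, combination) pairs were duplicated:
df.pivot_table(index='observation_level', columns='combination',
               values='r_a_l_results_1', aggfunc='mean')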


In [21]:
df.pivot(index='observation_level', columns='combination', values='r_a_l_results_1').plot(logy=True)


Out[21]:
<matplotlib.axes._subplots.AxesSubplot at 0x115e88b90>

In [22]:
df.pivot(index='observation_level', columns='combination', values='r_a_l_results_2').plot(logy=False)


Out[22]:
<matplotlib.axes._subplots.AxesSubplot at 0x115e94210>
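The two cells above plot the pivoted curves for each results column, with a log scale only on the first. As a minimal sketch (the labels and filename below are arbitrary choices, not taken from the notebook), the Axes object returned by .plot() can be labelled and the figure written to disk:

ax = df.pivot(index='observation_level', columns='combination',
              values='r_a_l_results_2').plot(logy=True)
ax.set_xlabel('observation_level')
ax.set_ylabel('r_a_l_results_2')
ax.get_figure().savefig('r_a_l_results_2.png')  # output filename is arbitrary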
