In [1]:
from espei.datasets import load_datasets
from espei.error_functions.context import setup_context
from espei.error_functions.zpf_error import calculate_zpf_error
from pycalphad import Database
from glob import glob
import numpy as np

ds = load_datasets(glob('2020-05-28-Cr-Ni-ZPF/input-data/run/ZPF/*.json'))
#ds = load_datasets(['UQ/Cr-Ni-run/run/ZPF/CR-NI-ZPF-BCC_A2-FCC_A1-zhang2014impurity.json'])
dbf = Database('dft.tdb')
param_symbol_labels = {
    'VV0000': 'L(A1;1)B',
    'VV0001': 'L(A1;1)A',
    'VV0002': 'L(A1;0)B',
    'VV0003': 'L(A1;0)A',
    'VV0004': 'L(A2;1)B',
    'VV0005': 'L(A2;1)A',
    'VV0006': 'L(A2;0)B',
    'VV0007': 'L(A2;0)A',
    'VV0008': 'L(LIQUID;1)B',
    'VV0009': 'L(LIQUID;1)A',
    'VV0010': 'L(LIQUID;0)B',
    'VV0011': 'L(LIQUID;0)A',
}
dbf.elements = ['CR', 'NI', 'VA'] # workaround issue with callables of subsystem
ctx = setup_context(dbf, ds)
# weight from YAML file
ctx['zpf_kwargs']['data_weight'] = 20.0
max_phase_regions = max([len(data['phase_regions']) for data in ctx['zpf_kwargs']['zpf_data']])
print('max_phase_regions', max_phase_regions)
# shape: chain, sample, param_symbol
param_trace = np.load('2020-05-28-Cr-Ni-ZPF/trace-zpf.npy')
print('param_trace.shape', param_trace.shape)
#lnprob = np.load('UQ/Cr-Ni-run/2020-05-28-Cr-Ni-ZPF/lnprob.npy')
#lnprob_zpf = np.load('2020-05-28-Cr-Ni-ZPF/lnprob-zpf.npy')
#print('lnprob_zpf.shape', lnprob_zpf.shape)


max_phase_regions 49
param_trace.shape (24, 50, 12)
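
Before iterating, it can help to confirm that the ordering of ctx['symbols_to_fit'] matches the VV#### labels defined above, since the Newton steps and the sensitivity plots below rely on that ordering. A minimal sanity-check sketch, assuming symbols_to_fit contains exactly the VV#### names keyed in param_symbol_labels:

for sym in ctx['symbols_to_fit']:
    # Print each fitted degree of freedom next to its human-readable interaction-parameter label
    print(sym, param_symbol_labels.get(sym, '<unlabeled>'))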

In [8]:
from tqdm.auto import tqdm
parameter_trials = []
# axes: chain, sample, dataset, phase_region (plus a trailing param_symbol axis for the gradient array)
calc_lnprob = np.zeros((param_trace.shape[0], param_trace.shape[1], len(ds), max_phase_regions))
calc_lnprobgrad = np.zeros((param_trace.shape[0], param_trace.shape[1], len(ds), max_phase_regions, param_trace.shape[2]))
#calc_lnprob = np.load('2020-06-03-zpf-lnprob.npy')
#calc_lnprobgrad = np.load('2020-06-03-zpf-lnprobgrad.npy')
chain_idx = 0
# Start the Newton iterations from the first sample of chain 0 of the MCMC trace
all_parameters = [np.array(param_trace[chain_idx, 0, :])]



for iter_idx in tqdm(range(50)):
    parameters = all_parameters[iter_idx]
    # Log-probability and gradient contributions for every dataset and phase region at the current parameters
    zpf_prob_error, zpf_prob_error_gradient = calculate_zpf_error(parameters=parameters,
                                                                  **ctx.get('zpf_kwargs'))
    calc_lnprob[chain_idx, iter_idx, :, :] = zpf_prob_error
    calc_lnprobgrad[chain_idx, iter_idx, :, :, :] = np.array(zpf_prob_error_gradient)
    # Gradient of the total log-likelihood, summed over datasets and phase regions
    ll_grad = np.sum(calc_lnprobgrad[chain_idx, iter_idx, :, :, :], axis=(0, 1))
    # Rank-1 (outer-product) Hessian approximation built from the gradient of -log(lnprob)
    df_grad = -np.nansum(calc_lnprobgrad[chain_idx, iter_idx, :, :, :] / calc_lnprob[chain_idx, iter_idx, :, :, np.newaxis], axis=(0, 1))
    hess = np.outer(df_grad, df_grad)
    # Newton-like step; lstsq handles the singular rank-1 Hessian via a pseudo-inverse
    delta_theta = np.linalg.lstsq(hess, ll_grad, rcond=None)[0]
    #print('zpf_prob_error', zpf_prob_error)
    #print('ll_grad', ll_grad)
    #print('df_grad', df_grad)
    #print('hess', hess)
    print(iter_idx, delta_theta)
    all_parameters.append(np.array(parameters + delta_theta))


0 [-2.15630237e-01 -1.55819406e-04  2.38712145e-01  1.06718280e-04
 -1.02370051e-01 -6.48955208e-05 -3.87937099e-01 -2.60643580e-04
  1.05025907e-01  6.00800901e-05 -1.84371828e-01 -1.17301722e-04]
1 [-5.43768335e-01 -3.95573838e-04  2.03778456e-01 -7.27884029e-06
 -1.59922559e-01 -9.80170413e-05 -5.05478790e-01 -3.49055801e-04
  2.76777545e-01  1.60100337e-04 -4.63710598e-01 -2.90540007e-04]
2 [-8.02708592e-02 -5.90516531e-05 -2.11517863e-01 -1.63634923e-04
  9.19067641e-03  7.25845833e-06  1.56235782e-01  1.00003870e-04
  3.90373329e-02  2.27001142e-05 -3.18029643e-02 -1.90058243e-05]
3 [-4.94563539e-01 -3.57355516e-04 -3.36182556e-01 -3.48802995e-04
 -7.81303843e-02 -4.28792391e-05 -2.02210789e-01 -1.38712743e-04
  2.96385961e-01  1.73563705e-04 -4.67500431e-02 -2.84356550e-05]
4 [-3.66981255e-01 -2.63241921e-04  1.65411608e-02 -9.01498526e-05
  2.50820740e-02  1.60012144e-05  8.06662629e-02  3.55777264e-05
  1.33006852e-01  7.88382963e-05 -5.12601271e-01 -3.10438537e-04]
5 [-9.73736554e-02 -7.31979216e-05 -1.76084611e-01 -1.41635895e-04
 -4.79811973e-02 -2.84923134e-05 -1.51333548e-01 -9.72136122e-05
  7.68854445e-02  4.50398063e-05  1.91038272e-01  1.16574897e-04]
6 [-5.86476683e-01 -4.23452892e-04 -2.69449914e-01 -3.32189255e-04
 -4.14414176e-02 -2.63966082e-05 -1.75826035e-01 -1.33298390e-04
  2.63534964e-01  1.56137749e-04 -2.40221262e-01 -1.40795598e-04]
7 [-4.56430204e-01 -3.46356831e-04 -5.62744364e-01 -5.08690043e-04
 -2.97396896e-02 -2.04011025e-05 -1.80977527e-01 -1.38569865e-04
  1.21809931e-01  7.29429804e-05  7.55504450e-02  5.04130905e-05]
8 [-6.58855709e-02 -4.69273661e-05  1.21759957e-01  5.92188646e-05
  6.23112019e-03  2.50073081e-06  9.57445458e-03  6.38234332e-07
  1.52474364e-02  9.14217447e-06 -2.04345540e-01 -1.24215181e-04]
9 [-2.22434628e-01 -1.67543086e-04  1.39201417e-01  1.41329084e-05
 -2.05036496e-02 -1.61039201e-05 -9.98475297e-02 -7.99997559e-05
  5.37503450e-02  3.24980280e-05 -3.57106506e-01 -2.17741253e-04]
10 [-8.92084283e-03 -1.43600989e-05 -2.02607751e-01 -1.44267517e-04
  4.65653295e-05  1.08287836e-06 -3.28195896e-02 -2.14143346e-05
 -2.25432516e-02 -1.33210031e-05  1.64913860e-01  1.00092653e-04]
11 [-7.63102769e-01 -6.38518714e-04 -9.39184524e-01 -9.61404125e-04
 -4.11335057e-02 -2.93928237e-05 -4.40749947e-01 -3.41233819e-04
 -3.19689103e-02 -1.50662903e-05 -1.61591130e-01 -1.03310179e-04]
12 [-1.71524722e-02 -1.34933083e-05  1.76595440e-01  1.01607279e-04
 -7.71670250e-03 -6.81890317e-06 -3.39141133e-02 -2.70569464e-05
  1.36044816e-03  9.41115406e-07 -1.77390227e-01 -1.07857553e-04]
13 [-1.95991973e-02 -1.93716350e-05  1.86026782e-01  9.70879898e-05
  2.70925890e-03  8.53790662e-07 -2.48212091e-02 -2.13524482e-05
 -1.15477457e-02 -6.64758563e-06 -2.22374284e-01 -1.36518731e-04]
14 [ 3.87613171e-02 -2.06274445e-05 -5.25492304e-01 -4.23467545e-04
  3.60523353e-02  3.17310964e-05 -1.19325253e-01 -7.74221444e-05
 -1.48161168e-01 -8.73279840e-05  3.70068181e-01  2.18943274e-04]
15 [-2.07278106e-02 -1.54345386e-05  1.95089773e-01  1.12370067e-04
 -1.08301867e-03 -2.57401932e-06 -1.88519193e-02 -1.78262122e-05
  4.32434633e-03  2.69725721e-06 -2.06551512e-01 -1.25216291e-04]
16 [-2.04138108e-02 -1.71292452e-05  1.61893499e-01  8.73373843e-05
  7.25767479e-03  4.20328561e-06 -7.46718545e-04 -4.27417616e-06
 -2.26848938e-04 -4.18794727e-08 -1.97403442e-01 -1.20678405e-04]
17 [-1.83794135e-01 -4.20090119e-04  5.57737726e-02 -7.13091333e-04
  2.99763624e-01  2.55214598e-04 -6.08119356e-01 -4.14451086e-04
 -4.97565540e-01 -2.93505980e-04 -1.31688259e+00 -8.60444099e-04]
18 [ 9.06707588e-03  1.59141095e-06 -2.33822712e-01 -1.60495992e-04
  1.31376795e-02  1.43509431e-05  6.02123962e-02  4.88926438e-05
  9.49136089e-03  5.15856022e-06  1.79964886e-01  1.08429419e-04]
19 [ 1.47592685e-02  5.42012961e-06 -1.82291475e-01 -1.23932230e-04
 -3.88704539e-03  1.49430746e-06 -7.58423865e-03  1.52965091e-06
  7.00841167e-03  3.71716395e-06  1.87302318e-01  1.12930314e-04]
20 [ 1.92162200e-02  8.41170417e-06 -1.25543241e-01 -8.62012676e-05
 -1.53522714e-02 -6.48076749e-06 -6.36270581e-02 -3.63835294e-05
  8.02500234e-03  4.31250297e-06  1.81667980e-01  1.09339524e-04]
21 [ 2.85648173e-02  1.18743151e-05 -1.19577369e-01 -9.01319716e-05
 -2.99349543e-02 -1.26612981e-05 -1.24036131e-01 -7.19273474e-05
  3.08434751e-02  1.74724928e-05  2.35657659e-01  1.40606327e-04]
22 [-3.51418073e-02 -4.45367308e-05  3.81909918e-01  1.43195710e-04
  5.60357290e-02  6.53361547e-05  3.74644781e-02  4.85616935e-05
  1.63738639e-01  9.37254254e-05 -4.30929576e-01 -2.75637271e-04]
23 [ 8.40956696e-03  2.77419566e-06 -1.41627865e-01 -9.63809309e-05
  7.90322399e-03  8.27985472e-06  3.49069947e-02  2.76676314e-05
  2.26040119e-03  1.09754470e-06  1.10492793e-01  6.65332273e-05]
24 [ 1.51489942e-02  5.92256285e-06 -1.46935018e-01 -1.01113611e-04
 -4.83713383e-04  3.31272872e-06 -1.31657713e-03  4.39871748e-06
  4.04570866e-03  2.01960384e-06  1.46892629e-01  8.81640494e-05]
25 [ 5.29547772e-02  2.16419261e-05 -1.86118048e-01 -1.47115346e-04
 -2.55370199e-02 -4.72214344e-06 -1.35206335e-01 -7.42426862e-05
  3.27423880e-02  1.80402436e-05  3.08590032e-01  1.82295939e-04]
26 [-1.36018461e-02 -1.10119228e-05  1.31562490e-01  6.87550155e-05
  3.31273836e-02  2.36810597e-05  7.58580756e-02  4.81188797e-05
  5.14529062e-03  2.84989991e-06 -2.09115154e-01 -1.28141265e-04]
27 [ 1.42258263e+00  2.00950285e-04 -1.00428287e+00 -2.87817471e-03
  8.37299187e-01  1.23560356e-03 -1.07625899e+00 -6.11027778e-05
  1.86755479e+00  1.04218679e-03  1.34207029e+00  4.82124759e-04]
28 [-1.89466481e-01 -1.33015255e-04  1.41159200e-01  7.84313227e-06
  3.35379494e-01  2.06485701e-04  1.04296879e+00  6.35880346e-04
 -4.95363204e-02 -2.36399410e-05 -1.20269419e+00 -7.11158245e-04]
29 [-4.95086074e-03 -3.98464658e-06  2.23567783e-01  1.37068984e-04
 -2.64873333e-02 -1.92942586e-05 -9.34215733e-02 -6.66208323e-05
  4.47451668e-03  2.66757646e-06 -1.57029494e-01 -9.55338815e-05]
30 [-1.35873028e-03 -2.80687584e-06  1.77709176e-01  1.06897419e-04
 -1.97171522e-02 -1.44643178e-05 -6.69429563e-02 -4.85240214e-05
 -3.00864932e-03 -1.74300848e-06 -1.38611029e-01 -8.48414554e-05]
31 [ 4.19910271e-03 -4.08093631e-06  1.99171165e-01  1.13095803e-04
 -2.48936399e-02 -1.84764292e-05 -7.77741406e-02 -5.93016812e-05
 -2.57349389e-02 -1.51400183e-05 -1.84743768e-01 -1.14163859e-04]
32 [ 1.61296964e-01  2.81806866e-05 -5.19594729e-01 -4.31788266e-04
 -1.30720802e-01 -9.41402201e-05 -4.15859380e-01 -3.17752559e-04
 -3.70330643e-01 -2.18197435e-04  3.93107137e-01  2.27063540e-04]
33 [-3.68088217e-02 -2.58601622e-05  1.55824157e-01  8.64863638e-05
  2.68883120e-02  1.46450814e-05  8.99988578e-02  4.90764350e-05
 -1.17304323e-03 -3.22296783e-07 -2.72228694e-01 -1.64231437e-04]
34 [-2.63982564e-02 -2.02582557e-05  1.14349113e-01  6.08826617e-05
  1.66375951e-02  9.09849716e-06  5.98637239e-02  3.22230245e-05
 -5.44835976e-03 -3.06313646e-06 -2.05506239e-01 -1.25027546e-04]
35 [-6.53642953e-02 -7.58079054e-05  1.95995877e-01  4.89536404e-05
  8.61590573e-03  6.44632076e-07  8.74767908e-02  2.77863568e-05
 -8.63180733e-02 -5.06318299e-05 -5.46659500e-01 -3.38625335e-04]
36 [ 1.26159732e-02  3.56622601e-06 -1.38749041e-01 -9.35356654e-05
 -2.26168227e-02 -1.33000770e-05 -5.60219594e-02 -3.46283607e-05
  1.57965910e-03  7.13802317e-07  1.72788190e-01  1.04284604e-04]
37 [ 1.74860638e-02  4.37006974e-06 -1.47875060e-01 -1.02468848e-04
 -2.78461833e-02 -1.69404928e-05 -7.84170442e-02 -5.07825098e-05
 -8.18963162e-03 -5.11407725e-06  1.87331256e-01  1.12648807e-04]
38 [-5.46034359e-02 -2.13584058e-04  2.07398355e-01 -2.23816723e-04
 -3.13141884e-01 -2.08826494e-04 -9.37676266e-01 -7.02796367e-04
 -2.90328545e-01 -1.70808892e-04 -5.29822440e-01 -3.57555850e-04]
39 [ 7.39349614e-03  2.73183982e-06 -1.51145742e-01 -1.04461816e-04
  5.31455282e-02  3.88936269e-05  1.57277240e-01  1.07260805e-04
 -2.04682478e-02 -1.20989171e-05  1.26643360e-02  7.78713642e-06]
40 [ 5.11838624e-03  1.49392114e-06 -1.28477351e-01 -8.84094971e-05
  3.81543095e-02  2.78939518e-05  1.20040656e-01  8.19461549e-05
 -1.45086581e-02 -8.63460286e-06  2.16984108e-02  1.31162621e-05]
41 [ 2.91720186e-03 -1.93901530e-07 -1.29817758e-01 -8.89235978e-05
  2.86653781e-02  2.08362475e-05  1.08736548e-01  7.34464082e-05
 -1.12095508e-02 -6.73180786e-06  2.86069874e-02  1.71253646e-05]
42 [ 1.85316859e-03 -1.28108001e-06 -1.39661957e-01 -9.65438964e-05
  2.56569177e-02  1.88279061e-05  1.09434050e-01  7.40812894e-05
 -5.92347650e-03 -3.62239134e-06  3.77912239e-02  2.24783557e-05]
43 [ 7.27599219e-04 -2.09316568e-05 -4.59155111e-01 -3.58613519e-04
  6.90121334e-02  5.69501308e-05  3.43803722e-01  2.36001892e-04
  4.21862505e-02  2.37404278e-05  1.12569151e-01  6.11493442e-05]
44 [ 6.65466814e-04  4.80952358e-07  1.67539993e-01  1.03086347e-04
 -3.55331489e-02 -2.36345701e-05 -9.66410113e-02 -6.45602251e-05
  2.49072996e-02  1.44800136e-05 -7.93027544e-02 -4.92125698e-05]
45 [ 5.54935288e-03  2.28867117e-06  1.83063240e-01  1.10140102e-04
 -4.79701193e-02 -3.14078717e-05 -1.26362308e-01 -8.38945024e-05
  3.12957944e-02  1.82396643e-05 -6.99519246e-02 -4.40533772e-05]
46 [ 7.73102780e-02  2.49666640e-05  2.58716769e-01  9.95073897e-05
 -1.56552597e-01 -8.97565581e-05 -5.20880133e-01 -3.33026898e-04
  1.31515707e-01  7.63135945e-05  1.98306245e-01  1.09624413e-04]
47 [-2.31311839e-03 -2.82062140e-06 -8.10889287e-02 -5.90254032e-05
  4.33487247e-02  3.07712971e-05  1.42677924e-01  9.52286160e-05
 -1.17576804e-02 -6.84527916e-06 -4.71829972e-02 -2.86019350e-05]
48 [-4.43236272e-03 -4.62720385e-06 -8.70098332e-02 -6.34826404e-05
  4.06254775e-02  2.81865859e-05  1.47142299e-01  9.69961287e-05
 -1.19915272e-02 -6.97030788e-06 -5.00492430e-02 -3.03758107e-05]
49 [-6.58262974e-03 -7.54045004e-06 -1.06448636e-01 -8.13506638e-05
  5.32201529e-02  3.67102398e-05  1.91040601e-01  1.25512536e-04
 -1.42215269e-02 -8.29173647e-06 -7.48866059e-02 -4.58735301e-05]
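
Written out, the step implemented in the loop above is a rank-1 Newton-like update, as read from the code (lstsq applies a pseudo-inverse because the outer-product Hessian is singular):

\[
g_i = -\sum_{d,r} \frac{1}{\ell_{d,r}} \frac{\partial \ell_{d,r}}{\partial \theta_i},
\qquad
H \approx g\, g^{\mathsf{T}},
\qquad
\theta_{k+1} = \theta_k + H^{+} \sum_{d,r} \nabla_{\theta}\, \ell_{d,r},
\]

where \(\ell_{d,r}\) is the (weighted) ZPF log-probability contribution of phase region r in dataset d. Since recomputing all 50 steps is expensive, the filled calc_lnprob and calc_lnprobgrad arrays can be written to disk for reuse, matching the commented-out np.load calls in the cell above (e.g. np.save('2020-06-03-zpf-lnprob.npy', calc_lnprob) and np.save('2020-06-03-zpf-lnprobgrad.npy', calc_lnprobgrad)).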


In [9]:
%matplotlib inline
import matplotlib.pyplot as plt
plt.plot(np.arange(50), calc_lnprob[chain_idx, :, :].sum(axis=(-2, -1)))
plt.xlabel('iteration')
plt.ylabel('lnprob')


Out[9]:
Text(0, 0.5, 'lnprob')

In [14]:
%matplotlib inline
import matplotlib.pyplot as plt
from espei.plot import dataplot
from pycalphad import variables as v
from pycalphad import binplot
from pycalphad.core.solver import InteriorPointSolver, SundmanSolver

fig = plt.figure(figsize=(9,9))
ax = fig.gca()
trial_idx = 0
binplot(dbf, ['CR', 'NI', 'VA'], ['A1','A2', 'LIQUID'], {v.P: 101325, v.T: (800, 2200, 10), v.X('NI'): (0,1,0.02), v.N:1},
        plot_kwargs={'ax': ax},
        eq_kwargs={'solver': SundmanSolver(),
                   'parameters': dict(zip(ctx['symbols_to_fit'], all_parameters[trial_idx]))})
dataplot(['CR', 'NI', 'VA'], ['A1', 'A2', 'LIQUID'], {v.P: 101325, v.T: (1,1,1), v.X('NI'): (1, 1, 1)}, ds, ax=ax)
ax.set_title(f'Cr-Ni Newton Trial {trial_idx}')


Out[14]:
Text(0.5, 1.0, 'Cr-Ni Newton Trial 0')

In [15]:
%matplotlib inline
import matplotlib.pyplot as plt
from espei.plot import dataplot
from pycalphad import variables as v
from pycalphad import binplot
from pycalphad.core.solver import InteriorPointSolver, SundmanSolver

fig = plt.figure(figsize=(9,9))
ax = fig.gca()
trial_idx = 49
binplot(dbf, ['CR', 'NI', 'VA'], ['A1','A2', 'LIQUID'], {v.P: 101325, v.T: (800, 2200, 10), v.X('NI'): (0,1,0.02), v.N:1},
        plot_kwargs={'ax': ax},
        eq_kwargs={'solver': SundmanSolver(),
                   'parameters': dict(zip(ctx['symbols_to_fit'], all_parameters[trial_idx]))})
dataplot(['CR', 'NI', 'VA'], ['A1', 'A2', 'LIQUID'], {v.P: 101325, v.T: (1,1,1), v.X('NI'): (1, 1, 1)}, ds, ax=ax)
ax.set_title(f'Cr-Ni Newton Trial {trial_idx}')


Out[15]:
Text(0.5, 1.0, 'Cr-Ni Newton Trial 49')
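
To quantify how far the Newton steps moved the parameters between the two diagrams above, a small sketch comparing the starting and final parameter vectors, using only identifiers already defined in this notebook:

for sym, p_start, p_end in zip(ctx['symbols_to_fit'], all_parameters[0], all_parameters[49]):
    # Print each parameter's value at trial 0 and trial 49 next to its label
    print(f"{param_symbol_labels.get(sym, sym):>14s}  trial 0: {p_start: .6g}  trial 49: {p_end: .6g}")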

In [19]:
%matplotlib inline
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(12,6))
ax = fig.gca()
for data_idx, dataset in enumerate(ctx['zpf_kwargs']['zpf_data']):
    data_calc_lnprob = np.sum(calc_lnprob[:, :, data_idx, :], axis=-1)
    # lnprob trace for the Newton chain (chain_idx); the cross-chain std feeds only the commented-out band below
    avg_calc_lnprob = data_calc_lnprob[chain_idx]
    std_calc_lnprob = np.std(data_calc_lnprob, axis=0)
    ax.plot(np.arange(50), avg_calc_lnprob, label=dataset['dataset_reference'])
    #ax.fill_between(np.arange(50), avg_calc_lnprob+std_calc_lnprob, avg_calc_lnprob-std_calc_lnprob, alpha=0.1)
ax.set_xlabel('iteration')
ax.set_ylabel('lnprob')
ax.set_title('trace-zpf.npy lnprob')
ax.legend()


Out[19]:
<matplotlib.legend.Legend at 0x2ab2b904518>

In [21]:
%matplotlib inline
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(12,6))
ax = fig.gca()
for data_idx, dataset in enumerate(ctx['zpf_kwargs']['zpf_data']):
    data_calc_lnprob = np.sum(calc_lnprob[:, :, data_idx, :], axis=-1)
    # lnprob trace for the Newton chain (chain_idx); the cross-chain std feeds only the commented-out band below
    avg_calc_lnprob = data_calc_lnprob[chain_idx]
    std_calc_lnprob = np.std(data_calc_lnprob, axis=0)
    num_phase_regions = len(dataset['phase_regions'])
    ax.plot(np.arange(50), avg_calc_lnprob / num_phase_regions, label=dataset['dataset_reference'])
    #ax.fill_between(np.arange(50), (avg_calc_lnprob+std_calc_lnprob) / num_phase_regions,
    #                               (avg_calc_lnprob-std_calc_lnprob) / num_phase_regions, alpha=0.1)
ax.set_xlabel('iteration')
ax.set_ylabel('average lnprob per phase region')
ax.set_title('Newton average lnprob')
ax.legend()


Out[21]:
<matplotlib.legend.Legend at 0x2ab2bd7c860>

In [10]:
%matplotlib inline
import matplotlib.pyplot as plt

fig = plt.figure(figsize=(12,9))
ax = fig.gca()
for data_idx, dataset in enumerate(ctx['zpf_kwargs']['zpf_data']):
    data_calc_lnprobgrad = np.sum(calc_lnprobgrad[:, :, data_idx, :, :], axis=-2)
    # Mean over the chain axis; only chain 0 is populated above, so this is the chain-0 gradient scaled by 1/n_chains
    mean_calc_lnprobgrad = np.mean(data_calc_lnprobgrad, axis=0)
    ax.plot(np.arange(50), np.log(np.linalg.norm(mean_calc_lnprobgrad, axis=-1)), label=dataset['dataset_reference'])
ax.set_xlabel('iteration')
ax.set_ylabel('log-norm lnprob gradient')
ax.legend()


Out[10]:
<matplotlib.legend.Legend at 0x2ab29425630>
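
For reference, the quantity plotted above for each dataset d is the log of the Euclidean norm of its lnprob gradient summed over phase regions, averaged over the chain axis:

\[
\log \left\lVert \frac{1}{N_{\text{chains}}} \sum_{r} \nabla_{\theta}\, \ell_{d,r} \right\rVert_2
\]

Only chain 0 is populated here, so the \(1/N_{\text{chains}}\) factor just shifts every curve by the same constant.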

In [22]:
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
fig = plt.figure(figsize=(9,6))
ax = fig.gca()
trace_param_std = np.std(param_trace[:, :, :], axis=(0, 1))
tdbdb_param_std = 0 # TODO
# Averaging Scheme No. 1 -- what other possibilities?
for data_idx, dataset in enumerate(ctx['zpf_kwargs']['zpf_data']):
    data_calc_lnprobgrad = np.sum(calc_lnprobgrad[:, :, data_idx, :, :], axis=-2)
    # Gradient for the Newton chain only (kept under the 'mean_' name used in the previous cell)
    mean_calc_lnprobgrad = data_calc_lnprobgrad[chain_idx]
    ax.plot(np.arange(50), np.abs(mean_calc_lnprobgrad).dot(trace_param_std), label=dataset['dataset_reference'])
ax.set_xlabel('iteration')
ax.set_ylabel('Total Sensitivity')
ax.legend()


Out[22]:
<matplotlib.legend.Legend at 0x2ab2be47f60>
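
As implemented above, the "Total Sensitivity" of dataset d at each iteration is the parameter-uncertainty-weighted magnitude of that dataset's lnprob gradient, with \(\sigma_i\) the standard deviation of parameter \(\theta_i\) over the MCMC trace:

\[
S_d = \sum_i \left| \sum_r \frac{\partial \ell_{d,r}}{\partial \theta_i} \right| \sigma_i
\]

The next cell plots the per-parameter counterpart for chain 0, \(S_i = \left| \sum_{d,r} \partial \ell_{d,r} / \partial \theta_i \right| \sigma_i\), one curve per fitted symbol.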

In [24]:
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import cm
from cycler import cycler

fig = plt.figure(figsize=(9,6))
ax = fig.gca()
custom_cycler = (cycler(color='bgrcmyk') *
                 cycler(linestyle=['-', '--', ':', '-.']))
ax.set_prop_cycle(custom_cycler)
trace_param_std = np.std(param_trace[:, :, :], axis=(0,1))
tdbdb_param_std = 0 # TODO
total_calc_lnprobgrad = np.sum(calc_lnprobgrad[0, :, :, :, :], axis=(1, 2))
total_calc_sensitivity = total_calc_lnprobgrad * trace_param_std
for param_idx, param_symbol in enumerate(ctx['symbols_to_fit']):
    ax.plot(np.arange(50), np.abs(total_calc_sensitivity[:, param_idx]), label=param_symbol_labels[param_symbol])

ax.set_xlabel('iteration')
ax.set_ylabel('Total Sensitivity')
ax.legend()


Out[24]:
<matplotlib.legend.Legend at 0x2ab2c07d198>
