In [1]:
# Auto-reload edited project modules (e.g. MNM_nb, DODE) on every cell
# execution, so library edits are picked up without restarting the kernel.
%load_ext autoreload
%autoreload 2

In [2]:
# Imports: standard library first, then third-party.
# pandas is imported explicitly here because cell 8 uses `pd.DataFrame`;
# previously `pd` only resolved because a later `from DODE import *`
# happened to leak the name into the notebook namespace.
import os
import sys
import hashlib
import time
import shutil

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

In [3]:
# Make the project-local packages importable: the network builder lives in
# side_project/network_builder, the DODE python library in pylib.
MNM_nb_folder = os.path.join('..', '..', '..', 'side_project', 'network_builder')
python_lib_folder = os.path.join('..', '..', 'pylib')
for extra_path in (MNM_nb_folder, python_lib_folder):
    sys.path.append(extra_path)

In [4]:
# Project-local imports (resolvable thanks to the sys.path additions above):
# MNM_nb provides MNM_network_builder; DODE provides the DODE/PDODE estimators.
# NOTE(review): star imports pollute the namespace and hide where names come
# from — prefer explicit `from MNM_nb import MNM_network_builder` etc.
from MNM_nb import *
import MNMAPI
from DODE import *

In [5]:
data_folder = os.path.join('..', '..', '..', 'data', 'input_files_2link_fix')

In [6]:
# Load the network description (the printed output shows it reads at least
# MNM_config and MNM_pathtable) from the input folder.
nb = MNM_network_builder()
nb.load_from_folder(data_folder)


MNM_config
MNM_pathtable

In [7]:
# Estimation configuration: fit against observed link flows only (travel
# times disabled), using 10 observation samples on link 3 and two candidate
# paths.
config = {
    'use_link_flow': True,    # use observed link flows in the loss
    'use_link_tt': False,     # do not use link travel times
    'link_flow_weight': 1,
    'link_tt_weight': 1,
    'num_data': 10,           # number of observation samples
    'observed_links': [3],
    'paths_list': [0, 1],
}

In [8]:
# Synthetic ground truth: 10 time intervals of "true" flow on the observed
# link, plus config['num_data'] noisy observation frames (Gaussian noise,
# sigma = 0.1).
# Fixes: seed the RNG so the synthetic data (and hence the estimation runs
# below) is reproducible, name the interval count instead of repeating the
# magic number 10, and take the observed-link column from config instead of
# hard-coding [3].
np.random.seed(0)
num_intervals = 10
x_real = np.random.rand(num_intervals) * 10
data_dict = dict()
data_dict['link_flow'] = [
    pd.DataFrame(index=range(num_intervals),
                 columns=config['observed_links'],
                 data=x_real + np.random.randn(num_intervals) / 10)
    for _ in range(config['num_data'])
]

In [9]:
# Build both estimators on the same network and config so their results can
# be compared side by side. PDODE is presumably a probabilistic variant of
# DODE (its Out[11] returns a pair of arrays vs. DODE's single array) —
# TODO confirm against DODE.py.
pdode = PDODE(nb, config)
dode = DODE(nb, config)

In [10]:
# Register the same synthetic observations with both estimators.
pdode.add_data(data_dict)
dode.add_data(data_dict)

In [11]:
pdode.estimate_path_flow(init_scale = 0.1, step_size = 0.1, max_epoch = 100, adagrad = False)


Epoch: 0 Loss: 11.757367303234277
Epoch: 1 Loss: 6.105099608082027
Epoch: 2 Loss: 11.180692640740254
Epoch: 3 Loss: 6.677313105214476
Epoch: 4 Loss: 3.869331699516697
Epoch: 5 Loss: 2.486360505056447
Epoch: 6 Loss: 1.6651075479942985
Epoch: 7 Loss: 1.1924608247069366
Epoch: 8 Loss: 0.8333362800341699
Epoch: 9 Loss: 0.7168863693060363
Epoch: 10 Loss: 0.5065839896182022
Epoch: 11 Loss: 0.4768904708970232
Epoch: 12 Loss: 0.39347287008214255
Epoch: 13 Loss: 0.42071290537449213
Epoch: 14 Loss: 0.36095012490540784
Epoch: 15 Loss: 0.3371443229910015
Epoch: 16 Loss: 0.31264477287982484
Epoch: 17 Loss: 0.3331970529385715
Epoch: 18 Loss: 0.3593775470437759
Epoch: 19 Loss: 0.4989425682402042
Epoch: 20 Loss: 0.4132201401798438
Epoch: 21 Loss: 0.37806338662229155
Epoch: 22 Loss: 0.4365488345297976
Epoch: 23 Loss: 0.3804573278979297
Epoch: 24 Loss: 0.365278918748501
Epoch: 25 Loss: 0.3432329223048109
Epoch: 26 Loss: 0.3792432303568204
Epoch: 27 Loss: 0.37524321109755765
Epoch: 28 Loss: 0.32179554654771964
Epoch: 29 Loss: 0.33059736220988156
Epoch: 30 Loss: 0.3472325331090653
Epoch: 31 Loss: 0.3547670697150167
Epoch: 32 Loss: 0.34747289125642894
Epoch: 33 Loss: 0.3540745223312799
Epoch: 34 Loss: 0.37577322144716463
Epoch: 35 Loss: 0.37525112904828994
Epoch: 36 Loss: 0.3402693184809466
Epoch: 37 Loss: 0.3047596777922713
Epoch: 38 Loss: 0.3794259723774299
Epoch: 39 Loss: 0.3901918403588561
Epoch: 40 Loss: 0.38098709287160204
Epoch: 41 Loss: 0.3523684422702173
Epoch: 42 Loss: 0.33790135128459003
Epoch: 43 Loss: 0.37517532112641117
Epoch: 44 Loss: 0.3494914489668049
Epoch: 45 Loss: 0.36176165726682574
Epoch: 46 Loss: 0.444552488948173
Epoch: 47 Loss: 0.3697697166132738
Epoch: 48 Loss: 0.35671853179955926
Epoch: 49 Loss: 0.33144770831883796
Epoch: 50 Loss: 0.38315622978659175
Epoch: 51 Loss: 0.35407215102698564
Epoch: 52 Loss: 0.3482268996528237
Epoch: 53 Loss: 0.32420493738701406
Epoch: 54 Loss: 0.3274464386149548
Epoch: 55 Loss: 0.34440070514561355
Epoch: 56 Loss: 0.3443508103212029
Epoch: 57 Loss: 0.3374031663279221
Epoch: 58 Loss: 0.32993275964423396
Epoch: 59 Loss: 0.3325690620147271
Epoch: 60 Loss: 0.33778628101665187
Epoch: 61 Loss: 0.2894673735002823
Epoch: 62 Loss: 0.34432309576785874
Epoch: 63 Loss: 0.34172756154307227
Epoch: 64 Loss: 0.31392100396051215
Epoch: 65 Loss: 0.3470246395163222
Epoch: 66 Loss: 0.343925217574902
Epoch: 67 Loss: 0.33781754558472743
Epoch: 68 Loss: 0.3197899229190644
Epoch: 69 Loss: 0.3293367869816669
Epoch: 70 Loss: 0.3391405147566099
Epoch: 71 Loss: 0.3182324537929512
Epoch: 72 Loss: 0.33037186879184394
Epoch: 73 Loss: 0.32766490095394
Epoch: 74 Loss: 0.3409326613582582
Epoch: 75 Loss: 0.31927308712923896
Epoch: 76 Loss: 0.33494355158359956
Epoch: 77 Loss: 0.32595654060921175
Epoch: 78 Loss: 0.33238302381705986
Epoch: 79 Loss: 0.3225415410686051
Epoch: 80 Loss: 0.316845875914521
Epoch: 81 Loss: 0.30620748908428774
Epoch: 82 Loss: 0.3177588130106591
Epoch: 83 Loss: 0.3328307228647379
Epoch: 84 Loss: 0.3192134889577847
Epoch: 85 Loss: 0.30609084030800926
Epoch: 86 Loss: 0.3490848274422416
Epoch: 87 Loss: 0.2781510351510491
Epoch: 88 Loss: 0.3266080659447076
Epoch: 89 Loss: 0.3110496777100453
Epoch: 90 Loss: 0.3220256546173966
Epoch: 91 Loss: 0.3304223673097162
Epoch: 92 Loss: 0.33550336854713153
Epoch: 93 Loss: 0.33125396633992343
Epoch: 94 Loss: 0.2892591773432455
Epoch: 95 Loss: 0.2899970802939178
Epoch: 96 Loss: 0.3111017843739722
Epoch: 97 Loss: 0.3150072460306284
Epoch: 98 Loss: 0.3260448410165583
Epoch: 99 Loss: 0.3428483225637852
Out[11]:
(array([5.40164715e+00, 2.70439462e+00, 6.27366121e+00, 3.14871879e+00,
        2.38527757e+00, 3.49794527e+00, 3.19509465e+00, 3.76041346e+00,
        4.47018764e+00, 1.00000000e-03, 2.20960100e+00, 1.14742017e+00,
        2.90420076e+00, 1.25127084e+00, 2.32957895e+00, 4.28185964e-01,
        4.54313930e+00, 2.08732752e+00, 4.81900554e+00, 3.70213947e+00]),
 array([0.00281628, 0.00748984, 0.00146018, 0.00193123, 0.001     ,
        0.00120962, 0.0027003 , 0.00658194, 0.01090673, 0.001     ,
        0.00229645, 0.01012391, 0.01269252, 0.00187091, 0.01138728,
        0.00368271, 0.01130361, 0.00889937, 0.00337555, 0.00224088]))

In [12]:
dode.estimate_path_flow(init_scale = 0.1, step_size = 0.1, max_epoch = 100, adagrad = False)


Epoch: 0 Loss: 11.803197113728075
Epoch: 1 Loss: 2.8256341277673656
Epoch: 2 Loss: 1.2936437767839513
Epoch: 3 Loss: 0.5490860891657268
Epoch: 4 Loss: 0.40393733242768476
Epoch: 5 Loss: 0.3302143070763968
Epoch: 6 Loss: 0.35228404176499595
Epoch: 7 Loss: 0.344429351560066
Epoch: 8 Loss: 0.35191967363773663
Epoch: 9 Loss: 0.3295708873021804
Epoch: 10 Loss: 0.3179565886912724
Epoch: 11 Loss: 0.32322197698013305
Epoch: 12 Loss: 0.33412861744175815
Epoch: 13 Loss: 0.31268030619491377
Epoch: 14 Loss: 0.34835948222591745
Epoch: 15 Loss: 0.356549259621039
Epoch: 16 Loss: 0.31419312321664605
Epoch: 17 Loss: 0.32239106599408995
Epoch: 18 Loss: 0.29268271171860205
Epoch: 19 Loss: 0.31593018224636377
Epoch: 20 Loss: 0.31187296149224897
Epoch: 21 Loss: 0.3112993497162563
Epoch: 22 Loss: 0.314628231721596
Epoch: 23 Loss: 0.33426551101274427
Epoch: 24 Loss: 0.3263976967004353
Epoch: 25 Loss: 0.35158624110813463
Epoch: 26 Loss: 0.2926200451181104
Epoch: 27 Loss: 0.3243952483380793
Epoch: 28 Loss: 0.33673943432958603
Epoch: 29 Loss: 0.321773232597408
Epoch: 30 Loss: 0.36485021861776723
Epoch: 31 Loss: 0.33241748612534455
Epoch: 32 Loss: 0.3254353483898903
Epoch: 33 Loss: 0.32735614342730596
Epoch: 34 Loss: 0.3240683024646809
Epoch: 35 Loss: 0.327667232891293
Epoch: 36 Loss: 0.3201014454915653
Epoch: 37 Loss: 0.3120191802913558
Epoch: 38 Loss: 0.31669456855366723
Epoch: 39 Loss: 0.34005206161578966
Epoch: 40 Loss: 0.3117655590545357
Epoch: 41 Loss: 0.3236998410044317
Epoch: 42 Loss: 0.35886347200597674
Epoch: 43 Loss: 0.3407841300130474
Epoch: 44 Loss: 0.35218448900485744
Epoch: 45 Loss: 0.33334135112424507
Epoch: 46 Loss: 0.3468442913201143
Epoch: 47 Loss: 0.3222676570531931
Epoch: 48 Loss: 0.322148303820515
Epoch: 49 Loss: 0.33993713493448435
Epoch: 50 Loss: 0.3505954450762702
Epoch: 51 Loss: 0.3283752935014982
Epoch: 52 Loss: 0.35040299195347974
Epoch: 53 Loss: 0.3394177244960096
Epoch: 54 Loss: 0.3229346241067416
Epoch: 55 Loss: 0.3066902784370019
Epoch: 56 Loss: 0.33823805807060076
Epoch: 57 Loss: 0.35059873031213273
Epoch: 58 Loss: 0.32948489787502727
Epoch: 59 Loss: 0.3060968552061367
Epoch: 60 Loss: 0.36059681072316724
Epoch: 61 Loss: 0.32536923102329923
Epoch: 62 Loss: 0.32694813759215174
Epoch: 63 Loss: 0.32962660391363563
Epoch: 64 Loss: 0.2940025235917546
Epoch: 65 Loss: 0.3151610992438482
Epoch: 66 Loss: 0.34085675454067943
Epoch: 67 Loss: 0.3456291849869009
Epoch: 68 Loss: 0.3380167341534761
Epoch: 69 Loss: 0.3285477740582635
Epoch: 70 Loss: 0.3221424202050768
Epoch: 71 Loss: 0.36056369437216035
Epoch: 72 Loss: 0.3094009425847242
Epoch: 73 Loss: 0.3496168230707276
Epoch: 74 Loss: 0.30407476354141216
Epoch: 75 Loss: 0.33006711000966743
Epoch: 76 Loss: 0.3313363960568546
Epoch: 77 Loss: 0.34376126509690985
Epoch: 78 Loss: 0.3334197250050474
Epoch: 79 Loss: 0.3081570321652406
Epoch: 80 Loss: 0.36602884425561794
Epoch: 81 Loss: 0.3689867916605032
Epoch: 82 Loss: 0.3236536865961617
Epoch: 83 Loss: 0.35131861117214436
Epoch: 84 Loss: 0.3545557515944596
Epoch: 85 Loss: 0.3144964638616911
Epoch: 86 Loss: 0.3270182578281995
Epoch: 87 Loss: 0.3465232955887167
Epoch: 88 Loss: 0.3240326033628952
Epoch: 89 Loss: 0.33053643203753963
Epoch: 90 Loss: 0.3441735555585501
Epoch: 91 Loss: 0.33283331910221
Epoch: 92 Loss: 0.3084275641811651
Epoch: 93 Loss: 0.34672899675435703
Epoch: 94 Loss: 0.3599415096864461
Epoch: 95 Loss: 0.3342378937169406
Epoch: 96 Loss: 0.32920011197764254
Epoch: 97 Loss: 0.32871106546008916
Epoch: 98 Loss: 0.3191947603752492
Epoch: 99 Loss: 0.3414176442780213
Out[12]:
array([3.52609934, 4.58479025, 3.90191907, 5.51168109, 5.83482026,
       0.01207602, 2.24052631, 4.71973635, 2.64941212, 1.81314136,
       0.37650081, 2.97249126, 1.92548783, 2.22222206, 1.89524841,
       0.86851698, 3.46995914, 3.16587885, 1.14838629, 7.37887042])

In [ ]: