In [166]:
import os
from google.colab import drive
drive.mount('/content/drive')
os.chdir("/content/drive/My Drive/")


Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).

In [0]:
from importlib.machinery import SourceFileLoader
methods = SourceFileLoader('methods', '/content/drive/My Drive/methods.py').load_module()

In [0]:
%tensorflow_version 1.x
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import random
import cv2
import pickle
import h5py

from methods import AverageSmooth
from methods import RMSE
from methods import CrossCorrelation
from methods import calcFurierTransform

In [0]:
'''
Parameters:
'''
wd = 200 # deepness of moving window in time 
wh = 64 # wertical and horizontal width of images at downscaling
color_channel = 1
rate_epoch_num = 1000 # Number of epochs in training of rates

Definitions


In [0]:
'''
Load in signal from file
'''
def readFromFile(path,filename,signal):
  print("Load to from file:"+path+filename)
  print("...")
  print("loaded")
  with open(path+filename,"r") as ins:
    lines = ins.readlines()
    for line in lines:
      signal.append(float(line))
  return np.asarray(signal)

In [0]:
'''
save signal to file
'''
def saveToFile(path,filename,signal):
  print("Save to this file:"+path+filename)
  print("...")
  print("saved")
  with open(path+filename,"w") as o:
    for i in range(len(signal)):
      o.write(str(signal[i]))
      o.write("\n")

In [0]:
def loadSignalSet(path,set):
  ujset = []
  with open(path,'r') as ins:
    lines = ins.readlines()
    for line in lines:
      values = line.split(',')
      values = values[:-1] # At the end we have an extra ','
      row = []
      for val in values:
        row.append(float(val))
      ujset.append(row)
  return np.asarray(ujset)

Load Data


In [173]:
'''
Load the calculated signal
'''
signal=[]
signal = loadSignalSet("output/MixedDataset2",signal)
print(signal.shape)


(159, 200)

In [174]:
'''
Load the calculated signal
'''
reference = []
reference = readFromFile("output/","MixedDataset_ref2",reference)
print(reference.shape)


Load to from file:output/MixedDataset_ref2
...
loaded
(159,)

Build Network

Rate Head


In [0]:
tf.reset_default_graph()

In [0]:
'''
Initialization
'''

oneD_kernels = [1, 64, 64, 32, 16, 8]
oneD_win_sizes = [32, 32, 16, 8, 4]

batch_size = 1
leaning_rate = 1e-4

THRESHOLDING = False
AUGMENTATION = False
BATCH_NORM = False
DROPOUT = True
UNET = False
L2 = False

In [0]:
'''
Define placeholders
'''
input_data = tf.placeholder(tf.float32, [None, wd])
ref_rate = tf.placeholder(tf.float32, [None,1])

In [200]:
layer_num = 0

'''
1D convolutions. Shape it to the shape of expected output.
'''
current_input = tf.reshape(input_data,[batch_size,1,wd])
for i in range(len(oneD_kernels)-1):
  with tf.variable_scope("1d_conv_"+str(layer_num)):
    W = tf.get_variable('weight', [oneD_win_sizes[i],oneD_kernels[i],oneD_kernels[i+1]], trainable=True) 
    conv_result = tf.nn.conv1d(current_input, W, stride=[1], padding='SAME', data_format='NCW')
    B = tf.get_variable('bias', oneD_kernels[i+1], trainable=True)  
    
    act = tf.nn.elu(conv_result)
    current_input = act    
    pooled = tf.nn.avg_pool1d(current_input, ksize=[1,2,1], strides=[2], padding='SAME', data_format='NCW')

    print(pooled) 
    current_input = pooled
    layer_num = layer_num + 1


Tensor("1d_conv_0/AvgPool1D/Squeeze:0", shape=(1, 64, 100), dtype=float32)
Tensor("1d_conv_1/AvgPool1D/Squeeze:0", shape=(1, 64, 50), dtype=float32)
Tensor("1d_conv_2/AvgPool1D/Squeeze:0", shape=(1, 32, 25), dtype=float32)
Tensor("1d_conv_3/AvgPool1D/Squeeze:0", shape=(1, 16, 13), dtype=float32)
Tensor("1d_conv_4/AvgPool1D/Squeeze:0", shape=(1, 8, 7), dtype=float32)

In [201]:
with tf.variable_scope('fully-' + str(layer_num)):
  current_shape = current_input.get_shape()
  feature_length = int(current_shape[1] * current_shape[2])
  fully_connected = tf.reshape(current_input, [-1, feature_length])
  w = tf.get_variable('weight', [feature_length, 14])
  b = tf.get_variable('bias', [1])
  fully_connected = tf.matmul(fully_connected, w)
  fully_connected = tf.add(fully_connected,b, name = 'output')
  fully_connected = tf.nn.relu(fully_connected)
  if (DROPOUT == True):
      fully_connected = tf.nn.dropout(fully_connected, rate = 0.3)
  print(fully_connected)

  layer_num += 1

with tf.variable_scope('fully-' + str(layer_num)):
  w = tf.get_variable('weight', [14,7])
  b = tf.get_variable('bias', [7])
  fully_connected = tf.matmul(fully_connected,w)
  fully_connected = tf.add(fully_connected,b)
  fully_connected = tf.nn.relu(fully_connected)
  if (DROPOUT == True):
      fully_connected = tf.nn.dropout(fully_connected, rate = 0.3)
  print(fully_connected)

  layer_num += 1

with tf.variable_scope('fully-' + str(layer_num)):
  w = tf.get_variable('weight', [7,1])
  b = tf.get_variable('bias', [1])
  fully_connected = tf.matmul(fully_connected,w)
  fully_connected = tf.add(fully_connected,b)
  #fully_connected = tf.nn.relu(fully_connected)
  
  layer_num += 1

output = fully_connected
myrate = output


Tensor("fully-5/dropout/mul_1:0", shape=(1, 14), dtype=float32)
Tensor("fully-6/dropout/mul_1:0", shape=(1, 7), dtype=float32)

In [0]:
'''
Define loss function and optimizer
'''
with tf.variable_scope('loss'):
    loss = tf.reduce_mean(tf.losses.mean_squared_error(ref_rate, myrate))

with tf.name_scope('optimizer'):
    optimizer = tf.train.AdamOptimizer(leaning_rate).minimize(loss)

Run Training


In [0]:
def mynorm(sig):
  avg = np.mean(sig)
  sig = sig - np.ones_like(sig)*avg
  sig = sig / np.max(sig)
  return sig

In [0]:
# 1204 + 1504                             + 1604

In [0]:
train_epoch_num = 1504
batch_size = 1
sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
saver = tf.train.Saver()
indices = list(range(0,len(reference)))
#saver.restore(sess, "checkpoint/TrainCNN-FlowOnMix-1204")
loss_plot = np.zeros(train_epoch_num*int(len(indices)/batch_size))

In [0]:
step = 0
for epoch in range(train_epoch_num): 
    print("new epoch " + str(epoch))
     
    # create list of indices 
    indices = list(range(0,len(reference)))
    length = len(indices)
    
    avg_loss = 0
    while(batch_size<len(indices)):

        # Make batch of input data
        used_in_batch = random.sample(indices,batch_size)
        
        batch_x = []
        for i in used_in_batch:
          batch_x.append(mynorm(signal[i]))
        batch_x = np.asarray(batch_x)
        batch_x = np.reshape(batch_x, [batch_size, wd])

        batch_y = []
        for i in used_in_batch:
          batch_y.append(reference[i])
        batch_y = np.asarray(batch_y)
        batch_y = np.reshape(batch_y,[batch_size,1])

        _, l = sess.run([optimizer, loss], feed_dict={input_data: batch_x, ref_rate: batch_y})
        loss_plot[step] = l
            
        step = step + 1
        avg_loss = avg_loss + l
              
        for index in used_in_batch:
          indices.remove(index)
    print(avg_loss/length)
        
print('Saving model...')
print(saver.save(sess, "checkpoint/TrainCNN-FlowOnMix", train_epoch_num))
plt.plot(loss_plot[loss_plot!=0])


new epoch 0
2132.4455009796334
new epoch 1
2137.9378946172365
new epoch 2
2135.9885804758133
new epoch 3
2139.0611693184333
new epoch 4
2138.899405617384
new epoch 5
2118.705282919062
new epoch 6
2124.498896952695
new epoch 7
2119.2145673643868
new epoch 8
2122.0849983647186
new epoch 9
2108.681580933385
new epoch 10
2108.0862854771644
new epoch 11
2109.1767480238427
new epoch 12
2105.664172094573
new epoch 13
2103.1428547025475
new epoch 14
2097.1054328582572
new epoch 15
2098.149389878759
new epoch 16
2098.5585517163545
new epoch 17
2083.764122345163
new epoch 18
2078.277246439232
new epoch 19
2085.2408011574416
new epoch 20
2073.4028118781325
new epoch 21
2081.971306566922
new epoch 22
2083.549198366561
new epoch 23
2080.2448436809036
new epoch 24
2078.0234893223023
new epoch 25
2070.4235287072524
new epoch 26
2057.600281721391
new epoch 27
2063.1162980757417
new epoch 28
2051.9941001268303
new epoch 29
2047.8505638650379
new epoch 30
2053.000889808127
new epoch 31
2038.347690990136
new epoch 32
2036.568189704943
new epoch 33
2026.5537477889152
new epoch 34
2031.1613155341
new epoch 35
2019.382744363269
new epoch 36
2007.728432709316
new epoch 37
2010.9182165373795
new epoch 38
2001.6082205142616
new epoch 39
2003.289019314748
new epoch 40
2010.5737462073753
new epoch 41
1986.8398053631092
new epoch 42
1989.3647741161801
new epoch 43
1985.188001908596
new epoch 44
1976.3157768969265
new epoch 45
1972.2399460894508
new epoch 46
1971.4449597244743
new epoch 47
1968.9559199495136
new epoch 48
1965.7574082860406
new epoch 49
1952.9808476286114
new epoch 50
1944.9573552353577
new epoch 51
1944.2836714450668
new epoch 52
1942.8482514387406
new epoch 53
1915.8839616115738
new epoch 54
1910.2415592985333
new epoch 55
1929.5473740295795
new epoch 56
1918.2911605355125
new epoch 57
1911.7961907536728
new epoch 58
1907.7463622663006
new epoch 59
1890.2606070656448
new epoch 60
1886.2867800154777
new epoch 61
1897.0854375107483
new epoch 62
1873.3877419525722
new epoch 63
1883.082422604351
new epoch 64
1853.815771138893
new epoch 65
1862.504624276791
new epoch 66
1852.9738539209907
new epoch 67
1846.0042809060535
new epoch 68
1849.1581326850555
new epoch 69
1839.0128012603184
new epoch 70
1825.3448440263855
new epoch 71
1827.3026681576134
new epoch 72
1837.7327953794468
new epoch 73
1823.6046058126965
new epoch 74
1805.3556190346771
new epoch 75
1817.7643941243489
new epoch 76
1796.3269399966834
new epoch 77
1780.3258703459733
new epoch 78
1789.7004979931332
new epoch 79
1778.6173272282822
new epoch 80
1760.6216545824734
new epoch 81
1756.8169086384323
new epoch 82
1752.3373967776508
new epoch 83
1724.130639418116
new epoch 84
1751.3273292397553
new epoch 85
1736.3764696421113
new epoch 86
1749.5002673646939
new epoch 87
1698.8321771201847
new epoch 88
1708.292393330508
new epoch 89
1681.5583137176322
new epoch 90
1695.991848639722
new epoch 91
1682.696899605997
new epoch 92
1672.2439403413975
new epoch 93
1668.3139473777146
new epoch 94
1658.2002251583076
new epoch 95
1677.9534259532234
new epoch 96
1648.4420738939968
new epoch 97
1655.957823747359
new epoch 98
1659.026026983681
new epoch 99
1633.0852151546837
new epoch 100
1593.3525528817806
new epoch 101
1602.2896609516263
new epoch 102
1600.6048274969905
new epoch 103
1608.5822325892418
new epoch 104
1565.7286835676468
new epoch 105
1597.7410600770195
new epoch 106
1568.7601686873527
new epoch 107
1576.3043158189305
new epoch 108
1556.4292191799332
new epoch 109
1585.9157083379398
new epoch 110
1555.0742436055118
new epoch 111
1532.4890116565632
new epoch 112
1527.3070615372567
new epoch 113
1527.202601210876
new epoch 114
1521.152657754766
new epoch 115
1506.4332818565128
new epoch 116
1474.8468247899468
new epoch 117
1501.7461867422428
new epoch 118
1498.1821379271694
new epoch 119
1493.626880285875
new epoch 120
1463.6117544114215
new epoch 121
1451.426407676073
new epoch 122
1485.6191362105076
new epoch 123
1438.69380197585
new epoch 124
1401.6214673983975
new epoch 125
1418.3951186174118
new epoch 126
1422.9134228784333
new epoch 127
1435.9482260650059
new epoch 128
1410.764026713821
new epoch 129
1398.7010986520058
new epoch 130
1424.9300435384114
new epoch 131
1339.7791919348374
new epoch 132
1417.4426892838387
new epoch 133
1362.7840818009286
new epoch 134
1340.0809199495136
new epoch 135
1367.0913588325932
new epoch 136
1362.536758998655
new epoch 137
1366.0901283887947
new epoch 138
1315.190193368204
new epoch 139
1291.904895998397
new epoch 140
1314.1959437724179
new epoch 141
1309.5667635359855
new epoch 142
1304.5690343125061
new epoch 143
1297.320816663826
new epoch 144
1298.9364478633088
new epoch 145
1254.067013434644
new epoch 146
1247.4538131809834
new epoch 147
1232.0509942252681
new epoch 148
1280.440471265301
new epoch 149
1250.3087813179447
new epoch 150
1233.8716175751117
new epoch 151
1189.625361064695
new epoch 152
1196.5923849261783
new epoch 153
1157.8045901412484
new epoch 154
1214.4426724415905
new epoch 155
1148.5658483925106
new epoch 156
1182.301328599078
new epoch 157
1188.2967777372157
new epoch 158
1216.1846113264935
new epoch 159
1141.0723084143872
new epoch 160
1151.8467747430382
new epoch 161
1156.9589233818294
new epoch 162
1144.6794360658657
new epoch 163
1082.004916323056
new epoch 164
1154.1439305671356
new epoch 165
1080.5104738691318
new epoch 166
1137.8167613197422
new epoch 167
1108.848769337876
new epoch 168
1079.6257418866428
new epoch 169
1095.578307571651
new epoch 170
1042.914863931308
new epoch 171
1071.5162809000076
new epoch 172
1044.7032950059422
new epoch 173
1038.7597081166393
new epoch 174
1031.1195268353576
new epoch 175
998.924875251902
new epoch 176
1100.443237112753
new epoch 177
1053.1877583255932
new epoch 178
1074.1552019059284
new epoch 179
1035.7884756843998
new epoch 180
969.8147432706055
new epoch 181
1023.9315684803252
new epoch 182
971.8466114823548
new epoch 183
1034.548131714827
new epoch 184
1011.7594840811483
new epoch 185
931.8822093004311
new epoch 186
913.3999621793159
new epoch 187
972.8528956047394
new epoch 188
943.4725268127033
new epoch 189
975.1909279973252
new epoch 190
914.3158026671259
new epoch 191
901.8913875046017
new epoch 192
925.0671155925068
new epoch 193
875.8955988239193
new epoch 194
878.8568921509029
new epoch 195
932.6869189484314
new epoch 196
893.7861014582077
new epoch 197
860.7306971789906
new epoch 198
786.6439499765072
new epoch 199
831.9104372023037
new epoch 200
862.8025907814127
new epoch 201
798.7734750179375
new epoch 202
865.7427649947832
new epoch 203
824.9305283788053
new epoch 204
781.570312619959
new epoch 205
853.0854383084027
new epoch 206
872.4852898555731
new epoch 207
765.1454433464216
new epoch 208
775.5897474652567
new epoch 209
800.8879804903606
new epoch 210
802.3389477711054
new epoch 211
739.102212155765
new epoch 212
756.2266427492195
new epoch 213
803.1644212498136
new epoch 214
719.7845284585038
new epoch 215
728.9226972298029
new epoch 216
689.3711102564567
new epoch 217
764.1957892932982
new epoch 218
765.8556933399267
new epoch 219
670.9612146421049
new epoch 220
671.4700050931307
new epoch 221
726.7972287142052
new epoch 222
681.6874019383635
new epoch 223
623.465299122422
new epoch 224
667.4376100964212
new epoch 225
659.1343312846415
new epoch 226
680.1727545128304
new epoch 227
691.2580289011528
new epoch 228
675.8620730398586
new epoch 229
617.701691221844
new epoch 230
652.9764535206856
new epoch 231
634.9996427522516
new epoch 232
591.2543855230763
new epoch 233
643.5428071921493
new epoch 234
647.2705509023472
new epoch 235
566.8417109294132
new epoch 236
639.8329015819344
new epoch 237
584.427872042447
new epoch 238
553.1264202363391
new epoch 239
641.1970895727476
new epoch 240
604.6873585651131
new epoch 241
589.5746101632293
new epoch 242
649.4675458237144
new epoch 243
568.1206986115976
new epoch 244
594.8157933035736
new epoch 245
584.1875084187343
new epoch 246
680.3289974354905
new epoch 247
604.5553117571633
new epoch 248
613.5540019737427
new epoch 249
482.87085184642353
new epoch 250
600.2986810104277
new epoch 251
534.9446657437117
new epoch 252
601.9630865315986
new epoch 253
575.635902829521
new epoch 254
594.1989461071204
new epoch 255
531.3730943096134
new epoch 256
476.6242396799707
new epoch 257
516.2290132027633
new epoch 258
498.40255222839755
new epoch 259
420.67298513238535
new epoch 260
517.5742337913516
new epoch 261
412.144917891601
new epoch 262
460.0654462496955
new epoch 263
515.7267989206989
new epoch 264
453.08417441925536
new epoch 265
527.0825157622871
new epoch 266
397.46287341125355
new epoch 267
506.89755381353245
new epoch 268
419.2949144957388
new epoch 269
484.21371928147926
new epoch 270
589.347217556956
new epoch 271
490.5530156397887
new epoch 272
438.68984681077717
new epoch 273
457.73876774209145
new epoch 274
465.1896433958186
new epoch 275
473.7687927949134
new epoch 276
412.04772060622355
new epoch 277
426.5161198107643
new epoch 278
522.1803215847075
new epoch 279
460.70788523029984
new epoch 280
470.5230992491703
new epoch 281
510.2364366955165
new epoch 282
480.1470818520647
new epoch 283
438.56542050120584
new epoch 284
457.1866656373132
new epoch 285
452.6595148348583
new epoch 286
474.1297033165798
new epoch 287
422.96466774390365
new epoch 288
451.3028652349651
new epoch 289
465.3925241009748
new epoch 290
436.85597963308703
new epoch 291
466.4205889344309
new epoch 292
464.7338991036767
new epoch 293
423.26060384390115
new epoch 294
447.87646164693547
new epoch 295
432.5328798605123
new epoch 296
463.86838612689263
new epoch 297
471.667384320337
new epoch 298
385.11511432402915
new epoch 299
400.6338108955127
new epoch 300
407.6298667903779
new epoch 301
460.92427952807844
new epoch 302
438.5581892462441
new epoch 303
365.3435101603992
new epoch 304
452.35429480655483
new epoch 305
310.5014336626719
new epoch 306
403.57619900931167
new epoch 307
345.88074619130896
new epoch 308
364.65563462735537
new epoch 309
376.38635960265805
new epoch 310
345.22881882452646
new epoch 311
350.86265866556465
new epoch 312
314.31535571644014
new epoch 313
372.9938015131245
new epoch 314
477.07175120153704
new epoch 315
350.0602596145381
new epoch 316
376.8149989290432
new epoch 317
345.6544110770308
new epoch 318
323.82151169648523
new epoch 319
438.8935298475337
new epoch 320
400.0964855633254
new epoch 321
508.92007582543863
new epoch 322
375.0713107095762
new epoch 323
402.15136419302263
new epoch 324
391.19264032312157
new epoch 325
357.43642410354795
new epoch 326
395.6150532297368
new epoch 327
428.4150265452903
new epoch 328
412.9585041997474
new epoch 329
515.8191325769484
new epoch 330
388.02465344487496
new epoch 331
369.4378746719855
new epoch 332
393.6686796778403
new epoch 333
326.76970767365685
new epoch 334
413.50715854136075
new epoch 335
458.6800246416773
new epoch 336
380.5958967514383
new epoch 337
375.5221212668895
new epoch 338
341.3076691667801
new epoch 339
354.00945738867017
new epoch 340
342.1581490474677
new epoch 341
363.42106790896855
new epoch 342
404.7404380356014
new epoch 343
429.63702508963723
new epoch 344
365.29100669697095
new epoch 345
428.03436181292454
new epoch 346
450.45966921435996
new epoch 347
325.0189863474243
new epoch 348
403.0730237362426
new epoch 349
405.6295290351289
new epoch 350
339.95204694256324
new epoch 351
386.3280062257728
new epoch 352
424.9791715961629
new epoch 353
355.4304969282177
new epoch 354
442.05194737868817
new epoch 355
403.8895569432925
new epoch 356
393.80284781328186
new epoch 357
389.6423681913026
new epoch 358
386.29664086524264
new epoch 359
383.2158522913906
new epoch 360
360.5411999464376
new epoch 361
431.51629453261603
new epoch 362
367.76336945184806
new epoch 363
375.60083063705326
new epoch 364
361.71951110473594
new epoch 365
463.3935688506554
new epoch 366
297.66810205199243
new epoch 367
340.4440647024872
new epoch 368
468.49695812559355
new epoch 369
447.92999344773636
new epoch 370
390.96124690518064
new epoch 371
446.8006082862241
new epoch 372
440.7332947981433
new epoch 373
437.1514124550722
new epoch 374
426.478327110316
new epoch 375
392.2592346328984
new epoch 376
355.3082251246995
new epoch 377
347.2338827353369
new epoch 378
391.5288496462851
new epoch 379
349.0531979331149
new epoch 380
334.75774601141705
new epoch 381
396.7693960817425
new epoch 382
471.6022843621472
new epoch 383
371.15718754969606
new epoch 384
304.02169955517724
new epoch 385
322.08456489949856
new epoch 386
335.0100561244561
new epoch 387
368.36453745540217
new epoch 388
425.34018564911315
new epoch 389
362.75016925421266
new epoch 390
393.9608366963272
new epoch 391
430.20849976182546
new epoch 392
400.3400443393339
new epoch 393
370.8087916095161
new epoch 394
418.99926464775075
new epoch 395
341.6535503935514
new epoch 396
376.0231224201479
new epoch 397
457.310724016443
new epoch 398
490.5780932977327
new epoch 399
409.7272212918168
new epoch 400
370.16631601914855
new epoch 401
332.86257454921616
new epoch 402
380.866033755196
new epoch 403
366.3069687635644
new epoch 404
312.7578520262204
new epoch 405
397.64496458888806
new epoch 406
359.2187115756221
new epoch 407
386.12489882102176
new epoch 408
373.6544794539611
new epoch 409
387.03836492013255
new epoch 410
342.02519745337514
new epoch 411
358.490914567649
new epoch 412
323.3290467709303
new epoch 413
368.94602765395956
new epoch 414
334.4630273069788
new epoch 415
447.3807049584839
new epoch 416
425.3134330922515
new epoch 417
354.1887792022723
new epoch 418
336.7668689366594
new epoch 419
332.24491216935826
new epoch 420
365.59387400038776
new epoch 421
281.95082272762585
new epoch 422
371.06027368568584
new epoch 423
352.97903355159474
new epoch 424
376.65878047737874
new epoch 425
409.52770756639586
new epoch 426
361.18542923665836
new epoch 427
398.20225948320245
new epoch 428
322.2846714403457
new epoch 429
412.95246650587836
new epoch 430
383.4326943724106
new epoch 431
427.5569660202512
new epoch 432
303.8899456905611
new epoch 433
291.0278904982872
new epoch 434
370.57371182190604
new epoch 435
375.04936391398684
new epoch 436
348.3934084727464
new epoch 437
377.73409034387475
new epoch 438
335.7762867197799
new epoch 439
416.42313585401314
new epoch 440
330.5693235772826
new epoch 441
385.43879966122546
new epoch 442
381.1825610238528
new epoch 443
360.73645962006265
new epoch 444
352.89100207448257
new epoch 445
396.7198518611543
new epoch 446
358.35206111871094
new epoch 447
361.74133338901044
new epoch 448
322.8021621502139
new epoch 449
400.4829949264725
new epoch 450
337.3728337699442
new epoch 451
345.3844493220051
new epoch 452
351.08087805068743
new epoch 453
318.81670868466756
new epoch 454
424.62548573811847
new epoch 455
369.69226588048156
new epoch 456
429.2051365451899
new epoch 457
386.4211271020742
new epoch 458
328.1345601221722
new epoch 459
270.93611574213975
new epoch 460
462.2092779495244
new epoch 461
362.9520697280788
new epoch 462
355.69957868143354
new epoch 463
401.5070244318355
new epoch 464
360.2009126887709
new epoch 465
451.4696924875098
new epoch 466
397.9001561530907
new epoch 467
308.66784894063414
new epoch 468
402.8324160372967
new epoch 469
409.96041941034764
new epoch 470
406.70238962114655
new epoch 471
336.6495212350486
new epoch 472
371.05536204900403
new epoch 473
338.39747517661385
new epoch 474
306.0442261415001
new epoch 475
360.9199757415028
new epoch 476
363.97273174424964
new epoch 477
312.9948755468025
new epoch 478
375.1447152174166
new epoch 479

Demo


In [190]:
train_epoch_num = 1201
batch_size = 1
sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
saver = tf.train.Saver()
indices = list(range(0,len(reference)))
saver.restore(sess, "checkpoint/TrainCNN-FlowOnMix-1503")
loss_plot = np.zeros(train_epoch_num*int(len(indices)/batch_size))


INFO:tensorflow:Restoring parameters from checkpoint/TrainCNN-FlowOnMix-1503

In [191]:
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)

#saver.restore(sess, "checkpoint/FreqNet4-1005")
saver.restore(sess, "checkpoint/TrainCNN-FlowOnMix-1503")


INFO:tensorflow:Restoring parameters from checkpoint/TrainCNN-FlowOnMix-1503

In [0]:
output_rates = []
#refs = []
for i in range(0,len(signal)):
  sig = np.asarray(mynorm(signal[i]))
  sig = np.reshape(sig, [1,wd])
  r = sess.run([myrate], feed_dict={input_data: sig})
  output_rates.append(r)

  #refs.append(np.mean(reference_train[i:i+wd]))

In [193]:
# absolute error
errors = []
for i in range(len(output_rates)):
  errors.append(abs(output_rates[i]-reference[i]))
print(np.mean(errors))


47.15587

In [194]:
output_rates = np.asarray(output_rates)
print(output_rates.shape)
print(reference[:300].shape)


(159, 1, 1, 1)
(159,)

In [0]:
output_rates = output_rates.flatten()

In [0]:
output_rates = np.reshape(output_rates, [len(output_rates)])

In [165]:
plt.step(output_rates,'r')
plt.step(reference,'b')


Out[165]:
[<matplotlib.lines.Line2D at 0x7f3e51190d30>]

In [0]: