In [ ]:
# Softmax classifier that guesses a square on a Minesweeper board and whether that square contains a mine
In [1]:
# Import libraries for simulation
import tensorflow as tf
import numpy as np
import random as r
import datetime as dt
In [2]:
dimensions = (12,12)
mineProbability = 0.16 # Probability that a square contains a mine (~23 expected mines on a 12x12 board)
In [3]:
# Clears a square on the minesweeper board.
# If it contains a mine, returns True.
# Otherwise records the square's adjacent-mine count and, if that count is
# zero, recursively clears the adjacent squares. Returns False.
def clearSquare(board, adjacency, row, col):
    if board[row, col] == 1:
        return True
    if adjacency[row, col] >= 0:  # already cleared
        return False
    # rows and cols are module-level globals, defined in the model cell below
    n = 0
    for i in range(row - 1, row + 2):
        for j in range(col - 1, col + 2):
            if 0 <= i < rows and 0 <= j < cols:
                n += board[i, j]
    adjacency[row, col] = n
    if n == 0:
        for i in range(row - 1, row + 2):
            for j in range(col - 1, col + 2):
                if 0 <= i < rows and 0 <= j < cols:
                    clearSquare(board, adjacency, i, j)
    return False
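In [ ]:
# Hypothetical sanity check (not part of the original run): a tiny 3x3 flood-fill
# demo of clearSquare. It reads the globals `rows`/`cols`, which the model cell
# further down defines, so the sketch sets them explicitly first.
rows, cols = 3, 3
demoBoard = np.zeros((3, 3), dtype=int)
demoBoard[0, 0] = 1                      # a single mine in one corner
demoAdj = np.full((3, 3), -1)
clearSquare(demoBoard, demoAdj, 2, 2)    # clear the opposite corner
print(demoAdj)                           # zero counts flood outward; the mine square stays -1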
In [4]:
# Takes a mine board and returns a partially revealed grid of adjacent-mine
# counts: a random subset of mine-free squares is cleared (flood-filling
# through zero-count regions); every other square, mines included, stays -1.
def boardPartialMineCounts(board):
    clearProbability = r.uniform(0.05, 0.5)
    result = np.full(dimensions, -1)
    for index, x in np.ndenumerate(board):
        row, col = index
        if not x and result[row, col] == -1 and r.uniform(0, 1) < clearProbability:
            clearSquare(board, result, row, col)
    return result
In [5]:
# Generates a random training batch of size at most n
def next_training_batch(n):
    batch_xs = []
    batch_ys = []
    boards = []
    for _ in range(n):
        board = np.random.random(dimensions) < mineProbability
        counts = boardPartialMineCounts(board)
        # First `size` entries: squares that are safe to clear (unrevealed, no mine);
        # last `size` entries: squares that contain a mine
        validGuesses = np.append(((counts == -1).astype(int) - board).flatten().astype(float),
                                 board.flatten().astype(float))
        validGuessesSum = sum(validGuesses)
        if validGuessesSum > 0:
            # Encode counts as one-hot; counts run from -1 (unrevealed) to 8, so shift by 1
            countsOneHot = np.zeros((counts.size, 10))
            countsOneHot[np.arange(counts.size), counts.flatten() + 1] = 1
            batch_xs.append(countsOneHot.flatten())
            batch_ys.append(validGuesses / validGuessesSum)
            boards.append(board)
    return (np.asarray(batch_xs), np.asarray(batch_ys), boards)
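In [ ]:
# Hypothetical shape check (not part of the original run): a 12x12 board has
# size = 144 squares, so inputs should be 144*10 = 1440 wide and targets
# 144*2 = 288 wide. `rows`/`cols` are set here because clearSquare needs them.
rows, cols = dimensions
xs, ys, _ = next_training_batch(4)
print(xs.shape, ys.shape)                # expect (<=4, 1440) and (<=4, 288)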
In [6]:
# Create the model
rows, cols = dimensions
size = rows*cols
mineCountsOneHot = tf.placeholder(tf.float32, [None, size*10], name="mineCountsOneHot")
#mineCountsOneHot = tf.reshape(tf.one_hot(mineCounts+1,10), [-1, size*10])
W = tf.Variable(tf.random_normal([size*10, size*2], stddev=0.01), name="W")
b = tf.Variable(tf.random_normal([size*2], stddev=0.01), name="b")
y = tf.matmul(mineCountsOneHot, W) + b
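In [ ]:
# Sketch, not part of the original graph: the 2*size logits mirror the layout of
# validGuesses above; the first `size` score "safe to clear" per square and the
# last `size` score "contains a mine". Picking the most confident guess:
guessProbs = tf.nn.softmax(y)                  # shape [batch, size*2]
bestGuess = tf.argmax(guessProbs, axis=1)      # < size: clear that square; >= size: flag a mine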
In [7]:
validGuessAverages = tf.placeholder(tf.float32, [None, size*2], name="validGuessAverages")
In [8]:
# Loss function
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=validGuessAverages, logits=y))
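In [ ]:
# Hypothetical numeric check (not part of the original run): the labels fed in
# are soft distributions (validGuesses / validGuessesSum), which
# tf.nn.softmax_cross_entropy_with_logits supports. For target p and logits z it
# computes -sum_i p_i * log(softmax(z)_i); the same value by hand in NumPy:
z = np.array([1.0, 2.0, 0.5])            # made-up logits
p = np.array([0.5, 0.25, 0.25])          # made-up soft target distribution
soft = np.exp(z) / np.sum(np.exp(z))
print(-np.sum(p * np.log(soft)))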
In [9]:
# Summaries for tensorboard
with tf.name_scope('W_reshape'):
    image_shaped_W = tf.reshape(W, [-1, size*10, size*2, 1])
    tf.summary.image('W', image_shaped_W, 1000)
with tf.name_scope('b_reshape'):
    image_shaped_b = tf.reshape(b, [-1, rows*2, cols, 1])
    tf.summary.image('b', image_shaped_b, 1000)
_ = tf.summary.scalar('loss', cross_entropy)
In [10]:
# Optimiser: Adam with TensorFlow's default learning rate (0.001)
train_step = tf.train.AdamOptimizer().minimize(cross_entropy)
In [11]:
# Create session, saver, and summary writer (view summaries with: tensorboard --logdir .);
# variables are initialised, or optionally restored, in the cells below
savePath = './saves.tf.Mines6/' + str(dimensions) + '/'
saver = tf.train.Saver()
sess = tf.InteractiveSession()
merged = tf.summary.merge_all()
writer = tf.summary.FileWriter('.', sess.graph)
In [12]:
tf.global_variables_initializer().run()
In [13]:
# Restore model?
#saver.restore(sess, savePath + "model-2000")
In [14]:
# Train
for iteration in range(10001):
    batch_xs, batch_ys, _ = next_training_batch(1000)
    summary, loss, _ = sess.run([merged, cross_entropy, train_step],
                                feed_dict={mineCountsOneHot: batch_xs, validGuessAverages: batch_ys})
    writer.add_summary(summary, iteration)
    print('%s: Loss at step %s: %s' % (dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), iteration, loss))
    if iteration % 100 == 0:
        save_path = saver.save(sess, savePath + 'model', global_step=iteration)
        print("Model saved in file: %s" % save_path)
2017-11-07 14:18:04: Loss at step 0: 5.672
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-0
2017-11-07 14:18:06: Loss at step 1: 5.6473
2017-11-07 14:18:08: Loss at step 2: 5.63212
...
2017-11-07 14:21:08: Loss at step 100: 5.16782
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-100
...
2017-11-07 14:24:09: Loss at step 200: 4.98025
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-200
...
2017-11-07 14:27:12: Loss at step 300: 4.85525
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-300
...
2017-11-07 14:30:15: Loss at step 400: 4.78446
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-400
...
2017-11-07 14:33:18: Loss at step 500: 4.75297
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-500
...
2017-11-07 14:36:21: Loss at step 600: 4.71593
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-600
...
2017-11-07 14:39:25: Loss at step 700: 4.66673
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-700
...
2017-11-07 14:42:30: Loss at step 800: 4.6449
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-800
...
2017-11-07 14:45:34: Loss at step 900: 4.59327
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-900
...
2017-11-07 14:48:39: Loss at step 1000: 4.592
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1000
...
2017-11-07 14:51:44: Loss at step 1100: 4.5765
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1100
...
2017-11-07 14:54:18: Loss at step 1183: 4.58783
2017-11-07 14:54:20: Loss at step 1184: 4.57839
2017-11-07 14:54:22: Loss at step 1185: 4.57644
2017-11-07 14:54:24: Loss at step 1186: 4.55905
2017-11-07 14:54:26: Loss at step 1187: 4.58147
2017-11-07 14:54:28: Loss at step 1188: 4.59076
2017-11-07 14:54:30: Loss at step 1189: 4.56668
2017-11-07 14:54:31: Loss at step 1190: 4.56621
2017-11-07 14:54:33: Loss at step 1191: 4.5802
2017-11-07 14:54:35: Loss at step 1192: 4.56956
2017-11-07 14:54:37: Loss at step 1193: 4.57419
2017-11-07 14:54:39: Loss at step 1194: 4.58679
2017-11-07 14:54:41: Loss at step 1195: 4.58034
2017-11-07 14:54:43: Loss at step 1196: 4.55792
2017-11-07 14:54:44: Loss at step 1197: 4.59689
2017-11-07 14:54:46: Loss at step 1198: 4.57058
2017-11-07 14:54:48: Loss at step 1199: 4.57308
2017-11-07 14:54:50: Loss at step 1200: 4.59748
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1200
2017-11-07 14:54:52: Loss at step 1201: 4.5433
2017-11-07 14:54:54: Loss at step 1202: 4.57429
2017-11-07 14:54:56: Loss at step 1203: 4.57947
2017-11-07 14:54:57: Loss at step 1204: 4.58825
2017-11-07 14:54:59: Loss at step 1205: 4.58357
2017-11-07 14:55:01: Loss at step 1206: 4.58095
2017-11-07 14:55:03: Loss at step 1207: 4.59689
2017-11-07 14:55:05: Loss at step 1208: 4.57386
2017-11-07 14:55:07: Loss at step 1209: 4.57046
2017-11-07 14:55:09: Loss at step 1210: 4.58759
2017-11-07 14:55:10: Loss at step 1211: 4.55706
2017-11-07 14:55:12: Loss at step 1212: 4.56826
2017-11-07 14:55:14: Loss at step 1213: 4.57021
2017-11-07 14:55:16: Loss at step 1214: 4.5944
2017-11-07 14:55:18: Loss at step 1215: 4.57406
2017-11-07 14:55:20: Loss at step 1216: 4.58627
2017-11-07 14:55:22: Loss at step 1217: 4.5624
2017-11-07 14:55:23: Loss at step 1218: 4.56253
2017-11-07 14:55:25: Loss at step 1219: 4.5922
2017-11-07 14:55:27: Loss at step 1220: 4.57633
2017-11-07 14:55:29: Loss at step 1221: 4.57128
2017-11-07 14:55:31: Loss at step 1222: 4.56218
2017-11-07 14:55:33: Loss at step 1223: 4.60045
2017-11-07 14:55:35: Loss at step 1224: 4.56387
2017-11-07 14:55:36: Loss at step 1225: 4.58651
2017-11-07 14:55:38: Loss at step 1226: 4.57664
2017-11-07 14:55:40: Loss at step 1227: 4.56377
2017-11-07 14:55:42: Loss at step 1228: 4.55772
2017-11-07 14:55:44: Loss at step 1229: 4.57848
2017-11-07 14:55:46: Loss at step 1230: 4.55299
2017-11-07 14:55:48: Loss at step 1231: 4.57882
2017-11-07 14:55:49: Loss at step 1232: 4.55098
2017-11-07 14:55:51: Loss at step 1233: 4.56755
2017-11-07 14:55:53: Loss at step 1234: 4.56243
2017-11-07 14:55:55: Loss at step 1235: 4.60308
2017-11-07 14:55:57: Loss at step 1236: 4.57374
2017-11-07 14:55:59: Loss at step 1237: 4.60077
2017-11-07 14:56:01: Loss at step 1238: 4.57768
2017-11-07 14:56:02: Loss at step 1239: 4.56161
2017-11-07 14:56:04: Loss at step 1240: 4.54622
2017-11-07 14:56:06: Loss at step 1241: 4.58101
2017-11-07 14:56:08: Loss at step 1242: 4.57588
2017-11-07 14:56:10: Loss at step 1243: 4.574
2017-11-07 14:56:12: Loss at step 1244: 4.5509
2017-11-07 14:56:14: Loss at step 1245: 4.54948
2017-11-07 14:56:15: Loss at step 1246: 4.58349
2017-11-07 14:56:17: Loss at step 1247: 4.57393
2017-11-07 14:56:19: Loss at step 1248: 4.58877
2017-11-07 14:56:21: Loss at step 1249: 4.58096
2017-11-07 14:56:23: Loss at step 1250: 4.57421
2017-11-07 14:56:25: Loss at step 1251: 4.56762
2017-11-07 14:56:27: Loss at step 1252: 4.59399
2017-11-07 14:56:28: Loss at step 1253: 4.56796
2017-11-07 14:56:30: Loss at step 1254: 4.57811
2017-11-07 14:56:32: Loss at step 1255: 4.55858
2017-11-07 14:56:34: Loss at step 1256: 4.56723
2017-11-07 14:56:36: Loss at step 1257: 4.56687
2017-11-07 14:56:38: Loss at step 1258: 4.58257
2017-11-07 14:56:39: Loss at step 1259: 4.56797
2017-11-07 14:56:41: Loss at step 1260: 4.55568
2017-11-07 14:56:43: Loss at step 1261: 4.59992
2017-11-07 14:56:45: Loss at step 1262: 4.56045
2017-11-07 14:56:47: Loss at step 1263: 4.56316
2017-11-07 14:56:49: Loss at step 1264: 4.54262
2017-11-07 14:56:51: Loss at step 1265: 4.57459
2017-11-07 14:56:53: Loss at step 1266: 4.55863
2017-11-07 14:56:54: Loss at step 1267: 4.58863
2017-11-07 14:56:56: Loss at step 1268: 4.57603
2017-11-07 14:56:58: Loss at step 1269: 4.57556
2017-11-07 14:57:00: Loss at step 1270: 4.56961
2017-11-07 14:57:02: Loss at step 1271: 4.56682
2017-11-07 14:57:04: Loss at step 1272: 4.56517
2017-11-07 14:57:06: Loss at step 1273: 4.57126
2017-11-07 14:57:07: Loss at step 1274: 4.55474
2017-11-07 14:57:09: Loss at step 1275: 4.55965
2017-11-07 14:57:11: Loss at step 1276: 4.57841
2017-11-07 14:57:13: Loss at step 1277: 4.5619
2017-11-07 14:57:15: Loss at step 1278: 4.55696
2017-11-07 14:57:17: Loss at step 1279: 4.55851
2017-11-07 14:57:19: Loss at step 1280: 4.53712
2017-11-07 14:57:20: Loss at step 1281: 4.58466
2017-11-07 14:57:22: Loss at step 1282: 4.56848
2017-11-07 14:57:24: Loss at step 1283: 4.5797
2017-11-07 14:57:26: Loss at step 1284: 4.56846
2017-11-07 14:57:28: Loss at step 1285: 4.5689
2017-11-07 14:57:30: Loss at step 1286: 4.55998
2017-11-07 14:57:32: Loss at step 1287: 4.57072
2017-11-07 14:57:33: Loss at step 1288: 4.56303
2017-11-07 14:57:35: Loss at step 1289: 4.57276
2017-11-07 14:57:37: Loss at step 1290: 4.55572
2017-11-07 14:57:39: Loss at step 1291: 4.55093
2017-11-07 14:57:41: Loss at step 1292: 4.56627
2017-11-07 14:57:43: Loss at step 1293: 4.5664
2017-11-07 14:57:45: Loss at step 1294: 4.54979
2017-11-07 14:57:47: Loss at step 1295: 4.5777
2017-11-07 14:57:48: Loss at step 1296: 4.55654
2017-11-07 14:57:50: Loss at step 1297: 4.54486
2017-11-07 14:57:52: Loss at step 1298: 4.55354
2017-11-07 14:57:54: Loss at step 1299: 4.55959
2017-11-07 14:57:56: Loss at step 1300: 4.58978
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1300
2017-11-07 14:57:58: Loss at step 1301: 4.5571
2017-11-07 14:58:00: Loss at step 1302: 4.55059
2017-11-07 14:58:02: Loss at step 1303: 4.55501
2017-11-07 14:58:03: Loss at step 1304: 4.56498
2017-11-07 14:58:05: Loss at step 1305: 4.57314
2017-11-07 14:58:07: Loss at step 1306: 4.58658
2017-11-07 14:58:09: Loss at step 1307: 4.55376
2017-11-07 14:58:11: Loss at step 1308: 4.57809
2017-11-07 14:58:13: Loss at step 1309: 4.56743
2017-11-07 14:58:14: Loss at step 1310: 4.55759
2017-11-07 14:58:16: Loss at step 1311: 4.57539
2017-11-07 14:58:18: Loss at step 1312: 4.55621
2017-11-07 14:58:20: Loss at step 1313: 4.56159
2017-11-07 14:58:22: Loss at step 1314: 4.58097
2017-11-07 14:58:24: Loss at step 1315: 4.54443
2017-11-07 14:58:26: Loss at step 1316: 4.55798
2017-11-07 14:58:27: Loss at step 1317: 4.56848
2017-11-07 14:58:29: Loss at step 1318: 4.57003
2017-11-07 14:58:31: Loss at step 1319: 4.5787
2017-11-07 14:58:33: Loss at step 1320: 4.56392
2017-11-07 14:58:35: Loss at step 1321: 4.59042
2017-11-07 14:58:37: Loss at step 1322: 4.5499
2017-11-07 14:58:39: Loss at step 1323: 4.55431
2017-11-07 14:58:40: Loss at step 1324: 4.5465
2017-11-07 14:58:42: Loss at step 1325: 4.55666
2017-11-07 14:58:44: Loss at step 1326: 4.55147
2017-11-07 14:58:46: Loss at step 1327: 4.55966
2017-11-07 14:58:48: Loss at step 1328: 4.56696
2017-11-07 14:58:50: Loss at step 1329: 4.54316
2017-11-07 14:58:52: Loss at step 1330: 4.58086
2017-11-07 14:58:53: Loss at step 1331: 4.55341
2017-11-07 14:58:55: Loss at step 1332: 4.54644
2017-11-07 14:58:57: Loss at step 1333: 4.58876
2017-11-07 14:58:59: Loss at step 1334: 4.55428
2017-11-07 14:59:01: Loss at step 1335: 4.56991
2017-11-07 14:59:03: Loss at step 1336: 4.54637
2017-11-07 14:59:05: Loss at step 1337: 4.56239
2017-11-07 14:59:07: Loss at step 1338: 4.54571
2017-11-07 14:59:08: Loss at step 1339: 4.57228
2017-11-07 14:59:10: Loss at step 1340: 4.56708
2017-11-07 14:59:12: Loss at step 1341: 4.5541
2017-11-07 14:59:14: Loss at step 1342: 4.56195
2017-11-07 14:59:16: Loss at step 1343: 4.54915
2017-11-07 14:59:18: Loss at step 1344: 4.54555
2017-11-07 14:59:20: Loss at step 1345: 4.5729
2017-11-07 14:59:21: Loss at step 1346: 4.56119
2017-11-07 14:59:23: Loss at step 1347: 4.56979
2017-11-07 14:59:25: Loss at step 1348: 4.56426
2017-11-07 14:59:27: Loss at step 1349: 4.57287
2017-11-07 14:59:29: Loss at step 1350: 4.5702
2017-11-07 14:59:31: Loss at step 1351: 4.57827
2017-11-07 14:59:32: Loss at step 1352: 4.57965
2017-11-07 14:59:34: Loss at step 1353: 4.55392
2017-11-07 14:59:36: Loss at step 1354: 4.56639
2017-11-07 14:59:38: Loss at step 1355: 4.58444
2017-11-07 14:59:40: Loss at step 1356: 4.56779
2017-11-07 14:59:42: Loss at step 1357: 4.57617
2017-11-07 14:59:43: Loss at step 1358: 4.59196
2017-11-07 14:59:45: Loss at step 1359: 4.57004
2017-11-07 14:59:47: Loss at step 1360: 4.56784
2017-11-07 14:59:49: Loss at step 1361: 4.55231
2017-11-07 14:59:51: Loss at step 1362: 4.5537
2017-11-07 14:59:53: Loss at step 1363: 4.5609
2017-11-07 14:59:55: Loss at step 1364: 4.56379
2017-11-07 14:59:56: Loss at step 1365: 4.57456
2017-11-07 14:59:58: Loss at step 1366: 4.5566
2017-11-07 15:00:00: Loss at step 1367: 4.54572
2017-11-07 15:00:02: Loss at step 1368: 4.56005
2017-11-07 15:00:04: Loss at step 1369: 4.56082
2017-11-07 15:00:06: Loss at step 1370: 4.5491
2017-11-07 15:00:08: Loss at step 1371: 4.57779
2017-11-07 15:00:10: Loss at step 1372: 4.55048
2017-11-07 15:00:11: Loss at step 1373: 4.54808
2017-11-07 15:00:13: Loss at step 1374: 4.56558
2017-11-07 15:00:15: Loss at step 1375: 4.57131
2017-11-07 15:00:17: Loss at step 1376: 4.56421
2017-11-07 15:00:19: Loss at step 1377: 4.5559
2017-11-07 15:00:21: Loss at step 1378: 4.58002
2017-11-07 15:00:23: Loss at step 1379: 4.56066
2017-11-07 15:00:24: Loss at step 1380: 4.55551
2017-11-07 15:00:26: Loss at step 1381: 4.5413
2017-11-07 15:00:28: Loss at step 1382: 4.55599
2017-11-07 15:00:30: Loss at step 1383: 4.56262
2017-11-07 15:00:32: Loss at step 1384: 4.5454
2017-11-07 15:00:34: Loss at step 1385: 4.56295
2017-11-07 15:00:36: Loss at step 1386: 4.55683
2017-11-07 15:00:37: Loss at step 1387: 4.55416
2017-11-07 15:00:39: Loss at step 1388: 4.56532
2017-11-07 15:00:41: Loss at step 1389: 4.55411
2017-11-07 15:00:43: Loss at step 1390: 4.58103
2017-11-07 15:00:45: Loss at step 1391: 4.58471
2017-11-07 15:00:47: Loss at step 1392: 4.57998
2017-11-07 15:00:49: Loss at step 1393: 4.54398
2017-11-07 15:00:50: Loss at step 1394: 4.54856
2017-11-07 15:00:52: Loss at step 1395: 4.5514
2017-11-07 15:00:54: Loss at step 1396: 4.56017
2017-11-07 15:00:56: Loss at step 1397: 4.56005
2017-11-07 15:00:58: Loss at step 1398: 4.54309
2017-11-07 15:01:00: Loss at step 1399: 4.58224
2017-11-07 15:01:02: Loss at step 1400: 4.5657
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1400
2017-11-07 15:01:03: Loss at step 1401: 4.52639
2017-11-07 15:01:05: Loss at step 1402: 4.58027
2017-11-07 15:01:07: Loss at step 1403: 4.55364
2017-11-07 15:01:09: Loss at step 1404: 4.53056
2017-11-07 15:01:11: Loss at step 1405: 4.55822
2017-11-07 15:01:13: Loss at step 1406: 4.55263
2017-11-07 15:01:15: Loss at step 1407: 4.53964
2017-11-07 15:01:16: Loss at step 1408: 4.56552
2017-11-07 15:01:18: Loss at step 1409: 4.56526
2017-11-07 15:01:20: Loss at step 1410: 4.54007
2017-11-07 15:01:22: Loss at step 1411: 4.56411
2017-11-07 15:01:24: Loss at step 1412: 4.5701
2017-11-07 15:01:26: Loss at step 1413: 4.54531
2017-11-07 15:01:28: Loss at step 1414: 4.55674
2017-11-07 15:01:30: Loss at step 1415: 4.54328
2017-11-07 15:01:31: Loss at step 1416: 4.5539
2017-11-07 15:01:33: Loss at step 1417: 4.53774
2017-11-07 15:01:35: Loss at step 1418: 4.54648
2017-11-07 15:01:37: Loss at step 1419: 4.56108
2017-11-07 15:01:39: Loss at step 1420: 4.56332
2017-11-07 15:01:41: Loss at step 1421: 4.56229
2017-11-07 15:01:43: Loss at step 1422: 4.56518
2017-11-07 15:01:44: Loss at step 1423: 4.53615
2017-11-07 15:01:46: Loss at step 1424: 4.55588
2017-11-07 15:01:48: Loss at step 1425: 4.54867
2017-11-07 15:01:50: Loss at step 1426: 4.5392
2017-11-07 15:01:52: Loss at step 1427: 4.55039
2017-11-07 15:01:54: Loss at step 1428: 4.5644
2017-11-07 15:01:55: Loss at step 1429: 4.53734
2017-11-07 15:01:57: Loss at step 1430: 4.55372
2017-11-07 15:01:59: Loss at step 1431: 4.54341
2017-11-07 15:02:01: Loss at step 1432: 4.55823
2017-11-07 15:02:03: Loss at step 1433: 4.55207
2017-11-07 15:02:05: Loss at step 1434: 4.57218
2017-11-07 15:02:07: Loss at step 1435: 4.55833
2017-11-07 15:02:08: Loss at step 1436: 4.5746
2017-11-07 15:02:10: Loss at step 1437: 4.54466
2017-11-07 15:02:12: Loss at step 1438: 4.53903
2017-11-07 15:02:14: Loss at step 1439: 4.55014
2017-11-07 15:02:16: Loss at step 1440: 4.5407
2017-11-07 15:02:18: Loss at step 1441: 4.55308
2017-11-07 15:02:20: Loss at step 1442: 4.551
2017-11-07 15:02:22: Loss at step 1443: 4.52524
2017-11-07 15:02:23: Loss at step 1444: 4.55936
2017-11-07 15:02:25: Loss at step 1445: 4.55677
2017-11-07 15:02:27: Loss at step 1446: 4.5629
2017-11-07 15:02:29: Loss at step 1447: 4.54539
2017-11-07 15:02:31: Loss at step 1448: 4.54375
2017-11-07 15:02:33: Loss at step 1449: 4.55294
2017-11-07 15:02:35: Loss at step 1450: 4.54601
2017-11-07 15:02:37: Loss at step 1451: 4.5773
2017-11-07 15:02:38: Loss at step 1452: 4.54353
2017-11-07 15:02:40: Loss at step 1453: 4.56074
2017-11-07 15:02:42: Loss at step 1454: 4.55374
2017-11-07 15:02:44: Loss at step 1455: 4.55223
2017-11-07 15:02:46: Loss at step 1456: 4.55956
2017-11-07 15:02:48: Loss at step 1457: 4.53563
2017-11-07 15:02:50: Loss at step 1458: 4.56833
2017-11-07 15:02:51: Loss at step 1459: 4.5527
2017-11-07 15:02:53: Loss at step 1460: 4.54619
2017-11-07 15:02:55: Loss at step 1461: 4.56207
2017-11-07 15:02:57: Loss at step 1462: 4.54152
2017-11-07 15:02:59: Loss at step 1463: 4.55799
2017-11-07 15:03:01: Loss at step 1464: 4.5551
2017-11-07 15:03:03: Loss at step 1465: 4.56078
2017-11-07 15:03:04: Loss at step 1466: 4.57254
2017-11-07 15:03:06: Loss at step 1467: 4.55751
2017-11-07 15:03:08: Loss at step 1468: 4.54903
2017-11-07 15:03:10: Loss at step 1469: 4.55646
2017-11-07 15:03:12: Loss at step 1470: 4.53896
2017-11-07 15:03:14: Loss at step 1471: 4.52592
2017-11-07 15:03:16: Loss at step 1472: 4.56946
2017-11-07 15:03:18: Loss at step 1473: 4.54781
2017-11-07 15:03:20: Loss at step 1474: 4.57124
2017-11-07 15:03:22: Loss at step 1475: 4.53107
2017-11-07 15:03:23: Loss at step 1476: 4.56649
2017-11-07 15:03:25: Loss at step 1477: 4.57247
2017-11-07 15:03:27: Loss at step 1478: 4.56681
2017-11-07 15:03:29: Loss at step 1479: 4.53365
2017-11-07 15:03:31: Loss at step 1480: 4.54566
2017-11-07 15:03:33: Loss at step 1481: 4.55046
2017-11-07 15:03:34: Loss at step 1482: 4.57712
2017-11-07 15:03:36: Loss at step 1483: 4.52516
2017-11-07 15:03:38: Loss at step 1484: 4.56399
2017-11-07 15:03:40: Loss at step 1485: 4.58189
2017-11-07 15:03:42: Loss at step 1486: 4.5559
2017-11-07 15:03:44: Loss at step 1487: 4.57001
2017-11-07 15:03:46: Loss at step 1488: 4.5552
2017-11-07 15:03:47: Loss at step 1489: 4.54101
2017-11-07 15:03:49: Loss at step 1490: 4.57238
2017-11-07 15:03:51: Loss at step 1491: 4.54981
2017-11-07 15:03:53: Loss at step 1492: 4.53643
2017-11-07 15:03:55: Loss at step 1493: 4.55786
2017-11-07 15:03:57: Loss at step 1494: 4.56387
2017-11-07 15:03:59: Loss at step 1495: 4.52386
2017-11-07 15:04:00: Loss at step 1496: 4.56541
2017-11-07 15:04:02: Loss at step 1497: 4.56054
2017-11-07 15:04:04: Loss at step 1498: 4.55196
2017-11-07 15:04:06: Loss at step 1499: 4.53694
2017-11-07 15:04:08: Loss at step 1500: 4.55076
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1500
2017-11-07 15:04:10: Loss at step 1501: 4.53104
2017-11-07 15:04:12: Loss at step 1502: 4.55358
2017-11-07 15:04:13: Loss at step 1503: 4.53664
2017-11-07 15:04:15: Loss at step 1504: 4.53843
2017-11-07 15:04:17: Loss at step 1505: 4.54373
2017-11-07 15:04:19: Loss at step 1506: 4.55013
2017-11-07 15:04:21: Loss at step 1507: 4.55275
2017-11-07 15:04:23: Loss at step 1508: 4.54178
2017-11-07 15:04:25: Loss at step 1509: 4.53762
2017-11-07 15:04:26: Loss at step 1510: 4.55206
2017-11-07 15:04:28: Loss at step 1511: 4.5514
2017-11-07 15:04:30: Loss at step 1512: 4.52051
2017-11-07 15:04:32: Loss at step 1513: 4.54516
2017-11-07 15:04:34: Loss at step 1514: 4.55413
2017-11-07 15:04:36: Loss at step 1515: 4.56241
2017-11-07 15:04:38: Loss at step 1516: 4.5486
2017-11-07 15:04:39: Loss at step 1517: 4.56105
2017-11-07 15:04:41: Loss at step 1518: 4.5493
2017-11-07 15:04:43: Loss at step 1519: 4.54452
2017-11-07 15:04:45: Loss at step 1520: 4.52814
2017-11-07 15:04:47: Loss at step 1521: 4.53833
2017-11-07 15:04:49: Loss at step 1522: 4.547
2017-11-07 15:04:51: Loss at step 1523: 4.58304
2017-11-07 15:04:52: Loss at step 1524: 4.5493
2017-11-07 15:04:54: Loss at step 1525: 4.55022
2017-11-07 15:04:56: Loss at step 1526: 4.57104
2017-11-07 15:04:58: Loss at step 1527: 4.55407
2017-11-07 15:05:00: Loss at step 1528: 4.54777
2017-11-07 15:05:02: Loss at step 1529: 4.55375
2017-11-07 15:05:04: Loss at step 1530: 4.51651
2017-11-07 15:05:05: Loss at step 1531: 4.53699
2017-11-07 15:05:07: Loss at step 1532: 4.56788
2017-11-07 15:05:09: Loss at step 1533: 4.55319
2017-11-07 15:05:11: Loss at step 1534: 4.54709
2017-11-07 15:05:13: Loss at step 1535: 4.54132
2017-11-07 15:05:15: Loss at step 1536: 4.5617
2017-11-07 15:05:17: Loss at step 1537: 4.53621
2017-11-07 15:05:18: Loss at step 1538: 4.55483
2017-11-07 15:05:20: Loss at step 1539: 4.56546
2017-11-07 15:05:22: Loss at step 1540: 4.57988
2017-11-07 15:05:24: Loss at step 1541: 4.56471
2017-11-07 15:05:26: Loss at step 1542: 4.54697
2017-11-07 15:05:28: Loss at step 1543: 4.56813
2017-11-07 15:05:29: Loss at step 1544: 4.55705
2017-11-07 15:05:31: Loss at step 1545: 4.56015
2017-11-07 15:05:33: Loss at step 1546: 4.5438
2017-11-07 15:05:35: Loss at step 1547: 4.56994
2017-11-07 15:05:37: Loss at step 1548: 4.53782
2017-11-07 15:05:39: Loss at step 1549: 4.54577
2017-11-07 15:05:41: Loss at step 1550: 4.54218
2017-11-07 15:05:43: Loss at step 1551: 4.55123
2017-11-07 15:05:44: Loss at step 1552: 4.54384
2017-11-07 15:05:46: Loss at step 1553: 4.55298
2017-11-07 15:05:48: Loss at step 1554: 4.55117
2017-11-07 15:05:50: Loss at step 1555: 4.56194
2017-11-07 15:05:52: Loss at step 1556: 4.56038
2017-11-07 15:05:54: Loss at step 1557: 4.57043
2017-11-07 15:05:55: Loss at step 1558: 4.54259
2017-11-07 15:05:57: Loss at step 1559: 4.53671
2017-11-07 15:05:59: Loss at step 1560: 4.53204
2017-11-07 15:06:01: Loss at step 1561: 4.53788
2017-11-07 15:06:03: Loss at step 1562: 4.51612
2017-11-07 15:06:05: Loss at step 1563: 4.56079
2017-11-07 15:06:07: Loss at step 1564: 4.5253
2017-11-07 15:06:09: Loss at step 1565: 4.54022
2017-11-07 15:06:11: Loss at step 1566: 4.53001
2017-11-07 15:06:12: Loss at step 1567: 4.52911
2017-11-07 15:06:14: Loss at step 1568: 4.53835
2017-11-07 15:06:16: Loss at step 1569: 4.56813
2017-11-07 15:06:18: Loss at step 1570: 4.54057
2017-11-07 15:06:20: Loss at step 1571: 4.5651
2017-11-07 15:06:22: Loss at step 1572: 4.53044
2017-11-07 15:06:24: Loss at step 1573: 4.53642
2017-11-07 15:06:25: Loss at step 1574: 4.55944
2017-11-07 15:06:27: Loss at step 1575: 4.54862
2017-11-07 15:06:29: Loss at step 1576: 4.56672
2017-11-07 15:06:31: Loss at step 1577: 4.56452
2017-11-07 15:06:33: Loss at step 1578: 4.54945
2017-11-07 15:06:35: Loss at step 1579: 4.51209
2017-11-07 15:06:37: Loss at step 1580: 4.55345
2017-11-07 15:06:38: Loss at step 1581: 4.52828
2017-11-07 15:06:40: Loss at step 1582: 4.5519
2017-11-07 15:06:42: Loss at step 1583: 4.53839
2017-11-07 15:06:44: Loss at step 1584: 4.5503
2017-11-07 15:06:46: Loss at step 1585: 4.54195
2017-11-07 15:06:48: Loss at step 1586: 4.54207
2017-11-07 15:06:50: Loss at step 1587: 4.54459
2017-11-07 15:06:52: Loss at step 1588: 4.53013
2017-11-07 15:06:54: Loss at step 1589: 4.5396
2017-11-07 15:06:55: Loss at step 1590: 4.52917
2017-11-07 15:06:57: Loss at step 1591: 4.56265
2017-11-07 15:06:59: Loss at step 1592: 4.55974
2017-11-07 15:07:01: Loss at step 1593: 4.55472
2017-11-07 15:07:03: Loss at step 1594: 4.55319
2017-11-07 15:07:05: Loss at step 1595: 4.53601
2017-11-07 15:07:06: Loss at step 1596: 4.53023
2017-11-07 15:07:09: Loss at step 1597: 4.53244
2017-11-07 15:07:10: Loss at step 1598: 4.55667
2017-11-07 15:07:12: Loss at step 1599: 4.55359
2017-11-07 15:07:14: Loss at step 1600: 4.54996
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1600
2017-11-07 15:07:16: Loss at step 1601: 4.52285
2017-11-07 15:07:18: Loss at step 1602: 4.55548
2017-11-07 15:07:20: Loss at step 1603: 4.52882
2017-11-07 15:07:22: Loss at step 1604: 4.56218
2017-11-07 15:07:23: Loss at step 1605: 4.53796
2017-11-07 15:07:25: Loss at step 1606: 4.55189
2017-11-07 15:07:27: Loss at step 1607: 4.53935
2017-11-07 15:07:29: Loss at step 1608: 4.53036
2017-11-07 15:07:31: Loss at step 1609: 4.55084
2017-11-07 15:07:33: Loss at step 1610: 4.54711
2017-11-07 15:07:35: Loss at step 1611: 4.52063
2017-11-07 15:07:36: Loss at step 1612: 4.53052
2017-11-07 15:07:38: Loss at step 1613: 4.55109
2017-11-07 15:07:40: Loss at step 1614: 4.53135
2017-11-07 15:07:42: Loss at step 1615: 4.52879
2017-11-07 15:07:44: Loss at step 1616: 4.58631
2017-11-07 15:07:46: Loss at step 1617: 4.53816
2017-11-07 15:07:48: Loss at step 1618: 4.54764
2017-11-07 15:07:49: Loss at step 1619: 4.56372
2017-11-07 15:07:51: Loss at step 1620: 4.55353
2017-11-07 15:07:53: Loss at step 1621: 4.54021
2017-11-07 15:07:55: Loss at step 1622: 4.55277
2017-11-07 15:07:57: Loss at step 1623: 4.54344
2017-11-07 15:07:59: Loss at step 1624: 4.54262
2017-11-07 15:08:01: Loss at step 1625: 4.53133
2017-11-07 15:08:02: Loss at step 1626: 4.57029
2017-11-07 15:08:04: Loss at step 1627: 4.53349
2017-11-07 15:08:06: Loss at step 1628: 4.54119
2017-11-07 15:08:08: Loss at step 1629: 4.54384
2017-11-07 15:08:10: Loss at step 1630: 4.54528
2017-11-07 15:08:12: Loss at step 1631: 4.53594
2017-11-07 15:08:14: Loss at step 1632: 4.53875
2017-11-07 15:08:16: Loss at step 1633: 4.52241
2017-11-07 15:08:17: Loss at step 1634: 4.55435
2017-11-07 15:08:19: Loss at step 1635: 4.52763
2017-11-07 15:08:21: Loss at step 1636: 4.53992
2017-11-07 15:08:23: Loss at step 1637: 4.54737
2017-11-07 15:08:25: Loss at step 1638: 4.51687
2017-11-07 15:08:27: Loss at step 1639: 4.55073
2017-11-07 15:08:29: Loss at step 1640: 4.5389
2017-11-07 15:08:30: Loss at step 1641: 4.53482
2017-11-07 15:08:32: Loss at step 1642: 4.55615
2017-11-07 15:08:34: Loss at step 1643: 4.56943
2017-11-07 15:08:36: Loss at step 1644: 4.55002
2017-11-07 15:08:38: Loss at step 1645: 4.54032
2017-11-07 15:08:40: Loss at step 1646: 4.54864
2017-11-07 15:08:41: Loss at step 1647: 4.53771
2017-11-07 15:08:43: Loss at step 1648: 4.5385
2017-11-07 15:08:45: Loss at step 1649: 4.54797
2017-11-07 15:08:47: Loss at step 1650: 4.55746
2017-11-07 15:08:49: Loss at step 1651: 4.54582
2017-11-07 15:08:51: Loss at step 1652: 4.53507
2017-11-07 15:08:53: Loss at step 1653: 4.53592
2017-11-07 15:08:54: Loss at step 1654: 4.52777
2017-11-07 15:08:56: Loss at step 1655: 4.55992
2017-11-07 15:08:58: Loss at step 1656: 4.55878
2017-11-07 15:09:00: Loss at step 1657: 4.53064
2017-11-07 15:09:02: Loss at step 1658: 4.55972
2017-11-07 15:09:04: Loss at step 1659: 4.55004
2017-11-07 15:09:05: Loss at step 1660: 4.5436
2017-11-07 15:09:07: Loss at step 1661: 4.54604
2017-11-07 15:09:09: Loss at step 1662: 4.52847
2017-11-07 15:09:11: Loss at step 1663: 4.54063
2017-11-07 15:09:13: Loss at step 1664: 4.53932
2017-11-07 15:09:15: Loss at step 1665: 4.55039
2017-11-07 15:09:17: Loss at step 1666: 4.5502
2017-11-07 15:09:18: Loss at step 1667: 4.54007
2017-11-07 15:09:20: Loss at step 1668: 4.51419
2017-11-07 15:09:22: Loss at step 1669: 4.55826
2017-11-07 15:09:24: Loss at step 1670: 4.55333
2017-11-07 15:09:26: Loss at step 1671: 4.5455
2017-11-07 15:09:28: Loss at step 1672: 4.53954
2017-11-07 15:09:30: Loss at step 1673: 4.51894
2017-11-07 15:09:32: Loss at step 1674: 4.52813
2017-11-07 15:09:34: Loss at step 1675: 4.55349
2017-11-07 15:09:35: Loss at step 1676: 4.53148
2017-11-07 15:09:37: Loss at step 1677: 4.51586
2017-11-07 15:09:39: Loss at step 1678: 4.56011
2017-11-07 15:09:41: Loss at step 1679: 4.5464
2017-11-07 15:09:43: Loss at step 1680: 4.51847
2017-11-07 15:09:45: Loss at step 1681: 4.52928
2017-11-07 15:09:47: Loss at step 1682: 4.55424
2017-11-07 15:09:48: Loss at step 1683: 4.55581
2017-11-07 15:09:50: Loss at step 1684: 4.53898
2017-11-07 15:09:52: Loss at step 1685: 4.51826
2017-11-07 15:09:54: Loss at step 1686: 4.54747
2017-11-07 15:09:56: Loss at step 1687: 4.5487
2017-11-07 15:09:58: Loss at step 1688: 4.53772
2017-11-07 15:10:00: Loss at step 1689: 4.51454
2017-11-07 15:10:01: Loss at step 1690: 4.56482
2017-11-07 15:10:03: Loss at step 1691: 4.53136
2017-11-07 15:10:05: Loss at step 1692: 4.52363
2017-11-07 15:10:07: Loss at step 1693: 4.54358
2017-11-07 15:10:09: Loss at step 1694: 4.53313
2017-11-07 15:10:11: Loss at step 1695: 4.54513
2017-11-07 15:10:13: Loss at step 1696: 4.544
2017-11-07 15:10:15: Loss at step 1697: 4.50232
2017-11-07 15:10:16: Loss at step 1698: 4.53093
2017-11-07 15:10:18: Loss at step 1699: 4.54308
2017-11-07 15:10:20: Loss at step 1700: 4.53197
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1700
2017-11-07 15:10:22: Loss at step 1701: 4.5549
2017-11-07 15:10:24: Loss at step 1702: 4.5553
2017-11-07 15:10:26: Loss at step 1703: 4.5137
2017-11-07 15:10:28: Loss at step 1704: 4.55264
2017-11-07 15:10:29: Loss at step 1705: 4.56529
2017-11-07 15:10:31: Loss at step 1706: 4.53451
2017-11-07 15:10:33: Loss at step 1707: 4.55549
2017-11-07 15:10:35: Loss at step 1708: 4.54325
2017-11-07 15:10:37: Loss at step 1709: 4.53473
2017-11-07 15:10:39: Loss at step 1710: 4.54762
2017-11-07 15:10:41: Loss at step 1711: 4.55102
2017-11-07 15:10:43: Loss at step 1712: 4.52516
2017-11-07 15:10:45: Loss at step 1713: 4.54501
2017-11-07 15:10:46: Loss at step 1714: 4.54235
2017-11-07 15:10:48: Loss at step 1715: 4.56367
2017-11-07 15:10:50: Loss at step 1716: 4.53503
2017-11-07 15:10:52: Loss at step 1717: 4.54209
2017-11-07 15:10:54: Loss at step 1718: 4.54415
2017-11-07 15:10:56: Loss at step 1719: 4.51836
2017-11-07 15:10:58: Loss at step 1720: 4.53819
2017-11-07 15:10:59: Loss at step 1721: 4.54583
2017-11-07 15:11:01: Loss at step 1722: 4.53631
2017-11-07 15:11:03: Loss at step 1723: 4.53366
2017-11-07 15:11:05: Loss at step 1724: 4.55053
2017-11-07 15:11:07: Loss at step 1725: 4.52571
2017-11-07 15:11:09: Loss at step 1726: 4.54919
2017-11-07 15:11:11: Loss at step 1727: 4.52558
2017-11-07 15:11:12: Loss at step 1728: 4.56108
2017-11-07 15:11:14: Loss at step 1729: 4.51824
2017-11-07 15:11:16: Loss at step 1730: 4.54
2017-11-07 15:11:18: Loss at step 1731: 4.52282
2017-11-07 15:11:20: Loss at step 1732: 4.55732
2017-11-07 15:11:22: Loss at step 1733: 4.53849
2017-11-07 15:11:24: Loss at step 1734: 4.53138
2017-11-07 15:11:25: Loss at step 1735: 4.54437
2017-11-07 15:11:27: Loss at step 1736: 4.55434
2017-11-07 15:11:29: Loss at step 1737: 4.53034
2017-11-07 15:11:31: Loss at step 1738: 4.53099
2017-11-07 15:11:33: Loss at step 1739: 4.54299
2017-11-07 15:11:35: Loss at step 1740: 4.54004
2017-11-07 15:11:37: Loss at step 1741: 4.5076
2017-11-07 15:11:38: Loss at step 1742: 4.53387
2017-11-07 15:11:40: Loss at step 1743: 4.53956
2017-11-07 15:11:42: Loss at step 1744: 4.5397
2017-11-07 15:11:44: Loss at step 1745: 4.53561
2017-11-07 15:11:46: Loss at step 1746: 4.56173
2017-11-07 15:11:48: Loss at step 1747: 4.52528
2017-11-07 15:11:49: Loss at step 1748: 4.56497
2017-11-07 15:11:51: Loss at step 1749: 4.55215
2017-11-07 15:11:53: Loss at step 1750: 4.52999
2017-11-07 15:11:55: Loss at step 1751: 4.5167
2017-11-07 15:11:57: Loss at step 1752: 4.54408
2017-11-07 15:11:59: Loss at step 1753: 4.55662
2017-11-07 15:12:01: Loss at step 1754: 4.52154
2017-11-07 15:12:03: Loss at step 1755: 4.52288
2017-11-07 15:12:04: Loss at step 1756: 4.52484
2017-11-07 15:12:06: Loss at step 1757: 4.53006
2017-11-07 15:12:08: Loss at step 1758: 4.54548
2017-11-07 15:12:10: Loss at step 1759: 4.52759
2017-11-07 15:12:12: Loss at step 1760: 4.53098
2017-11-07 15:12:14: Loss at step 1761: 4.53209
2017-11-07 15:12:16: Loss at step 1762: 4.53442
2017-11-07 15:12:17: Loss at step 1763: 4.5288
2017-11-07 15:12:20: Loss at step 1764: 4.53488
2017-11-07 15:12:21: Loss at step 1765: 4.53097
2017-11-07 15:12:23: Loss at step 1766: 4.52522
2017-11-07 15:12:25: Loss at step 1767: 4.5295
2017-11-07 15:12:27: Loss at step 1768: 4.53192
2017-11-07 15:12:29: Loss at step 1769: 4.54613
2017-11-07 15:12:31: Loss at step 1770: 4.54061
2017-11-07 15:12:33: Loss at step 1771: 4.55423
2017-11-07 15:12:34: Loss at step 1772: 4.53646
2017-11-07 15:12:36: Loss at step 1773: 4.53131
2017-11-07 15:12:38: Loss at step 1774: 4.51731
2017-11-07 15:12:40: Loss at step 1775: 4.54408
2017-11-07 15:12:42: Loss at step 1776: 4.56084
2017-11-07 15:12:44: Loss at step 1777: 4.53193
2017-11-07 15:12:46: Loss at step 1778: 4.53615
2017-11-07 15:12:47: Loss at step 1779: 4.53884
2017-11-07 15:12:49: Loss at step 1780: 4.54541
2017-11-07 15:12:51: Loss at step 1781: 4.5205
2017-11-07 15:12:53: Loss at step 1782: 4.54132
2017-11-07 15:12:55: Loss at step 1783: 4.53854
2017-11-07 15:12:57: Loss at step 1784: 4.54719
2017-11-07 15:12:59: Loss at step 1785: 4.526
2017-11-07 15:13:00: Loss at step 1786: 4.52162
2017-11-07 15:13:02: Loss at step 1787: 4.55454
2017-11-07 15:13:04: Loss at step 1788: 4.49988
2017-11-07 15:13:06: Loss at step 1789: 4.52331
2017-11-07 15:13:08: Loss at step 1790: 4.52903
2017-11-07 15:13:10: Loss at step 1791: 4.53754
2017-11-07 15:13:12: Loss at step 1792: 4.54451
2017-11-07 15:13:14: Loss at step 1793: 4.52847
2017-11-07 15:13:15: Loss at step 1794: 4.54372
2017-11-07 15:13:17: Loss at step 1795: 4.50909
2017-11-07 15:13:19: Loss at step 1796: 4.55131
2017-11-07 15:13:21: Loss at step 1797: 4.53707
2017-11-07 15:13:23: Loss at step 1798: 4.51869
2017-11-07 15:13:25: Loss at step 1799: 4.54002
2017-11-07 15:13:27: Loss at step 1800: 4.54566
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1800
2017-11-07 15:13:28: Loss at step 1801: 4.53391
2017-11-07 15:13:30: Loss at step 1802: 4.53218
2017-11-07 15:13:32: Loss at step 1803: 4.52551
2017-11-07 15:13:34: Loss at step 1804: 4.54928
2017-11-07 15:13:36: Loss at step 1805: 4.54406
2017-11-07 15:13:38: Loss at step 1806: 4.53179
2017-11-07 15:13:40: Loss at step 1807: 4.53757
2017-11-07 15:13:41: Loss at step 1808: 4.56272
2017-11-07 15:13:43: Loss at step 1809: 4.52377
2017-11-07 15:13:45: Loss at step 1810: 4.53194
2017-11-07 15:13:47: Loss at step 1811: 4.52379
2017-11-07 15:13:49: Loss at step 1812: 4.53527
2017-11-07 15:13:51: Loss at step 1813: 4.55066
2017-11-07 15:13:53: Loss at step 1814: 4.529
2017-11-07 15:13:54: Loss at step 1815: 4.52685
2017-11-07 15:13:56: Loss at step 1816: 4.55329
2017-11-07 15:13:58: Loss at step 1817: 4.53178
2017-11-07 15:14:00: Loss at step 1818: 4.51494
2017-11-07 15:14:02: Loss at step 1819: 4.51812
2017-11-07 15:14:04: Loss at step 1820: 4.53937
2017-11-07 15:14:06: Loss at step 1821: 4.55269
2017-11-07 15:14:07: Loss at step 1822: 4.51731
2017-11-07 15:14:09: Loss at step 1823: 4.5194
2017-11-07 15:14:11: Loss at step 1824: 4.53397
2017-11-07 15:14:13: Loss at step 1825: 4.55954
2017-11-07 15:14:15: Loss at step 1826: 4.53489
2017-11-07 15:14:17: Loss at step 1827: 4.52136
2017-11-07 15:14:19: Loss at step 1828: 4.53698
2017-11-07 15:14:21: Loss at step 1829: 4.54867
2017-11-07 15:14:22: Loss at step 1830: 4.52293
2017-11-07 15:14:24: Loss at step 1831: 4.51009
2017-11-07 15:14:26: Loss at step 1832: 4.54822
2017-11-07 15:14:28: Loss at step 1833: 4.54022
2017-11-07 15:14:30: Loss at step 1834: 4.5307
2017-11-07 15:14:32: Loss at step 1835: 4.52355
2017-11-07 15:14:34: Loss at step 1836: 4.54125
2017-11-07 15:14:36: Loss at step 1837: 4.50298
2017-11-07 15:14:38: Loss at step 1838: 4.51416
2017-11-07 15:14:40: Loss at step 1839: 4.52682
2017-11-07 15:14:41: Loss at step 1840: 4.53311
2017-11-07 15:14:43: Loss at step 1841: 4.5423
2017-11-07 15:14:45: Loss at step 1842: 4.52924
2017-11-07 15:14:47: Loss at step 1843: 4.50539
2017-11-07 15:14:49: Loss at step 1844: 4.51559
2017-11-07 15:14:51: Loss at step 1845: 4.47871
2017-11-07 15:14:53: Loss at step 1846: 4.52227
2017-11-07 15:14:54: Loss at step 1847: 4.53627
2017-11-07 15:14:56: Loss at step 1848: 4.53077
2017-11-07 15:14:58: Loss at step 1849: 4.53938
2017-11-07 15:15:00: Loss at step 1850: 4.52001
2017-11-07 15:15:02: Loss at step 1851: 4.53451
2017-11-07 15:15:04: Loss at step 1852: 4.5247
2017-11-07 15:15:06: Loss at step 1853: 4.50745
2017-11-07 15:15:08: Loss at step 1854: 4.5179
2017-11-07 15:15:09: Loss at step 1855: 4.52679
2017-11-07 15:15:11: Loss at step 1856: 4.52558
2017-11-07 15:15:13: Loss at step 1857: 4.52633
2017-11-07 15:15:15: Loss at step 1858: 4.52703
2017-11-07 15:15:17: Loss at step 1859: 4.54387
2017-11-07 15:15:19: Loss at step 1860: 4.53046
2017-11-07 15:15:21: Loss at step 1861: 4.50748
2017-11-07 15:15:23: Loss at step 1862: 4.52763
2017-11-07 15:15:24: Loss at step 1863: 4.52602
2017-11-07 15:15:26: Loss at step 1864: 4.5139
2017-11-07 15:15:28: Loss at step 1865: 4.52615
2017-11-07 15:15:30: Loss at step 1866: 4.52468
2017-11-07 15:15:32: Loss at step 1867: 4.54872
2017-11-07 15:15:34: Loss at step 1868: 4.52201
2017-11-07 15:15:36: Loss at step 1869: 4.50175
2017-11-07 15:15:38: Loss at step 1870: 4.53994
2017-11-07 15:15:39: Loss at step 1871: 4.52103
2017-11-07 15:15:41: Loss at step 1872: 4.54347
2017-11-07 15:15:43: Loss at step 1873: 4.54222
2017-11-07 15:15:45: Loss at step 1874: 4.54607
2017-11-07 15:15:47: Loss at step 1875: 4.51198
2017-11-07 15:15:49: Loss at step 1876: 4.53879
2017-11-07 15:15:51: Loss at step 1877: 4.55098
2017-11-07 15:15:53: Loss at step 1878: 4.53075
2017-11-07 15:15:54: Loss at step 1879: 4.55002
2017-11-07 15:15:56: Loss at step 1880: 4.52788
2017-11-07 15:15:58: Loss at step 1881: 4.51088
2017-11-07 15:16:00: Loss at step 1882: 4.53791
2017-11-07 15:16:02: Loss at step 1883: 4.53351
2017-11-07 15:16:04: Loss at step 1884: 4.51345
2017-11-07 15:16:06: Loss at step 1885: 4.5009
2017-11-07 15:16:08: Loss at step 1886: 4.52071
2017-11-07 15:16:09: Loss at step 1887: 4.53397
2017-11-07 15:16:11: Loss at step 1888: 4.54356
2017-11-07 15:16:13: Loss at step 1889: 4.53251
2017-11-07 15:16:15: Loss at step 1890: 4.54394
2017-11-07 15:16:17: Loss at step 1891: 4.54392
2017-11-07 15:16:19: Loss at step 1892: 4.5321
2017-11-07 15:16:21: Loss at step 1893: 4.5389
2017-11-07 15:16:22: Loss at step 1894: 4.53421
2017-11-07 15:16:24: Loss at step 1895: 4.51713
2017-11-07 15:16:26: Loss at step 1896: 4.53037
2017-11-07 15:16:28: Loss at step 1897: 4.53202
2017-11-07 15:16:30: Loss at step 1898: 4.53713
2017-11-07 15:16:32: Loss at step 1899: 4.53041
2017-11-07 15:16:34: Loss at step 1900: 4.53872
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-1900
2017-11-07 15:16:36: Loss at step 1901: 4.53622
2017-11-07 15:16:37: Loss at step 1902: 4.54622
2017-11-07 15:16:39: Loss at step 1903: 4.53742
2017-11-07 15:16:41: Loss at step 1904: 4.51822
2017-11-07 15:16:43: Loss at step 1905: 4.5253
2017-11-07 15:16:45: Loss at step 1906: 4.52458
2017-11-07 15:16:47: Loss at step 1907: 4.52145
2017-11-07 15:16:49: Loss at step 1908: 4.5369
2017-11-07 15:16:51: Loss at step 1909: 4.53453
2017-11-07 15:16:53: Loss at step 1910: 4.54337
2017-11-07 15:16:54: Loss at step 1911: 4.54177
2017-11-07 15:16:56: Loss at step 1912: 4.53423
2017-11-07 15:16:58: Loss at step 1913: 4.52254
2017-11-07 15:17:00: Loss at step 1914: 4.50368
2017-11-07 15:17:02: Loss at step 1915: 4.50711
2017-11-07 15:17:04: Loss at step 1916: 4.5115
2017-11-07 15:17:06: Loss at step 1917: 4.54773
2017-11-07 15:17:08: Loss at step 1918: 4.52128
2017-11-07 15:17:09: Loss at step 1919: 4.52452
2017-11-07 15:17:11: Loss at step 1920: 4.52518
2017-11-07 15:17:13: Loss at step 1921: 4.52214
2017-11-07 15:17:15: Loss at step 1922: 4.52684
2017-11-07 15:17:17: Loss at step 1923: 4.54333
2017-11-07 15:17:19: Loss at step 1924: 4.53821
2017-11-07 15:17:21: Loss at step 1925: 4.52534
2017-11-07 15:17:22: Loss at step 1926: 4.52765
2017-11-07 15:17:24: Loss at step 1927: 4.49099
2017-11-07 15:17:26: Loss at step 1928: 4.53683
2017-11-07 15:17:28: Loss at step 1929: 4.5201
2017-11-07 15:17:30: Loss at step 1930: 4.54288
2017-11-07 15:17:32: Loss at step 1931: 4.51679
2017-11-07 15:17:34: Loss at step 1932: 4.5474
2017-11-07 15:17:36: Loss at step 1933: 4.53133
2017-11-07 15:17:38: Loss at step 1934: 4.52448
2017-11-07 15:17:39: Loss at step 1935: 4.54575
2017-11-07 15:17:41: Loss at step 1936: 4.53275
2017-11-07 15:17:43: Loss at step 1937: 4.53218
2017-11-07 15:17:45: Loss at step 1938: 4.51054
2017-11-07 15:17:47: Loss at step 1939: 4.53939
2017-11-07 15:17:49: Loss at step 1940: 4.52613
2017-11-07 15:17:51: Loss at step 1941: 4.54192
2017-11-07 15:17:53: Loss at step 1942: 4.51968
2017-11-07 15:17:54: Loss at step 1943: 4.54146
2017-11-07 15:17:56: Loss at step 1944: 4.5126
2017-11-07 15:17:58: Loss at step 1945: 4.53705
2017-11-07 15:18:00: Loss at step 1946: 4.52797
2017-11-07 15:18:02: Loss at step 1947: 4.52254
2017-11-07 15:18:04: Loss at step 1948: 4.50688
2017-11-07 15:18:06: Loss at step 1949: 4.55423
2017-11-07 15:18:07: Loss at step 1950: 4.55157
2017-11-07 15:18:09: Loss at step 1951: 4.52683
2017-11-07 15:18:11: Loss at step 1952: 4.53045
2017-11-07 15:18:13: Loss at step 1953: 4.52708
2017-11-07 15:18:15: Loss at step 1954: 4.55688
2017-11-07 15:18:17: Loss at step 1955: 4.54076
2017-11-07 15:18:19: Loss at step 1956: 4.5265
2017-11-07 15:18:20: Loss at step 1957: 4.52945
2017-11-07 15:18:22: Loss at step 1958: 4.5167
2017-11-07 15:18:24: Loss at step 1959: 4.52225
2017-11-07 15:18:26: Loss at step 1960: 4.52932
2017-11-07 15:18:28: Loss at step 1961: 4.521
2017-11-07 15:18:30: Loss at step 1962: 4.53383
2017-11-07 15:18:32: Loss at step 1963: 4.51062
2017-11-07 15:18:33: Loss at step 1964: 4.49262
2017-11-07 15:18:35: Loss at step 1965: 4.53202
2017-11-07 15:18:37: Loss at step 1966: 4.53318
2017-11-07 15:18:39: Loss at step 1967: 4.54655
2017-11-07 15:18:41: Loss at step 1968: 4.51965
2017-11-07 15:18:43: Loss at step 1969: 4.53641
2017-11-07 15:18:45: Loss at step 1970: 4.53675
2017-11-07 15:18:47: Loss at step 1971: 4.54134
2017-11-07 15:18:48: Loss at step 1972: 4.51167
2017-11-07 15:18:50: Loss at step 1973: 4.50748
2017-11-07 15:18:52: Loss at step 1974: 4.52986
2017-11-07 15:18:54: Loss at step 1975: 4.52349
2017-11-07 15:18:56: Loss at step 1976: 4.51923
2017-11-07 15:18:58: Loss at step 1977: 4.51119
2017-11-07 15:19:00: Loss at step 1978: 4.5204
2017-11-07 15:19:01: Loss at step 1979: 4.52014
2017-11-07 15:19:03: Loss at step 1980: 4.5288
2017-11-07 15:19:05: Loss at step 1981: 4.51823
2017-11-07 15:19:07: Loss at step 1982: 4.53902
2017-11-07 15:19:09: Loss at step 1983: 4.53066
2017-11-07 15:19:11: Loss at step 1984: 4.53805
2017-11-07 15:19:13: Loss at step 1985: 4.5348
2017-11-07 15:19:15: Loss at step 1986: 4.51255
2017-11-07 15:19:16: Loss at step 1987: 4.52283
2017-11-07 15:19:18: Loss at step 1988: 4.53933
2017-11-07 15:19:20: Loss at step 1989: 4.52818
2017-11-07 15:19:22: Loss at step 1990: 4.48804
2017-11-07 15:19:24: Loss at step 1991: 4.50844
2017-11-07 15:19:26: Loss at step 1992: 4.51249
2017-11-07 15:19:28: Loss at step 1993: 4.50279
2017-11-07 15:19:29: Loss at step 1994: 4.53305
2017-11-07 15:19:31: Loss at step 1995: 4.50231
2017-11-07 15:19:33: Loss at step 1996: 4.51307
2017-11-07 15:19:35: Loss at step 1997: 4.53807
2017-11-07 15:19:37: Loss at step 1998: 4.5323
2017-11-07 15:19:39: Loss at step 1999: 4.49931
2017-11-07 15:19:41: Loss at step 2000: 4.52359
Model saved in file: ./saves.tf.Mines6/(12, 12)/model-2000
2017-11-07 15:19:43: Loss at step 2001: 4.53256
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-14-77e251ad031b> in <module>()
      1 # Train
      2 for iteration in range(10001):
----> 3     batch_xs, batch_ys, _ = next_training_batch(1000)
      4     summary, loss, _ = sess.run([merged, cross_entropy, train_step],
      5                                 feed_dict={mineCountsOneHot: batch_xs, validGuessAverages: batch_ys})

<ipython-input-5-15fb2e3cfda7> in next_training_batch(n)
      6     for _ in range(n):
      7         board = np.random.random(dimensions) < mineProbability
----> 8         counts = boardPartialMineCounts(board)
      9         validGuesses = np.append(((counts == -1).astype(int) - board).flatten().astype(float),
     10                                  board.flatten().astype(float))

<ipython-input-4-4fcb378700ff> in boardPartialMineCounts(board)
      6         row,col = index
      7         if not(x) and result[row,col] == -1 and r.uniform(0,1) < clearProbability:
----> 8             clearSquare(board,result,row,col)
      9     return result

<ipython-input-3-547dcf1a9399> in clearSquare(board, adjacency, row, col)
     18     for c in range(col-1,col+2):
     19         if 0 <= r and r < rows and 0 <= c and c < cols:
---> 20             clearSquare(board,adjacency,r,c)
     21     return False

... (the same clearSquare frame repeats as the flood fill recurses; repeated frames omitted) ...

<ipython-input-3-547dcf1a9399> in clearSquare(board, adjacency, row, col)
     12     for c in range(col-1,col+2):
     13         if 0 <= r and r < rows and 0 <= c and c < cols:
---> 14             n += board[r,c]
     15     adjacency[row,col] = n
     16     if n == 0:

KeyboardInterrupt:
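The interrupt happened to land deep inside clearSquare's recursive flood fill. On a 12x12 board the recursion depth stays well below Python's default limit of 1000 frames, but on much larger boards it would not, and the per-call overhead also makes this the slow part of batch generation. A minimal iterative sketch, using the same board/adjacency conventions as clearSquare above, with an explicit stack in place of recursion:
# A sketch: iterative flood fill equivalent to the recursive clearSquare above,
# driven by an explicit stack so the depth no longer grows with the cleared region.
def clearSquareIterative(board, adjacency, row, col):
    if board[row, col] == 1:
        return True
    stack = [(row, col)]
    while stack:
        rr, cc = stack.pop()
        if adjacency[rr, cc] >= 0:
            continue  # already revealed
        n = 0
        for r2 in range(rr - 1, rr + 2):
            for c2 in range(cc - 1, cc + 2):
                if 0 <= r2 < rows and 0 <= c2 < cols:
                    n += board[r2, c2]
        adjacency[rr, cc] = n
        if n == 0:
            # No adjacent mines: reveal all unrevealed neighbours too
            for r2 in range(rr - 1, rr + 2):
                for c2 in range(cc - 1, cc + 2):
                    if 0 <= r2 < rows and 0 <= c2 < cols and adjacency[r2, c2] < 0:
                        stack.append((r2, c2))
    return False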
In [15]:
# Test trained model on larger batch size
batch_xs, batch_ys, _ = next_training_batch(10000)
print(sess.run(cross_entropy, feed_dict={mineCountsOneHot: batch_xs, validGuessAverages: batch_ys}))
4.52306
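Note that because validGuessAverages holds full probability distributions rather than one-hot targets, the softmax cross-entropy is bounded below by the mean entropy of the labels themselves, so a loss of 4.52 should not be read against a floor of zero. A quick baseline check, a sketch reusing batch_ys from the cell above:
# Lower bound on the achievable loss: the mean entropy of the label distributions
labelEntropy = -np.sum(batch_ys * np.log(batch_ys + 1e-12), axis=1).mean()
print("Mean label entropy (loss floor):", labelEntropy)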
In [16]:
# Run a test: for each board, take the single most confident guess and check
# whether it was actually valid (probability 0 in the labels marks an invalid guess)
batchSize = 10000
batch_xs, batch_ys, _ = next_training_batch(batchSize)
predictions = sess.run(tf.nn.softmax(y), feed_dict={mineCountsOneHot: batch_xs, validGuessAverages: batch_ys})
bestSquares = [pred.argmax() for pred in predictions]
unfrees = (batch_ys == 0).astype(int)  # 1 wherever a guess would be wrong
misses = [unfrees[i][bestSquares[i]] for i in range(batchSize)]
print("Number of errors for batch size of ", batchSize)
print(sum(misses))
Number of errors for batch size of 10000
240
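In other words, the model's single most confident guess is wrong on 240 of 10,000 boards, a 2.4% first-guess error rate. Expressed directly, reusing misses and batchSize from the cell above:
print("First-guess error rate: {:.2%}".format(sum(misses) / batchSize))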
In [ ]:
# Find boards that we failed on and log them as images for TensorBoard
batchSize = 1000
batch_xs, batch_ys, _ = next_training_batch(batchSize)
predictions = sess.run(tf.nn.softmax(y), feed_dict={mineCountsOneHot: batch_xs, validGuessAverages: batch_ys})
bestSquares = [pred.argmax() for pred in predictions]
unfrees = (batch_ys == 0).astype(int)
guesses = [unfrees[i][bestSquares[i]] for i in range(batchSize)]
for i in range(batchSize):
    if guesses[i] == 1:
        # batch_xs holds the one-hot encoding, so decode it back to
        # mine counts (-1 for unknown squares) before displaying
        counts = batch_xs[i].reshape(size, 10).argmax(axis=1) - 1
        print(counts.reshape(dimensions))
        summary = sess.run(tf.summary.image('mine_miss',
                                            tf.reshape((counts + 1).astype(np.float32), [-1, rows, cols, 1]), 100))
        writer.add_summary(summary)
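The event file is written to the current directory (the FileWriter above was created with '.'), so the logged mine_miss images can be browsed with tensorboard --logdir . alongside the loss and weight summaries.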
In [ ]:
#batch_xs = [[-1,1,-1,0,0,0,-1,-1,-1,1,1,1,-1,-1,1,2,2,1,1,-1,2,1,1,-1,-1,2,2,-1,-1,2,-1,1,1,0,-1,-1,2,-1,-1,4,-1,2,-1,1,2,-1,1,0,1,2,-1,3,2,2,1,-1,2,-1,1,0,0,1,1,-1,-1,-1,-1,-1,-1,1,1,-1,-1,0,0,3,-1,4,1,2,-1,1,-1,-1,0,0,0,2,-1,-1,-1,2,-1,1,0,0,0,-1,1,2,-1,2,1,2,2,3,3,2,-1,-1,1,-1,1,-1,0,1,2,-1,-1,-1,1,1,1,-1,1,0,-1,-1,-1,-1,-1,-1,-1,-1,0,-1,-1,-1,-1,-1,1,-1,-1,-1]]
# Hand-craft a single board: all squares unknown except two 1s in the
# top-left corner and a 1 directly below
batch_xs0 = np.full(size, -1)
batch_xs0[0] = 1
batch_xs0[1] = 1
batch_xs0[cols] = 1
# The placeholder expects the size*10 one-hot encoding, so encode the counts first
oneHot0 = np.zeros((size, 10))
oneHot0[np.arange(size), batch_xs0 + 1] = 1
predictions = sess.run(tf.nn.softmax(y), feed_dict={mineCountsOneHot: [oneHot0.flatten()]})
bestSquares = [pred.argmax() for pred in predictions]
# Indices below size mean "clear this square"; indices >= size mean "mine at index - size"
print(bestSquares[0] // cols, bestSquares[0] % cols)
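Since y has size*2 outputs, the raw argmax mixes two kinds of guess: indices below size mean "this square is safe to clear" and the rest mean "there is a mine at index - size". A small helper makes that explicit (decodeGuess is an illustrative name, not part of the notebook):
# Hypothetical helper: turn an output index into an (action, row, col) triple
def decodeGuess(index):
    if index < size:
        return "clear", index // cols, index % cols
    index -= size
    return "mine", index // cols, index % cols

print(decodeGuess(bestSquares[0]))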
In [17]:
np.save("./W", sess.run(W))
In [18]:
np.save("./b", sess.run(b))
In [19]:
# Save both weights in one archive under explicit names; passing the raw list
# from sess.run([W,b]) would try to stack two differently-shaped arrays
W_val, b_val = sess.run([W, b])
np.savez("./model", W=W_val, b=b_val)
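With the weights saved under explicit names, a later session can score boards without TensorFlow at all. A sketch, assuming the oneHot0 encoding and the decodeGuess helper from the cells above:
# Reload the exported weights and compute the logits in plain numpy
model = np.load("./model.npz")
logits = oneHot0.flatten() @ model["W"] + model["b"]
print(decodeGuess(logits.argmax()))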