In [ ]:
# Softmax classifier for guessing a mine-free square on a Minesweeper board

In [1]:
# Import libraries for simulation
import tensorflow as tf
import numpy as np
import random as r
import datetime as dt


/home/ruben/anaconda3/lib/python3.6/importlib/_bootstrap.py:219: RuntimeWarning: compiletime version 3.5 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.6
  return f(*args, **kwds)

In [2]:
dimensions = (12,12)
mineProbability = 0.16      # Probability that a square contains a mine (about 0.16 * 144 ≈ 23 mines on a 12x12 board)

In [3]:
# Builds the 0/1 matrix that maps a flattened mine board to a flattened vector of
# per-square mine counts; the Chebyshev test below includes the square itself, so a
# mine also counts towards its own square (those squares are masked out later anyway)
def minesweepMatrix(dimensions):
    rows,cols = dimensions
    size = rows * cols
    A = np.zeros([size,size],dtype=int)
    for rA in range(size):
        for cA in range(size):
            inRow, inCol = divmod(rA,cols)
            outRow, outCol = divmod(cA,cols)
            A[rA,cA] = abs(inRow-outRow) <= 1 and abs(inCol-outCol) <= 1
    return(A)
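
A quick sanity check may help illustrate this matrix (the snippet below is an illustrative addition, not part of the original run): on a 3x3 board, the centre square's row should mark all nine squares, while a corner square's row marks only its 2x2 neighbourhood.

A = minesweepMatrix((3, 3))
print(A[4])   # centre square (1,1): [1 1 1 1 1 1 1 1 1]
print(A[0])   # corner square (0,0): [1 1 0 1 1 0 0 0 0]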

In [4]:
# Converts a board of mines into a board of mine counts
def boardMineCounts(board):
    return(minesweepMatrix(board.shape).dot(board.flatten()).reshape(board.shape))
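
As an illustrative example (not from the original run), a single mine in the middle of a 3x3 board gives a count of 1 on every square, including the mine's own square, because the neighbourhood test above includes the square itself:

mineDemo = np.zeros((3, 3), dtype=int)
mineDemo[1, 1] = 1
print(boardMineCounts(mineDemo))   # every entry is 1, the mine square included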

In [5]:
# Takes a mine board and returns its mine counts, with mine squares and a random fraction of the other squares hidden (marked -1)
def boardPartialMineCounts(board):
    missingProbability = r.uniform(0.05,0.8)
    result = boardMineCounts(board)
    for index, x in np.ndenumerate(board):
        if x: result[index] = -1
        elif r.uniform(0, 1) < missingProbability: result[index] = -1
    return result
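
A small illustration (an assumed addition, not part of the original run): mine squares and a random fraction of the remaining squares come back as -1, while everything else keeps its neighbour count.

demoBoard = np.random.random((4, 4)) < mineProbability
print(boardPartialMineCounts(demoBoard))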

In [6]:
# Generates a random training batch of size at most n; boards with no hidden
# mine-free square are skipped, so the batch can come out smaller than n
def next_training_batch(n):
    batch_xs = []
    batch_ys = []
    for _ in range(n):
        board = np.random.random(dimensions) < mineProbability
        counts = boardPartialMineCounts(board)
        frees = ((counts == -1).astype(int) - board).flatten().astype(float)    # 1 exactly on the hidden, mine-free squares
        freesSum = sum(frees)
        if freesSum > 0:
            batch_xs.append(counts.flatten())
            batch_ys.append(frees / freesSum)
    return (np.asarray(batch_xs), np.asarray(batch_ys))
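
An illustrative check of the batch shapes (not part of the original run): each input row has one entry per square, and each label row is a probability distribution over the hidden mine-free squares, so it sums to 1.

xs, ys = next_training_batch(5)
print(xs.shape, ys.shape)   # (k, 144) with k <= 5
print(ys.sum(axis=1))       # each row sums to 1.0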

In [8]:
# Create the model
rows, cols = dimensions
size = rows*cols
mineCounts = tf.placeholder(tf.int32, [None, size], name="mineCounts")
# Counts range over -1..8 (-1 means hidden or mine), so count+1 gives class indices 0..9
# and a one-hot depth of 10 covers every possible value
mineCountsOneHot = tf.reshape(tf.one_hot(mineCounts+1,10), [-1, size*10])
W = tf.Variable(tf.random_normal([size*10, size], stddev=0.01), name="W")
b = tf.Variable(tf.random_normal([size], stddev=0.01), name="b")
y = tf.matmul(mineCountsOneHot, W) + b

In [9]:
mineFreeAverages = tf.placeholder(tf.float32, [None, size], name="mineFreeAverages")

In [10]:
# Loss function
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=mineFreeAverages, logits=y))
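
Because the labels are soft (a probability distribution over the hidden mine-free squares rather than a single class index), this loss is the cross-entropy between that distribution and the softmax of the logits. A minimal NumPy sketch of the same per-example quantity, purely for illustration:

def soft_cross_entropy(p, z):
    # p: target distribution over squares, z: raw logits for one board
    z = z - z.max()                             # shift for numerical stability
    log_softmax = z - np.log(np.exp(z).sum())
    return -(p * log_softmax).sum()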

In [11]:
# Summaries for tensorboard
with tf.name_scope('W_reshape'):
    image_shaped_W = tf.reshape(W, [-1, size*10, size, 1])
    tf.summary.image('W', image_shaped_W, 1000)

with tf.name_scope('b_reshape'):
    image_shaped_b = tf.reshape(b, [-1, rows, cols, 1])
    tf.summary.image('b', image_shaped_b, 1000)

_ = tf.summary.scalar('cross_entropy', cross_entropy)   # this scalar is the loss, not an accuracy

In [12]:
# Optimiser
train_step = tf.train.AdamOptimizer().minimize(cross_entropy)

In [13]:
# Create the session, saver and TensorBoard writer; variables are initialised (or restored from a checkpoint) below
savePath = './saves.tf.Mines4/' + str(dimensions) + '/'
saver = tf.train.Saver()

sess = tf.InteractiveSession()

merged = tf.summary.merge_all()
writer = tf.summary.FileWriter('.', sess.graph)

In [14]:
tf.global_variables_initializer().run()

In [15]:
# Restore model?
#saver.restore(sess, savePath + "model-10000")

In [16]:
# Train
for iteration in range(10001):
    batch_xs, batch_ys = next_training_batch(100)
    if iteration % 10 == 0:
        summary, loss, _ = sess.run([merged, cross_entropy, train_step],
                                   feed_dict={mineCounts: batch_xs, mineFreeAverages: batch_ys})
        writer.add_summary(summary, iteration)
        print('%s: Loss at step %s: %s' % (dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), iteration, loss))
    else:
        _ = sess.run(train_step, feed_dict={mineCounts: batch_xs, mineFreeAverages: batch_ys})
    if iteration % 1000 == 0:
        save_path = saver.save(sess, savePath + 'model', global_step=iteration)
        print("Model saved in file: %s" % save_path)


2017-11-06 09:47:57: Loss at step 0: 4.97615
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-0
2017-11-06 09:48:03: Loss at step 10: 4.96654
2017-11-06 09:48:10: Loss at step 20: 4.95619
2017-11-06 09:48:17: Loss at step 30: 4.95212
2017-11-06 09:48:23: Loss at step 40: 4.94011
2017-11-06 09:48:30: Loss at step 50: 4.93051
2017-11-06 09:48:36: Loss at step 60: 4.92285
2017-11-06 09:48:43: Loss at step 70: 4.91178
2017-11-06 09:48:49: Loss at step 80: 4.89853
2017-11-06 09:48:56: Loss at step 90: 4.89784
2017-11-06 09:49:03: Loss at step 100: 4.88127
2017-11-06 09:49:09: Loss at step 110: 4.87443
2017-11-06 09:49:16: Loss at step 120: 4.86731
2017-11-06 09:49:22: Loss at step 130: 4.85594
2017-11-06 09:49:29: Loss at step 140: 4.84375
2017-11-06 09:49:35: Loss at step 150: 4.83709
2017-11-06 09:49:42: Loss at step 160: 4.83293
2017-11-06 09:49:48: Loss at step 170: 4.83216
2017-11-06 09:49:55: Loss at step 180: 4.79398
2017-11-06 09:50:01: Loss at step 190: 4.82019
2017-11-06 09:50:08: Loss at step 200: 4.79496
2017-11-06 09:50:15: Loss at step 210: 4.79184
2017-11-06 09:50:21: Loss at step 220: 4.77996
2017-11-06 09:50:28: Loss at step 230: 4.75758
2017-11-06 09:50:34: Loss at step 240: 4.76043
2017-11-06 09:50:41: Loss at step 250: 4.73649
2017-11-06 09:50:47: Loss at step 260: 4.76994
2017-11-06 09:50:54: Loss at step 270: 4.76078
2017-11-06 09:51:00: Loss at step 280: 4.74593
2017-11-06 09:51:07: Loss at step 290: 4.74122
2017-11-06 09:51:13: Loss at step 300: 4.72421
2017-11-06 09:51:20: Loss at step 310: 4.72434
2017-11-06 09:51:26: Loss at step 320: 4.6974
2017-11-06 09:51:33: Loss at step 330: 4.71213
2017-11-06 09:51:39: Loss at step 340: 4.68157
2017-11-06 09:51:46: Loss at step 350: 4.68245
2017-11-06 09:51:53: Loss at step 360: 4.70058
2017-11-06 09:51:59: Loss at step 370: 4.64064
2017-11-06 09:52:06: Loss at step 380: 4.67164
2017-11-06 09:52:12: Loss at step 390: 4.68231
2017-11-06 09:52:19: Loss at step 400: 4.67395
2017-11-06 09:52:25: Loss at step 410: 4.65682
2017-11-06 09:52:32: Loss at step 420: 4.65388
2017-11-06 09:52:38: Loss at step 430: 4.61493
2017-11-06 09:52:45: Loss at step 440: 4.65259
2017-11-06 09:52:51: Loss at step 450: 4.63916
2017-11-06 09:52:58: Loss at step 460: 4.59036
2017-11-06 09:53:05: Loss at step 470: 4.61649
2017-11-06 09:53:11: Loss at step 480: 4.61741
2017-11-06 09:53:18: Loss at step 490: 4.60699
2017-11-06 09:53:24: Loss at step 500: 4.60255
2017-11-06 09:53:31: Loss at step 510: 4.60705
2017-11-06 09:53:37: Loss at step 520: 4.60459
2017-11-06 09:53:44: Loss at step 530: 4.58854
2017-11-06 09:53:50: Loss at step 540: 4.55304
2017-11-06 09:53:57: Loss at step 550: 4.54314
2017-11-06 09:54:03: Loss at step 560: 4.54127
2017-11-06 09:54:10: Loss at step 570: 4.55408
2017-11-06 09:54:16: Loss at step 580: 4.57075
2017-11-06 09:54:23: Loss at step 590: 4.55191
2017-11-06 09:54:30: Loss at step 600: 4.52598
2017-11-06 09:54:36: Loss at step 610: 4.55419
2017-11-06 09:54:43: Loss at step 620: 4.53248
2017-11-06 09:54:49: Loss at step 630: 4.5103
2017-11-06 09:54:56: Loss at step 640: 4.52329
2017-11-06 09:55:02: Loss at step 650: 4.5268
2017-11-06 09:55:09: Loss at step 660: 4.52185
2017-11-06 09:55:15: Loss at step 670: 4.50226
2017-11-06 09:55:22: Loss at step 680: 4.47818
2017-11-06 09:55:29: Loss at step 690: 4.48609
2017-11-06 09:55:35: Loss at step 700: 4.518
2017-11-06 09:55:42: Loss at step 710: 4.52432
2017-11-06 09:55:48: Loss at step 720: 4.47831
2017-11-06 09:55:55: Loss at step 730: 4.45544
2017-11-06 09:56:01: Loss at step 740: 4.44384
2017-11-06 09:56:08: Loss at step 750: 4.4535
2017-11-06 09:56:14: Loss at step 760: 4.46461
2017-11-06 09:56:21: Loss at step 770: 4.45378
2017-11-06 09:56:28: Loss at step 780: 4.49404
2017-11-06 09:56:34: Loss at step 790: 4.44366
2017-11-06 09:56:41: Loss at step 800: 4.46649
2017-11-06 09:56:47: Loss at step 810: 4.45485
2017-11-06 09:56:54: Loss at step 820: 4.40892
2017-11-06 09:57:00: Loss at step 830: 4.46788
2017-11-06 09:57:07: Loss at step 840: 4.44203
2017-11-06 09:57:14: Loss at step 850: 4.40416
2017-11-06 09:57:20: Loss at step 860: 4.44318
2017-11-06 09:57:27: Loss at step 870: 4.44371
2017-11-06 09:57:33: Loss at step 880: 4.43379
2017-11-06 09:57:40: Loss at step 890: 4.38827
2017-11-06 09:57:46: Loss at step 900: 4.39187
2017-11-06 09:57:53: Loss at step 910: 4.41047
2017-11-06 09:58:00: Loss at step 920: 4.36456
2017-11-06 09:58:06: Loss at step 930: 4.37739
2017-11-06 09:58:13: Loss at step 940: 4.41493
2017-11-06 09:58:19: Loss at step 950: 4.38192
2017-11-06 09:58:26: Loss at step 960: 4.36409
2017-11-06 09:58:32: Loss at step 970: 4.38259
2017-11-06 09:58:39: Loss at step 980: 4.37626
2017-11-06 09:58:45: Loss at step 990: 4.43388
2017-11-06 09:58:52: Loss at step 1000: 4.40451
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-1000
2017-11-06 09:58:59: Loss at step 1010: 4.38419
2017-11-06 09:59:05: Loss at step 1020: 4.37407
2017-11-06 09:59:12: Loss at step 1030: 4.3448
2017-11-06 09:59:18: Loss at step 1040: 4.35358
2017-11-06 09:59:25: Loss at step 1050: 4.36724
2017-11-06 09:59:31: Loss at step 1060: 4.39544
2017-11-06 09:59:38: Loss at step 1070: 4.32642
2017-11-06 09:59:45: Loss at step 1080: 4.3393
2017-11-06 09:59:51: Loss at step 1090: 4.41686
2017-11-06 09:59:58: Loss at step 1100: 4.30179
2017-11-06 10:00:04: Loss at step 1110: 4.28377
2017-11-06 10:00:11: Loss at step 1120: 4.30603
2017-11-06 10:00:17: Loss at step 1130: 4.32419
2017-11-06 10:00:24: Loss at step 1140: 4.35282
2017-11-06 10:00:31: Loss at step 1150: 4.37626
2017-11-06 10:00:37: Loss at step 1160: 4.34785
2017-11-06 10:00:44: Loss at step 1170: 4.39643
2017-11-06 10:00:51: Loss at step 1180: 4.32525
2017-11-06 10:00:57: Loss at step 1190: 4.31764
2017-11-06 10:01:04: Loss at step 1200: 4.33533
2017-11-06 10:01:11: Loss at step 1210: 4.30678
2017-11-06 10:01:18: Loss at step 1220: 4.3588
2017-11-06 10:01:24: Loss at step 1230: 4.27834
2017-11-06 10:01:31: Loss at step 1240: 4.31612
2017-11-06 10:01:37: Loss at step 1250: 4.37383
2017-11-06 10:01:44: Loss at step 1260: 4.28146
2017-11-06 10:01:51: Loss at step 1270: 4.36217
2017-11-06 10:01:57: Loss at step 1280: 4.33023
2017-11-06 10:02:04: Loss at step 1290: 4.26849
2017-11-06 10:02:11: Loss at step 1300: 4.28455
2017-11-06 10:02:17: Loss at step 1310: 4.36996
2017-11-06 10:02:24: Loss at step 1320: 4.29356
2017-11-06 10:02:30: Loss at step 1330: 4.32483
2017-11-06 10:02:37: Loss at step 1340: 4.3149
2017-11-06 10:02:44: Loss at step 1350: 4.26867
2017-11-06 10:02:50: Loss at step 1360: 4.2995
2017-11-06 10:02:57: Loss at step 1370: 4.32713
2017-11-06 10:03:04: Loss at step 1380: 4.3125
2017-11-06 10:03:10: Loss at step 1390: 4.26903
2017-11-06 10:03:17: Loss at step 1400: 4.30862
2017-11-06 10:03:23: Loss at step 1410: 4.36798
2017-11-06 10:03:30: Loss at step 1420: 4.31148
2017-11-06 10:03:37: Loss at step 1430: 4.2647
2017-11-06 10:03:43: Loss at step 1440: 4.26318
2017-11-06 10:03:50: Loss at step 1450: 4.29001
2017-11-06 10:03:57: Loss at step 1460: 4.25563
2017-11-06 10:04:03: Loss at step 1470: 4.26651
2017-11-06 10:04:10: Loss at step 1480: 4.27608
2017-11-06 10:04:16: Loss at step 1490: 4.2755
2017-11-06 10:04:23: Loss at step 1500: 4.2518
2017-11-06 10:04:29: Loss at step 1510: 4.27719
2017-11-06 10:04:36: Loss at step 1520: 4.25329
2017-11-06 10:04:43: Loss at step 1530: 4.28812
2017-11-06 10:04:49: Loss at step 1540: 4.25919
2017-11-06 10:04:56: Loss at step 1550: 4.20095
2017-11-06 10:05:02: Loss at step 1560: 4.28759
2017-11-06 10:05:09: Loss at step 1570: 4.21236
2017-11-06 10:05:15: Loss at step 1580: 4.28634
2017-11-06 10:05:22: Loss at step 1590: 4.24999
2017-11-06 10:05:29: Loss at step 1600: 4.26449
2017-11-06 10:05:35: Loss at step 1610: 4.22481
2017-11-06 10:05:42: Loss at step 1620: 4.2831
2017-11-06 10:05:48: Loss at step 1630: 4.24842
2017-11-06 10:05:55: Loss at step 1640: 4.3063
2017-11-06 10:06:01: Loss at step 1650: 4.20385
2017-11-06 10:06:08: Loss at step 1660: 4.32671
2017-11-06 10:06:15: Loss at step 1670: 4.23345
2017-11-06 10:06:21: Loss at step 1680: 4.29692
2017-11-06 10:06:28: Loss at step 1690: 4.22474
2017-11-06 10:06:34: Loss at step 1700: 4.23527
2017-11-06 10:06:41: Loss at step 1710: 4.21025
2017-11-06 10:06:47: Loss at step 1720: 4.28632
2017-11-06 10:06:54: Loss at step 1730: 4.29179
2017-11-06 10:07:01: Loss at step 1740: 4.19269
2017-11-06 10:07:07: Loss at step 1750: 4.23707
2017-11-06 10:07:14: Loss at step 1760: 4.23297
2017-11-06 10:07:20: Loss at step 1770: 4.21388
2017-11-06 10:07:27: Loss at step 1780: 4.22434
2017-11-06 10:07:33: Loss at step 1790: 4.17666
2017-11-06 10:07:40: Loss at step 1800: 4.18607
2017-11-06 10:07:47: Loss at step 1810: 4.17475
2017-11-06 10:07:53: Loss at step 1820: 4.1837
2017-11-06 10:08:00: Loss at step 1830: 4.24967
2017-11-06 10:08:06: Loss at step 1840: 4.26028
2017-11-06 10:08:13: Loss at step 1850: 4.22317
2017-11-06 10:08:19: Loss at step 1860: 4.26801
2017-11-06 10:08:26: Loss at step 1870: 4.22046
2017-11-06 10:08:33: Loss at step 1880: 4.24806
2017-11-06 10:08:39: Loss at step 1890: 4.23694
2017-11-06 10:08:46: Loss at step 1900: 4.10268
2017-11-06 10:08:52: Loss at step 1910: 4.19341
2017-11-06 10:08:59: Loss at step 1920: 4.25718
2017-11-06 10:09:05: Loss at step 1930: 4.23693
2017-11-06 10:09:12: Loss at step 1940: 4.16401
2017-11-06 10:09:19: Loss at step 1950: 4.17932
2017-11-06 10:09:25: Loss at step 1960: 4.19248
2017-11-06 10:09:32: Loss at step 1970: 4.20138
2017-11-06 10:09:38: Loss at step 1980: 4.20385
2017-11-06 10:09:45: Loss at step 1990: 4.11774
2017-11-06 10:09:51: Loss at step 2000: 4.19127
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-2000
2017-11-06 10:09:58: Loss at step 2010: 4.24906
2017-11-06 10:10:05: Loss at step 2020: 4.18375
2017-11-06 10:10:11: Loss at step 2030: 4.10548
2017-11-06 10:10:18: Loss at step 2040: 4.11203
2017-11-06 10:10:24: Loss at step 2050: 4.14423
2017-11-06 10:10:31: Loss at step 2060: 4.15103
2017-11-06 10:10:38: Loss at step 2070: 4.17229
2017-11-06 10:10:44: Loss at step 2080: 4.18241
2017-11-06 10:10:51: Loss at step 2090: 4.2076
2017-11-06 10:10:58: Loss at step 2100: 4.28793
2017-11-06 10:11:04: Loss at step 2110: 4.13907
2017-11-06 10:11:11: Loss at step 2120: 4.08982
2017-11-06 10:11:17: Loss at step 2130: 4.26315
2017-11-06 10:11:24: Loss at step 2140: 4.14049
2017-11-06 10:11:30: Loss at step 2150: 4.11696
2017-11-06 10:11:37: Loss at step 2160: 4.26217
2017-11-06 10:11:44: Loss at step 2170: 4.17251
2017-11-06 10:11:51: Loss at step 2180: 4.2237
2017-11-06 10:11:57: Loss at step 2190: 4.18976
2017-11-06 10:12:04: Loss at step 2200: 4.09299
2017-11-06 10:12:10: Loss at step 2210: 4.13581
2017-11-06 10:12:17: Loss at step 2220: 4.22351
2017-11-06 10:12:23: Loss at step 2230: 4.17171
2017-11-06 10:12:30: Loss at step 2240: 4.20429
2017-11-06 10:12:37: Loss at step 2250: 4.19128
2017-11-06 10:12:43: Loss at step 2260: 4.22285
2017-11-06 10:12:50: Loss at step 2270: 4.18556
2017-11-06 10:12:56: Loss at step 2280: 4.12125
2017-11-06 10:13:03: Loss at step 2290: 4.22167
2017-11-06 10:13:09: Loss at step 2300: 4.2584
2017-11-06 10:13:16: Loss at step 2310: 4.26631
2017-11-06 10:13:23: Loss at step 2320: 4.13728
2017-11-06 10:13:29: Loss at step 2330: 4.08672
2017-11-06 10:13:36: Loss at step 2340: 4.20136
2017-11-06 10:13:42: Loss at step 2350: 4.08508
2017-11-06 10:13:49: Loss at step 2360: 4.12576
2017-11-06 10:13:55: Loss at step 2370: 4.21913
2017-11-06 10:14:02: Loss at step 2380: 4.14943
2017-11-06 10:14:09: Loss at step 2390: 4.20678
2017-11-06 10:14:15: Loss at step 2400: 4.26511
2017-11-06 10:14:22: Loss at step 2410: 4.1636
2017-11-06 10:14:28: Loss at step 2420: 4.14464
2017-11-06 10:14:35: Loss at step 2430: 4.22459
2017-11-06 10:14:41: Loss at step 2440: 4.20437
2017-11-06 10:14:48: Loss at step 2450: 4.15486
2017-11-06 10:14:55: Loss at step 2460: 4.14311
2017-11-06 10:15:01: Loss at step 2470: 4.19345
2017-11-06 10:15:08: Loss at step 2480: 4.20398
2017-11-06 10:15:14: Loss at step 2490: 4.15762
2017-11-06 10:15:21: Loss at step 2500: 4.14397
2017-11-06 10:15:28: Loss at step 2510: 4.15766
2017-11-06 10:15:34: Loss at step 2520: 4.13582
2017-11-06 10:15:41: Loss at step 2530: 4.20924
2017-11-06 10:15:47: Loss at step 2540: 4.16725
2017-11-06 10:15:54: Loss at step 2550: 4.15265
2017-11-06 10:16:00: Loss at step 2560: 4.13177
2017-11-06 10:16:07: Loss at step 2570: 4.22356
2017-11-06 10:16:14: Loss at step 2580: 4.15357
2017-11-06 10:16:20: Loss at step 2590: 4.16866
2017-11-06 10:16:27: Loss at step 2600: 4.12164
2017-11-06 10:16:33: Loss at step 2610: 4.13827
2017-11-06 10:16:40: Loss at step 2620: 4.23864
2017-11-06 10:16:46: Loss at step 2630: 4.21023
2017-11-06 10:16:53: Loss at step 2640: 4.04023
2017-11-06 10:17:00: Loss at step 2650: 4.23305
2017-11-06 10:17:06: Loss at step 2660: 4.17951
2017-11-06 10:17:13: Loss at step 2670: 4.13174
2017-11-06 10:17:19: Loss at step 2680: 4.17081
2017-11-06 10:17:26: Loss at step 2690: 4.21708
2017-11-06 10:17:32: Loss at step 2700: 4.25785
2017-11-06 10:17:39: Loss at step 2710: 4.14021
2017-11-06 10:17:46: Loss at step 2720: 4.21055
2017-11-06 10:17:52: Loss at step 2730: 4.14553
2017-11-06 10:17:59: Loss at step 2740: 4.18764
2017-11-06 10:18:05: Loss at step 2750: 4.15765
2017-11-06 10:18:12: Loss at step 2760: 4.15743
2017-11-06 10:18:19: Loss at step 2770: 4.12101
2017-11-06 10:18:25: Loss at step 2780: 4.17707
2017-11-06 10:18:32: Loss at step 2790: 4.17764
2017-11-06 10:18:38: Loss at step 2800: 4.10617
2017-11-06 10:18:45: Loss at step 2810: 4.05636
2017-11-06 10:18:51: Loss at step 2820: 4.19613
2017-11-06 10:18:58: Loss at step 2830: 4.18324
2017-11-06 10:19:05: Loss at step 2840: 4.12712
2017-11-06 10:19:11: Loss at step 2850: 4.06
2017-11-06 10:19:18: Loss at step 2860: 4.04957
2017-11-06 10:19:24: Loss at step 2870: 4.19397
2017-11-06 10:19:31: Loss at step 2880: 4.05191
2017-11-06 10:19:37: Loss at step 2890: 4.19829
2017-11-06 10:19:44: Loss at step 2900: 4.12784
2017-11-06 10:19:51: Loss at step 2910: 4.06141
2017-11-06 10:19:57: Loss at step 2920: 4.14141
2017-11-06 10:20:04: Loss at step 2930: 4.12923
2017-11-06 10:20:10: Loss at step 2940: 4.12915
2017-11-06 10:20:17: Loss at step 2950: 4.11584
2017-11-06 10:20:24: Loss at step 2960: 4.11043
2017-11-06 10:20:30: Loss at step 2970: 4.19596
2017-11-06 10:20:37: Loss at step 2980: 4.12839
2017-11-06 10:20:43: Loss at step 2990: 4.08932
2017-11-06 10:20:50: Loss at step 3000: 4.26011
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-3000
2017-11-06 10:20:56: Loss at step 3010: 4.16971
2017-11-06 10:21:03: Loss at step 3020: 4.15102
2017-11-06 10:21:09: Loss at step 3030: 4.10149
2017-11-06 10:21:16: Loss at step 3040: 4.15389
2017-11-06 10:21:23: Loss at step 3050: 4.11659
2017-11-06 10:21:29: Loss at step 3060: 4.12636
2017-11-06 10:21:36: Loss at step 3070: 4.1855
2017-11-06 10:21:42: Loss at step 3080: 4.06599
2017-11-06 10:21:49: Loss at step 3090: 4.14044
2017-11-06 10:21:55: Loss at step 3100: 4.08347
2017-11-06 10:22:02: Loss at step 3110: 4.14713
2017-11-06 10:22:09: Loss at step 3120: 4.14048
2017-11-06 10:22:15: Loss at step 3130: 4.15988
2017-11-06 10:22:22: Loss at step 3140: 4.13702
2017-11-06 10:22:28: Loss at step 3150: 4.12487
2017-11-06 10:22:35: Loss at step 3160: 4.12977
2017-11-06 10:22:41: Loss at step 3170: 4.10046
2017-11-06 10:22:48: Loss at step 3180: 4.08077
2017-11-06 10:22:54: Loss at step 3190: 4.17926
2017-11-06 10:23:01: Loss at step 3200: 4.10548
2017-11-06 10:23:08: Loss at step 3210: 4.15633
2017-11-06 10:23:14: Loss at step 3220: 4.20821
2017-11-06 10:23:21: Loss at step 3230: 4.11693
2017-11-06 10:23:27: Loss at step 3240: 4.14803
2017-11-06 10:23:34: Loss at step 3250: 4.13236
2017-11-06 10:23:40: Loss at step 3260: 4.00171
2017-11-06 10:23:47: Loss at step 3270: 4.14175
2017-11-06 10:23:53: Loss at step 3280: 4.0819
2017-11-06 10:24:00: Loss at step 3290: 4.05597
2017-11-06 10:24:07: Loss at step 3300: 4.16331
2017-11-06 10:24:13: Loss at step 3310: 4.11614
2017-11-06 10:24:20: Loss at step 3320: 4.12916
2017-11-06 10:24:26: Loss at step 3330: 4.09842
2017-11-06 10:24:33: Loss at step 3340: 4.1424
2017-11-06 10:24:40: Loss at step 3350: 3.99474
2017-11-06 10:24:46: Loss at step 3360: 4.13916
2017-11-06 10:24:53: Loss at step 3370: 4.1845
2017-11-06 10:24:59: Loss at step 3380: 4.16926
2017-11-06 10:25:06: Loss at step 3390: 4.10973
2017-11-06 10:25:12: Loss at step 3400: 4.23046
2017-11-06 10:25:19: Loss at step 3410: 4.04878
2017-11-06 10:25:26: Loss at step 3420: 4.03863
2017-11-06 10:25:32: Loss at step 3430: 4.09786
2017-11-06 10:25:39: Loss at step 3440: 4.13433
2017-11-06 10:25:45: Loss at step 3450: 4.09914
2017-11-06 10:25:52: Loss at step 3460: 4.09835
2017-11-06 10:25:58: Loss at step 3470: 4.12781
2017-11-06 10:26:05: Loss at step 3480: 4.10683
2017-11-06 10:26:12: Loss at step 3490: 4.07587
2017-11-06 10:26:18: Loss at step 3500: 4.10672
2017-11-06 10:26:25: Loss at step 3510: 4.04751
2017-11-06 10:26:31: Loss at step 3520: 4.14033
2017-11-06 10:26:38: Loss at step 3530: 4.09707
2017-11-06 10:26:44: Loss at step 3540: 4.05078
2017-11-06 10:26:51: Loss at step 3550: 4.09735
2017-11-06 10:26:58: Loss at step 3560: 4.03425
2017-11-06 10:27:04: Loss at step 3570: 4.09411
2017-11-06 10:27:11: Loss at step 3580: 4.03119
2017-11-06 10:27:18: Loss at step 3590: 4.17572
2017-11-06 10:27:25: Loss at step 3600: 4.11631
2017-11-06 10:27:31: Loss at step 3610: 4.11732
2017-11-06 10:27:38: Loss at step 3620: 4.14629
2017-11-06 10:27:45: Loss at step 3630: 4.07539
2017-11-06 10:27:51: Loss at step 3640: 4.08336
2017-11-06 10:27:58: Loss at step 3650: 4.19621
2017-11-06 10:28:05: Loss at step 3660: 4.1073
2017-11-06 10:28:11: Loss at step 3670: 4.12273
2017-11-06 10:28:18: Loss at step 3680: 4.17728
2017-11-06 10:28:24: Loss at step 3690: 4.01724
2017-11-06 10:28:31: Loss at step 3700: 4.15624
2017-11-06 10:28:37: Loss at step 3710: 4.06253
2017-11-06 10:28:44: Loss at step 3720: 4.13378
2017-11-06 10:28:51: Loss at step 3730: 4.12498
2017-11-06 10:28:57: Loss at step 3740: 4.22151
2017-11-06 10:29:04: Loss at step 3750: 4.0845
2017-11-06 10:29:10: Loss at step 3760: 4.12264
2017-11-06 10:29:17: Loss at step 3770: 4.12746
2017-11-06 10:29:24: Loss at step 3780: 4.14218
2017-11-06 10:29:30: Loss at step 3790: 4.09018
2017-11-06 10:29:37: Loss at step 3800: 4.10506
2017-11-06 10:29:43: Loss at step 3810: 4.13991
2017-11-06 10:29:50: Loss at step 3820: 4.21107
2017-11-06 10:29:56: Loss at step 3830: 4.17659
2017-11-06 10:30:03: Loss at step 3840: 4.16581
2017-11-06 10:30:09: Loss at step 3850: 4.07206
2017-11-06 10:30:16: Loss at step 3860: 4.15194
2017-11-06 10:30:23: Loss at step 3870: 4.05069
2017-11-06 10:30:29: Loss at step 3880: 4.15472
2017-11-06 10:30:36: Loss at step 3890: 4.11079
2017-11-06 10:30:42: Loss at step 3900: 4.03282
2017-11-06 10:30:49: Loss at step 3910: 4.05943
2017-11-06 10:30:56: Loss at step 3920: 4.20266
2017-11-06 10:31:02: Loss at step 3930: 4.10178
2017-11-06 10:31:09: Loss at step 3940: 4.06951
2017-11-06 10:31:15: Loss at step 3950: 4.02254
2017-11-06 10:31:22: Loss at step 3960: 4.14199
2017-11-06 10:31:28: Loss at step 3970: 4.08807
2017-11-06 10:31:35: Loss at step 3980: 4.15882
2017-11-06 10:31:42: Loss at step 3990: 4.05724
2017-11-06 10:31:48: Loss at step 4000: 4.10311
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-4000
2017-11-06 10:31:55: Loss at step 4010: 4.13575
2017-11-06 10:32:01: Loss at step 4020: 4.06822
2017-11-06 10:32:08: Loss at step 4030: 4.10547
2017-11-06 10:32:15: Loss at step 4040: 4.11115
2017-11-06 10:32:21: Loss at step 4050: 4.1715
2017-11-06 10:32:28: Loss at step 4060: 4.1542
2017-11-06 10:32:34: Loss at step 4070: 4.12839
2017-11-06 10:32:41: Loss at step 4080: 4.10877
2017-11-06 10:32:47: Loss at step 4090: 4.09905
2017-11-06 10:32:54: Loss at step 4100: 4.12655
2017-11-06 10:33:01: Loss at step 4110: 4.04167
2017-11-06 10:33:07: Loss at step 4120: 4.16386
2017-11-06 10:33:14: Loss at step 4130: 4.15263
2017-11-06 10:33:20: Loss at step 4140: 4.12848
2017-11-06 10:33:27: Loss at step 4150: 4.16718
2017-11-06 10:33:34: Loss at step 4160: 4.10433
2017-11-06 10:33:40: Loss at step 4170: 4.08075
2017-11-06 10:33:47: Loss at step 4180: 4.10035
2017-11-06 10:33:53: Loss at step 4190: 4.02224
2017-11-06 10:34:00: Loss at step 4200: 4.05628
2017-11-06 10:34:06: Loss at step 4210: 4.18595
2017-11-06 10:34:13: Loss at step 4220: 4.02418
2017-11-06 10:34:20: Loss at step 4230: 4.04082
2017-11-06 10:34:26: Loss at step 4240: 4.11303
2017-11-06 10:34:33: Loss at step 4250: 4.10463
2017-11-06 10:34:39: Loss at step 4260: 4.054
2017-11-06 10:34:46: Loss at step 4270: 4.14172
2017-11-06 10:34:52: Loss at step 4280: 4.01848
2017-11-06 10:34:59: Loss at step 4290: 4.11033
2017-11-06 10:35:06: Loss at step 4300: 4.09068
2017-11-06 10:35:12: Loss at step 4310: 4.13955
2017-11-06 10:35:19: Loss at step 4320: 4.11129
2017-11-06 10:35:25: Loss at step 4330: 4.03516
2017-11-06 10:35:32: Loss at step 4340: 3.9929
2017-11-06 10:35:39: Loss at step 4350: 4.11838
2017-11-06 10:35:45: Loss at step 4360: 4.15233
2017-11-06 10:35:52: Loss at step 4370: 4.03218
2017-11-06 10:35:58: Loss at step 4380: 4.05731
2017-11-06 10:36:05: Loss at step 4390: 4.10355
2017-11-06 10:36:11: Loss at step 4400: 4.06705
2017-11-06 10:36:18: Loss at step 4410: 4.17374
2017-11-06 10:36:25: Loss at step 4420: 4.08625
2017-11-06 10:36:31: Loss at step 4430: 4.12899
2017-11-06 10:36:38: Loss at step 4440: 4.20315
2017-11-06 10:36:44: Loss at step 4450: 4.08601
2017-11-06 10:36:51: Loss at step 4460: 4.13734
2017-11-06 10:36:58: Loss at step 4470: 3.98976
2017-11-06 10:37:04: Loss at step 4480: 4.09494
2017-11-06 10:37:11: Loss at step 4490: 4.1014
2017-11-06 10:37:17: Loss at step 4500: 4.16742
2017-11-06 10:37:24: Loss at step 4510: 4.15398
2017-11-06 10:37:30: Loss at step 4520: 4.06463
2017-11-06 10:37:37: Loss at step 4530: 4.02288
2017-11-06 10:37:44: Loss at step 4540: 4.06889
2017-11-06 10:37:50: Loss at step 4550: 4.09453
2017-11-06 10:37:57: Loss at step 4560: 4.00597
2017-11-06 10:38:03: Loss at step 4570: 4.18619
2017-11-06 10:38:10: Loss at step 4580: 4.12554
2017-11-06 10:38:16: Loss at step 4590: 4.07017
2017-11-06 10:38:23: Loss at step 4600: 4.12917
2017-11-06 10:38:30: Loss at step 4610: 4.05058
2017-11-06 10:38:36: Loss at step 4620: 4.22105
2017-11-06 10:38:43: Loss at step 4630: 4.06305
2017-11-06 10:38:49: Loss at step 4640: 4.09316
2017-11-06 10:38:56: Loss at step 4650: 4.02887
2017-11-06 10:39:03: Loss at step 4660: 4.03597
2017-11-06 10:39:09: Loss at step 4670: 4.0723
2017-11-06 10:39:16: Loss at step 4680: 4.11651
2017-11-06 10:39:22: Loss at step 4690: 4.1097
2017-11-06 10:39:29: Loss at step 4700: 4.15222
2017-11-06 10:39:36: Loss at step 4710: 4.11191
2017-11-06 10:39:42: Loss at step 4720: 4.11323
2017-11-06 10:39:49: Loss at step 4730: 4.03096
2017-11-06 10:39:55: Loss at step 4740: 4.16206
2017-11-06 10:40:02: Loss at step 4750: 4.18045
2017-11-06 10:40:08: Loss at step 4760: 4.01916
2017-11-06 10:40:15: Loss at step 4770: 4.02638
2017-11-06 10:40:22: Loss at step 4780: 4.14721
2017-11-06 10:40:28: Loss at step 4790: 4.15425
2017-11-06 10:40:35: Loss at step 4800: 4.22033
2017-11-06 10:40:41: Loss at step 4810: 4.0855
2017-11-06 10:40:48: Loss at step 4820: 4.08591
2017-11-06 10:40:55: Loss at step 4830: 4.18471
2017-11-06 10:41:01: Loss at step 4840: 4.00459
2017-11-06 10:41:08: Loss at step 4850: 4.20881
2017-11-06 10:41:14: Loss at step 4860: 4.06762
2017-11-06 10:41:21: Loss at step 4870: 4.03022
2017-11-06 10:41:27: Loss at step 4880: 4.05529
2017-11-06 10:41:34: Loss at step 4890: 4.04652
2017-11-06 10:41:41: Loss at step 4900: 4.12272
2017-11-06 10:41:47: Loss at step 4910: 4.1081
2017-11-06 10:41:54: Loss at step 4920: 4.12669
2017-11-06 10:42:00: Loss at step 4930: 4.06885
2017-11-06 10:42:07: Loss at step 4940: 4.04125
2017-11-06 10:42:14: Loss at step 4950: 4.01778
2017-11-06 10:42:20: Loss at step 4960: 4.05302
2017-11-06 10:42:27: Loss at step 4970: 4.08813
2017-11-06 10:42:33: Loss at step 4980: 4.02344
2017-11-06 10:42:40: Loss at step 4990: 4.05153
2017-11-06 10:42:46: Loss at step 5000: 4.08449
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-5000
2017-11-06 10:42:53: Loss at step 5010: 4.01608
2017-11-06 10:43:00: Loss at step 5020: 3.97202
2017-11-06 10:43:06: Loss at step 5030: 4.06657
2017-11-06 10:43:13: Loss at step 5040: 4.04748
2017-11-06 10:43:19: Loss at step 5050: 4.09387
2017-11-06 10:43:26: Loss at step 5060: 4.19646
2017-11-06 10:43:33: Loss at step 5070: 3.98096
2017-11-06 10:43:39: Loss at step 5080: 4.09971
2017-11-06 10:43:46: Loss at step 5090: 4.08018
2017-11-06 10:43:52: Loss at step 5100: 3.97637
2017-11-06 10:43:59: Loss at step 5110: 4.12944
2017-11-06 10:44:05: Loss at step 5120: 4.09478
2017-11-06 10:44:12: Loss at step 5130: 4.13756
2017-11-06 10:44:19: Loss at step 5140: 4.06988
2017-11-06 10:44:25: Loss at step 5150: 4.09276
2017-11-06 10:44:32: Loss at step 5160: 4.00183
2017-11-06 10:44:38: Loss at step 5170: 3.99101
2017-11-06 10:44:45: Loss at step 5180: 4.12111
2017-11-06 10:44:51: Loss at step 5190: 4.04455
2017-11-06 10:44:58: Loss at step 5200: 4.08476
2017-11-06 10:45:05: Loss at step 5210: 4.09026
2017-11-06 10:45:11: Loss at step 5220: 4.05058
2017-11-06 10:45:18: Loss at step 5230: 4.1
2017-11-06 10:45:24: Loss at step 5240: 4.06581
2017-11-06 10:45:31: Loss at step 5250: 4.14531
2017-11-06 10:45:37: Loss at step 5260: 4.06103
2017-11-06 10:45:44: Loss at step 5270: 4.09071
2017-11-06 10:45:51: Loss at step 5280: 4.19195
2017-11-06 10:45:57: Loss at step 5290: 4.23559
2017-11-06 10:46:04: Loss at step 5300: 4.13797
2017-11-06 10:46:10: Loss at step 5310: 4.14401
2017-11-06 10:46:17: Loss at step 5320: 4.17338
2017-11-06 10:46:23: Loss at step 5330: 4.06014
2017-11-06 10:46:30: Loss at step 5340: 4.11214
2017-11-06 10:46:37: Loss at step 5350: 4.23201
2017-11-06 10:46:43: Loss at step 5360: 4.15451
2017-11-06 10:46:50: Loss at step 5370: 4.07752
2017-11-06 10:46:56: Loss at step 5380: 3.98426
2017-11-06 10:47:03: Loss at step 5390: 4.04308
2017-11-06 10:47:09: Loss at step 5400: 4.07886
2017-11-06 10:47:16: Loss at step 5410: 4.0892
2017-11-06 10:47:23: Loss at step 5420: 4.04848
2017-11-06 10:47:29: Loss at step 5430: 4.14306
2017-11-06 10:47:36: Loss at step 5440: 4.07114
2017-11-06 10:47:42: Loss at step 5450: 4.1348
2017-11-06 10:47:49: Loss at step 5460: 4.06071
2017-11-06 10:47:55: Loss at step 5470: 4.01252
2017-11-06 10:48:02: Loss at step 5480: 4.02527
2017-11-06 10:48:09: Loss at step 5490: 4.12711
2017-11-06 10:48:15: Loss at step 5500: 4.03598
2017-11-06 10:48:22: Loss at step 5510: 4.11129
2017-11-06 10:48:28: Loss at step 5520: 4.06169
2017-11-06 10:48:35: Loss at step 5530: 4.08664
2017-11-06 10:48:41: Loss at step 5540: 4.12114
2017-11-06 10:48:48: Loss at step 5550: 4.13296
2017-11-06 10:48:55: Loss at step 5560: 4.01454
2017-11-06 10:49:01: Loss at step 5570: 4.07417
2017-11-06 10:49:08: Loss at step 5580: 4.0727
2017-11-06 10:49:14: Loss at step 5590: 4.0023
2017-11-06 10:49:21: Loss at step 5600: 4.03579
2017-11-06 10:49:28: Loss at step 5610: 4.15141
2017-11-06 10:49:34: Loss at step 5620: 4.10863
2017-11-06 10:49:41: Loss at step 5630: 4.14464
2017-11-06 10:49:47: Loss at step 5640: 4.03598
2017-11-06 10:49:54: Loss at step 5650: 4.15158
2017-11-06 10:50:00: Loss at step 5660: 3.98692
2017-11-06 10:50:07: Loss at step 5670: 4.03434
2017-11-06 10:50:14: Loss at step 5680: 4.13156
2017-11-06 10:50:20: Loss at step 5690: 4.02387
2017-11-06 10:50:27: Loss at step 5700: 4.09821
2017-11-06 10:50:33: Loss at step 5710: 4.10111
2017-11-06 10:50:40: Loss at step 5720: 4.11416
2017-11-06 10:50:46: Loss at step 5730: 4.13295
2017-11-06 10:50:53: Loss at step 5740: 4.16376
2017-11-06 10:51:00: Loss at step 5750: 4.11649
2017-11-06 10:51:06: Loss at step 5760: 4.10002
2017-11-06 10:51:13: Loss at step 5770: 4.052
2017-11-06 10:51:19: Loss at step 5780: 4.14619
2017-11-06 10:51:26: Loss at step 5790: 4.08222
2017-11-06 10:51:32: Loss at step 5800: 4.06405
2017-11-06 10:51:39: Loss at step 5810: 4.11958
2017-11-06 10:51:46: Loss at step 5820: 4.05291
2017-11-06 10:51:52: Loss at step 5830: 4.14893
2017-11-06 10:51:59: Loss at step 5840: 4.12156
2017-11-06 10:52:05: Loss at step 5850: 4.12946
2017-11-06 10:52:12: Loss at step 5860: 4.01768
2017-11-06 10:52:18: Loss at step 5870: 3.96029
2017-11-06 10:52:25: Loss at step 5880: 3.98256
2017-11-06 10:52:32: Loss at step 5890: 4.11072
2017-11-06 10:52:38: Loss at step 5900: 4.02819
2017-11-06 10:52:45: Loss at step 5910: 4.0884
2017-11-06 10:52:51: Loss at step 5920: 3.93462
2017-11-06 10:52:58: Loss at step 5930: 4.02809
2017-11-06 10:53:04: Loss at step 5940: 4.20664
2017-11-06 10:53:11: Loss at step 5950: 4.20502
2017-11-06 10:53:18: Loss at step 5960: 4.07174
2017-11-06 10:53:24: Loss at step 5970: 4.06337
2017-11-06 10:53:31: Loss at step 5980: 4.08406
2017-11-06 10:53:37: Loss at step 5990: 4.07352
2017-11-06 10:53:44: Loss at step 6000: 3.98667
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-6000
2017-11-06 10:53:51: Loss at step 6010: 4.11794
2017-11-06 10:53:57: Loss at step 6020: 4.02028
2017-11-06 10:54:04: Loss at step 6030: 4.04301
2017-11-06 10:54:10: Loss at step 6040: 4.11078
2017-11-06 10:54:17: Loss at step 6050: 4.03879
2017-11-06 10:54:23: Loss at step 6060: 4.0702
2017-11-06 10:54:30: Loss at step 6070: 4.04417
2017-11-06 10:54:37: Loss at step 6080: 4.15205
2017-11-06 10:54:43: Loss at step 6090: 4.07875
2017-11-06 10:54:50: Loss at step 6100: 4.09895
2017-11-06 10:54:56: Loss at step 6110: 4.06469
2017-11-06 10:55:03: Loss at step 6120: 4.09761
2017-11-06 10:55:10: Loss at step 6130: 4.12986
2017-11-06 10:55:16: Loss at step 6140: 4.10426
2017-11-06 10:55:23: Loss at step 6150: 4.18066
2017-11-06 10:55:29: Loss at step 6160: 4.08779
2017-11-06 10:55:36: Loss at step 6170: 4.09465
2017-11-06 10:55:42: Loss at step 6180: 4.13369
2017-11-06 10:55:49: Loss at step 6190: 4.01127
2017-11-06 10:55:56: Loss at step 6200: 4.13457
2017-11-06 10:56:02: Loss at step 6210: 4.03784
2017-11-06 10:56:09: Loss at step 6220: 4.04857
2017-11-06 10:56:15: Loss at step 6230: 4.03947
2017-11-06 10:56:22: Loss at step 6240: 4.10167
2017-11-06 10:56:28: Loss at step 6250: 3.96451
2017-11-06 10:56:35: Loss at step 6260: 4.06429
2017-11-06 10:56:42: Loss at step 6270: 4.06905
2017-11-06 10:56:48: Loss at step 6280: 4.12366
2017-11-06 10:56:55: Loss at step 6290: 4.05767
2017-11-06 10:57:01: Loss at step 6300: 4.16053
2017-11-06 10:57:08: Loss at step 6310: 3.96751
2017-11-06 10:57:14: Loss at step 6320: 4.12382
2017-11-06 10:57:21: Loss at step 6330: 3.97167
2017-11-06 10:57:28: Loss at step 6340: 4.10428
2017-11-06 10:57:34: Loss at step 6350: 4.09392
2017-11-06 10:57:41: Loss at step 6360: 4.10873
2017-11-06 10:57:47: Loss at step 6370: 3.96999
2017-11-06 10:57:54: Loss at step 6380: 4.04505
2017-11-06 10:58:01: Loss at step 6390: 4.13039
2017-11-06 10:58:07: Loss at step 6400: 4.03822
2017-11-06 10:58:14: Loss at step 6410: 4.09659
2017-11-06 10:58:21: Loss at step 6420: 4.11836
2017-11-06 10:58:27: Loss at step 6430: 4.0641
2017-11-06 10:58:34: Loss at step 6440: 4.05553
2017-11-06 10:58:41: Loss at step 6450: 4.04733
2017-11-06 10:58:48: Loss at step 6460: 3.98453
2017-11-06 10:58:54: Loss at step 6470: 4.01452
2017-11-06 10:59:01: Loss at step 6480: 3.96235
2017-11-06 10:59:08: Loss at step 6490: 3.97091
2017-11-06 10:59:14: Loss at step 6500: 4.11731
2017-11-06 10:59:21: Loss at step 6510: 4.12821
2017-11-06 10:59:28: Loss at step 6520: 4.09671
2017-11-06 10:59:35: Loss at step 6530: 3.9954
2017-11-06 10:59:41: Loss at step 6540: 4.01679
2017-11-06 10:59:48: Loss at step 6550: 4.09882
2017-11-06 10:59:55: Loss at step 6560: 4.15221
2017-11-06 11:00:01: Loss at step 6570: 4.02086
2017-11-06 11:00:08: Loss at step 6580: 4.00349
2017-11-06 11:00:15: Loss at step 6590: 4.07737
2017-11-06 11:00:21: Loss at step 6600: 4.14157
2017-11-06 11:00:28: Loss at step 6610: 4.10394
2017-11-06 11:00:34: Loss at step 6620: 4.11917
2017-11-06 11:00:41: Loss at step 6630: 4.12444
2017-11-06 11:00:48: Loss at step 6640: 4.07092
2017-11-06 11:00:54: Loss at step 6650: 4.13941
2017-11-06 11:01:01: Loss at step 6660: 4.03978
2017-11-06 11:01:07: Loss at step 6670: 4.11186
2017-11-06 11:01:14: Loss at step 6680: 4.08624
2017-11-06 11:01:21: Loss at step 6690: 4.02884
2017-11-06 11:01:27: Loss at step 6700: 4.03364
2017-11-06 11:01:34: Loss at step 6710: 3.96466
2017-11-06 11:01:40: Loss at step 6720: 4.07772
2017-11-06 11:01:47: Loss at step 6730: 4.09383
2017-11-06 11:01:54: Loss at step 6740: 4.09354
2017-11-06 11:02:00: Loss at step 6750: 4.08533
2017-11-06 11:02:07: Loss at step 6760: 4.12014
2017-11-06 11:02:13: Loss at step 6770: 4.08323
2017-11-06 11:02:20: Loss at step 6780: 4.09374
2017-11-06 11:02:27: Loss at step 6790: 4.12899
2017-11-06 11:02:33: Loss at step 6800: 4.01099
2017-11-06 11:02:40: Loss at step 6810: 4.07888
2017-11-06 11:02:47: Loss at step 6820: 3.9977
2017-11-06 11:02:54: Loss at step 6830: 4.02959
2017-11-06 11:03:01: Loss at step 6840: 3.98307
2017-11-06 11:03:07: Loss at step 6850: 4.06857
2017-11-06 11:03:14: Loss at step 6860: 4.08125
2017-11-06 11:03:21: Loss at step 6870: 3.97359
2017-11-06 11:03:27: Loss at step 6880: 4.07388
2017-11-06 11:03:34: Loss at step 6890: 4.0173
2017-11-06 11:03:41: Loss at step 6900: 4.11419
2017-11-06 11:03:48: Loss at step 6910: 4.12847
2017-11-06 11:03:55: Loss at step 6920: 4.10905
2017-11-06 11:04:01: Loss at step 6930: 4.0322
2017-11-06 11:04:09: Loss at step 6940: 4.04721
2017-11-06 11:04:15: Loss at step 6950: 3.96631
2017-11-06 11:04:22: Loss at step 6960: 4.12861
2017-11-06 11:04:29: Loss at step 6970: 4.02876
2017-11-06 11:04:35: Loss at step 6980: 4.08305
2017-11-06 11:04:42: Loss at step 6990: 4.13697
2017-11-06 11:04:49: Loss at step 7000: 4.08476
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-7000
2017-11-06 11:04:55: Loss at step 7010: 4.08733
2017-11-06 11:05:02: Loss at step 7020: 4.03793
2017-11-06 11:05:09: Loss at step 7030: 4.05702
2017-11-06 11:05:16: Loss at step 7040: 4.04438
2017-11-06 11:05:22: Loss at step 7050: 3.97624
2017-11-06 11:05:29: Loss at step 7060: 4.09785
2017-11-06 11:05:36: Loss at step 7070: 4.05868
2017-11-06 11:05:43: Loss at step 7080: 3.97453
2017-11-06 11:05:49: Loss at step 7090: 4.04015
2017-11-06 11:05:56: Loss at step 7100: 4.0667
2017-11-06 11:06:03: Loss at step 7110: 4.04204
2017-11-06 11:06:09: Loss at step 7120: 4.08179
2017-11-06 11:06:16: Loss at step 7130: 4.13254
2017-11-06 11:06:23: Loss at step 7140: 4.00748
2017-11-06 11:06:29: Loss at step 7150: 4.05395
2017-11-06 11:06:36: Loss at step 7160: 4.08005
2017-11-06 11:06:43: Loss at step 7170: 3.97438
2017-11-06 11:06:49: Loss at step 7180: 4.04211
2017-11-06 11:06:57: Loss at step 7190: 4.0458
2017-11-06 11:07:03: Loss at step 7200: 4.07185
2017-11-06 11:07:10: Loss at step 7210: 3.9754
2017-11-06 11:07:17: Loss at step 7220: 4.03629
2017-11-06 11:07:23: Loss at step 7230: 4.0607
2017-11-06 11:07:30: Loss at step 7240: 4.00732
2017-11-06 11:07:37: Loss at step 7250: 3.93003
2017-11-06 11:07:44: Loss at step 7260: 4.08316
2017-11-06 11:07:50: Loss at step 7270: 4.0574
2017-11-06 11:07:57: Loss at step 7280: 4.16089
2017-11-06 11:08:04: Loss at step 7290: 4.09541
2017-11-06 11:08:11: Loss at step 7300: 4.05893
2017-11-06 11:08:18: Loss at step 7310: 4.09709
2017-11-06 11:08:24: Loss at step 7320: 4.12052
2017-11-06 11:08:31: Loss at step 7330: 4.02195
2017-11-06 11:08:38: Loss at step 7340: 4.15454
2017-11-06 11:08:45: Loss at step 7350: 4.05627
2017-11-06 11:08:51: Loss at step 7360: 4.14338
2017-11-06 11:08:58: Loss at step 7370: 4.04125
2017-11-06 11:09:05: Loss at step 7380: 4.04766
2017-11-06 11:09:12: Loss at step 7390: 4.09295
2017-11-06 11:09:19: Loss at step 7400: 4.05273
2017-11-06 11:09:25: Loss at step 7410: 4.05346
2017-11-06 11:09:32: Loss at step 7420: 4.09792
2017-11-06 11:09:39: Loss at step 7430: 4.06337
2017-11-06 11:09:45: Loss at step 7440: 4.06843
2017-11-06 11:09:52: Loss at step 7450: 4.09056
2017-11-06 11:09:59: Loss at step 7460: 4.09495
2017-11-06 11:10:05: Loss at step 7470: 4.09544
2017-11-06 11:10:12: Loss at step 7480: 4.01025
2017-11-06 11:10:18: Loss at step 7490: 4.19539
2017-11-06 11:10:25: Loss at step 7500: 4.00022
2017-11-06 11:10:32: Loss at step 7510: 4.04656
2017-11-06 11:10:38: Loss at step 7520: 4.05542
2017-11-06 11:10:45: Loss at step 7530: 4.06252
2017-11-06 11:10:52: Loss at step 7540: 4.04469
2017-11-06 11:10:58: Loss at step 7550: 3.95385
2017-11-06 11:11:05: Loss at step 7560: 4.11355
2017-11-06 11:11:11: Loss at step 7570: 4.11836
2017-11-06 11:11:18: Loss at step 7580: 4.11051
2017-11-06 11:11:24: Loss at step 7590: 4.05106
2017-11-06 11:11:31: Loss at step 7600: 4.10798
2017-11-06 11:11:38: Loss at step 7610: 4.1337
2017-11-06 11:11:44: Loss at step 7620: 4.07425
2017-11-06 11:11:51: Loss at step 7630: 4.05623
2017-11-06 11:11:57: Loss at step 7640: 4.09019
2017-11-06 11:12:04: Loss at step 7650: 4.08397
2017-11-06 11:12:11: Loss at step 7660: 4.103
2017-11-06 11:12:17: Loss at step 7670: 4.03375
2017-11-06 11:12:24: Loss at step 7680: 4.15793
2017-11-06 11:12:30: Loss at step 7690: 3.99594
2017-11-06 11:12:37: Loss at step 7700: 4.15764
2017-11-06 11:12:44: Loss at step 7710: 3.9936
2017-11-06 11:12:50: Loss at step 7720: 4.0283
2017-11-06 11:12:57: Loss at step 7730: 3.91856
2017-11-06 11:13:03: Loss at step 7740: 3.94965
2017-11-06 11:13:10: Loss at step 7750: 4.09358
2017-11-06 11:13:17: Loss at step 7760: 4.09659
2017-11-06 11:13:23: Loss at step 7770: 4.11024
2017-11-06 11:13:30: Loss at step 7780: 4.06048
2017-11-06 11:13:36: Loss at step 7790: 4.09675
2017-11-06 11:13:43: Loss at step 7800: 4.0567
2017-11-06 11:13:49: Loss at step 7810: 4.19749
2017-11-06 11:13:56: Loss at step 7820: 4.00117
2017-11-06 11:14:03: Loss at step 7830: 4.05893
2017-11-06 11:14:09: Loss at step 7840: 4.11187
2017-11-06 11:14:16: Loss at step 7850: 4.07709
2017-11-06 11:14:22: Loss at step 7860: 4.01727
2017-11-06 11:14:29: Loss at step 7870: 4.10974
2017-11-06 11:14:36: Loss at step 7880: 4.03746
2017-11-06 11:14:42: Loss at step 7890: 4.0956
2017-11-06 11:14:49: Loss at step 7900: 4.01823
2017-11-06 11:14:55: Loss at step 7910: 4.01351
2017-11-06 11:15:02: Loss at step 7920: 3.98603
2017-11-06 11:15:09: Loss at step 7930: 4.18912
2017-11-06 11:15:15: Loss at step 7940: 4.03969
2017-11-06 11:15:22: Loss at step 7950: 4.06691
2017-11-06 11:15:28: Loss at step 7960: 4.0661
2017-11-06 11:15:35: Loss at step 7970: 4.02401
2017-11-06 11:15:41: Loss at step 7980: 4.09108
2017-11-06 11:15:48: Loss at step 7990: 4.01708
2017-11-06 11:15:55: Loss at step 8000: 4.01001
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-8000
2017-11-06 11:16:01: Loss at step 8010: 4.02938
2017-11-06 11:16:08: Loss at step 8020: 3.95334
2017-11-06 11:16:14: Loss at step 8030: 4.17669
2017-11-06 11:16:21: Loss at step 8040: 4.11184
2017-11-06 11:16:28: Loss at step 8050: 4.05137
2017-11-06 11:16:34: Loss at step 8060: 4.07584
2017-11-06 11:16:41: Loss at step 8070: 4.03552
2017-11-06 11:16:48: Loss at step 8080: 4.02599
2017-11-06 11:16:54: Loss at step 8090: 4.11788
2017-11-06 11:17:01: Loss at step 8100: 4.05716
2017-11-06 11:17:08: Loss at step 8110: 4.14183
2017-11-06 11:17:14: Loss at step 8120: 4.06777
2017-11-06 11:17:21: Loss at step 8130: 4.16293
2017-11-06 11:17:27: Loss at step 8140: 4.16067
2017-11-06 11:17:34: Loss at step 8150: 3.9832
2017-11-06 11:17:41: Loss at step 8160: 4.08429
2017-11-06 11:17:47: Loss at step 8170: 4.069
2017-11-06 11:17:54: Loss at step 8180: 4.12703
2017-11-06 11:18:01: Loss at step 8190: 4.05436
2017-11-06 11:18:07: Loss at step 8200: 4.04485
2017-11-06 11:18:14: Loss at step 8210: 4.07384
2017-11-06 11:18:21: Loss at step 8220: 3.99264
2017-11-06 11:18:28: Loss at step 8230: 3.99257
2017-11-06 11:18:34: Loss at step 8240: 4.01218
2017-11-06 11:18:41: Loss at step 8250: 4.14935
2017-11-06 11:18:48: Loss at step 8260: 4.03837
2017-11-06 11:18:54: Loss at step 8270: 4.12392
2017-11-06 11:19:01: Loss at step 8280: 4.16779
2017-11-06 11:19:08: Loss at step 8290: 4.02727
2017-11-06 11:19:14: Loss at step 8300: 4.12817
2017-11-06 11:19:21: Loss at step 8310: 3.9983
2017-11-06 11:19:27: Loss at step 8320: 4.13125
2017-11-06 11:19:34: Loss at step 8330: 4.10632
2017-11-06 11:19:41: Loss at step 8340: 4.08557
2017-11-06 11:19:47: Loss at step 8350: 4.05611
2017-11-06 11:19:54: Loss at step 8360: 4.10325
2017-11-06 11:20:00: Loss at step 8370: 4.02432
2017-11-06 11:20:07: Loss at step 8380: 4.12782
2017-11-06 11:20:13: Loss at step 8390: 3.95347
2017-11-06 11:20:20: Loss at step 8400: 4.08054
2017-11-06 11:20:27: Loss at step 8410: 4.12349
2017-11-06 11:20:33: Loss at step 8420: 4.17168
2017-11-06 11:20:40: Loss at step 8430: 4.05832
2017-11-06 11:20:46: Loss at step 8440: 4.09307
2017-11-06 11:20:53: Loss at step 8450: 3.99149
2017-11-06 11:21:00: Loss at step 8460: 3.97649
2017-11-06 11:21:06: Loss at step 8470: 4.1265
2017-11-06 11:21:13: Loss at step 8480: 4.10959
2017-11-06 11:21:20: Loss at step 8490: 4.00409
2017-11-06 11:21:26: Loss at step 8500: 4.05166
2017-11-06 11:21:33: Loss at step 8510: 4.0291
2017-11-06 11:21:40: Loss at step 8520: 4.01697
2017-11-06 11:21:46: Loss at step 8530: 4.0703
2017-11-06 11:21:53: Loss at step 8540: 4.02369
2017-11-06 11:22:00: Loss at step 8550: 3.98684
2017-11-06 11:22:07: Loss at step 8560: 4.08523
2017-11-06 11:22:13: Loss at step 8570: 4.01986
2017-11-06 11:22:20: Loss at step 8580: 4.08698
2017-11-06 11:22:27: Loss at step 8590: 4.20879
2017-11-06 11:22:33: Loss at step 8600: 4.04064
2017-11-06 11:22:40: Loss at step 8610: 4.08969
2017-11-06 11:22:46: Loss at step 8620: 4.05002
2017-11-06 11:22:53: Loss at step 8630: 4.21123
2017-11-06 11:23:00: Loss at step 8640: 4.14405
2017-11-06 11:23:06: Loss at step 8650: 4.08141
2017-11-06 11:23:13: Loss at step 8660: 4.11447
2017-11-06 11:23:19: Loss at step 8670: 4.06205
2017-11-06 11:23:26: Loss at step 8680: 4.14992
2017-11-06 11:23:33: Loss at step 8690: 4.02808
2017-11-06 11:23:39: Loss at step 8700: 4.03855
2017-11-06 11:23:46: Loss at step 8710: 4.13124
2017-11-06 11:23:52: Loss at step 8720: 4.04068
2017-11-06 11:23:59: Loss at step 8730: 4.00127
2017-11-06 11:24:05: Loss at step 8740: 4.03866
2017-11-06 11:24:12: Loss at step 8750: 4.11273
2017-11-06 11:24:19: Loss at step 8760: 4.16832
2017-11-06 11:24:25: Loss at step 8770: 4.12706
2017-11-06 11:24:32: Loss at step 8780: 4.09101
2017-11-06 11:24:38: Loss at step 8790: 4.04295
2017-11-06 11:24:45: Loss at step 8800: 3.99253
2017-11-06 11:24:52: Loss at step 8810: 4.0902
2017-11-06 11:24:58: Loss at step 8820: 4.00172
2017-11-06 11:25:05: Loss at step 8830: 3.93763
2017-11-06 11:25:12: Loss at step 8840: 4.16542
2017-11-06 11:25:18: Loss at step 8850: 4.0881
2017-11-06 11:25:25: Loss at step 8860: 4.00146
2017-11-06 11:25:32: Loss at step 8870: 4.16857
2017-11-06 11:25:38: Loss at step 8880: 4.09092
2017-11-06 11:25:45: Loss at step 8890: 4.04317
2017-11-06 11:25:52: Loss at step 8900: 4.15075
2017-11-06 11:25:58: Loss at step 8910: 4.12898
2017-11-06 11:26:05: Loss at step 8920: 4.11537
2017-11-06 11:26:11: Loss at step 8930: 4.16137
2017-11-06 11:26:18: Loss at step 8940: 4.04223
2017-11-06 11:26:25: Loss at step 8950: 4.1558
2017-11-06 11:26:31: Loss at step 8960: 4.12549
2017-11-06 11:26:38: Loss at step 8970: 4.05228
2017-11-06 11:26:44: Loss at step 8980: 4.11178
2017-11-06 11:26:51: Loss at step 8990: 4.03878
2017-11-06 11:26:58: Loss at step 9000: 4.05094
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-9000
2017-11-06 11:27:04: Loss at step 9010: 4.06619
2017-11-06 11:27:11: Loss at step 9020: 4.00843
2017-11-06 11:27:17: Loss at step 9030: 4.12803
2017-11-06 11:27:24: Loss at step 9040: 4.04581
2017-11-06 11:27:31: Loss at step 9050: 4.14092
2017-11-06 11:27:37: Loss at step 9060: 4.04974
2017-11-06 11:27:44: Loss at step 9070: 4.01706
2017-11-06 11:27:50: Loss at step 9080: 4.05727
2017-11-06 11:27:57: Loss at step 9090: 4.11784
2017-11-06 11:28:04: Loss at step 9100: 4.04054
2017-11-06 11:28:10: Loss at step 9110: 4.04241
2017-11-06 11:28:17: Loss at step 9120: 4.05412
2017-11-06 11:28:24: Loss at step 9130: 4.16615
2017-11-06 11:28:30: Loss at step 9140: 4.0924
2017-11-06 11:28:37: Loss at step 9150: 4.10313
2017-11-06 11:28:44: Loss at step 9160: 4.05904
2017-11-06 11:28:50: Loss at step 9170: 3.99775
2017-11-06 11:28:57: Loss at step 9180: 4.14048
2017-11-06 11:29:04: Loss at step 9190: 4.08514
2017-11-06 11:29:11: Loss at step 9200: 3.97047
2017-11-06 11:29:17: Loss at step 9210: 4.07869
2017-11-06 11:29:24: Loss at step 9220: 3.97254
2017-11-06 11:29:30: Loss at step 9230: 4.10442
2017-11-06 11:29:37: Loss at step 9240: 4.12224
2017-11-06 11:29:44: Loss at step 9250: 4.13512
2017-11-06 11:29:51: Loss at step 9260: 4.06539
2017-11-06 11:29:57: Loss at step 9270: 4.08157
2017-11-06 11:30:04: Loss at step 9280: 4.08554
2017-11-06 11:30:11: Loss at step 9290: 4.06795
2017-11-06 11:30:17: Loss at step 9300: 4.10229
2017-11-06 11:30:24: Loss at step 9310: 4.02596
2017-11-06 11:30:31: Loss at step 9320: 4.02131
2017-11-06 11:30:37: Loss at step 9330: 4.08264
2017-11-06 11:30:44: Loss at step 9340: 4.17603
2017-11-06 11:30:51: Loss at step 9350: 4.13033
2017-11-06 11:30:57: Loss at step 9360: 4.12941
2017-11-06 11:31:04: Loss at step 9370: 4.08487
2017-11-06 11:31:11: Loss at step 9380: 4.06461
2017-11-06 11:31:17: Loss at step 9390: 4.02673
2017-11-06 11:31:24: Loss at step 9400: 4.06636
2017-11-06 11:31:31: Loss at step 9410: 4.00272
2017-11-06 11:31:37: Loss at step 9420: 4.10954
2017-11-06 11:31:44: Loss at step 9430: 4.06427
2017-11-06 11:31:51: Loss at step 9440: 4.14418
2017-11-06 11:31:57: Loss at step 9450: 4.0871
2017-11-06 11:32:04: Loss at step 9460: 4.11413
2017-11-06 11:32:10: Loss at step 9470: 4.06219
2017-11-06 11:32:17: Loss at step 9480: 4.08874
2017-11-06 11:32:24: Loss at step 9490: 4.08803
2017-11-06 11:32:30: Loss at step 9500: 4.08167
2017-11-06 11:32:37: Loss at step 9510: 4.06626
2017-11-06 11:32:43: Loss at step 9520: 4.16472
2017-11-06 11:32:50: Loss at step 9530: 4.09137
2017-11-06 11:32:57: Loss at step 9540: 4.13266
2017-11-06 11:33:03: Loss at step 9550: 4.08768
2017-11-06 11:33:10: Loss at step 9560: 4.10669
2017-11-06 11:33:16: Loss at step 9570: 4.00035
2017-11-06 11:33:23: Loss at step 9580: 4.00734
2017-11-06 11:33:30: Loss at step 9590: 4.12008
2017-11-06 11:33:36: Loss at step 9600: 4.02472
2017-11-06 11:33:43: Loss at step 9610: 4.09983
2017-11-06 11:33:49: Loss at step 9620: 4.02749
2017-11-06 11:33:56: Loss at step 9630: 4.07377
2017-11-06 11:34:03: Loss at step 9640: 4.07196
2017-11-06 11:34:09: Loss at step 9650: 4.04858
2017-11-06 11:34:16: Loss at step 9660: 4.08644
2017-11-06 11:34:23: Loss at step 9670: 4.09253
2017-11-06 11:34:29: Loss at step 9680: 4.00212
2017-11-06 11:34:36: Loss at step 9690: 4.10695
2017-11-06 11:34:42: Loss at step 9700: 4.16968
2017-11-06 11:34:49: Loss at step 9710: 4.03022
2017-11-06 11:34:56: Loss at step 9720: 4.09566
2017-11-06 11:35:02: Loss at step 9730: 4.13837
2017-11-06 11:35:09: Loss at step 9740: 4.20991
2017-11-06 11:35:15: Loss at step 9750: 4.08723
2017-11-06 11:35:22: Loss at step 9760: 4.10516
2017-11-06 11:35:29: Loss at step 9770: 4.03926
2017-11-06 11:35:35: Loss at step 9780: 4.1327
2017-11-06 11:35:42: Loss at step 9790: 4.02155
2017-11-06 11:35:48: Loss at step 9800: 4.09284
2017-11-06 11:35:55: Loss at step 9810: 3.99556
2017-11-06 11:36:02: Loss at step 9820: 4.05304
2017-11-06 11:36:08: Loss at step 9830: 4.14569
2017-11-06 11:36:15: Loss at step 9840: 4.12982
2017-11-06 11:36:21: Loss at step 9850: 4.15931
2017-11-06 11:36:28: Loss at step 9860: 4.12346
2017-11-06 11:36:35: Loss at step 9870: 3.95286
2017-11-06 11:36:41: Loss at step 9880: 4.00625
2017-11-06 11:36:48: Loss at step 9890: 4.1362
2017-11-06 11:36:55: Loss at step 9900: 4.15648
2017-11-06 11:37:01: Loss at step 9910: 3.95586
2017-11-06 11:37:08: Loss at step 9920: 4.05806
2017-11-06 11:37:14: Loss at step 9930: 4.00715
2017-11-06 11:37:21: Loss at step 9940: 4.00376
2017-11-06 11:37:28: Loss at step 9950: 4.06719
2017-11-06 11:37:34: Loss at step 9960: 4.19397
2017-11-06 11:37:41: Loss at step 9970: 4.0765
2017-11-06 11:37:47: Loss at step 9980: 4.11454
2017-11-06 11:37:54: Loss at step 9990: 4.10128
2017-11-06 11:38:01: Loss at step 10000: 4.11924
Model saved in file: ./saves.tf.Mines4/(12, 12)/model-10000

In [17]:
# Test the trained model on a larger batch
batch_xs, batch_ys = next_training_batch(1000)
print(sess.run(cross_entropy, feed_dict={mineCounts: batch_xs, mineFreeAverages: batch_ys}))


4.07932

In [25]:
# Run a test: for each board, pick the square with the highest predicted probability
# of being mine-free and count how often that guess is wrong
batchSize = 10000
batch_xs, batch_ys = next_training_batch(batchSize)

predictions = sess.run(tf.nn.softmax(y), feed_dict={mineCounts: batch_xs, mineFreeAverages: batch_ys})
bestSquares = [pred.argmax() for pred in predictions]
unfrees = (batch_ys == 0).astype(int)   # 1 where a square is not a hidden mine-free square
errors = [unfrees[i][bestSquares[i]] for i in range(batchSize)]
print("Number of errors for batch size of ", batchSize)
print(sum(errors))


Number of errors for batch size of  10000
166
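
For reference, 166 wrong guesses out of 10000 boards means the single best guess lands on a mine or an already-revealed square roughly 1.7% of the time.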

In [28]:
# Find the boards we failed on (the best guess landed on a mine or an already-revealed
# square), print them and log them as TensorBoard images
batchSize = 1000
batch_xs, batch_ys = next_training_batch(batchSize)

predictions = sess.run(tf.nn.softmax(y), feed_dict={mineCounts: batch_xs, mineFreeAverages: batch_ys})
bestSquares = [pred.argmax() for pred in predictions]
unfrees = (batch_ys == 0).astype(int)
errors = [unfrees[i][bestSquares[i]] for i in range(batchSize)]
for i in range(batchSize):
    if errors[i] == 1:
        print(batch_xs[i].reshape(dimensions))
        # Note: this builds a new summary op on every hit, which slowly grows the graph
        summary = sess.run(tf.summary.image('mine_miss', tf.reshape((batch_xs[i]+1).astype(float),[-1,rows,cols,1]), 100))
        writer.add_summary(summary)


[[-1 -1  1  0 -1  1 -1  1  1 -1  1 -1]
 [-1  3  3 -1 -1  2 -1  3 -1  3  2  1]
 [ 1 -1 -1  2 -1  2 -1 -1 -1  2 -1  1]
 [-1 -1 -1  2  0  1  2 -1 -1  3 -1  2]
 [-1  2  1 -1  1  1  1 -1  1  1 -1  2]
 [-1  1  0  1 -1  2  1 -1  1 -1 -1 -1]
 [ 2  2 -1  1 -1 -1  1  0  1 -1  3 -1]
 [-1  2 -1  0 -1 -1  2  2  4  3 -1  1]
 [-1 -1  1  0  0 -1  2 -1 -1 -1  1  0]
 [-1 -1  1  0 -1  0  2 -1  5  3 -1 -1]
 [-1 -1 -1  0  0  1  3  3  3 -1 -1 -1]
 [ 2 -1  1  0  0  1 -1 -1 -1 -1  1  0]]
[[ 1 -1 -1 -1  1 -1 -1 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1  2  2 -1 -1]
 [-1 -1 -1 -1  5 -1 -1 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1  4 -1 -1 -1 -1 -1 -1  3]
 [-1 -1 -1 -1  2 -1 -1 -1 -1  2 -1  2]
 [-1 -1 -1 -1  1 -1 -1 -1 -1  2 -1 -1]
 [ 1 -1 -1 -1 -1  0 -1 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1  2 -1 -1 -1 -1  2 -1  2]
 [-1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [-1  1 -1  1 -1 -1 -1 -1 -1  1 -1 -1]
 [-1 -1  0 -1 -1  0 -1 -1 -1 -1 -1  0]
 [ 0  0  0 -1 -1 -1 -1 -1 -1 -1 -1 -1]]
[[-1  3  1  1  0  0  1  2 -1 -1  0  0]
 [-1  4 -1  1  0  0  2 -1  4  2  0  0]
 [ 3 -1  4  3  2  1  2 -1 -1  3 -1  1]
 [ 3 -1 -1 -1 -1  1  1  2  3 -1 -1  1]
 [-1  2  3 -1 -1  1  0  0  1  3  4  3]
 [ 2  2  1 -1 -1  1  1  1  1  1 -1 -1]
 [-1  2 -1  2 -1 -1  2 -1 -1  2 -1  2]
 [ 1  2 -1  2 -1  1  3 -1 -1 -1  1  0]
 [ 1  2  2  1  1  1  3 -1  5  3  2  0]
 [ 2 -1  2  1  2 -1  3  3 -1 -1  1  0]
 [-1 -1  3 -1  2  3 -1  4  3  3  2  1]
 [-1 -1  2  1  1  2 -1  3 -1  1  1 -1]]
[[-1  0 -1 -1  2  3 -1 -1 -1  3 -1 -1]
 [-1  1 -1 -1 -1  5 -1 -1 -1 -1  4 -1]
 [-1  1 -1  2 -1 -1 -1 -1 -1 -1  3 -1]
 [ 1 -1 -1 -1  4 -1  3 -1  3  3  3 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [-1  2 -1 -1 -1  3  1 -1 -1 -1 -1  1]
 [-1  1 -1  2 -1  3  1 -1  2 -1 -1  1]
 [-1  1  1 -1  1 -1 -1  2  2 -1 -1  1]
 [ 2 -1 -1 -1  1  2  1  2 -1  4 -1  1]
 [-1 -1 -1 -1 -1 -1 -1  1 -1 -1  1 -1]
 [ 2 -1 -1 -1 -1 -1 -1  0 -1  1 -1 -1]
 [-1 -1 -1 -1 -1  2  0  0 -1  0 -1 -1]]
[[ 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [-1  0  0  1 -1  3 -1  2 -1 -1  2  1]
 [ 0  0 -1 -1 -1 -1  3 -1  3 -1 -1 -1]
 [ 0  0 -1 -1 -1 -1 -1 -1  2  2 -1  2]
 [ 1  1 -1 -1 -1 -1 -1 -1  1  1 -1 -1]
 [-1 -1 -1  0  0  1 -1  1  0 -1 -1 -1]
 [-1 -1  2  1 -1  1  1  1 -1 -1 -1 -1]
 [ 1  1  2 -1 -1 -1 -1 -1 -1 -1  4 -1]
 [ 0 -1 -1 -1 -1 -1 -1 -1  2  2 -1 -1]
 [-1  1 -1  3  3 -1 -1 -1 -1 -1  1  1]
 [ 0  1 -1  2 -1 -1  2 -1 -1  1 -1 -1]
 [-1 -1 -1  1 -1 -1  1  1 -1  1  0 -1]]
[[-1 -1 -1  0 -1  1 -1  1 -1  0  0 -1]
 [-1 -1  0  0  0 -1 -1  1  1  1 -1  0]
 [-1  0  0  0  1 -1  3  2  1 -1  1 -1]
 [ 1 -1 -1 -1  2 -1 -1  2  1 -1  1  0]
 [ 2 -1  3 -1 -1 -1 -1  3  1  1  0  0]
 [-1  3 -1 -1  2  1 -1 -1 -1  1 -1  0]
 [-1  3  2 -1  1 -1  1  3  3  4  3  2]
 [-1 -1  3 -1  0  1 -1  4 -1 -1 -1 -1]
 [ 2 -1 -1  1  0 -1 -1 -1 -1 -1 -1  2]
 [ 1 -1  2  1  0 -1 -1 -1 -1 -1 -1 -1]
 [ 0  0 -1  1  1  1  1 -1  4  3  1 -1]
 [ 0  0 -1  1 -1 -1  1 -1 -1  1  0  0]]
[[ 1  1  0  1 -1 -1 -1  1 -1  0 -1 -1]
 [-1 -1  0 -1 -1 -1 -1  1 -1 -1  0 -1]
 [ 1  2 -1 -1 -1  3  2 -1  1  1  1 -1]
 [-1  3 -1  2  0  0  0 -1  1 -1 -1 -1]
 [-1 -1 -1 -1 -1  1  2  1  2 -1  1  0]
 [-1 -1  2 -1 -1 -1 -1 -1  1 -1 -1  0]
 [-1 -1 -1  0  2 -1 -1  1  1 -1  1  1]
 [-1 -1 -1 -1  2 -1  1  0 -1 -1  1 -1]
 [-1 -1  1  1 -1  2 -1  1  1 -1  1  1]
 [ 0  0 -1  2  2 -1  0  1 -1  1  0  0]
 [ 1  1  1 -1  1  0  1  2 -1  2 -1 -1]
 [-1  1  1 -1  1  0 -1 -1  1  1 -1  1]]
[[ 0 -1 -1 -1  1 -1 -1 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1 -1  1  3 -1 -1 -1  1 -1]
 [-1 -1 -1  3  2  2 -1  3  2  0 -1 -1]
 [-1 -1 -1  2 -1  2 -1 -1 -1 -1 -1  1]
 [ 1  1 -1  1 -1  1 -1  2  2  3 -1 -1]
 [-1 -1  1 -1  0 -1  0 -1 -1 -1 -1 -1]
 [ 1  2 -1 -1 -1  1 -1  1 -1 -1 -1  2]
 [ 0  1 -1  1 -1 -1 -1 -1  1 -1 -1  2]
 [-1  2  3  2  2  1  1 -1 -1 -1 -1 -1]
 [ 2 -1 -1 -1  2 -1 -1 -1 -1  1  1 -1]
 [ 2 -1 -1  3 -1 -1 -1 -1 -1 -1  0 -1]
 [-1  1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]]
[[-1 -1 -1 -1  1 -1 -1 -1 -1 -1  2 -1]
 [-1  0  1 -1 -1 -1 -1 -1 -1 -1  5  2]
 [ 0 -1  0  0  0  0  0 -1 -1 -1 -1 -1]
 [ 1 -1 -1 -1 -1  1 -1  1  2  3 -1 -1]
 [-1 -1 -1  0 -1  1 -1 -1  2 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1  1 -1  2 -1  3 -1]
 [ 0 -1 -1 -1 -1 -1  1 -1 -1  1  1  0]
 [-1 -1  1 -1 -1 -1  2 -1 -1  1 -1 -1]
 [-1 -1  1  2 -1 -1 -1 -1 -1 -1  0 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1 -1  1 -1 -1]
 [-1 -1  0 -1  0  2 -1 -1 -1  0 -1 -1]]
[[-1 -1  0  0 -1 -1 -1 -1 -1  1 -1  0]
 [ 0 -1 -1 -1  3 -1 -1  1  2 -1 -1 -1]
 [-1 -1 -1 -1 -1  2  1 -1  2  2  3 -1]
 [-1  1 -1 -1 -1  1 -1  0 -1 -1  2 -1]
 [-1 -1 -1  1 -1 -1 -1 -1  1  1 -1 -1]
 [ 2 -1 -1 -1  1 -1  0  0  1 -1 -1 -1]
 [ 1 -1  3 -1 -1 -1 -1  1 -1 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1  4 -1 -1  2]
 [-1 -1 -1 -1  3 -1  2 -1 -1  5 -1  2]
 [-1 -1 -1  3 -1  0 -1  3 -1  4 -1 -1]
 [-1  4 -1 -1  0  0 -1 -1  1  3 -1  2]
 [ 2 -1  2 -1  0 -1 -1  0  0 -1 -1  2]]
[[ 1 -1 -1  0 -1 -1 -1 -1  0 -1 -1 -1]
 [-1  2  0 -1 -1  1 -1 -1  1 -1  1 -1]
 [-1  2  0 -1 -1 -1 -1 -1 -1  2 -1  2]
 [-1  2  1 -1 -1 -1 -1 -1 -1 -1 -1  1]
 [ 1  2 -1  2 -1 -1  1  1 -1 -1 -1  1]
 [-1  2  1 -1  1 -1 -1 -1  4 -1  1 -1]
 [-1  2  1 -1 -1 -1 -1 -1 -1 -1  1  0]
 [-1 -1  1  0  1 -1 -1 -1 -1  2 -1 -1]
 [ 2 -1 -1 -1  1 -1 -1 -1 -1  1  2 -1]
 [-1 -1 -1 -1  3  3  2 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1  3 -1  1 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1  3 -1 -1 -1 -1 -1]]
[[ 0 -1 -1 -1 -1 -1 -1  1 -1 -1 -1 -1]
 [ 1 -1 -1 -1 -1 -1 -1 -1  2 -1 -1  3]
 [-1 -1  1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [-1  2 -1 -1 -1  1  0 -1 -1 -1 -1  2]
 [-1 -1  1  2 -1 -1 -1 -1 -1 -1 -1 -1]
 [ 1 -1 -1 -1 -1 -1 -1  0  1  1 -1 -1]
 [-1 -1 -1  2  2  3 -1 -1 -1 -1 -1  0]
 [ 0 -1 -1  1 -1 -1 -1 -1 -1  2 -1  0]
 [-1  0 -1 -1 -1 -1 -1 -1 -1 -1 -1  0]
 [ 1 -1  2  2 -1  2 -1 -1 -1  3 -1 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1 -1 -1  0 -1]
 [-1  1 -1 -1  0 -1  1  2 -1 -1 -1 -1]]
[[ 0 -1 -1 -1  1  0  0  2 -1  2 -1  1]
 [ 0  1  2 -1  1 -1  1  3 -1  2 -1 -1]
 [ 0  0  0  0  0  1  2 -1  2  2  3  3]
 [ 1  1  1  0  0  2 -1  3 -1  1 -1 -1]
 [ 2 -1  2 -1  1  3 -1  2  1  2  3  2]
 [-1  3 -1  2 -1  2 -1  2  2 -1 -1  1]
 [ 1  3  3  5  3  2  2 -1  3  2 -1 -1]
 [ 1  2 -1 -1 -1  1  2 -1  3  2 -1 -1]
 [-1 -1  3  4  3  1  1  1  2 -1 -1 -1]
 [ 1  1  1 -1  1  0  0  0  1 -1  4 -1]
 [ 0  0  1  1  1  0  0  0  0  1 -1  2]
 [ 0  0  0 -1 -1  0 -1  0  0  1  1  1]]
[[-1 -1 -1 -1  1 -1  1 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1 -1  4 -1  1]
 [-1  1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [ 0 -1 -1  3 -1 -1 -1  1  2 -1 -1 -1]
 [-1  1  2 -1 -1 -1  2 -1 -1 -1 -1 -1]
 [-1 -1 -1 -1  2  2 -1 -1 -1 -1 -1 -1]
 [-1  0 -1 -1  1  1 -1 -1  3 -1 -1 -1]
 [-1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [-1  1 -1 -1 -1 -1 -1  2 -1 -1  1 -1]
 [-1 -1  2 -1 -1 -1 -1 -1 -1 -1  1 -1]
 [-1 -1  1 -1  1  2 -1 -1 -1 -1 -1 -1]
 [-1  0  1 -1 -1 -1 -1 -1 -1 -1 -1  0]]
[[-1 -1  0 -1 -1  2  1 -1  2 -1 -1 -1]
 [ 2  2  1  3 -1  3  2 -1 -1 -1 -1 -1]
 [ 2 -1 -1  3 -1 -1  2 -1  4 -1  3  1]
 [-1  4  2  3 -1  2 -1 -1 -1 -1  3 -1]
 [-1  4 -1 -1 -1 -1 -1  2 -1  1  2  1]
 [ 1 -1 -1  2 -1 -1 -1  1 -1 -1  1 -1]
 [-1  1  1  1  2 -1  4 -1 -1  1 -1 -1]
 [ 1  2 -1  1 -1 -1 -1 -1  1 -1  2 -1]
 [ 1 -1 -1 -1  1 -1  2 -1  1 -1 -1  1]
 [ 2  4 -1 -1 -1  0  0 -1 -1  2  2  1]
 [-1 -1 -1 -1  1  1 -1  0  1 -1 -1 -1]
 [ 1  2 -1 -1  1 -1  1  0  1 -1  2 -1]]
[[-1 -1 -1 -1  1 -1 -1  1 -1  1  1 -1]
 [-1  1 -1  1  1 -1 -1  1 -1  1  1 -1]
 [ 1  1 -1  0 -1 -1 -1 -1 -1  0  1  1]
 [-1 -1 -1 -1  0  1 -1 -1  2 -1  1  1]
 [-1 -1  1 -1 -1  1 -1 -1  2  1 -1  1]
 [ 1  2  2  1 -1 -1  1  1  1  1  1  1]
 [-1 -1 -1  1 -1  1 -1 -1  1 -1  0  0]
 [-1 -1  2  1 -1 -1 -1  1 -1 -1  1 -1]
 [ 1 -1  1  0 -1 -1 -1 -1  2  3 -1 -1]
 [ 3  3  2 -1 -1 -1  0 -1 -1  2 -1  2]
 [-1 -1 -1  0 -1  0 -1 -1 -1  3 -1 -1]
 [-1 -1 -1  0 -1  0  0 -1 -1 -1 -1 -1]]
[[-1  0 -1 -1 -1  2 -1 -1 -1 -1 -1 -1]
 [-1  1 -1  2 -1 -1 -1  4 -1 -1 -1  0]
 [-1 -1 -1 -1 -1 -1  3 -1 -1 -1 -1  0]
 [-1  2 -1 -1 -1 -1  2  2 -1 -1  0 -1]
 [ 0 -1 -1  3 -1  3  1 -1  1  2 -1 -1]
 [-1  1  0  1 -1  2  1 -1  1 -1 -1  2]
 [-1 -1 -1  1 -1 -1  2  1 -1 -1  4 -1]
 [-1 -1 -1  0 -1  2 -1  2  1  0  2 -1]
 [-1 -1  1  1  1  2 -1 -1  2  1 -1 -1]
 [-1 -1 -1 -1 -1  3  2 -1 -1  1 -1  1]
 [-1 -1 -1 -1 -1 -1 -1  2 -1  1 -1 -1]
 [-1 -1  2 -1  2 -1  2 -1  0  0 -1 -1]]
[[-1  2  1  1  1  1  1 -1  1  0  0  0]
 [ 2 -1  1  1 -1  1  1  1  1  0  1  1]
 [ 1  1  1  1  1  2  1  1  1  1  2 -1]
 [ 0  0  1  1  1  2 -1  3  3 -1  4  2]
 [ 0  1  2 -1  1  2 -1 -1  3 -1 -1  1]
 [ 1  2 -1  2  1  1  2  2  2  2  3  2]
 [-1  2  1  1  0  0  0  0  0  1 -1 -1]
 [-1  2  1  1  0  1  2  3  2  3 -1  4]
 [ 1  2 -1  1  0  1 -1 -1 -1  3 -1 -1]
 [-1 -1  2  2  0  1  3 -1  4  3  4  3]
 [ 1  2 -1  2  1  2  2  2  2 -1  2 -1]
 [ 0  1  1  2 -1  2 -1 -1  1  1  2  1]]
[[-1 -1  1 -1  1 -1 -1 -1 -1  0 -1 -1]
 [-1 -1  2  1 -1 -1 -1 -1  1  1 -1 -1]
 [-1 -1 -1 -1 -1  1 -1  1 -1 -1 -1 -1]
 [-1 -1 -1  2 -1 -1  2 -1 -1 -1 -1 -1]
 [-1 -1 -1  2 -1  4 -1  2  2 -1  2 -1]
 [-1 -1 -1 -1 -1 -1 -1  1 -1 -1 -1 -1]
 [ 0 -1 -1 -1 -1 -1  1 -1 -1 -1 -1  2]
 [-1 -1 -1 -1  3  2  2  3 -1 -1 -1 -1]
 [-1  1 -1 -1 -1  2 -1 -1 -1 -1 -1 -1]
 [ 2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1]
 [-1 -1  3 -1 -1 -1 -1 -1 -1 -1  1  1]
 [ 1  1 -1 -1 -1 -1 -1 -1  1  0 -1 -1]]
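
As a final illustrative sketch (assuming the session and graph above are still live; this was not part of the original run), the trained model can be asked to pick a square on a single fresh board:

testBoard = np.random.random(dimensions) < mineProbability
testCounts = boardPartialMineCounts(testBoard)
probs = sess.run(tf.nn.softmax(y), feed_dict={mineCounts: [testCounts.flatten()]})[0]
guessRow, guessCol = divmod(probs.argmax(), cols)
print("Guess square:", (guessRow, guessCol), "- mine there?", bool(testBoard[guessRow, guessCol]))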
