In [ ]:
# Import libraries for simulation
import tensorflow as tf
import numpy as np
In [ ]:
# Board size (rows, cols) used for every generated game.
dimensions = (12,12)
# Independent probability that any given square holds a mine.
mineProbability = 0.2
In [ ]:
# count the number of mines in the proximity of given square, including square itself
def countMines(board, r, c):
    """Return the number of mines in the 3x3 neighbourhood of (r, c).

    The square (r, c) itself is included in the count.

    Args:
        board: 2-D numpy array of truthy mine indicators (bool or 0/1).
        r: row index of the centre square.
        c: column index of the centre square.

    Returns:
        int: mine count over the (boundary-clipped) neighbourhood.
    """
    # Slice the neighbourhood directly instead of looping over the nine
    # cells with bounds checks: numpy clips an out-of-range stop index
    # automatically, and max() clips a negative start (which would
    # otherwise wrap around to the end of the axis).
    neighbourhood = board[max(r - 1, 0):r + 2, max(c - 1, 0):c + 2]
    return int(neighbourhood.sum())
In [ ]:
# Converts a board of mines into a board of mine counts
def boardMineCounts(board):
    """Convert a board of mine indicators into per-square mine counts.

    Each output cell holds the number of mines in the 3x3 neighbourhood
    of the corresponding input cell, the cell itself included -- the same
    value countMines() returns for that cell.

    Args:
        board: 2-D numpy array of truthy mine indicators (bool or 0/1).

    Returns:
        2-D int numpy array with the same shape as board.
    """
    b = np.asarray(board, dtype=int)
    rows, cols = b.shape
    # Zero-pad by one ring so every 3x3 window stays in bounds, then sum
    # the nine shifted views: a vectorised 3x3 box filter that replaces
    # the original per-cell Python double loop.
    padded = np.pad(b, 1)
    return sum(padded[i:i + rows, j:j + cols]
               for i in range(3) for j in range(3))
In [ ]:
# NOTE(review): dead code, disabled by wrapping it in a bare string literal.
# It appears intended to hide some information (mine squares, plus a random
# fraction of other squares) by setting those counts to -1, but it references
# names that are undefined in this file (`r` -- presumably a `random` alias --
# and `missingProbability`). Confirm and define both before re-enabling.
'''
def boardPartialMineCounts(board):
result = boardMineCounts(board)
for index, x in np.ndenumerate(board):
if x: result[index] = -1
elif r.uniform(0, 1) < missingProbability: result[index] = -1
return result
'''
In [ ]:
def next_training_batch(n):
    """Generate a random training batch of size n.

    Returns a tuple (batch_xs, batch_ys) of 2-D float arrays with n rows:
    batch_xs holds flattened 3x3 neighbourhood mine counts and batch_ys
    the flattened 0/1 mine boards those counts were derived from.
    """
    # Sample every board first; each square is a mine with mineProbability.
    boards = [np.random.random(dimensions) < mineProbability for _ in range(n)]
    batch_xs = np.asarray([boardMineCounts(b).flatten().astype(float) for b in boards])
    batch_ys = np.asarray([b.flatten().astype(float) for b in boards])
    return (batch_xs, batch_ys)
In [ ]:
# Create the model
# Single dense layer with a sigmoid output: each of the `size` outputs is
# the predicted probability that the corresponding square holds a mine,
# given the flattened board of mine counts as input (TF1 graph mode).
rows, cols = dimensions
size = rows*cols
# x: flattened mine-count boards, one row per example.
x = tf.placeholder(tf.float32, [None, size])
# Small random init (stddev 0.01) keeps initial sigmoid outputs near 0.5.
W = tf.Variable(tf.random_normal([size, size], stddev=0.01))
b = tf.Variable(tf.random_normal([size], stddev=0.01))
y = tf.sigmoid(tf.matmul(x, W) + b)
In [ ]:
# Placeholder for the 'labels', ie the correct answer
# (flattened 0/1 mine boards, same layout as the model output y).
y_ = tf.placeholder(tf.float32, [None, size])
In [ ]:
# Loss function
# Mean squared error between predicted mine probabilities and 0/1 labels.
mean_squared_error = tf.losses.mean_squared_error(labels=y_, predictions=y)
In [ ]:
# Summaries for tensorboard
with tf.name_scope('W_reshape'):
    # W is (size, size); add batch and channel dims so it renders as one
    # greyscale image per write.
    image_shaped_W = tf.reshape(W, [-1, size, size, 1])
    tf.summary.image('W', image_shaped_W, 1000)
with tf.name_scope('b_reshape'):
    # b is negated for display -- presumably so larger mine bias renders
    # brighter; TODO confirm the intended colour mapping.
    image_shaped_b = tf.reshape(-b, [-1, rows, cols, 1])
    tf.summary.image('b', image_shaped_b, 1000)
# Fix: this scalar is the MSE loss, not an accuracy metric; the previous
# tag 'accuracy' mislabelled it in TensorBoard.
_ = tf.summary.scalar('mean_squared_error', mean_squared_error)
In [ ]:
# Optimiser
# Adam with default hyperparameters, minimising the MSE loss.
train_step = tf.train.AdamOptimizer().minimize(mean_squared_error)
In [ ]:
# Create session and initialise or restore stuff
# The saver persists only the model parameters, not the Adam optimiser state.
saver = tf.train.Saver({"W": W, "b": b})
sess = tf.InteractiveSession()
merged = tf.summary.merge_all()
# TensorBoard event files (and the graph) are written to the current dir.
writer = tf.summary.FileWriter('.', sess.graph)
tf.global_variables_initializer().run()
In [ ]:
# Restore model?
#saver.restore(sess, "./saves1/model-100000")
In [ ]:
# Train
# 100001 iterations of batches of 100 random boards; summaries are written
# every step, progress printed every 100, a checkpoint saved every 1000.
for iteration in range(100001):
    batch_xs, batch_ys = next_training_batch(100)
    # One optimisation step, fetching the merged summaries in the same run.
    summary, _ = sess.run([merged, train_step], feed_dict={x: batch_xs, y_: batch_ys})
    writer.add_summary(summary, iteration)
    if iteration % 100 == 0:
        # Fix: this value is the MSE loss, not an accuracy -- the previous
        # message ("Accuracy at step ...") mislabelled it.
        loss = sess.run(mean_squared_error, feed_dict={x: batch_xs, y_: batch_ys})
        print('MSE loss at step %s: %s' % (iteration, loss))
    if iteration % 1000 == 0:
        save_path = saver.save(sess, './saves1/model', global_step=iteration)
        print("Model saved in file: %s" % save_path)
In [ ]:
# Test trained model
# Evaluate MSE on a fresh batch of 1000 random boards (this is a loss,
# so lower is better).
batch_xs, batch_ys = next_training_batch(1000)
print(sess.run(mean_squared_error, feed_dict={x: batch_xs, y_: batch_ys}))
In [ ]:
# Run a single randomised test
mineCounts, mines = next_training_batch(1)
# Ground-truth mine layout for this board.
print("mines")
print(mines.astype(int).reshape(dimensions))
# Threshold the sigmoid outputs at 0.5 to get hard mine predictions.
print("predicted mines")
result = sess.run(y, feed_dict={x: mineCounts})
predictions = (result > 0.5).astype(int)
print(predictions.reshape(dimensions))
# Number of squares where the prediction disagrees with the truth.
print("errors")
print((predictions != mines.astype(int)).astype(int).sum())
print("----")
# Compare count boards: the network's input counts vs the counts implied
# by the predicted layout (the quantity the net is implicitly inverting).
print("mine counts")
print(mineCounts.astype(int).reshape(dimensions))
print("predicted mine counts")
print(boardMineCounts(predictions.reshape(dimensions)))
# Number of count cells the predicted layout fails to reproduce.
print("errors")
print((mineCounts.astype(int).reshape(dimensions) != boardMineCounts(predictions.reshape(dimensions))).astype(int).sum())
In [ ]:
# Inspect the learned per-square bias, negated as in the 'b' summary image.
print(sess.run(-b))
In [ ]: