In [1]:
import tensorflow as tf
import numpy as np
import math
import matplotlib.pyplot as plt
from preprocessing import directory_to_data_files, sample_data
import time

data_filenames = ['./Data/data_19x19_filter.csv']
board_size = 19
D = board_size*board_size
n_batches = 100000 # Number of batches
batch_size = 10000 # Number of training examples randomly sampled from the data file (note: repeated sampling can yield duplicate examples)
empty_board_string = "0 " * (D - 1) + "0"

# Define the placeholders that will persist across all versions of the network
# For the CNN we want the board in matrix form as input

# X = tf.placeholder(tf.float32, [None, board_size, board_size, 1])
# The below placeholder is used when we have the [19x19x2] input type
X = tf.placeholder(tf.float32, [None, board_size, board_size, 2])
Y_ = tf.placeholder(tf.float32, [None, 2])

# Add a variable learning rate to the model in case we want some scheduling
learning_rate = tf.placeholder(tf.float32)
# Dropout keep-probability (the probability that a node is kept), fed as a placeholder so it can differ between training and evaluation
dropout = tf.placeholder(tf.float32)
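# For reference: X is fed as [batch, 19, 19, 2] (one plane for self, one for the other player)
# and Y_ as [batch, 2] one-hot win/loss labels; the conversion to this format happens in the training loop below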



In [2]:
def read_my_csv(filename_queue):
    # Set up the reader
    reader = tf.TextLineReader()
    # Grab the values from the file(s)
    key, value = reader.read(filename_queue)
    # Perform the decoding
    default_values = [["0"],[empty_board_string],[empty_board_string]]
    col1, col2, col3 = tf.decode_csv(value, record_defaults=default_values)
    # Perform preprocessing here: split the space-separated strings and convert them to floats
    split_col2 = tf.string_split(tf.expand_dims(col2, axis=0), delimiter=" ")
    features = tf.reshape(tf.string_to_number(split_col2.values, out_type=tf.float32),[D])
    split_col1 = tf.string_split(tf.expand_dims(col1, axis=0), delimiter=" ")
    labels = tf.reshape(tf.string_to_number(split_col1.values, out_type=tf.float32),[1])
    return features, labels
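
As a sanity check on the row format, here is a hedged sketch of the same parse done eagerly in plain Python (parse_row_eager is a hypothetical helper, not part of the pipeline), assuming each CSV row looks like "<label>,<361 space-separated values>,<361 space-separated values>" with the third column read but not returned:

def parse_row_eager(row, D=19*19):
    # Mirrors read_my_csv without the TF reader/queue machinery (sketch only)
    col1, col2, _ = row.split(',')
    features = np.array(col2.split(' '), dtype=np.float32)  # shape [D], like the reshape above
    label = np.array([float(col1)], dtype=np.float32)       # shape [1]
    return features, label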

In [3]:
def input_pipeline(filenames, batch_size):
    filename_queue = tf.train.string_input_producer(filenames, shuffle=True)
    example, label = read_my_csv(filename_queue)
    min_after_dequeue = 100
    capacity = min_after_dequeue + 3 * batch_size
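    # e.g. with batch_size = 10000 this buffers 100 + 3*10000 = 30100 examples in the queue;
    # a larger min_after_dequeue gives better shuffling at the cost of memory and startup time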
    # Create the batches using shuffle_batch which performs random shuffling
    example_batch, label_batch = tf.train.shuffle_batch([example, label], 
                                                        batch_size=batch_size, 
                                                        capacity=capacity, 
                                                        min_after_dequeue=min_after_dequeue)
    return example_batch, label_batch

In [4]:
example_batch, label_batch = input_pipeline(data_filenames, batch_size)
test_batch, test_label_batch = input_pipeline(data_filenames, 10000)

In [5]:
# Output depths of our convolutional layers
# Each layer outputs a [height x width x depth] box; with the 7x7 'VALID' convolution below,
# the first layer's output is [13 x 13 x L1]
L1 = 64  
L2 = 8  
L3 = 16

# Set our number of neurons in the fully connected layer
neurons = 500

######################
# Weights and biases #
######################

# Convolutional layers
# For the weight matrix [x, x, y, z], we have filters of size x by x with depth of y
# The output of the layer will have a new depth of z, as we have z of these filters
# W1 = tf.Variable(tf.truncated_normal([7, 7, 1, L1], stddev=0.01))
# The below weights are used when we have the [19x19x2] input type
W1 = tf.Variable(tf.truncated_normal([7, 7, 2, L1], stddev=0.01))
B1 = tf.Variable(tf.constant(0.1, tf.float32, [L1]))
# W2 = tf.Variable(tf.truncated_normal([3, 3, L1, L2], stddev=0.01))
# B2 = tf.Variable(tf.constant(0.1, tf.float32, [L2]))
# W3 = tf.Variable(tf.truncated_normal([3, 3, L2, L3], stddev=0.01))
# B3 = tf.Variable(tf.constant(0.1, tf.float32, [L3]))

# Fully Connected Layer
# We unwrap the input from the previous layer to build a 1-D vector
# Be careful here! In the format [x * x * depth, neurons], x is the spatial size of the last
# convolution/pooling output and depth is its channel count (L1 here, since layers 2 and 3 are commented out)
# This means that x will change if the filter sizes, strides, or padding change in the next section!
L3_size = 6
W4 = tf.Variable(tf.truncated_normal([L3_size * L3_size * L1, neurons], stddev=0.01))
B4 = tf.Variable(tf.constant(0.1, tf.float32, [neurons]))

# Output layer
# Two outputs, one per game outcome (win/loss), since this is the value network rather than a size-D move-selection layer
W5 = tf.Variable(tf.truncated_normal([neurons, 2], stddev=0.01))
B5 = tf.Variable(tf.constant(0.1, tf.float32, [2,]))

In [6]:
# Now we construct the graph of the network
# With 'SAME' padding a stride of 1 keeps the spatial size and a stride of 2 roughly halves it (rounding up);
# with 'VALID' padding, used below, the 7x7 filter alone shrinks the board from 19x19 to 13x13
stride = 1  # Output is 13x13 after the 7x7 'VALID' convolution
Y1 = tf.nn.relu(tf.nn.conv2d(X, W1, strides=[1, stride, stride, 1], padding='VALID') + B1)
YY1 = tf.nn.max_pool(Y1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='VALID')
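# Shape sanity check (a sketch added for clarity, not part of the original graph):
# 'VALID' 7x7 conv: 19 - 7 + 1 = 13, so Y1 is [batch, 13, 13, L1];
# 'VALID' 2x2 max pool with stride 2: 13 // 2 = 6, so YY1 is [batch, 6, 6, L1],
# which is why L3_size = 6 is used for the fully connected weights W4
assert (board_size - 7 + 1) // 2 == L3_size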
# stride = 2  # Output becomes 10x10 instead of 19x19 with stride of 2
# Y2 = tf.nn.relu(tf.nn.conv2d(Y1, W2, strides=[1, stride, stride, 1], padding='VALID') + B2)
# # Output is 5x5 with stride of 2
# Y3 = tf.nn.relu(tf.nn.conv2d(Y2, W3, strides=[1, stride, stride, 1], padding='VALID') + B3)

# Now we want to reshape the output so that it is a 1-D vector per example
# L3_size is the spatial size of the pooled output YY1 and L1 is its depth
YY3 = tf.reshape(YY1, shape=[-1, L3_size * L3_size * L1]) # unrolling

Y4 = tf.nn.relu(tf.matmul(YY3, W4) + B4)
YY4 = tf.nn.dropout(Y4, dropout)
Ylogits = tf.matmul(YY4, W5) + B5
Y = tf.nn.softmax(Ylogits)

# Do this on the logits so that we avoid taking log(0) and getting NaNs
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=Ylogits, labels=Y_)
# Scale by the batch size so we report the cross-entropy summed over the batch rather than the mean
cross_entropy = tf.reduce_mean(cross_entropy)*batch_size

correct = tf.equal(tf.argmax(Y, 1), tf.argmax(Y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))

# AdamOptimizer is what I keep seeing in example code and on Stack Overflow,
# and it's the one we read about in our reading on SGA; RMSProp is used here,
# with the alternatives left commented out below
train_step = tf.train.RMSPropOptimizer(learning_rate).minimize(cross_entropy)
# train_step = tf.train.AdamOptimizer(learning_rate).minimize(cross_entropy)
# train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy)

In [7]:
train_ces = []
train_accs = []
test_accs = []
test_ces = []

with tf.Session() as sess:
    start_time = time.time()
    sess.run(tf.global_variables_initializer())
    
    coordinator = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coordinator)
    test_x_batch, test_y_batch = sess.run([test_batch, test_label_batch])
    # Convert the scalar outcome label into a [win, loss] pair
    test_y_batch = [np.concatenate((np.where(yy > 0, yy, 0), np.where(yy < 0, abs(yy), 0))) for yy in test_y_batch]
    # Reshape from 1-D vectors to matrices
    test_x_batch = [np.reshape(x, [board_size, board_size, 1]) for x in test_x_batch]
    # The below stacks into 1s for self, 1s for other giving us [19x19x2] input
    test_x_batch = [np.dstack((np.where(xx > 0, xx, 0), np.where(xx < 0, xx, 0)*-1)) for xx in test_x_batch]
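    # Example of the two conversions above (illustrative values only):
    #   outcome label [ 1.] -> [1., 0.]   and   [-1.] -> [0., 1.]
    #   board entries of +1 land in plane 0 (self), -1 in plane 1 (other), empty points are 0 in both planes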
    test_data = {X: test_x_batch, Y_: test_y_batch, learning_rate:0.005, dropout:1.0}
    i = -1
    while True:  # Run until interrupted manually (see the KeyboardInterrupt below); the bounded loop is kept as an alternative
        i += 1
#     for i in xrange(n_batches):
        train_x_batch, train_y_batch = sess.run([example_batch, label_batch])
        # Convert the scalar outcome label into a [win, loss] pair
        train_y_batch = [np.concatenate((np.where(yy > 0, yy, 0), np.where(yy < 0, abs(yy), 0))) for yy in train_y_batch]
        # Reshape from 1-D vectors to matrices
        train_x_batch = [np.reshape(x, [board_size, board_size, 1]) for x in train_x_batch]
        # The below stacks into 1s for self, 1s for other giving us [19x19x2] input
        train_x_batch = [np.dstack((np.where(xx > 0, xx, 0), np.where(xx < 0, xx, 0)*-1)) for xx in train_x_batch]
        train_data = {X: train_x_batch, Y_: train_y_batch, learning_rate:0.005, dropout:0.75}
        sess.run(train_step, feed_dict=train_data)
        if i==0:                
            print 'Test      Test           Train     Train'
            print 'Accuracy  Cross_entropy  Accuracy  Cross_entropy  Batch'
            print '--------  -------------  --------  -------------  -----'
        if i%10==0 or i==n_batches-1:            
            test_accuracy, test_cross_entropy = sess.run([accuracy, cross_entropy], feed_dict = test_data)
            train_accuracy, train_cross_entropy = sess.run([accuracy, cross_entropy], feed_dict = train_data)
            print '%.4f        %.0f          %.3f       %.0f          %d' % \
            (test_accuracy, test_cross_entropy, train_accuracy, train_cross_entropy, i)
            test_accs.append(test_accuracy)
            test_ces.append(test_cross_entropy)
            train_accs.append(train_accuracy)
            train_ces.append(train_cross_entropy)

    coordinator.request_stop()
    coordinator.join(threads)
    sess.close()


Test      Test           Train     Train
Accuracy  Cross_entropy  Accuracy  Cross_entropy  Batch
--------  -------------  --------  -------------  -----
0.5048        21417          0.503       21495          0
0.4955        6933          0.503       6932          10
0.5063        6934          0.507       6929          20
0.5048        6931          0.506       6932          30
0.5039        6931          0.514       6929          40
0.5048        6931          0.507       6931          50
0.5048        6931          0.499       6932          60
0.5048        6931          0.503       6931          70
0.5048        6931          0.504       6931          80
0.5048        6931          0.498       6932          90
0.5048        6931          0.507       6930          100
0.5048        6931          0.503       6931          110
0.5048        6935          0.521       6924          120
0.5048        6932          0.512       6929          130
0.5048        6931          0.507       6930          140
0.5045        6931          0.501       6931          150
0.5043        6930          0.499       6932          160
0.5049        6931          0.497       6931          170
0.4952        6933          0.510       6929          180
0.4952        6937          0.493       6940          190
0.5156        6924          0.527       6917          200
0.5198        6921          0.537       6908          210
0.5338        6914          0.553       6888          220
0.5279        6919          0.536       6911          230
0.5216        6924          0.526       6900          240
0.5385        6891          0.568       6814          250
0.5298        6920          0.546       6865          260
0.5394        6885          0.553       6855          270
0.5300        6899          0.542       6879          280
0.5530        6836          0.593       6656          290
0.5443        6870          0.549       6843          300
0.5648        6812          0.596       6671          310
0.5615        6813          0.593       6704          320
0.5527        6824          0.575       6761          330
0.5746        6746          0.618       6533          340
0.5616        6818          0.583       6770          350
0.5732        6733          0.617       6496          360
0.5309        6922          0.549       6806          370
0.5852        6643          0.645       6373          380
0.5856        6728          0.611       6621          390
0.6003        6569          0.652       6299          400
0.5990        6657          0.627       6547          410
0.5314        6908          0.546       6888          420
0.5822        6707          0.612       6548          430
0.5524        6840          0.554       6832          440
0.6109        6533          0.660       6232          450
0.5973        6601          0.642       6194          460
0.5510        6815          0.567       6765          470
0.6112        6568          0.630       6453          480
0.6298        6503          0.650       6400          490
0.5724        6730          0.587       6630          500
0.5907        6548          0.612       6357          510
0.5464        6880          0.553       6857          520
0.5623        6678          0.582       6519          530
0.6328        6221          0.681       5753          540
0.5949        6646          0.623       6538          550
0.6000        6528          0.618       6452          560
0.5463        6741          0.552       6641          570
0.6788        5968          0.717       5668          580
0.5965        6714          0.620       6223          590
0.6063        6427          0.627       6230          600
0.5189        7331          0.531       7214          610
0.6987        5658          0.757       5185          620
0.5705        6903          0.577       6683          630
0.5949        6738          0.589       6693          640
0.6949        5760          0.716       5516          650
0.5882        6502          0.589       6457          660
0.7198        5386          0.764       5038          670
0.7224        5221          0.792       4640          680
0.6465        6015          0.672       5642          690
0.6843        6269          0.687       6191          700
0.7295        5194          0.773       4791          710
0.6848        6022          0.689       5948          720
0.7440        5008          0.786       4550          730
0.6565        6096          0.684       5834          740
0.7464        4980          0.794       4556          750
0.7051        5268          0.742       4907          760
0.5463        6829          0.546       6819          770
0.5282        6705          0.528       6725          780
0.5572        6784          0.574       6705          790
0.7275        5343          0.735       5291          800
0.6783        6288          0.687       6202          810
0.7170        5620          0.717       5502          820
0.7649        4894          0.770       4770          830
0.6485        6348          0.649       6234          840
0.7333        5287          0.739       5185          850
0.7855        4372          0.817       3967          860
0.5960        6622          0.602       6531          870
0.7828        4506          0.789       4348          880
0.7926        4153          0.832       3691          890
0.7865        4373          0.813       4058          900
0.7884        4326          0.813       4019          910
0.7931        4125          0.826       3715          920
0.8062        3911          0.822       3706          930
0.7029        5238          0.694       5347          940
0.7760        4224          0.793       3969          950
0.8143        3887          0.808       3899          960
0.6905        5722          0.683       5779          970
0.6390        6475          0.615       6478          980
0.7744        5207          0.770       5209          990
0.8268        3551          0.846       3267          1000
0.7147        5020          0.699       5284          1010
0.8322        3667          0.830       3645          1020
0.7086        4935          0.700       5181          1030
0.8139        3696          0.822       3573          1040
0.8304        3494          0.843       3328          1050
0.7643        4312          0.790       3981          1060
0.8318        3531          0.822       3634          1070
0.7097        4992          0.708       5114          1080
0.7611        4979          0.766       4874          1090
0.8365        3349          0.852       3135          1100
0.6645        5501          0.658       5484          1110
0.8237        3520          0.837       3461          1120
0.6636        5645          0.666       5730          1130
0.8351        3350          0.848       3170          1140
0.8315        3670          0.820       3693          1150
0.7239        4811          0.728       4825          1160
0.8457        3170          0.863       2936          1170
0.8309        3844          0.835       3832          1180
0.7674        4299          0.748       4536          1190
0.8400        3135          0.839       3188          1200
0.8082        3632          0.800       3743          1210
0.8452        3166          0.845       3268          1220
0.8064        3627          0.806       3659          1230
0.8336        3278          0.831       3368          1240
0.8102        3560          0.804       3657          1250
0.8561        2989          0.853       2985          1260
0.8112        3551          0.815       3506          1270
0.8154        3506          0.810       3599          1280
0.8147        3533          0.812       3540          1290
0.8130        3575          0.808       3676          1300
0.8261        3279          0.832       3263          1310
0.8294        3280          0.838       3330          1320
0.8289        3255          0.829       3319          1330
0.8399        3160          0.840       3247          1340
0.8394        3229          0.832       3362          1350
0.8350        3083          0.843       3108          1360
0.8352        3143          0.836       3200          1370
0.8485        2951          0.846       3031          1380
0.8300        3228          0.827       3321          1390
0.8281        3281          0.830       3278          1400
0.8398        3030          0.840       3123          1410
0.8366        3131          0.836       3214          1420
0.8367        3177          0.826       3253          1430
0.8252        3273          0.821       3376          1440
0.8262        3283          0.829       3312          1450
0.8524        2879          0.847       3005          1460
0.8556        2761          0.854       2842          1470
0.8434        3024          0.837       3166          1480
0.8344        3086          0.833       3181          1490
0.8539        2752          0.863       2727          1500
0.8495        2885          0.846       2975          1510
0.8600        2669          0.853       2841          1520
0.8430        3026          0.835       3116          1530
0.8474        2928          0.846       2999          1540
0.8542        2856          0.851       2893          1550
0.8157        3408          0.804       3628          1560
0.8484        2886          0.842       3005          1570
0.8459        2901          0.842       2977          1580
0.8565        2740          0.855       2843          1590
0.8507        2849          0.845       3021          1600
0.8674        2584          0.871       2549          1610
0.8485        2841          0.849       2894          1620
0.8431        2941          0.832       3161          1630
0.8506        2857          0.851       2934          1640
0.8656        2557          0.862       2703          1650
0.8500        2874          0.850       2928          1660
0.8631        2622          0.860       2707          1670
0.8374        3063          0.833       3170          1680
0.8604        2733          0.853       2830          1690
0.8677        2548          0.866       2614          1700
0.8484        2943          0.842       3071          1710
0.8519        2797          0.850       2935          1720
0.8381        2991          0.836       3081          1730
0.8731        2440          0.871       2588          1740
0.8685        2520          0.867       2586          1750
0.8609        2555          0.864       2696          1760
0.8706        2462          0.871       2498          1770
0.8543        2698          0.859       2745          1780
0.8669        2525          0.860       2708          1790
0.8623        2682          0.854       2874          1800
0.8717        2464          0.868       2590          1810
0.8671        2502          0.864       2634          1820
0.8710        2439          0.870       2589          1830
0.8515        2855          0.843       3073          1840
0.8924        2100          0.866       2630          1850
0.9006        1971          0.869       2535          1860
0.8903        2171          0.864       2658          1870
0.8942        2027          0.882       2370          1880
0.8908        2182          0.867       2598          1890
0.8863        2175          0.861       2639          1900
0.8812        2324          0.862       2678          1910
0.8766        2420          0.850       2853          1920
0.8915        2143          0.868       2482          1930
0.8877        2143          0.873       2425          1940
0.8740        2415          0.855       2848          1950
0.8827        2253          0.870       2589          1960
0.8859        2123          0.879       2421          1970
0.8850        2139          0.877       2423          1980
0.8881        2048          0.880       2347          1990
0.8820        2194          0.870       2527          2000
0.8851        2164          0.873       2495          2010
0.8895        2118          0.873       2496          2020
0.8822        2247          0.865       2619          2030
0.8810        2258          0.879       2398          2040
0.8709        2450          0.855       2712          2050
0.8878        2114          0.881       2338          2060
0.8766        2331          0.862       2672          2070
0.8842        2210          0.878       2438          2080
0.8921        2030          0.877       2300          2090
0.8845        2214          0.871       2514          2100
0.8605        2615          0.845       2963          2110
0.8897        2100          0.888       2279          2120
0.8729        2471          0.856       2752          2130
0.8838        2242          0.868       2542          2140
0.8848        2192          0.871       2401          2150
0.8883        2155          0.878       2341          2160
0.8853        2169          0.877       2399          2170
0.8895        2058          0.884       2248          2180
0.8864        2162          0.883       2350          2190
0.8841        2158          0.873       2385          2200
0.8787        2314          0.872       2522          2210
0.8875        2094          0.878       2389          2220
0.8921        2035          0.888       2194          2230
0.8870        2144          0.887       2242          2240
0.8862        2165          0.876       2463          2250
0.8916        2011          0.891       2120          2260
0.8888        2074          0.879       2305          2270
0.8775        2268          0.867       2517          2280
0.8904        2017          0.887       2247          2290
0.8910        2025          0.878       2316          2300
0.8787        2244          0.874       2486          2310
0.8950        1993          0.881       2269          2320
0.8890        2041          0.883       2231          2330
0.8857        2164          0.877       2365          2340
0.8872        2046          0.879       2357          2350
0.8862        2079          0.881       2378          2360
0.8952        1968          0.887       2187          2370
0.8760        2388          0.866       2628          2380
0.8949        1950          0.891       2060          2390
0.8863        2108          0.872       2453          2400
0.8724        2405          0.861       2773          2410
0.8909        2035          0.887       2213          2420
0.8798        2314          0.868       2595          2430
0.8827        2184          0.881       2337          2440
0.8809        2280          0.863       2577          2450
0.8957        1936          0.891       2150          2460
0.8947        1996          0.895       2115          2470
0.8933        1982          0.887       2209          2480
0.8926        1961          0.887       2189          2490
0.8808        2203          0.878       2359          2500
0.8922        2064          0.893       2149          2510
0.8936        2041          0.882       2247          2520
0.8962        1955          0.885       2192          2530
0.8935        1984          0.883       2227          2540
0.8966        1978          0.896       2094          2550
0.8992        1938          0.890       2097          2560
0.8818        2238          0.876       2474          2570
0.8913        2006          0.900       2052          2580
0.8977        1915          0.896       2129          2590
0.8936        1973          0.889       2182          2600
0.8895        2043          0.888       2223          2610
0.8928        2030          0.886       2263          2620
0.8971        2014          0.884       2256          2630
0.8981        1934          0.891       2125          2640
0.8914        2043          0.887       2199          2650
0.8966        1957          0.890       2143          2660
0.8897        2036          0.887       2216          2670
0.8962        1914          0.890       2062          2680
0.8903        1993          0.891       2152          2690
0.8977        1881          0.896       2018          2700
0.8891        2074          0.887       2228          2710
0.8945        1963          0.888       2080          2720
0.8910        2037          0.888       2199          2730
0.8922        1967          0.895       2107          2740
0.8906        2005          0.882       2247          2750
0.8960        1925          0.894       2046          2760
0.8844        2205          0.858       2635          2770
0.9101        1737          0.888       2162          2780
0.9060        1731          0.890       2153          2790
0.9093        1711          0.884       2232          2800
0.9066        1753          0.895       2056          2810
0.9052        1754          0.896       2062          2820
0.9015        1851          0.882       2296          2830
0.9112        1675          0.899       1929          2840
0.9124        1668          0.892       2110          2850
0.9102        1632          0.900       1961          2860
0.9001        1859          0.885       2227          2870
0.9085        1684          0.895       2056          2880
0.8953        1918          0.886       2217          2890
0.9081        1689          0.899       2025          2900
0.8975        1967          0.880       2331          2910
0.8963        1896          0.886       2241          2920
0.9037        1817          0.894       2030          2930
0.9086        1748          0.898       2014          2940
0.9015        1906          0.892       2185          2950
0.9024        1763          0.893       2026          2960
0.9047        1727          0.898       1974          2970
0.9044        1800          0.892       2107          2980
0.9083        1716          0.905       1879          2990
0.9021        1782          0.892       2095          3000
0.8992        1959          0.887       2228          3010
0.9040        1782          0.897       2011          3020
0.9030        1808          0.887       2153          3030
0.8919        2009          0.877       2318          3040
0.9007        1846          0.897       2080          3050
0.9071        1691          0.902       1852          3060
0.8934        1938          0.886       2205          3070
0.9013        1886          0.893       2134          3080
0.9032        1782          0.896       1992          3090
0.8974        1890          0.886       2252          3100
0.8818        2233          0.869       2536          3110
0.9010        1905          0.895       2095          3120
0.8933        1981          0.884       2274          3130
0.9074        1705          0.899       1939          3140
0.9095        1754          0.897       1984          3150
0.9065        1735          0.907       1819          3160
0.9025        1769          0.894       2058          3170
0.9051        1695          0.905       1874          3180
0.9029        1762          0.904       1878          3190
0.9020        1799          0.898       2043          3200
0.8966        1917          0.892       2179          3210
0.9071        1712          0.903       1911          3220
0.8869        2143          0.871       2451          3230
0.9086        1671          0.904       1897          3240
0.8976        1928          0.884       2198          3250
0.9017        1807          0.891       2099          3260
0.9043        1755          0.899       1980          3270
0.9049        1763          0.898       1946          3280
0.9051        1698          0.898       1949          3290
0.9043        1749          0.894       2045          3300
0.9040        1724          0.901       1893          3310
0.9013        1832          0.889       2140          3320
0.9085        1732          0.902       1962          3330
0.8975        1901          0.889       2185          3340
0.8989        1945          0.883       2280          3350
0.9063        1751          0.898       2031          3360
0.9050        1759          0.897       1964          3370
0.9060        1764          0.896       1995          3380
0.8943        1949          0.887       2178          3390
0.8986        1866          0.895       2075          3400
0.9021        1882          0.891       2140          3410
0.9054        1730          0.903       1922          3420
0.8997        1980          0.882       2210          3430
0.9059        1735          0.894       1991          3440
0.9054        1796          0.903       1952          3450
0.9002        1914          0.891       2063          3460
0.9020        1831          0.892       2043          3470
0.9061        1774          0.900       1945          3480
0.9060        1735          0.900       1946          3490
0.9094        1656          0.904       1862          3500
0.9034        1775          0.898       1958          3510
0.9042        1786          0.897       1991          3520
0.8956        1933          0.895       2130          3530
0.9052        1716          0.900       1944          3540
0.9078        1672          0.901       1914          3550
0.9042        1749          0.893       2007          3560
0.9037        1741          0.899       1951          3570
0.8930        2007          0.887       2214          3580
0.9012        1846          0.895       2099          3590
0.9073        1628          0.907       1777          3600
0.8906        2048          0.888       2226          3610
0.9042        1728          0.898       2011          3620
0.8988        1861          0.895       2111          3630
0.9061        1761          0.904       1968          3640
0.9048        1789          0.897       1977          3650
0.9088        1625          0.908       1798          3660
0.8995        1802          0.896       2037          3670
0.9062        1778          0.897       2031          3680
0.9082        1671          0.899       1945          3690
0.9188        1578          0.894       2052          3700
0.9233        1468          0.904       1892          3710
0.9173        1493          0.897       2014          3720
0.9162        1594          0.893       2067          3730
0.9194        1475          0.895       2003          3740
0.9205        1515          0.902       1906          3750
0.9126        1655          0.892       2080          3760
0.9167        1524          0.897       1939          3770
0.9156        1553          0.905       1861          3780
0.9210        1523          0.904       1881          3790
0.9159        1671          0.894       2050          3800
0.9149        1570          0.905       1868          3810
0.9211        1533          0.902       1912          3820
0.9096        1724          0.887       2221          3830
0.9202        1495          0.908       1789          3840
0.9127        1630          0.896       1957          3850
0.9159        1545          0.899       1954          3860
0.9058        1904          0.884       2276          3870
0.9139        1613          0.902       2008          3880
0.9134        1581          0.909       1913          3890
0.9141        1562          0.901       1927          3900
0.9163        1526          0.900       1932          3910
0.9150        1525          0.908       1818          3920
0.9179        1516          0.905       1819          3930
0.9180        1508          0.911       1759          3940
0.9120        1582          0.900       2059          3950
0.9154        1574          0.901       1901          3960
0.9177        1546          0.904       1877          3970
0.9164        1571          0.910       1809          3980
0.9144        1579          0.908       1856          3990
0.9191        1471          0.910       1717          4000
INFO:tensorflow:Error reported to Coordinator: <type 'exceptions.RuntimeError'>, Attempted to use a closed Session.
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-7-687af6ae30fd> in <module>()
     30         train_x_batch = [np.dstack((np.where(xx > 0, xx, 0), np.where(xx < 0, xx, 0)*-1)) for xx in train_x_batch]
     31         train_data = {X: train_x_batch, Y_: train_y_batch, learning_rate:0.005, dropout:0.75}
---> 32         sess.run(train_step, feed_dict=train_data)
     33         if i==0:
     34             print 'Test      Test           Train     Train'

/usr/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
    765     try:
    766       result = self._run(None, fetches, feed_dict, options_ptr,
--> 767                          run_metadata_ptr)
    768       if run_metadata:
    769         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/usr/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
    963     if final_fetches or final_targets:
    964       results = self._do_run(handle, final_targets, final_fetches,
--> 965                              feed_dict_string, options, run_metadata)
    966     else:
    967       results = []

/usr/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
   1013     if handle is None:
   1014       return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1015                            target_list, options, run_metadata)
   1016     else:
   1017       return self._do_call(_prun_fn, self._session, handle, feed_dict,

/usr/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_call(self, fn, *args)
   1020   def _do_call(self, fn, *args):
   1021     try:
-> 1022       return fn(*args)
   1023     except errors.OpError as e:
   1024       message = compat.as_text(e.message)

/usr/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
   1002         return tf_session.TF_Run(session, options,
   1003                                  feed_dict, fetch_list, target_list,
-> 1004                                  status, run_metadata)
   1005 
   1006     def _prun_fn(session, handle, feed_dict, fetch_list):

KeyboardInterrupt: 

In [16]:
batches = [i*10 for i in xrange(len(test_accs))]
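# Metrics were recorded every 10th training batch, so i*10 recovers the batch index for the x-axis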
plt.title('CNN Value Network')
plt.plot(batches, test_accs, label='Test Accuracy')
plt.plot(batches, train_accs, label='Training Accuracy')
plt.legend(loc='best')
# plt.xlabel('Batch')
plt.ylabel('Accuracy')
plt.show()
plt.title('CNN Value Network Cross-Entropy')
plt.plot(batches, test_ces, label='Test Cross-Entropy')
plt.plot(batches, train_ces, label='Training Cross-Entropy')
plt.legend(loc='best')
plt.ylabel('Cross-Entropy')
# plt.xlabel('Batch')

plt.show()



In [ ]: