In [2]:
# create a simple multilayer convnet, with Rectified Linear Unit (ReLU) activation functions and cross-entropy loss.
import tensorflow as tf
import numpy as np
In [3]:
# define two helper functions for making weights and biases; specifically, a slightly positive bias since we're using ReLU activations
def weight_variable(shape, stddev=0.1):
    """Create a weight Variable initialized from a truncated normal distribution.

    Truncated-normal initialization avoids large outlier weights, which helps
    keep ReLU units in a useful activation range at the start of training.

    Args:
        shape: list/tuple of ints giving the tensor shape of the weights.
        stddev: standard deviation of the truncated normal (default 0.1,
            preserving the original hard-coded behavior).

    Returns:
        A tf.Variable holding the randomly initialized weights.
    """
    initial = tf.truncated_normal(shape, stddev=stddev)
    return tf.Variable(initial)
def bias_variable(shape, value=0.1):
    """Create a bias Variable initialized to a small positive constant.

    A slightly positive bias helps prevent "dead" ReLU units (neurons that
    never activate) early in training.

    Args:
        shape: list/tuple of ints giving the tensor shape of the biases.
        value: constant fill value (default 0.1, preserving the original
            hard-coded behavior).

    Returns:
        A tf.Variable holding the constant-initialized biases.
    """
    initial = tf.constant(value, shape=shape)
    return tf.Variable(initial)
In [ ]: