In [1]:
import numpy as np  
import tensorflow as tf

In [2]:
import csv

def load_data(filename):
    rows = []
    with open(filename) as csv_file:
        data_file = csv.reader(csv_file)
        for row in data_file:
            rows.append(row)
    print(rows[0])  # header row with the column names
    np_a = np.array(rows)
    x = np_a[1:, :-1].astype(np.float32)       # all columns but the last, skipping the header row
    target = np_a[1:, -1].astype(np.float32)   # last column (Occupancy) is the target
    return x, target
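
Reading every row into a Python list and converting afterwards works, but numpy can parse the file in one call. A minimal alternative sketch (not used below), assuming the same comma-separated layout with a single header row and all-numeric fields:

def load_data_np(filename):
    # Hypothetical helper: numpy parses the CSV directly, skipping the header.
    data = np.genfromtxt(filename, delimiter=',', skip_header=1)
    return data[:, :-1].astype(np.float32), data[:, -1].astype(np.float32)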

In [4]:
X, y = load_data('room/train.csv')


['Temperature', 'Humidity', 'Light', 'CO2', 'HumidityRatio', 'Occupancy']
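
The last column, Occupancy, is the binary target; the other five columns are the features. As a quick sanity check (output not recorded here), the labels can be confirmed to be 0/1:

print(np.unique(y))  # expected to print just the two label values, 0. and 1.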

In [5]:
print(len(X))
print(len(y))


8143
8143

In [6]:
print(X[0])


[  2.31800003e+01   2.72719994e+01   4.26000000e+02   7.21250000e+02
   4.79298783e-03]

In [7]:
# Inspect the data dimensions
n_samples = len(X)
n_features = len(X[0])
print(n_samples)
print(n_features)


8143
5
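
Note that the features live on very different scales (Light and CO2 are in the hundreds, HumidityRatio around 0.005), which is why the training below needs such a small learning rate and why its loss will bounce around. A sketch of standardizing the features first, with hypothetical names and not something the cell below relies on:

# Hypothetical preprocessing: zero mean, unit variance per feature.
X_mean = X.mean(axis=0)
X_std = X.std(axis=0)
X_scaled = (X - X_mean) / X_std  # reuse the same mean/std on any test data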

In [12]:
losses = []
training_steps = 50
learning_rate = 0.0001

with tf.Session() as sess:
  features = tf.constant(X.astype(np.float32))
  target = tf.constant(np.transpose([y]).astype(np.float32))
  weights = tf.Variable(tf.random_normal([5, 1], 0, 0.1))

  # Linear model: one logit per sample
  yhat = tf.matmul(features, weights)

  # Losses tried:
  # 1. Least squares on the raw linear output:
  #    loss = tf.nn.l2_loss(tf.subtract(yhat, target))
  # 2. Least squares on the sigmoid of the linear output:
  #    loss = tf.nn.l2_loss(tf.subtract(tf.sigmoid(yhat), target))
  # 3. Logistic regression: mean cross-entropy on the logits (used here)
  loss = tf.reduce_mean(
      tf.nn.sigmoid_cross_entropy_with_logits(labels=target, logits=yhat))

  update_weights = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)

  tf.global_variables_initializer().run()

  for _ in range(training_steps):
    update_weights.run()
    losses.append(loss.eval())

  print(losses)
  # Training is done; keep the final values for the graphs
  betas = weights.eval()
  yhat = yhat.eval()


[9.6451797, 5.5538592, 5.6349611, 4.9434986, 4.2920718, 6.3652496, 1.2269092, 0.58861834, 1.1666225, 5.3445373, 0.23696084, 0.25284705, 0.49302828, 1.9579509, 2.925565, 4.5533347, 0.33062381, 0.32181808, 0.31267282, 0.3048681, 0.29722393, 0.29046565, 0.28409836, 0.27929148, 0.27597418, 0.28044716, 0.30147675, 0.39860299, 0.79624313, 1.8351703, 1.7771947, 3.2095923, 0.55300552, 0.39299574, 0.3769815, 0.36684507, 0.35698965, 0.34737581, 0.33799917, 0.32887438, 0.32002047, 0.31145713, 0.30320495, 0.29528481, 0.2877171, 0.28052175, 0.27371672, 0.2673184, 0.26134023, 0.25579357]
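
The loss is not monotone: with unscaled features, plain gradient descent overshoots along the steep directions before settling. After training, betas holds the fitted weights and yhat the final logits, so training accuracy can be computed in numpy. A minimal sketch, assuming yhat is the (8143, 1) array evaluated above:

probs = 1.0 / (1.0 + np.exp(-yhat[:, 0]))  # sigmoid turns logits into probabilities
preds = (probs > 0.5).astype(np.float32)   # threshold at 0.5 for the class label
print(np.mean(preds == y))                 # fraction of training rows classified correctly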
