In [41]:
# import the required packages
import tensorflow as tf
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
%matplotlib inline

In [42]:
# load the iris dataset; the file has no header row, so column names f1-f5 are supplied here
# (f1-f4 are sepal length/width and petal length/width, f5 is the species)
data = pd.read_csv('/Users/Enkay/Documents/Viky/python/tensorflow/iris/iris.data',
                   names=['f1', 'f2', 'f3', 'f4', 'f5'])

In [43]:
data


Out[43]:
f1 f2 f3 f4 f5
0 5.1 3.5 1.4 0.2 Iris-setosa
1 4.9 3.0 1.4 0.2 Iris-setosa
2 4.7 3.2 1.3 0.2 Iris-setosa
3 4.6 3.1 1.5 0.2 Iris-setosa
4 5.0 3.6 1.4 0.2 Iris-setosa
5 5.4 3.9 1.7 0.4 Iris-setosa
6 4.6 3.4 1.4 0.3 Iris-setosa
7 5.0 3.4 1.5 0.2 Iris-setosa
8 4.4 2.9 1.4 0.2 Iris-setosa
9 4.9 3.1 1.5 0.1 Iris-setosa
10 5.4 3.7 1.5 0.2 Iris-setosa
11 4.8 3.4 1.6 0.2 Iris-setosa
12 4.8 3.0 1.4 0.1 Iris-setosa
13 4.3 3.0 1.1 0.1 Iris-setosa
14 5.8 4.0 1.2 0.2 Iris-setosa
15 5.7 4.4 1.5 0.4 Iris-setosa
16 5.4 3.9 1.3 0.4 Iris-setosa
17 5.1 3.5 1.4 0.3 Iris-setosa
18 5.7 3.8 1.7 0.3 Iris-setosa
19 5.1 3.8 1.5 0.3 Iris-setosa
20 5.4 3.4 1.7 0.2 Iris-setosa
21 5.1 3.7 1.5 0.4 Iris-setosa
22 4.6 3.6 1.0 0.2 Iris-setosa
23 5.1 3.3 1.7 0.5 Iris-setosa
24 4.8 3.4 1.9 0.2 Iris-setosa
25 5.0 3.0 1.6 0.2 Iris-setosa
26 5.0 3.4 1.6 0.4 Iris-setosa
27 5.2 3.5 1.5 0.2 Iris-setosa
28 5.2 3.4 1.4 0.2 Iris-setosa
29 4.7 3.2 1.6 0.2 Iris-setosa
... ... ... ... ... ...
120 6.9 3.2 5.7 2.3 Iris-virginica
121 5.6 2.8 4.9 2.0 Iris-virginica
122 7.7 2.8 6.7 2.0 Iris-virginica
123 6.3 2.7 4.9 1.8 Iris-virginica
124 6.7 3.3 5.7 2.1 Iris-virginica
125 7.2 3.2 6.0 1.8 Iris-virginica
126 6.2 2.8 4.8 1.8 Iris-virginica
127 6.1 3.0 4.9 1.8 Iris-virginica
128 6.4 2.8 5.6 2.1 Iris-virginica
129 7.2 3.0 5.8 1.6 Iris-virginica
130 7.4 2.8 6.1 1.9 Iris-virginica
131 7.9 3.8 6.4 2.0 Iris-virginica
132 6.4 2.8 5.6 2.2 Iris-virginica
133 6.3 2.8 5.1 1.5 Iris-virginica
134 6.1 2.6 5.6 1.4 Iris-virginica
135 7.7 3.0 6.1 2.3 Iris-virginica
136 6.3 3.4 5.6 2.4 Iris-virginica
137 6.4 3.1 5.5 1.8 Iris-virginica
138 6.0 3.0 4.8 1.8 Iris-virginica
139 6.9 3.1 5.4 2.1 Iris-virginica
140 6.7 3.1 5.6 2.4 Iris-virginica
141 6.9 3.1 5.1 2.3 Iris-virginica
142 5.8 2.7 5.1 1.9 Iris-virginica
143 6.8 3.2 5.9 2.3 Iris-virginica
144 6.7 3.3 5.7 2.5 Iris-virginica
145 6.7 3.0 5.2 2.3 Iris-virginica
146 6.3 2.5 5.0 1.9 Iris-virginica
147 6.5 3.0 5.2 2.0 Iris-virginica
148 6.2 3.4 5.4 2.3 Iris-virginica
149 5.9 3.0 5.1 1.8 Iris-virginica

150 rows × 5 columns


In [44]:
data["f5"].value_counts()


Out[44]:
Iris-setosa        50
Iris-versicolor    50
Iris-virginica     50
Name: f5, dtype: int64

In [45]:
# scatter of f1 against f2, coloured by species (newer seaborn releases rename size= to height=)
sns.FacetGrid(data, hue="f5", size=5) \
   .map(plt.scatter, "f1", "f2") \
   .add_legend()


Out[45]:
<seaborn.axisgrid.FacetGrid at 0x11c716090>

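For a broader look at class separation than the single f1-f2 scatter, seaborn's pairplot draws every pairwise feature scatter in one call. A side note, not executed in this notebook; it assumes f5 still holds the string labels:

sns.pairplot(data, hue='f5')   # scatter matrix over f1-f4, coloured by species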
In [46]:
# one-hot encode the class labels:
# Iris-setosa -> [1,0,0], Iris-versicolor -> [0,1,0], Iris-virginica -> [0,0,1]
s = np.asarray([1, 0, 0])
ve = np.asarray([0, 1, 0])
vi = np.asarray([0, 0, 1])
data['f5'] = data['f5'].map({'Iris-setosa': s, 'Iris-versicolor': ve, 'Iris-virginica': vi})

data


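As an aside, pandas can build the same one-hot encoding directly. A minimal sketch (not the notebook's approach), applied to the original string column before the map above:

one_hot = pd.get_dummies(data['f5'])          # columns Iris-setosa, Iris-versicolor, Iris-virginica
labels = one_hot.values.astype(np.float32)    # (150, 3) matrix with rows like [1, 0, 0]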
In [47]:
data


Out[47]:
f1 f2 f3 f4 f5
0 5.1 3.5 1.4 0.2 [1, 0, 0]
1 4.9 3.0 1.4 0.2 [1, 0, 0]
2 4.7 3.2 1.3 0.2 [1, 0, 0]
3 4.6 3.1 1.5 0.2 [1, 0, 0]
4 5.0 3.6 1.4 0.2 [1, 0, 0]
5 5.4 3.9 1.7 0.4 [1, 0, 0]
6 4.6 3.4 1.4 0.3 [1, 0, 0]
7 5.0 3.4 1.5 0.2 [1, 0, 0]
8 4.4 2.9 1.4 0.2 [1, 0, 0]
9 4.9 3.1 1.5 0.1 [1, 0, 0]
10 5.4 3.7 1.5 0.2 [1, 0, 0]
11 4.8 3.4 1.6 0.2 [1, 0, 0]
12 4.8 3.0 1.4 0.1 [1, 0, 0]
13 4.3 3.0 1.1 0.1 [1, 0, 0]
14 5.8 4.0 1.2 0.2 [1, 0, 0]
15 5.7 4.4 1.5 0.4 [1, 0, 0]
16 5.4 3.9 1.3 0.4 [1, 0, 0]
17 5.1 3.5 1.4 0.3 [1, 0, 0]
18 5.7 3.8 1.7 0.3 [1, 0, 0]
19 5.1 3.8 1.5 0.3 [1, 0, 0]
20 5.4 3.4 1.7 0.2 [1, 0, 0]
21 5.1 3.7 1.5 0.4 [1, 0, 0]
22 4.6 3.6 1.0 0.2 [1, 0, 0]
23 5.1 3.3 1.7 0.5 [1, 0, 0]
24 4.8 3.4 1.9 0.2 [1, 0, 0]
25 5.0 3.0 1.6 0.2 [1, 0, 0]
26 5.0 3.4 1.6 0.4 [1, 0, 0]
27 5.2 3.5 1.5 0.2 [1, 0, 0]
28 5.2 3.4 1.4 0.2 [1, 0, 0]
29 4.7 3.2 1.6 0.2 [1, 0, 0]
... ... ... ... ... ...
120 6.9 3.2 5.7 2.3 [0, 0, 1]
121 5.6 2.8 4.9 2.0 [0, 0, 1]
122 7.7 2.8 6.7 2.0 [0, 0, 1]
123 6.3 2.7 4.9 1.8 [0, 0, 1]
124 6.7 3.3 5.7 2.1 [0, 0, 1]
125 7.2 3.2 6.0 1.8 [0, 0, 1]
126 6.2 2.8 4.8 1.8 [0, 0, 1]
127 6.1 3.0 4.9 1.8 [0, 0, 1]
128 6.4 2.8 5.6 2.1 [0, 0, 1]
129 7.2 3.0 5.8 1.6 [0, 0, 1]
130 7.4 2.8 6.1 1.9 [0, 0, 1]
131 7.9 3.8 6.4 2.0 [0, 0, 1]
132 6.4 2.8 5.6 2.2 [0, 0, 1]
133 6.3 2.8 5.1 1.5 [0, 0, 1]
134 6.1 2.6 5.6 1.4 [0, 0, 1]
135 7.7 3.0 6.1 2.3 [0, 0, 1]
136 6.3 3.4 5.6 2.4 [0, 0, 1]
137 6.4 3.1 5.5 1.8 [0, 0, 1]
138 6.0 3.0 4.8 1.8 [0, 0, 1]
139 6.9 3.1 5.4 2.1 [0, 0, 1]
140 6.7 3.1 5.6 2.4 [0, 0, 1]
141 6.9 3.1 5.1 2.3 [0, 0, 1]
142 5.8 2.7 5.1 1.9 [0, 0, 1]
143 6.8 3.2 5.9 2.3 [0, 0, 1]
144 6.7 3.3 5.7 2.5 [0, 0, 1]
145 6.7 3.0 5.2 2.3 [0, 0, 1]
146 6.3 2.5 5.0 1.9 [0, 0, 1]
147 6.5 3.0 5.2 2.0 [0, 0, 1]
148 6.2 3.4 5.4 2.3 [0, 0, 1]
149 5.9 3.0 5.1 1.8 [0, 0, 1]

150 rows × 5 columns


In [48]:
# shuffle the rows so that the later train/test split mixes all three classes
data = data.iloc[np.random.permutation(len(data))]

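Because np.random.permutation is unseeded, each run of the notebook produces a different ordering (and therefore slightly different final accuracy). A reproducible alternative, if that matters, is a seeded shuffle:

# data = data.sample(frac=1, random_state=42).reset_index(drop=True)   # same shuffle on every run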
In [49]:
data


Out[49]:
f1 f2 f3 f4 f5
22 4.6 3.6 1.0 0.2 [1, 0, 0]
138 6.0 3.0 4.8 1.8 [0, 0, 1]
94 5.6 2.7 4.2 1.3 [0, 1, 0]
75 6.6 3.0 4.4 1.4 [0, 1, 0]
50 7.0 3.2 4.7 1.4 [0, 1, 0]
36 5.5 3.5 1.3 0.2 [1, 0, 0]
47 4.6 3.2 1.4 0.2 [1, 0, 0]
68 6.2 2.2 4.5 1.5 [0, 1, 0]
60 5.0 2.0 3.5 1.0 [0, 1, 0]
21 5.1 3.7 1.5 0.4 [1, 0, 0]
136 6.3 3.4 5.6 2.4 [0, 0, 1]
133 6.3 2.8 5.1 1.5 [0, 0, 1]
76 6.8 2.8 4.8 1.4 [0, 1, 0]
140 6.7 3.1 5.6 2.4 [0, 0, 1]
90 5.5 2.6 4.4 1.2 [0, 1, 0]
25 5.0 3.0 1.6 0.2 [1, 0, 0]
64 5.6 2.9 3.6 1.3 [0, 1, 0]
23 5.1 3.3 1.7 0.5 [1, 0, 0]
128 6.4 2.8 5.6 2.1 [0, 0, 1]
142 5.8 2.7 5.1 1.9 [0, 0, 1]
20 5.4 3.4 1.7 0.2 [1, 0, 0]
35 5.0 3.2 1.2 0.2 [1, 0, 0]
37 4.9 3.1 1.5 0.1 [1, 0, 0]
124 6.7 3.3 5.7 2.1 [0, 0, 1]
125 7.2 3.2 6.0 1.8 [0, 0, 1]
91 6.1 3.0 4.6 1.4 [0, 1, 0]
19 5.1 3.8 1.5 0.3 [1, 0, 0]
61 5.9 3.0 4.2 1.5 [0, 1, 0]
120 6.9 3.2 5.7 2.3 [0, 0, 1]
105 7.6 3.0 6.6 2.1 [0, 0, 1]
... ... ... ... ... ...
57 4.9 2.4 3.3 1.0 [0, 1, 0]
69 5.6 2.5 3.9 1.1 [0, 1, 0]
38 4.4 3.0 1.3 0.2 [1, 0, 0]
52 6.9 3.1 4.9 1.5 [0, 1, 0]
49 5.0 3.3 1.4 0.2 [1, 0, 0]
106 4.9 2.5 4.5 1.7 [0, 0, 1]
55 5.7 2.8 4.5 1.3 [0, 1, 0]
71 6.1 2.8 4.0 1.3 [0, 1, 0]
13 4.3 3.0 1.1 0.1 [1, 0, 0]
101 5.8 2.7 5.1 1.9 [0, 0, 1]
110 6.5 3.2 5.1 2.0 [0, 0, 1]
147 6.5 3.0 5.2 2.0 [0, 0, 1]
102 7.1 3.0 5.9 2.1 [0, 0, 1]
119 6.0 2.2 5.0 1.5 [0, 0, 1]
113 5.7 2.5 5.0 2.0 [0, 0, 1]
134 6.1 2.6 5.6 1.4 [0, 0, 1]
126 6.2 2.8 4.8 1.8 [0, 0, 1]
83 6.0 2.7 5.1 1.6 [0, 1, 0]
11 4.8 3.4 1.6 0.2 [1, 0, 0]
32 5.2 4.1 1.5 0.1 [1, 0, 0]
58 6.6 2.9 4.6 1.3 [0, 1, 0]
98 5.1 2.5 3.0 1.1 [0, 1, 0]
8 4.4 2.9 1.4 0.2 [1, 0, 0]
66 5.6 3.0 4.5 1.5 [0, 1, 0]
115 6.4 3.2 5.3 2.3 [0, 0, 1]
87 6.3 2.3 4.4 1.3 [0, 1, 0]
85 6.0 3.4 4.5 1.6 [0, 1, 0]
92 5.8 2.6 4.0 1.2 [0, 1, 0]
81 5.5 2.4 3.7 1.0 [0, 1, 0]
31 5.4 3.4 1.5 0.4 [1, 0, 0]

150 rows × 5 columns


In [50]:
# rebuild a clean 0..149 index after the shuffle
data = data.reset_index(drop=True)

In [51]:
data


Out[51]:
f1 f2 f3 f4 f5
0 4.6 3.6 1.0 0.2 [1, 0, 0]
1 6.0 3.0 4.8 1.8 [0, 0, 1]
2 5.6 2.7 4.2 1.3 [0, 1, 0]
3 6.6 3.0 4.4 1.4 [0, 1, 0]
4 7.0 3.2 4.7 1.4 [0, 1, 0]
5 5.5 3.5 1.3 0.2 [1, 0, 0]
6 4.6 3.2 1.4 0.2 [1, 0, 0]
7 6.2 2.2 4.5 1.5 [0, 1, 0]
8 5.0 2.0 3.5 1.0 [0, 1, 0]
9 5.1 3.7 1.5 0.4 [1, 0, 0]
10 6.3 3.4 5.6 2.4 [0, 0, 1]
11 6.3 2.8 5.1 1.5 [0, 0, 1]
12 6.8 2.8 4.8 1.4 [0, 1, 0]
13 6.7 3.1 5.6 2.4 [0, 0, 1]
14 5.5 2.6 4.4 1.2 [0, 1, 0]
15 5.0 3.0 1.6 0.2 [1, 0, 0]
16 5.6 2.9 3.6 1.3 [0, 1, 0]
17 5.1 3.3 1.7 0.5 [1, 0, 0]
18 6.4 2.8 5.6 2.1 [0, 0, 1]
19 5.8 2.7 5.1 1.9 [0, 0, 1]
20 5.4 3.4 1.7 0.2 [1, 0, 0]
21 5.0 3.2 1.2 0.2 [1, 0, 0]
22 4.9 3.1 1.5 0.1 [1, 0, 0]
23 6.7 3.3 5.7 2.1 [0, 0, 1]
24 7.2 3.2 6.0 1.8 [0, 0, 1]
25 6.1 3.0 4.6 1.4 [0, 1, 0]
26 5.1 3.8 1.5 0.3 [1, 0, 0]
27 5.9 3.0 4.2 1.5 [0, 1, 0]
28 6.9 3.2 5.7 2.3 [0, 0, 1]
29 7.6 3.0 6.6 2.1 [0, 0, 1]
... ... ... ... ... ...
120 4.9 2.4 3.3 1.0 [0, 1, 0]
121 5.6 2.5 3.9 1.1 [0, 1, 0]
122 4.4 3.0 1.3 0.2 [1, 0, 0]
123 6.9 3.1 4.9 1.5 [0, 1, 0]
124 5.0 3.3 1.4 0.2 [1, 0, 0]
125 4.9 2.5 4.5 1.7 [0, 0, 1]
126 5.7 2.8 4.5 1.3 [0, 1, 0]
127 6.1 2.8 4.0 1.3 [0, 1, 0]
128 4.3 3.0 1.1 0.1 [1, 0, 0]
129 5.8 2.7 5.1 1.9 [0, 0, 1]
130 6.5 3.2 5.1 2.0 [0, 0, 1]
131 6.5 3.0 5.2 2.0 [0, 0, 1]
132 7.1 3.0 5.9 2.1 [0, 0, 1]
133 6.0 2.2 5.0 1.5 [0, 0, 1]
134 5.7 2.5 5.0 2.0 [0, 0, 1]
135 6.1 2.6 5.6 1.4 [0, 0, 1]
136 6.2 2.8 4.8 1.8 [0, 0, 1]
137 6.0 2.7 5.1 1.6 [0, 1, 0]
138 4.8 3.4 1.6 0.2 [1, 0, 0]
139 5.2 4.1 1.5 0.1 [1, 0, 0]
140 6.6 2.9 4.6 1.3 [0, 1, 0]
141 5.1 2.5 3.0 1.1 [0, 1, 0]
142 4.4 2.9 1.4 0.2 [1, 0, 0]
143 5.6 3.0 4.5 1.5 [0, 1, 0]
144 6.4 3.2 5.3 2.3 [0, 0, 1]
145 6.3 2.3 4.4 1.3 [0, 1, 0]
146 6.0 3.4 4.5 1.6 [0, 1, 0]
147 5.8 2.6 4.0 1.2 [0, 1, 0]
148 5.5 2.4 3.7 1.0 [0, 1, 0]
149 5.4 3.4 1.5 0.4 [1, 0, 0]

150 rows × 5 columns


In [52]:
# training data: the first 106 shuffled rows (.ix is label based and inclusive, so 0:105 keeps rows 0-105)
x_input = data.ix[0:105, ['f1', 'f2', 'f3', 'f4']]
temp = data['f5']
y_input = temp[0:106]
# test data: the remaining 44 rows
x_test = data.ix[106:149, ['f1', 'f2', 'f3', 'f4']]
y_test = temp[106:150]

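The split above simply takes the first 106 shuffled rows, so the class balance of the training set depends entirely on the shuffle. A stratified split keeps the 50/50/50 ratio in both sets; a sketch assuming a recent scikit-learn is available (it is not used elsewhere in this notebook):

from sklearn.model_selection import train_test_split

features = data[['f1', 'f2', 'f3', 'f4']].values
labels = np.stack(data['f5'].values)        # (150, 3) matrix of the one-hot rows
x_tr, x_te, y_tr, y_te = train_test_split(
    features, labels, test_size=44,
    stratify=labels.argmax(axis=1), random_state=0)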
In [53]:
# placeholders: each input row has 4 features, each label is a 3-class one-hot vector
x = tf.placeholder(tf.float32, shape=[None, 4])
y_ = tf.placeholder(tf.float32, shape=[None, 3])
# weights and bias, initialised to zero
W = tf.Variable(tf.zeros([4, 3]))
b = tf.Variable(tf.zeros([3]))

In [54]:
# model: softmax regression, y = softmax(xW + b)
y = tf.nn.softmax(tf.matmul(x, W) + b)

In [55]:
# loss: categorical cross-entropy, averaged over the batch
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))

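Taking tf.log of the softmax output can produce NaNs if any predicted probability underflows to zero. TensorFlow's fused op computes the same loss more safely from the raw logits; a sketch, not what this notebook runs, assuming a TensorFlow 1.x release where the labels=/logits= keyword form is available:

logits = tf.matmul(x, W) + b    # the pre-softmax scores
stable_cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))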
In [56]:
# optimiser: Adam with a learning rate of 0.01
train_step = tf.train.AdamOptimizer(0.01).minimize(cross_entropy)
# accuracy: fraction of rows whose predicted class matches the true class
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

In [57]:
# create the session and initialise the variables
sess = tf.InteractiveSession()
init = tf.initialize_all_variables()
sess.run(init)
# number of training iterations
epoch = 2000


Exception AssertionError: AssertionError("Nesting violated for default stack of <type 'weakref'> objects",) in <bound method InteractiveSession.__del__ of <tensorflow.python.client.session.InteractiveSession object at 0x11c2a0cd0>> ignored

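The ignored AssertionError above is a harmless teardown message, most likely left over from an earlier InteractiveSession that was never closed. Separately, tf.initialize_all_variables() is deprecated in later TensorFlow 1.x releases; on those versions the equivalent call (an assumption about the installed version) would be:

# init = tf.global_variables_initializer()   # replaces tf.initialize_all_variables()
# sess.run(init)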
In [58]:
# full-batch training: every step feeds all 106 training rows
for step in xrange(epoch):
    _, c = sess.run([train_step, cross_entropy],
                    feed_dict={x: x_input, y_: [t for t in y_input.as_matrix()]})
    if step % 500 == 0:
        print c


1.09861
0.142379
0.0841468
0.0633221

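Training is full-batch: each of the 2000 steps feeds all 106 training rows at once. To watch generalisation while the loss falls, the same loop could also evaluate the accuracy node on the held-out rows every 500 steps; a hypothetical variant, not what was run above:

for step in xrange(epoch):
    _, c = sess.run([train_step, cross_entropy],
                    feed_dict={x: x_input, y_: [t for t in y_input.as_matrix()]})
    if step % 500 == 0:
        test_acc = sess.run(accuracy, feed_dict={x: x_test, y_: [t for t in y_test.as_matrix()]})
        print c, test_acc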
In [59]:
# spot check: classify the single flower at row 130 of the shuffled data
a = data.ix[130, ['f1', 'f2', 'f3', 'f4']]
b = a.values.reshape(1, 4)   # to a (1, 4) array so it matches the placeholder shape
largest = sess.run(tf.argmax(y, 1), feed_dict={x: b})[0]
if largest==0:
    print "flower is :Iris-setosa"
elif largest==1:
    print "flower is :Iris-versicolor"
else :
    print "flower is :Iris-virginica"


flower is :Iris-virginica

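The if/elif chain maps the argmax index back to a species name by hand; an equivalent lookup (hypothetical class_names list, ordered the same way as the one-hot vectors) would be:

class_names = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']
print "flower is :" + class_names[largest]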
In [60]:
# accuracy on the 44 held-out test rows
print sess.run(accuracy, feed_dict={x: x_test, y_: [t for t in y_test.as_matrix()]})


0.977273

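The single accuracy number hides which species get confused with which. A quick confusion matrix over the test rows (a sketch using pandas, not part of the original run):

pred = sess.run(tf.argmax(y, 1), feed_dict={x: x_test})
true = np.argmax(np.stack(y_test.values), axis=1)
print pd.crosstab(pd.Series(true, name='actual'), pd.Series(pred, name='predicted'))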
In [ ]: