$$ H(x) = Wx + b $$

$$ cost(W,b) = \frac{1}{m} \sum_{i=1}^{m} ( H( x^i ) - y^i )^2 $$

In [1]:
import sys
sys.version


Out[1]:
'3.6.0 (default, Jan 13 2017, 20:56:47) \n[GCC 5.4.0 20160609]'

In [2]:
import tensorflow as tf
tf.__version__


Out[2]:
'1.0.0'

In [3]:
import matplotlib.pyplot as plt

In [22]:
%matplotlib inline
x_data = [1, 2, 3]
y_data = [1, 2, 3]

plt.plot(x_data,y_data)
plt.show()


$$ H(x) = Wx + b $$

In [5]:
W = tf.Variable(tf.random_normal([1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
h = W * x_data + b

print(tf.random_normal([1]))  # prints the Tensor object; no value until it is run in a Session


Tensor("random_normal_2:0", shape=(1,), dtype=float32)
$$ cost(W,b) = \frac{1}{m} \sum_{i=1}^{m} ( H( x^i ) - y^i )^2 $$

In [6]:
print(h)
cost = tf.reduce_mean(tf.square(h - y_data))
#t = [1., 2., 3., 4.]
#tf.reduce_mean(t) ==> 2.5


Tensor("add:0", shape=(3,), dtype=float32)

In [7]:
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)

In [8]:
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)

In [9]:
for step in range(2000):
    sess.run(train)
    if step % 20 == 0:
        print(step,sess.run(cost),sess.run(W), sess.run(b))


0 0.263906 [ 0.47970423] [ 0.75174206]
20 0.0894689 [ 0.64042497] [ 0.77635783]
40 0.0798951 [ 0.67029911] [ 0.74557966]
60 0.0725496 [ 0.68702942] [ 0.71108371]
80 0.0658906 [ 0.70185548] [ 0.67771715]
100 0.059843 [ 0.71587837] [ 0.64587188]
120 0.0543503 [ 0.72923207] [ 0.61551875]
140 0.0493618 [ 0.74195731] [ 0.58659166]
160 0.0448312 [ 0.75408435] [ 0.5590241]
180 0.0407164 [ 0.76564151] [ 0.5327521]
200 0.0369793 [ 0.7766555] [ 0.50771451]
220 0.0335852 [ 0.78715187] [ 0.48385376]
240 0.0305026 [ 0.79715496] [ 0.46111441]
260 0.0277029 [ 0.80668789] [ 0.43944377]
280 0.0251603 [ 0.81577283] [ 0.41879153]
300 0.022851 [ 0.82443082] [ 0.39910987]
320 0.0207536 [ 0.83268195] [ 0.38035321]
340 0.0188488 [ 0.8405453] [ 0.36247805]
360 0.0171187 [ 0.84803909] [ 0.34544289]
380 0.0155475 [ 0.85518068] [ 0.32920834]
400 0.0141205 [ 0.8619867] [ 0.31373677]
420 0.0128245 [ 0.86847281] [ 0.29899228]
440 0.0116474 [ 0.87465411] [ 0.28494066]
460 0.0105783 [ 0.88054484] [ 0.27154949]
480 0.00960743 [ 0.88615882] [ 0.25878769]
500 0.00872562 [ 0.89150894] [ 0.24662569]
520 0.00792475 [ 0.89660758] [ 0.2350352]
540 0.00719738 [ 0.90146673] [ 0.2239894]
560 0.00653678 [ 0.90609735] [ 0.21346273]
580 0.00593681 [ 0.91051042] [ 0.20343079]
600 0.00539191 [ 0.91471612] [ 0.19387032]
620 0.00489701 [ 0.91872424] [ 0.1847591]
640 0.00444755 [ 0.92254382] [ 0.17607605]
660 0.00403933 [ 0.92618394] [ 0.16780116]
680 0.00366859 [ 0.92965305] [ 0.15991518]
700 0.00333188 [ 0.93295902] [ 0.15239981]
720 0.00302606 [ 0.93610972] [ 0.14523758]
740 0.00274831 [ 0.93911237] [ 0.13841198]
760 0.00249607 [ 0.94197387] [ 0.13190712]
780 0.00226697 [ 0.94470084] [ 0.12570797]
800 0.0020589 [ 0.94729972] [ 0.11980017]
820 0.00186992 [ 0.94977635] [ 0.11417002]
840 0.0016983 [ 0.9521367] [ 0.10880448]
860 0.00154242 [ 0.95438612] [ 0.10369107]
880 0.00140085 [ 0.9565298] [ 0.09881798]
900 0.00127228 [ 0.95857269] [ 0.09417392]
920 0.0011555 [ 0.96051967] [ 0.08974809]
940 0.00104945 [ 0.96237504] [ 0.08553027]
960 0.000953121 [ 0.96414334] [ 0.08151066]
980 0.00086564 [ 0.96582848] [ 0.07767995]
1000 0.00078619 [ 0.96743441] [ 0.0740293]
1020 0.000714027 [ 0.96896487] [ 0.0705502]
1040 0.000648491 [ 0.97042346] [ 0.06723451]
1060 0.00058897 [ 0.97181344] [ 0.06407472]
1080 0.00053491 [ 0.97313809] [ 0.06106345]
1100 0.000485818 [ 0.97440046] [ 0.0581937]
1120 0.000441225 [ 0.97560358] [ 0.05545879]
1140 0.000400727 [ 0.97675014] [ 0.05285241]
1160 0.000363947 [ 0.97784281] [ 0.05036854]
1180 0.000330543 [ 0.9788841] [ 0.04800142]
1200 0.000300204 [ 0.97987646] [ 0.04574554]
1220 0.000272651 [ 0.98082227] [ 0.04359562]
1240 0.000247623 [ 0.98172355] [ 0.04154675]
1260 0.000224897 [ 0.98258245] [ 0.0395942]
1280 0.000204255 [ 0.983401] [ 0.03773342]
1300 0.000185507 [ 0.98418111] [ 0.03596009]
1320 0.000168482 [ 0.9849245] [ 0.03427013]
1340 0.000153016 [ 0.98563302] [ 0.03265955]
1360 0.000138972 [ 0.98630822] [ 0.03112469]
1380 0.000126217 [ 0.98695171] [ 0.02966195]
1400 0.000114633 [ 0.98756486] [ 0.02826795]
1420 0.000104111 [ 0.98814923] [ 0.02693948]
1440 9.45546e-05 [ 0.98870623] [ 0.02567344]
1460 8.58767e-05 [ 0.98923689] [ 0.02446691]
1480 7.79944e-05 [ 0.98974288] [ 0.02331706]
1500 7.08361e-05 [ 0.99022502] [ 0.02222114]
1520 6.43343e-05 [ 0.99068439] [ 0.02117676]
1540 5.84291e-05 [ 0.99112219] [ 0.02018149]
1560 5.30664e-05 [ 0.99153942] [ 0.019233]
1580 4.81955e-05 [ 0.99193704] [ 0.0183291]
1600 4.37712e-05 [ 0.99231595] [ 0.0174677]
1620 3.97535e-05 [ 0.99267703] [ 0.01664678]
1640 3.61055e-05 [ 0.99302125] [ 0.01586444]
1660 3.27907e-05 [ 0.99334919] [ 0.01511886]
1680 2.97819e-05 [ 0.99366176] [ 0.01440832]
1700 2.70475e-05 [ 0.99395967] [ 0.01373117]
1720 2.45657e-05 [ 0.9942435] [ 0.01308583]
1740 2.23104e-05 [ 0.99451405] [ 0.01247087]
1760 2.0263e-05 [ 0.99477178] [ 0.01188479]
1780 1.84033e-05 [ 0.99501753] [ 0.01132631]
1800 1.67141e-05 [ 0.99525172] [ 0.01079399]
1820 1.51798e-05 [ 0.99547487] [ 0.0102867]
1840 1.37868e-05 [ 0.99568748] [ 0.00980328]
1860 1.25212e-05 [ 0.9958902] [ 0.00934255]
1880 1.13718e-05 [ 0.99608338] [ 0.00890347]
1900 1.0328e-05 [ 0.99626738] [ 0.00848504]
1920 9.38029e-06 [ 0.99644285] [ 0.00808628]
1940 8.51934e-06 [ 0.99660999] [ 0.00770625]
1960 7.73755e-06 [ 0.99676937] [ 0.00734409]
1980 7.02684e-06 [ 0.99692118] [ 0.00699892]

With placeholders


In [10]:
x_data = [1., 2., 3.]
y_data = [2., 4., 6.]

W = tf.Variable(tf.random_normal([1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

X = tf.placeholder(tf.float32,shape=[None])
Y = tf.placeholder(tf.float32,shape=[None])

h = W * X + b
cost = tf.reduce_mean(tf.square(h - Y))
a = tf.Variable(0.1)  # learning rate, stored as a Variable
init = tf.global_variables_initializer()
opt = tf.train.GradientDescentOptimizer(a)
train = opt.minimize(cost)

sess = tf.Session()
sess.run(init)

In [11]:
for step in range(2000):
    sess.run(train,feed_dict={X:x_data, Y:y_data})
    if step % 20 == 0:
        print(step,sess.run(cost,feed_dict={X:x_data, Y:y_data}),sess.run(W), sess.run(b))


0 0.491723 [ 1.85756636] [ 0.97638631]
20 0.0410794 [ 1.76459897] [ 0.53512192]
40 0.0155209 [ 1.8553046] [ 0.3289268]
60 0.00586423 [ 1.91105902] [ 0.20218347]
80 0.00221566 [ 1.94533014] [ 0.12427742]
100 0.000837142 [ 1.96639574] [ 0.07639039]
120 0.000316295 [ 1.97934425] [ 0.04695532]
140 0.000119502 [ 1.98730338] [ 0.02886227]
160 4.51513e-05 [ 1.99219573] [ 0.01774087]
180 1.7059e-05 [ 1.99520302] [ 0.01090488]
200 6.4456e-06 [ 1.99705136] [ 0.00670298]
220 2.43535e-06 [ 1.99818742] [ 0.00412019]
240 9.20429e-07 [ 1.99888587] [ 0.00253264]
260 3.47655e-07 [ 1.99931514] [ 0.00155679]
280 1.31418e-07 [ 1.99957907] [ 0.00095704]
300 4.96145e-08 [ 1.9997412] [ 0.00058819]
320 1.87606e-08 [ 1.99984086] [ 0.00036157]
340 7.07778e-09 [ 1.99990213] [ 0.00022221]
360 2.67287e-09 [ 1.99993992] [ 0.00013665]
380 1.00645e-09 [ 1.99996305] [  8.39866916e-05]
400 3.82443e-10 [ 1.99997735] [  5.16571417e-05]
420 1.44856e-10 [ 1.99998605] [  3.17412450e-05]
440 5.47971e-11 [ 1.99999142] [  1.95024240e-05]
460 2.03878e-11 [ 1.99999475] [  1.19525057e-05]
480 7.67386e-12 [ 1.99999678] [  7.39076268e-06]
500 3.18323e-12 [ 1.99999797] [  4.51384540e-06]
520 9.85286e-13 [ 1.99999869] [  2.79723236e-06]
540 3.78956e-13 [ 1.99999917] [  1.71640238e-06]
560 2.46321e-13 [ 1.99999928] [  1.33493302e-06]
580 2.46321e-13 [ 1.99999952] [  1.08062000e-06]
600 1.51582e-13 [ 1.99999988] [  5.71993496e-07]
620 1.7053e-13 [ 2.] [  2.69996463e-07]
640 0.0 [ 2.] [  1.11050738e-07]
660 0.0 [ 2.] [  1.11050738e-07]
680 0.0 [ 2.] [  1.11050738e-07]
700 0.0 [ 2.] [  1.11050738e-07]
720 0.0 [ 2.] [  1.11050738e-07]
740 0.0 [ 2.] [  1.11050738e-07]
760 0.0 [ 2.] [  1.11050738e-07]
780 0.0 [ 2.] [  1.11050738e-07]
800 0.0 [ 2.] [  1.11050738e-07]
820 0.0 [ 2.] [  1.11050738e-07]
840 0.0 [ 2.] [  1.11050738e-07]
860 0.0 [ 2.] [  1.11050738e-07]
880 0.0 [ 2.] [  1.11050738e-07]
900 0.0 [ 2.] [  1.11050738e-07]
920 0.0 [ 2.] [  1.11050738e-07]
940 0.0 [ 2.] [  1.11050738e-07]
960 0.0 [ 2.] [  1.11050738e-07]
980 0.0 [ 2.] [  1.11050738e-07]
1000 0.0 [ 2.] [  1.11050738e-07]
1020 0.0 [ 2.] [  1.11050738e-07]
1040 0.0 [ 2.] [  1.11050738e-07]
1060 0.0 [ 2.] [  1.11050738e-07]
1080 0.0 [ 2.] [  1.11050738e-07]
1100 0.0 [ 2.] [  1.11050738e-07]
1120 0.0 [ 2.] [  1.11050738e-07]
1140 0.0 [ 2.] [  1.11050738e-07]
1160 0.0 [ 2.] [  1.11050738e-07]
1180 0.0 [ 2.] [  1.11050738e-07]
1200 0.0 [ 2.] [  1.11050738e-07]
1220 0.0 [ 2.] [  1.11050738e-07]
1240 0.0 [ 2.] [  1.11050738e-07]
1260 0.0 [ 2.] [  1.11050738e-07]
1280 0.0 [ 2.] [  1.11050738e-07]
1300 0.0 [ 2.] [  1.11050738e-07]
1320 0.0 [ 2.] [  1.11050738e-07]
1340 0.0 [ 2.] [  1.11050738e-07]
1360 0.0 [ 2.] [  1.11050738e-07]
1380 0.0 [ 2.] [  1.11050738e-07]
1400 0.0 [ 2.] [  1.11050738e-07]
1420 0.0 [ 2.] [  1.11050738e-07]
1440 0.0 [ 2.] [  1.11050738e-07]
1460 0.0 [ 2.] [  1.11050738e-07]
1480 0.0 [ 2.] [  1.11050738e-07]
1500 0.0 [ 2.] [  1.11050738e-07]
1520 0.0 [ 2.] [  1.11050738e-07]
1540 0.0 [ 2.] [  1.11050738e-07]
1560 0.0 [ 2.] [  1.11050738e-07]
1580 0.0 [ 2.] [  1.11050738e-07]
1600 0.0 [ 2.] [  1.11050738e-07]
1620 0.0 [ 2.] [  1.11050738e-07]
1640 0.0 [ 2.] [  1.11050738e-07]
1660 0.0 [ 2.] [  1.11050738e-07]
1680 0.0 [ 2.] [  1.11050738e-07]
1700 0.0 [ 2.] [  1.11050738e-07]
1720 0.0 [ 2.] [  1.11050738e-07]
1740 0.0 [ 2.] [  1.11050738e-07]
1760 0.0 [ 2.] [  1.11050738e-07]
1780 0.0 [ 2.] [  1.11050738e-07]
1800 0.0 [ 2.] [  1.11050738e-07]
1820 0.0 [ 2.] [  1.11050738e-07]
1840 0.0 [ 2.] [  1.11050738e-07]
1860 0.0 [ 2.] [  1.11050738e-07]
1880 0.0 [ 2.] [  1.11050738e-07]
1900 0.0 [ 2.] [  1.11050738e-07]
1920 0.0 [ 2.] [  1.11050738e-07]
1940 0.0 [ 2.] [  1.11050738e-07]
1960 0.0 [ 2.] [  1.11050738e-07]
1980 0.0 [ 2.] [  1.11050738e-07]

Predict values with feed_dict


In [12]:
print( sess.run(h, feed_dict={X:[5]}) )


[ 10.]

In [13]:
print( sess.run(h, feed_dict={X:[2.3]}) )


[ 4.5999999]
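Because X was declared with shape=[None], a whole batch can be fed at once (a small sketch; the input values are arbitrary):

In [ ]:
# shape=[None] lets the placeholder accept any number of inputs in one call.
print(sess.run(h, feed_dict={X: [1.5, 3.5, 5.0]}))
# roughly [3. 7. 10.], since the trained model has W ~ 2 and b ~ 0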

Cost to be minimized (descent algorithm)


In [14]:
X = [1., 2., 3.]
Y = [1., 2., 3.]
m = n_samples = len(X)

W = tf.placeholder(tf.float32)
h = tf.multiply(X,W)

cost = tf.reduce_sum(tf.pow(h - Y, 2))/(m)
init = tf.global_variables_initializer()

W_val = []
cost_val = []

sess = tf.Session()
sess.run(init)
for i in range(-30, 50):
    curr_cost, curr_w = sess.run([cost,W], feed_dict={W: i*0.1})
    W_val.append(i*0.1)
    cost_val.append(curr_cost)
    
plt.plot(W_val, cost_val)
plt.ylabel('Cost')
plt.xlabel('W')

plt.show()


Minimize cost (gradient descent)


In [15]:
x_data = [1., 2., 3.]
y_data = [1., 2., 3.]

W = tf.Variable(5.0)
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)

h = W * X

Derivative

$$ cost(W) = \frac{1}{2m} \sum_{i=1}^{m} ( W x^i - y^i )^2 $$

Gradient descent algorithm:

$$ W := W - a \frac{\partial}{\partial W} cost(W) $$

$$ W := W - a \frac{1}{m} \sum_{i=1}^{m} ( W x^i - y^i ) x^i $$

In [16]:
cost = tf.reduce_mean(tf.square(h - Y))
descent = W - tf.multiply(0.1, tf.reduce_mean(tf.multiply((tf.multiply(W,X) - Y), X)))
update = W.assign(descent)

init = tf.global_variables_initializer()

sess = tf.Session()
sess.run(init)

for step in range(100):
    sess.run(update, feed_dict={X:x_data, Y:y_data})
    print(step,sess.run(cost, feed_dict={X:x_data, Y:y_data}), sess.run(W))


0 21.2385 3.13333
1 6.04118 2.13778
2 1.71838 1.60681
3 0.488784 1.32363
4 0.139032 1.17261
5 0.0395468 1.09206
6 0.0112489 1.0491
7 0.00319967 1.02618
8 0.000910135 1.01397
9 0.000258876 1.00745
10 7.36358e-05 1.00397
11 2.09459e-05 1.00212
12 5.95745e-06 1.00113
13 1.69446e-06 1.0006
14 4.82022e-07 1.00032
15 1.37134e-07 1.00017
16 3.8992e-08 1.00009
17 1.11053e-08 1.00005
18 3.15166e-09 1.00003
19 8.92366e-10 1.00001
20 2.54924e-10 1.00001
21 7.31622e-11 1.0
22 2.14868e-11 1.0
23 6.63173e-12 1.0
24 1.80478e-12 1.0
25 5.16328e-13 1.0
26 2.65269e-13 1.0
27 9.9476e-14 1.0
28 0.0 1.0
29 0.0 1.0
30 0.0 1.0
31 0.0 1.0
32 0.0 1.0
33 0.0 1.0
34 0.0 1.0
35 0.0 1.0
36 0.0 1.0
37 0.0 1.0
38 0.0 1.0
39 0.0 1.0
40 0.0 1.0
41 0.0 1.0
42 0.0 1.0
43 0.0 1.0
44 0.0 1.0
45 0.0 1.0
46 0.0 1.0
47 0.0 1.0
48 0.0 1.0
49 0.0 1.0
50 0.0 1.0
51 0.0 1.0
52 0.0 1.0
53 0.0 1.0
54 0.0 1.0
55 0.0 1.0
56 0.0 1.0
57 0.0 1.0
58 0.0 1.0
59 0.0 1.0
60 0.0 1.0
61 0.0 1.0
62 0.0 1.0
63 0.0 1.0
64 0.0 1.0
65 0.0 1.0
66 0.0 1.0
67 0.0 1.0
68 0.0 1.0
69 0.0 1.0
70 0.0 1.0
71 0.0 1.0
72 0.0 1.0
73 0.0 1.0
74 0.0 1.0
75 0.0 1.0
76 0.0 1.0
77 0.0 1.0
78 0.0 1.0
79 0.0 1.0
80 0.0 1.0
81 0.0 1.0
82 0.0 1.0
83 0.0 1.0
84 0.0 1.0
85 0.0 1.0
86 0.0 1.0
87 0.0 1.0
88 0.0 1.0
89 0.0 1.0
90 0.0 1.0
91 0.0 1.0
92 0.0 1.0
93 0.0 1.0
94 0.0 1.0
95 0.0 1.0
96 0.0 1.0
97 0.0 1.0
98 0.0 1.0
99 0.0 1.0

In [17]:
#compute_gradients and apply_gradients
"""
gradient = tf.reduce_mean((W*X-Y)*X)*2

cost = tf.reduce_mean(tf.square(h-Y))

optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)

gvs = optimizer.compute_gradients(cost)
apply_gradients = optimizer.apply_gradients(gvs)
"""


Out[17]:
'\ngradient = tf.reduce_mean((W*X-Y)*X)*2\n\ncost = tf.reduce_mean(tf.square(h-Y))\n\noptimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)\n\ngvs = optimizer.compute_gradients(cost)\napply_gradients = optimizer.apply_gradients(gvs)\n'
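
A runnable variant of that sketch (my own minimal example for TF 1.x; it rebuilds W, X, and Y so it does not depend on earlier cells):

In [ ]:
# Minimal runnable sketch of compute_gradients / apply_gradients (TF 1.x).
W = tf.Variable(5.0)
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)
h = W * X

cost = tf.reduce_mean(tf.square(h - Y))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)

# compute_gradients returns (gradient, variable) pairs; apply_gradients applies
# them, so the two calls together do the same work as optimizer.minimize(cost).
gvs = optimizer.compute_gradients(cost)
apply_gradients = optimizer.apply_gradients(gvs)

with tf.Session() as s:
    s.run(tf.global_variables_initializer())
    for _ in range(100):
        s.run(apply_gradients, feed_dict={X: [1., 2., 3.], Y: [1., 2., 3.]})
    print(s.run(W))  # moves from 5.0 toward 1.0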

Multi-variable linear regression

$$ H(x_1,x_2,x_3) = w_1x_1 + w_2x_2 + w_3x_3 + b $$

In [18]:
x1_data = [73,93,89,96,73]
x2_data = [80,88,91,98,66]
x3_data = [75,93,90,100,70]
y_data = [152,185,180,196,142]

In [19]:
x1 = tf.placeholder(tf.float32)
x2 = tf.placeholder(tf.float32)
x3 = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)

W1 = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='weight1')
W2 = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='weight2')
W3 = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='weight3')
b = tf.Variable(tf.random_uniform([1], -1.0, 1.0),name='bias')

h = W1 * x1 + W2 * x2 + W3*x3 + b

cost = tf.reduce_mean(tf.square(h - y))
opt = tf.train.GradientDescentOptimizer(learning_rate=1e-5)
train = opt.minimize(cost)

init = tf.global_variables_initializer()

sess = tf.Session()
sess.run(init)

for step in range(2000):
    cost_val, h_val, _ = sess.run([cost, h, train] , feed_dict={x1:x1_data, x2: x2_data, x3:x3_data, y:y_data})
    if step % 20 == 0:
         print(step,cost_val, h_val)


0 86713.5 [-107.59642792 -130.98194885 -128.1579895  -139.57611084 -100.40460205]
20 3.64122 [ 153.66677856  183.06309509  181.26239014  197.37599182  139.13829041]
40 3.60608 [ 153.65539551  183.07545471  181.26107788  197.37561035  139.15332031]
60 3.57137 [ 153.64170837  183.08486938  181.25692749  197.37216187  139.1660614 ]
80 3.537 [ 153.62811279  183.09425354  181.25282288  197.36872864  139.17875671]
100 3.503 [ 153.61457825  183.10359192  181.24873352  197.36531067  139.19137573]
120 3.46935 [ 153.60110474  183.112854    181.24465942  197.36192322  139.20394897]
140 3.43609 [ 153.58770752  183.12207031  181.24060059  197.35855103  139.21643066]
160 3.40319 [ 153.57440186  183.1312561   181.23658752  197.35517883  139.22883606]
180 3.3706 [ 153.56115723  183.14038086  181.23258972  197.3518219   139.24121094]
200 3.3384 [ 153.54800415  183.14945984  181.22862244  197.34851074  139.25350952]
220 3.30653 [ 153.53489685  183.15847778  181.22465515  197.34518433  139.26573181]
240 3.27501 [ 153.5218811   183.16746521  181.22073364  197.34190369  139.27789307]
260 3.24383 [ 153.50891113  183.17637634  181.21681213  197.33860779  139.28997803]
280 3.21295 [ 153.49603271  183.18527222  181.21292114  197.33535767  139.30203247]
300 3.18243 [ 153.48321533  183.1940918   181.20904541  197.33210754  139.31399536]
320 3.15224 [ 153.4704895   183.20288086  181.20523071  197.32887268  139.32591248]
340 3.12236 [ 153.45779419  183.21159363  181.20137024  197.32563782  139.33773804]
360 3.09281 [ 153.44519043  183.22029114  181.19758606  197.32243347  139.34951782]
380 3.06356 [ 153.43266296  183.22894287  181.19380188  197.31924438  139.36123657]
400 3.03464 [ 153.42019653  183.23751831  181.19003296  197.31607056  139.37289429]
420 3.00602 [ 153.40779114  183.24606323  181.18629456  197.31291199  139.38449097]
440 2.97772 [ 153.39546204  183.25456238  181.18257141  197.30976868  139.39601135]
460 2.94971 [ 153.38322449  183.26303101  181.17887878  197.30667114  139.40750122]
480 2.92201 [ 153.37103271  183.27142334  181.17520142  197.30355835  139.41891479]
500 2.89461 [ 153.35888672  183.27978516  181.17153931  197.30044556  139.43023682]
520 2.86749 [ 153.34683228  183.2881012   181.16790771  197.29737854  139.44154358]
540 2.84067 [ 153.33480835  183.29634094  181.16427612  197.29429626  139.45275879]
560 2.81412 [ 153.32290649  183.30459595  181.16070557  197.29127502  139.46395874]
580 2.78788 [ 153.31103516  183.3127594   181.15710449  197.28820801  139.47503662]
600 2.76189 [ 153.29924011  183.32089233  181.15354919  197.28520203  139.4861145 ]
620 2.73619 [ 153.2875061   183.32899475  181.15000916  197.28218079  139.49710083]
640 2.71078 [ 153.27583313  183.33703613  181.14649963  197.27919006  139.50802612]
660 2.68563 [ 153.26422119  183.34503174  181.14299011  197.2762146   139.51890564]
680 2.66075 [ 153.25270081  183.35299683  181.13952637  197.27325439  139.52973938]
700 2.63615 [ 153.24119568  183.36088562  181.13604736  197.27029419  139.54048157]
720 2.61178 [ 153.22981262  183.36877441  181.13261414  197.26737976  139.55122375]
740 2.5877 [ 153.21846008  183.37660217  181.12918091  197.26445007  139.56185913]
760 2.56387 [ 153.20715332  183.38438416  181.12579346  197.2615509   139.57244873]
780 2.54029 [ 153.19593811  183.39213562  181.12242126  197.25865173  139.58299255]
800 2.51697 [ 153.18475342  183.39982605  181.11904907  197.25576782  139.59346008]
820 2.49389 [ 153.17364502  183.40748596  181.11569214  197.25289917  139.60388184]
840 2.47105 [ 153.16261292  183.41511536  181.11238098  197.25004578  139.61427307]
860 2.44847 [ 153.15162659  183.42268372  181.10905457  197.24720764  139.62457275]
880 2.42615 [ 153.14070129  183.43019104  181.10577393  197.24436951  139.63482666]
900 2.40404 [ 153.12985229  183.43769836  181.10250854  197.24157715  139.64505005]
920 2.38217 [ 153.11903381  183.44512939  181.09924316  197.23876953  139.65519714]
940 2.36053 [ 153.10830688  183.45256042  181.0960083   197.23599243  139.66529846]
960 2.33912 [ 153.09762573  183.45994568  181.09281921  197.23324585  139.67536926]
980 2.31795 [ 153.08699036  183.4672699   181.08959961  197.23048401  139.68534851]
1000 2.29699 [ 153.07641602  183.47454834  181.08641052  197.22770691  139.69528198]
1020 2.27627 [ 153.06591797  183.48181152  181.08325195  197.2250061   139.70518494]
1040 2.25578 [ 153.0554657   183.48901367  181.08010864  197.22227478  139.71499634]
1060 2.23548 [ 153.04507446  183.4961853   181.07696533  197.21957397  139.72479248]
1080 2.21543 [ 153.034729    183.5032959   181.0738678   197.21685791  139.73451233]
1100 2.19558 [ 153.02445984  183.51039124  181.07077026  197.21418762  139.74420166]
1120 2.17595 [ 153.01423645  183.5174408   181.06770325  197.21153259  139.75382996]
1140 2.15649 [ 153.00405884  183.52445984  181.06462097  197.2088623   139.76341248]
1160 2.13727 [ 152.99395752  183.53144836  181.06159973  197.20623779  139.77294922]
1180 2.11823 [ 152.98390198  183.53839111  181.05856323  197.20358276  139.78242493]
1200 2.09945 [ 152.97390747  183.54527283  181.05555725  197.20098877  139.7918396 ]
1220 2.08084 [ 152.96394348  183.55212402  181.05256653  197.19836426  139.8012085 ]
1240 2.0624 [ 152.95407104  183.55897522  181.04960632  197.19578552  139.81056213]
1260 2.04419 [ 152.94422913  183.56575012  181.04664612  197.19320679  139.81983948]
1280 2.02617 [ 152.93444824  183.57250977  181.04370117  197.19062805  139.82905579]
1300 2.00833 [ 152.92471313  183.57922363  181.04078674  197.18807983  139.83824158]
1320 1.99068 [ 152.91505432  183.58590698  181.03787231  197.18554688  139.84739685]
1340 1.97322 [ 152.90542603  183.59254456  181.03497314  197.18299866  139.85647583]
1360 1.95596 [ 152.89585876  183.59913635  181.03210449  197.18048096  139.86550903]
1380 1.93889 [ 152.88633728  183.60569763  181.02923584  197.17797852  139.8744812 ]
1400 1.92198 [ 152.87687683  183.61224365  181.02641296  197.17549133  139.88343811]
1420 1.90526 [ 152.86747742  183.61872864  181.02357483  197.17298889  139.89233398]
1440 1.88872 [ 152.85810852  183.62518311  181.02075195  197.17051697  139.90116882]
1460 1.87234 [ 152.84880066  183.63162231  181.01795959  197.1680603   139.9099884 ]
1480 1.85615 [ 152.83953857  183.63800049  181.0151825   197.16560364  139.91873169]
1500 1.84012 [ 152.83035278  183.64437866  181.01242065  197.16317749  139.92745972]
1520 1.82426 [ 152.82118225  183.6506958   181.00965881  197.16073608  139.93611145]
1540 1.8086 [ 152.81207275  183.6569519   181.00692749  197.1583252   139.94473267]
1560 1.79308 [ 152.80302429  183.66320801  181.00421143  197.15591431  139.95330811]
1580 1.77774 [ 152.79403687  183.66943359  181.00152588  197.15353394  139.96183777]
1600 1.76256 [ 152.78507996  183.6756134   180.99882507  197.15116882  139.97032166]
1620 1.74753 [ 152.77616882  183.6817627   180.99615479  197.14878845  139.97875977]
1640 1.73267 [ 152.76731873  183.68788147  180.99349976  197.1464386   139.9871521 ]
1660 1.71795 [ 152.75849915  183.69396973  180.99085999  197.14407349  139.99549866]
1680 1.70342 [ 152.7497406   183.69999695  180.98822021  197.14173889  140.00379944]
1700 1.68903 [ 152.74104309  183.70602417  180.98562622  197.13943481  140.0120697 ]
1720 1.67479 [ 152.7323761   183.71200562  180.98301697  197.13711548  140.02027893]
1740 1.6607 [ 152.72377014  183.71795654  180.98043823  197.13479614  140.02845764]
1760 1.64675 [ 152.71517944  183.72387695  180.9778595   197.13249207  140.03657532]
1780 1.63295 [ 152.70666504  183.72975159  180.97529602  197.13020325  140.04467773]
1800 1.6193 [ 152.69822693  183.73562622  180.97277832  197.12794495  140.05274963]
1820 1.60581 [ 152.68978882  183.74143982  180.97024536  197.1257019   140.06074524]
1840 1.59246 [ 152.68141174  183.7472229   180.96772766  197.12342834  140.06869507]
1860 1.57926 [ 152.67308044  183.75296021  180.96522522  197.12120056  140.07661438]
1880 1.56619 [ 152.66477966  183.75868225  180.96273804  197.11897278  140.08448792]
1900 1.55325 [ 152.65653992  183.76438904  180.96028137  197.116745    140.09233093]
1920 1.54046 [ 152.64834595  183.77005005  180.95782471  197.11454773  140.10012817]
1940 1.52779 [ 152.64021301  183.7756958   180.95539856  197.11236572  140.10791016]
1960 1.51528 [ 152.63209534  183.78128052  180.95295715  197.11016846  140.11560059]
1980 1.50287 [ 152.6240387   183.78684998  180.95053101  197.10797119  140.12327576]

In [20]:
x_data = [ [73,80,75], [93,88,93], [89,91,90],[96,98,100], [73,66,70]]
y_data = [[152],[185],[180],[196],[142]]


X = tf.placeholder(tf.float32, shape=[None,3])
Y = tf.placeholder(tf.float32, shape=[None,1])

W = tf.Variable(tf.random_normal([3,1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

h = tf.matmul(X,W)+b

cost = tf.reduce_mean(tf.square(h - Y))
opt = tf.train.GradientDescentOptimizer(learning_rate=1e-5)
train = opt.minimize(cost)

init = tf.global_variables_initializer()

sess = tf.Session()
sess.run(init)

for step in range(3000):
    cost_val, h_val, _ =  sess.run([cost,h,train], feed_dict={X:x_data, Y: y_data}) 
    if step % 20 == 0:
        print(step, cost_val, h_val)


0 182.278 [[ 166.56640625]
 [ 196.84835815]
 [ 196.13084412]
 [ 210.44512939]
 [ 151.48468018]]
20 3.98919 [[ 154.70605469]
 [ 182.61924744]
 [ 182.09700012]
 [ 195.1650238 ]
 [ 140.63594055]]
40 3.95618 [[ 154.69215393]
 [ 182.62850952]
 [ 182.0925293 ]
 [ 195.16259766]
 [ 140.6474762 ]]
60 3.92349 [[ 154.6783905 ]
 [ 182.6378479 ]
 [ 182.0881958 ]
 [ 195.16027832]
 [ 140.6590271 ]]
80 3.89116 [[ 154.66471863]
 [ 182.6471405 ]
 [ 182.08390808]
 [ 195.15800476]
 [ 140.67053223]]
100 3.8592 [[ 154.65113831]
 [ 182.65637207]
 [ 182.07963562]
 [ 195.15574646]
 [ 140.68196106]]
120 3.82756 [[ 154.63761902]
 [ 182.66557312]
 [ 182.07540894]
 [ 195.15351868]
 [ 140.69332886]]
140 3.79625 [[ 154.62414551]
 [ 182.67469788]
 [ 182.07115173]
 [ 195.15126038]
 [ 140.7046051 ]]
160 3.76524 [[ 154.61076355]
 [ 182.68380737]
 [ 182.06694031]
 [ 195.14904785]
 [ 140.71586609]]
180 3.73461 [[ 154.59745789]
 [ 182.69285583]
 [ 182.0627594 ]
 [ 195.14683533]
 [ 140.72702026]]
200 3.70427 [[ 154.584198  ]
 [ 182.70184326]
 [ 182.05859375]
 [ 195.14465332]
 [ 140.73812866]]
220 3.67425 [[ 154.5710144 ]
 [ 182.71078491]
 [ 182.05444336]
 [ 195.14245605]
 [ 140.74919128]]
240 3.64456 [[ 154.55792236]
 [ 182.7197113 ]
 [ 182.05033875]
 [ 195.14031982]
 [ 140.76017761]]
260 3.6152 [[ 154.54489136]
 [ 182.72854614]
 [ 182.04621887]
 [ 195.13815308]
 [ 140.77108765]]
280 3.5861 [[ 154.53190613]
 [ 182.73735046]
 [ 182.04214478]
 [ 195.13604736]
 [ 140.78196716]]
300 3.55735 [[ 154.51901245]
 [ 182.74610901]
 [ 182.03808594]
 [ 195.13392639]
 [ 140.79277039]]
320 3.52888 [[ 154.50619507]
 [ 182.75483704]
 [ 182.03405762]
 [ 195.1318512 ]
 [ 140.80351257]]
340 3.50072 [[ 154.49342346]
 [ 182.76348877]
 [ 182.03001404]
 [ 195.12974548]
 [ 140.81419373]]
360 3.47289 [[ 154.48072815]
 [ 182.77209473]
 [ 182.02604675]
 [ 195.12768555]
 [ 140.82481384]]
380 3.44528 [[ 154.46807861]
 [ 182.78068542]
 [ 182.02204895]
 [ 195.12561035]
 [ 140.83535767]]
400 3.41801 [[ 154.45553589]
 [ 182.78921509]
 [ 182.01809692]
 [ 195.12358093]
 [ 140.84588623]]
420 3.39101 [[ 154.44303894]
 [ 182.79769897]
 [ 182.01416016]
 [ 195.12156677]
 [ 140.85632324]]
440 3.36429 [[ 154.43060303]
 [ 182.80613708]
 [ 182.01023865]
 [ 195.11955261]
 [ 140.86671448]]
460 3.33788 [[ 154.41822815]
 [ 182.81451416]
 [ 182.00634766]
 [ 195.11753845]
 [ 140.87702942]]
480 3.3117 [[ 154.40592957]
 [ 182.82289124]
 [ 182.00247192]
 [ 195.11557007]
 [ 140.88729858]]
500 3.28585 [[ 154.39369202]
 [ 182.83119202]
 [ 181.99864197]
 [ 195.11360168]
 [ 140.89750671]]
520 3.26023 [[ 154.38153076]
 [ 182.83946228]
 [ 181.99478149]
 [ 195.1116333 ]
 [ 140.90766907]]
540 3.23489 [[ 154.36940002]
 [ 182.84765625]
 [ 181.99095154]
 [ 195.10968018]
 [ 140.91775513]]
560 3.20984 [[ 154.3573761 ]
 [ 182.85586548]
 [ 181.98719788]
 [ 195.10777283]
 [ 140.92779541]]
580 3.18502 [[ 154.34539795]
 [ 182.86398315]
 [ 181.98339844]
 [ 195.10585022]
 [ 140.93778992]]
600 3.16046 [[ 154.33348083]
 [ 182.87208557]
 [ 181.97962952]
 [ 195.10395813]
 [ 140.94770813]]
620 3.13619 [[ 154.3216095 ]
 [ 182.88009644]
 [ 181.97587585]
 [ 195.10203552]
 [ 140.95756531]]
640 3.11215 [[ 154.30982971]
 [ 182.88812256]
 [ 181.97218323]
 [ 195.10018921]
 [ 140.96740723]]
660 3.08836 [[ 154.2980957 ]
 [ 182.89608765]
 [ 181.96847534]
 [ 195.0983429 ]
 [ 140.97715759]]
680 3.06484 [[ 154.28642273]
 [ 182.9039917 ]
 [ 181.96479797]
 [ 195.09648132]
 [ 140.98686218]]
700 3.04156 [[ 154.27481079]
 [ 182.91186523]
 [ 181.96112061]
 [ 195.09463501]
 [ 140.99649048]]
720 3.01852 [[ 154.26325989]
 [ 182.91970825]
 [ 181.95750427]
 [ 195.09281921]
 [ 141.00610352]]
740 2.99569 [[ 154.25177002]
 [ 182.92750549]
 [ 181.95385742]
 [ 195.09101868]
 [ 141.01565552]]
760 2.97314 [[ 154.24034119]
 [ 182.93525696]
 [ 181.95024109]
 [ 195.08920288]
 [ 141.02511597]]
780 2.95081 [[ 154.22897339]
 [ 182.94296265]
 [ 181.94665527]
 [ 195.0874176 ]
 [ 141.03456116]]
800 2.92872 [[ 154.21766663]
 [ 182.95063782]
 [ 181.94309998]
 [ 195.08566284]
 [ 141.04394531]]
820 2.90685 [[ 154.2064209 ]
 [ 182.95826721]
 [ 181.93952942]
 [ 195.08390808]
 [ 141.05328369]]
840 2.88522 [[ 154.19523621]
 [ 182.96586609]
 [ 181.93600464]
 [ 195.08216858]
 [ 141.06256104]]
860 2.86382 [[ 154.18408203]
 [ 182.97338867]
 [ 181.93247986]
 [ 195.08042908]
 [ 141.07174683]]
880 2.84265 [[ 154.17303467]
 [ 182.980896  ]
 [ 181.92897034]
 [ 195.07872009]
 [ 141.08093262]]
900 2.8217 [[ 154.16201782]
 [ 182.9883728 ]
 [ 181.92553711]
 [ 195.07702637]
 [ 141.09007263]]
920 2.80093 [[ 154.15104675]
 [ 182.99580383]
 [ 181.92204285]
 [ 195.07531738]
 [ 141.09913635]]
940 2.78043 [[ 154.14015198]
 [ 183.00318909]
 [ 181.91860962]
 [ 195.07362366]
 [ 141.10813904]]
960 2.76013 [[ 154.12931824]
 [ 183.01052856]
 [ 181.91517639]
 [ 195.07196045]
 [ 141.11709595]]
980 2.74003 [[ 154.11854553]
 [ 183.01786804]
 [ 181.91178894]
 [ 195.0703125 ]
 [ 141.1260376 ]]
1000 2.72013 [[ 154.10780334]
 [ 183.02513123]
 [ 181.90837097]
 [ 195.06866455]
 [ 141.1348877 ]]
1020 2.70044 [[ 154.09710693]
 [ 183.03234863]
 [ 181.90499878]
 [ 195.06703186]
 [ 141.14370728]]
1040 2.68097 [[ 154.08650208]
 [ 183.03956604]
 [ 181.90164185]
 [ 195.06539917]
 [ 141.15246582]]
1060 2.6617 [[ 154.07595825]
 [ 183.04672241]
 [ 181.89830017]
 [ 195.063797  ]
 [ 141.16119385]]
1080 2.64261 [[ 154.06542969]
 [ 183.05384827]
 [ 181.89497375]
 [ 195.06221008]
 [ 141.16984558]]
1100 2.62376 [[ 154.05500793]
 [ 183.06092834]
 [ 181.8916626 ]
 [ 195.06062317]
 [ 141.1784668 ]]
1120 2.60507 [[ 154.04458618]
 [ 183.06796265]
 [ 181.8883667 ]
 [ 195.05903625]
 [ 141.18702698]]
1140 2.58659 [[ 154.03424072]
 [ 183.07496643]
 [ 181.88510132]
 [ 195.05747986]
 [ 141.19555664]]
1160 2.56832 [[ 154.02397156]
 [ 183.08197021]
 [ 181.88186646]
 [ 195.05592346]
 [ 141.20402527]]
1180 2.55021 [[ 154.01371765]
 [ 183.08888245]
 [ 181.87860107]
 [ 195.05438232]
 [ 141.21243286]]
1200 2.5323 [[ 154.0035553 ]
 [ 183.09579468]
 [ 181.87539673]
 [ 195.0528717 ]
 [ 141.2208252 ]]
1220 2.51455 [[ 153.9934082 ]
 [ 183.10266113]
 [ 181.87216187]
 [ 195.05134583]
 [ 141.22914124]]
1240 2.49702 [[ 153.9833374 ]
 [ 183.10948181]
 [ 181.86898804]
 [ 195.04985046]
 [ 141.23742676]]
1260 2.47967 [[ 153.97331238]
 [ 183.11625671]
 [ 181.86579895]
 [ 195.0483551 ]
 [ 141.24565125]]
1280 2.4625 [[ 153.96336365]
 [ 183.12301636]
 [ 181.86264038]
 [ 195.046875  ]
 [ 141.25384521]]
1300 2.44548 [[ 153.95343018]
 [ 183.12974548]
 [ 181.85949707]
 [ 195.0453949 ]
 [ 141.26197815]]
1320 2.42868 [[ 153.943573  ]
 [ 183.13641357]
 [ 181.85638428]
 [ 195.04394531]
 [ 141.27008057]]
1340 2.41202 [[ 153.93374634]
 [ 183.14306641]
 [ 181.85325623]
 [ 195.04248047]
 [ 141.27810669]]
1360 2.39555 [[ 153.92401123]
 [ 183.14968872]
 [ 181.85017395]
 [ 195.0410614 ]
 [ 141.28614807]]
1380 2.37926 [[ 153.91429138]
 [ 183.15625   ]
 [ 181.84709167]
 [ 195.03964233]
 [ 141.29408264]]
1400 2.36311 [[ 153.90463257]
 [ 183.16281128]
 [ 181.84402466]
 [ 195.03822327]
 [ 141.30200195]]
1420 2.34717 [[ 153.89501953]
 [ 183.16929626]
 [ 181.84098816]
 [ 195.03681946]
 [ 141.30984497]]
1440 2.33134 [[ 153.88545227]
 [ 183.17578125]
 [ 181.8379364 ]
 [ 195.03541565]
 [ 141.31768799]]
1460 2.31571 [[ 153.8759613 ]
 [ 183.18223572]
 [ 181.83493042]
 [ 195.03404236]
 [ 141.32545471]]
1480 2.30022 [[ 153.86647034]
 [ 183.18862915]
 [ 181.83192444]
 [ 195.03265381]
 [ 141.33319092]]
1500 2.28491 [[ 153.85704041]
 [ 183.19497681]
 [ 181.82891846]
 [ 195.03128052]
 [ 141.34086609]]
1520 2.26975 [[ 153.84770203]
 [ 183.20133972]
 [ 181.82597351]
 [ 195.029953  ]
 [ 141.348526  ]]
1540 2.25475 [[ 153.83837891]
 [ 183.2076416 ]
 [ 181.82301331]
 [ 195.02861023]
 [ 141.35612488]]
1560 2.2399 [[ 153.82910156]
 [ 183.21389771]
 [ 181.8200531 ]
 [ 195.02726746]
 [ 141.36367798]]
1580 2.22521 [[ 153.81990051]
 [ 183.22015381]
 [ 181.81713867]
 [ 195.0259552 ]
 [ 141.37120056]]
1600 2.21067 [[ 153.81071472]
 [ 183.22633362]
 [ 181.81419373]
 [ 195.02461243]
 [ 141.37864685]]
1620 2.19627 [[ 153.80162048]
 [ 183.23254395]
 [ 181.81132507]
 [ 195.02334595]
 [ 141.3861084 ]]
1640 2.18204 [[ 153.79255676]
 [ 183.23867798]
 [ 181.80844116]
 [ 195.02206421]
 [ 141.39347839]]
1660 2.16792 [[ 153.7835083 ]
 [ 183.24478149]
 [ 181.80557251]
 [ 195.02079773]
 [ 141.40084839]]
1680 2.15399 [[ 153.77453613]
 [ 183.25085449]
 [ 181.80271912]
 [ 195.01951599]
 [ 141.40814209]]
1700 2.14016 [[ 153.76556396]
 [ 183.25686646]
 [ 181.79983521]
 [ 195.01821899]
 [ 141.41537476]]
1720 2.12649 [[ 153.75669861]
 [ 183.26290894]
 [ 181.79704285]
 [ 195.01701355]
 [ 141.42262268]]
1740 2.11299 [[ 153.74786377]
 [ 183.26889038]
 [ 181.79425049]
 [ 195.01576233]
 [ 141.42979431]]
1760 2.09961 [[ 153.73907471]
 [ 183.27482605]
 [ 181.79142761]
 [ 195.01452637]
 [ 141.43690491]]
1780 2.08636 [[ 153.73033142]
 [ 183.28074646]
 [ 181.78865051]
 [ 195.01332092]
 [ 141.44403076]]
1800 2.07324 [[ 153.72161865]
 [ 183.28663635]
 [ 181.78587341]
 [ 195.01210022]
 [ 141.45106506]]
1820 2.06024 [[ 153.71295166]
 [ 183.29249573]
 [ 181.78311157]
 [ 195.01091003]
 [ 141.45809937]]
1840 2.04743 [[ 153.7043457 ]
 [ 183.29830933]
 [ 181.78038025]
 [ 195.00970459]
 [ 141.46507263]]
1860 2.03471 [[ 153.69578552]
 [ 183.30412292]
 [ 181.77766418]
 [ 195.00854492]
 [ 141.47201538]]
1880 2.02212 [[ 153.68727112]
 [ 183.30989075]
 [ 181.77494812]
 [ 195.00738525]
 [ 141.47891235]]
1900 2.00968 [[ 153.67877197]
 [ 183.31559753]
 [ 181.77224731]
 [ 195.00621033]
 [ 141.48574829]]
1920 1.99735 [[ 153.67034912]
 [ 183.32131958]
 [ 181.76956177]
 [ 195.00506592]
 [ 141.49256897]]
1940 1.98516 [[ 153.66195679]
 [ 183.32698059]
 [ 181.76687622]
 [ 195.00390625]
 [ 141.49935913]]
1960 1.97308 [[ 153.65361023]
 [ 183.3326416 ]
 [ 181.76422119]
 [ 195.0027771 ]
 [ 141.50608826]]
1980 1.96114 [[ 153.64530945]
 [ 183.33824158]
 [ 181.76158142]
 [ 195.00166321]
 [ 141.51280212]]
2000 1.94931 [[ 153.63703918]
 [ 183.34381104]
 [ 181.75892639]
 [ 195.00053406]
 [ 141.51945496]]
2020 1.9376 [[ 153.6288147 ]
 [ 183.34938049]
 [ 181.75631714]
 [ 194.99942017]
 [ 141.52607727]]
2040 1.92602 [[ 153.62065125]
 [ 183.35490417]
 [ 181.75370789]
 [ 194.99832153]
 [ 141.53266907]]
2060 1.91455 [[ 153.61250305]
 [ 183.36039734]
 [ 181.75111389]
 [ 194.99723816]
 [ 141.53919983]]
2080 1.90319 [[ 153.60441589]
 [ 183.36587524]
 [ 181.74853516]
 [ 194.99615479]
 [ 141.54573059]]
2100 1.89194 [[ 153.59635925]
 [ 183.37130737]
 [ 181.74594116]
 [ 194.99508667]
 [ 141.55218506]]
2120 1.88084 [[ 153.58834839]
 [ 183.37670898]
 [ 181.7434082 ]
 [ 194.99403381]
 [ 141.55862427]]
2140 1.86981 [[ 153.5803833 ]
 [ 183.38209534]
 [ 181.74084473]
 [ 194.99298096]
 [ 141.56503296]]
2160 1.85892 [[ 153.57244873]
 [ 183.38743591]
 [ 181.73831177]
 [ 194.99194336]
 [ 141.57138062]]
2180 1.84812 [[ 153.56455994]
 [ 183.39277649]
 [ 181.73579407]
 [ 194.9908905 ]
 [ 141.57771301]]
2200 1.83747 [[ 153.55674744]
 [ 183.39808655]
 [ 181.73330688]
 [ 194.98988342]
 [ 141.58399963]]
2220 1.82691 [[ 153.54893494]
 [ 183.40335083]
 [ 181.73083496]
 [ 194.98887634]
 [ 141.590271  ]]
2240 1.81646 [[ 153.54116821]
 [ 183.40856934]
 [ 181.72833252]
 [ 194.98783875]
 [ 141.59646606]]
2260 1.80609 [[ 153.53346252]
 [ 183.41381836]
 [ 181.72587585]
 [ 194.98686218]
 [ 141.60267639]]
2280 1.79582 [[ 153.52577209]
 [ 183.41900635]
 [ 181.72340393]
 [ 194.98588562]
 [ 141.60881042]]
2300 1.78568 [[ 153.51812744]
 [ 183.42416382]
 [ 181.72096252]
 [ 194.9848938 ]
 [ 141.6149292 ]]
2320 1.77566 [[ 153.51051331]
 [ 183.42927551]
 [ 181.71853638]
 [ 194.98390198]
 [ 141.62098694]]
2340 1.76572 [[ 153.50294495]
 [ 183.43437195]
 [ 181.71611023]
 [ 194.98294067]
 [ 141.62702942]]
2360 1.75588 [[ 153.49542236]
 [ 183.43945312]
 [ 181.71369934]
 [ 194.98196411]
 [ 141.63304138]]
2380 1.74616 [[ 153.48796082]
 [ 183.44450378]
 [ 181.71130371]
 [ 194.98101807]
 [ 141.63900757]]
2400 1.73651 [[ 153.48051453]
 [ 183.44953918]
 [ 181.70892334]
 [ 194.98010254]
 [ 141.64494324]]
2420 1.72695 [[ 153.47309875]
 [ 183.45455933]
 [ 181.70655823]
 [ 194.97915649]
 [ 141.65086365]]
2440 1.71751 [[ 153.4657135 ]
 [ 183.45950317]
 [ 181.70417786]
 [ 194.97822571]
 [ 141.65672302]]
2460 1.70817 [[ 153.45840454]
 [ 183.46446228]
 [ 181.70184326]
 [ 194.97732544]
 [ 141.66255188]]
2480 1.69891 [[ 153.45111084]
 [ 183.46939087]
 [ 181.69950867]
 [ 194.97640991]
 [ 141.66835022]]
2500 1.68975 [[ 153.44384766]
 [ 183.47427368]
 [ 181.69718933]
 [ 194.9755249 ]
 [ 141.67411804]]
2520 1.68067 [[ 153.43663025]
 [ 183.47917175]
 [ 181.69487   ]
 [ 194.97460938]
 [ 141.67985535]]
2540 1.67169 [[ 153.4294281 ]
 [ 183.48397827]
 [ 181.69255066]
 [ 194.97372437]
 [ 141.68554688]]
2560 1.66281 [[ 153.42230225]
 [ 183.48881531]
 [ 181.69029236]
 [ 194.97286987]
 [ 141.69122314]]
2580 1.654 [[ 153.41517639]
 [ 183.49360657]
 [ 181.68800354]
 [ 194.97198486]
 [ 141.69685364]]
2600 1.64529 [[ 153.40811157]
 [ 183.49838257]
 [ 181.68572998]
 [ 194.97113037]
 [ 141.70245361]]
2620 1.63668 [[ 153.40109253]
 [ 183.50312805]
 [ 181.68348694]
 [ 194.97027588]
 [ 141.70802307]]
2640 1.62814 [[ 153.39407349]
 [ 183.50782776]
 [ 181.68122864]
 [ 194.96940613]
 [ 141.71356201]]
2660 1.61969 [[ 153.38713074]
 [ 183.51254272]
 [ 181.67900085]
 [ 194.96858215]
 [ 141.71907043]]
2680 1.61133 [[ 153.38020325]
 [ 183.51719666]
 [ 181.67677307]
 [ 194.96775818]
 [ 141.72454834]]
2700 1.60303 [[ 153.37329102]
 [ 183.52185059]
 [ 181.67456055]
 [ 194.9669342 ]
 [ 141.72998047]]
2720 1.59483 [[ 153.36645508]
 [ 183.526474  ]
 [ 181.67234802]
 [ 194.96611023]
 [ 141.73539734]]
2740 1.58672 [[ 153.3596344 ]
 [ 183.53106689]
 [ 181.67016602]
 [ 194.96531677]
 [ 141.74078369]]
2760 1.57869 [[ 153.35284424]
 [ 183.53562927]
 [ 181.66798401]
 [ 194.96450806]
 [ 141.74612427]]
2780 1.57074 [[ 153.34609985]
 [ 183.54019165]
 [ 181.66581726]
 [ 194.96369934]
 [ 141.75143433]]
2800 1.56286 [[ 153.33938599]
 [ 183.54472351]
 [ 181.66366577]
 [ 194.96292114]
 [ 141.75672913]]
2820 1.55505 [[ 153.33270264]
 [ 183.54922485]
 [ 181.66151428]
 [ 194.9621582 ]
 [ 141.76197815]]
2840 1.54732 [[ 153.3260498 ]
 [ 183.55371094]
 [ 181.65937805]
 [ 194.96139526]
 [ 141.76721191]]
2860 1.53969 [[ 153.31942749]
 [ 183.55815125]
 [ 181.65725708]
 [ 194.96061707]
 [ 141.7723999 ]]
2880 1.53212 [[ 153.31285095]
 [ 183.56257629]
 [ 181.65512085]
 [ 194.95985413]
 [ 141.77754211]]
2900 1.52463 [[ 153.30630493]
 [ 183.56698608]
 [ 181.6530304 ]
 [ 194.95910645]
 [ 141.78269958]]
2920 1.51721 [[ 153.29981995]
 [ 183.57138062]
 [ 181.65093994]
 [ 194.95838928]
 [ 141.78779602]]
2940 1.50986 [[ 153.29333496]
 [ 183.57574463]
 [ 181.64884949]
 [ 194.95765686]
 [ 141.7928772 ]]
2960 1.50261 [[ 153.28689575]
 [ 183.58006287]
 [ 181.64677429]
 [ 194.95690918]
 [ 141.79792786]]
2980 1.4954 [[ 153.28045654]
 [ 183.58439636]
 [ 181.64471436]
 [ 194.95617676]
 [ 141.80293274]]

Remove the separate bias term (fold it into W)

$$ \left[\begin{array}{cccc} b & w_1 & w_2 & w_3 \end{array} \right] \left[\begin{array}{c} 1 \\ x_1 \\ x_2 \\ x_3 \end{array} \right] = \left[\begin{array}{c} b + w_1 x_1 + w_2 x_2 + w_3 x_3 \end{array} \right] $$
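
A sketch of this trick in TF 1.x (my own example, reusing the data from the previous cell with a constant 1 prepended to every sample, so the bias becomes the first entry of W):

In [ ]:
# Absorb the bias into W by prepending a 1 to every input row (sketch).
x_data_b = [[1., 73., 80., 75.], [1., 93., 88., 93.], [1., 89., 91., 90.],
            [1., 96., 98., 100.], [1., 73., 66., 70.]]
y_data = [[152.], [185.], [180.], [196.], [142.]]

X = tf.placeholder(tf.float32, shape=[None, 4])
Y = tf.placeholder(tf.float32, shape=[None, 1])
W = tf.Variable(tf.random_normal([4, 1]), name='weight_with_bias')

h = tf.matmul(X, W)          # no separate + b term
cost = tf.reduce_mean(tf.square(h - Y))
train = tf.train.GradientDescentOptimizer(learning_rate=1e-5).minimize(cost)

with tf.Session() as s:
    s.run(tf.global_variables_initializer())
    for step in range(2000):
        s.run(train, feed_dict={X: x_data_b, Y: y_data})
    print(s.run(cost, feed_dict={X: x_data_b, Y: y_data}))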


In [ ]: