In [1]:
import tensorflow as tf

In [7]:
hello = tf.constant("Hello, TensorFlow!")
sess = tf.Session()
print(sess.run(hello))


Hello, TensorFlow!
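
Note: this notebook uses the TensorFlow 1.x graph-and-session API. On TensorFlow 2.x the same cells can be run through the v1 compatibility shim; a minimal sketch, assuming TF 2.x is installed:

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # restore 1.x graph-mode semantics

hello = tf.constant("Hello, TensorFlow!")
with tf.Session() as sess:
    print(sess.run(hello))  # prints b'Hello, TensorFlow!' under Python 3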

In [10]:
node1 = tf.constant(3.0, tf.float32)  # dtype given explicitly
node2 = tf.constant(4.0)              # tf.float32 implied by the value
node3 = tf.add(node1, node2)

In [11]:
print("node1:", node1, "node2:", node2)
print("node3: ", node3)


node1: Tensor("Const_2:0", shape=(), dtype=float32) node2: Tensor("Const_3:0", shape=(), dtype=float32)
node3:  Tensor("Add:0", shape=(), dtype=float32)
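
Printing a Tensor shows only its graph metadata (name, shape, dtype); the graph has to be executed in a session to produce actual values. A minimal check, reusing the session created above:

# Fetch the concrete values by running the nodes in the session
print("sess.run([node1, node2]):", sess.run([node1, node2]))  # [3.0, 4.0]
print("sess.run(node3):", sess.run(node3))                    # 7.0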

In [12]:
# X and Y data
x_train = [1, 2, 3]
y_train = [1, 2, 3]

# Try to find values for W and b that fit y_train = x_train * W + b.
# We know that W should be 1 and b should be 0,
# but let TensorFlow figure it out.
W = tf.Variable(tf.random_normal([1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

# Our hypothesis: x_train * W + b
hypothesis = x_train * W + b

# cost/loss function: tf.reduce_mean(t) returns the mean of all elements of t
cost = tf.reduce_mean(tf.square(hypothesis - y_train))

# Minimize
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)

# Launch the graph in a session.
sess = tf.Session()
# Initializes global variables in the graph.
sess.run(tf.global_variables_initializer())

# Fit the line
for step in range(2001):
    sess.run(train)
    if step % 20 == 0:
        print(step, sess.run(cost), sess.run(W), sess.run(b))


0 20.1834 [-1.51091158] [ 1.02429187]
20 0.593376 [ 0.08785752] [ 1.62754512]
40 0.378093 [ 0.27172229] [ 1.61308157]
60 0.341932 [ 0.31937432] [ 1.54317856]
80 0.310535 [ 0.35263965] [ 1.47121716]
100 0.282032 [ 0.38318491] [ 1.40212905]
120 0.256146 [ 0.4121846] [ 1.3362391]
140 0.232636 [ 0.43981087] [ 1.27344096]
160 0.211284 [ 0.46613798] [ 1.21359372]
180 0.191891 [ 0.49122754] [ 1.15655959]
200 0.174279 [ 0.51513785] [ 1.10220551]
220 0.158283 [ 0.53792459] [ 1.05040586]
240 0.143755 [ 0.55964047] [ 1.00104082]
260 0.130561 [ 0.58033562] [ 0.95399565]
280 0.118577 [ 0.60005832] [ 0.90916139]
300 0.107694 [ 0.61885411] [ 0.8664341]
320 0.0978092 [ 0.63676655] [ 0.82571495]
340 0.0888319 [ 0.65383714] [ 0.7869094]
360 0.0806785 [ 0.67010558] [ 0.74992752]
380 0.0732735 [ 0.6856094] [ 0.71468371]
400 0.0665482 [ 0.70038462] [ 0.6810962]
420 0.0604401 [ 0.71446544] [ 0.64908713]
440 0.0548927 [ 0.72788447] [ 0.61858243]
460 0.0498544 [ 0.74067295] [ 0.58951133]
480 0.0452786 [ 0.75286043] [ 0.56180632]
500 0.0411227 [ 0.76447505] [ 0.53540349]
520 0.0373483 [ 0.77554387] [ 0.51024145]
540 0.0339203 [ 0.78609258] [ 0.48626193]
560 0.030807 [ 0.79614538] [ 0.46340945]
580 0.0279794 [ 0.80572575] [ 0.4416309]
600 0.0254114 [ 0.81485593] [ 0.42087585]
620 0.023079 [ 0.82355708] [ 0.40109631]
640 0.0209607 [ 0.83184922] [ 0.38224632]
660 0.0190368 [ 0.83975172] [ 0.36428213]
680 0.0172896 [ 0.84728277] [ 0.34716222]
700 0.0157027 [ 0.85445988] [ 0.33084691]
720 0.0142614 [ 0.86129975] [ 0.31529829]
740 0.0129524 [ 0.8678183] [ 0.30048037]
760 0.0117636 [ 0.87403023] [ 0.28635889]
780 0.0106839 [ 0.8799504] [ 0.27290103]
800 0.0097033 [ 0.88559228] [ 0.26007569]
820 0.00881268 [ 0.89096898] [ 0.24785316]
840 0.00800382 [ 0.89609301] [ 0.23620497]
860 0.0072692 [ 0.9009763] [ 0.22510417]
880 0.006602 [ 0.90562999] [ 0.2145251]
900 0.00599605 [ 0.91006511] [ 0.20444323]
920 0.0054457 [ 0.91429168] [ 0.19483514]
940 0.00494587 [ 0.91831952] [ 0.18567866]
960 0.00449192 [ 0.92215836] [ 0.17695247]
980 0.00407964 [ 0.92581654] [ 0.16863637]
1000 0.0037052 [ 0.92930299] [ 0.16071108]
1020 0.00336511 [ 0.93262541] [ 0.15315823]
1040 0.00305625 [ 0.93579179] [ 0.14596036]
1060 0.00277574 [ 0.93880939] [ 0.13910073]
1080 0.00252097 [ 0.94168514] [ 0.13256346]
1100 0.00228958 [ 0.9444257] [ 0.12633348]
1120 0.00207943 [ 0.94703746] [ 0.12039628]
1140 0.00188858 [ 0.94952649] [ 0.11473811]
1160 0.00171524 [ 0.95189857] [ 0.10934586]
1180 0.00155781 [ 0.95415914] [ 0.10420702]
1200 0.00141483 [ 0.95631349] [ 0.09930968]
1220 0.00128497 [ 0.95836657] [ 0.09464248]
1240 0.00116703 [ 0.96032327] [ 0.09019466]
1260 0.00105991 [ 0.96218789] [ 0.08595581]
1280 0.000962631 [ 0.96396494] [ 0.08191621]
1300 0.000874277 [ 0.96565843] [ 0.07806644]
1320 0.000794032 [ 0.96727234] [ 0.07439762]
1340 0.000721153 [ 0.96881038] [ 0.07090124]
1360 0.000654963 [ 0.97027624] [ 0.06756915]
1380 0.000594846 [ 0.97167313] [ 0.06439361]
1400 0.000540249 [ 0.9730044] [ 0.06136734]
1420 0.000490662 [ 0.97427315] [ 0.05848328]
1440 0.000445628 [ 0.97548217] [ 0.05573477]
1460 0.000404727 [ 0.97663444] [ 0.05311545]
1480 0.000367579 [ 0.97773248] [ 0.05061923]
1500 0.000333841 [ 0.9787789] [ 0.04824035]
1520 0.000303198 [ 0.97977638] [ 0.04597324]
1540 0.00027537 [ 0.98072678] [ 0.04381261]
1560 0.000250096 [ 0.98163259] [ 0.04175356]
1580 0.00022714 [ 0.98249578] [ 0.03979127]
1600 0.000206293 [ 0.98331839] [ 0.03792122]
1620 0.000187357 [ 0.98410237] [ 0.03613905]
1640 0.000170161 [ 0.98484945] [ 0.03444067]
1660 0.000154543 [ 0.98556155] [ 0.03282208]
1680 0.000140359 [ 0.98624009] [ 0.03127956]
1700 0.000127476 [ 0.98688674] [ 0.02980952]
1720 0.000115777 [ 0.98750299] [ 0.02840859]
1740 0.000105149 [ 0.98809034] [ 0.02707351]
1760 9.54984e-05 [ 0.98864996] [ 0.02580117]
1780 8.67347e-05 [ 0.98918337] [ 0.02458863]
1800 7.87727e-05 [ 0.98969179] [ 0.0234331]
1820 7.15431e-05 [ 0.99017638] [ 0.02233172]
1840 6.49754e-05 [ 0.99063796] [ 0.02128214]
1860 5.90112e-05 [ 0.99107796] [ 0.02028194]
1880 5.35948e-05 [ 0.99149728] [ 0.01932875]
1900 4.86763e-05 [ 0.99189687] [ 0.01842033]
1920 4.4208e-05 [ 0.99227768] [ 0.01755465]
1940 4.015e-05 [ 0.99264061] [ 0.01672965]
1960 3.64658e-05 [ 0.99298644] [ 0.0159434]
1980 3.31191e-05 [ 0.99331605] [ 0.01519412]
2000 3.00783e-05 [ 0.99363023] [ 0.01448005]
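
As an aside, optimizer.minimize(cost) computes the gradients and applies the update W := W - learning_rate * d(cost)/dW automatically. A hand-rolled equivalent for this model (an illustrative sketch, not part of the original run):

# Manual gradient-descent step for cost = mean((W*x + b - y)^2)
# d(cost)/dW = mean(2 * (W*x + b - y) * x),  d(cost)/db = mean(2 * (W*x + b - y))
learning_rate = 0.01
error = hypothesis - y_train
update_W = W.assign(W - learning_rate * tf.reduce_mean(2 * error * x_train))
update_b = b.assign(b - learning_rate * tf.reduce_mean(2 * error))
# sess.run([update_W, update_b]) then performs one step equivalent to sess.run(train)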

In [13]:
# Try to find values for W and b that fit Y = X * W + b.
# We know that W should be 1 and b should be 0,
# but let's use TensorFlow to figure it out.
W = tf.Variable(tf.random_normal([1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

# Now we can use X and Y in place of x_train and y_train.
# Placeholders stand for tensors that will always be fed via feed_dict.
# See http://stackoverflow.com/questions/36693740/
X = tf.placeholder(tf.float32, shape=[None])
Y = tf.placeholder(tf.float32, shape=[None])

# Our hypothesis: X * W + b
hypothesis = X * W + b

# cost/loss function
cost = tf.reduce_mean(tf.square(hypothesis - Y))

# Minimize
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)

# Launch the graph in a session.
sess = tf.Session()
# Initializes global variables in the graph.
sess.run(tf.global_variables_initializer())

# Fit the line
for step in range(2001):
    cost_val, W_val, b_val, _ = \
        sess.run([cost, W, b, train],
                 feed_dict={X: [1, 2, 3], Y: [1, 2, 3]})
    if step % 20 == 0:
        print(step, cost_val, W_val, b_val)

# Learns the best fit, W: [1.], b: [0.]
'''
...
1980 1.32962e-05 [ 1.00423515] [-0.00962736]
2000 1.20761e-05 [ 1.00403607] [-0.00917497]
'''

# Testing our model
print(sess.run(hypothesis, feed_dict={X: [5]}))
print(sess.run(hypothesis, feed_dict={X: [2.5]}))
print(sess.run(hypothesis, feed_dict={X: [1.5, 3.5]}))

'''
[ 5.0110054]
[ 2.50091505]
[ 1.49687922  3.50495124]
'''
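
# Note: shape=[None] means each placeholder accepts a 1-D tensor of any
# length, which is why feed_dict can pass a single value ([5]) or a
# batch ([1.5, 3.5]) through the same graph.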


# Fit the line with new training data
for step in range(2001):
    cost_val, W_val, b_val, _ = \
        sess.run([cost, W, b, train],
                 feed_dict={X: [1, 2, 3, 4, 5],
                            Y: [2.1, 3.1, 4.1, 5.1, 6.1]})
    if step % 20 == 0:
        print(step, cost_val, W_val, b_val)

# Testing our model
print(sess.run(hypothesis, feed_dict={X: [5]}))
print(sess.run(hypothesis, feed_dict={X: [2.5]}))
print(sess.run(hypothesis, feed_dict={X: [1.5, 3.5]}))


0 15.6697 [-0.6766386] [ 0.10877003]
20 0.207245 [ 0.57277751] [ 0.61861598]
40 0.0610695 [ 0.70432103] [ 0.6385771]
60 0.0543114 [ 0.72883058] [ 0.61323541]
80 0.049316 [ 0.74258512] [ 0.58486021]
100 0.0447895 [ 0.75477892] [ 0.55741614]
120 0.0406786 [ 0.76631266] [ 0.53122354]
140 0.0369449 [ 0.77729595] [ 0.50625843]
160 0.033554 [ 0.78776228] [ 0.48246616]
180 0.0304742 [ 0.79773664] [ 0.45979208]
200 0.0276772 [ 0.80724227] [ 0.43818358]
220 0.0251369 [ 0.81630129] [ 0.41759053]
240 0.0228297 [ 0.82493442] [ 0.39796531]
260 0.0207343 [ 0.83316183] [ 0.37926236]
280 0.0188312 [ 0.84100264] [ 0.36143842]
300 0.0171028 [ 0.84847492] [ 0.34445211]
320 0.015533 [ 0.85559607] [ 0.32826409]
340 0.0141074 [ 0.86238259] [ 0.31283686]
360 0.0128125 [ 0.86885005] [ 0.29813468]
380 0.0116365 [ 0.87501359] [ 0.28412345]
400 0.0105685 [ 0.88088757] [ 0.27077064]
420 0.00959848 [ 0.88648528] [ 0.25804543]
440 0.0087175 [ 0.89182019] [ 0.24591826]
460 0.00791738 [ 0.89690417] [ 0.23436101]
480 0.00719069 [ 0.90174931] [ 0.22334692]
500 0.00653068 [ 0.90636671] [ 0.21285044]
520 0.00593128 [ 0.91076714] [ 0.2028473]
540 0.00538688 [ 0.91496074] [ 0.19331424]
560 0.00489246 [ 0.91895723] [ 0.1842292]
580 0.0044434 [ 0.92276597] [ 0.17557111]
600 0.00403557 [ 0.92639571] [ 0.16731991]
620 0.00366517 [ 0.92985475] [ 0.15945654]
640 0.00332877 [ 0.93315136] [ 0.15196271]
660 0.00302325 [ 0.93629301] [ 0.14482105]
680 0.00274576 [ 0.93928695] [ 0.13801505]
700 0.00249375 [ 0.94214022] [ 0.13152887]
720 0.00226486 [ 0.94485945] [ 0.1253475]
740 0.00205698 [ 0.94745082] [ 0.11945663]
760 0.00186819 [ 0.94992036] [ 0.11384267]
780 0.00169671 [ 0.95227396] [ 0.10849248]
800 0.00154098 [ 0.95451695] [ 0.10339373]
820 0.00139955 [ 0.95665443] [ 0.0985346]
840 0.00127109 [ 0.95869148] [ 0.09390385]
860 0.00115442 [ 0.96063286] [ 0.08949075]
880 0.00104847 [ 0.96248299] [ 0.08528503]
900 0.000952234 [ 0.96424609] [ 0.08127695]
920 0.000864839 [ 0.96592647] [ 0.07745724]
940 0.000785459 [ 0.96752781] [ 0.07381702]
960 0.000713366 [ 0.96905369] [ 0.07034791]
980 0.00064789 [ 0.97050816] [ 0.06704188]
1000 0.000588425 [ 0.97189415] [ 0.06389115]
1020 0.000534419 [ 0.97321504] [ 0.0608885]
1040 0.00048537 [ 0.97447383] [ 0.05802698]
1060 0.000440818 [ 0.9756735] [ 0.0552999]
1080 0.000400356 [ 0.97681671] [ 0.052701]
1100 0.00036361 [ 0.97790623] [ 0.05022427]
1120 0.000330237 [ 0.97894454] [ 0.04786393]
1140 0.000299928 [ 0.97993398] [ 0.04561455]
1160 0.0002724 [ 0.98087716] [ 0.04347083]
1180 0.000247396 [ 0.98177588] [ 0.04142783]
1200 0.000224688 [ 0.98263234] [ 0.03948085]
1220 0.000204066 [ 0.98344857] [ 0.03762538]
1240 0.000185336 [ 0.98422635] [ 0.03585714]
1260 0.000168326 [ 0.98496777] [ 0.03417198]
1280 0.000152875 [ 0.9856742] [ 0.03256599]
1300 0.000138844 [ 0.98634744] [ 0.0310355]
1320 0.0001261 [ 0.98698908] [ 0.02957693]
1340 0.000114528 [ 0.98760051] [ 0.02818691]
1360 0.000104015 [ 0.98818326] [ 0.02686225]
1380 9.44682e-05 [ 0.98873854] [ 0.02559983]
1400 8.57977e-05 [ 0.98926771] [ 0.02439679]
1420 7.79224e-05 [ 0.98977214] [ 0.02325023]
1440 7.07707e-05 [ 0.99025303] [ 0.02215749]
1460 6.42736e-05 [ 0.99071097] [ 0.0211161]
1480 5.83753e-05 [ 0.99114758] [ 0.0201237]
1500 5.30168e-05 [ 0.99156362] [ 0.01917795]
1520 4.81503e-05 [ 0.99196011] [ 0.01827665]
1540 4.37315e-05 [ 0.99233794] [ 0.0174177]
1560 3.97175e-05 [ 0.99269801] [ 0.01659912]
1580 3.60715e-05 [ 0.99304116] [ 0.01581903]
1600 3.27614e-05 [ 0.99336821] [ 0.0150756]
1620 2.97545e-05 [ 0.99367988] [ 0.01436711]
1640 2.70232e-05 [ 0.99397689] [ 0.01369191]
1660 2.45426e-05 [ 0.99425995] [ 0.01304845]
1680 2.22903e-05 [ 0.99452972] [ 0.01243521]
1700 2.02445e-05 [ 0.9947868] [ 0.01185082]
1720 1.83866e-05 [ 0.99503177] [ 0.01129389]
1740 1.66986e-05 [ 0.99526525] [ 0.01076313]
1760 1.51661e-05 [ 0.99548775] [ 0.01025731]
1780 1.37738e-05 [ 0.99569988] [ 0.00977525]
1800 1.25097e-05 [ 0.99590194] [ 0.00931584]
1820 1.13616e-05 [ 0.99609452] [ 0.00887804]
1840 1.03189e-05 [ 0.99627805] [ 0.00846081]
1860 9.37222e-06 [ 0.99645293] [ 0.00806321]
1880 8.51199e-06 [ 0.99661964] [ 0.0076843]
1900 7.73055e-06 [ 0.99677849] [ 0.0073232]
1920 7.02115e-06 [ 0.99692988] [ 0.00697904]
1940 6.37658e-06 [ 0.99707419] [ 0.00665105]
1960 5.7914e-06 [ 0.99721169] [ 0.00633848]
1980 5.26002e-06 [ 0.99734271] [ 0.00604061]
2000 4.77708e-06 [ 0.99746758] [ 0.00575675]
[ 4.99309492]
[ 2.49942589]
[ 1.50195813  3.49689341]
0 1.21406 [ 1.06367934] [ 0.02779355]
20 0.163984 [ 1.26102054] [ 0.15382862]
40 0.14319 [ 1.24483621] [ 0.21604624]
60 0.125049 [ 1.2288059] [ 0.2739383]
80 0.109206 [ 1.21382117] [ 0.32803771]
100 0.0953705 [ 1.1998179] [ 0.3785941]
120 0.0832877 [ 1.1867317] [ 0.42583954]
140 0.0727358 [ 1.17450225] [ 0.46999085]
160 0.0635207 [ 1.16307402] [ 0.51125079]
180 0.0554731 [ 1.15239418] [ 0.54980838]
200 0.0484451 [ 1.14241374] [ 0.58584094]
220 0.0423075 [ 1.13308704] [ 0.61951345]
240 0.0369474 [ 1.12437105] [ 0.65098095]
260 0.0322665 [ 1.11622584] [ 0.6803875]
280 0.0281785 [ 1.10861421] [ 0.7078681]
300 0.0246085 [ 1.10150099] [ 0.73354918]
320 0.0214908 [ 1.09485364] [ 0.75754833]
340 0.0187681 [ 1.08864152] [ 0.77997571]
360 0.0163903 [ 1.08283651] [ 0.80093431]
380 0.0143138 [ 1.07741129] [ 0.82052046]
400 0.0125003 [ 1.07234144] [ 0.8388238]
420 0.0109166 [ 1.06760383] [ 0.85592854]
440 0.00953356 [ 1.06317651] [ 0.87191296]
460 0.00832572 [ 1.05903888] [ 0.88685083]
480 0.00727092 [ 1.05517244] [ 0.90081018]
500 0.00634974 [ 1.05155921] [ 0.91385514]
520 0.00554529 [ 1.04818249] [ 0.92604595]
540 0.00484274 [ 1.04502702] [ 0.93743825]
560 0.0042292 [ 1.04207814] [ 0.94808459]
580 0.0036934 [ 1.03932238] [ 0.95803356]
600 0.00322547 [ 1.0367471] [ 0.96733117]
620 0.00281683 [ 1.0343405] [ 0.97601962]
640 0.00245995 [ 1.03209162] [ 0.98413914]
660 0.0021483 [ 1.02998984] [ 0.99172693]
680 0.00187614 [ 1.02802575] [ 0.99881774]
700 0.00163844 [ 1.0261904] [ 1.00544429]
720 0.00143085 [ 1.0244751] [ 1.01163709]
740 0.00124957 [ 1.02287221] [ 1.01742399]
760 0.00109127 [ 1.02137434] [ 1.0228318]
780 0.000953015 [ 1.01997459] [ 1.02788544]
800 0.000832275 [ 1.01866639] [ 1.03260827]
820 0.000726827 [ 1.0174439] [ 1.03702176]
840 0.00063475 [ 1.01630151] [ 1.04114616]
860 0.000554332 [ 1.01523387] [ 1.04500055]
880 0.000484095 [ 1.01423621] [ 1.04860294]
900 0.000422763 [ 1.01330376] [ 1.05196893]
920 0.000369207 [ 1.01243258] [ 1.05511439]
940 0.000322426 [ 1.01161838] [ 1.05805397]
960 0.000281575 [ 1.01085746] [ 1.06080127]
980 0.0002459 [ 1.01014626] [ 1.06336844]
1000 0.000214748 [ 1.00948179] [ 1.06576753]
1020 0.000187542 [ 1.00886083] [ 1.0680095]
1040 0.000163785 [ 1.00828063] [ 1.07010436]
1060 0.000143033 [ 1.00773835] [ 1.07206213]
1080 0.000124913 [ 1.00723147] [ 1.07389176]
1100 0.000109085 [ 1.00675797] [ 1.07560158]
1120 9.52663e-05 [ 1.00631535] [ 1.07719934]
1140 8.32005e-05 [ 1.00590181] [ 1.07869232]
1160 7.26612e-05 [ 1.00551546] [ 1.08008742]
1180 6.34557e-05 [ 1.00515413] [ 1.08139157]
1200 5.54161e-05 [ 1.00481665] [ 1.08261025]
1220 4.83946e-05 [ 1.00450122] [ 1.08374918]
1240 4.22612e-05 [ 1.0042063] [ 1.08481371]
1260 3.69087e-05 [ 1.00393081] [ 1.08580828]
1280 3.22316e-05 [ 1.00367343] [ 1.08673763]
1300 2.81487e-05 [ 1.00343287] [ 1.08760619]
1320 2.45815e-05 [ 1.00320804] [ 1.08841789]
1340 2.14686e-05 [ 1.00299799] [ 1.08917618]
1360 1.87468e-05 [ 1.00280154] [ 1.08988547]
1380 1.63716e-05 [ 1.00261807] [ 1.09054792]
1400 1.4297e-05 [ 1.00244653] [ 1.09116697]
1420 1.24859e-05 [ 1.00228643] [ 1.09174562]
1440 1.09043e-05 [ 1.00213659] [ 1.09228623]
1460 9.52218e-06 [ 1.00199664] [ 1.09279144]
1480 8.31612e-06 [ 1.00186598] [ 1.09326351]
1500 7.2626e-06 [ 1.00174367] [ 1.09370458]
1520 6.34257e-06 [ 1.00162947] [ 1.09411693]
1540 5.53852e-06 [ 1.00152278] [ 1.09450233]
1560 4.83648e-06 [ 1.001423] [ 1.09486234]
1580 4.22407e-06 [ 1.0013299] [ 1.09519875]
1600 3.68883e-06 [ 1.00124276] [ 1.09551322]
1620 3.22174e-06 [ 1.00116146] [ 1.09580708]
1640 2.81347e-06 [ 1.0010854] [ 1.0960815]
1660 2.45735e-06 [ 1.00101435] [ 1.09633803]
1680 2.14643e-06 [ 1.00094795] [ 1.09657764]
1700 1.87454e-06 [ 1.00088596] [ 1.09680164]
1720 1.63722e-06 [ 1.00082779] [ 1.09701097]
1740 1.42946e-06 [ 1.00077379] [ 1.09720671]
1760 1.24832e-06 [ 1.000723] [ 1.0973897]
1780 1.0903e-06 [ 1.00067568] [ 1.09756064]
1800 9.52373e-07 [ 1.00063157] [ 1.09772027]
1820 8.31732e-07 [ 1.00059009] [ 1.09786952]
1840 7.2643e-07 [ 1.00055146] [ 1.09800899]
1860 6.34432e-07 [ 1.00051534] [ 1.09813929]
1880 5.54004e-07 [ 1.00048161] [ 1.09826112]
1900 4.83851e-07 [ 1.00045013] [ 1.09837484]
1920 4.22678e-07 [ 1.00042081] [ 1.09848118]
1940 3.6903e-07 [ 1.00039315] [ 1.0985806]
1960 3.2238e-07 [ 1.0003674] [ 1.09867358]
1980 2.81497e-07 [ 1.00034332] [ 1.09876025]
2000 2.46027e-07 [ 1.00032103] [ 1.09884131]
[ 6.10044622]
[ 3.59964371]
[ 2.5993228  4.5999651]
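
The new training data follow y = x + 1.1, so W converges toward 1 and b toward 1.1. A quick sanity check in plain Python, plugging in the final values printed above:

# Reproduce the predictions from the learned parameters
W_final, b_final = 1.00032103, 1.09884131  # final step-2000 values above
for x in [5, 2.5, 1.5, 3.5]:
    print(x, W_final * x + b_final)
# 5 -> 6.1004, 2.5 -> 3.5996, 1.5 -> 2.5993, 3.5 -> 4.6000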
