In [1]:
import tensorflow as tf
In [2]:
x_train = [1,2,3]
y_train = [1,2,3]
In [3]:
w = tf.Variable(tf.random_normal([1]), name='weight')  # in tf.random_normal([1]), [1] is the shape
b = tf.Variable(tf.random_normal([1]), name='bias')
hypothesis = x_train * w + b
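The hypothesis is just an elementwise linear map over the training inputs. A minimal NumPy sanity check (not part of the original notebook; the values w=1, b=0 are the known optimum for this data):
In [ ]:
import numpy as np

# Hypothetical parameter values for illustration: with w == 1 and b == 0
# the model reproduces y = x exactly, the optimum for this data.
w_np, b_np = np.array([1.0]), np.array([0.0])
x_np = np.array([1.0, 2.0, 3.0])
print(x_np * w_np + b_np)  # [ 1.  2.  3.]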
In [4]:
cost = tf.reduce_mean(tf.square(hypothesis - y_train))
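The cost is the mean squared error between predictions and targets. The same quantity written out by hand (a sketch with made-up prediction values):
In [ ]:
import numpy as np

# Hand computation of the same cost: the mean of squared residuals
# between hypothetical predictions p and the targets.
p = np.array([1.1, 2.0, 2.9])
targets = np.array([1.0, 2.0, 3.0])
print(np.mean((p - targets) ** 2))  # ~0.00667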
In [5]:
t = [1,23,4,5]
tf.reduce_mean(t)  # builds a graph node; nothing is computed until it is run in a session
Out[5]:
<tf.Tensor 'Mean_1:0' shape=() dtype=int32>
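Note the dtype in the output: with an integer list, reduce_mean is computed in int32, so the result truncates. Evaluating the node requires a session (a sketch; the cast shows the exact mean):
In [ ]:
# (1 + 23 + 4 + 5) / 4 = 8.25, but int32 arithmetic truncates the mean to 8;
# casting to float32 gives the exact value.
sess_tmp = tf.Session()
print(sess_tmp.run(tf.reduce_mean(t)))                       # 8
print(sess_tmp.run(tf.reduce_mean(tf.cast(t, tf.float32))))  # 8.25
sess_tmp.close()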
In [6]:
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)
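minimize(cost) adds ops that compute the gradients of cost with respect to w and b and apply the update w ← w − learning_rate · ∂cost/∂w. Roughly the same step written out by hand (a sketch, not how the notebook proceeds):
In [ ]:
# Manual version of the same gradient-descent update, for illustration only:
# minimize() is essentially compute_gradients() followed by apply_gradients().
gw, gb = tf.gradients(cost, [w, b])
manual_train = [w.assign(w - 0.01 * gw), b.assign(b - 0.01 * gb)]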
In [7]:
sess = tf.Session()
sess.run(tf.global_variables_initializer())  # we declared two variables above, w and b; they must be initialized before use
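Once initialized, the variables hold their random starting values and can be inspected directly (a quick check, not in the original notebook):
In [ ]:
print(sess.run([w, b]))  # two arrays of shape [1], drawn from a standard normal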
In [8]:
for step in range(2001):
    sess.run(train)
    if step % 20 == 0:
        print(step, sess.run(cost), sess.run(w), sess.run(b))
0 2.47806 [ 0.47580069] [-0.46648303]
20 0.0275064 [ 1.02466393] [-0.21395126]
40 0.00482547 [ 1.07342148] [-0.1819379]
60 0.00419982 [ 1.07472396] [-0.17129664]
80 0.00381268 [ 1.07166481] [-0.16304719]
100 0.00346273 [ 1.06833994] [-0.15536568]
120 0.00314491 [ 1.06513238] [-0.14806232]
140 0.00285625 [ 1.06207168] [-0.14110382]
160 0.00259408 [ 1.05915439] [-0.13447225]
180 0.00235598 [ 1.05637443] [-0.12815252]
200 0.00213974 [ 1.05372512] [-0.12212981]
220 0.00194335 [ 1.05120027] [-0.11639017]
240 0.00176499 [ 1.04879403] [-0.11092026]
260 0.00160299 [ 1.04650092] [-0.10570739]
280 0.00145586 [ 1.04431558] [-0.10073958]
300 0.00132223 [ 1.04223287] [-0.09600522]
320 0.00120088 [ 1.04024804] [-0.09149335]
340 0.00109065 [ 1.03835642] [-0.08719339]
360 0.000990547 [ 1.03655386] [-0.08309558]
380 0.000899634 [ 1.03483593] [-0.07919036]
400 0.000817058 [ 1.03319883] [-0.07546873]
420 0.000742066 [ 1.03163862] [-0.07192198]
440 0.000673956 [ 1.03015172] [-0.06854197]
460 0.000612099 [ 1.02873468] [-0.06532074]
480 0.000555916 [ 1.02738416] [-0.06225089]
500 0.000504894 [ 1.0260973] [-0.0593253]
520 0.000458551 [ 1.02487075] [-0.05653721]
540 0.000416462 [ 1.02370203] [-0.05388018]
560 0.00037824 [ 1.02258801] [-0.05134801]
580 0.000343523 [ 1.02152669] [-0.04893489]
600 0.000311994 [ 1.02051461] [-0.04663514]
620 0.000283354 [ 1.01955044] [-0.04444326]
640 0.000257345 [ 1.0186317] [-0.04235449]
660 0.000233726 [ 1.01775599] [-0.0403639]
680 0.000212273 [ 1.01692164] [-0.03846689]
700 0.00019279 [ 1.01612628] [-0.03665906]
720 0.000175096 [ 1.01536858] [-0.03493623]
740 0.000159023 [ 1.01464629] [-0.03329439]
760 0.000144427 [ 1.01395786] [-0.03172968]
780 0.000131172 [ 1.01330197] [-0.03023851]
800 0.000119133 [ 1.01267684] [-0.02881743]
820 0.000108196 [ 1.01208103] [-0.0274631]
840 9.82666e-05 [ 1.01151323] [-0.0261724]
860 8.92476e-05 [ 1.01097226] [-0.02494239]
880 8.10564e-05 [ 1.01045668] [-0.02377021]
900 7.36178e-05 [ 1.00996518] [-0.02265322]
920 6.68605e-05 [ 1.00949681] [-0.02158856]
940 6.07243e-05 [ 1.00905049] [-0.02057395]
960 5.51499e-05 [ 1.00862515] [-0.01960701]
980 5.00884e-05 [ 1.00821984] [-0.01868558]
1000 4.54904e-05 [ 1.00783348] [-0.01780742]
1020 4.13147e-05 [ 1.00746536] [-0.01697052]
1040 3.75237e-05 [ 1.00711453] [-0.01617296]
1060 3.40802e-05 [ 1.00678027] [-0.01541291]
1080 3.09506e-05 [ 1.0064615] [-0.01468863]
1100 2.81105e-05 [ 1.00615788] [-0.01399829]
1120 2.55301e-05 [ 1.00586843] [-0.01334041]
1140 2.3187e-05 [ 1.0055927] [-0.01271347]
1160 2.10589e-05 [ 1.00532985] [-0.01211598]
1180 1.91265e-05 [ 1.00507939] [-0.0115466]
1200 1.73705e-05 [ 1.00484073] [-0.01100399]
1220 1.57766e-05 [ 1.00461316] [-0.01048687]
1240 1.43279e-05 [ 1.00439632] [-0.009994]
1260 1.30131e-05 [ 1.00418973] [-0.00952429]
1280 1.18191e-05 [ 1.0039928] [-0.00907671]
1300 1.07345e-05 [ 1.00380528] [-0.00865015]
1320 9.74932e-06 [ 1.00362647] [-0.00824364]
1340 8.85383e-06 [ 1.003456] [-0.00785626]
1360 8.04154e-06 [ 1.00329351] [-0.00748705]
1380 7.30358e-06 [ 1.00313878] [-0.0071352]
1400 6.63332e-06 [ 1.00299132] [-0.00679989]
1420 6.02443e-06 [ 1.00285077] [-0.00648038]
1440 5.47139e-06 [ 1.0027169] [-0.00617588]
1460 4.96964e-06 [ 1.00258923] [-0.00588569]
1480 4.51346e-06 [ 1.00246763] [-0.00560917]
1500 4.09941e-06 [ 1.00235152] [-0.00534562]
1520 3.72316e-06 [ 1.00224113] [-0.00509445]
1540 3.38164e-06 [ 1.00213587] [-0.00485508]
1560 3.07108e-06 [ 1.0020355] [-0.00462696]
1580 2.78955e-06 [ 1.00193989] [-0.00440955]
1600 2.53345e-06 [ 1.0018487] [-0.00420238]
1620 2.30089e-06 [ 1.00176179] [-0.00400495]
1640 2.08989e-06 [ 1.00167918] [-0.00381679]
1660 1.8981e-06 [ 1.00160027] [-0.00363749]
1680 1.72408e-06 [ 1.00152504] [-0.00346662]
1700 1.56576e-06 [ 1.0014534] [-0.00330373]
1720 1.42219e-06 [ 1.00138521] [-0.00314854]
1740 1.29165e-06 [ 1.00132] [-0.00300064]
1760 1.17327e-06 [ 1.00125802] [-0.00285967]
1780 1.06553e-06 [ 1.00119913] [-0.00272537]
1800 9.67871e-07 [ 1.00114286] [-0.00259741]
1820 8.7924e-07 [ 1.00108922] [-0.00247548]
1840 7.98617e-07 [ 1.00103807] [-0.00235926]
1860 7.25233e-07 [ 1.00098932] [-0.00224847]
1880 6.58801e-07 [ 1.00094283] [-0.00214287]
1900 5.98327e-07 [ 1.0008986] [-0.00204223]
1920 5.43434e-07 [ 1.0008564] [-0.00194635]
1940 4.93732e-07 [ 1.00081599] [-0.00185502]
1960 4.48434e-07 [ 1.00077784] [-0.001768]
1980 4.07401e-07 [ 1.00074148] [-0.00168501]
2000 3.69948e-07 [ 1.00070667] [-0.00160589]
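After 2000 steps, w has converged to about 1.0007 and b to about -0.0016, i.e. the fitted line is close to y = x. Predictions for new inputs can be computed directly from the fitted parameters (a sketch; the graph itself is still tied to x_train):
In [ ]:
w_fit, b_fit = sess.run(w), sess.run(b)
print(5 * w_fit + b_fit)  # roughly [ 5.0019]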
In [8]:
## Use placeholders so the data can be fed at run time; rebuild the
## graph on x and y and re-initialize w and b before training again.
x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)
hypothesis = x * w + b
cost = tf.reduce_mean(tf.square(hypothesis - y))
train = optimizer.minimize(cost)
sess.run(tf.global_variables_initializer())
In [9]:
for step in range(2001):
    cost_val, w_val, b_val, _ = \
        sess.run([cost, w, b, train], feed_dict={x: [1, 2, 3], y: [1, 2, 3]})
    if step % 20 == 0:
        print(step, cost_val, w_val, b_val)
0 4.45886 [-0.07555465] [ 0.48607397]
20 0.124145 [ 0.59432214] [ 0.73567456]
40 0.0771592 [ 0.6723597] [ 0.7270425]
60 0.0697545 [ 0.69337285] [ 0.69534433]
80 0.0633493 [ 0.70831788] [ 0.66290092]
100 0.0575348 [ 0.72207677] [ 0.63176948]
120 0.052254 [ 0.73514301] [ 0.6020807]
140 0.0474579 [ 0.74759072] [ 0.57378536]
160 0.0431021 [ 0.75945312] [ 0.54681951]
180 0.039146 [ 0.77075797] [ 0.52112103]
200 0.035553 [ 0.78153145] [ 0.49663028]
220 0.0322898 [ 0.79179871] [ 0.47329047]
240 0.0293261 [ 0.80158347] [ 0.45104757]
260 0.0266344 [ 0.81090838] [ 0.42984995]
280 0.0241898 [ 0.81979489] [ 0.40964851]
300 0.0219696 [ 0.82826382] [ 0.39039662]
320 0.0199531 [ 0.83633488] [ 0.37204942]
340 0.0181218 [ 0.84402645] [ 0.35456449]
360 0.0164585 [ 0.85135663] [ 0.33790129]
380 0.0149478 [ 0.85834235] [ 0.32202122]
400 0.0135759 [ 0.86499965] [ 0.30688742]
420 0.0123298 [ 0.87134421] [ 0.29246479]
440 0.0111982 [ 0.87739056] [ 0.27872005]
460 0.0101703 [ 0.88315272] [ 0.26562124]
480 0.00923685 [ 0.88864422] [ 0.25313798]
500 0.00838907 [ 0.89387745] [ 0.24124141]
520 0.00761909 [ 0.89886481] [ 0.229904]
540 0.00691979 [ 0.9036178] [ 0.21909942]
560 0.00628466 [ 0.90814739] [ 0.20880258]
580 0.00570782 [ 0.91246414] [ 0.19898959]
600 0.00518395 [ 0.91657799] [ 0.18963781]
620 0.00470813 [ 0.92049849] [ 0.18072556]
640 0.00427601 [ 0.92423475] [ 0.17223215]
660 0.00388354 [ 0.92779541] [ 0.16413793]
680 0.00352709 [ 0.93118876] [ 0.15642405]
700 0.00320337 [ 0.93442261] [ 0.14907272]
720 0.00290935 [ 0.93750459] [ 0.1420669]
740 0.00264232 [ 0.94044161] [ 0.13539028]
760 0.00239979 [ 0.94324064] [ 0.12902741]
780 0.00217953 [ 0.94590807] [ 0.12296362]
800 0.00197949 [ 0.94845015] [ 0.1171848]
820 0.00179781 [ 0.95087284] [ 0.11167759]
840 0.00163279 [ 0.95318162] [ 0.10642914]
860 0.00148293 [ 0.95538193] [ 0.10142737]
880 0.00134682 [ 0.95747882] [ 0.09666066]
900 0.0012232 [ 0.95947701] [ 0.092118]
920 0.00111093 [ 0.96138155] [ 0.0877888]
940 0.00100897 [ 0.96319652] [ 0.08366304]
960 0.000916358 [ 0.96492612] [ 0.07973117]
980 0.000832256 [ 0.96657449] [ 0.07598409]
1000 0.000755866 [ 0.96814537] [ 0.07241311]
1020 0.000686492 [ 0.96964222] [ 0.06901003]
1040 0.000623478 [ 0.9710691] [ 0.06576682]
1060 0.000566254 [ 0.97242874] [ 0.06267601]
1080 0.000514283 [ 0.97372448] [ 0.05973045]
1100 0.000467079 [ 0.97495937] [ 0.05692331]
1120 0.000424208 [ 0.97613615] [ 0.05424813]
1140 0.000385274 [ 0.97725767] [ 0.05169867]
1160 0.000349914 [ 0.97832644] [ 0.04926904]
1180 0.000317799 [ 0.9793449] [ 0.04695361]
1200 0.000288626 [ 0.98031574] [ 0.04474704]
1220 0.000262138 [ 0.98124081] [ 0.04264407]
1240 0.000238077 [ 0.98212242] [ 0.04063996]
1260 0.000216224 [ 0.98296261] [ 0.03873003]
1280 0.000196379 [ 0.98376328] [ 0.03690984]
1300 0.000178353 [ 0.9845264] [ 0.03517522]
1320 0.000161983 [ 0.98525363] [ 0.03352205]
1340 0.000147116 [ 0.98594666] [ 0.03194662]
1360 0.000133614 [ 0.98660707] [ 0.03044523]
1380 0.000121349 [ 0.9872365] [ 0.02901442]
1400 0.000110212 [ 0.9878363] [ 0.02765086]
1420 0.000100097 [ 0.98840797] [ 0.02635139]
1440 9.09084e-05 [ 0.98895282] [ 0.02511297]
1460 8.25645e-05 [ 0.98947203] [ 0.02393275]
1480 7.49867e-05 [ 0.98996681] [ 0.02280798]
1500 6.81043e-05 [ 0.99043834] [ 0.02173603]
1520 6.18526e-05 [ 0.9908877] [ 0.02071445]
1540 5.61761e-05 [ 0.99131596] [ 0.01974093]
1560 5.10193e-05 [ 0.99172407] [ 0.01881317]
1580 4.63364e-05 [ 0.99211305] [ 0.017929]
1600 4.20826e-05 [ 0.99248374] [ 0.01708638]
1620 3.82211e-05 [ 0.99283695] [ 0.01628335]
1640 3.47129e-05 [ 0.99317354] [ 0.01551811]
1660 3.1527e-05 [ 0.99349439] [ 0.01478883]
1680 2.86329e-05 [ 0.9938001] [ 0.01409381]
1700 2.60053e-05 [ 0.99409145] [ 0.01343147]
1720 2.36183e-05 [ 0.99436915] [ 0.01280025]
1740 2.14504e-05 [ 0.99463379] [ 0.0121987]
1760 1.9482e-05 [ 0.99488592] [ 0.01162543]
1780 1.76942e-05 [ 0.99512625] [ 0.01107911]
1800 1.60697e-05 [ 0.99535537] [ 0.01055843]
1820 1.45947e-05 [ 0.99557364] [ 0.0100622]
1840 1.32551e-05 [ 0.99578166] [ 0.00958932]
1860 1.20386e-05 [ 0.99597991] [ 0.00913866]
1880 1.09337e-05 [ 0.99616879] [ 0.00870918]
1900 9.93008e-06 [ 0.99634886] [ 0.00829989]
1920 9.01843e-06 [ 0.99652046] [ 0.00790983]
1940 8.19117e-06 [ 0.99668396] [ 0.0075381]
1960 7.43891e-06 [ 0.99683982] [ 0.00718384]
1980 6.75652e-06 [ 0.99698836] [ 0.00684622]
2000 6.13626e-06 [ 0.99712986] [ 0.00652447]
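Because the graph is now built on placeholders, predictions for unseen inputs come from feeding them through hypothesis (values here assume the run above, where w ≈ 0.9971 and b ≈ 0.0065):
In [ ]:
print(sess.run(hypothesis, feed_dict={x: [5, 2.5]}))  # roughly [ 4.992  2.499]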