In [25]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

In [26]:
%matplotlib inline

In [15]:
a = tf.add(2,3)

In [16]:
print(a)


Tensor("Add_5:0", shape=(), dtype=int32)

In [17]:
with tf.Session() as sess:
    print(sess.run(a))


5

In [18]:
a = 1
b = 2

In [19]:
_add = tf.add(a,b)
_mul = tf.multiply(a,b)
with tf.Session() as sess:
    print(sess.run([_add, _mul]))  # fetches can be passed as a list


[3, 2]
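
A single `sess.run` call accepts a list of fetches and evaluates the graph once, returning the results in the same order. In TensorFlow 1.x, `Session.run` also accepts a dict of fetches, which keeps the results labelled (a minimal sketch of the same computation):

In [ ]:
with tf.Session() as sess:
    # Fetches can be a nested structure; a dict returns a dict of results.
    print(sess.run({'add': _add, 'mul': _mul}))  # {'add': 3, 'mul': 2}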

In [20]:
x = 2
y = 3
op1 = tf.add(x,y)
op2 = tf.multiply(x,y)
op3 = tf.pow(op2,op1)

In [21]:
with tf.Session() as sess:
    print(sess.run(op3))


7776
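
Running `op3` pulls in its dependencies automatically: the session evaluates `op2 = 2 * 3 = 6` and `op1 = 2 + 3 = 5`, then computes `pow(6, 5) = 7776`.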

In [39]:
num_points = 1000
vectors_set = []

In [40]:
for i in range(num_points):
    x1 = np.random.normal(.0, 1.0)
#     y1 = x1 * 0.1 + 0.3 + np.random.normal(0., 0.03)
    y1 = np.sin(x1) + np.random.normal(0., 0.1)
    vectors_set.append([x1,y1])
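
Two generators appear here: the active line draws y = sin(x) plus N(0, 0.1) noise, while the commented-out line draws linear data, y = 0.1x + 0.3 plus N(0, 0.03) noise. A straight line can only approximate sin(x) near the origin, so the linear model defined below fits the commented-out dataset much more closely.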

In [41]:
x_data = [v[0] for v in vectors_set]
y_data = [v[1] for v in vectors_set]

In [42]:
plt.plot(x_data, y_data, 'ro', label='data')
plt.legend()
plt.show()



In [29]:
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
y = W*x_data + b
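
Note that `x_data` is a plain Python list: TensorFlow converts it to a constant tensor of shape (1000,), and the [1]-shaped `W` and `b` broadcast across all points, so `y` holds one prediction per data point. A quick shape check (a minimal sketch):

In [ ]:
print(y.get_shape())  # (1000,) -- one prediction per point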

In [30]:
loss = tf.reduce_mean(tf.square(y - y_data))
optimizer = tf.train.GradientDescentOptimizer(0.5)
train = optimizer.minimize(loss)
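
`loss` is the mean squared error over all 1000 points, and each run of `train` applies one gradient-descent step: W <- W - 0.5 * dloss/dW, and likewise for b. A minimal sketch of what `minimize` builds, assuming the same graph (`manual_step` is a hypothetical stand-in, not part of the original notebook):

In [ ]:
grads = tf.gradients(loss, [W, b])              # dloss/dW, dloss/db
manual_step = [tf.assign_sub(W, 0.5 * grads[0]),  # W <- W - lr * grad
               tf.assign_sub(b, 0.5 * grads[1])]  # b <- b - lr * grad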

In [32]:
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(100):
        sess.run(train)
        print(step, sess.run(W), sess.run(b), sess.run(loss))


0 [ 0.15690017] [ 0.30027756] 0.00187275
1 [ 0.13903181] [ 0.30037194] 0.00133453
2 [ 0.12677942] [ 0.30044121] 0.00108146
3 [ 0.11837791] [ 0.30048868] 0.000962468
4 [ 0.11261695] [ 0.30052122] 0.000906519
5 [ 0.10866664] [ 0.30054358] 0.000880213
6 [ 0.10595789] [ 0.30055887] 0.000867844
7 [ 0.10410049] [ 0.30056936] 0.000862029
8 [ 0.10282686] [ 0.30057657] 0.000859294
9 [ 0.10195352] [ 0.30058151] 0.000858008
10 [ 0.10135467] [ 0.30058488] 0.000857404
11 [ 0.10094404] [ 0.30058721] 0.000857119
12 [ 0.10066247] [ 0.30058879] 0.000856986
13 [ 0.1004694] [ 0.30058989] 0.000856923
14 [ 0.100337] [ 0.30059063] 0.000856893
15 [ 0.10024621] [ 0.30059114] 0.00085688
16 [ 0.10018396] [ 0.3005915] 0.000856873
17 [ 0.10014128] [ 0.30059174] 0.00085687
18 [ 0.10011201] [ 0.30059192] 0.000856868
19 [ 0.10009194] [ 0.30059204] 0.000856868
20 [ 0.10007818] [ 0.30059209] 0.000856868
21 [ 0.10006874] [ 0.30059215] 0.000856867
22 [ 0.10006227] [ 0.30059218] 0.000856867
23 [ 0.10005783] [ 0.30059221] 0.000856867
24 [ 0.10005479] [ 0.30059224] 0.000856867
25 [ 0.10005271] [ 0.30059224] 0.000856867
26 [ 0.10005128] [ 0.30059224] 0.000856867
27 [ 0.10005029] [ 0.30059227] 0.000856867
28 [ 0.10004962] [ 0.30059227] 0.000856867
29 [ 0.10004916] [ 0.30059227] 0.000856867
30 [ 0.10004885] [ 0.30059227] 0.000856867
31 [ 0.10004863] [ 0.30059227] 0.000856867
32 [ 0.10004848] [ 0.30059227] 0.000856867
33 [ 0.10004838] [ 0.30059227] 0.000856867
34 [ 0.10004831] [ 0.30059227] 0.000856867
35 [ 0.10004826] [ 0.30059227] 0.000856867
36 [ 0.10004823] [ 0.30059227] 0.000856867
37 [ 0.10004821] [ 0.30059227] 0.000856867
38 [ 0.10004819] [ 0.30059227] 0.000856867
39 [ 0.10004818] [ 0.30059227] 0.000856867
40 [ 0.10004817] [ 0.30059227] 0.000856867
41 [ 0.10004816] [ 0.30059227] 0.000856867
42 [ 0.10004816] [ 0.30059227] 0.000856867
... (steps 43-98 identical to step 42)
99 [ 0.10004816] [ 0.30059227] 0.000856867
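
The fit converges after about 40 steps to W ≈ 0.100 and b ≈ 0.301, which matches the commented-out linear generator y = 0.1x + 0.3; the final loss ≈ 0.00086 is close to the noise variance 0.03^2 = 0.0009, so this run appears to have been trained on the linear dataset rather than the sin(x) one. To visualize the result, the learned parameters can be plotted over the data (a minimal sketch that retrains on whatever data was baked into the graph when `y` was defined, since the earlier session and its variable values were already closed):

In [ ]:
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(100):
        sess.run(train)
    w_val, b_val = sess.run([W, b])

plt.plot(x_data, y_data, 'ro', label='data')
plt.plot(x_data, w_val * np.array(x_data) + b_val, 'b', label='fit')
plt.legend()
plt.show()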

In [ ]: