In [27]:
# 거창하게 GUN이라고 했지만 사실 해상도를 거의 두배씩 두번 올렸다.

import tensorflow as tf
from PIL import Image
import numpy as np

#이미지, 상수들
learning_rate=1e-4
ratio=16/9
H1=360
W1=int(ratio*H1)
H15=480
W15=int(ratio*H15)
H2=720
W2=int(ratio*H2)
path="../06/"
pref1="360p/"
pref2="720p/"
suff1="_360.jpg"
suff2="_720.jpg"
train_num=500#1000
file_num=1#6#30
#batch_num=1000

In [39]:
#가중치 초기화 함수
def weight_variable(shape, name):
  initial = tf.truncated_normal(shape, stddev=0.01)
  return tf.Variable(initial, name=name)
#절편 초기화 함수
def bias_variable(shape, name):
  initial = 0.#tf.constant(0.1, shape=shape)
  return tf.Variable(initial, name=name)
#2D 컨벌루션 실행
def conv2d(x, W, B):
  return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')#+B

def getimage(idx):
    img_1=Image.open(path+pref1+str(idx)+suff1)
    array_1=np.array(img_1)[:, :]
    array_1=array_1.astype(np.float32)
    
    img_1_720 = img_1.resize((W2, H2), Image.BILINEAR)
    array_1_720=np.array(img_1_720)[:, :]
    array_1_720=array_1_720.astype(np.float32)

    img_2=Image.open(path+pref2+str(idx)+suff2)
    array_2=np.array(img_2)[:, :, 0:3]
    array_2=array_2.astype(np.float32)
    return array_1,array_1_720, array_2

def l_relu(x, alpha=0.):
    return tf.nn.relu(x)-alpha*tf.nn.relu(-x)

def asImage(tensor):
    result = tensor[0].astype(np.uint8)
    return Image.fromarray(result, 'RGB')

def showres(index, steps):
    inputarray ,test144, test720 = getimage(index)
    A=sess.run(y_result, feed_dict={x_image144:[inputarray], x_image:[test144], y_image:[test720]})
    result = A.astype(np.uint8)
    #Image.fromarray(array144, 'RGB').save('results/img144.jpg')
    #Image.fromarray(array720, 'RGB').save('results/img720.jpg')
    asImage(result).save('results/'+str(steps)+'.jpg')

In [47]:
x_image144 = tf.placeholder(np.float32, shape=[None, H1, W1, 3], name='original_image_360')
x_image = tf.placeholder(np.float32, shape=[None, H2, W2, 3], name='bilinear_magnified_image_720')
y_image = tf.placeholder(np.float32, shape=[None, H2, W2, 3], name='answer_image')

weight1 = weight_variable([20, 20, 3, 30], name = 'weight1')
bias1 = bias_variable([30], name = 'bias1')
weight2 = weight_variable([1, 1, 30, 20], name = 'weight2')
bias2 = bias_variable([20], name='bias2')
weight3 = weight_variable([5, 5, 20, 3], name = 'weight3')
bias3 = bias_variable([3], name = 'bias3')

weight4 = weight_variable([20, 20, 3, 30], name = 'weight4')
bias4 = bias_variable([30], name = 'bias4')
weight5 = weight_variable([1, 1, 30, 20], name = 'weight5')
bias5 = bias_variable([20], name='bias5')
weight6 = weight_variable([5, 5, 20, 3], name = 'weight6')
bias6 = bias_variable([3], name = 'bias6')

In [48]:
Img1=tf.image.resize_bilinear(x_image144, (H15,W15))
F1 = l_relu(conv2d(Img1, weight1, bias1), alpha = 0.3)
F2 = l_relu(conv2d(F1, weight2, bias2), alpha = 0.3)
F3 = l_relu(conv2d(F2, weight3, bias3), alpha = 0.3)
Img2=tf.image.resize_bilinear(F3, (H2,W2))
F4 = l_relu(conv2d(Img2, weight4, bias4), alpha = 0.3)
F5 = l_relu(conv2d(F4, weight5, bias5), alpha = 0.3)
F6 = l_relu(conv2d(F5, weight6, bias6), alpha = 0.3)
y_result= l_relu(x_image+F6,alpha=0.)

In [ ]:
cost = tf.reduce_mean(tf.square(y_image-y_result))
train_step = tf.train.AdamOptimizer(learning_rate).minimize(cost)
saver = tf.train.Saver()
sess = tf.Session()
sess.run(tf.global_variables_initializer())
#saver.restore(sess, "01/models.ckpt")

In [ ]:
for steps in range(train_num):
    for index in range(1, file_num+1):
        ##array144은 144p인 이미지를 확대해서 720p으로 만들어놓은것.
        inputarray, array144, array720 = getimage(index)
        #asImage([array720]).show()
        sess.run(train_step, feed_dict={x_image144:[inputarray], x_image:[array144], y_image:[array720]})
    inputarray, array144, array720 = getimage(index)
    print (str(steps).zfill(3), sess.run(cost, feed_dict={x_image144:[inputarray],x_image:[array144], y_image:[array720]}))
    if(steps%5==0):
        showres(index, steps)
print ("끝났다. 근데 사실 이걸 본적은 한번도 없다.")


000 67.6674
001 67.667
002 67.6672
003 67.6683
004 67.6692
005 67.6697
006 67.6696
007 67.6691
008 67.6679
009 67.6665
010 67.6643
011 67.6617
012 67.6585
013 67.655
014 67.651
015 67.6462
016 67.6406
017 67.6335
018 67.6251
019 67.6151
020 67.6025
021 67.5865
022 67.5669
023 67.5436
024 67.5158
025 67.482
026 67.44
027 67.3876
028 67.3241
029 67.2511
030 67.1689
031 67.0757
032 66.9643
033 66.8333
034 66.6815
035 66.5079
036 66.3109
037 66.0887
038 65.8404
039 65.5676
040 65.2714
041 64.9569
042 64.6299
043 64.2959
044 63.9645
045 63.6485
046 63.3666
047 63.1327
048 62.9552
049 62.8364
050 62.7912
051 62.8617
052 62.9474
053 62.9509
054 62.9052
055 62.8225
056 62.709
057 62.5748
058 62.451
059 62.3113
060 62.2311
061 62.1934
062 62.1749
063 62.1549
064 62.1239
065 62.0771
066 62.0284
067 61.9747
068 61.9191
069 61.8638
070 61.8083
071 61.756
072 61.7033
073 61.6525
074 61.607
075 61.5611
076 61.5095
077 61.4543
078 61.3966
079 61.3368
080 61.2767
081 61.2171
082 61.1583
083 61.1002
084 61.041
085 60.9811
086 60.9191
087 60.855
088 60.7883
089 60.7229
090 60.6583
091 60.595
092 60.53
093 60.4625
094 60.3948
095 60.3258
096 60.2567
097 60.1873
098 60.1174
099 60.048
100 59.9789
101 59.909
102 59.8377
103 59.7672
104 59.6981
105 59.6314
106 59.5635
107 59.4942
108 59.4269
109 59.3615
110 59.2958
111 59.2292
112 59.1624
113 59.097
114 59.0336
115 58.9713
116 58.9098
117 58.8488
118 58.789
119 58.73
120 58.6718
121 58.6139
122 58.5561
123 58.4983
124 58.4402
125 58.3821
126 58.3242
127 58.2654
128 58.2057
129 58.145
130 58.0835
131 58.0212
132 57.9584
133 57.8943
134 57.8301
135 57.7644
136 57.6978
137 57.6302
138 57.5629
139 57.4936
140 57.424
141 57.3539
142 57.2829
143 57.2109
144 57.1382
145 57.0647
146 56.9902
147 56.9144
148 56.837
149 56.7596
150 56.6805
151 56.6003
152 56.5191
153 56.4369

In [ ]: