In [ ]:
%matplotlib inline
import matplotlib.pylab as plt
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD
import cv2, numpy as np
from keras import backend as K
from keras_model.vgg_19_keras import VGG_19
import pandas as pd
import sys
import joblib
import os
import glob
def listdir_nohidden(path):
    # glob with '*' skips dotfiles, so hidden files are excluded
    return glob.glob(os.path.join(path, '*'))
K.set_image_dim_ordering('th')
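In [ ]:
# Hedged sanity check: confirm Keras is really using Theano ('th') channels-first
# dimension ordering, since the weight conversion below assumes channels-first arrays.
print 'Image dim ordering:', K.image_dim_ordering()
print 'Backend:', K.backend()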
In [ ]:
if not os.path.exists('downloads'):
    os.makedirs('downloads')
if not os.path.exists('theano_model/weights'):
    os.makedirs('theano_model/weights')
if not os.path.exists('theano_model/weight_names'):
    os.makedirs('theano_model/weight_names')
print 'Downloaded Files:', listdir_nohidden('downloads')
if 'downloads/vgg19_weights.h5' in listdir_nohidden('downloads'):
    print 'Already Downloaded'
else:
    print 'download from https://drive.google.com/file/d/0Bz7KyqmuGsilZ2RVeVhKY0FyRmc/view?usp=sharing and move to downloads'
In [ ]:
model = VGG_19('downloads/vgg19_weights.h5')
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='categorical_crossentropy')
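In [ ]:
# Hedged sketch: run a forward pass on a dummy image to confirm the weights loaded.
# Assumes the VGG_19 helper expects Theano-ordered (3, 224, 224) inputs and outputs
# 1000 ImageNet class probabilities; adjust the shape if your model differs.
dummy = np.random.rand(1, 3, 224, 224).astype(np.float32)
preds = model.predict(dummy)
print 'Prediction shape:', preds.shape
print 'Top class index:', np.argmax(preds[0])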
In [ ]:
directory = 'theano_model/weights'
# Write all weights to output
weights_list = []
bias_list = []
for i, layer in enumerate(model.layers):
    weights = layer.get_weights()
    if len(weights) == 2:
        layer_name = layer.name
        weights_name = os.path.join(directory, layer_name + '_weights')
        bias_name = os.path.join(directory, layer_name + '_bias')
        weight, bias = weights
        joblib.dump(weight, weights_name)
        joblib.dump(bias, bias_name)
        weights_list.append(weights_name)
        bias_list.append(bias_name)
joblib.dump(bias_list, 'theano_model/weight_names/layer_names_bias')
joblib.dump(weights_list, 'theano_model/weight_names/layer_names_weights')
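In [ ]:
# Hedged sketch: reload one dumped array to confirm the joblib round trip
# preserved the data. Uses only the file lists written in the previous cell.
saved_names = joblib.load('theano_model/weight_names/layer_names_weights')
first_weight = joblib.load(saved_names[0])
print 'First saved weight file:', saved_names[0]
print 'Shape:', first_weight.shape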
In [ ]:
# Write all outputs to a text file for the summary
stdout = sys.stdout #keep a handle on the real standard output
sys.stdout = open('VGG19_Summary.txt', 'w')
model.summary()
sys.stdout.close()
sys.stdout = stdout
In [ ]:
from theano_model import vgg19_model
reload(vgg19_model)
VGG19 = vgg19_model.VGG19
layer_weights = vgg19_model.layer_weights
bias_weights = vgg19_model.bias_weights
In [ ]:
import numpy as np
In [ ]:
layer_names = layer_weights.keys()
# note: np.sort on strings is lexicographic, so e.g. '10_w' sorts before '2_w'
layer_names = np.sort(layer_names)
In [ ]:
layer_names
In [ ]:
import pickle
normalized = pickle.load(open('vgg19_normalized.pkl', 'rb'))
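In [ ]:
# Hedged sketch: inspect the pickle before converting it. This assumes the file
# follows the normalized-VGG19 layout used later in this notebook, where
# 'param values' holds alternating weight/bias arrays for each layer.
print 'Keys:', normalized.keys()
print 'Number of parameter arrays:', len(normalized['param values'])
for arr in normalized['param values'][:4]:
    print arr.shape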
In [ ]:
import os
import joblib
directory = 'theano_model/weights'
# Dump the normalized parameter arrays: even indices are weights, odd indices
# are the matching biases, so the layer counter advances once per weight/bias pair.
count = 0
for i, norm_weights in enumerate(normalized['param values']):
    if i % 2 == 0:
        joblib.dump(norm_weights, os.path.join(directory, str(count) + '_w'))
    else:
        joblib.dump(norm_weights, os.path.join(directory, str(count) + '_b'))
        count = count + 1
In [ ]:
normalized.keys()
In [ ]:
# Read the saved weight and bias arrays back into a dictionary
cwd = os.getcwd()
direct = os.path.join(cwd, 'theano_model', 'weights')
layer_weights = {}
for idx in range(16):
    layer_weights[str(idx) + '_w'] = joblib.load(os.path.join(direct, str(idx) + '_w'))
for idx in range(16):
    layer_weights[str(idx) + '_b'] = joblib.load(os.path.join(direct, str(idx) + '_b'))
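In [ ]:
# Hedged sketch: print the shape of each weight/bias pair loaded above.
# Under Theano channels-first ordering the leading weight dimension (number of
# filters) should match the bias length for every conv layer.
for idx in range(16):
    w = layer_weights[str(idx) + '_w']
    b = layer_weights[str(idx) + '_b']
    print idx, w.shape, b.shape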
In [ ]:
layer_weights.keys()
In [ ]: