In [12]:
from IPython.display import display, HTML
display(HTML("<style>.container { width:70% !important; }</style>"))
In [4]:
#Create references to important directories we will use over and over
import os, sys
current_dir = os.getcwd()
SCRIPTS_HOME_DIR = current_dir
DATA_HOME_DIR = current_dir+'/data'
In [5]:
#import modules
from utils import *
%matplotlib inline
In [6]:
%cd $DATA_HOME_DIR
# Point path at the sample/ directory instead if you want a quicker run on a subset
path = DATA_HOME_DIR
train_path=path + '/train/binary/'
valid_path=path + '/valid/binary/'
In [7]:
img_rows, img_cols = 16, 32
in_shape = (img_rows, img_cols)
batch_size = 64
nb_classes = 3
In [13]:
gen = image.ImageDataGenerator(rescale=1. / 255)  # from keras.preprocessing.image
# Generates batches of tensor image data. The rescale factor is applied to every pixel
# before any other transformation (here, intensities go from 0-255 down to 0-1).
# "Looping in batches indefinitely" means the generator is an infinite iterator: it keeps
# yielding batches, cycling back to the start of the data once it has served a full pass,
# so the training loop decides when an epoch ends (e.g. via samples_per_epoch).
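In [ ]:
# A minimal sketch of that "looping indefinitely" behaviour, assuming channels-last
# ('tf') image ordering to match the model's input_shape below: flow() returns an
# iterator that never raises StopIteration, so calling next() more times than there
# are batches simply wraps around to the start of the data.
import numpy as np
demo_x = np.random.rand(5, img_rows, img_cols, 1)                  # 5 fake grayscale images
demo_y = np.eye(nb_classes)[np.random.randint(0, nb_classes, 5)]   # random one-hot labels
demo_flow = gen.flow(demo_x, demo_y, batch_size=2)
for _ in range(4):                 # 4 batches of 2 > 5 images, so the data wraps around
    xb, yb = next(demo_flow)
    print(xb.shape, yb.shape)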
In [9]:
train_batches = get_batches(train_path, batch_size=batch_size,
                            target_size=in_shape, color_mode="grayscale",
                            gen=gen)
val_batches = get_batches(valid_path, batch_size=batch_size,
                          target_size=in_shape, color_mode="grayscale",
                          gen=gen)
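In [ ]:
# Quick sanity check, assuming get_batches (from the course utils) wraps
# gen.flow_from_directory and hands back a Keras DirectoryIterator: pull one batch
# and confirm the shapes and the class-name -> label-index mapping.
imgs, labels = next(train_batches)
print(imgs.shape)                    # expected (batch_size, img_rows, img_cols, 1)
print(labels.shape)                  # expected (batch_size, nb_classes), one-hot
print(train_batches.class_indices)   # e.g. {'class_a': 0, ...} from the folder names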
In [ ]:
# Small convnet: two conv/pool blocks followed by a dense classifier (Keras 1 API).
model = Sequential([
    Convolution2D(32, 3, 3, border_mode='same', activation='relu',
                  input_shape=(img_rows, img_cols, 1)),   # 32 3x3 filters on 16x32x1 grayscale input
    MaxPooling2D(),                                       # halves spatial dims to 8x16
    Convolution2D(64, 3, 3, border_mode='same', activation='relu'),
    MaxPooling2D(),                                       # halves again to 4x8
    Flatten(),
    Dense(1024, activation='relu'),
    Dense(nb_classes, activation='softmax')               # one probability per class
])
model.compile(Adam(), loss='categorical_crossentropy', metrics=['accuracy'])
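In [ ]:
# Training sketch, assuming the Keras 1 generator API (consistent with the
# Convolution2D/border_mode calls above); in Keras 2 the arguments would be
# steps_per_epoch/epochs/validation_steps instead. nb_epoch=1 is a placeholder.
model.summary()
model.fit_generator(train_batches,
                    samples_per_epoch=train_batches.nb_sample,
                    nb_epoch=1,
                    validation_data=val_batches,
                    nb_val_samples=val_batches.nb_sample)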
In [ ]: