# New architecture for handwriting

 - MNIST sequence
 - Datasets
 - Dilated convolutions
 - CTC


Next steps:

- replace each 1D convolution with a module combining batch normalization and 1x1 convolutions (see the sketch below)
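
A minimal sketch of what such a module could look like, assuming the same `tf.layers` API already used in the model cell below; the name `conv1d_bn_block` and the `training` flag are illustrative, not part of the current code:

    def conv1d_bn_block(x, filters, kernel_size, dilation_rate, training):
        # Dilated 1D convolution, no activation yet
        y = tf.layers.conv1d(x, filters=filters, kernel_size=kernel_size,
                             padding='SAME', dilation_rate=dilation_rate,
                             activation=None)
        # Batch normalization before the non-linearity
        y = tf.layers.batch_normalization(y, training=training)
        y = tf.nn.relu(y)
        # 1x1 convolution to mix channels and control their number
        y = tf.layers.conv1d(y, filters=filters, kernel_size=1,
                             padding='SAME', activation=tf.nn.relu)
        return y

With `tf.layers.batch_normalization`, the update ops collected in `tf.GraphKeys.UPDATE_OPS` would also have to be run together with `train_op`.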

In [1]:
import os
import sys
import argparse
import math
import logging
import random
import cv2

import numpy as np 
import tensorflow as tf
import pandas as pd

from PIL import Image

import matplotlib.pyplot as plt
%matplotlib inline
plt.rcParams['figure.figsize'] = (10, 10) 

data_path = '/home/ubuntu/data/oxford_syntetic_text/mnt/ramdisk/max/90kDICT32px'

experiment_dir ='/home/ubuntu/data/oxford_syntetic_text/models/test03'

In [2]:
def dense_to_sparse(dense_tensor, out_type):
    indices = tf.where(tf.not_equal(dense_tensor, tf.constant(-1, dense_tensor.dtype)))
    values = tf.gather_nd(dense_tensor, indices)
    shape = tf.shape(dense_tensor, out_type=out_type)
    return tf.SparseTensor(indices, values, shape)
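# Illustrative note (not in the original code): dense labels padded with -1,
# e.g. [[3, 7, -1], [1, -1, -1]], become a SparseTensor with indices
# [[0, 0], [0, 1], [1, 0]] and values [3, 7, 1], which is the label format
# tf.nn.ctc_loss expects.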


def decode_word(l, decoder_dict, blank_code=-1):
    # Decode a list of label codes to a string, skipping the padding/blank code
    return ''.join([decoder_dict[x] for x in l if x != blank_code])
    
    

def evaluate_wer(pred, real):
    # Word error rate: fraction of predicted words that do not exactly match the label
    wer = 0
    for p, r in zip(pred, real):
        try:
            if p != r:
                wer += 1
        except Exception:
            print(p, r)
    wer = wer / len(pred)
    return wer

In [3]:
df_train = pd.read_csv(os.path.join(data_path,'annotation_train.txt'), delimiter=' ', names=['file', 'n'])
df_val   = pd.read_csv(os.path.join(data_path,'annotation_val.txt'  ), delimiter=' ', names=['file', 'n'])
df_test  = pd.read_csv(os.path.join(data_path,'annotation_test.txt' ), delimiter=' ', names=['file', 'n'])
print('Images train: ', df_train.shape[0])
print('Images valid: ', df_val.shape[0])
print('Images test:  ', df_test.shape[0])

        
# Cuda devices
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID" 
os.environ["CUDA_VISIBLE_DEVICES"] = '0'
gpu_options = tf.GPUOptions(allow_growth = True)
    

# Decoder dict and num_classes
char_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E',
     'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',
     'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k',
     'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']    
encoder_dict = {}
for i, c in enumerate(char_list):
    encoder_dict[c] = i
decoder_dict = {}
for e in encoder_dict:
    decoder_dict[encoder_dict[e]]=e
num_classes = len(decoder_dict) + 1
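# Illustrative example (not in the original code): with this mapping,
# 'Cat' encodes to [12, 36, 55] and decode_word([12, 36, 55], decoder_dict)
# returns 'Cat'. num_classes adds one extra class for the CTC blank symbol.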


Images train:  7224612
Images valid:  802734
Images test:   891927

In [4]:
def adjust_image(img_file, x_size=192, y_size=48, x_blanks_ini=0):
    '''
    Resize a word image to height y_size keeping the aspect ratio, then pad
    with zero columns on the left (x_blanks_ini) and on the right up to x_size.
    Returns [] for empty (all-black) images.
    '''
    im = Image.open(img_file)
    if np.max(im) > 0:
        x, y = im.size
        factor = y_size / y
        new_x = min(max(1, int(factor * x)), x_size - x_blanks_ini)
        img = np.array(im.resize((new_x, y_size)))
        if len(img.shape) == 3:  # keep a single channel for color images
            img = img[:, :, 0]
        img_adjusted = np.concatenate([np.zeros((y_size, x_blanks_ini)), img], axis=1)
        new_x_size = img_adjusted.shape[1]
        if new_x_size < x_size:
            img_adjusted = np.concatenate([img_adjusted, np.zeros((y_size, x_size - new_x_size))], axis=1)

        if np.max(img_adjusted) > 0:
            return img_adjusted
        else:
            return []
    else:
        return []


def read_word_image(f, target_c, x_size=192, y_size=48, target_max_size=19):
    '''
    Read one word image and its label. Returns the normalized image, the
    encoded target padded with -1 up to target_max_size, the image length
    in pixels and the target length.
    '''
    # adjust and resize to the final size
    img_adjusted = adjust_image(f, x_size=x_size, y_size=y_size)

    if len(img_adjusted) > 0:

        # Calculate image_len (last non-empty column)
        image_len = np.max(np.nonzero(np.max(img_adjusted, axis=0)))

        # Target
        target_ini = [encoder_dict[k] for k in target_c]  # encode characters to integer codes
        if len(target_ini) > target_max_size:  # TODO: handle long targets better; for now truncate
            target_ini = target_ini[:target_max_size]
        target_len = len(target_ini)
        target = np.full([target_max_size], -1, dtype=np.int32)  # -1 marks padding
        target[:target_len] = target_ini

        return img_adjusted/255, list(target), image_len, target_len

    else:
        return [], None, None, None

In [ ]:


In [5]:
def data_generator(file_list, path_files=data_path, batch_size=16, max_files=0):
    if max_files==0: # all files
        num_batches = len(file_list)//batch_size
    else:
        num_batches = min(max_files//batch_size, len(file_list)//batch_size)
    n = 0    
    
    # Shuffle files
    np.random.shuffle(file_list)
    
    for j in range(num_batches):
        images_batch = []
        images_len_batch = []
        target_batch = []
        target_len_batch = []
        for i in range(batch_size):
            f = os.path.join(path_files, file_list[n])
            target_c = f.split('/')[-1].split('.')[0].split('_')[1]  # label is encoded in the filename: idx_WORD_id.jpg
            img, t, img_l, t_l = read_word_image(f, target_c, x_size=192, y_size=48, target_max_size=19)
            img = np.reshape(img, (48, 192, 1))
            images_batch += [img]
            images_len_batch += [img_l]
            target_batch += [t]
            target_len_batch += [t_l]
            n += 1
            
        yield np.array(images_batch), images_len_batch, target_batch, target_len_batch
            
# test
trn_generator = data_generator(list(df_test.file), path_files=data_path, batch_size=16, max_files=100) 
images_b, images_len_b, target_b, target_len_b = next(trn_generator)
print(images_b.shape)


(16, 48, 192, 1)

In [6]:
# Dilated 1D convolution stack: filters, kernel sizes and dilation rates
# (zip truncates to the 5 entries of filters_list, so the dilation of 16 is unused)
filters_list = [512, 512, 512, 512, 512]
kernels_list = [3, 3, 3, 3, 3]
dilations_list = [1, 1, 2, 4, 8, 16]

                        
if 1:
    # Model
    graph = tf.Graph()
    with graph.as_default():

        #with tf.device('/cpu:0'): # Check how to put this on CPU
        if 1:

            #Placeholders
            with tf.name_scope('inputs') as scope:

                # Batch placeholders: images, image lengths, dense labels (padded with -1) and label lengths
                images_batch_ph = tf.placeholder(tf.float32, shape=[None, 48, 192, 1], name='images_batch_ph')
                image_len_ph = tf.placeholder(tf.int32, shape=[None], name='image_len_ph')
                labels_batch_ph = tf.placeholder(tf.int32, shape=[None, 19], name='labels_batch_ph')
                labels_len_batch_ph = tf.placeholder(tf.int32, shape=[None], name='labels_len_batch_ph')
                
                # Convert target to sparse
                target = tf.cast(dense_to_sparse(labels_batch_ph, tf.int64), tf.int32)
                target_len = tf.cast(labels_len_batch_ph, tf.int32)

                # Dropout parameter
                #keep_prob = tf.placeholder(tf.float32, name='keep_prob')


        if 1:
            with tf.name_scope('model') as scope:


                # Two blocks of stacked 5x5 2D convolutions (20 then 50 filters),
                # each followed by 2x2 max pooling: 48x192 -> 24x96 -> 12x48
                conv2d = tf.layers.conv2d(images_batch_ph, 20, 5, padding='SAME')
                conv2d = tf.layers.conv2d(conv2d, 20, 5, padding='SAME')
                conv2d = tf.layers.max_pooling2d(conv2d, 2, 2)

                conv2d = tf.layers.conv2d(conv2d, 50, 5, padding='SAME')
                conv2d = tf.layers.conv2d(conv2d, 50, 5, padding='SAME')
                conv2d = tf.layers.max_pooling2d(conv2d, 2, 2)
                
                
                # Convert the feature maps to a 1D sequence: unstack the 50 filters
                conv2d_unstack = tf.unstack(conv2d, axis=-1)
                conv1_list = []
                for conv2_filter in conv2d_unstack: # each filter map has shape (batch, 12, 48)
                    conv2_filter_transpose = tf.transpose(conv2_filter, (0, 2, 1)) # to (batch, 48, 12): width becomes the time axis
                    conv1_list += [conv2_filter_transpose]
                # Concatenate along the last axis: (batch, 48, 12*50) = (batch, 48, 600)
                convf = tf.concat(conv1_list, axis=-1)
                
                # Final 1d convolutions stacked
                for filters, kernel, dilation in zip(filters_list, kernels_list, dilations_list):
                    convf_out = tf.layers.conv1d(convf, filters=filters, kernel_size=[kernel], activation=tf.nn.relu, padding='SAME', dilation_rate=[dilation])
                    convf = tf.concat([convf_out, convf], axis=-1) # Residual connections
    
                # Project to num_classes (characters + CTC blank) with a 1x1 convolution
                logits_input = tf.layers.conv1d(convf, filters=num_classes, kernel_size=[1], activation=tf.nn.relu, padding='SAME')
                
                
                

            # Create logits
            with tf.name_scope("Logit") as scope:
                logits = tf.transpose(logits_input, (1, 0, 2), name='logits') # Time major [time, batch, num_classes], as tf.nn.ctc_loss expects
                #variable_summaries(logits, 'logits')

            # Create CTC loss
            with tf.name_scope("loss") as scope:
                sequence_len = tf.ones_like(image_len_ph)*tf.constant(35) # fixed CTC sequence length of 35 frames (the time axis has 48 steps after pooling)
                sequence_len = tf.cast(sequence_len, tf.int32)
                loss = tf.nn.ctc_loss(target, logits, sequence_len, ignore_longer_outputs_than_inputs=True)    
                cost = tf.reduce_mean(loss, name='cost')
                cost_summary = tf.summary.scalar("cost", cost)


            #Optimizer
            with tf.name_scope("train") as scope:
                global_step = tf.Variable(0, trainable=False)
                optimizer = tf.train.MomentumOptimizer(learning_rate=0.0001, momentum=0.97)
                
                gvs = optimizer.compute_gradients(cost)
                #for i,t in enumerate(gvs):
                #    logger.info('gradients: %s - %s', i, t)
                #    variable_summaries(t, 'grad'+str(i))
                #capped_gvs = gvs
                capped_gvs = [(tf.clip_by_value(grad, -1.0, 1.0), var) for grad, var in gvs]
                train_op = optimizer.apply_gradients(capped_gvs, global_step=global_step)



            # decode CTC
            with tf.name_scope("predict") as scope:
                decoded, log_prob = tf.nn.ctc_beam_search_decoder(logits, sequence_len, merge_repeated=False)

                prediction = tf.cast(decoded[0], tf.int32, name='prediction')
                dense_prediction = tf.sparse_to_dense(prediction.indices, prediction.dense_shape,
                                                      prediction.values, default_value=-1)


            # Accuracy --> Levenshtein distance: CER / num_chars
            with tf.name_scope("accuracy") as scope:
                accuracy = tf.reduce_mean(tf.edit_distance(tf.cast(decoded[0], tf.int32), target), name='accuracy')
                accuracy_summary = tf.summary.scalar("accuracy", accuracy)

            # Summaries
            summaries_dir = os.path.join(experiment_dir)
            merged = tf.summary.merge_all()

            # Saver
            tf.add_to_collection('images_batch_ph', images_batch_ph)
            tf.add_to_collection('image_len_ph', image_len_ph)
            tf.add_to_collection('labels_batch_ph', labels_batch_ph)
            tf.add_to_collection('labels_len_batch_ph', labels_len_batch_ph)
            tf.add_to_collection('logits', logits)
            tf.add_to_collection('dense_prediction', dense_prediction)

            saver = tf.train.Saver(max_to_keep=5)



    print('Model created!')


Model created!

In [7]:
def train_step(epoch, decoder_dict):
    '''
    Run one training epoch over a subset of the train set, logging cost,
    CER and WER every 500 steps.
    '''
    step=1
    cost_l = []
    acc_l = []
    train_generator = data_generator(list(df_train.file)[:150000])
    for img, img_len, t, t_len in train_generator:
        _, ce, acc = sess.run([train_op, cost, accuracy], 
                              feed_dict={images_batch_ph: img, image_len_ph: img_len,
                                        labels_batch_ph: t, labels_len_batch_ph: t_len})
        cost_l += [ce]
        acc_l += [acc]
        step += 1
        if step%500 == 0:
            # Summaries (train)
            summary_str, pred = sess.run([merged, dense_prediction], 
                                  feed_dict={images_batch_ph: img, image_len_ph: img_len,
                                        labels_batch_ph: t, labels_len_batch_ph: t_len}) 
            train_writer.add_summary(summary_str, epoch)
            pred = [decode_word(w, decoder_dict) for w in pred]
            real = [decode_word(w, decoder_dict) for w in t]
            wer = evaluate_wer(pred, real)
            
            print('TRAIN - Epoch:', epoch,
                  ' - Step:', step,
                  ' - Cost:', np.mean(cost_l),
                  ' - CER:', np.mean(acc_l),
                  ' - WER (step):', wer)
            cost_l = [] #Reset
            acc_l = []  
            
            print('\nTrain examples pred vs real:')
            for i in range(7):
                print(pred[i], ' - ', real[i])

In [8]:
def eval_step(decoder_dict):
    '''
    Evaluate on a subset of the validation set and report cost, CER and WER.
    '''
    step=1
    cost_l = []
    cer_l = []
    wer_l = []
    pred_l = []
    real_l = []
    val_generator = data_generator(list(df_val.file)[:2048])
    for img, img_len, t, t_len in val_generator:
        ce, acc, pred = sess.run([cost, accuracy, dense_prediction], 
                              feed_dict={images_batch_ph: img, image_len_ph: img_len,
                                        labels_batch_ph: t, labels_len_batch_ph: t_len})
        pred = [decode_word(w, decoder_dict) for w in pred]
        real = [decode_word(w, decoder_dict) for w in t]
        cost_l += [ce]
        cer_l += [acc]
        pred_l += pred
        real_l += real
        step +=1
        
    # Summaries (eval)
    summary_str = sess.run(merged, feed_dict={images_batch_ph: img, image_len_ph: img_len,
                                        labels_batch_ph: t, labels_len_batch_ph: t_len}) 
    test_writer.add_summary(summary_str, epoch)
    
    wer = evaluate_wer(pred_l, real_l)
    wer_l += [wer]
            
    print('TEST - Cost:', np.mean(cost_l),
            ' - CER:', np.mean(cer_l),
            ' - WER:', wer)
    print('\nTest examples pred vs real:')
    for i in range(10):
        print(pred_l[i], ' - ', real_l[i])
    
    
    
    return cost_l, cer_l, wer_l

In [9]:
num_epochs = 8



# Train the model
gpu_options = tf.GPUOptions(allow_growth = False)
with tf.Session(graph=graph, config=tf.ConfigProto(gpu_options=gpu_options, log_device_placement=True)) as sess:


    train_writer = tf.summary.FileWriter(os.path.join(experiment_dir, 'train'), graph=graph)
    test_writer = tf.summary.FileWriter(os.path.join(experiment_dir, 'test'))


    # Initialize variables if no previous checkpoint exists.
    ckpt = tf.train.get_checkpoint_state(experiment_dir)
    if ckpt is None:
        # Initialize vars
        sess.run(tf.global_variables_initializer())
        print('vars initialized!')
        epoch_ini = 1
    else:
        # Load last model
        saver.restore(sess, ckpt.model_checkpoint_path)
        if os.path.basename(ckpt.model_checkpoint_path).split('-')[-1] == 'best_model':
            epoch_ini = 1
        else:
            epoch_ini = int(os.path.basename(ckpt.model_checkpoint_path).split('-')[-1]) + 1
        print('model loaded: %s' % ckpt.model_checkpoint_path)



    # Compute for num_epochs.
    if 1:
        cost_val_l = []
        cer_val_l = []
        wer_val_l = []
        continue_training = True
        epoch = epoch_ini
        while (epoch < num_epochs and continue_training):

            # Train phase
            print('Training epoch:', epoch)
            train_step(epoch, decoder_dict)

            # Test phase
            print('Testing epoch:', epoch)
            cost_val, cer_val, wer_val = eval_step(decoder_dict)
            cost_val_l += [cost_val]

                
            # Check for validation-cost improvements and stop training early
            if len(cost_val_l) > 10:
                print('cost val %s' % cost_val_l)
                if np.min(cost_val_l) < np.min(cost_val_l[-10:]): # best cost was not achieved in the last 10 epochs
                    continue_training = False
                    print('STOPPING TRAINING')
                
            #Save model
            save_path = saver.save(sess, os.path.join(experiment_dir, 'model'), global_step=epoch)
            print("Model saved in file: %s" % save_path)  

           
            epoch += 1


vars initialized!
Training epoch: 1
TRAIN - Epoch: 1  - Step: 500  - Cost: 35.6782  - CER: 0.9600266  - WER (step): 1.0

Train examples pred vs real:
e  -  goatee
e  -  finn
e  -  Resharpened
e  -  STUPENDOUS
e  -  frisbee
e  -  cps
ae  -  Liberties
TRAIN - Epoch: 1  - Step: 1000  - Cost: 31.081186  - CER: 0.952635  - WER (step): 1.0

Train examples pred vs real:
s  -  STILLED
Ce  -  SERAGLIO
s  -  AUTOPILOT
s  -  Permanently
S  -  Biblical
s  -  PARASITICALLY
s  -  Vertigo
TRAIN - Epoch: 1  - Step: 1500  - Cost: 30.77977  - CER: 0.93838406  - WER (step): 1.0

Train examples pred vs real:
Ce  -  Incompetents
ee  -  misplaced
Se  -  Pronuclear
ee  -  Particularity
Re  -  Equestriennes
Se  -  REATTAINS
e  -  GARMON
TRAIN - Epoch: 1  - Step: 2000  - Cost: 30.224266  - CER: 0.94111973  - WER (step): 1.0

Train examples pred vs real:
e  -  HALO
E  -  SE
a  -  Centerboards
E  -  PUFFERS
e  -  defacement
a  -  Majorly
a  -  Tenpins
TRAIN - Epoch: 1  - Step: 2500  - Cost: 29.862984  - CER: 0.9379568  - WER (step): 1.0

Train examples pred vs real:
e  -  bunchiest
a  -  egocentric
ae  -  Qualmish
E  -  GARLICKY
EiTNT  -  YTTERBIUM
E  -  laureateship
e  -  Digitizing
TRAIN - Epoch: 1  - Step: 3000  - Cost: 29.178844  - CER: 0.93120885  - WER (step): 1.0

Train examples pred vs real:
A  -  SADHU
i  -  shithead
uo  -  Humanize
a  -  compotes
o  -  COGENTLY
o  -  Troyes
E  -  CAP
TRAIN - Epoch: 1  - Step: 3500  - Cost: 28.45084  - CER: 0.9163275  - WER (step): 1.0

Train examples pred vs real:
i  -  Ted
iee  -  sleetier
oee  -  mussorgsky
En  -  LINESMEN
Ai  -  attained
o  -  cassocks
an  -  INVERTING
TRAIN - Epoch: 1  - Step: 4000  - Cost: 27.418177  - CER: 0.8953489  - WER (step): 1.0

Train examples pred vs real:
E  -  romeos
ae  -  aspirator
A  -  AGENCY
o  -  sums
an  -  CONTRAFLOWS
E  -  TABBIES
e  -  MADISON
TRAIN - Epoch: 1  - Step: 4500  - Cost: 26.84177  - CER: 0.8626128  - WER (step): 1.0

Train examples pred vs real:
ans  -  STADIUMS
u  -  burn
oae  -  Draughtboards
TN  -  TEEING
ee  -  Igneous
Caes  -  Captures
itls  -  Differential
TRAIN - Epoch: 1  - Step: 5000  - Cost: 25.327375  - CER: 0.8158164  - WER (step): 1.0

Train examples pred vs real:
AIIT  -  PARTURITION
Ceees  -  ICEBERGS
st  -  sb
Iti  -  agoraphobia
sacE  -  Stupefy
at  -  borer
apied  -  Vaporized
TRAIN - Epoch: 1  - Step: 5500  - Cost: 23.917294  - CER: 0.7606441  - WER (step): 1.0

Train examples pred vs real:
oS  -  raconteurs
LO  -  Mylar
Caeei  -  Nonbelievers
SET  -  Swamp
AEES  -  WARNER
IRPES  -  VITRIFIES
AS  -  BANKNOTE
TRAIN - Epoch: 1  - Step: 6000  - Cost: 22.244884  - CER: 0.70634025  - WER (step): 1.0

Train examples pred vs real:
IUronreS  -  backwardness
rsle  -  Insole
Elrerh  -  Wolfish
cianmantly  -  adamantly
SLETEs  -  SUBJUGATES
HleAS  -  theorems
Lruge  -  Luge
TRAIN - Epoch: 1  - Step: 6500  - Cost: 20.114738  - CER: 0.62552375  - WER (step): 1.0

Train examples pred vs real:
Syioaas  -  STATIONERY
Wn  -  Vdt
MuNsEIaTs  -  AMUSEMENTS
Pr  -  Prays
dbanetnt  -  debarment
UNESs  -  WILINESS
EaacGs  -  SPRUCES
TRAIN - Epoch: 1  - Step: 7000  - Cost: 18.069944  - CER: 0.5533528  - WER (step): 0.875

Train examples pred vs real:
CNct  -  CONDIGN
UOSpecig  -  unspecific
Slumlard  -  slumlord
FiD  -  OPINION
Drong  -  Daylong
Shtendhnens  -  entrenchments
fale  -  faille
TRAIN - Epoch: 1  - Step: 7500  - Cost: 16.532211  - CER: 0.500781  - WER (step): 0.875

Train examples pred vs real:
aASOPEA  -  CASSIOPEIA
sAy  -  OSCULATED
AUTCOPOn  -  AUTOPILOT
Bebing  -  Ebbing
Foaiest  -  Foxiest
Soboaed  -  Showboated
MIOURNERS  -  MOURNERS
TRAIN - Epoch: 1  - Step: 8000  - Cost: 15.316027  - CER: 0.46097192  - WER (step): 0.875

Train examples pred vs real:
bameites  -  bicarbonates
Peieaayes  -  Psychoanalytic
CosES  -  GRUNEWALD
IT  -  RUFFLY
Piumpialisn  -  triumphalism
dendsetters  -  trendsetters
P  -  RETROFIT
TRAIN - Epoch: 1  - Step: 8500  - Cost: 14.280104  - CER: 0.4335304  - WER (step): 0.75

Train examples pred vs real:
inereourse  -  intercourse
ENOE  -  enoe
lemzed  -  itemized
Seudhgere  -  Southgate
Camnt  -  Command
DAMP  -  DAMP
Lyts  -  LACING
TRAIN - Epoch: 1  - Step: 9000  - Cost: 13.565559  - CER: 0.41275203  - WER (step): 0.875

Train examples pred vs real:
iaepes  -  Trooper
RENY  -  RENT
laer  -  laxer
ContesT  -  contest
VERENS  -  STERNNESS
liopathig  -  Idiopathic
RognizancG  -  recognizance
Testing epoch: 1
TEST - Cost: 12.728888  - CER: 0.3836714  - WER: 0.87109375

Test examples pred vs real:
PaINESS  -  MEALINESS
Tovmie  -  Townie
PrerCarefn  -  Overcareful
svs  -  Cumulatively
PuEy  -  anyway
Pliteatiatg  -  existentially
PremSed  -  Premised
DEROEINg  -  DISROBING
Wooer  -  Wooer
KissER  -  KISSER
Model saved in file: /home/ubuntu/data/oxford_syntetic_text/models/test03/model-1
Training epoch: 2
TRAIN - Epoch: 2  - Step: 500  - Cost: 12.1165495  - CER: 0.37129605  - WER (step): 0.875

Train examples pred vs real:
Capeyron  -  Clapeyron
Blarmaceauies  -  Pharmaceutics
CYUS  -  CYGNUS
BPONENT  -  EXPONENT
spnniDs  -  SPUMIER
Stolidness  -  Stolidness
RINCEDOKS  -  PRINCEDOMS
TRAIN - Epoch: 2  - Step: 1000  - Cost: 11.620479  - CER: 0.36164  - WER (step): 0.625

Train examples pred vs real:
Touts  -  Touts
Wvdest  -  Vividest
Concavity  -  concavity
PERrORe  -  PERFORMS
Non  -  Noon
Transporter  -  Transporter
MIRSPAIES  -  WARRANTIES
TRAIN - Epoch: 2  - Step: 1500  - Cost: 11.380301  - CER: 0.34783965  - WER (step): 0.8125

Train examples pred vs real:
Ppate  -  Impala
LILIA  -  LILIA
Deharked  -  Debarked
Polgphane  -  polyphemus
gEOMSTANCED  -  CIRCUMSTANCED
FEARISAICRL  -  PHARISAICAL
Roland  -  Rolland
TRAIN - Epoch: 2  - Step: 2000  - Cost: 10.891138  - CER: 0.3372918  - WER (step): 0.8125

Train examples pred vs real:
MURPLERS  -  MUFFLERS
ELbarts  -  Pilchards
BiH  -  Loftiest
Tety  -  tracery
Buoy  -  Buoy
BODieaior  -  application
POLTBUROS  -  POLITBUROS
TRAIN - Epoch: 2  - Step: 2500  - Cost: 10.708041  - CER: 0.33225393  - WER (step): 0.875

Train examples pred vs real:
MUSKELLUNG  -  MUSKELLUNGE
PARACHUIST  -  PARACHUTIST
Tenentry  -  Tenantry
sIPSRIE  -  IMPOSING
Chanelaos  -  Chancellor
TAISCRIPTS  -  TRANSCRIPTS
MANDALAS  -  MANDALAS
TRAIN - Epoch: 2  - Step: 3000  - Cost: 10.472994  - CER: 0.32158172  - WER (step): 0.8125

Train examples pred vs real:
TRAMIPLED  -  TRAMPLED
Outmatrhes  -  outmatches
Destepping  -  Overstepping
Oerng  -  shuttering
Camplaining  -  Complaining
GraL  -  CREEL
headboard  -  headboard
TRAIN - Epoch: 2  - Step: 3500  - Cost: 10.003018  - CER: 0.3093385  - WER (step): 0.8125

Train examples pred vs real:
Caning  -  Caning
cONWAY  -  CONWAY
Ors  -  Draperies
subsides  -  subsides
SEXILY  -  SEXILY
WstnsPNy  -  MILITIAMAN
Consirucine  -  Constructive
TRAIN - Epoch: 2  - Step: 4000  - Cost: 9.780706  - CER: 0.30351558  - WER (step): 0.625

Train examples pred vs real:
COWRIE  -  COWRIE
smowboarding  -  snowboarding
beeng  -  benz
MACYS  -  Macys
beaness  -  leanness
Unawares  -  Unawares
LAWAI  -  HAWAII
TRAIN - Epoch: 2  - Step: 4500  - Cost: 9.452562  - CER: 0.29407758  - WER (step): 0.8125

Train examples pred vs real:
hrltlng  -  halting
cra  -  era
Scavenging  -  Scavenging
rerooents  -  permanents
ircons  -  Zircons
Digpesd  -  Dispersal
Dimness  -  Dimness
TRAIN - Epoch: 2  - Step: 5000  - Cost: 9.364994  - CER: 0.29326832  - WER (step): 0.875

Train examples pred vs real:
MNCROWAVE  -  MICROWAVE
Ointments  -  ointments
PEBBLED  -  PEBBLED
DaNsEUSES  -  DANSEUSES
NEPALESE  -  NEPALESE
Dreia  -  hardin
Cugering  -  Queering
TRAIN - Epoch: 2  - Step: 5500  - Cost: 9.375883  - CER: 0.29304263  - WER (step): 0.6875

Train examples pred vs real:
fints  -  flints
Colitis  -  Colitis
RnOOper  -  improper
Deppiet  -  peppier
AUNCHLNe  -  MUNCHING
RAMS  -  RAMS
Hllbby  -  flabby
TRAIN - Epoch: 2  - Step: 6000  - Cost: 9.026805  - CER: 0.28196585  - WER (step): 0.75

Train examples pred vs real:
Dysprosium  -  Dysprosium
Inpractical  -  impractical
GINGED  -  clinched
crppe  -  cropper
cobstoppers  -  Gobstoppers
INRALE  -  Inhale
HAddENED  -  maddened
TRAIN - Epoch: 2  - Step: 6500  - Cost: 8.795625  - CER: 0.2730014  - WER (step): 0.8125

Train examples pred vs real:
Bruldlzes  -  Brutalizes
MarTOD  -  marrow
HTUAL  -  RITUAL
Released  -  released
Peddss  -  purist
mnarking  -  marking
ATE  -  APPRAISE
TRAIN - Epoch: 2  - Step: 7000  - Cost: 8.681094  - CER: 0.26896235  - WER (step): 0.625

Train examples pred vs real:
Fnell  -  Knell
CONVULSIVELY  -  CONVULSIVELY
Cnfit  -  unfit
overclock  -  overclock
pogo  -  pogo
OSHERS  -  JOSHERS
Hackguards  -  Blackguards
TRAIN - Epoch: 2  - Step: 7500  - Cost: 8.779804  - CER: 0.27426815  - WER (step): 0.875

Train examples pred vs real:
Spaablocss  -  Spamblocks
D  -  20
LuTTRG  -  CLIMACTERIC
scoFFLANY  -  scofflaw
sertuplets  -  Sextuplets
Collapsing  -  Collapsing
DNSTEaD  -  INSTEAD
TRAIN - Epoch: 2  - Step: 8000  - Cost: 8.672175  - CER: 0.26894805  - WER (step): 0.625

Train examples pred vs real:
Pouches  -  Pouches
teaplies  -  yearlies
sparsest  -  sparsest
Upa  -  upa
CRaYONED  -  crayoned
lousth  -  kenneth
debited  -  debited
TRAIN - Epoch: 2  - Step: 8500  - Cost: 8.4327345  - CER: 0.264398  - WER (step): 0.8125

Train examples pred vs real:
storybaok  -  storybook
uttingly  -  cuttingly
MONOMANIAS  -  MONOMANIAC
BICNEES  -  EXCHANGES
OBSeSSIVEN  -  OBSESSIVELY
muchincd  -  machined
faciitator  -  facilitator
TRAIN - Epoch: 2  - Step: 9000  - Cost: 8.330441  - CER: 0.260161  - WER (step): 0.6875

Train examples pred vs real:
CDTES  -  Cotes
GUSTED  -  GUSTED
rearmost  -  rearmost
BASED  -  BIASED
RaCeIeS  -  Racemes
STUners  -  Tuners
LUNCHROOS  -  LUNCHROOMS
Testing epoch: 2
TEST - Cost: 8.075815  - CER: 0.25502428  - WER: 0.677734375

Test examples pred vs real:
HEcASTAR  -  Megastar
sharpens  -  sharpens
Relse  -  Reuse
CHEMICAL  -  CHEMICAL
Dekanhy  -  Dashingly
Wilae  -  williwaws
TRUCES  -  TRUCES
gentility  -  gentility
Grabby  -  Grabby
RaEEEREE  -  TRANSVERSES
Model saved in file: /home/ubuntu/data/oxford_syntetic_text/models/test03/model-2
Training epoch: 3
TRAIN - Epoch: 3  - Step: 500  - Cost: 7.4960165  - CER: 0.23698474  - WER (step): 0.6875

Train examples pred vs real:
heathman  -  heathman
siccd  -  Nicaea
collapsible  -  collapsible
sa  -  Slaw
PENDIG  -  PENDING
Benspecdaliztimt  -  nonspecializing
Flashguns  -  Flashguns
TRAIN - Epoch: 3  - Step: 1000  - Cost: 7.2155633  - CER: 0.23254313  - WER (step): 0.75

Train examples pred vs real:
jussed  -  Fussed
SwaTS  -  Swats
TAPInG  -  TAPING
OSPREYS  -  OSPREYS
vesteryear  -  yesteryear
FERNANDD  -  fernando
MARSHIALNG  -  MARSHALING
TRAIN - Epoch: 3  - Step: 1500  - Cost: 7.3845973  - CER: 0.23487358  - WER (step): 0.5625

Train examples pred vs real:
SWNTHESZING  -  SYNTHESIZING
LeapPraise  -  Reappraise
Torsos  -  Torsos
unsaddled  -  unsaddled
mnangoes  -  mangoes
VEwpOint  -  Viewpoint
sokeE  -  Jouster
TRAIN - Epoch: 3  - Step: 2000  - Cost: 7.3589473  - CER: 0.23321918  - WER (step): 0.6875

Train examples pred vs real:
IEMpERAT  -  intemperate
salmonellae  -  salmonellae
Pesne  -  Delaying
Bstuary  -  Estuary
langitnds  -  longitude
Peraite  -  Parasitic
BANCARNATED  -  REINCARNATED
TRAIN - Epoch: 3  - Step: 2500  - Cost: 7.255906  - CER: 0.2307145  - WER (step): 0.875

Train examples pred vs real:
restablihner  -  reestablishment
Omiesions  -  Omissions
Adenoitls  -  Adenoids
QBTZERS  -  KIBITZERS
POTMANTEAU  -  PORTMANTEAU
FURLONGS  -  FURLONGS
iermaphrodite  -  hermaphroditic
TRAIN - Epoch: 3  - Step: 3000  - Cost: 7.240041  - CER: 0.23302837  - WER (step): 0.6875

Train examples pred vs real:
lonesome  -  lonesome
sosier  -  crosier
EXTRUDES  -  extrudes
Detrier  -  Pettier
Touted  -  Touted
MPERISHDLE  -  IMPERISHABLE
SYSOP  -  SYSOP
TRAIN - Epoch: 3  - Step: 3500  - Cost: 7.19937  - CER: 0.22849393  - WER (step): 0.625

Train examples pred vs real:
palace  -  palace
tumoy  -  tumor
SEARANG  -  SEMARANG
Cnehiroiing  -  stockbroking
gibert  -  gilbert
abralham  -  abraham
PRESERWING  -  PRESERVING
TRAIN - Epoch: 3  - Step: 4000  - Cost: 7.197333  - CER: 0.2283383  - WER (step): 0.625

Train examples pred vs real:
PURCHASABL  -  PURCHASABLE
delegated  -  delegated
whitman  -  whitman
NOTABLY  -  NOTABLY
gareered  -  Careered
SHRINKABLE  -  SHRINKABLE
ldle  -  idle
TRAIN - Epoch: 3  - Step: 4500  - Cost: 6.9388456  - CER: 0.22188556  - WER (step): 0.6875

Train examples pred vs real:
SY  -  SWOOP
Qo  -  aol
vablw  -  Viably
bantus  -  bantus
GAndLeS  -  candles
Sinker  -  Sinker
REGNAnT  -  REGNANT
TRAIN - Epoch: 3  - Step: 5000  - Cost: 6.833347  - CER: 0.21876597  - WER (step): 0.75

Train examples pred vs real:
TeLeTIon  -  TELETHON
Hrt  -  Yurt
garder  -  guarder
centrels  -  centrals
fantastic  -  fantastic
iestimably  -  inestimably
Disimulating  -  Dissimulating
TRAIN - Epoch: 3  - Step: 5500  - Cost: 6.90179  - CER: 0.22154674  - WER (step): 0.5625

Train examples pred vs real:
sCLOGD  -  ECLOGUE
TIPSIER  -  TIPSIER
FORESTS  -  FORESTS
howler  -  howler
lottie  -  lottie
Buyouts  -  Buyouts
PRsOTHERAPST  -  PHYSIOTHERAPIST
TRAIN - Epoch: 3  - Step: 6000  - Cost: 6.744445  - CER: 0.21499887  - WER (step): 0.625

Train examples pred vs real:
VEGETATING  -  VEGETATING
roted  -  Toked
Fippery  -  Frippery
ArIstocracy  -  Aristocracy
Kuerat  -  Leavens
UNDERMINED  -  UNDERMINED
Deliverymen  -  Deliverymen
TRAIN - Epoch: 3  - Step: 6500  - Cost: 6.9064713  - CER: 0.22072612  - WER (step): 0.5

Train examples pred vs real:
AINIBIKES  -  MINIBIKES
flits  -  flits
ConcERMe  -  CONCERNING
BACKBTERS  -  BACKBITERS
TUNIS  -  TUNIS
GHtMOTHERAEY  -  CHEMOTHERAPY
CVET  -  CIVET
TRAIN - Epoch: 3  - Step: 7000  - Cost: 6.7305217  - CER: 0.21475127  - WER (step): 0.4375

Train examples pred vs real:
RANBOARDS  -  DRAINBOARDS
diskiust  -  duskiest
laundry  -  Laundry
TUrRBOCHARGERS  -  TURBOCHARGERS
bremen  -  bremen
Vaporize  -  Vaporize
WAGER  -  WAGER
TRAIN - Epoch: 3  - Step: 7500  - Cost: 6.9851017  - CER: 0.22680564  - WER (step): 0.5

Train examples pred vs real:
Rvroted  -  Penetrated
Pressie  -  Pressie
inventr  -  inventor
RECIPROCATING  -  RECIPROCATING
bazbeaur  -  bazbeaux
doltish  -  doltish
SHOUTER  -  SHOUTER
TRAIN - Epoch: 3  - Step: 8000  - Cost: 6.593973  - CER: 0.21130514  - WER (step): 0.5625

Train examples pred vs real:
OurACITORIES  -  OLFACTORIES
sabbaths  -  sabbaths
OMaR  -  OMAR
droplets  -  droplets
Gome  -  Geomagnetic
evildoer  -  evildoer
TeetRTS  -  Jesters
TRAIN - Epoch: 3  - Step: 8500  - Cost: 6.757958  - CER: 0.21404848  - WER (step): 0.6875

Train examples pred vs real:
AUTERT  -  austerity
SRHEPTOCOSCAL  -  STREPTOCOCCAL
DISLODGES  -  DISLODGES
VEW  -  VIEW
CHANCELLORSvILIE  -  Chancellorsville
CHORORNOROCAREN  -  CHLOROFLUOROCARBON
EyVEyE  -  anyways
TRAIN - Epoch: 3  - Step: 9000  - Cost: 6.667889  - CER: 0.21201065  - WER (step): 0.5

Train examples pred vs real:
deathezneel  -  Leatherneck
Sbuly  -  study
THUMBPRINT  -  THUMBPRINT
Hckinne  -  Mckinney
CRUCIAL  -  CRUCIAL
pension  -  pension
Seminoles  -  Seminoles
Testing epoch: 3
TEST - Cost: 7.045307  - CER: 0.232079  - WER: 0.62451171875

Test examples pred vs real:
Aduthood  -  Adulthood
chiropody  -  Chiropody
Meteorologic  -  Meteorologic
Creeplness  -  Creepiness
UosUED  -  DEPRESSURIZED
Sgrets  -  Signets
camulates  -  accumulation
marina  -  marina
BURAL  -  burial
IRAISOuG  -  INFRASONIC
Model saved in file: /home/ubuntu/data/oxford_syntetic_text/models/test03/model-3
Training epoch: 4
TRAIN - Epoch: 4  - Step: 500  - Cost: 5.6714907  - CER: 0.18524598  - WER (step): 0.25

Train examples pred vs real:
adjuration  -  adjuration
EoOS  -  EXHUMATION
JOURNAL  -  Journal
Tarps  -  Tarps
Backstabber  -  Backstabber
PROSELYTED  -  PROSELYTED
ramovO  -  ramova
TRAIN - Epoch: 4  - Step: 1000  - Cost: 5.8014464  - CER: 0.19153015  - WER (step): 0.25

Train examples pred vs real:
cascaded  -  cascaded
rattrap  -  rattrap
walkway  -  walkway
Humid  -  Humid
earldom  -  earldom
grpped  -  gripped
txstl  -  Unproductively
TRAIN - Epoch: 4  - Step: 1500  - Cost: 5.7928576  - CER: 0.19150628  - WER (step): 0.4375

Train examples pred vs real:
VAnnED  -  VANNED
Biannual  -  Biannual
Jacobson  -  Jacobson
micheal  -  micheal
DISC  -  DISC
ADY  -  ady
Ehermet  -  Ethernet
TRAIN - Epoch: 4  - Step: 2000  - Cost: 5.9026937  - CER: 0.19437452  - WER (step): 0.5

Train examples pred vs real:
Satirize  -  Satirize
DENEFtS  -  BENEFITS
SPAnIEs  -  SPARKIEST
Atradtively  -  Attractively
HUSKEd  -  HUSKED
Flavorings  -  Flavorings
hntermingle  -  intermingle
TRAIN - Epoch: 4  - Step: 2500  - Cost: 5.7787147  - CER: 0.19149728  - WER (step): 0.4375

Train examples pred vs real:
Seaborg  -  seaborg
cotter  -  cotter
Prmeial  -  Parricidal
mpole  -  maypole
Recces  -  Recces
DEFILEMENT  -  DEFILEMENT
BRENTON  -  BRENTON
TRAIN - Epoch: 4  - Step: 3000  - Cost: 5.75595  - CER: 0.18894185  - WER (step): 0.5

Train examples pred vs real:
disburses  -  disburses
Refrigerator  -  Refrigerator
UnOTsHiEY  -  KUIBYSHEV
POPULARIZES  -  POPULARIZES
Eartnsheking  -  Earthshaking
questioned  -  questioned
COMMUNICANTS  -  COMMUNICANTS
TRAIN - Epoch: 4  - Step: 3500  - Cost: 5.902918  - CER: 0.19217606  - WER (step): 0.625

Train examples pred vs real:
lamings  -  flamings
tehydrates  -  dehydrates
Sectary  -  Sectary
CANAPE  -  CANAPE
Ramp  -  Ramp
preides  -  presides
BEROGATION  -  ABROGATION
TRAIN - Epoch: 4  - Step: 4000  - Cost: 5.846954  - CER: 0.19183232  - WER (step): 0.5625

Train examples pred vs real:
Springiness  -  Springiness
promotions  -  promotions
Fluorescing  -  fluorescing
Orated  -  Orated
trampoline  -  trampoline
REAmS  -  Pravus
TrVIDIOUsLY  -  INVIDIOUSLY
TRAIN - Epoch: 4  - Step: 4500  - Cost: 5.91248  - CER: 0.19136031  - WER (step): 0.4375

Train examples pred vs real:
OUTPOINT  -  OUTPOINT
Wsherete  -  usherette
Leaky  -  Leaky
THING  -  THING
Signalizes  -  Signalizes
Rete  -  Radar
PETITIONED  -  PETITIONED
TRAIN - Epoch: 4  - Step: 5000  - Cost: 5.829629  - CER: 0.19306815  - WER (step): 0.375

Train examples pred vs real:
Lawfulness  -  Lawfulness
SYMBOLIG  -  SYMBOLIC
Fronds  -  Fronds
Orerteacting  -  Overreacting
peddler  -  peddler
REINS  -  REINS
rebroadcast  -  rebroadcast
TRAIN - Epoch: 4  - Step: 5500  - Cost: 5.811638  - CER: 0.18674944  - WER (step): 0.4375

Train examples pred vs real:
Wnreale  -  Unseals
TAKEOFF  -  TAKEOFF
RerUdiATe  -  Repudiate
FAIRLY  -  FAIRLY
consists  -  consists
appending  -  appending
Wiscatorial  -  piscatorial
TRAIN - Epoch: 4  - Step: 6000  - Cost: 5.751504  - CER: 0.1867997  - WER (step): 0.5625

Train examples pred vs real:
coNCEVE  -  CONCEIVE
Mu  -  Mu
OL  -  OPAL
Lotn  -  Loan
beck  -  beck
etiologies  -  etiologies
Minuting  -  Minuting
TRAIN - Epoch: 4  - Step: 6500  - Cost: 5.924909  - CER: 0.19201489  - WER (step): 0.5

Train examples pred vs real:
Ms  -  Loaf
CHASED  -  CHASED
CHIEFTAINS  -  CHIEFTAINS
FENDER  -  FENDER
BENGALS  -  BENGALS
UnCnimty  -  unanimity
Pridium  -  iridium
TRAIN - Epoch: 4  - Step: 7000  - Cost: 5.711339  - CER: 0.18982247  - WER (step): 0.75

Train examples pred vs real:
ASsaying  -  Assaying
DNPiPES  -  DRAINPIPES
Trapdoer  -  Trapdoor
suPERmEN  -  SUPERMEN
GONGEALMENT  -  CONCEALMENT
aOMBARDMENT  -  BOMBARDMENT
Whelped  -  Whelped
TRAIN - Epoch: 4  - Step: 7500  - Cost: 5.6133385  - CER: 0.18096691  - WER (step): 0.625

Train examples pred vs real:
CEBUANO  -  CEBUANO
Quilter  -  Quilter
Stagnat  -  Stagnate
ineptly  -  ineptly
HOmuNior  -  NONUNION
idbomatically  -  idiomatically
Recentest  -  Recentest
TRAIN - Epoch: 4  - Step: 8000  - Cost: 5.778626  - CER: 0.18806449  - WER (step): 0.3125

Train examples pred vs real:
Impaired  -  Impaired
Apropriations  -  Appropriations
SHORTSTOPS  -  SHORTSTOPS
CURLICUED  -  CURLICUED
poignant  -  poignant
Cornfields  -  Cornfields
Refurbishes  -  Refurbishes
TRAIN - Epoch: 4  - Step: 8500  - Cost: 5.6961775  - CER: 0.1876857  - WER (step): 0.4375

Train examples pred vs real:
RepuLsIvENESS  -  REPULSIVENESS
Functionalists  -  Functionalists
NONVOCATIONA  -  NONVOCATIONAL
diocesan  -  diocesan
AnLoHy  -  sellotaping
FEEHS  -  PETERS
Snock  -  Smock
TRAIN - Epoch: 4  - Step: 9000  - Cost: 5.660532  - CER: 0.18476166  - WER (step): 0.8125

Train examples pred vs real:
SHUDDENED  -  SHUDDERED
Tlde  -  Tilde
dealizes  -  Idealizes
AEtoicRg  -  METHAMPHETAMINE
Eutectic  -  Eutectic
Feheries  -  fisheries
SYMBoLiGALLY  -  SYMBOLICALLY
Testing epoch: 4
TEST - Cost: 6.102847  - CER: 0.19424246  - WER: 0.54931640625

Test examples pred vs real:
GLIMPSE  -  GLIMPSE
Stability  -  stability
SousncihvoAa  -  Councilwoman
LIKELIER  -  LIKELIER
legrooms  -  legrooms
Soming  -  Booming
Leapfrogs  -  Leapfrogs
brahmanism  -  brahmanism
lses  -  lasses
Peony  -  Peony
Model saved in file: /home/ubuntu/data/oxford_syntetic_text/models/test03/model-4
Training epoch: 5
TRAIN - Epoch: 5  - Step: 500  - Cost: 4.8327284  - CER: 0.16181011  - WER (step): 0.4375

Train examples pred vs real:
Fitchis  -  Litchis
Admission  -  Admission
CETHOGRAeHCALY  -  ORTHOGRAPHICALLY
BOWLINES  -  BOWLINES
PATHETICALY  -  PATHETICALLY
WINIER  -  Winier
PRISMS  -  PRISMS
TRAIN - Epoch: 5  - Step: 1000  - Cost: 5.0880237  - CER: 0.16811088  - WER (step): 0.5

Train examples pred vs real:
BLeNCHRED  -  BLENCHED
Cavert  -  Calvert
libeled  -  libeled
Ileared  -  Cleared
wavier  -  wavier
Slipknot  -  Slipknot
ALLUSIVE  -  ALLUSIVE
TRAIN - Epoch: 5  - Step: 1500  - Cost: 4.8715177  - CER: 0.16291134  - WER (step): 0.3125

Train examples pred vs real:
MAUDLIN  -  MAUDLIN
MAZAMA  -  MAZAMA
Dedauehees  -  Debauchees
sunbathed  -  sunbathed
Haggard  -  Haggard
SPECTROSCOPE  -  SPECTROSCOPE
indesecribable  -  indescribable
TRAIN - Epoch: 5  - Step: 2000  - Cost: 4.881193  - CER: 0.16194434  - WER (step): 0.375

Train examples pred vs real:
NIGLkTDRESSES  -  NIGHTDRESSES
Receipting  -  Receipting
Msting  -  infesting
JACKKNIFING  -  JACKKNIFING
tarbel  -  tarbell
WEAVING  -  WEAVING
decipherable  -  decipherable
TRAIN - Epoch: 5  - Step: 2500  - Cost: 4.8269153  - CER: 0.16132635  - WER (step): 0.1875

Train examples pred vs real:
LOCANDA  -  LOCANDA
DARTOZnN  -  CARTOON
LOOPY  -  LOOPY
nonskid  -  nonskid
BRAINLESS  -  BRAINLESS
Epoxies  -  Epoxies
Legends  -  Legends
TRAIN - Epoch: 5  - Step: 3000  - Cost: 4.810408  - CER: 0.16152532  - WER (step): 0.5625

Train examples pred vs real:
pesoS  -  pesos
faininess  -  faintness
MAG  -  MAG
EDEN  -  EDEN
CORNWALlS  -  CORNWALLIS
CONSPECTUS  -  CONSPECTUS
CRoiotiags  -  Circulations
TRAIN - Epoch: 5  - Step: 3500  - Cost: 4.97621  - CER: 0.16819888  - WER (step): 0.5625

Train examples pred vs real:
Maiestey  -  Mainstay
IMNIGRNTS  -  IMMIGRANTS
eoneaARN  -  forewarn
voGuES  -  VOGUES
HERMITS  -  HERMITS
Wholesalors  -  Wholesalers
COPPERS  -  COPPERS
TRAIN - Epoch: 5  - Step: 4000  - Cost: 4.962827  - CER: 0.16706643  - WER (step): 0.3125

Train examples pred vs real:
zonked  -  zonked
DEARIES  -  DEARIES
ballot  -  ballot
nymphomanl  -  nymphomania
Neater  -  Neater
CEns  -  beacons
CROOKING  -  CROOKING
TRAIN - Epoch: 5  - Step: 4500  - Cost: 4.9980636  - CER: 0.16437079  - WER (step): 0.5625

Train examples pred vs real:
Cremates  -  Cremates
Mermgraed  -  Monogrammed
rcns  -  dbms
wellie  -  wellie
maughts  -  naughts
strode  -  Strode
revarns  -  rewarms
TRAIN - Epoch: 5  - Step: 5000  - Cost: 4.8821945  - CER: 0.16226383  - WER (step): 0.5

Train examples pred vs real:
TROLIPE  -  TROUPE
doe  -  Jove
REOpROACHiING  -  REPROACHING
Submersing  -  Submerging
Sternums  -  Sternums
Swellest  -  Swellest
Embryologist  -  Embryologist
TRAIN - Epoch: 5  - Step: 5500  - Cost: 5.021497  - CER: 0.16543895  - WER (step): 0.3125

Train examples pred vs real:
HILLARY  -  HILLARY
transoms  -  transoms
Pests  -  Pests
PIGGED  -  PIGGED
Touchy  -  Touchy
PERSPIRES  -  PERSPIRES
VERIFIED  -  VERIFIED
TRAIN - Epoch: 5  - Step: 6000  - Cost: 4.9432163  - CER: 0.16607891  - WER (step): 0.75

Train examples pred vs real:
NANTES  -  INANITIES
ALtERATIONS  -  ALTERATIONS
LIbrarIANS  -  Librarians
IATETOS  -  INFATUATIONS
tecunlees  -  accumulates
tailight  -  taillight
Inanimately  -  Inanimately
TRAIN - Epoch: 5  - Step: 6500  - Cost: 5.0927176  - CER: 0.17179565  - WER (step): 0.375

Train examples pred vs real:
belonging  -  belonging
salotken  -  Solotken
BOMBeR  -  BOMBER
Heredlitary  -  Hereditary
Bisexual  -  Bisexual
burnish  -  burnish
Cyped  -  Gyved
TRAIN - Epoch: 5  - Step: 7000  - Cost: 4.9632893  - CER: 0.16687545  - WER (step): 0.4375

Train examples pred vs real:
Apportlonad  -  Apportioned
cheating  -  cheating
RETOOLING  -  RETOOLING
Ethopian  -  Ethopian
transsexual  -  transsexual
FIELDING  -  FIELDING
Retro  -  Retro
TRAIN - Epoch: 5  - Step: 7500  - Cost: 4.9782763  - CER: 0.16505432  - WER (step): 0.75

Train examples pred vs real:
uhisperers  -  whisperers
ECNe  -  ACNE
Lyer  -  Iyer
Indite  -  Indite
tesfred  -  falsified
PANTD  -  PANTO
Plodder  -  Plodder
TRAIN - Epoch: 5  - Step: 8000  - Cost: 4.8824935  - CER: 0.16259135  - WER (step): 0.5625

Train examples pred vs real:
harbou  -  harbour
ONSTRUCTOR  -  CONSTRUCTOR
Hilometers  -  Kilometers
Hygiene  -  Hygiene
Compukories  -  Compulsories
MILLENNIUM  -  MILLENNIUM
MUMMY  -  MUMMY
TRAIN - Epoch: 5  - Step: 8500  - Cost: 5.046124  - CER: 0.16789177  - WER (step): 0.5625

Train examples pred vs real:
ACHEsON  -  ACHESON
Bathed  -  Bathed
Fours  -  Fours
Hnrels  -  advancements
CORKUPTEH  -  CORRUPTER
CRASSER  -  CRASSER
IMPELLED  -  IMPELLED
TRAIN - Epoch: 5  - Step: 9000  - Cost: 4.9402156  - CER: 0.16999395  - WER (step): 0.5625

Train examples pred vs real:
UNINHITRITED  -  UNINHIBITED
madcap  -  madcap
absCuraTthie  -  obscurities
Opposed  -  Opposed
ainifloppiese  -  Minifloppieses
taconteurs  -  raconteurs
Judicious  -  Judicious
Testing epoch: 5
TEST - Cost: 5.8354535  - CER: 0.18938038  - WER: 0.53271484375

Test examples pred vs real:
skined  -  skived
CAPTURE  -  CAPTURE
ineducabte  -  ineducable
LAPS  -  LAPS
FISHWIFE  -  FISHWIFE
Slating  -  Slating
PRENISA  -  PRENSA
SHIPWRICHT  -  SHIPWRIGHT
paobi  -  jacobi
winegrower  -  winegrower
Model saved in file: /home/ubuntu/data/oxford_syntetic_text/models/test03/model-5
Training epoch: 6
TRAIN - Epoch: 6  - Step: 500  - Cost: 4.1377983  - CER: 0.14263263  - WER (step): 0.5

Train examples pred vs real:
congers  -  congers
MISsOURIan  -  MISSOURIAN
Dienmbodiment  -  Disembodiment
Bemp  -  Hump
luigi  -  luigi
Tutting  -  Tutting
imstetag  -  imstetag
TRAIN - Epoch: 6  - Step: 1000  - Cost: 4.133939  - CER: 0.14066285  - WER (step): 0.4375

Train examples pred vs real:
steadiness  -  steadiness
RELAELS  -  RELABELS
overstated  -  overstated
CORD  -  CORD
Wrongdoers  -  wrongdoers
Nicer  -  Nicer
ripped  -  ripped
TRAIN - Epoch: 6  - Step: 1500  - Cost: 4.2437263  - CER: 0.14338686  - WER (step): 0.3125

Train examples pred vs real:
removal  -  removal
dionibosns  -  firstborns
Disported  -  Disported
REFFED  -  REFFED
maratha  -  maratha
Teasel  -  Teasel
RESISTLESS  -  RESISTLESS
TRAIN - Epoch: 6  - Step: 2000  - Cost: 4.1944785  - CER: 0.14185326  - WER (step): 0.5625

Train examples pred vs real:
Srdurable  -  Perdurable
MECESSITY  -  NECESSITY
RUSTICITY  -  RUSTICITY
impressive  -  impressive
Fizl  -  Frizz
whiten  -  Whiten
FURSD  -  PURSUED
TRAIN - Epoch: 6  - Step: 2500  - Cost: 4.214797  - CER: 0.14287294  - WER (step): 0.4375

Train examples pred vs real:
FORNICATED  -  FORNICATED
Outlay  -  Outlay
Fasts  -  Fasts
Satelted  -  Satellited
impiutes  -  imputes
DAMONDBACK  -  DIAMONDBACK
Empornd  -  tempering
TRAIN - Epoch: 6  - Step: 3000  - Cost: 4.2070217  - CER: 0.14311641  - WER (step): 0.375

Train examples pred vs real:
Dinkies  -  Dinkies
acanthuses  -  acanthuses
teueyS  -  Leaders
unmemorable  -  unmemorable
Nee  -  Nee
Asses  -  Asses
notate  -  notate
TRAIN - Epoch: 6  - Step: 3500  - Cost: 4.2755656  - CER: 0.14236319  - WER (step): 0.4375

Train examples pred vs real:
CrucT  -  CHURCH
Asolution  -  Absolution
rakeawvays  -  takeaways
Schrpate  -  Serrate
MANSLAUGHTER  -  MANSLAUGHTER
UNSETTLES  -  UNSETTLES
Gamester  -  Gamester
TRAIN - Epoch: 6  - Step: 4000  - Cost: 4.3741584  - CER: 0.1496136  - WER (step): 0.5625

Train examples pred vs real:
INGRAN  -  INGRAIN
codex  -  codex
significations  -  significations
Titenium  -  Titanium
ABDUL  -  ABDUL
WINDIER  -  WINDIER
Whimnsicadlly  -  Whimsically
TRAIN - Epoch: 6  - Step: 4500  - Cost: 4.3591356  - CER: 0.14883949  - WER (step): 0.3125

Train examples pred vs real:
UNEASIER  -  UNEASIER
Waldos  -  Waldos
Painted  -  Painted
Determination  -  Determination
koctuntiog  -  Hoodwinking
Runs  -  Runs
mosSIpIG  -  Gossiping
TRAIN - Epoch: 6  - Step: 5000  - Cost: 4.38048  - CER: 0.1496348  - WER (step): 0.6875

Train examples pred vs real:
TIRAQI  -  IRAQI
NUNE  -  NUNEZ
CORISTIANS  -  CHRISTIANS
Daring  -  Daring
Jamest  -  Tamest
hoardled  -  hoarded
imPrudence  -  imprudence
TRAIN - Epoch: 6  - Step: 5500  - Cost: 4.4238253  - CER: 0.1482385  - WER (step): 0.3125

Train examples pred vs real:
matches  -  matches
Dentar  -  Dental
bollard  -  bollard
VICTIMS  -  VICTIMS
decorated  -  decorated
CODPIECE  -  CODPIECE
wINCING  -  WINCING
TRAIN - Epoch: 6  - Step: 6000  - Cost: 4.3814135  - CER: 0.150983  - WER (step): 0.5

Train examples pred vs real:
fatlock  -  fetlock
Gladdest  -  Gladdest
PaLmSTrY  -  PALMISTRY
utewarmly  -  lukewarmly
REPRESINT  -  REPRESENT
foolproct  -  foolproof
Cala  -  Cala
TRAIN - Epoch: 6  - Step: 6500  - Cost: 4.3588986  - CER: 0.14936475  - WER (step): 0.25

Train examples pred vs real:
Hurding  -  Hurdling
unaltered  -  unaltered
Protection  -  Protection
algae  -  algae
etlAisy  -  outlandishly
PENSIONS  -  PENSIONS
Advantaged  -  Advantaged
TRAIN - Epoch: 6  - Step: 7000  - Cost: 4.372674  - CER: 0.14858654  - WER (step): 0.5

Train examples pred vs real:
Squcakines  -  squeakiness
Pomposity  -  Pomposity
Eilbustering  -  Filibustering
chiropractor  -  chiropractor
lover  -  louver
Manhar  -  Manhar
GLMEBT  -  GLUMMEST
TRAIN - Epoch: 6  - Step: 7500  - Cost: 4.3564982  - CER: 0.14924225  - WER (step): 0.5625

Train examples pred vs real:
BLINDER  -  BLINDER
TABOOED  -  TABOOED
parallelogram  -  parallelogram
marmoread  -  Marmoreal
RANCHInG  -  Ranching
Ride  -  Ride
counselling  -  counselling
TRAIN - Epoch: 6  - Step: 8500  - Cost: 4.4574914  - CER: 0.15050185  - WER (step): 0.4375

Train examples pred vs real:
oilcioth  -  Oilcloth
seped  -  Steeped
RIZAL  -  RIZAL
Ratbos  -  Bathos
DEGENERATED  -  DEGENERATED
Players  -  Players
posSeR  -  dosser
TRAIN - Epoch: 6  - Step: 9000  - Cost: 4.465163  - CER: 0.15072708  - WER (step): 0.5

Train examples pred vs real:
Mmonophonic  -  monophonic
AMiITY  -  AMITY
biker  -  biker
Disoppeurunce  -  Disappearance
Robuster  -  Robuster
FRETS  -  FRETS
RAInING  -  RAINING
Testing epoch: 6
TEST - Cost: 6.0577497  - CER: 0.2005625  - WER: 0.54833984375

Test examples pred vs real:
Alete  -  Mute
Overshot  -  Overshot
biocheuists  -  biochemists
Overwarked  -  Overworked
Fiftieth  -  Fiftieth
halogens  -  halogens
Rdet  -  Labials
capitalize  -  capitalize
Atonal  -  Atonal
AWAKENING  -  AWAKENING
Model saved in file: /home/ubuntu/data/oxford_syntetic_text/models/test03/model-6
Training epoch: 7
TRAIN - Epoch: 7  - Step: 500  - Cost: 3.4662578  - CER: 0.12058305  - WER (step): 0.3125

Train examples pred vs real:
SyllABLE  -  Syllable
BLudERbUS  -  Blunderbuss
SHADES  -  SHADES
guatemalans  -  guatemalans
ISTRUMENIS  -  INSTRUMENTS
faff  -  faff
Eections  -  Ejections
TRAIN - Epoch: 7  - Step: 1000  - Cost: 3.4775496  - CER: 0.121187255  - WER (step): 0.3125

Train examples pred vs real:
UNSNAP  -  UNSNAP
CB  -  CB
locations  -  locations
Spire  -  Spire
antertaining  -  entertaining
meRNEReeS  -  DOCUMENTARIES
recting  -  racking
TRAIN - Epoch: 7  - Step: 1500  - Cost: 3.6600277  - CER: 0.12443053  - WER (step): 0.1875

Train examples pred vs real:
SPELEOLOGICAL  -  SPELEOLOGICAL
charles  -  charles
paradise  -  paradise
Medcafey  -  Mcdaniel
Dispiriting  -  Dispiriting
deputations  -  deputations
DOT  -  DOT
TRAIN - Epoch: 7  - Step: 2000  - Cost: 3.6945307  - CER: 0.12818924  - WER (step): 0.375

Train examples pred vs real:
INLGbitTy  -  INELIGIBILITY
DEFUSED  -  DIFFUSED
Johnnie  -  Johnnie
Overpay  -  Overpay
distinctively  -  distinctively
IEALDEE  -  WILDFIRE
klee  -  klee
TRAIN - Epoch: 7  - Step: 2500  - Cost: 3.675247  - CER: 0.12721229  - WER (step): 0.5625

Train examples pred vs real:
SEPARATE  -  SEPARATE
Sconpions  -  Scorpions
TransceNdenE  -  Transcendence
Speleological  -  Speleological
fOONELINESS  -  LOVELINESS
sundanely  -  Mundanely
RenEWVABLE  -  Renewable
TRAIN - Epoch: 7  - Step: 3000  - Cost: 3.7182398  - CER: 0.13064092  - WER (step): 0.375

Train examples pred vs real:
DISABUSE  -  DISABUSE
michelle  -  michelle
gOAnO  -  groans
thinnest  -  thinnest
bureaucracies  -  bureaucracies
FLIER  -  FLIER
Jamie  -  Jamie
TRAIN - Epoch: 7  - Step: 3500  - Cost: 3.8413043  - CER: 0.13243225  - WER (step): 0.1875

Train examples pred vs real:
appeases  -  appeases
bernd  -  bernd
DEPOLARIZATION  -  DEPOLARIZATION
Fleece  -  Fleece
Skids  -  Skids
Fistful  -  Fistful
suITcase  -  Suitcase
TRAIN - Epoch: 7  - Step: 4000  - Cost: 3.8783276  - CER: 0.13643527  - WER (step): 0.4375

Train examples pred vs real:
Herakles  -  Herakles
Sates  -  Ingrates
mscalling  -  miscalling
Purerr  -  Purer
BERNSTEIN  -  BERNSTEIN
Ianiost  -  loamiest
Dedications  -  Dedications
TRAIN - Epoch: 7  - Step: 4500  - Cost: 3.8436744  - CER: 0.13265616  - WER (step): 0.4375

Train examples pred vs real:
proms  -  proms
AssocIaIiVe  -  Associative
RECONSIGNING  -  RECONSIGNING
AMBIC  -  IAMBIC
DIANONDS  -  DIAMONDS
partieipate  -  participate
arrangers  -  arrangers
TRAIN - Epoch: 7  - Step: 5000  - Cost: 3.7686284  - CER: 0.13048752  - WER (step): 0.625

Train examples pred vs real:
Rents  -  Rents
TUCkIng  -  tucking
MMarcia  -  Marcia
HARNESSES  -  HARNESSES
CAdENCED  -  Cadenced
Strike  -  strike
DRAFTSMOMEG  -  DRAFTSWOMEN
TRAIN - Epoch: 7  - Step: 5500  - Cost: 3.886708  - CER: 0.13331464  - WER (step): 0.25

Train examples pred vs real:
sterilizer  -  sterilizer
Cufifences  -  Confidences
MUSES  -  MUSES
apostasies  -  apostasies
IaSACHUSETTS  -  MASSACHUSETTS
Unfitted  -  Unfitted
Miasmc  -  Miasma
TRAIN - Epoch: 7  - Step: 6000  - Cost: 3.8941245  - CER: 0.13217601  - WER (step): 0.5

Train examples pred vs real:
Mencius  -  Mencius
mARCOIC  -  NARCOTIC
Figs  -  Figs
indirectly  -  indirectly
Groupers  -  Groupers
CREDENCS  -  CREDENCE
Suspension  -  Suspension
TRAIN - Epoch: 7  - Step: 6500  - Cost: 3.821488  - CER: 0.1331894  - WER (step): 0.625

Train examples pred vs real:
WIRED  -  WIRED
BlaNDO  -  Blando
CumBeriNG  -  cumbering
Loyally  -  Loyally
Denying  -  Denying
catskilils  -  catskills
Equlibrium  -  Equilibrium
TRAIN - Epoch: 7  - Step: 7000  - Cost: 3.9405348  - CER: 0.13586591  - WER (step): 0.4375

Train examples pred vs real:
GOVERNESSES  -  GOVERNESSES
BOASTED  -  BOASTED
saturation  -  saturation
SHAGgINg  -  SHAGGING
NoTEPAPeR  -  Notepaper
Csian  -  censurable
CRIBBING  -  CRIBBING
TRAIN - Epoch: 7  - Step: 7500  - Cost: 3.808667  - CER: 0.13109575  - WER (step): 0.3125

Train examples pred vs real:
PRINCIPLES  -  PRINCIPLES
chlloroforms  -  chloroforms
skimnier  -  skinnier
dleferred  -  deferred
murnikdier  -  murkier
solicits  -  solicits
CONTRAVENTION  -  CONTRAVENTION
TRAIN - Epoch: 7  - Step: 8000  - Cost: 3.9027636  - CER: 0.13269821  - WER (step): 0.5

Train examples pred vs real:
disclaims  -  disclaims
ROOTs  -  ROOTS
POMEGRANATES  -  POMEGRANATES
Inequaliies  -  Inequalities
womanlier  -  womanlier
DEAR  -  SODDY
Aoopig  -  Hooping
TRAIN - Epoch: 7  - Step: 8500  - Cost: 3.817919  - CER: 0.13408868  - WER (step): 0.5

Train examples pred vs real:
Shiner  -  Shiner
Mothbaled  -  Mothballed
vitreous  -  vitreous
DHLECTICS  -  DIALECTICS
BUCKSAWS  -  BUCKSAWS
gainers  -  gainers
trtangular  -  triangular
TRAIN - Epoch: 7  - Step: 9000  - Cost: 3.883566  - CER: 0.13359202  - WER (step): 0.5

Train examples pred vs real:
garnishment  -  garnishment
OUTSIZE  -  OUTSIZE
DUCKBILLS  -  DUCKBILLS
Egomania  -  Egomania
ADUNCT  -  ADJUNCT
Pariodontal  -  Periodontal
NSTRUCTIONS  -  INSTRUCTIONS
Testing epoch: 7
TEST - Cost: 5.795496  - CER: 0.17465673  - WER: 0.5029296875

Test examples pred vs real:
Engravers  -  Engravers
urbana  -  urbana
Invoice  -  Invoice
opulently  -  opulently
boutmentalization  -  departmentalization
PRCVE  -  PROVE
HUTHORITY  -  authority
faye  -  faye
UNSTOP  -  UNSTOP
sharpens  -  sharpens
Model saved in file: /home/ubuntu/data/oxford_syntetic_text/models/test03/model-7

In [ ]: