In [1]:
import tensorflow as tf
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.gpu_options.per_process_gpu_memory_fraction=1

import time
import collections
import scipy
from corpus import Corpus
import numpy as np
from random import randint, random


def xavier_init(fan_in, fan_out, constant = 1):
    low = -constant * np.sqrt(6.0 / (fan_in + fan_out))
    high = constant * np.sqrt(6.0 / (fan_in + fan_out))
    return tf.random_uniform((fan_in, fan_out),
                             minval = low, maxval = high,
                             dtype = tf.float32)

corp_path='/home/velkey/corp/webkorpusz.wpl'
corp=Corpus(corpus_path=corp_path,language="Hun",size=100000,encoding_len=10)
all_features=corp.featurize_data_charlevel_onehot(corp.hun_lower)
train=all_features[0:int(len(all_features)*0.8)]
test=all_features[int(len(all_features)*0.8):len(all_features)]
x_train = train.reshape((len(train), np.prod(train.shape[1:])))
x_test = test.reshape((len(test), np.prod(test.shape[1:])))
print(x_train.shape)

class Timer:
    def __init__(self):
        self.timers=dict()
    def add(self,str):
        self.timers[str]=time.time()
    def get(self,str):
        return time.time()-self.timers[str]
timer=Timer()

def logtsv(array):
    string=""
    for item in array:
        string+=str(item)
        string+="\t"
    string=string[0:len(string)-1]+"\n"
    with open("train.tsv", "a") as myfile:
        myfile.write(string)


Corpus initalized, fields: ['unique', 'lower', 'hun_lower', 'lower_unique', 'hun_lower_unique'] 
Unique words:  25545
(60152, 360)

In [2]:
class Autoencoder_ffnn():
    def __init__(self,experiment,tf_session, inputdim,layerlist,encode_index,optimizer = tf.train.AdamOptimizer(),nonlinear=tf.nn.relu):
        """
        """
        self.experiment=experiment
        
        self.layerlist=layerlist
        self.layernum=len(layerlist)
        self.n_input = inputdim
        self.encode_index=encode_index
        self.display_step=10

        network_weights = self._initialize_weights()
        self.weights = network_weights  

        self._create_layers(nonlinear)

        # cost
        self.cost =  0.5*tf.reduce_sum(tf.pow(tf.subtract(self.reconstruction, self.x), 2.0))
        self.optimizer = optimizer.minimize(self.cost)

        init = tf.global_variables_initializer()
        self.sess = tf_session
        self.sess.run(init)
        
        self.size=0
        nums=[self.n_input,layerlist]
        for i in range(1,len(nums)):
            self.size+=4*layerlist[i]*layerlist[i-1]
        

    def _initialize_weights(self):
        all_weights = dict()
        
        all_weights['w1']=tf.Variable(xavier_init(self.n_input, self.layerlist[0]))
        all_weights['b1'] = tf.Variable(tf.random_normal([self.layerlist[0]], dtype=tf.float32))
        
        for i in range(1,self.layernum):
            all_weights['w'+str(i+1)]=tf.Variable(xavier_init(self.layerlist[i-1], self.layerlist[i]))
            all_weights['b'+str(i+1)] = tf.Variable(tf.random_normal([self.layerlist[i]], dtype=tf.float32))

        return all_weights
    
    def _create_layers(self,nonlinearity=tf.nn.relu):
        """
        """
        self.x = tf.placeholder(tf.float32, [None, self.n_input])
        layer=nonlinearity(tf.add(tf.matmul(self.x, self.weights['w1']), self.weights['b1']))

        for i in range(1,self.layernum-1):
            if i==self.encode_index:
                self.encoded=layer
            layer=nonlinearity(tf.add(tf.matmul(layer, self.weights['w'+str(i+1)]), self.weights['b'+str(i+1)]))
            
        self.reconstruction=tf.add(tf.matmul(layer, self.weights['w'+str(self.layernum)]), self.weights['b'+str(self.layernum)])

    def partial_fit(self, X):
        cost, opt = self.sess.run((self.cost, self.optimizer), feed_dict={self.x: X})
        return cost

    def calc_total_cost(self, X):
        return self.sess.run(self.cost, feed_dict = {self.x: X})

    def encode(self, X):
        return self.sess.run(self.encoded, feed_dict={self.x: X})

    def decode(self, encoded = None):
        if encoded is None:
            encoded = np.random.normal(size=self.weights["b1"])
        return self.sess.run(self.reconstruction, feed_dict={self.encoded: encoded})

    def reconstruct(self, X):
        return self.sess.run(self.reconstruction, feed_dict={self.x: X})
    
    def train(self,X_train,X_test,batch_size,max_epochs):
        breaker=False
        testlog=collections.deque(maxlen=30)
        
        for epoch in range(max_epochs):
            avg_cost = 0.
            total_batch = int(len(X_train) / batch_size)
            # Loop over all batches
            for i in range(total_batch):
                batch_xs = self.get_random_block_from_data(X_train, batch_size)
                cost = self.partial_fit(batch_xs)
                avg_cost += cost/ batch_size
                
                #early stop
                testlog.append(self.calc_total_cost(X_test))
                for i in range(8):
                    if len(testlog)>20 and testlog[-i]>=testlog[-10-i]*0.995:
                        breaker=True
                    else:
                        breaker=False
                if breaker:
                    print("STOPPED OVERFIT")
                    break
            # Display logs per epoch step
            if epoch % self.display_step == 0:
                print ("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
            if breaker:
                break
                
    def get_random_block_from_data(self,data, batch_size):
        start_index = np.random.randint(0, len(data) - batch_size)
        return data[start_index:(start_index + batch_size)]

In [3]:
class experiment:
    
    def __init__(self,out_dim,minw,maxw,encoded_width,layermin=1,layermax=5):
        self.len=randint(layermin,layermax)*2
        self.weights=[randint(minw,maxw) for n in range(self.len)]
        self.weights[int(self.len/2-1)]=encoded_width
        self.weights[-1]=out_dim
        
    def set(self,weights):
        self.len=len(weights)
        self.weights=weights
        
class evolution:
    
    def __init__(self,x_train,x_test,population_size,encoder,dim,repeat_runs=2,epoch=30,batch=512,disp_freq=1):
        """
        """
        self.encoded_width=encoder
        self.dim=dim
        self.min=10
        self.max=200
        self.repeat_runs=repeat_runs
        
        self.training_epochs = epoch
        self.batch_size = batch
        self.display_step = disp_freq
        self.x_train=x_train
        self.x_test=x_test
        
        self.learnrate=0.001
        self.batchsize=512
        self.maxepoch=100
        self.optimizer=tf.train.AdamOptimizer(learning_rate = self.learnrate)
        
        
        self.retain_p=0.2
        self.random_select_p=0.05
        self.mutate_p=0.1
        self.mutate_len_p=0.1
        self.mutate_width_p=0.4
        self.population_size=population_size
        self.population=self.gen_population(population_size)
        
        self.target=0
        

        
    def ekv(self,e):
        return e
       
    
    def gen_population(self,count):
        """
        count: the number of individuals in the population
        """
        self.sess = tf.Session(config=config)
        
        population=[]
        for x in range(count):
            exp=experiment(out_dim=self.dim,minw=self.min,maxw=self.max,encoded_width=self.encoded_width)
            population.append(Autoencoder_ffnn(experiment=exp,tf_session=self.sess,inputdim=self.dim,layerlist=exp.weights,
                                               encode_index=int(exp.len/2-1),
                                               optimizer = self.optimizer))
        return population
    
    def new_generation(self,experiments):
        """
        
        """
        self.sess.close()
        self.sess = tf.Session(config=config)
        
        print("New generation is being created.")
        
        population=[]
        for x in range(len(experiments)):
    
            population.append(Autoencoder_ffnn(experiment=experiments[x],tf_session=self.sess,inputdim=self.dim,layerlist=experiments[x].weights,
                                               encode_index=int(experiments[x].len/2-1),
                                               optimizer = self.optimizer))
        return population
    
    def train_population(self):
        self.population_fitness=[]
        for individual in self.population:
            sum_cost=0
            for i in range(self.repeat_runs): #average the model's fitness
                individual.train(self.x_train,x_test,self.batchsize,self.maxepoch)
                sum_cost+=individual.calc_total_cost(self.x_test)
            self.population_fitness.append(sum_cost/self.repeat_runs)
        return self.population_fitness
    

    def grade(self):
        'Find average fitness for a population.'
        summed = sum(self.population_fitness)
        self.graded= summed / (self.population_size * 1.0)
        return self.graded
    
    def mutate(self,group):
        for individual in group:
            if self.mutate_p > random():
                if self.mutate_len_p>random():
                    if random()<0.5:
                        individual.len+=2
                        individual.weights=[randint(self.min, self.max),randint(self.min, self.max)]+individual.weights
                        individual.weights[int(individual.len/2-1)]=self.encoded_width
                    else :
                        if individual.len!=2:
                            individual.len-=2
                            individual.weights=individual.weights[2:]
                            individual.weights[int(individual.len/2-1)]=self.encoded_width
                if self.mutate_width_p>random():
                    pos_to_mutate = randint(0,individual.len-2)
                    if pos_to_mutate!=int(individual.len/2-1):
                        if 0.5>random():
                            individual.weights[pos_to_mutate] +=20
                        else:
                            individual.weights[pos_to_mutate] -=20
        self.mutants=group
        return group

    def evolve(self):
        self.train_population()
        
        #select top individs
        graded = [(self.population_fitness[x], self.population[x].experiment) for x in range(self.population_size)]
        graded = [ x[1] for x in sorted(graded)]
        retain_length = int(len(graded)*self.retain_p)
        parents = graded[:retain_length]
        
        
        # randomly add other individuals to
        # promote genetic diversity
        for individual in graded[retain_length:]:
            if self.random_select_p > random():
                parents.append(individual)
        
        # mutate 
        mutants=self.mutate(parents)
       
        # crossover parents to create children (aka sex)
        mutants_length = len(mutants)
        desired_length = self.population_size - mutants_length
        children = []
        while len(children) < desired_length:
            male = randint(0, mutants_length-1)
            female = randint(0, mutants_length-1)
            if male != female:
                male = mutants[male]
                female = mutants[female]
                
                child=experiment(out_dim=self.dim,minw=self.min,maxw=self.max,encoded_width=self.encoded_width)
                weights = male.weights[:int(male.len/2-1)]+female.weights[int(female.len/2-1):]
                child.set(weights)
                children.append(child)
                
        mutants.extend(children)
        
        self.population=self.new_generation(mutants)
        return mutants

In [2]:
x=evolution(x_train,x_test,80,100,360)


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-2-c02a27733344> in <module>()
----> 1 x=evolution(x_train,x_test,80,100,360)

NameError: name 'x_train' is not defined

In [ ]:
for i in range(30):
    for i in range(x.population_size):
        print(x.population[i].experiment.weights)
        
    x.evolve()
    print(x.grade())


[187, 128, 190, 100, 119, 177, 103, 360]
[100, 360]
[100, 360]
[78, 28, 185, 163, 100, 79, 49, 53, 126, 360]
[108, 162, 57, 144, 100, 64, 169, 52, 135, 360]
[196, 173, 10, 100, 14, 80, 180, 360]
[32, 105, 74, 191, 100, 27, 184, 34, 177, 360]
[35, 74, 38, 18, 100, 61, 44, 171, 186, 360]
[100, 360]
[100, 360]
[173, 83, 70, 100, 149, 48, 28, 360]
[68, 161, 39, 100, 14, 89, 14, 360]
[79, 57, 91, 100, 139, 12, 136, 360]
[15, 100, 111, 360]
[100, 360]
[21, 100, 157, 360]
[192, 115, 100, 62, 74, 360]
[101, 70, 126, 168, 100, 15, 152, 117, 113, 360]
[66, 75, 86, 71, 100, 89, 11, 167, 17, 360]
[146, 21, 74, 100, 18, 86, 62, 360]
[65, 75, 149, 100, 97, 153, 200, 360]
[100, 360]
[68, 100, 153, 360]
[160, 123, 151, 182, 100, 52, 158, 184, 136, 360]
[100, 360]
[27, 100, 61, 360]
[32, 56, 118, 120, 100, 56, 71, 77, 28, 360]
[100, 360]
[11, 69, 33, 183, 100, 187, 163, 124, 93, 360]
[23, 164, 100, 91, 44, 360]
[66, 101, 100, 29, 16, 360]
[47, 198, 162, 147, 100, 165, 77, 124, 94, 360]
[164, 89, 187, 100, 194, 146, 35, 360]
[100, 360]
[100, 360]
[166, 24, 33, 100, 45, 99, 91, 360]
[121, 67, 100, 154, 123, 360]
[140, 80, 19, 100, 159, 165, 57, 360]
[200, 100, 141, 360]
[47, 26, 142, 100, 113, 134, 48, 360]
[185, 124, 124, 100, 83, 169, 159, 360]
[40, 159, 195, 193, 100, 28, 48, 83, 14, 360]
[100, 360]
[27, 108, 100, 135, 76, 360]
[185, 20, 61, 114, 100, 32, 104, 170, 153, 360]
[100, 360]
[183, 100, 12, 360]
[70, 39, 194, 100, 159, 128, 79, 360]
[100, 360]
[193, 75, 186, 100, 83, 186, 17, 360]
[170, 35, 52, 100, 40, 149, 112, 360]
[70, 64, 100, 24, 181, 360]
[100, 360]
[145, 35, 168, 100, 160, 37, 110, 360]
[138, 186, 113, 100, 169, 190, 10, 360]
[22, 181, 147, 100, 195, 14, 53, 360]
[154, 166, 100, 166, 141, 360]
[64, 100, 170, 360]
[108, 52, 85, 100, 93, 76, 187, 360]
[100, 360]
[100, 360]
[19, 120, 100, 75, 150, 360]
[86, 61, 100, 55, 171, 360]
[148, 67, 146, 170, 100, 175, 188, 169, 130, 360]
[99, 101, 100, 66, 26, 360]
[171, 77, 36, 180, 100, 23, 130, 71, 95, 360]
[196, 100, 88, 360]
[103, 89, 57, 100, 134, 193, 120, 360]
[105, 198, 64, 131, 100, 139, 47, 43, 134, 360]
[59, 92, 123, 100, 192, 77, 168, 360]
[84, 83, 100, 152, 148, 360]
[100, 360]
[21, 50, 83, 100, 140, 104, 200, 360]
[100, 360]
[168, 137, 19, 19, 100, 125, 143, 196, 27, 360]
[76, 195, 61, 64, 100, 171, 49, 35, 23, 360]
[31, 100, 128, 360]
[37, 100, 67, 360]
[100, 360]
[165, 91, 33, 100, 16, 37, 192, 360]
STOPPED OVERFIT
Epoch: 0001 cost= 2367.262086391
STOPPED OVERFIT
Epoch: 0001 cost= 101.842037201
Epoch: 0001 cost= 2222.901229143
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 40.376818180
Epoch: 0001 cost= 1498.755828857
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 47.165527582
STOPPED OVERFIT
Epoch: 0001 cost= 1164.144130468
STOPPED OVERFIT
Epoch: 0001 cost= 62.841074944
STOPPED OVERFIT
Epoch: 0001 cost= 1138.764196157
STOPPED OVERFIT
Epoch: 0001 cost= 63.628291130
STOPPED OVERFIT
Epoch: 0001 cost= 1047.222422123
STOPPED OVERFIT
Epoch: 0001 cost= 63.697357893
STOPPED OVERFIT
Epoch: 0001 cost= 1028.531661749
STOPPED OVERFIT
Epoch: 0001 cost= 64.081914663
STOPPED OVERFIT
Epoch: 0001 cost= 917.734226465
STOPPED OVERFIT
Epoch: 0001 cost= 63.386874199
Epoch: 0001 cost= 1460.549552441
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 33.388400674
Epoch: 0001 cost= 1324.388271570
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 32.825429440
STOPPED OVERFIT
Epoch: 0001 cost= 1527.400264025
STOPPED OVERFIT
Epoch: 0001 cost= 62.744399071
STOPPED OVERFIT
Epoch: 0001 cost= 1705.818437576
STOPPED OVERFIT
Epoch: 0001 cost= 63.148641348
STOPPED OVERFIT
Epoch: 0001 cost= 1056.504188776
STOPPED OVERFIT
Epoch: 0001 cost= 63.219482183
Epoch: 0001 cost= 1293.820358515
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 55.078820705
Epoch: 0001 cost= 1365.112502337
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 35.295630693
Epoch: 0001 cost= 1173.786051512
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 50.632376432
STOPPED OVERFIT
Epoch: 0001 cost= 1286.220030546
STOPPED OVERFIT
Epoch: 0001 cost= 63.977319717
STOPPED OVERFIT
Epoch: 0001 cost= 923.471908092
STOPPED OVERFIT
Epoch: 0001 cost= 63.415194988
STOPPED OVERFIT
Epoch: 0001 cost= 1925.670148611
STOPPED OVERFIT
Epoch: 0001 cost= 64.384858847
Epoch: 0001 cost= 1405.734410763
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 52.101088762
STOPPED OVERFIT
Epoch: 0001 cost= 950.586648703
STOPPED OVERFIT
Epoch: 0001 cost= 241.662052870
Epoch: 0001 cost= 1422.983196497
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 74.364209652
Epoch: 0001 cost= 1162.768792391
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 51.803593397
STOPPED OVERFIT
Epoch: 0001 cost= 937.275418758
STOPPED OVERFIT
Epoch: 0001 cost= 63.878551483
Epoch: 0001 cost= 1398.300847173
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 75.077825069
Epoch: 0001 cost= 1562.781561613
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 49.653299093
STOPPED OVERFIT
Epoch: 0001 cost= 1792.149374723
STOPPED OVERFIT
Epoch: 0001 cost= 63.539009094
Epoch: 0001 cost= 1398.235465527
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 37.183797956
STOPPED OVERFIT
Epoch: 0001 cost= 952.833792448
STOPPED OVERFIT
Epoch: 0001 cost= 64.410020828
STOPPED OVERFIT
Epoch: 0001 cost= 1274.397703171
STOPPED OVERFIT
Epoch: 0001 cost= 62.366560698
STOPPED OVERFIT
Epoch: 0001 cost= 1716.560683012
STOPPED OVERFIT
Epoch: 0001 cost= 62.589920044
Epoch: 0001 cost= 1181.869242191
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 50.092033863
STOPPED OVERFIT
Epoch: 0001 cost= 1488.809853315
STOPPED OVERFIT
Epoch: 0001 cost= 96.514636755
Epoch: 0001 cost= 1470.111584425
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 36.471978426
Epoch: 0001 cost= 1419.096791506
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 32.649675846
STOPPED OVERFIT
Epoch: 0001 cost= 1096.556561470
STOPPED OVERFIT
Epoch: 0001 cost= 62.005381584
Epoch: 0001 cost= 1082.553012371
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 50.536731720
STOPPED OVERFIT
Epoch: 0001 cost= 1269.090770483
STOPPED OVERFIT
Epoch: 0001 cost= 62.436159611
Epoch: 0001 cost= 1131.218850613
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 46.880603075
STOPPED OVERFIT
Epoch: 0001 cost= 1291.491787434
STOPPED OVERFIT
Epoch: 0001 cost= 62.446233749
Epoch: 0001 cost= 1055.012787104
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 50.029641628
STOPPED OVERFIT
Epoch: 0001 cost= 2735.654777765
STOPPED OVERFIT
Epoch: 0001 cost= 63.488662720
Epoch: 0001 cost= 1364.708093286
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 55.044153094
STOPPED OVERFIT
Epoch: 0001 cost= 1363.716374636
STOPPED OVERFIT
Epoch: 0001 cost= 62.752167940
STOPPED OVERFIT
Epoch: 0001 cost= 1040.231750011
Epoch: 0001 cost= 321.822274685
STOPPED OVERFIT
Epoch: 0001 cost= 1470.221117735
STOPPED OVERFIT
Epoch: 0001 cost= 212.524214029
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 2149.850048542
STOPPED OVERFIT
Epoch: 0001 cost= 63.002091408
STOPPED OVERFIT
Epoch: 0001 cost= 1108.268388510
STOPPED OVERFIT
Epoch: 0001 cost= 83.522187233
Epoch: 0001 cost= 1405.568808079
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 36.403264523
STOPPED OVERFIT
Epoch: 0001 cost= 2194.189058304
STOPPED OVERFIT
Epoch: 0001 cost= 63.292116404
STOPPED OVERFIT
Epoch: 0001 cost= 1069.123739243
STOPPED OVERFIT
Epoch: 0001 cost= 63.396530390
STOPPED OVERFIT
Epoch: 0001 cost= 1050.660199642
STOPPED OVERFIT
Epoch: 0001 cost= 61.781189680
Epoch: 0001 cost= 1558.359797716
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 55.356663346
STOPPED OVERFIT
Epoch: 0001 cost= 1073.194339752
STOPPED OVERFIT
Epoch: 0001 cost= 63.579840183
STOPPED OVERFIT
Epoch: 0001 cost= 2004.780941010
STOPPED OVERFIT
Epoch: 0001 cost= 62.127760172
STOPPED OVERFIT
Epoch: 0001 cost= 1191.722557783
STOPPED OVERFIT
Epoch: 0001 cost= 63.423276424
Epoch: 0001 cost= 1048.363401175
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 50.710232258
Epoch: 0001 cost= 1117.788092375
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 52.500295639
Epoch: 0001 cost= 1042.413454294
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 46.281381369
Epoch: 0001 cost= 1400.448939085
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 35.300086498
Epoch: 0001 cost= 1390.210048437
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 69.352099299
STOPPED OVERFIT
Epoch: 0001 cost= 1083.973036289
STOPPED OVERFIT
Epoch: 0001 cost= 63.106489420
STOPPED OVERFIT
Epoch: 0001 cost= 1066.397331953
STOPPED OVERFIT
Epoch: 0001 cost= 270.169881582
STOPPED OVERFIT
Epoch: 0001 cost= 1091.915199280
STOPPED OVERFIT
Epoch: 0001 cost= 72.486382961
STOPPED OVERFIT
Epoch: 0001 cost= 1464.235874653
STOPPED OVERFIT
Epoch: 0001 cost= 61.085835695
STOPPED OVERFIT
Epoch: 0001 cost= 1165.152892828
STOPPED OVERFIT
Epoch: 0001 cost= 63.345845461
Epoch: 0001 cost= 1448.831675291
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 44.473892689
STOPPED OVERFIT
Epoch: 0001 cost= 1090.980935097
STOPPED OVERFIT
Epoch: 0001 cost= 62.728454828
STOPPED OVERFIT
Epoch: 0001 cost= 1050.045458555
STOPPED OVERFIT
Epoch: 0001 cost= 63.157920361
STOPPED OVERFIT
Epoch: 0001 cost= 923.975401402
STOPPED OVERFIT
Epoch: 0001 cost= 63.417997122
STOPPED OVERFIT
Epoch: 0001 cost= 1120.978978872
Epoch: 0001 cost= 320.909000158
STOPPED OVERFIT
Epoch: 0001 cost= 1256.721629739
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 37.837784767
STOPPED OVERFIT
Epoch: 0001 cost= 896.649611235
STOPPED OVERFIT
Epoch: 0001 cost= 62.781502485
Epoch: 0001 cost= 1312.412507415
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 43.378741741
STOPPED OVERFIT
Epoch: 0001 cost= 1588.558032036
STOPPED OVERFIT
Epoch: 0001 cost= 62.867079258
STOPPED OVERFIT
Epoch: 0001 cost= 1767.820912123
STOPPED OVERFIT
Epoch: 0001 cost= 63.510520935
Epoch: 0001 cost= 1282.520243406
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 49.318700075
STOPPED OVERFIT
Epoch: 0001 cost= 1371.395009279
Epoch: 0001 cost= 329.428046465
STOPPED OVERFIT
Epoch: 0001 cost= 1390.772536278
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 55.140363574
STOPPED OVERFIT
Epoch: 0001 cost= 987.376025200
STOPPED OVERFIT
Epoch: 0001 cost= 64.344998360
New generation is being created.
38565.0582764
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[31, 100, 128, 360]
[145, 35, 168, 100, 160, 37, 110, 360]
[100, 360]
[100, 360]
[31, 100, 360]
[100, 360]
[100, 360]
[100, 360]
[145, 35, 168, 100, 360]
[100, 360]
[100, 360]
[31, 100, 360]
[100, 360]
[100, 360]
[100, 128, 360]
[100, 360]
[100, 360]
[100, 360]
[145, 35, 168, 100, 360]
[100, 128, 360]
[100, 128, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[145, 35, 168, 100, 360]
[100, 360]
[100, 360]
[145, 35, 168, 100, 360]
[100, 360]
[145, 35, 168, 100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[31, 100, 360]
[100, 360]
[100, 360]
[100, 360]
[145, 35, 168, 100, 360]
[100, 360]
[31, 100, 360]
[100, 128, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[31, 100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 160, 37, 110, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 360]
[100, 160, 37, 110, 360]
Epoch: 0001 cost= 3896.100538254
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 33.468525052
Epoch: 0001 cost= 1486.077942848
STOPPED OVERFIT
Epoch: 0001 cost= 187.889020324
STOPPED OVERFIT
Epoch: 0001 cost= 1457.894551754
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 38.495630145
Epoch: 0001 cost= 1530.659444571
STOPPED OVERFIT
STOPPED OVERFIT
Epoch: 0001 cost= 50.047795057
Epoch: 0001 cost= 1496.906819820
STOPPED OVERFIT

In [ ]:


In [ ]:


In [1]:
from genetic import evolution

In [ ]:


In [ ]:


In [ ]: