In [1]:
# import the Keras model and layer classes, plus NumPy for the data
from keras.models import Sequential
from keras.layers import Dense
import numpy as np
Using TensorFlow backend.
In [2]:
# fix the NumPy random seed for reproducibility
np.random.seed(1)
In [5]:
# load the dataset: each row is an (input feature, target) pair
dataset = np.genfromtxt('challenge_dataset.txt', delimiter=',')
dataset
Out[5]:
array([[ 6.1101 , 17.592 ],
[ 5.5277 , 9.1302 ],
[ 8.5186 , 13.662 ],
[ 7.0032 , 11.854 ],
[ 5.8598 , 6.8233 ],
[ 8.3829 , 11.886 ],
[ 7.4764 , 4.3483 ],
[ 8.5781 , 12. ],
[ 6.4862 , 6.5987 ],
[ 5.0546 , 3.8166 ],
[ 5.7107 , 3.2522 ],
[ 14.164 , 15.505 ],
[ 5.734 , 3.1551 ],
[ 8.4084 , 7.2258 ],
[ 5.6407 , 0.71618],
[ 5.3794 , 3.5129 ],
[ 6.3654 , 5.3048 ],
[ 5.1301 , 0.56077],
[ 6.4296 , 3.6518 ],
[ 7.0708 , 5.3893 ],
[ 6.1891 , 3.1386 ],
[ 20.27 , 21.767 ],
[ 5.4901 , 4.263 ],
[ 6.3261 , 5.1875 ],
[ 5.5649 , 3.0825 ],
[ 18.945 , 22.638 ],
[ 12.828 , 13.501 ],
[ 10.957 , 7.0467 ],
[ 13.176 , 14.692 ],
[ 22.203 , 24.147 ],
[ 5.2524 , -1.22 ],
[ 6.5894 , 5.9966 ],
[ 9.2482 , 12.134 ],
[ 5.8918 , 1.8495 ],
[ 8.2111 , 6.5426 ],
[ 7.9334 , 4.5623 ],
[ 8.0959 , 4.1164 ],
[ 5.6063 , 3.3928 ],
[ 12.836 , 10.117 ],
[ 6.3534 , 5.4974 ],
[ 5.4069 , 0.55657],
[ 6.8825 , 3.9115 ],
[ 11.708 , 5.3854 ],
[ 5.7737 , 2.4406 ],
[ 7.8247 , 6.7318 ],
[ 7.0931 , 1.0463 ],
[ 5.0702 , 5.1337 ],
[ 5.8014 , 1.844 ],
[ 11.7 , 8.0043 ],
[ 5.5416 , 1.0179 ],
[ 7.5402 , 6.7504 ],
[ 5.3077 , 1.8396 ],
[ 7.4239 , 4.2885 ],
[ 7.6031 , 4.9981 ],
[ 6.3328 , 1.4233 ],
[ 6.3589 , -1.4211 ],
[ 6.2742 , 2.4756 ],
[ 5.6397 , 4.6042 ],
[ 9.3102 , 3.9624 ],
[ 9.4536 , 5.4141 ],
[ 8.8254 , 5.1694 ],
[ 5.1793 , -0.74279],
[ 21.279 , 17.929 ],
[ 14.908 , 12.054 ],
[ 18.959 , 17.054 ],
[ 7.2182 , 4.8852 ],
[ 8.2951 , 5.7442 ],
[ 10.236 , 7.7754 ],
[ 5.4994 , 1.0173 ],
[ 20.341 , 20.992 ],
[ 10.136 , 6.6799 ],
[ 7.3345 , 4.0259 ],
[ 6.0062 , 1.2784 ],
[ 7.2259 , 3.3411 ],
[ 5.0269 , -2.6807 ],
[ 6.5479 , 0.29678],
[ 7.5386 , 3.8845 ],
[ 5.0365 , 5.7014 ],
[ 10.274 , 6.7526 ],
[ 5.1077 , 2.0576 ],
[ 5.7292 , 0.47953],
[ 5.1884 , 0.20421],
[ 6.3557 , 0.67861],
[ 9.7687 , 7.5435 ],
[ 6.5159 , 5.3436 ],
[ 8.5172 , 4.2415 ],
[ 9.1802 , 6.7981 ],
[ 6.002 , 0.92695],
[ 5.5204 , 0.152 ],
[ 5.0594 , 2.8214 ],
[ 5.7077 , 1.8451 ],
[ 7.6366 , 4.2959 ],
[ 5.8707 , 7.2029 ],
[ 5.3054 , 1.9869 ],
[ 8.2934 , 0.14454],
[ 13.394 , 9.0551 ],
[ 5.4369 , 0.61705]])
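The file holds 97 rows with a single input feature and a continuous target (some targets are negative, the largest is about 24), so this is a regression problem rather than a classification one. A quick scatter plot makes the roughly linear relationship visible; a minimal sketch, assuming matplotlib is available in the environment:

import matplotlib.pyplot as plt

# scatter the single input feature against the continuous target
plt.scatter(dataset[:, 0], dataset[:, 1], s=15)
plt.xlabel('input feature')
plt.ylabel('target')
plt.title('challenge_dataset.txt')
plt.show()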
In [6]:
# split the columns into the input feature (x) and the target (y)
x = dataset[:, 0]
y = dataset[:, 1]
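Keras treats model input as a 2-D array of shape (samples, features). The 1-D slices above worked in this run because there is only one feature, but reshaping them into explicit column vectors keeps the shapes unambiguous; an optional, minimal sketch:

# optional: turn the (97,) vectors into explicit (97, 1) column vectors
x = x.reshape(-1, 1)
y = y.reshape(-1, 1)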
In [8]:
# build the model: two hidden ReLU layers and a single sigmoid output unit
model = Sequential()
model.add(Dense(10, input_dim=1, activation='relu'))
model.add(Dense(7, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
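Note that the final Dense layer uses a sigmoid activation, which squashes every prediction into (0, 1), while the targets above range from about -2.7 to 24. For a continuous target, the usual choice is a linear output layer. A minimal alternative sketch that keeps the same hidden layers (the reg_model name here is only illustrative):

# same hidden layers, but a linear output so predictions can take any real value
reg_model = Sequential()
reg_model.add(Dense(10, input_dim=1, activation='relu'))
reg_model.add(Dense(7, activation='relu'))
reg_model.add(Dense(1))  # no activation -> linear output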
In [9]:
# compile the model with a binary cross-entropy loss and an accuracy metric
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
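binary_crossentropy and the accuracy metric are classification settings: cross-entropy assumes targets in [0, 1], and accuracy counts exact class matches. With continuous targets like these, the reported loss can go negative and accuracy never leaves zero. For regression, mean squared error with mean absolute error as a readable metric is the conventional choice; a minimal sketch for the linear-output model above:

# regression-style compilation: MSE loss, MAE as a human-readable metric
reg_model.compile(loss='mean_squared_error', optimizer='adam', metrics=['mae'])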
In [12]:
# train for 100 epochs with mini-batches of 10 samples
model.fit(x, y, epochs=100, batch_size=10)
Epoch 1/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 2/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 3/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 4/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 5/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 6/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 7/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 8/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 9/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 10/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 11/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 12/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 13/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 14/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 15/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 16/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 17/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 18/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 19/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 20/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 21/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 22/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 23/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 24/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 25/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 26/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 27/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 28/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 29/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 30/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 31/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 32/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 33/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 34/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 35/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 36/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 37/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 38/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 39/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 40/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 41/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 42/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 43/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 44/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 45/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 46/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 47/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 48/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 49/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 50/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 51/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 52/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 53/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 54/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 55/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 56/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 57/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 58/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 59/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 60/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 61/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 62/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 63/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 64/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 65/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 66/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 67/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 68/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 69/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 70/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 71/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 72/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 73/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 74/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 75/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 76/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 77/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 78/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 79/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 80/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 81/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 82/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 83/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 84/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 85/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 86/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 87/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 88/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 89/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 90/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 91/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 92/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 93/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 94/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 95/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 96/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 97/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 98/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 99/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Epoch 100/100
97/97 [==============================] - 0s - loss: -77.1474 - acc: 0.0000e+00
Out[12]:
<keras.callbacks.History at 0x7f1ed3700f60>
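The log above shows exactly the symptom of that loss/activation mismatch: the loss sits at a constant negative value and accuracy never moves off zero, so the model is not learning anything meaningful about this data. Training the regression-style model sketched earlier would instead produce a decreasing MSE. A short follow-up sketch, assuming reg_model was built and compiled as above:

# fit the regression model quietly, then compare a few predictions with the targets
reg_model.fit(x.reshape(-1, 1), y, epochs=100, batch_size=10, verbose=0)
print(reg_model.predict(x[:5].reshape(-1, 1)).ravel())
print(y.ravel()[:5])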
In [ ]: