Keras plus TensorFlow

Installation Instructions
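
Keras runs on top of a backend such as TensorFlow; both can typically be installed with pip (for example, pip install tensorflow keras) or through an Anaconda environment. Exact package versions may differ from the ones used to produce the outputs below.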

Codebook of the Pima Indians diabetes dataset

  1. Number of times pregnant
  2. Plasma glucose concentration at 2 hours in an oral glucose tolerance test
  3. Diastolic blood pressure (mm Hg)
  4. Triceps skin fold thickness (mm)
  5. 2-Hour serum insulin (mu U/ml)
  6. Body mass index (weight in kg/(height in m)^2)
  7. Diabetes pedigree function
  8. Age (years)
  9. Class variable (0 or 1)

    Class Distribution: (class value 1 is interpreted as "tested positive for diabetes")

    Class Value    Number of instances
    0              500
    1              268
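
The class counts above can be verified directly from the data file (a minimal sketch, assuming pima-indians-diabetes.data is the comma-separated file loaded in the first cell below, with the class variable in the last column):

import numpy
data = numpy.loadtxt("pima-indians-diabetes.data", delimiter=",")
labels = data[:, 8]                                   # class variable (0 or 1)
values, counts = numpy.unique(labels, return_counts=True)
print(dict(zip(values, counts)))                      # expected: {0.0: 500, 1.0: 268}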


In [1]:
# Create first network with Keras
from keras.models import Sequential
from keras.layers import Dense
import numpy
# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)
# load pima indians dataset
dataset = numpy.loadtxt("pima-indians-diabetes.data", delimiter=",")
# split into input (X) and output (Y) variables
X = dataset[:,0:8]
Y = dataset[:,8]
# create model
model = Sequential()
model.add(Dense(12, input_dim=8, kernel_initializer='uniform', activation='relu'))
model.add(Dense(8, kernel_initializer='uniform', activation='relu'))
model.add(Dense(1, kernel_initializer='uniform', activation='sigmoid'))
# Compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# Fit the model
model.fit(X, Y, epochs=150, batch_size=10,  verbose=2)
# calculate predictions
predictions = model.predict(X)
# round predictions
rounded = [round(x[0]) for x in predictions]
print(rounded)


Using TensorFlow backend.
Epoch 1/150
 - 2s - loss: 0.6771 - acc: 0.6510
Epoch 2/150
 - 0s - loss: 0.6596 - acc: 0.6510
Epoch 3/150
 - 0s - loss: 0.6475 - acc: 0.6510
Epoch 4/150
 - 0s - loss: 0.6387 - acc: 0.6510
Epoch 5/150
 - 0s - loss: 0.6307 - acc: 0.6510
Epoch 6/150
 - 0s - loss: 0.6114 - acc: 0.6849
Epoch 7/150
 - 0s - loss: 0.6097 - acc: 0.6745
Epoch 8/150
 - 0s - loss: 0.6016 - acc: 0.6992
Epoch 9/150
 - 0s - loss: 0.5951 - acc: 0.6940
Epoch 10/150
 - 0s - loss: 0.5982 - acc: 0.6953
Epoch 11/150
 - 0s - loss: 0.5914 - acc: 0.6810
Epoch 12/150
 - 0s - loss: 0.5906 - acc: 0.6979
Epoch 13/150
 - 0s - loss: 0.5875 - acc: 0.6810
Epoch 14/150
 - 0s - loss: 0.5842 - acc: 0.6901
Epoch 15/150
 - 0s - loss: 0.5797 - acc: 0.7044
Epoch 16/150
 - 0s - loss: 0.5794 - acc: 0.6940
Epoch 17/150
 - 0s - loss: 0.5775 - acc: 0.7057
Epoch 18/150
 - 0s - loss: 0.5829 - acc: 0.7005
Epoch 19/150
 - 0s - loss: 0.5737 - acc: 0.7096
Epoch 20/150
 - 0s - loss: 0.5758 - acc: 0.7005
Epoch 21/150
 - 0s - loss: 0.5706 - acc: 0.7096
Epoch 22/150
 - 0s - loss: 0.5741 - acc: 0.6992
Epoch 23/150
 - 0s - loss: 0.5691 - acc: 0.7148
Epoch 24/150
 - 0s - loss: 0.5765 - acc: 0.7057
Epoch 25/150
 - 0s - loss: 0.5640 - acc: 0.7096
Epoch 26/150
 - 0s - loss: 0.5776 - acc: 0.7031
Epoch 27/150
 - 0s - loss: 0.5699 - acc: 0.7031
Epoch 28/150
 - 0s - loss: 0.5625 - acc: 0.7161
Epoch 29/150
 - 0s - loss: 0.5667 - acc: 0.7122
Epoch 30/150
 - 0s - loss: 0.5632 - acc: 0.7070
Epoch 31/150
 - 0s - loss: 0.5613 - acc: 0.7122
Epoch 32/150
 - 0s - loss: 0.5574 - acc: 0.7174
Epoch 33/150
 - 0s - loss: 0.5554 - acc: 0.7201
Epoch 34/150
 - 0s - loss: 0.5570 - acc: 0.7240
Epoch 35/150
 - 0s - loss: 0.5518 - acc: 0.7240
Epoch 36/150
 - 0s - loss: 0.5514 - acc: 0.7109
Epoch 37/150
 - 0s - loss: 0.5493 - acc: 0.7214
Epoch 38/150
 - 0s - loss: 0.5563 - acc: 0.7201
Epoch 39/150
 - 0s - loss: 0.5507 - acc: 0.7292
Epoch 40/150
 - 0s - loss: 0.5545 - acc: 0.7227
Epoch 41/150
 - 0s - loss: 0.5471 - acc: 0.7318
Epoch 42/150
 - 0s - loss: 0.5478 - acc: 0.7201
Epoch 43/150
 - 0s - loss: 0.5416 - acc: 0.7331
Epoch 44/150
 - 0s - loss: 0.5458 - acc: 0.7331
Epoch 45/150
 - 0s - loss: 0.5448 - acc: 0.7409
Epoch 46/150
 - 0s - loss: 0.5377 - acc: 0.7214
Epoch 47/150
 - 0s - loss: 0.5400 - acc: 0.7318
Epoch 48/150
 - 0s - loss: 0.5382 - acc: 0.7383
Epoch 49/150
 - 0s - loss: 0.5340 - acc: 0.7331
Epoch 50/150
 - 0s - loss: 0.5364 - acc: 0.7396
Epoch 51/150
 - 0s - loss: 0.5341 - acc: 0.7279
Epoch 52/150
 - 0s - loss: 0.5398 - acc: 0.7227
Epoch 53/150
 - 0s - loss: 0.5354 - acc: 0.7292
Epoch 54/150
 - 0s - loss: 0.5346 - acc: 0.7240
Epoch 55/150
 - 0s - loss: 0.5341 - acc: 0.7383
Epoch 56/150
 - 0s - loss: 0.5361 - acc: 0.7344
Epoch 57/150
 - 0s - loss: 0.5296 - acc: 0.7279
Epoch 58/150
 - 0s - loss: 0.5315 - acc: 0.7279
Epoch 59/150
 - 0s - loss: 0.5267 - acc: 0.7370
Epoch 60/150
 - 0s - loss: 0.5291 - acc: 0.7292
Epoch 61/150
 - 0s - loss: 0.5242 - acc: 0.7305
Epoch 62/150
 - 0s - loss: 0.5269 - acc: 0.7409
Epoch 63/150
 - 0s - loss: 0.5297 - acc: 0.7435
Epoch 64/150
 - 0s - loss: 0.5274 - acc: 0.7435
Epoch 65/150
 - 0s - loss: 0.5228 - acc: 0.7461
Epoch 66/150
 - 0s - loss: 0.5192 - acc: 0.7383
Epoch 67/150
 - 0s - loss: 0.5176 - acc: 0.7344
Epoch 68/150
 - 0s - loss: 0.5227 - acc: 0.7409
Epoch 69/150
 - 0s - loss: 0.5161 - acc: 0.7526
Epoch 70/150
 - 0s - loss: 0.5227 - acc: 0.7305
Epoch 71/150
 - 0s - loss: 0.5158 - acc: 0.7474
Epoch 72/150
 - 0s - loss: 0.5180 - acc: 0.7513
Epoch 73/150
 - 0s - loss: 0.5115 - acc: 0.7539
Epoch 74/150
 - 0s - loss: 0.5167 - acc: 0.7396
Epoch 75/150
 - 0s - loss: 0.5107 - acc: 0.7513
Epoch 76/150
 - 0s - loss: 0.5101 - acc: 0.7591
Epoch 77/150
 - 0s - loss: 0.5101 - acc: 0.7539
Epoch 78/150
 - 0s - loss: 0.5107 - acc: 0.7422
Epoch 79/150
 - 0s - loss: 0.5138 - acc: 0.7422
Epoch 80/150
 - 0s - loss: 0.5072 - acc: 0.7539
Epoch 81/150
 - 0s - loss: 0.5042 - acc: 0.7526
Epoch 82/150
 - 0s - loss: 0.5029 - acc: 0.7604
Epoch 83/150
 - 0s - loss: 0.5008 - acc: 0.7617
Epoch 84/150
 - 0s - loss: 0.4990 - acc: 0.7591
Epoch 85/150
 - 0s - loss: 0.5022 - acc: 0.7500
Epoch 86/150
 - 0s - loss: 0.5069 - acc: 0.7526
Epoch 87/150
 - 0s - loss: 0.5036 - acc: 0.7643
Epoch 88/150
 - 0s - loss: 0.4966 - acc: 0.7474
Epoch 89/150
 - 0s - loss: 0.5029 - acc: 0.7734
Epoch 90/150
 - 0s - loss: 0.4947 - acc: 0.7773
Epoch 91/150
 - 0s - loss: 0.4912 - acc: 0.7617
Epoch 92/150
 - 0s - loss: 0.4955 - acc: 0.7617
Epoch 93/150
 - 0s - loss: 0.4876 - acc: 0.7565
Epoch 94/150
 - 0s - loss: 0.4963 - acc: 0.7617
Epoch 95/150
 - 0s - loss: 0.4841 - acc: 0.7604
Epoch 96/150
 - 0s - loss: 0.4880 - acc: 0.7682
Epoch 97/150
 - 0s - loss: 0.4853 - acc: 0.7721
Epoch 98/150
 - 0s - loss: 0.4845 - acc: 0.7734
Epoch 99/150
 - 0s - loss: 0.4811 - acc: 0.7747
Epoch 100/150
 - 0s - loss: 0.4786 - acc: 0.7682
Epoch 101/150
 - 0s - loss: 0.4821 - acc: 0.7786
Epoch 102/150
 - 0s - loss: 0.4836 - acc: 0.7682
Epoch 103/150
 - 0s - loss: 0.4817 - acc: 0.7617
Epoch 104/150
 - 0s - loss: 0.4848 - acc: 0.7799
Epoch 105/150
 - 0s - loss: 0.4918 - acc: 0.7539
Epoch 106/150
 - 0s - loss: 0.4818 - acc: 0.7695
Epoch 107/150
 - 0s - loss: 0.4795 - acc: 0.7760
Epoch 108/150
 - 0s - loss: 0.4800 - acc: 0.7734
Epoch 109/150
 - 0s - loss: 0.4747 - acc: 0.7630
Epoch 110/150
 - 0s - loss: 0.4742 - acc: 0.7839
Epoch 111/150
 - 0s - loss: 0.4801 - acc: 0.7773
Epoch 112/150
 - 0s - loss: 0.4703 - acc: 0.7786
Epoch 113/150
 - 0s - loss: 0.4802 - acc: 0.7630
Epoch 114/150
 - 0s - loss: 0.4825 - acc: 0.7630
Epoch 115/150
 - 0s - loss: 0.4729 - acc: 0.7708
Epoch 116/150
 - 0s - loss: 0.4787 - acc: 0.7682
Epoch 117/150
 - 0s - loss: 0.4737 - acc: 0.7682
Epoch 118/150
 - 0s - loss: 0.4798 - acc: 0.7604
Epoch 119/150
 - 0s - loss: 0.4662 - acc: 0.7826
Epoch 120/150
 - 0s - loss: 0.4696 - acc: 0.7695
Epoch 121/150
 - 0s - loss: 0.4792 - acc: 0.7812
Epoch 122/150
 - 0s - loss: 0.4754 - acc: 0.7826
Epoch 123/150
 - 0s - loss: 0.4656 - acc: 0.7786
Epoch 124/150
 - 0s - loss: 0.4611 - acc: 0.7865
Epoch 125/150
 - 0s - loss: 0.4693 - acc: 0.7721
Epoch 126/150
 - 0s - loss: 0.4674 - acc: 0.7643
Epoch 127/150
 - 0s - loss: 0.4681 - acc: 0.7721
Epoch 128/150
 - 0s - loss: 0.4584 - acc: 0.7826
Epoch 129/150
 - 0s - loss: 0.4652 - acc: 0.7786
Epoch 130/150
 - 0s - loss: 0.4587 - acc: 0.7773
Epoch 131/150
 - 0s - loss: 0.4619 - acc: 0.7721
Epoch 132/150
 - 0s - loss: 0.4658 - acc: 0.7786
Epoch 133/150
 - 0s - loss: 0.4641 - acc: 0.7773
Epoch 134/150
 - 0s - loss: 0.4616 - acc: 0.7682
Epoch 135/150
 - 0s - loss: 0.4563 - acc: 0.7826
Epoch 136/150
 - 0s - loss: 0.4634 - acc: 0.7839
Epoch 137/150
 - 0s - loss: 0.4553 - acc: 0.7839
Epoch 138/150
 - 0s - loss: 0.4604 - acc: 0.7721
Epoch 139/150
 - 0s - loss: 0.4473 - acc: 0.7891
Epoch 140/150
 - 0s - loss: 0.4621 - acc: 0.7773
Epoch 141/150
 - 0s - loss: 0.4544 - acc: 0.7852
Epoch 142/150
 - 0s - loss: 0.4611 - acc: 0.7747
Epoch 143/150
 - 0s - loss: 0.4525 - acc: 0.7826
Epoch 144/150
 - 0s - loss: 0.4558 - acc: 0.7878
Epoch 145/150
 - 0s - loss: 0.4664 - acc: 0.7786
Epoch 146/150
 - 0s - loss: 0.4544 - acc: 0.7839
Epoch 147/150
 - 0s - loss: 0.4622 - acc: 0.7799
Epoch 148/150
 - 0s - loss: 0.4526 - acc: 0.7878
Epoch 149/150
 - 0s - loss: 0.4524 - acc: 0.7878
Epoch 150/150
 - 0s - loss: 0.4524 - acc: 0.7904
[1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 
0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
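
The rounded predictions above are made on the same rows used for training, so agreement with Y is an optimistic (training-set) accuracy; it can be reported explicitly (a minimal sketch, reusing model, X, Y and rounded from the cell above):

import numpy
loss, acc = model.evaluate(X, Y, verbose=0)           # training-set loss and accuracy
print("evaluate(): loss %.4f, accuracy %.4f" % (loss, acc))
print("rounded predictions matching Y: %.4f" % numpy.mean(numpy.array(rounded) == Y))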

In [2]:
# Second network with Keras: the iris dataset
from keras.models import Sequential
from keras.layers import Dense
import numpy
# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)
# load iris dataset
dataset = numpy.loadtxt("iris.txt", delimiter=",")
# split into input (X) and output (Y) variables
X = dataset[:,0:4]
Y = dataset[:,4:6]
# create model
model = Sequential()
model.add(Dense(12, input_dim=4, kernel_initializer='uniform', activation='relu'))
model.add(Dense(8, kernel_initializer='uniform', activation='relu'))
model.add(Dense(2, kernel_initializer='uniform', activation='sigmoid'))
# Compile model
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# Fit the model
model.fit(X, Y, epochs=500, batch_size=10,  verbose=2)
# calculate predictions
predictions = model.predict(X)
# round predictions
rounded = [[round(x[0]),round(x[1])] for x in predictions]
print(rounded)


Epoch 1/500
 - 0s - loss: 0.4621 - acc: 0.6600
Epoch 2/500
 - 0s - loss: 0.4621 - acc: 0.6667
Epoch 3/500
 - 0s - loss: 0.4621 - acc: 0.6667
Epoch 4/500
 - 0s - loss: 0.4621 - acc: 0.4667
Epoch 5/500
 - 0s - loss: 0.4620 - acc: 0.4733
Epoch 6/500
 - 0s - loss: 0.4619 - acc: 0.6667
Epoch 7/500
 - 0s - loss: 0.4618 - acc: 0.6667
Epoch 8/500
 - 0s - loss: 0.4616 - acc: 0.4933
Epoch 9/500
 - 0s - loss: 0.4611 - acc: 0.4333
Epoch 10/500
 - 0s - loss: 0.4606 - acc: 0.5267
Epoch 11/500
 - 0s - loss: 0.4599 - acc: 0.6533
Epoch 12/500
 - 0s - loss: 0.4595 - acc: 0.4733
Epoch 13/500
 - 0s - loss: 0.4592 - acc: 0.6200
Epoch 14/500
 - 0s - loss: 0.4574 - acc: 0.5333
Epoch 15/500
 - 0s - loss: 0.4566 - acc: 0.5667
Epoch 16/500
 - 0s - loss: 0.4549 - acc: 0.3600
Epoch 17/500
 - 0s - loss: 0.4539 - acc: 0.3333
Epoch 18/500
 - 0s - loss: 0.4518 - acc: 0.3333
Epoch 19/500
 - 0s - loss: 0.4503 - acc: 0.3333
Epoch 20/500
 - 0s - loss: 0.4483 - acc: 0.3333
Epoch 21/500
 - 0s - loss: 0.4465 - acc: 0.3333
Epoch 22/500
 - 0s - loss: 0.4435 - acc: 0.3333
Epoch 23/500
 - 0s - loss: 0.4416 - acc: 0.3333
Epoch 24/500
 - 0s - loss: 0.4375 - acc: 0.3333
Epoch 25/500
 - 0s - loss: 0.4342 - acc: 0.3867
Epoch 26/500
 - 0s - loss: 0.4317 - acc: 0.4867
Epoch 27/500
 - 0s - loss: 0.4246 - acc: 0.4333
Epoch 28/500
 - 0s - loss: 0.4209 - acc: 0.4000
Epoch 29/500
 - 0s - loss: 0.4151 - acc: 0.4133
Epoch 30/500
 - 0s - loss: 0.4100 - acc: 0.5600
Epoch 31/500
 - 0s - loss: 0.4016 - acc: 0.6133
Epoch 32/500
 - 0s - loss: 0.3962 - acc: 0.4867
Epoch 33/500
 - 0s - loss: 0.3865 - acc: 0.6000
Epoch 34/500
 - 0s - loss: 0.3789 - acc: 0.6267
Epoch 35/500
 - 0s - loss: 0.3681 - acc: 0.6200
Epoch 36/500
 - 0s - loss: 0.3605 - acc: 0.6333
Epoch 37/500
 - 0s - loss: 0.3470 - acc: 0.6267
Epoch 38/500
 - 0s - loss: 0.3388 - acc: 0.5933
Epoch 39/500
 - 0s - loss: 0.3280 - acc: 0.6267
Epoch 40/500
 - 0s - loss: 0.3180 - acc: 0.6333
Epoch 41/500
 - 0s - loss: 0.3056 - acc: 0.6267
Epoch 42/500
 - 0s - loss: 0.2947 - acc: 0.6267
Epoch 43/500
 - 0s - loss: 0.2827 - acc: 0.6467
Epoch 44/500
 - 0s - loss: 0.2766 - acc: 0.6333
Epoch 45/500
 - 0s - loss: 0.2659 - acc: 0.6333
Epoch 46/500
 - 0s - loss: 0.2602 - acc: 0.6133
Epoch 47/500
 - 0s - loss: 0.2418 - acc: 0.6400
Epoch 48/500
 - 0s - loss: 0.2328 - acc: 0.6267
Epoch 49/500
 - 0s - loss: 0.2244 - acc: 0.6333
Epoch 50/500
 - 0s - loss: 0.2167 - acc: 0.6200
Epoch 51/500
 - 0s - loss: 0.2035 - acc: 0.6400
Epoch 52/500
 - 0s - loss: 0.1966 - acc: 0.6267
Epoch 53/500
 - 0s - loss: 0.1860 - acc: 0.6400
Epoch 54/500
 - 0s - loss: 0.1767 - acc: 0.6400
Epoch 55/500
 - 0s - loss: 0.1722 - acc: 0.6467
Epoch 56/500
 - 0s - loss: 0.1666 - acc: 0.6333
Epoch 57/500
 - 0s - loss: 0.1560 - acc: 0.6467
Epoch 58/500
 - 0s - loss: 0.1474 - acc: 0.6400
Epoch 59/500
 - 0s - loss: 0.1406 - acc: 0.6333
Epoch 60/500
 - 0s - loss: 0.1301 - acc: 0.6467
Epoch 61/500
 - 0s - loss: 0.1247 - acc: 0.6400
Epoch 62/500
 - 0s - loss: 0.1201 - acc: 0.6333
Epoch 63/500
 - 0s - loss: 0.1180 - acc: 0.6400
Epoch 64/500
 - 0s - loss: 0.1064 - acc: 0.6333
Epoch 65/500
 - 0s - loss: 0.1013 - acc: 0.6467
Epoch 66/500
 - 0s - loss: 0.1016 - acc: 0.6400
Epoch 67/500
 - 0s - loss: 0.1000 - acc: 0.6400
Epoch 68/500
 - 0s - loss: 0.1036 - acc: 0.6267
Epoch 69/500
 - 0s - loss: 0.0925 - acc: 0.6467
Epoch 70/500
 - 0s - loss: 0.0902 - acc: 0.6400
Epoch 71/500
 - 0s - loss: 0.0865 - acc: 0.6400
Epoch 72/500
 - 0s - loss: 0.0874 - acc: 0.6400
Epoch 73/500
 - 0s - loss: 0.0830 - acc: 0.6467
Epoch 74/500
 - 0s - loss: 0.0863 - acc: 0.6467
Epoch 75/500
 - 0s - loss: 0.0860 - acc: 0.6333
Epoch 76/500
 - 0s - loss: 0.0839 - acc: 0.6400
Epoch 77/500
 - 0s - loss: 0.0873 - acc: 0.6267
Epoch 78/500
 - 0s - loss: 0.0854 - acc: 0.6400
Epoch 79/500
 - 0s - loss: 0.0752 - acc: 0.6400
Epoch 80/500
 - 0s - loss: 0.0770 - acc: 0.6400
Epoch 81/500
 - 0s - loss: 0.0778 - acc: 0.6400
Epoch 82/500
 - 0s - loss: 0.0773 - acc: 0.6467
Epoch 83/500
 - 0s - loss: 0.0919 - acc: 0.6333
Epoch 84/500
 - 0s - loss: 0.0762 - acc: 0.6467
Epoch 85/500
 - 0s - loss: 0.0729 - acc: 0.6467
Epoch 86/500
 - 0s - loss: 0.0718 - acc: 0.6400
Epoch 87/500
 - 0s - loss: 0.0706 - acc: 0.6467
Epoch 88/500
 - 0s - loss: 0.0727 - acc: 0.6467
Epoch 89/500
 - 0s - loss: 0.0750 - acc: 0.6400
Epoch 90/500
 - 0s - loss: 0.0687 - acc: 0.6467
Epoch 91/500
 - 0s - loss: 0.0691 - acc: 0.6400
Epoch 92/500
 - 0s - loss: 0.0698 - acc: 0.6467
Epoch 93/500
 - 0s - loss: 0.0750 - acc: 0.6333
Epoch 94/500
 - 0s - loss: 0.0734 - acc: 0.6467
Epoch 95/500
 - 0s - loss: 0.0655 - acc: 0.6467
Epoch 96/500
 - 0s - loss: 0.0730 - acc: 0.6400
Epoch 97/500
 - 0s - loss: 0.0696 - acc: 0.6400
Epoch 98/500
 - 0s - loss: 0.0639 - acc: 0.6467
Epoch 99/500
 - 0s - loss: 0.0710 - acc: 0.6467
Epoch 100/500
 - 0s - loss: 0.0723 - acc: 0.6467
Epoch 101/500
 - 0s - loss: 0.0643 - acc: 0.6467
Epoch 102/500
 - 0s - loss: 0.0652 - acc: 0.6467
Epoch 103/500
 - 0s - loss: 0.0651 - acc: 0.6400
Epoch 104/500
 - 0s - loss: 0.0826 - acc: 0.6267
Epoch 105/500
 - 0s - loss: 0.0892 - acc: 0.6333
Epoch 106/500
 - 0s - loss: 0.0664 - acc: 0.6467
Epoch 107/500
 - 0s - loss: 0.0642 - acc: 0.6467
Epoch 108/500
 - 0s - loss: 0.0641 - acc: 0.6467
Epoch 109/500
 - 0s - loss: 0.0625 - acc: 0.6467
Epoch 110/500
 - 0s - loss: 0.0667 - acc: 0.6400
Epoch 111/500
 - 0s - loss: 0.0636 - acc: 0.6467
Epoch 112/500
 - 0s - loss: 0.0634 - acc: 0.6467
Epoch 113/500
 - 0s - loss: 0.0625 - acc: 0.6467
Epoch 114/500
 - 0s - loss: 0.0619 - acc: 0.6400
Epoch 115/500
 - 0s - loss: 0.0685 - acc: 0.6400
Epoch 116/500
 - 0s - loss: 0.0624 - acc: 0.6400
Epoch 117/500
 - 0s - loss: 0.0655 - acc: 0.6400
Epoch 118/500
 - 0s - loss: 0.0633 - acc: 0.6333
Epoch 119/500
 - 0s - loss: 0.0739 - acc: 0.6400
Epoch 120/500
 - 0s - loss: 0.0616 - acc: 0.6467
Epoch 121/500
 - 0s - loss: 0.0597 - acc: 0.6400
Epoch 122/500
 - 0s - loss: 0.0625 - acc: 0.6467
Epoch 123/500
 - 0s - loss: 0.0595 - acc: 0.6467
Epoch 124/500
 - 0s - loss: 0.0604 - acc: 0.6467
Epoch 125/500
 - 0s - loss: 0.0635 - acc: 0.6400
Epoch 126/500
 - 0s - loss: 0.0593 - acc: 0.6467
Epoch 127/500
 - 0s - loss: 0.0606 - acc: 0.6400
Epoch 128/500
 - 0s - loss: 0.0581 - acc: 0.6467
Epoch 129/500
 - 0s - loss: 0.0681 - acc: 0.6333
Epoch 130/500
 - 0s - loss: 0.0713 - acc: 0.6333
Epoch 131/500
 - 0s - loss: 0.0645 - acc: 0.6400
Epoch 132/500
 - 0s - loss: 0.0622 - acc: 0.6400
Epoch 133/500
 - 0s - loss: 0.0576 - acc: 0.6467
Epoch 134/500
 - 0s - loss: 0.0600 - acc: 0.6467
Epoch 135/500
 - 0s - loss: 0.0579 - acc: 0.6467
Epoch 136/500
 - 0s - loss: 0.0596 - acc: 0.6400
Epoch 137/500
 - 0s - loss: 0.0610 - acc: 0.6467
Epoch 138/500
 - 0s - loss: 0.0583 - acc: 0.6400
Epoch 139/500
 - 0s - loss: 0.0584 - acc: 0.6400
Epoch 140/500
 - 0s - loss: 0.0581 - acc: 0.6400
Epoch 141/500
 - 0s - loss: 0.0566 - acc: 0.6467
Epoch 142/500
 - 0s - loss: 0.0618 - acc: 0.6400
Epoch 143/500
 - 0s - loss: 0.0581 - acc: 0.6467
Epoch 144/500
 - 0s - loss: 0.0592 - acc: 0.6400
Epoch 145/500
 - 0s - loss: 0.0616 - acc: 0.6400
Epoch 146/500
 - 0s - loss: 0.0563 - acc: 0.6467
Epoch 147/500
 - 0s - loss: 0.0577 - acc: 0.6467
Epoch 148/500
 - 0s - loss: 0.0612 - acc: 0.6400
Epoch 149/500
 - 0s - loss: 0.0549 - acc: 0.6467
Epoch 150/500
 - 0s - loss: 0.0601 - acc: 0.6467
Epoch 151/500
 - 0s - loss: 0.0571 - acc: 0.6467
Epoch 152/500
 - 0s - loss: 0.0573 - acc: 0.6467
Epoch 153/500
 - 0s - loss: 0.0563 - acc: 0.6467
Epoch 154/500
 - 0s - loss: 0.0596 - acc: 0.6400
Epoch 155/500
 - 0s - loss: 0.0555 - acc: 0.6467
Epoch 156/500
 - 0s - loss: 0.0573 - acc: 0.6467
Epoch 157/500
 - 0s - loss: 0.0563 - acc: 0.6467
Epoch 158/500
 - 0s - loss: 0.0567 - acc: 0.6467
Epoch 159/500
 - 0s - loss: 0.0548 - acc: 0.6467
Epoch 160/500
 - 0s - loss: 0.0589 - acc: 0.6400
Epoch 161/500
 - 0s - loss: 0.0568 - acc: 0.6467
Epoch 162/500
 - 0s - loss: 0.0573 - acc: 0.6467
Epoch 163/500
 - 0s - loss: 0.0550 - acc: 0.6467
Epoch 164/500
 - 0s - loss: 0.0550 - acc: 0.6467
Epoch 165/500
 - 0s - loss: 0.0545 - acc: 0.6467
Epoch 166/500
 - 0s - loss: 0.0542 - acc: 0.6467
Epoch 167/500
 - 0s - loss: 0.0564 - acc: 0.6467
Epoch 168/500
 - 0s - loss: 0.0598 - acc: 0.6467
Epoch 169/500
 - 0s - loss: 0.0654 - acc: 0.6400
Epoch 170/500
 - 0s - loss: 0.0592 - acc: 0.6467
Epoch 171/500
 - 0s - loss: 0.0538 - acc: 0.6467
Epoch 172/500
 - 0s - loss: 0.0598 - acc: 0.6400
Epoch 173/500
 - 0s - loss: 0.0548 - acc: 0.6400
Epoch 174/500
 - 0s - loss: 0.0590 - acc: 0.6400
Epoch 175/500
 - 0s - loss: 0.0539 - acc: 0.6533
Epoch 176/500
 - 0s - loss: 0.0556 - acc: 0.6400
Epoch 177/500
 - 0s - loss: 0.0567 - acc: 0.6467
Epoch 178/500
 - 0s - loss: 0.0557 - acc: 0.6467
Epoch 179/500
 - 0s - loss: 0.0563 - acc: 0.6467
Epoch 180/500
 - 0s - loss: 0.0561 - acc: 0.6467
Epoch 181/500
 - 0s - loss: 0.0556 - acc: 0.6467
Epoch 182/500
 - 0s - loss: 0.0581 - acc: 0.6467
Epoch 183/500
 - 0s - loss: 0.0579 - acc: 0.6467
Epoch 184/500
 - 0s - loss: 0.0531 - acc: 0.6467
Epoch 185/500
 - 0s - loss: 0.0542 - acc: 0.6467
Epoch 186/500
 - 0s - loss: 0.0541 - acc: 0.6467
Epoch 187/500
 - 0s - loss: 0.0535 - acc: 0.6467
Epoch 188/500
 - 0s - loss: 0.0555 - acc: 0.6467
Epoch 189/500
 - 0s - loss: 0.0538 - acc: 0.6467
Epoch 190/500
 - 0s - loss: 0.0584 - acc: 0.6400
Epoch 191/500
 - 0s - loss: 0.0599 - acc: 0.6400
Epoch 192/500
 - 0s - loss: 0.0540 - acc: 0.6467
Epoch 193/500
 - 0s - loss: 0.0577 - acc: 0.6400
Epoch 194/500
 - 0s - loss: 0.0548 - acc: 0.6400
Epoch 195/500
 - 0s - loss: 0.0535 - acc: 0.6467
Epoch 196/500
 - 0s - loss: 0.0552 - acc: 0.6467
Epoch 197/500
 - 0s - loss: 0.0525 - acc: 0.6467
Epoch 198/500
 - 0s - loss: 0.0540 - acc: 0.6467
Epoch 199/500
 - 0s - loss: 0.0543 - acc: 0.6400
Epoch 200/500
 - 0s - loss: 0.0545 - acc: 0.6400
Epoch 201/500
 - 0s - loss: 0.0570 - acc: 0.6467
Epoch 202/500
 - 0s - loss: 0.0559 - acc: 0.6533
Epoch 203/500
 - 0s - loss: 0.0538 - acc: 0.6467
Epoch 204/500
 - 0s - loss: 0.0553 - acc: 0.6467
Epoch 205/500
 - 0s - loss: 0.0668 - acc: 0.6333
Epoch 206/500
 - 0s - loss: 0.0552 - acc: 0.6400
Epoch 207/500
 - 0s - loss: 0.0538 - acc: 0.6533
Epoch 208/500
 - 0s - loss: 0.0533 - acc: 0.6467
Epoch 209/500
 - 0s - loss: 0.0646 - acc: 0.6333
Epoch 210/500
 - 0s - loss: 0.0593 - acc: 0.6467
Epoch 211/500
 - 0s - loss: 0.0538 - acc: 0.6467
Epoch 212/500
 - 0s - loss: 0.0544 - acc: 0.6467
Epoch 213/500
 - 0s - loss: 0.0551 - acc: 0.6400
Epoch 214/500
 - 0s - loss: 0.0531 - acc: 0.6467
Epoch 215/500
 - 0s - loss: 0.0517 - acc: 0.6467
Epoch 216/500
 - 0s - loss: 0.0523 - acc: 0.6467
Epoch 217/500
 - 0s - loss: 0.0513 - acc: 0.6467
Epoch 218/500
 - 0s - loss: 0.0527 - acc: 0.6467
Epoch 219/500
 - 0s - loss: 0.0544 - acc: 0.6467
Epoch 220/500
 - 0s - loss: 0.0554 - acc: 0.6400
Epoch 221/500
 - 0s - loss: 0.0545 - acc: 0.6400
Epoch 222/500
 - 0s - loss: 0.0655 - acc: 0.6467
Epoch 223/500
 - 0s - loss: 0.0472 - acc: 0.6533
Epoch 224/500
 - 0s - loss: 0.0581 - acc: 0.6333
Epoch 225/500
 - 0s - loss: 0.0528 - acc: 0.6467
Epoch 226/500
 - 0s - loss: 0.0535 - acc: 0.6533
Epoch 227/500
 - 0s - loss: 0.0542 - acc: 0.6467
Epoch 228/500
 - 0s - loss: 0.0582 - acc: 0.6400
Epoch 229/500
 - 0s - loss: 0.0548 - acc: 0.6400
Epoch 230/500
 - 0s - loss: 0.0510 - acc: 0.6467
Epoch 231/500
 - 0s - loss: 0.0524 - acc: 0.6467
Epoch 232/500
 - 0s - loss: 0.0513 - acc: 0.6467
Epoch 233/500
 - 0s - loss: 0.0536 - acc: 0.6400
Epoch 234/500
 - 0s - loss: 0.0527 - acc: 0.6467
Epoch 235/500
 - 0s - loss: 0.0567 - acc: 0.6400
Epoch 236/500
 - 0s - loss: 0.0661 - acc: 0.6400
Epoch 237/500
 - 0s - loss: 0.0520 - acc: 0.6400
Epoch 238/500
 - 0s - loss: 0.0542 - acc: 0.6467
Epoch 239/500
 - 0s - loss: 0.0526 - acc: 0.6467
Epoch 240/500
 - 0s - loss: 0.0525 - acc: 0.6467
Epoch 241/500
 - 0s - loss: 0.0541 - acc: 0.6467
Epoch 242/500
 - 0s - loss: 0.0526 - acc: 0.6467
Epoch 243/500
 - 0s - loss: 0.0511 - acc: 0.6467
Epoch 244/500
 - 0s - loss: 0.0544 - acc: 0.6467
Epoch 245/500
 - 0s - loss: 0.0594 - acc: 0.6400
Epoch 246/500
 - 0s - loss: 0.0577 - acc: 0.6467
Epoch 247/500
 - 0s - loss: 0.0513 - acc: 0.6467
Epoch 248/500
 - 0s - loss: 0.0540 - acc: 0.6467
Epoch 249/500
 - 0s - loss: 0.0538 - acc: 0.6467
Epoch 250/500
 - 0s - loss: 0.0511 - acc: 0.6467
Epoch 251/500
 - 0s - loss: 0.0517 - acc: 0.6467
Epoch 252/500
 - 0s - loss: 0.0549 - acc: 0.6467
Epoch 253/500
 - 0s - loss: 0.0526 - acc: 0.6467
Epoch 254/500
 - 0s - loss: 0.0536 - acc: 0.6467
Epoch 255/500
 - 0s - loss: 0.0522 - acc: 0.6467
Epoch 256/500
 - 0s - loss: 0.0512 - acc: 0.6467
Epoch 257/500
 - 0s - loss: 0.0578 - acc: 0.6467
Epoch 258/500
 - 0s - loss: 0.0541 - acc: 0.6467
Epoch 259/500
 - 0s - loss: 0.0494 - acc: 0.6467
Epoch 260/500
 - 0s - loss: 0.0557 - acc: 0.6400
Epoch 261/500
 - 0s - loss: 0.0511 - acc: 0.6533
Epoch 262/500
 - 0s - loss: 0.0517 - acc: 0.6467
Epoch 263/500
 - 0s - loss: 0.0509 - acc: 0.6467
Epoch 264/500
 - 0s - loss: 0.0514 - acc: 0.6467
Epoch 265/500
 - 0s - loss: 0.0502 - acc: 0.6467
Epoch 266/500
 - 0s - loss: 0.0525 - acc: 0.6467
Epoch 267/500
 - 0s - loss: 0.0517 - acc: 0.6467
Epoch 268/500
 - 0s - loss: 0.0554 - acc: 0.6467
Epoch 269/500
 - 0s - loss: 0.0564 - acc: 0.6400
Epoch 270/500
 - 0s - loss: 0.0502 - acc: 0.6467
Epoch 271/500
 - 0s - loss: 0.0502 - acc: 0.6467
Epoch 272/500
 - 0s - loss: 0.0501 - acc: 0.6467
Epoch 273/500
 - 0s - loss: 0.0497 - acc: 0.6467
Epoch 274/500
 - 0s - loss: 0.0511 - acc: 0.6467
Epoch 275/500
 - 0s - loss: 0.0500 - acc: 0.6467
Epoch 276/500
 - 0s - loss: 0.0515 - acc: 0.6467
Epoch 277/500
 - 0s - loss: 0.0553 - acc: 0.6467
Epoch 278/500
 - 0s - loss: 0.0512 - acc: 0.6533
Epoch 279/500
 - 0s - loss: 0.0520 - acc: 0.6467
Epoch 280/500
 - 0s - loss: 0.0502 - acc: 0.6533
Epoch 281/500
 - 0s - loss: 0.0552 - acc: 0.6400
Epoch 282/500
 - 0s - loss: 0.0597 - acc: 0.6400
Epoch 283/500
 - 0s - loss: 0.0497 - acc: 0.6467
Epoch 284/500
 - 0s - loss: 0.0532 - acc: 0.6467
Epoch 285/500
 - 0s - loss: 0.0524 - acc: 0.6533
Epoch 286/500
 - 0s - loss: 0.0497 - acc: 0.6533
Epoch 287/500
 - 0s - loss: 0.0512 - acc: 0.6467
Epoch 288/500
 - 0s - loss: 0.0530 - acc: 0.6467
Epoch 289/500
 - 0s - loss: 0.0508 - acc: 0.6467
Epoch 290/500
 - 0s - loss: 0.0514 - acc: 0.6533
Epoch 291/500
 - 0s - loss: 0.0519 - acc: 0.6400
Epoch 292/500
 - 0s - loss: 0.0496 - acc: 0.6467
Epoch 293/500
 - 0s - loss: 0.0531 - acc: 0.6533
Epoch 294/500
 - 0s - loss: 0.0500 - acc: 0.6467
Epoch 295/500
 - 0s - loss: 0.0522 - acc: 0.6467
Epoch 296/500
 - 0s - loss: 0.0530 - acc: 0.6467
Epoch 297/500
 - 0s - loss: 0.0480 - acc: 0.6467
Epoch 298/500
 - 0s - loss: 0.0505 - acc: 0.6533
Epoch 299/500
 - 0s - loss: 0.0506 - acc: 0.6467
Epoch 300/500
 - 0s - loss: 0.0517 - acc: 0.6533
Epoch 301/500
 - 0s - loss: 0.0524 - acc: 0.6467
Epoch 302/500
 - 0s - loss: 0.0568 - acc: 0.6400
Epoch 303/500
 - 0s - loss: 0.0512 - acc: 0.6400
Epoch 304/500
 - 0s - loss: 0.0508 - acc: 0.6467
Epoch 305/500
 - 0s - loss: 0.0559 - acc: 0.6467
Epoch 306/500
 - 0s - loss: 0.0511 - acc: 0.6467
Epoch 307/500
 - 0s - loss: 0.0501 - acc: 0.6533
Epoch 308/500
 - 0s - loss: 0.0508 - acc: 0.6467
Epoch 309/500
 - 0s - loss: 0.0654 - acc: 0.6400
Epoch 310/500
 - 0s - loss: 0.0498 - acc: 0.6400
Epoch 311/500
 - 0s - loss: 0.0522 - acc: 0.6533
Epoch 312/500
 - 0s - loss: 0.0497 - acc: 0.6467
Epoch 313/500
 - 0s - loss: 0.0504 - acc: 0.6467
Epoch 314/500
 - 0s - loss: 0.0522 - acc: 0.6533
Epoch 315/500
 - 0s - loss: 0.0552 - acc: 0.6400
Epoch 316/500
 - 0s - loss: 0.0574 - acc: 0.6400
Epoch 317/500
 - 0s - loss: 0.0525 - acc: 0.6467
Epoch 318/500
 - 0s - loss: 0.0481 - acc: 0.6533
Epoch 319/500
 - 0s - loss: 0.0619 - acc: 0.6333
Epoch 320/500
 - 0s - loss: 0.0835 - acc: 0.6267
Epoch 321/500
 - 0s - loss: 0.0520 - acc: 0.6467
Epoch 322/500
 - 0s - loss: 0.0569 - acc: 0.6467
Epoch 323/500
 - 0s - loss: 0.0525 - acc: 0.6400
Epoch 324/500
 - 0s - loss: 0.0551 - acc: 0.6400
Epoch 325/500
 - 0s - loss: 0.0634 - acc: 0.6400
Epoch 326/500
 - 0s - loss: 0.0585 - acc: 0.6333
Epoch 327/500
 - 0s - loss: 0.0542 - acc: 0.6467
Epoch 328/500
 - 0s - loss: 0.0480 - acc: 0.6533
Epoch 329/500
 - 0s - loss: 0.0517 - acc: 0.6467
Epoch 330/500
 - 0s - loss: 0.0502 - acc: 0.6467
Epoch 331/500
 - 0s - loss: 0.0512 - acc: 0.6467
Epoch 332/500
 - 0s - loss: 0.0502 - acc: 0.6533
Epoch 333/500
 - 0s - loss: 0.0558 - acc: 0.6467
Epoch 334/500
 - 0s - loss: 0.0578 - acc: 0.6467
Epoch 335/500
 - 0s - loss: 0.0506 - acc: 0.6467
Epoch 336/500
 - 0s - loss: 0.0578 - acc: 0.6467
Epoch 337/500
 - 0s - loss: 0.0549 - acc: 0.6400
Epoch 338/500
 - 0s - loss: 0.0487 - acc: 0.6467
Epoch 339/500
 - 0s - loss: 0.0528 - acc: 0.6467
Epoch 340/500
 - 0s - loss: 0.0492 - acc: 0.6467
Epoch 341/500
 - 0s - loss: 0.0499 - acc: 0.6467
Epoch 342/500
 - 0s - loss: 0.0482 - acc: 0.6467
Epoch 343/500
 - 0s - loss: 0.0504 - acc: 0.6467
Epoch 344/500
 - 0s - loss: 0.0499 - acc: 0.6467
Epoch 345/500
 - 0s - loss: 0.0572 - acc: 0.6400
Epoch 346/500
 - 0s - loss: 0.0501 - acc: 0.6533
Epoch 347/500
 - 0s - loss: 0.0488 - acc: 0.6533
Epoch 348/500
 - 0s - loss: 0.0529 - acc: 0.6467
Epoch 349/500
 - 0s - loss: 0.0515 - acc: 0.6533
Epoch 350/500
 - 0s - loss: 0.0491 - acc: 0.6467
Epoch 351/500
 - 0s - loss: 0.0509 - acc: 0.6467
Epoch 352/500
 - 0s - loss: 0.0466 - acc: 0.6467
Epoch 353/500
 - 0s - loss: 0.0529 - acc: 0.6467
Epoch 354/500
 - 0s - loss: 0.0507 - acc: 0.6467
Epoch 355/500
 - 0s - loss: 0.0483 - acc: 0.6467
Epoch 356/500
 - 0s - loss: 0.0514 - acc: 0.6467
Epoch 357/500
 - 0s - loss: 0.0475 - acc: 0.6467
Epoch 358/500
 - 0s - loss: 0.0544 - acc: 0.6467
Epoch 359/500
 - 0s - loss: 0.0512 - acc: 0.6400
Epoch 360/500
 - 0s - loss: 0.0488 - acc: 0.6467
Epoch 361/500
 - 0s - loss: 0.0492 - acc: 0.6533
Epoch 362/500
 - 0s - loss: 0.0487 - acc: 0.6467
Epoch 363/500
 - 0s - loss: 0.0512 - acc: 0.6533
Epoch 364/500
 - 0s - loss: 0.0508 - acc: 0.6467
Epoch 365/500
 - 0s - loss: 0.0623 - acc: 0.6333
Epoch 366/500
 - 0s - loss: 0.0555 - acc: 0.6467
Epoch 367/500
 - 0s - loss: 0.0517 - acc: 0.6467
Epoch 368/500
 - 0s - loss: 0.0500 - acc: 0.6400
Epoch 369/500
 - 0s - loss: 0.0484 - acc: 0.6467
Epoch 370/500
 - 0s - loss: 0.0484 - acc: 0.6467
Epoch 371/500
 - 0s - loss: 0.0501 - acc: 0.6467
Epoch 372/500
 - 0s - loss: 0.0491 - acc: 0.6467
Epoch 373/500
 - 0s - loss: 0.0531 - acc: 0.6467
Epoch 374/500
 - 0s - loss: 0.0475 - acc: 0.6533
Epoch 375/500
 - 0s - loss: 0.0534 - acc: 0.6400
Epoch 376/500
 - 0s - loss: 0.0479 - acc: 0.6533
Epoch 377/500
 - 0s - loss: 0.0487 - acc: 0.6533
Epoch 378/500
 - 0s - loss: 0.0550 - acc: 0.6533
Epoch 379/500
 - 0s - loss: 0.0480 - acc: 0.6467
Epoch 380/500
 - 0s - loss: 0.0495 - acc: 0.6467
Epoch 381/500
 - 0s - loss: 0.0503 - acc: 0.6400
Epoch 382/500
 - 0s - loss: 0.0541 - acc: 0.6533
Epoch 383/500
 - 0s - loss: 0.0476 - acc: 0.6467
Epoch 384/500
 - 0s - loss: 0.0490 - acc: 0.6467
Epoch 385/500
 - 0s - loss: 0.0550 - acc: 0.6400
Epoch 386/500
 - 0s - loss: 0.0492 - acc: 0.6467
Epoch 387/500
 - 0s - loss: 0.0528 - acc: 0.6400
Epoch 388/500
 - 0s - loss: 0.0530 - acc: 0.6467
Epoch 389/500
 - 0s - loss: 0.0497 - acc: 0.6467
Epoch 390/500
 - 0s - loss: 0.0505 - acc: 0.6467
Epoch 391/500
 - 0s - loss: 0.0516 - acc: 0.6533
Epoch 392/500
 - 0s - loss: 0.0510 - acc: 0.6467
Epoch 393/500
 - 0s - loss: 0.0483 - acc: 0.6467
Epoch 394/500
 - 0s - loss: 0.0487 - acc: 0.6533
Epoch 395/500
 - 0s - loss: 0.0492 - acc: 0.6467
Epoch 396/500
 - 0s - loss: 0.0520 - acc: 0.6533
Epoch 397/500
 - 0s - loss: 0.0530 - acc: 0.6467
Epoch 398/500
 - 0s - loss: 0.0482 - acc: 0.6467
Epoch 399/500
 - 0s - loss: 0.0499 - acc: 0.6467
Epoch 400/500
 - 0s - loss: 0.0523 - acc: 0.6533
Epoch 401/500
 - 0s - loss: 0.0486 - acc: 0.6467
Epoch 402/500
 - 0s - loss: 0.0499 - acc: 0.6467
Epoch 403/500
 - 0s - loss: 0.0490 - acc: 0.6533
Epoch 404/500
 - 0s - loss: 0.0570 - acc: 0.6400
Epoch 405/500
 - 0s - loss: 0.0503 - acc: 0.6533
Epoch 406/500
 - 0s - loss: 0.0493 - acc: 0.6533
Epoch 407/500
 - 0s - loss: 0.0492 - acc: 0.6467
Epoch 408/500
 - 0s - loss: 0.0498 - acc: 0.6467
Epoch 409/500
 - 0s - loss: 0.0476 - acc: 0.6533
Epoch 410/500
 - 0s - loss: 0.0482 - acc: 0.6533
Epoch 411/500
 - 0s - loss: 0.0482 - acc: 0.6467
Epoch 412/500
 - 0s - loss: 0.0487 - acc: 0.6467
Epoch 413/500
 - 0s - loss: 0.0597 - acc: 0.6400
Epoch 414/500
 - 0s - loss: 0.0490 - acc: 0.6467
Epoch 415/500
 - 0s - loss: 0.0620 - acc: 0.6400
Epoch 416/500
 - 0s - loss: 0.0499 - acc: 0.6533
Epoch 417/500
 - 0s - loss: 0.0485 - acc: 0.6533
Epoch 418/500
 - 0s - loss: 0.0492 - acc: 0.6467
Epoch 419/500
 - 0s - loss: 0.0489 - acc: 0.6533
Epoch 420/500
 - 0s - loss: 0.0468 - acc: 0.6533
Epoch 421/500
 - 0s - loss: 0.0542 - acc: 0.6400
Epoch 422/500
 - 0s - loss: 0.0514 - acc: 0.6400
Epoch 423/500
 - 0s - loss: 0.0491 - acc: 0.6533
Epoch 424/500
 - 0s - loss: 0.0491 - acc: 0.6467
Epoch 425/500
 - 0s - loss: 0.0473 - acc: 0.6467
Epoch 426/500
 - 0s - loss: 0.0525 - acc: 0.6533
Epoch 427/500
 - 0s - loss: 0.0516 - acc: 0.6400
Epoch 428/500
 - 0s - loss: 0.0454 - acc: 0.6533
Epoch 429/500
 - 0s - loss: 0.0519 - acc: 0.6467
Epoch 430/500
 - 0s - loss: 0.0477 - acc: 0.6533
Epoch 431/500
 - 0s - loss: 0.0476 - acc: 0.6467
Epoch 432/500
 - 0s - loss: 0.0482 - acc: 0.6533
Epoch 433/500
 - 0s - loss: 0.0486 - acc: 0.6467
Epoch 434/500
 - 0s - loss: 0.0506 - acc: 0.6467
Epoch 435/500
 - 0s - loss: 0.0478 - acc: 0.6533
Epoch 436/500
 - 0s - loss: 0.0482 - acc: 0.6467
Epoch 437/500
 - 0s - loss: 0.0505 - acc: 0.6533
Epoch 438/500
 - 0s - loss: 0.0484 - acc: 0.6467
Epoch 439/500
 - 0s - loss: 0.0523 - acc: 0.6533
Epoch 440/500
 - 0s - loss: 0.0499 - acc: 0.6400
Epoch 441/500
 - 0s - loss: 0.0512 - acc: 0.6533
Epoch 442/500
 - 0s - loss: 0.0523 - acc: 0.6467
Epoch 443/500
 - 0s - loss: 0.0500 - acc: 0.6467
Epoch 444/500
 - 0s - loss: 0.0474 - acc: 0.6533
Epoch 445/500
 - 0s - loss: 0.0521 - acc: 0.6400
Epoch 446/500
 - 0s - loss: 0.0512 - acc: 0.6533
Epoch 447/500
 - 0s - loss: 0.0461 - acc: 0.6467
Epoch 448/500
 - 0s - loss: 0.0514 - acc: 0.6467
Epoch 449/500
 - 0s - loss: 0.0510 - acc: 0.6467
Epoch 450/500
 - 0s - loss: 0.0501 - acc: 0.6467
Epoch 451/500
 - 0s - loss: 0.0468 - acc: 0.6533
Epoch 452/500
 - 0s - loss: 0.0502 - acc: 0.6467
Epoch 453/500
 - 0s - loss: 0.0470 - acc: 0.6467
Epoch 454/500
 - 0s - loss: 0.0523 - acc: 0.6400
Epoch 455/500
 - 0s - loss: 0.0512 - acc: 0.6467
Epoch 456/500
 - 0s - loss: 0.0472 - acc: 0.6467
Epoch 457/500
 - 0s - loss: 0.0508 - acc: 0.6533
Epoch 458/500
 - 0s - loss: 0.0477 - acc: 0.6467
Epoch 459/500
 - 0s - loss: 0.0474 - acc: 0.6467
Epoch 460/500
 - 0s - loss: 0.0500 - acc: 0.6533
Epoch 461/500
 - 0s - loss: 0.0477 - acc: 0.6467
Epoch 462/500
 - 0s - loss: 0.0561 - acc: 0.6467
Epoch 463/500
 - 0s - loss: 0.0678 - acc: 0.6333
Epoch 464/500
 - 0s - loss: 0.0502 - acc: 0.6533
Epoch 465/500
 - 0s - loss: 0.0491 - acc: 0.6467
Epoch 466/500
 - 0s - loss: 0.0522 - acc: 0.6533
Epoch 467/500
 - 0s - loss: 0.0474 - acc: 0.6467
Epoch 468/500
 - 0s - loss: 0.0478 - acc: 0.6533
Epoch 469/500
 - 0s - loss: 0.0479 - acc: 0.6467
Epoch 470/500
 - 0s - loss: 0.0518 - acc: 0.6400
Epoch 471/500
 - 0s - loss: 0.0497 - acc: 0.6533
Epoch 472/500
 - 0s - loss: 0.0492 - acc: 0.6467
Epoch 473/500
 - 0s - loss: 0.0499 - acc: 0.6467
Epoch 474/500
 - 0s - loss: 0.0483 - acc: 0.6533
Epoch 475/500
 - 0s - loss: 0.0465 - acc: 0.6467
Epoch 476/500
 - 0s - loss: 0.0488 - acc: 0.6533
Epoch 477/500
 - 0s - loss: 0.0477 - acc: 0.6467
Epoch 478/500
 - 0s - loss: 0.0506 - acc: 0.6533
Epoch 479/500
 - 0s - loss: 0.0557 - acc: 0.6400
Epoch 480/500
 - 0s - loss: 0.0477 - acc: 0.6467
Epoch 481/500
 - 0s - loss: 0.0490 - acc: 0.6467
Epoch 482/500
 - 0s - loss: 0.0507 - acc: 0.6533
Epoch 483/500
 - 0s - loss: 0.0461 - acc: 0.6533
Epoch 484/500
 - 0s - loss: 0.0508 - acc: 0.6467
Epoch 485/500
 - 0s - loss: 0.0481 - acc: 0.6533
Epoch 486/500
 - 0s - loss: 0.0479 - acc: 0.6467
Epoch 487/500
 - 0s - loss: 0.0471 - acc: 0.6533
Epoch 488/500
 - 0s - loss: 0.0494 - acc: 0.6467
Epoch 489/500
 - 0s - loss: 0.0469 - acc: 0.6467
Epoch 490/500
 - 0s - loss: 0.0481 - acc: 0.6467
Epoch 491/500
 - 0s - loss: 0.0514 - acc: 0.6467
Epoch 492/500
 - 0s - loss: 0.0496 - acc: 0.6533
Epoch 493/500
 - 0s - loss: 0.0478 - acc: 0.6533
Epoch 494/500
 - 0s - loss: 0.0512 - acc: 0.6400
Epoch 495/500
 - 0s - loss: 0.0460 - acc: 0.6467
Epoch 496/500
 - 0s - loss: 0.0501 - acc: 0.6467
Epoch 497/500
 - 0s - loss: 0.0522 - acc: 0.6467
Epoch 498/500
 - 0s - loss: 0.0512 - acc: 0.6467
Epoch 499/500
 - 0s - loss: 0.0521 - acc: 0.6467
Epoch 500/500
 - 0s - loss: 0.0506 - acc: 0.6467
[[0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 1.0], [0.0, 1.0], [0.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 1.0], [0.0, 1.0], [0.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 1.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 1.0], [0.0, 1.0], [0.0, 0.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [0.0, 1.0], [1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]
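
As before, the agreement between the rounded two-column predictions and the targets can be checked row by row (a minimal sketch, reusing rounded and Y from the cell above). Note that categorical_crossentropy normally expects each target row to sum to one, which is why the three-class one-hot encoding used in the next cell is the cleaner setup for iris.

import numpy
rounded_arr = numpy.array(rounded)
row_match = numpy.all(rounded_arr == Y, axis=1)       # both output columns correct
print("rows fully matching the targets: %.4f" % row_match.mean())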


In [3]:
import numpy
import pandas
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.utils import np_utils
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import LabelEncoder
from sklearn.pipeline import Pipeline

# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)
# load dataset
dataframe = pandas.read_csv("iris.csv", header=None)
dataset = dataframe.values
X = dataset[:,0:4].astype(float)
Y = dataset[:,4]


# encode class values as integers
encoder = LabelEncoder()
encoder.fit(Y)
encoded_Y = encoder.transform(Y)
# convert integers to dummy variables (i.e. one hot encoded)
dummy_y = np_utils.to_categorical(encoded_Y)


# define baseline model
def baseline_model():
    # create model
    model = Sequential()
    model.add(Dense(8, input_dim=4, activation='relu'))
    model.add(Dense(3, activation='softmax'))
    # Compile model
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
estimator = KerasClassifier(build_fn=baseline_model, epochs=200, batch_size=5, verbose=0)
kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
results = cross_val_score(estimator, X, dummy_y, cv=kfold)
print("Baseline: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
X_train, X_test, Y_train, Y_test = train_test_split(X, dummy_y, test_size=0.33, random_state=seed)
estimator.fit(X_train, Y_train)
predictions = estimator.predict(X_test)
print(predictions)
print(encoder.inverse_transform(predictions))


Baseline: 97.33% (4.42%)
[2 1 0 1 2 0 1 1 0 1 1 1 0 2 0 2 2 2 0 0 1 2 1 2 2 2 1 1 2 2 2 1 0 2 1 0 0
 0 0 2 2 1 2 2 1 0 1 1 2 0]
['Iris-virginica' 'Iris-versicolor' 'Iris-setosa' 'Iris-versicolor'
 'Iris-virginica' 'Iris-setosa' 'Iris-versicolor' 'Iris-versicolor'
 'Iris-setosa' 'Iris-versicolor' 'Iris-versicolor' 'Iris-versicolor'
 'Iris-setosa' 'Iris-virginica' 'Iris-setosa' 'Iris-virginica'
 'Iris-virginica' 'Iris-virginica' 'Iris-setosa' 'Iris-setosa'
 'Iris-versicolor' 'Iris-virginica' 'Iris-versicolor' 'Iris-virginica'
 'Iris-virginica' 'Iris-virginica' 'Iris-versicolor' 'Iris-versicolor'
 'Iris-virginica' 'Iris-virginica' 'Iris-virginica' 'Iris-versicolor'
 'Iris-setosa' 'Iris-virginica' 'Iris-versicolor' 'Iris-setosa'
 'Iris-setosa' 'Iris-setosa' 'Iris-setosa' 'Iris-virginica'
 'Iris-virginica' 'Iris-versicolor' 'Iris-virginica' 'Iris-virginica'
 'Iris-versicolor' 'Iris-setosa' 'Iris-versicolor' 'Iris-versicolor'
 'Iris-virginica' 'Iris-setosa']
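
The hold-out split at the end of the cell is only used to print predictions; its accuracy can be reported too (a minimal sketch, reusing X_test, Y_test and predictions from the cell above; Y_test is one-hot, so it is collapsed back to class indices for the comparison):

import numpy
from sklearn.metrics import accuracy_score
y_true = numpy.argmax(Y_test, axis=1)                 # one-hot -> class indices
print("hold-out accuracy: %.4f" % accuracy_score(y_true, predictions))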

In [4]:
import numpy
import pandas
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.wrappers.scikit_learn import KerasClassifier
from keras.utils import np_utils
from sklearn.preprocessing import LabelEncoder


# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)
# load dataset
dataframe = pandas.read_csv("iris.csv", header=None)
dataset = dataframe.values
X = dataset[:,0:4].astype(float)
Y = dataset[:,4]


# encode class values as integers
encoder = LabelEncoder()
encoder.fit(Y)
encoded_Y = encoder.transform(Y)
# convert integers to dummy variables (i.e. one hot encoded)
ytrain = np_utils.to_categorical(encoded_Y)


# build model
model = Sequential()
model.add(Dense(10, input_dim=4))
model.add(Activation("relu"))
model.add(Dense(3))
model.add(Activation("softmax"))

#choose optimizer and loss function
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

#train
model.fit(X, ytrain, epochs=400, batch_size=120)


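After the corrected cell above runs, the trained softmax model can be used to predict class labels and map them back to the original species names (a minimal sketch, reusing model, X and encoder from the cell above):

import numpy
probs = model.predict(X)                              # one row of 3 class probabilities per sample
pred_idx = numpy.argmax(probs, axis=1)                # class indices 0, 1 or 2
print(encoder.inverse_transform(pred_idx)[:5])        # back to labels such as 'Iris-setosa'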
