Learn by Coding Examples in Applied Machine Learning

How to Evaluate the Performance of Deep Learning Models in Python?

In [5]:
# ignore warnings
import warnings
warnings.filterwarnings("ignore")

Automatic Verification Dataset

In [6]:
# MLP with automatic validation set: Keras carves the validation split out
# of the training data itself via the `validation_split` argument to fit().
from keras.models import Sequential
from keras.layers import Dense
import numpy

# fix random seed for reproducibility
# NOTE(review): this seeds NumPy only; the Keras/TF backend RNG is not
# seeded here, so weight initialization may still vary between runs —
# confirm if exact reproducibility is required.
numpy.random.seed(7)

# load pima indians dataset
# (CSV with 8 numeric feature columns followed by a binary class label;
# path is relative to the notebook's working directory)
dataset = numpy.loadtxt("pima.indians.diabetes.data.csv", delimiter=",")

# split into input (X) and output (Y) variables
X = dataset[:,0:8]  # columns 0-7: input features
Y = dataset[:,8]    # column 8: binary target (0/1)

# create model
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))  # hidden layer 1: 12 units
model.add(Dense(8, activation='relu'))                # hidden layer 2: 8 units
model.add(Dense(1, activation='sigmoid'))             # output: P(class == 1)

# Compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Fit the model
# validation_split=0.33 holds out the last 33% of (X, Y) for per-epoch
# validation (per Keras docs the split is taken from the end, before
# shuffling); the output below confirms 514 train / 254 validation rows.
# `hist` keeps the History object so later cells can plot the curves.
hist = model.fit(X, Y, validation_split=0.33, epochs=150, batch_size=10)
Train on 514 samples, validate on 254 samples
Epoch 1/150
514/514 [==============================] - 1s 2ms/step - loss: 5.8013 - acc: 0.6401 - val_loss: 5.2654 - val_acc: 0.6732
Epoch 2/150
514/514 [==============================] - 0s 175us/step - loss: 5.7274 - acc: 0.6401 - val_loss: 4.3001 - val_acc: 0.6732
Epoch 3/150
514/514 [==============================] - 0s 178us/step - loss: 2.0524 - acc: 0.5584 - val_loss: 1.1010 - val_acc: 0.6457
Epoch 4/150
514/514 [==============================] - 0s 220us/step - loss: 0.9109 - acc: 0.5720 - val_loss: 0.8697 - val_acc: 0.6299
Epoch 5/150
514/514 [==============================] - 0s 214us/step - loss: 0.7530 - acc: 0.6187 - val_loss: 0.7678 - val_acc: 0.6299
Epoch 6/150
514/514 [==============================] - 0s 188us/step - loss: 0.7050 - acc: 0.6595 - val_loss: 0.7267 - val_acc: 0.6496
Epoch 7/150
514/514 [==============================] - 0s 176us/step - loss: 0.6779 - acc: 0.6984 - val_loss: 0.7149 - val_acc: 0.6142
Epoch 8/150
514/514 [==============================] - 0s 180us/step - loss: 0.6728 - acc: 0.6790 - val_loss: 0.7059 - val_acc: 0.6457
Epoch 9/150
514/514 [==============================] - 0s 168us/step - loss: 0.6692 - acc: 0.6634 - val_loss: 0.8382 - val_acc: 0.4921
Epoch 10/150
514/514 [==============================] - 0s 195us/step - loss: 0.6697 - acc: 0.6907 - val_loss: 0.6901 - val_acc: 0.6850
Epoch 11/150
514/514 [==============================] - 0s 144us/step - loss: 0.6402 - acc: 0.6848 - val_loss: 0.6989 - val_acc: 0.6850
Epoch 12/150
514/514 [==============================] - 0s 158us/step - loss: 0.6338 - acc: 0.6693 - val_loss: 0.6917 - val_acc: 0.6654
Epoch 13/150
514/514 [==============================] - 0s 129us/step - loss: 0.6323 - acc: 0.6926 - val_loss: 0.6645 - val_acc: 0.6732
Epoch 14/150
514/514 [==============================] - 0s 124us/step - loss: 0.6200 - acc: 0.6790 - val_loss: 0.6608 - val_acc: 0.6654
Epoch 15/150
514/514 [==============================] - 0s 134us/step - loss: 0.6288 - acc: 0.6809 - val_loss: 0.6493 - val_acc: 0.6614
Epoch 16/150
514/514 [==============================] - 0s 154us/step - loss: 0.6350 - acc: 0.6887 - val_loss: 0.6560 - val_acc: 0.6693
Epoch 17/150
514/514 [==============================] - 0s 136us/step - loss: 0.6113 - acc: 0.6946 - val_loss: 0.6675 - val_acc: 0.6417
Epoch 18/150
514/514 [==============================] - 0s 125us/step - loss: 0.6036 - acc: 0.6965 - val_loss: 0.6424 - val_acc: 0.6693
Epoch 19/150
514/514 [==============================] - 0s 124us/step - loss: 0.6359 - acc: 0.6809 - val_loss: 0.6316 - val_acc: 0.6772
Epoch 20/150
514/514 [==============================] - 0s 119us/step - loss: 0.5921 - acc: 0.7023 - val_loss: 0.6897 - val_acc: 0.5945
Epoch 21/150
514/514 [==============================] - 0s 107us/step - loss: 0.5904 - acc: 0.7004 - val_loss: 0.6136 - val_acc: 0.6929
Epoch 22/150
514/514 [==============================] - 0s 106us/step - loss: 0.5929 - acc: 0.7023 - val_loss: 0.6214 - val_acc: 0.6654
Epoch 23/150
514/514 [==============================] - 0s 113us/step - loss: 0.5898 - acc: 0.7160 - val_loss: 0.6148 - val_acc: 0.6693
Epoch 24/150
514/514 [==============================] - 0s 108us/step - loss: 0.5747 - acc: 0.7179 - val_loss: 0.6112 - val_acc: 0.7008
Epoch 25/150
514/514 [==============================] - 0s 107us/step - loss: 0.5860 - acc: 0.6887 - val_loss: 0.6197 - val_acc: 0.6614
Epoch 26/150
514/514 [==============================] - 0s 106us/step - loss: 0.5768 - acc: 0.7198 - val_loss: 0.6011 - val_acc: 0.6890
Epoch 27/150
514/514 [==============================] - 0s 106us/step - loss: 0.5771 - acc: 0.7043 - val_loss: 0.6401 - val_acc: 0.6496
Epoch 28/150
514/514 [==============================] - 0s 106us/step - loss: 0.5764 - acc: 0.7121 - val_loss: 0.8253 - val_acc: 0.4921
Epoch 29/150
514/514 [==============================] - 0s 106us/step - loss: 0.5581 - acc: 0.7529 - val_loss: 0.6040 - val_acc: 0.7244
Epoch 30/150
514/514 [==============================] - 0s 115us/step - loss: 0.5696 - acc: 0.7121 - val_loss: 0.6060 - val_acc: 0.7126
Epoch 31/150
514/514 [==============================] - 0s 118us/step - loss: 0.5544 - acc: 0.7276 - val_loss: 0.5965 - val_acc: 0.7008
Epoch 32/150
514/514 [==============================] - 0s 123us/step - loss: 0.5773 - acc: 0.7179 - val_loss: 0.5941 - val_acc: 0.7047
Epoch 33/150
514/514 [==============================] - 0s 113us/step - loss: 0.5702 - acc: 0.7257 - val_loss: 0.5879 - val_acc: 0.7087
Epoch 34/150
514/514 [==============================] - 0s 117us/step - loss: 0.5626 - acc: 0.7237 - val_loss: 0.5905 - val_acc: 0.6929
Epoch 35/150
514/514 [==============================] - 0s 117us/step - loss: 0.5531 - acc: 0.7335 - val_loss: 0.6026 - val_acc: 0.6614
Epoch 36/150
514/514 [==============================] - 0s 119us/step - loss: 0.5804 - acc: 0.7101 - val_loss: 0.5798 - val_acc: 0.6890
Epoch 37/150
514/514 [==============================] - 0s 123us/step - loss: 0.5569 - acc: 0.7121 - val_loss: 0.5789 - val_acc: 0.6890
Epoch 38/150
514/514 [==============================] - 0s 118us/step - loss: 0.5427 - acc: 0.7588 - val_loss: 0.5729 - val_acc: 0.7205
Epoch 39/150
514/514 [==============================] - 0s 122us/step - loss: 0.5494 - acc: 0.7179 - val_loss: 0.5726 - val_acc: 0.7362
Epoch 40/150
514/514 [==============================] - 0s 116us/step - loss: 0.5622 - acc: 0.7412 - val_loss: 0.5686 - val_acc: 0.7323
Epoch 41/150
514/514 [==============================] - 0s 106us/step - loss: 0.5530 - acc: 0.7451 - val_loss: 0.5749 - val_acc: 0.7283
Epoch 42/150
514/514 [==============================] - 0s 106us/step - loss: 0.5649 - acc: 0.7140 - val_loss: 0.6091 - val_acc: 0.6772
Epoch 43/150
514/514 [==============================] - 0s 105us/step - loss: 0.5422 - acc: 0.7257 - val_loss: 0.5784 - val_acc: 0.7008
Epoch 44/150
514/514 [==============================] - 0s 104us/step - loss: 0.5509 - acc: 0.7101 - val_loss: 0.5923 - val_acc: 0.7008
Epoch 45/150
514/514 [==============================] - 0s 115us/step - loss: 0.5387 - acc: 0.7393 - val_loss: 0.5714 - val_acc: 0.7165
Epoch 46/150
514/514 [==============================] - 0s 117us/step - loss: 0.5433 - acc: 0.7354 - val_loss: 0.6049 - val_acc: 0.6890
Epoch 47/150
514/514 [==============================] - 0s 118us/step - loss: 0.5532 - acc: 0.7198 - val_loss: 0.5821 - val_acc: 0.6850
Epoch 48/150
514/514 [==============================] - 0s 117us/step - loss: 0.5435 - acc: 0.7335 - val_loss: 0.5805 - val_acc: 0.6969
Epoch 49/150
514/514 [==============================] - 0s 110us/step - loss: 0.5417 - acc: 0.7471 - val_loss: 0.5929 - val_acc: 0.6890
Epoch 50/150
514/514 [==============================] - 0s 116us/step - loss: 0.5465 - acc: 0.7412 - val_loss: 0.5732 - val_acc: 0.7205
Epoch 51/150
514/514 [==============================] - 0s 111us/step - loss: 0.5331 - acc: 0.7529 - val_loss: 0.5596 - val_acc: 0.7205
Epoch 52/150
514/514 [==============================] - 0s 109us/step - loss: 0.5521 - acc: 0.7023 - val_loss: 0.5987 - val_acc: 0.6890
Epoch 53/150
514/514 [==============================] - 0s 118us/step - loss: 0.5490 - acc: 0.7160 - val_loss: 0.5643 - val_acc: 0.7283
Epoch 54/150
514/514 [==============================] - 0s 119us/step - loss: 0.5398 - acc: 0.7412 - val_loss: 0.5676 - val_acc: 0.7480
Epoch 55/150
514/514 [==============================] - 0s 115us/step - loss: 0.5274 - acc: 0.7490 - val_loss: 0.5582 - val_acc: 0.7283
Epoch 56/150
514/514 [==============================] - 0s 118us/step - loss: 0.5307 - acc: 0.7374 - val_loss: 0.5516 - val_acc: 0.7323
Epoch 57/150
514/514 [==============================] - 0s 115us/step - loss: 0.5451 - acc: 0.7276 - val_loss: 0.5666 - val_acc: 0.7205
Epoch 58/150
514/514 [==============================] - 0s 118us/step - loss: 0.5359 - acc: 0.7529 - val_loss: 0.5666 - val_acc: 0.7283
Epoch 59/150
514/514 [==============================] - 0s 124us/step - loss: 0.5346 - acc: 0.7335 - val_loss: 0.5620 - val_acc: 0.7244
Epoch 60/150
514/514 [==============================] - 0s 115us/step - loss: 0.5321 - acc: 0.7315 - val_loss: 0.5523 - val_acc: 0.7362
Epoch 61/150
514/514 [==============================] - 0s 120us/step - loss: 0.5414 - acc: 0.7335 - val_loss: 0.6179 - val_acc: 0.6850
Epoch 62/150
514/514 [==============================] - 0s 131us/step - loss: 0.5317 - acc: 0.7198 - val_loss: 0.5695 - val_acc: 0.7126
Epoch 63/150
514/514 [==============================] - 0s 138us/step - loss: 0.5215 - acc: 0.7393 - val_loss: 0.5763 - val_acc: 0.7008
Epoch 64/150
514/514 [==============================] - 0s 115us/step - loss: 0.5290 - acc: 0.7568 - val_loss: 0.5453 - val_acc: 0.7441
Epoch 65/150
514/514 [==============================] - 0s 122us/step - loss: 0.5246 - acc: 0.7412 - val_loss: 0.5744 - val_acc: 0.7126
Epoch 66/150
514/514 [==============================] - 0s 119us/step - loss: 0.5370 - acc: 0.7296 - val_loss: 0.5678 - val_acc: 0.7520
Epoch 67/150
514/514 [==============================] - 0s 118us/step - loss: 0.5203 - acc: 0.7607 - val_loss: 0.5487 - val_acc: 0.7362
Epoch 68/150
514/514 [==============================] - 0s 125us/step - loss: 0.5412 - acc: 0.7451 - val_loss: 0.5610 - val_acc: 0.7126
Epoch 69/150
514/514 [==============================] - 0s 119us/step - loss: 0.5423 - acc: 0.7179 - val_loss: 0.5450 - val_acc: 0.7323
Epoch 70/150
514/514 [==============================] - 0s 114us/step - loss: 0.5278 - acc: 0.7335 - val_loss: 0.5633 - val_acc: 0.7283
Epoch 71/150
514/514 [==============================] - 0s 103us/step - loss: 0.5181 - acc: 0.7354 - val_loss: 0.5803 - val_acc: 0.7126
Epoch 72/150
514/514 [==============================] - 0s 103us/step - loss: 0.5199 - acc: 0.7510 - val_loss: 0.5761 - val_acc: 0.7205
Epoch 73/150
514/514 [==============================] - 0s 103us/step - loss: 0.5311 - acc: 0.7140 - val_loss: 0.5576 - val_acc: 0.7165
Epoch 74/150
514/514 [==============================] - 0s 103us/step - loss: 0.5121 - acc: 0.7549 - val_loss: 0.5559 - val_acc: 0.7165
Epoch 75/150
514/514 [==============================] - 0s 104us/step - loss: 0.5222 - acc: 0.7412 - val_loss: 0.5654 - val_acc: 0.7244
Epoch 76/150
514/514 [==============================] - 0s 103us/step - loss: 0.5182 - acc: 0.7490 - val_loss: 0.5494 - val_acc: 0.7283
Epoch 77/150
514/514 [==============================] - 0s 103us/step - loss: 0.5167 - acc: 0.7549 - val_loss: 0.5446 - val_acc: 0.7205
Epoch 78/150
514/514 [==============================] - 0s 102us/step - loss: 0.5169 - acc: 0.7393 - val_loss: 0.5642 - val_acc: 0.7087
Epoch 79/150
514/514 [==============================] - 0s 103us/step - loss: 0.5642 - acc: 0.7276 - val_loss: 0.5510 - val_acc: 0.7126
Epoch 80/150
514/514 [==============================] - 0s 103us/step - loss: 0.5216 - acc: 0.7393 - val_loss: 0.5535 - val_acc: 0.7402
Epoch 81/150
514/514 [==============================] - 0s 103us/step - loss: 0.5314 - acc: 0.7393 - val_loss: 0.5557 - val_acc: 0.7441
Epoch 82/150
514/514 [==============================] - 0s 103us/step - loss: 0.5292 - acc: 0.7218 - val_loss: 0.5423 - val_acc: 0.7283
Epoch 83/150
514/514 [==============================] - 0s 102us/step - loss: 0.5031 - acc: 0.7490 - val_loss: 0.5495 - val_acc: 0.7480
Epoch 84/150
514/514 [==============================] - 0s 102us/step - loss: 0.5117 - acc: 0.7374 - val_loss: 0.5491 - val_acc: 0.7323
Epoch 85/150
514/514 [==============================] - 0s 103us/step - loss: 0.5186 - acc: 0.7393 - val_loss: 0.5419 - val_acc: 0.7362
Epoch 86/150
514/514 [==============================] - 0s 103us/step - loss: 0.5128 - acc: 0.7471 - val_loss: 0.5415 - val_acc: 0.7362
Epoch 87/150
514/514 [==============================] - 0s 106us/step - loss: 0.5116 - acc: 0.7374 - val_loss: 0.5396 - val_acc: 0.7165
Epoch 88/150
514/514 [==============================] - 0s 105us/step - loss: 0.5080 - acc: 0.7354 - val_loss: 0.5490 - val_acc: 0.7402
Epoch 89/150
514/514 [==============================] - 0s 104us/step - loss: 0.5070 - acc: 0.7471 - val_loss: 0.5422 - val_acc: 0.7362
Epoch 90/150
514/514 [==============================] - 0s 108us/step - loss: 0.5137 - acc: 0.7393 - val_loss: 0.5455 - val_acc: 0.7559
Epoch 91/150
514/514 [==============================] - 0s 104us/step - loss: 0.5129 - acc: 0.7529 - val_loss: 0.5420 - val_acc: 0.7283
Epoch 92/150
514/514 [==============================] - 0s 104us/step - loss: 0.5080 - acc: 0.7374 - val_loss: 0.5451 - val_acc: 0.7402
Epoch 93/150
514/514 [==============================] - 0s 104us/step - loss: 0.5117 - acc: 0.7529 - val_loss: 0.5308 - val_acc: 0.7402
Epoch 94/150
514/514 [==============================] - 0s 105us/step - loss: 0.5048 - acc: 0.7490 - val_loss: 0.5498 - val_acc: 0.7402
Epoch 95/150
514/514 [==============================] - 0s 105us/step - loss: 0.5029 - acc: 0.7626 - val_loss: 0.5586 - val_acc: 0.7559
Epoch 96/150
514/514 [==============================] - 0s 104us/step - loss: 0.5210 - acc: 0.7335 - val_loss: 0.5509 - val_acc: 0.7323
Epoch 97/150
514/514 [==============================] - 0s 108us/step - loss: 0.5095 - acc: 0.7471 - val_loss: 0.5418 - val_acc: 0.7362
Epoch 98/150
514/514 [==============================] - 0s 121us/step - loss: 0.5031 - acc: 0.7646 - val_loss: 0.5699 - val_acc: 0.7244
Epoch 99/150
514/514 [==============================] - 0s 117us/step - loss: 0.5229 - acc: 0.7549 - val_loss: 0.5499 - val_acc: 0.7520
Epoch 100/150
514/514 [==============================] - 0s 101us/step - loss: 0.5157 - acc: 0.7237 - val_loss: 0.5514 - val_acc: 0.7323
Epoch 101/150
514/514 [==============================] - 0s 101us/step - loss: 0.5140 - acc: 0.7276 - val_loss: 0.5382 - val_acc: 0.7480
Epoch 102/150
514/514 [==============================] - 0s 105us/step - loss: 0.5021 - acc: 0.7588 - val_loss: 0.5253 - val_acc: 0.7559
Epoch 103/150
514/514 [==============================] - 0s 101us/step - loss: 0.5105 - acc: 0.7451 - val_loss: 0.5607 - val_acc: 0.7244
Epoch 104/150
514/514 [==============================] - 0s 102us/step - loss: 0.5140 - acc: 0.7412 - val_loss: 0.5546 - val_acc: 0.7638
Epoch 105/150
514/514 [==============================] - 0s 105us/step - loss: 0.5029 - acc: 0.7451 - val_loss: 0.5366 - val_acc: 0.7520
Epoch 106/150
514/514 [==============================] - 0s 103us/step - loss: 0.5197 - acc: 0.7315 - val_loss: 0.5305 - val_acc: 0.7559
Epoch 107/150
514/514 [==============================] - 0s 101us/step - loss: 0.5223 - acc: 0.7588 - val_loss: 0.6723 - val_acc: 0.6850
Epoch 108/150
514/514 [==============================] - 0s 101us/step - loss: 0.5189 - acc: 0.7432 - val_loss: 0.5352 - val_acc: 0.7441
Epoch 109/150
514/514 [==============================] - 0s 101us/step - loss: 0.5015 - acc: 0.7510 - val_loss: 0.5625 - val_acc: 0.7402
Epoch 110/150
514/514 [==============================] - 0s 101us/step - loss: 0.5180 - acc: 0.7471 - val_loss: 0.5886 - val_acc: 0.7047
Epoch 111/150
514/514 [==============================] - 0s 101us/step - loss: 0.5257 - acc: 0.7354 - val_loss: 0.5269 - val_acc: 0.7323
Epoch 112/150
514/514 [==============================] - 0s 104us/step - loss: 0.4936 - acc: 0.7665 - val_loss: 0.5370 - val_acc: 0.7638
Epoch 113/150
514/514 [==============================] - 0s 105us/step - loss: 0.4975 - acc: 0.7665 - val_loss: 0.5245 - val_acc: 0.7441
Epoch 114/150
514/514 [==============================] - 0s 102us/step - loss: 0.4944 - acc: 0.7626 - val_loss: 0.5494 - val_acc: 0.7480
Epoch 115/150
514/514 [==============================] - 0s 101us/step - loss: 0.5004 - acc: 0.7568 - val_loss: 0.5333 - val_acc: 0.7559
Epoch 116/150
514/514 [==============================] - 0s 101us/step - loss: 0.4909 - acc: 0.7393 - val_loss: 0.5261 - val_acc: 0.7638
Epoch 117/150
514/514 [==============================] - 0s 101us/step - loss: 0.5145 - acc: 0.7529 - val_loss: 0.5588 - val_acc: 0.7165
Epoch 118/150
514/514 [==============================] - 0s 101us/step - loss: 0.5008 - acc: 0.7510 - val_loss: 0.5265 - val_acc: 0.7402
Epoch 119/150
514/514 [==============================] - 0s 101us/step - loss: 0.4992 - acc: 0.7471 - val_loss: 0.5414 - val_acc: 0.7283
Epoch 120/150
514/514 [==============================] - 0s 101us/step - loss: 0.4922 - acc: 0.7490 - val_loss: 0.5491 - val_acc: 0.7402
Epoch 121/150
514/514 [==============================] - 0s 101us/step - loss: 0.4968 - acc: 0.7549 - val_loss: 0.5689 - val_acc: 0.7165
Epoch 122/150
514/514 [==============================] - 0s 101us/step - loss: 0.4967 - acc: 0.7588 - val_loss: 0.5330 - val_acc: 0.7677
Epoch 123/150
514/514 [==============================] - 0s 101us/step - loss: 0.4985 - acc: 0.7451 - val_loss: 0.5380 - val_acc: 0.7756
Epoch 124/150
514/514 [==============================] - 0s 102us/step - loss: 0.4886 - acc: 0.7646 - val_loss: 0.5455 - val_acc: 0.7480
Epoch 125/150
514/514 [==============================] - 0s 101us/step - loss: 0.5210 - acc: 0.7412 - val_loss: 0.5464 - val_acc: 0.7402
Epoch 126/150
514/514 [==============================] - 0s 101us/step - loss: 0.5002 - acc: 0.7490 - val_loss: 0.5433 - val_acc: 0.7165
Epoch 127/150
514/514 [==============================] - 0s 101us/step - loss: 0.4880 - acc: 0.7704 - val_loss: 0.5495 - val_acc: 0.7598
Epoch 128/150
514/514 [==============================] - 0s 101us/step - loss: 0.4890 - acc: 0.7763 - val_loss: 0.5827 - val_acc: 0.7283
Epoch 129/150
514/514 [==============================] - 0s 101us/step - loss: 0.5204 - acc: 0.7296 - val_loss: 0.5231 - val_acc: 0.7638
Epoch 130/150
514/514 [==============================] - 0s 101us/step - loss: 0.4945 - acc: 0.7490 - val_loss: 0.5196 - val_acc: 0.7441
Epoch 131/150
514/514 [==============================] - 0s 102us/step - loss: 0.4906 - acc: 0.7490 - val_loss: 0.5400 - val_acc: 0.7520
Epoch 132/150
514/514 [==============================] - 0s 102us/step - loss: 0.4914 - acc: 0.7626 - val_loss: 0.5293 - val_acc: 0.7598
Epoch 133/150
514/514 [==============================] - 0s 102us/step - loss: 0.4958 - acc: 0.7549 - val_loss: 0.5172 - val_acc: 0.7402
Epoch 134/150
514/514 [==============================] - 0s 101us/step - loss: 0.4918 - acc: 0.7451 - val_loss: 0.5362 - val_acc: 0.7441
Epoch 135/150
514/514 [==============================] - 0s 101us/step - loss: 0.4793 - acc: 0.7685 - val_loss: 0.5445 - val_acc: 0.7362
Epoch 136/150
514/514 [==============================] - 0s 102us/step - loss: 0.5031 - acc: 0.7588 - val_loss: 0.5230 - val_acc: 0.7559
Epoch 137/150
514/514 [==============================] - 0s 101us/step - loss: 0.5246 - acc: 0.7393 - val_loss: 0.5473 - val_acc: 0.7677
Epoch 138/150
514/514 [==============================] - 0s 101us/step - loss: 0.4937 - acc: 0.7374 - val_loss: 0.5195 - val_acc: 0.7520
Epoch 139/150
514/514 [==============================] - 0s 102us/step - loss: 0.4946 - acc: 0.7529 - val_loss: 0.5610 - val_acc: 0.7638
Epoch 140/150
514/514 [==============================] - 0s 102us/step - loss: 0.4950 - acc: 0.7646 - val_loss: 0.5558 - val_acc: 0.7165
Epoch 141/150
514/514 [==============================] - 0s 101us/step - loss: 0.5018 - acc: 0.7451 - val_loss: 0.5129 - val_acc: 0.7362
Epoch 142/150
514/514 [==============================] - 0s 100us/step - loss: 0.4868 - acc: 0.7685 - val_loss: 0.5222 - val_acc: 0.7598
Epoch 143/150
514/514 [==============================] - 0s 102us/step - loss: 0.4958 - acc: 0.7451 - val_loss: 0.5178 - val_acc: 0.7441
Epoch 144/150
514/514 [==============================] - 0s 101us/step - loss: 0.5357 - acc: 0.7490 - val_loss: 0.5716 - val_acc: 0.7165
Epoch 145/150
514/514 [==============================] - 0s 100us/step - loss: 0.5127 - acc: 0.7335 - val_loss: 0.5233 - val_acc: 0.7559
Epoch 146/150
514/514 [==============================] - 0s 101us/step - loss: 0.5001 - acc: 0.7626 - val_loss: 0.5421 - val_acc: 0.7205
Epoch 147/150
514/514 [==============================] - 0s 103us/step - loss: 0.4856 - acc: 0.7646 - val_loss: 0.5227 - val_acc: 0.7559
Epoch 148/150
514/514 [==============================] - 0s 101us/step - loss: 0.4892 - acc: 0.7685 - val_loss: 0.5193 - val_acc: 0.7441
Epoch 149/150
514/514 [==============================] - 0s 101us/step - loss: 0.4821 - acc: 0.7626 - val_loss: 0.5356 - val_acc: 0.7756
Epoch 150/150
514/514 [==============================] - 0s 104us/step - loss: 0.4840 - acc: 0.7763 - val_loss: 0.5299 - val_acc: 0.7441

Manual Verification Dataset

In [7]:
# MLP with manual validation set: an explicit train/test split made with
# scikit-learn, passed to fit() via `validation_data` instead of relying on
# Keras' automatic `validation_split`.
from keras.models import Sequential
from keras.layers import Dense
from sklearn.model_selection import train_test_split
import numpy

# fix random seed for reproducibility
# NOTE(review): seeds NumPy only; the Keras/TF backend RNG is unseeded, so
# weight initialization can still differ between runs — confirm if exact
# reproducibility matters.
seed = 7
numpy.random.seed(seed)

# load pima indians dataset (8 feature columns + 1 binary label column)
dataset = numpy.loadtxt("pima.indians.diabetes.data.csv", delimiter=",")

# split into input (X) and output (Y) variables
X = dataset[:,0:8]  # columns 0-7: input features
Y = dataset[:,8]    # column 8: binary target (0/1)

# split into 67% for train and 33% for test
# random_state=seed makes this shuffled split itself reproducible
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.33, random_state=seed)

# create model
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))  # hidden layer 1: 12 units
model.add(Dense(8, activation='relu'))                # hidden layer 2: 8 units
model.add(Dense(1, activation='sigmoid'))             # output: P(class == 1)

# Compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Fit the model
# NOTE(review): the logged output below shows loss/accuracy frozen at the
# same values (loss 5.5190, acc 0.6576) for all 150 epochs — training
# appears stuck, plausibly dead ReLUs from unscaled inputs; consider
# standardizing X before fitting. Verify by re-running.
hist = model.fit(X_train, y_train, validation_data=(X_test,y_test), epochs=150, batch_size=10)
Train on 514 samples, validate on 254 samples
Epoch 1/150
514/514 [==============================] - 1s 2ms/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 2/150
514/514 [==============================] - 0s 147us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 3/150
514/514 [==============================] - 0s 111us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 4/150
514/514 [==============================] - 0s 115us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 5/150
514/514 [==============================] - 0s 119us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 6/150
514/514 [==============================] - 0s 120us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 7/150
514/514 [==============================] - 0s 117us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 8/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 9/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 10/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 11/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 12/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 13/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 14/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 15/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 16/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 17/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 18/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 19/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 20/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 21/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 22/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 23/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 24/150
514/514 [==============================] - 0s 125us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 25/150
514/514 [==============================] - 0s 135us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 26/150
514/514 [==============================] - 0s 137us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 27/150
514/514 [==============================] - 0s 136us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 28/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 29/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 30/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 31/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 32/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 33/150
514/514 [==============================] - 0s 106us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 34/150
514/514 [==============================] - 0s 115us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 35/150
514/514 [==============================] - 0s 135us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 36/150
514/514 [==============================] - 0s 135us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 37/150
514/514 [==============================] - 0s 142us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 38/150
514/514 [==============================] - 0s 116us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 39/150
514/514 [==============================] - 0s 140us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 40/150
514/514 [==============================] - 0s 137us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 41/150
514/514 [==============================] - 0s 133us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 42/150
514/514 [==============================] - 0s 126us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 43/150
514/514 [==============================] - 0s 147us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 44/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 45/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 46/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 47/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 48/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 49/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 50/150
514/514 [==============================] - 0s 120us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 51/150
514/514 [==============================] - 0s 120us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 52/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 53/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 54/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 55/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 56/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 57/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 58/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 59/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 60/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 61/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 62/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 63/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 64/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 65/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 66/150
514/514 [==============================] - 0s 104us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 67/150
514/514 [==============================] - 0s 106us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 68/150
514/514 [==============================] - 0s 104us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 69/150
514/514 [==============================] - 0s 105us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 70/150
514/514 [==============================] - 0s 104us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 71/150
514/514 [==============================] - 0s 104us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 72/150
514/514 [==============================] - 0s 104us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 73/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 74/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 75/150
514/514 [==============================] - 0s 104us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 76/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 77/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 78/150
514/514 [==============================] - 0s 118us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 79/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 80/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 81/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 82/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 83/150
514/514 [==============================] - 0s 110us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 84/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 85/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 86/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 87/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 88/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 89/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 90/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 91/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 92/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 93/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 94/150
514/514 [==============================] - 0s 108us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 95/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 96/150
514/514 [==============================] - 0s 107us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 97/150
514/514 [==============================] - 0s 105us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 98/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 99/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 100/150
514/514 [==============================] - 0s 101us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 101/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 102/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 103/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 104/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 105/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 106/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 107/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 108/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 109/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 110/150
514/514 [==============================] - 0s 104us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 111/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 112/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 113/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 114/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 115/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 116/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 117/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 118/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 119/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 120/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 121/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 122/150
514/514 [==============================] - 0s 109us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 123/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 124/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 125/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 126/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 127/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 128/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 129/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 130/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 131/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 132/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 133/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 134/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 135/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 136/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 137/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 138/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 139/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 140/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 141/150
514/514 [==============================] - 0s 105us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 142/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 143/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 144/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 145/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 146/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 147/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 148/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 149/150
514/514 [==============================] - 0s 102us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378
Epoch 150/150
514/514 [==============================] - 0s 103us/step - loss: 5.5190 - acc: 0.6576 - val_loss: 5.8381 - val_acc: 0.6378

k-Fold Cross Validation

In [9]:
# MLP for Pima Indians Dataset with 10-fold cross validation
from keras.models import Sequential
from keras.layers import Dense
from sklearn.model_selection import StratifiedKFold
import numpy

# fix random seed for reproducibility (seeds both numpy and the fold shuffling)
RANDOM_SEED = 7
numpy.random.seed(RANDOM_SEED)

# load pima indians dataset: 8 feature columns followed by the binary class label
data = numpy.loadtxt("pima.indians.diabetes.data.csv", delimiter=",")

# split into input features and output labels
features, labels = data[:, 0:8], data[:, 8]

# stratified 10-fold harness: preserves the class ratio within every fold
splitter = StratifiedKFold(n_splits=10, shuffle=True, random_state=RANDOM_SEED)
fold_accuracies = []

for train_idx, test_idx in splitter.split(features, labels):

    # build a fresh 8-12-8-1 network for each fold so no weights leak between folds
    net = Sequential()
    net.add(Dense(12, input_dim=8, activation='relu'))
    net.add(Dense(8, activation='relu'))
    net.add(Dense(1, activation='sigmoid'))

    # binary classification: cross-entropy loss, adam optimizer, track accuracy
    net.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

    # train silently on this fold's training partition
    net.fit(features[train_idx], labels[train_idx], epochs=150, batch_size=10, verbose=0)

    # score on the held-out partition; evaluate returns [loss, accuracy]
    fold_scores = net.evaluate(features[test_idx], labels[test_idx], verbose=0)

    print("%s: %.2f%%" % (net.metrics_names[1], fold_scores[1] * 100))
    fold_accuracies.append(fold_scores[1] * 100)

print(); print("Accuracy: (mean & std)")
print("%.2f%% (+/- %.2f%%)" % (numpy.mean(fold_accuracies), numpy.std(fold_accuracies)))
acc: 77.92%
acc: 75.32%
acc: 66.23%
acc: 81.82%
acc: 79.22%
acc: 64.94%
acc: 66.23%
acc: 64.94%
acc: 65.79%
acc: 72.37%

Accuracy: (mean & std)
71.48% (+/- 6.30%)
In [ ]: