In [1]:
# ------------------------------------------------------------------------
## How to setup a binary classification Deep Leaning Model in Keras
# ------------------------------------------------------------------------

def Learn_By_Example_311(): 
    
    print()
    print(format('How to setup a binary classification Deep Leaning Model in Keras','*^82'))    
    
    import warnings
    warnings.filterwarnings("ignore")
    
    # load libraries
    import keras as K
    from keras.callbacks import EarlyStopping, ModelCheckpoint
    from keras.initializers import VarianceScaling
    from keras.regularizers import l2
    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from sklearn.metrics import confusion_matrix
    from keras.datasets import imdb
    from keras.preprocessing.text import Tokenizer    
    
    # load data and Set the number of features we want
    number_of_features = 1000
    
    # Load data and target vector from movie review data
    (train_data, train_target), (test_data, test_target) = imdb.load_data(num_words=number_of_features)
    
    print(); print(train_data.shape);   print(train_data)
    print(); print(train_target.shape); print(train_target)    
    print(); print(test_data.shape);    print(test_data)
    print(); print(test_target.shape);  print(test_target)    
    
    # Convert movie review data to one-hot encoded feature matrix
    tokenizer = Tokenizer(num_words=number_of_features)
    train_features = tokenizer.sequences_to_matrix(train_data, mode='binary')
    test_features = tokenizer.sequences_to_matrix(test_data, mode='binary')
    
    # Define a Deep Learning Model
    model = Sequential()
    model.add(Dense(units=16, input_shape=(number_of_features,), 
                    kernel_regularizer=l2(0.001), # weight regularizer
                    kernel_initializer=VarianceScaling(), # initializer
                    activation='relu'))
    model.add(Dropout(0.5)) # Dropout Layer
    model.add(Dense(units=22, 
                    kernel_regularizer=l2(0.01), # weight regularizer
                    kernel_initializer=VarianceScaling(), # initializer                   
                    activation='relu'))
    model.add(Dropout(0.5)) # Dropout Layer    
    model.add(Dense(1, activation='sigmoid'))
    
    # Compile the Model
    model.compile(loss='binary_crossentropy', optimizer='sgd', 
                  metrics=['acc','mae'])

    # Set callback functions to early stop training and save the best model so far
    callbacks = [EarlyStopping(monitor='loss', patience=20),
                 ModelCheckpoint(filepath='best_model.h5', monitor='loss', 
                                 save_best_only=True)]    

    # Train the Model
    model.fit(train_features, train_target, epochs=150, batch_size=64, verbose = 1,
              # Validate model as a part of fitting process
              validation_data=(test_features, test_target),
              callbacks=callbacks)
    
    # Evaluate the model
    scores = model.evaluate(test_features, test_target)
    print(); print(model.metrics_names); print(scores)
    print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1]*100))
    
    # Confusion Matrix
    y_pred = model.predict(test_features)
    y_pred = (y_pred > 0.5)
    cm = confusion_matrix(test_target, y_pred); print("\nConfusion Matrix:\n", cm)
    
    # More on the Model
    print("\n\nBackend: ", K.backend.backend())    
    print(model.summary())

Learn_By_Example_311()
*********How to setup a binary classification Deep Leaning Model in Keras*********
Using TensorFlow backend.
(25000,)
[list([1, 14, 22, 16, 43, 530, 973, 2, 2, 65, 458, 2, 66, 2, 4, 173, 36, 256, 5, 25, 100, 43, 838, 112, 50, 670, 2, 9, 35, 480, 284, 5, 150, 4, 172, 112, 167, 2, 336, 385, 39, 4, 172, 2, 2, 17, 546, 38, 13, 447, 4, 192, 50, 16, 6, 147, 2, 19, 14, 22, 4, 2, 2, 469, 4, 22, 71, 87, 12, 16, 43, 530, 38, 76, 15, 13, 2, 4, 22, 17, 515, 17, 12, 16, 626, 18, 2, 5, 62, 386, 12, 8, 316, 8, 106, 5, 4, 2, 2, 16, 480, 66, 2, 33, 4, 130, 12, 16, 38, 619, 5, 25, 124, 51, 36, 135, 48, 25, 2, 33, 6, 22, 12, 215, 28, 77, 52, 5, 14, 407, 16, 82, 2, 8, 4, 107, 117, 2, 15, 256, 4, 2, 7, 2, 5, 723, 36, 71, 43, 530, 476, 26, 400, 317, 46, 7, 4, 2, 2, 13, 104, 88, 4, 381, 15, 297, 98, 32, 2, 56, 26, 141, 6, 194, 2, 18, 4, 226, 22, 21, 134, 476, 26, 480, 5, 144, 30, 2, 18, 51, 36, 28, 224, 92, 25, 104, 4, 226, 65, 16, 38, 2, 88, 12, 16, 283, 5, 16, 2, 113, 103, 32, 15, 16, 2, 19, 178, 32])
 list([1, 194, 2, 194, 2, 78, 228, 5, 6, 2, 2, 2, 134, 26, 4, 715, 8, 118, 2, 14, 394, 20, 13, 119, 954, 189, 102, 5, 207, 110, 2, 21, 14, 69, 188, 8, 30, 23, 7, 4, 249, 126, 93, 4, 114, 9, 2, 2, 5, 647, 4, 116, 9, 35, 2, 4, 229, 9, 340, 2, 4, 118, 9, 4, 130, 2, 19, 4, 2, 5, 89, 29, 952, 46, 37, 4, 455, 9, 45, 43, 38, 2, 2, 398, 4, 2, 26, 2, 5, 163, 11, 2, 2, 4, 2, 9, 194, 775, 7, 2, 2, 349, 2, 148, 605, 2, 2, 15, 123, 125, 68, 2, 2, 15, 349, 165, 2, 98, 5, 4, 228, 9, 43, 2, 2, 15, 299, 120, 5, 120, 174, 11, 220, 175, 136, 50, 9, 2, 228, 2, 5, 2, 656, 245, 2, 5, 4, 2, 131, 152, 491, 18, 2, 32, 2, 2, 14, 9, 6, 371, 78, 22, 625, 64, 2, 9, 8, 168, 145, 23, 4, 2, 15, 16, 4, 2, 5, 28, 6, 52, 154, 462, 33, 89, 78, 285, 16, 145, 95])
 list([1, 14, 47, 8, 30, 31, 7, 4, 249, 108, 7, 4, 2, 54, 61, 369, 13, 71, 149, 14, 22, 112, 4, 2, 311, 12, 16, 2, 33, 75, 43, 2, 296, 4, 86, 320, 35, 534, 19, 263, 2, 2, 4, 2, 33, 89, 78, 12, 66, 16, 4, 360, 7, 4, 58, 316, 334, 11, 4, 2, 43, 645, 662, 8, 257, 85, 2, 42, 2, 2, 83, 68, 2, 15, 36, 165, 2, 278, 36, 69, 2, 780, 8, 106, 14, 2, 2, 18, 6, 22, 12, 215, 28, 610, 40, 6, 87, 326, 23, 2, 21, 23, 22, 12, 272, 40, 57, 31, 11, 4, 22, 47, 6, 2, 51, 9, 170, 23, 595, 116, 595, 2, 13, 191, 79, 638, 89, 2, 14, 9, 8, 106, 607, 624, 35, 534, 6, 227, 7, 129, 113])
 ...
 list([1, 11, 6, 230, 245, 2, 9, 6, 2, 446, 2, 45, 2, 84, 2, 2, 21, 4, 912, 84, 2, 325, 725, 134, 2, 2, 84, 5, 36, 28, 57, 2, 21, 8, 140, 8, 703, 5, 2, 84, 56, 18, 2, 14, 9, 31, 7, 4, 2, 2, 2, 2, 2, 18, 6, 20, 207, 110, 563, 12, 8, 2, 2, 8, 97, 6, 20, 53, 2, 74, 4, 460, 364, 2, 29, 270, 11, 960, 108, 45, 40, 29, 2, 395, 11, 6, 2, 500, 7, 2, 89, 364, 70, 29, 140, 4, 64, 2, 11, 4, 2, 26, 178, 4, 529, 443, 2, 5, 27, 710, 117, 2, 2, 165, 47, 84, 37, 131, 818, 14, 595, 10, 10, 61, 2, 2, 10, 10, 288, 2, 2, 34, 2, 2, 4, 65, 496, 4, 231, 7, 790, 5, 6, 320, 234, 2, 234, 2, 2, 7, 496, 4, 139, 929, 2, 2, 2, 5, 2, 18, 4, 2, 2, 250, 11, 2, 2, 4, 2, 2, 747, 2, 372, 2, 2, 541, 2, 7, 4, 59, 2, 4, 2, 2])
 list([1, 2, 2, 69, 72, 2, 13, 610, 930, 8, 12, 582, 23, 5, 16, 484, 685, 54, 349, 11, 2, 2, 45, 58, 2, 13, 197, 12, 16, 43, 23, 2, 5, 62, 30, 145, 402, 11, 2, 51, 575, 32, 61, 369, 71, 66, 770, 12, 2, 75, 100, 2, 8, 4, 105, 37, 69, 147, 712, 75, 2, 44, 257, 390, 5, 69, 263, 514, 105, 50, 286, 2, 23, 4, 123, 13, 161, 40, 5, 421, 4, 116, 16, 897, 13, 2, 40, 319, 2, 112, 2, 11, 2, 121, 25, 70, 2, 4, 719, 2, 13, 18, 31, 62, 40, 8, 2, 4, 2, 7, 14, 123, 5, 942, 25, 8, 721, 12, 145, 5, 202, 12, 160, 580, 202, 12, 6, 52, 58, 2, 92, 401, 728, 12, 39, 14, 251, 8, 15, 251, 5, 2, 12, 38, 84, 80, 124, 12, 9, 23])
 list([1, 17, 6, 194, 337, 7, 4, 204, 22, 45, 254, 8, 106, 14, 123, 4, 2, 270, 2, 5, 2, 2, 732, 2, 101, 405, 39, 14, 2, 4, 2, 9, 115, 50, 305, 12, 47, 4, 168, 5, 235, 7, 38, 111, 699, 102, 7, 4, 2, 2, 9, 24, 6, 78, 2, 17, 2, 2, 21, 27, 2, 2, 5, 2, 2, 92, 2, 4, 2, 7, 4, 204, 42, 97, 90, 35, 221, 109, 29, 127, 27, 118, 8, 97, 12, 157, 21, 2, 2, 9, 6, 66, 78, 2, 4, 631, 2, 5, 2, 272, 191, 2, 6, 2, 8, 2, 2, 2, 544, 5, 383, 2, 848, 2, 2, 497, 2, 8, 2, 2, 2, 21, 60, 27, 239, 9, 43, 2, 209, 405, 10, 10, 12, 764, 40, 4, 248, 20, 12, 16, 5, 174, 2, 72, 7, 51, 6, 2, 22, 4, 204, 131, 9])]

(25000,)
[1 0 0 ... 0 1 0]

(25000,)
[list([1, 591, 202, 14, 31, 6, 717, 10, 10, 2, 2, 5, 4, 360, 7, 4, 177, 2, 394, 354, 4, 123, 9, 2, 2, 2, 10, 10, 13, 92, 124, 89, 488, 2, 100, 28, 2, 14, 31, 23, 27, 2, 29, 220, 468, 8, 124, 14, 286, 170, 8, 157, 46, 5, 27, 239, 16, 179, 2, 38, 32, 25, 2, 451, 202, 14, 6, 717])
 list([1, 14, 22, 2, 6, 176, 7, 2, 88, 12, 2, 23, 2, 5, 109, 943, 4, 114, 9, 55, 606, 5, 111, 7, 4, 139, 193, 273, 23, 4, 172, 270, 11, 2, 2, 4, 2, 2, 109, 2, 21, 4, 22, 2, 8, 6, 2, 2, 10, 10, 4, 105, 987, 35, 841, 2, 19, 861, 2, 5, 2, 2, 45, 55, 221, 15, 670, 2, 526, 14, 2, 4, 405, 5, 2, 7, 27, 85, 108, 131, 4, 2, 2, 2, 405, 9, 2, 133, 5, 50, 13, 104, 51, 66, 166, 14, 22, 157, 9, 4, 530, 239, 34, 2, 2, 45, 407, 31, 7, 41, 2, 105, 21, 59, 299, 12, 38, 950, 5, 2, 15, 45, 629, 488, 2, 127, 6, 52, 292, 17, 4, 2, 185, 132, 2, 2, 2, 488, 2, 47, 6, 392, 173, 4, 2, 2, 270, 2, 4, 2, 7, 4, 65, 55, 73, 11, 346, 14, 20, 9, 6, 976, 2, 7, 2, 861, 2, 5, 2, 30, 2, 2, 56, 4, 841, 5, 990, 692, 8, 4, 2, 398, 229, 10, 10, 13, 2, 670, 2, 14, 9, 31, 7, 27, 111, 108, 15, 2, 19, 2, 2, 875, 551, 14, 22, 9, 2, 21, 45, 2, 5, 45, 252, 8, 2, 6, 565, 921, 2, 39, 4, 529, 48, 25, 181, 8, 67, 35, 2, 22, 49, 238, 60, 135, 2, 14, 9, 290, 4, 58, 10, 10, 472, 45, 55, 878, 8, 169, 11, 374, 2, 25, 203, 28, 8, 818, 12, 125, 4, 2])
 list([1, 111, 748, 2, 2, 2, 2, 4, 87, 2, 2, 7, 31, 318, 2, 7, 4, 498, 2, 748, 63, 29, 2, 220, 686, 2, 5, 17, 12, 575, 220, 2, 17, 6, 185, 132, 2, 16, 53, 928, 11, 2, 74, 4, 438, 21, 27, 2, 589, 8, 22, 107, 2, 2, 997, 2, 8, 35, 2, 2, 11, 22, 231, 54, 29, 2, 29, 100, 2, 2, 34, 2, 2, 2, 5, 2, 98, 31, 2, 33, 6, 58, 14, 2, 2, 8, 4, 365, 7, 2, 2, 356, 346, 4, 2, 2, 63, 29, 93, 11, 2, 11, 2, 33, 6, 58, 54, 2, 431, 748, 7, 32, 2, 16, 11, 94, 2, 10, 10, 4, 993, 2, 7, 4, 2, 2, 2, 2, 8, 847, 8, 2, 121, 31, 7, 27, 86, 2, 2, 16, 6, 465, 993, 2, 2, 573, 17, 2, 42, 4, 2, 37, 473, 6, 711, 6, 2, 7, 328, 212, 70, 30, 258, 11, 220, 32, 7, 108, 21, 133, 12, 9, 55, 465, 849, 2, 53, 33, 2, 2, 37, 70, 2, 4, 2, 2, 74, 476, 37, 62, 91, 2, 169, 4, 2, 2, 146, 655, 2, 5, 258, 12, 184, 2, 546, 5, 849, 2, 7, 4, 22, 2, 18, 631, 2, 797, 7, 4, 2, 71, 348, 425, 2, 2, 19, 2, 5, 2, 11, 661, 8, 339, 2, 4, 2, 2, 7, 4, 2, 10, 10, 263, 787, 9, 270, 11, 6, 2, 4, 2, 2, 121, 4, 2, 26, 2, 19, 68, 2, 5, 28, 446, 6, 318, 2, 8, 67, 51, 36, 70, 81, 8, 2, 2, 36, 2, 8, 2, 2, 18, 6, 711, 4, 2, 26, 2, 2, 11, 14, 636, 720, 12, 426, 28, 77, 776, 8, 97, 38, 111, 2, 2, 168, 2, 2, 137, 2, 18, 27, 173, 9, 2, 17, 6, 2, 428, 2, 232, 11, 4, 2, 37, 272, 40, 2, 247, 30, 656, 6, 2, 54, 2, 2, 98, 6, 2, 40, 558, 37, 2, 98, 4, 2, 2, 15, 14, 9, 57, 2, 5, 2, 6, 275, 711, 2, 2, 2, 98, 6, 2, 10, 10, 2, 19, 14, 2, 267, 162, 711, 37, 2, 752, 98, 4, 2, 2, 90, 19, 6, 2, 7, 2, 2, 2, 4, 2, 2, 930, 8, 508, 90, 4, 2, 8, 4, 2, 17, 2, 2, 2, 4, 2, 8, 2, 189, 4, 2, 2, 2, 4, 2, 5, 95, 271, 23, 6, 2, 2, 2, 2, 33, 2, 6, 425, 2, 2, 2, 2, 7, 4, 2, 2, 469, 4, 2, 54, 4, 150, 2, 2, 280, 53, 2, 2, 18, 339, 29, 2, 27, 2, 5, 2, 68, 2, 19, 2, 2, 4, 2, 7, 263, 65, 2, 34, 6, 2, 2, 43, 159, 29, 9, 2, 9, 387, 73, 195, 584, 10, 10, 2, 4, 58, 810, 54, 14, 2, 117, 22, 16, 93, 5, 2, 4, 192, 15, 12, 16, 93, 34, 6, 2, 2, 33, 4, 2, 7, 15, 2, 2, 2, 325, 12, 62, 30, 776, 8, 67, 14, 17, 6, 2, 44, 148, 687, 2, 203, 42, 203, 24, 28, 69, 2, 2, 11, 330, 54, 29, 93, 2, 21, 845, 2, 27, 2, 7, 819, 4, 22, 2, 17, 6, 2, 787, 7, 2, 2, 2, 100, 30, 4, 2, 2, 2, 2, 42, 2, 11, 4, 2, 42, 101, 704, 7, 101, 999, 15, 2, 94, 2, 180, 5, 9, 2, 34, 2, 45, 6, 2, 22, 60, 6, 2, 31, 11, 94, 2, 96, 21, 94, 749, 9, 57, 975])
 ...
 list([1, 13, 2, 15, 8, 135, 14, 9, 35, 32, 46, 394, 20, 62, 30, 2, 21, 45, 184, 78, 4, 2, 910, 769, 2, 2, 395, 2, 5, 2, 11, 119, 2, 89, 2, 4, 116, 218, 78, 21, 407, 100, 30, 128, 262, 15, 7, 185, 2, 284, 2, 2, 37, 315, 4, 226, 20, 272, 2, 40, 29, 152, 60, 181, 8, 30, 50, 553, 362, 80, 119, 12, 21, 846, 2])
 list([1, 11, 119, 241, 9, 4, 840, 20, 12, 468, 15, 94, 2, 562, 791, 39, 4, 86, 107, 8, 97, 14, 31, 33, 4, 2, 7, 743, 46, 2, 9, 2, 5, 4, 768, 47, 8, 79, 90, 145, 164, 162, 50, 6, 501, 119, 7, 9, 4, 78, 232, 15, 16, 224, 11, 4, 333, 20, 4, 985, 200, 5, 2, 5, 9, 2, 8, 79, 357, 4, 20, 47, 220, 57, 206, 139, 11, 12, 5, 55, 117, 212, 13, 2, 92, 124, 51, 45, 2, 71, 536, 13, 520, 14, 20, 6, 2, 7, 470])
 list([1, 6, 52, 2, 430, 22, 9, 220, 2, 8, 28, 2, 519, 2, 6, 769, 15, 47, 6, 2, 2, 8, 114, 5, 33, 222, 31, 55, 184, 704, 2, 2, 19, 346, 2, 5, 6, 364, 350, 4, 184, 2, 9, 133, 2, 11, 2, 2, 21, 4, 2, 2, 570, 50, 2, 2, 9, 6, 2, 17, 6, 2, 2, 21, 17, 6, 2, 232, 2, 2, 29, 266, 56, 96, 346, 194, 308, 9, 194, 21, 29, 218, 2, 19, 4, 78, 173, 7, 27, 2, 2, 2, 718, 2, 9, 6, 2, 17, 210, 5, 2, 2, 47, 77, 395, 14, 172, 173, 18, 2, 2, 2, 82, 127, 27, 173, 11, 6, 392, 217, 21, 50, 9, 57, 65, 12, 2, 53, 40, 35, 390, 7, 11, 4, 2, 7, 4, 314, 74, 6, 792, 22, 2, 19, 714, 727, 2, 382, 4, 91, 2, 439, 19, 14, 20, 9, 2, 2, 2, 4, 756, 25, 124, 4, 31, 12, 16, 93, 804, 34, 2, 2])]

(25000,)
[0 1 1 ... 0 0 0]
Train on 25000 samples, validate on 25000 samples
Epoch 1/150
25000/25000 [==============================] - 2s 75us/step - loss: 0.9243 - acc: 0.5072 - mae: 0.4993 - val_loss: 0.9011 - val_acc: 0.5657 - val_mae: 0.4979
Epoch 2/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.8823 - acc: 0.5435 - mae: 0.4937 - val_loss: 0.8537 - val_acc: 0.6562 - val_mae: 0.4873
Epoch 3/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.8326 - acc: 0.6078 - mae: 0.4771 - val_loss: 0.7904 - val_acc: 0.7300 - val_mae: 0.4637
Epoch 4/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.7750 - acc: 0.6588 - mae: 0.4511 - val_loss: 0.7207 - val_acc: 0.7822 - val_mae: 0.4322
Epoch 5/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.7216 - acc: 0.6966 - mae: 0.4226 - val_loss: 0.6586 - val_acc: 0.8027 - val_mae: 0.4005
Epoch 6/150
25000/25000 [==============================] - 1s 28us/step - loss: 0.6681 - acc: 0.7334 - mae: 0.3919 - val_loss: 0.5942 - val_acc: 0.8194 - val_mae: 0.3614
Epoch 7/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.6289 - acc: 0.7518 - mae: 0.3687 - val_loss: 0.5489 - val_acc: 0.8278 - val_mae: 0.3334
Epoch 8/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.5921 - acc: 0.7736 - mae: 0.3461 - val_loss: 0.5094 - val_acc: 0.8356 - val_mae: 0.3066
Epoch 9/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.5635 - acc: 0.7848 - mae: 0.3298 - val_loss: 0.4816 - val_acc: 0.8398 - val_mae: 0.2875
Epoch 10/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.5462 - acc: 0.7912 - mae: 0.3200 - val_loss: 0.4672 - val_acc: 0.8443 - val_mae: 0.2827
Epoch 11/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.5240 - acc: 0.7995 - mae: 0.3079 - val_loss: 0.4478 - val_acc: 0.8476 - val_mae: 0.2692
Epoch 12/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.5069 - acc: 0.8100 - mae: 0.2975 - val_loss: 0.4380 - val_acc: 0.8492 - val_mae: 0.2655
Epoch 13/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.4923 - acc: 0.8138 - mae: 0.2901 - val_loss: 0.4239 - val_acc: 0.8542 - val_mae: 0.2570
Epoch 14/150
25000/25000 [==============================] - 1s 33us/step - loss: 0.4836 - acc: 0.8202 - mae: 0.2850 - val_loss: 0.4120 - val_acc: 0.8521 - val_mae: 0.2435
Epoch 15/150
25000/25000 [==============================] - 1s 33us/step - loss: 0.4687 - acc: 0.8226 - mae: 0.2782 - val_loss: 0.4048 - val_acc: 0.8557 - val_mae: 0.2437
Epoch 16/150
25000/25000 [==============================] - 1s 39us/step - loss: 0.4584 - acc: 0.8300 - mae: 0.2709 - val_loss: 0.3982 - val_acc: 0.8561 - val_mae: 0.2381
Epoch 17/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.4508 - acc: 0.8299 - mae: 0.2684 - val_loss: 0.3897 - val_acc: 0.8584 - val_mae: 0.2308
Epoch 18/150
25000/25000 [==============================] - 1s 33us/step - loss: 0.4434 - acc: 0.8363 - mae: 0.2636 - val_loss: 0.3863 - val_acc: 0.8589 - val_mae: 0.2306
Epoch 19/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.4389 - acc: 0.8388 - mae: 0.2612 - val_loss: 0.3801 - val_acc: 0.8599 - val_mae: 0.2236
Epoch 20/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.4310 - acc: 0.8405 - mae: 0.2562 - val_loss: 0.3763 - val_acc: 0.8596 - val_mae: 0.2185
Epoch 21/150
25000/25000 [==============================] - 1s 36us/step - loss: 0.4242 - acc: 0.8435 - mae: 0.2530 - val_loss: 0.3743 - val_acc: 0.8600 - val_mae: 0.2198
Epoch 22/150
25000/25000 [==============================] - 1s 38us/step - loss: 0.4168 - acc: 0.8469 - mae: 0.2493 - val_loss: 0.3715 - val_acc: 0.8596 - val_mae: 0.2164
Epoch 23/150
25000/25000 [==============================] - 1s 37us/step - loss: 0.4147 - acc: 0.8479 - mae: 0.2457 - val_loss: 0.3735 - val_acc: 0.8593 - val_mae: 0.2265
Epoch 24/150
25000/25000 [==============================] - 1s 36us/step - loss: 0.4159 - acc: 0.8464 - mae: 0.2479 - val_loss: 0.3692 - val_acc: 0.8613 - val_mae: 0.2198
Epoch 25/150
25000/25000 [==============================] - 1s 41us/step - loss: 0.4088 - acc: 0.8502 - mae: 0.2438 - val_loss: 0.3685 - val_acc: 0.8608 - val_mae: 0.2192
Epoch 26/150
25000/25000 [==============================] - 1s 33us/step - loss: 0.4031 - acc: 0.8540 - mae: 0.2401 - val_loss: 0.3662 - val_acc: 0.8614 - val_mae: 0.2180
Epoch 27/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3988 - acc: 0.8547 - mae: 0.2378 - val_loss: 0.3659 - val_acc: 0.8611 - val_mae: 0.2182
Epoch 28/150
25000/25000 [==============================] - 1s 34us/step - loss: 0.3974 - acc: 0.8559 - mae: 0.2372 - val_loss: 0.3645 - val_acc: 0.8619 - val_mae: 0.2165
Epoch 29/150
25000/25000 [==============================] - 1s 36us/step - loss: 0.3932 - acc: 0.8577 - mae: 0.2341 - val_loss: 0.3627 - val_acc: 0.8611 - val_mae: 0.2133
Epoch 30/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3890 - acc: 0.8601 - mae: 0.2317 - val_loss: 0.3628 - val_acc: 0.8605 - val_mae: 0.2103
Epoch 31/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3927 - acc: 0.8549 - mae: 0.2333 - val_loss: 0.3621 - val_acc: 0.8610 - val_mae: 0.2101
Epoch 32/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3874 - acc: 0.8596 - mae: 0.2314 - val_loss: 0.3601 - val_acc: 0.8618 - val_mae: 0.2068
Epoch 33/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3869 - acc: 0.8622 - mae: 0.2290 - val_loss: 0.3615 - val_acc: 0.8606 - val_mae: 0.2142
Epoch 34/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3820 - acc: 0.8615 - mae: 0.2269 - val_loss: 0.3594 - val_acc: 0.8606 - val_mae: 0.2059
Epoch 35/150
25000/25000 [==============================] - 1s 34us/step - loss: 0.3811 - acc: 0.8630 - mae: 0.2266 - val_loss: 0.3593 - val_acc: 0.8616 - val_mae: 0.2047
Epoch 36/150
25000/25000 [==============================] - 1s 34us/step - loss: 0.3795 - acc: 0.8622 - mae: 0.2250 - val_loss: 0.3592 - val_acc: 0.8614 - val_mae: 0.2069
Epoch 37/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3748 - acc: 0.8650 - mae: 0.2228 - val_loss: 0.3593 - val_acc: 0.8599 - val_mae: 0.2052
Epoch 38/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3778 - acc: 0.8658 - mae: 0.2230 - val_loss: 0.3589 - val_acc: 0.8609 - val_mae: 0.2065
Epoch 39/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3773 - acc: 0.8641 - mae: 0.2231 - val_loss: 0.3594 - val_acc: 0.8614 - val_mae: 0.2018
Epoch 40/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3734 - acc: 0.8661 - mae: 0.2206 - val_loss: 0.3584 - val_acc: 0.8611 - val_mae: 0.2050
Epoch 41/150
25000/25000 [==============================] - 1s 34us/step - loss: 0.3747 - acc: 0.8671 - mae: 0.2215 - val_loss: 0.3594 - val_acc: 0.8598 - val_mae: 0.2087
Epoch 42/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3737 - acc: 0.8671 - mae: 0.2211 - val_loss: 0.3586 - val_acc: 0.8620 - val_mae: 0.2023
Epoch 43/150
25000/25000 [==============================] - 1s 33us/step - loss: 0.3703 - acc: 0.8690 - mae: 0.2181 - val_loss: 0.3580 - val_acc: 0.8617 - val_mae: 0.2055
Epoch 44/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3715 - acc: 0.8674 - mae: 0.2199 - val_loss: 0.3580 - val_acc: 0.8614 - val_mae: 0.2048
Epoch 45/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3702 - acc: 0.8662 - mae: 0.2185 - val_loss: 0.3622 - val_acc: 0.8600 - val_mae: 0.1992
Epoch 46/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3717 - acc: 0.8690 - mae: 0.2174 - val_loss: 0.3578 - val_acc: 0.8608 - val_mae: 0.2051
Epoch 47/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3688 - acc: 0.8716 - mae: 0.2175 - val_loss: 0.3578 - val_acc: 0.8615 - val_mae: 0.2030
Epoch 48/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3679 - acc: 0.8708 - mae: 0.2167 - val_loss: 0.3636 - val_acc: 0.8568 - val_mae: 0.2047
Epoch 49/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3642 - acc: 0.8703 - mae: 0.2148 - val_loss: 0.3585 - val_acc: 0.8615 - val_mae: 0.2059
Epoch 50/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3640 - acc: 0.8709 - mae: 0.2150 - val_loss: 0.3577 - val_acc: 0.8617 - val_mae: 0.1990
Epoch 51/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3632 - acc: 0.8709 - mae: 0.2133 - val_loss: 0.3591 - val_acc: 0.8615 - val_mae: 0.1992
Epoch 52/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3621 - acc: 0.8720 - mae: 0.2127 - val_loss: 0.3595 - val_acc: 0.8604 - val_mae: 0.2018
Epoch 53/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3616 - acc: 0.8745 - mae: 0.2111 - val_loss: 0.3585 - val_acc: 0.8608 - val_mae: 0.2016
Epoch 54/150
25000/25000 [==============================] - 1s 38us/step - loss: 0.3600 - acc: 0.8740 - mae: 0.2117 - val_loss: 0.3579 - val_acc: 0.8603 - val_mae: 0.2032
Epoch 55/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3648 - acc: 0.8711 - mae: 0.2143 - val_loss: 0.3584 - val_acc: 0.8612 - val_mae: 0.2013
Epoch 56/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3587 - acc: 0.8733 - mae: 0.2102 - val_loss: 0.3578 - val_acc: 0.8614 - val_mae: 0.1995
Epoch 57/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3589 - acc: 0.8734 - mae: 0.2101 - val_loss: 0.3578 - val_acc: 0.8606 - val_mae: 0.2009
Epoch 58/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3592 - acc: 0.8733 - mae: 0.2106 - val_loss: 0.3621 - val_acc: 0.8590 - val_mae: 0.2028
Epoch 59/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3590 - acc: 0.8739 - mae: 0.2100 - val_loss: 0.3583 - val_acc: 0.8606 - val_mae: 0.2020
Epoch 60/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3579 - acc: 0.8747 - mae: 0.2098 - val_loss: 0.3583 - val_acc: 0.8601 - val_mae: 0.1957
Epoch 61/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3579 - acc: 0.8740 - mae: 0.2086 - val_loss: 0.3585 - val_acc: 0.8599 - val_mae: 0.2026
Epoch 62/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3576 - acc: 0.8761 - mae: 0.2090 - val_loss: 0.3582 - val_acc: 0.8588 - val_mae: 0.1996
Epoch 63/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3554 - acc: 0.8762 - mae: 0.2066 - val_loss: 0.3599 - val_acc: 0.8599 - val_mae: 0.2031
Epoch 64/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3567 - acc: 0.8755 - mae: 0.2087 - val_loss: 0.3618 - val_acc: 0.8586 - val_mae: 0.1956
Epoch 65/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3522 - acc: 0.8767 - mae: 0.2064 - val_loss: 0.3596 - val_acc: 0.8602 - val_mae: 0.2002
Epoch 66/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3535 - acc: 0.8778 - mae: 0.2060 - val_loss: 0.3594 - val_acc: 0.8590 - val_mae: 0.1980
Epoch 67/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3556 - acc: 0.8762 - mae: 0.2074 - val_loss: 0.3598 - val_acc: 0.8593 - val_mae: 0.2037
Epoch 68/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3536 - acc: 0.8766 - mae: 0.2067 - val_loss: 0.3587 - val_acc: 0.8597 - val_mae: 0.1992
Epoch 69/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3550 - acc: 0.8773 - mae: 0.2067 - val_loss: 0.3593 - val_acc: 0.8585 - val_mae: 0.2018
Epoch 70/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3532 - acc: 0.8776 - mae: 0.2052 - val_loss: 0.3588 - val_acc: 0.8602 - val_mae: 0.1956
Epoch 71/150
25000/25000 [==============================] - 1s 33us/step - loss: 0.3525 - acc: 0.8782 - mae: 0.2060 - val_loss: 0.3595 - val_acc: 0.8597 - val_mae: 0.1976
Epoch 72/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3546 - acc: 0.8771 - mae: 0.2067 - val_loss: 0.3593 - val_acc: 0.8603 - val_mae: 0.1982
Epoch 73/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3544 - acc: 0.8786 - mae: 0.2058 - val_loss: 0.3592 - val_acc: 0.8597 - val_mae: 0.1941
Epoch 74/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3531 - acc: 0.8781 - mae: 0.2051 - val_loss: 0.3590 - val_acc: 0.8597 - val_mae: 0.1997
Epoch 75/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3498 - acc: 0.8815 - mae: 0.2040 - val_loss: 0.3600 - val_acc: 0.8588 - val_mae: 0.2043
Epoch 76/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3479 - acc: 0.8794 - mae: 0.2034 - val_loss: 0.3623 - val_acc: 0.8588 - val_mae: 0.1953
Epoch 77/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3492 - acc: 0.8780 - mae: 0.2029 - val_loss: 0.3607 - val_acc: 0.8586 - val_mae: 0.1947
Epoch 78/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3478 - acc: 0.8804 - mae: 0.2023 - val_loss: 0.3616 - val_acc: 0.8599 - val_mae: 0.1920
Epoch 79/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3538 - acc: 0.8782 - mae: 0.2049 - val_loss: 0.3611 - val_acc: 0.8598 - val_mae: 0.1908
Epoch 80/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3499 - acc: 0.8798 - mae: 0.2026 - val_loss: 0.3605 - val_acc: 0.8588 - val_mae: 0.2045
Epoch 81/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3462 - acc: 0.8821 - mae: 0.2011 - val_loss: 0.3601 - val_acc: 0.8594 - val_mae: 0.1976
Epoch 82/150
25000/25000 [==============================] - 1s 34us/step - loss: 0.3516 - acc: 0.8806 - mae: 0.2041 - val_loss: 0.3605 - val_acc: 0.8585 - val_mae: 0.1973
Epoch 83/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3451 - acc: 0.8822 - mae: 0.1996 - val_loss: 0.3609 - val_acc: 0.8602 - val_mae: 0.1993
Epoch 84/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3512 - acc: 0.8780 - mae: 0.2042 - val_loss: 0.3609 - val_acc: 0.8601 - val_mae: 0.1946
Epoch 85/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3459 - acc: 0.8815 - mae: 0.2002 - val_loss: 0.3604 - val_acc: 0.8600 - val_mae: 0.1936
Epoch 86/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3496 - acc: 0.8802 - mae: 0.2032 - val_loss: 0.3615 - val_acc: 0.8592 - val_mae: 0.1951
Epoch 87/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3426 - acc: 0.8823 - mae: 0.1991 - val_loss: 0.3610 - val_acc: 0.8590 - val_mae: 0.1965
Epoch 88/150
25000/25000 [==============================] - 1s 33us/step - loss: 0.3446 - acc: 0.8804 - mae: 0.1991 - val_loss: 0.3621 - val_acc: 0.8586 - val_mae: 0.1974
Epoch 89/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3446 - acc: 0.8834 - mae: 0.1991 - val_loss: 0.3605 - val_acc: 0.8588 - val_mae: 0.2001
Epoch 90/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3447 - acc: 0.8819 - mae: 0.1998 - val_loss: 0.3617 - val_acc: 0.8596 - val_mae: 0.1923
Epoch 91/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3412 - acc: 0.8820 - mae: 0.1974 - val_loss: 0.3600 - val_acc: 0.8594 - val_mae: 0.1944
Epoch 92/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3470 - acc: 0.8801 - mae: 0.2002 - val_loss: 0.3608 - val_acc: 0.8596 - val_mae: 0.1956
Epoch 93/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3396 - acc: 0.8839 - mae: 0.1976 - val_loss: 0.3623 - val_acc: 0.8588 - val_mae: 0.1946
Epoch 94/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3459 - acc: 0.8811 - mae: 0.1996 - val_loss: 0.3612 - val_acc: 0.8604 - val_mae: 0.1934
Epoch 95/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3372 - acc: 0.8866 - mae: 0.1951 - val_loss: 0.3609 - val_acc: 0.8598 - val_mae: 0.1932
Epoch 96/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3417 - acc: 0.8854 - mae: 0.1964 - val_loss: 0.3633 - val_acc: 0.8593 - val_mae: 0.1957
Epoch 97/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3397 - acc: 0.8862 - mae: 0.1956 - val_loss: 0.3660 - val_acc: 0.8520 - val_mae: 0.2027
Epoch 98/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3417 - acc: 0.8826 - mae: 0.1982 - val_loss: 0.3700 - val_acc: 0.8576 - val_mae: 0.1912
Epoch 99/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3382 - acc: 0.8872 - mae: 0.1945 - val_loss: 0.3610 - val_acc: 0.8590 - val_mae: 0.1994
Epoch 100/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3411 - acc: 0.8853 - mae: 0.1961 - val_loss: 0.3615 - val_acc: 0.8585 - val_mae: 0.1991
Epoch 101/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3393 - acc: 0.8815 - mae: 0.1968 - val_loss: 0.3637 - val_acc: 0.8594 - val_mae: 0.1961
Epoch 102/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3364 - acc: 0.8861 - mae: 0.1936 - val_loss: 0.3631 - val_acc: 0.8596 - val_mae: 0.1925
Epoch 103/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3407 - acc: 0.8845 - mae: 0.1956 - val_loss: 0.3611 - val_acc: 0.8598 - val_mae: 0.1968
Epoch 104/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3388 - acc: 0.8863 - mae: 0.1951 - val_loss: 0.3623 - val_acc: 0.8592 - val_mae: 0.1927
Epoch 105/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3348 - acc: 0.8854 - mae: 0.1938 - val_loss: 0.3618 - val_acc: 0.8606 - val_mae: 0.1922
Epoch 106/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3402 - acc: 0.8867 - mae: 0.1945 - val_loss: 0.3611 - val_acc: 0.8594 - val_mae: 0.1978
Epoch 107/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3328 - acc: 0.8883 - mae: 0.1917 - val_loss: 0.3639 - val_acc: 0.8589 - val_mae: 0.1944
Epoch 108/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3362 - acc: 0.8861 - mae: 0.1936 - val_loss: 0.3637 - val_acc: 0.8607 - val_mae: 0.1920
Epoch 109/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3324 - acc: 0.8876 - mae: 0.1916 - val_loss: 0.3637 - val_acc: 0.8598 - val_mae: 0.1944
Epoch 110/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3333 - acc: 0.8858 - mae: 0.1915 - val_loss: 0.3645 - val_acc: 0.8607 - val_mae: 0.1891
Epoch 111/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3339 - acc: 0.8845 - mae: 0.1921 - val_loss: 0.3632 - val_acc: 0.8615 - val_mae: 0.1919
Epoch 112/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3317 - acc: 0.8884 - mae: 0.1904 - val_loss: 0.3648 - val_acc: 0.8606 - val_mae: 0.1880
Epoch 113/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3289 - acc: 0.8888 - mae: 0.1894 - val_loss: 0.3646 - val_acc: 0.8599 - val_mae: 0.1900
Epoch 114/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3332 - acc: 0.8890 - mae: 0.1897 - val_loss: 0.3643 - val_acc: 0.8600 - val_mae: 0.1891
Epoch 115/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3314 - acc: 0.8870 - mae: 0.1899 - val_loss: 0.3676 - val_acc: 0.8567 - val_mae: 0.1938
Epoch 116/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3305 - acc: 0.8884 - mae: 0.1886 - val_loss: 0.3668 - val_acc: 0.8589 - val_mae: 0.1893
Epoch 117/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3299 - acc: 0.8885 - mae: 0.1890 - val_loss: 0.3648 - val_acc: 0.8578 - val_mae: 0.1959
Epoch 118/150
25000/25000 [==============================] - 1s 28us/step - loss: 0.3310 - acc: 0.8876 - mae: 0.1899 - val_loss: 0.3652 - val_acc: 0.8586 - val_mae: 0.1939
Epoch 119/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3302 - acc: 0.8886 - mae: 0.1888 - val_loss: 0.3659 - val_acc: 0.8594 - val_mae: 0.1910
Epoch 120/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3280 - acc: 0.8890 - mae: 0.1877 - val_loss: 0.3646 - val_acc: 0.8594 - val_mae: 0.1947
Epoch 121/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3308 - acc: 0.8861 - mae: 0.1892 - val_loss: 0.3664 - val_acc: 0.8586 - val_mae: 0.1908
Epoch 122/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3259 - acc: 0.8901 - mae: 0.1871 - val_loss: 0.3677 - val_acc: 0.8602 - val_mae: 0.1873
Epoch 123/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3293 - acc: 0.8892 - mae: 0.1885 - val_loss: 0.3657 - val_acc: 0.8593 - val_mae: 0.1930
Epoch 124/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3246 - acc: 0.8891 - mae: 0.1849 - val_loss: 0.3654 - val_acc: 0.8600 - val_mae: 0.1945
Epoch 125/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3283 - acc: 0.8877 - mae: 0.1879 - val_loss: 0.3678 - val_acc: 0.8590 - val_mae: 0.1869
Epoch 126/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3287 - acc: 0.8882 - mae: 0.1865 - val_loss: 0.3652 - val_acc: 0.8567 - val_mae: 0.2002
Epoch 127/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3242 - acc: 0.8909 - mae: 0.1858 - val_loss: 0.3654 - val_acc: 0.8585 - val_mae: 0.1938
Epoch 128/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3263 - acc: 0.8874 - mae: 0.1864 - val_loss: 0.3666 - val_acc: 0.8588 - val_mae: 0.1904
Epoch 129/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3249 - acc: 0.8900 - mae: 0.1855 - val_loss: 0.3715 - val_acc: 0.8578 - val_mae: 0.1929
Epoch 130/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3223 - acc: 0.8902 - mae: 0.1840 - val_loss: 0.3654 - val_acc: 0.8587 - val_mae: 0.1932
Epoch 131/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3255 - acc: 0.8900 - mae: 0.1849 - val_loss: 0.3686 - val_acc: 0.8591 - val_mae: 0.1915
Epoch 132/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3256 - acc: 0.8903 - mae: 0.1863 - val_loss: 0.3663 - val_acc: 0.8587 - val_mae: 0.1919
Epoch 133/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3236 - acc: 0.8905 - mae: 0.1837 - val_loss: 0.3680 - val_acc: 0.8591 - val_mae: 0.1884
Epoch 134/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3238 - acc: 0.8907 - mae: 0.1844 - val_loss: 0.3678 - val_acc: 0.8589 - val_mae: 0.1891
Epoch 135/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3221 - acc: 0.8925 - mae: 0.1836 - val_loss: 0.3684 - val_acc: 0.8600 - val_mae: 0.1874
Epoch 136/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3222 - acc: 0.8912 - mae: 0.1832 - val_loss: 0.3680 - val_acc: 0.8588 - val_mae: 0.1892
Epoch 137/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3240 - acc: 0.8919 - mae: 0.1832 - val_loss: 0.3661 - val_acc: 0.8594 - val_mae: 0.1926
Epoch 138/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3229 - acc: 0.8907 - mae: 0.1848 - val_loss: 0.3673 - val_acc: 0.8604 - val_mae: 0.1890
Epoch 139/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3212 - acc: 0.8918 - mae: 0.1834 - val_loss: 0.3670 - val_acc: 0.8608 - val_mae: 0.1901
Epoch 140/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3216 - acc: 0.8938 - mae: 0.1815 - val_loss: 0.3714 - val_acc: 0.8559 - val_mae: 0.1965
Epoch 141/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3195 - acc: 0.8930 - mae: 0.1821 - val_loss: 0.3699 - val_acc: 0.8598 - val_mae: 0.1887
Epoch 142/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3224 - acc: 0.8908 - mae: 0.1835 - val_loss: 0.3686 - val_acc: 0.8595 - val_mae: 0.1907
Epoch 143/150
25000/25000 [==============================] - 1s 31us/step - loss: 0.3207 - acc: 0.8905 - mae: 0.1820 - val_loss: 0.3675 - val_acc: 0.8584 - val_mae: 0.1977
Epoch 144/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3184 - acc: 0.8910 - mae: 0.1817 - val_loss: 0.3700 - val_acc: 0.8593 - val_mae: 0.1908
Epoch 145/150
25000/25000 [==============================] - 1s 36us/step - loss: 0.3171 - acc: 0.8910 - mae: 0.1800 - val_loss: 0.3685 - val_acc: 0.8600 - val_mae: 0.1922
Epoch 146/150
25000/25000 [==============================] - 1s 30us/step - loss: 0.3205 - acc: 0.8888 - mae: 0.1821 - val_loss: 0.3672 - val_acc: 0.8577 - val_mae: 0.1982
Epoch 147/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3149 - acc: 0.8939 - mae: 0.1789 - val_loss: 0.3730 - val_acc: 0.8598 - val_mae: 0.1841
Epoch 148/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3178 - acc: 0.8910 - mae: 0.1806 - val_loss: 0.3683 - val_acc: 0.8588 - val_mae: 0.1939
Epoch 149/150
25000/25000 [==============================] - 1s 29us/step - loss: 0.3172 - acc: 0.8930 - mae: 0.1798 - val_loss: 0.3722 - val_acc: 0.8601 - val_mae: 0.1844
Epoch 150/150
25000/25000 [==============================] - 1s 32us/step - loss: 0.3182 - acc: 0.8947 - mae: 0.1802 - val_loss: 0.3733 - val_acc: 0.8602 - val_mae: 0.1830
25000/25000 [==============================] - 0s 14us/step

['loss', 'acc', 'mae']
[0.373283802986145, 0.8601599931716919, 0.1830013245344162]

acc: 86.02%

Confusion Matrix:
 [[10710  1790]
 [ 1706 10794]]


Backend:  tensorflow
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 16)                16016     
_________________________________________________________________
dropout_1 (Dropout)          (None, 16)                0         
_________________________________________________________________
dense_2 (Dense)              (None, 22)                374       
_________________________________________________________________
dropout_2 (Dropout)          (None, 22)                0         
_________________________________________________________________
dense_3 (Dense)              (None, 1)                 23        
=================================================================
Total params: 16,413
Trainable params: 16,413
Non-trainable params: 0
_________________________________________________________________
None