## How to use automatic verification within dataset in Keras
def Learn_By_Example_318():
    """Build, train and evaluate a small Keras MLP on simulated data.

    Demonstrates Keras's automatic verification of a model during fitting:
    ``validation_split=0.25`` makes ``model.fit`` hold out the last 25% of
    the data and report validation loss/accuracy after every epoch.

    No parameters; no return value — results are printed to stdout.
    Run-to-run output varies because ``random_state=None`` and the network
    weights are randomly initialized.
    """
    print()
    print(format('How to use automatic verification within dataset in Keras', '*^82'))

    import warnings
    warnings.filterwarnings("ignore")

    # Load libraries (kept function-local, matching the original script style).
    import keras as K
    from keras.initializers import VarianceScaling
    from keras.regularizers import l2
    from keras.models import Sequential
    from keras.layers import Dense
    from sklearn import datasets
    from sklearn.metrics import confusion_matrix

    # Simulated binary-classification data: 10,000 samples, 20 features
    # (5 informative, 2 redundant, 1% label noise).
    X, y = datasets.make_classification(
        n_samples=10000, n_features=20, n_informative=5,
        n_redundant=2, n_repeated=0, n_classes=2, n_clusters_per_class=2,
        weights=None, flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0,
        scale=1.0, shuffle=True, random_state=None)
    print(X.shape); print(y.shape)

    # Define a deep learning model: two regularized hidden layers plus a
    # single sigmoid unit for binary classification.
    model = Sequential()
    model.add(Dense(38, input_dim=20,
                    kernel_regularizer=l2(0.001),          # weight regularizer
                    kernel_initializer=VarianceScaling(),  # initializer
                    activation='tanh'))
    model.add(Dense(25,
                    kernel_regularizer=l2(0.01),           # weight regularizer
                    kernel_initializer=VarianceScaling(),  # initializer
                    activation='relu'))
    model.add(Dense(1, activation='sigmoid'))

    # Compile the model for binary classification with plain SGD.
    model.compile(loss='binary_crossentropy', optimizer='sgd',
                  metrics=['acc'])

    # Train the model; validation_split=0.25 is the "automatic verification":
    # Keras reserves 25% of the data and evaluates it each epoch.
    model.fit(X, y, epochs=250, batch_size=25, verbose=1,
              validation_split=0.25)

    # Evaluate the model. NOTE(review): this scores the full dataset,
    # including the samples the model was trained on, so it overstates
    # generalization — kept as-is to preserve the example's behavior.
    scores = model.evaluate(X, y)
    print(); print(model.metrics_names); print(scores)
    print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1] * 100))

    # Confusion matrix from probabilities thresholded at 0.5.
    y_pred = model.predict(X) > 0.5
    cm = confusion_matrix(y, y_pred)
    print("\nConfusion Matrix:\n", cm)

    # More on the model.
    print("\n\nBackend: ", K.backend.backend())
    print(model.summary())


if __name__ == "__main__":
    Learn_By_Example_318()
************How to use automatic verification within dataset in Keras*************
Using TensorFlow backend.
(10000, 20) (10000,) Train on 7500 samples, validate on 2500 samples Epoch 1/250 7500/7500 [==============================] - 1s 73us/step - loss: 0.9012 - acc: 0.6161 - val_loss: 0.8418 - val_acc: 0.6788 Epoch 2/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.8008 - acc: 0.6972 - val_loss: 0.7662 - val_acc: 0.7292 Epoch 3/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.7363 - acc: 0.7316 - val_loss: 0.7134 - val_acc: 0.7512 Epoch 4/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.6899 - acc: 0.7560 - val_loss: 0.6748 - val_acc: 0.7684 Epoch 5/250 7500/7500 [==============================] - 0s 43us/step - loss: 0.6542 - acc: 0.7685 - val_loss: 0.6433 - val_acc: 0.7756 Epoch 6/250 7500/7500 [==============================] - 0s 43us/step - loss: 0.6247 - acc: 0.7781 - val_loss: 0.6160 - val_acc: 0.7888 Epoch 7/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.5984 - acc: 0.7867 - val_loss: 0.5913 - val_acc: 0.7976 Epoch 8/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.5746 - acc: 0.7941 - val_loss: 0.5682 - val_acc: 0.8052 Epoch 9/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.5524 - acc: 0.8041 - val_loss: 0.5472 - val_acc: 0.8124 Epoch 10/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.5319 - acc: 0.8137 - val_loss: 0.5278 - val_acc: 0.8172 Epoch 11/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.5127 - acc: 0.8213 - val_loss: 0.5102 - val_acc: 0.8220 Epoch 12/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4954 - acc: 0.8299 - val_loss: 0.4939 - val_acc: 0.8288 Epoch 13/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4801 - acc: 0.8332 - val_loss: 0.4791 - val_acc: 0.8340 Epoch 14/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4662 - acc: 0.8407 - val_loss: 0.4667 - val_acc: 0.8412 Epoch 15/250 
7500/7500 [==============================] - 0s 37us/step - loss: 0.4540 - acc: 0.8429 - val_loss: 0.4548 - val_acc: 0.8432 Epoch 16/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4430 - acc: 0.8452 - val_loss: 0.4468 - val_acc: 0.8468 Epoch 17/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4334 - acc: 0.8504 - val_loss: 0.4361 - val_acc: 0.8476 Epoch 18/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4245 - acc: 0.8543 - val_loss: 0.4274 - val_acc: 0.8524 Epoch 19/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4164 - acc: 0.8561 - val_loss: 0.4205 - val_acc: 0.8548 Epoch 20/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4089 - acc: 0.8616 - val_loss: 0.4132 - val_acc: 0.8540 Epoch 21/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.4023 - acc: 0.8628 - val_loss: 0.4071 - val_acc: 0.8580 Epoch 22/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.3960 - acc: 0.8659 - val_loss: 0.4024 - val_acc: 0.8584 Epoch 23/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3903 - acc: 0.8700 - val_loss: 0.3968 - val_acc: 0.8616 Epoch 24/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.3848 - acc: 0.8709 - val_loss: 0.3926 - val_acc: 0.8612 Epoch 25/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3799 - acc: 0.8757 - val_loss: 0.3882 - val_acc: 0.8644 Epoch 26/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3755 - acc: 0.8755 - val_loss: 0.3839 - val_acc: 0.8680 Epoch 27/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3715 - acc: 0.8777 - val_loss: 0.3796 - val_acc: 0.8684 Epoch 28/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3671 - acc: 0.8779 - val_loss: 0.3765 - val_acc: 0.8700 Epoch 29/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3635 - acc: 
0.8795 - val_loss: 0.3738 - val_acc: 0.8704 Epoch 30/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3600 - acc: 0.8812 - val_loss: 0.3709 - val_acc: 0.8720 Epoch 31/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3570 - acc: 0.8839 - val_loss: 0.3679 - val_acc: 0.8736 Epoch 32/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3536 - acc: 0.8832 - val_loss: 0.3654 - val_acc: 0.8708 Epoch 33/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3513 - acc: 0.8859 - val_loss: 0.3639 - val_acc: 0.8752 Epoch 34/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.3475 - acc: 0.8872 - val_loss: 0.3609 - val_acc: 0.8712 Epoch 35/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3462 - acc: 0.8869 - val_loss: 0.3566 - val_acc: 0.8768 Epoch 36/250 7500/7500 [==============================] - 0s 36us/step - loss: 0.3437 - acc: 0.8872 - val_loss: 0.3563 - val_acc: 0.8768 Epoch 37/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3416 - acc: 0.8869 - val_loss: 0.3547 - val_acc: 0.8784 Epoch 38/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3392 - acc: 0.8889 - val_loss: 0.3508 - val_acc: 0.8808 Epoch 39/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3373 - acc: 0.8893 - val_loss: 0.3488 - val_acc: 0.8784 Epoch 40/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3354 - acc: 0.8903 - val_loss: 0.3476 - val_acc: 0.8800 Epoch 41/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3335 - acc: 0.8883 - val_loss: 0.3465 - val_acc: 0.8848 Epoch 42/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3315 - acc: 0.8925 - val_loss: 0.3460 - val_acc: 0.8792 Epoch 43/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3300 - acc: 0.8920 - val_loss: 0.3435 - val_acc: 0.8840 Epoch 44/250 7500/7500 
[==============================] - 0s 36us/step - loss: 0.3283 - acc: 0.8936 - val_loss: 0.3409 - val_acc: 0.8844 Epoch 45/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3265 - acc: 0.8920 - val_loss: 0.3414 - val_acc: 0.8848 Epoch 46/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.3251 - acc: 0.8937 - val_loss: 0.3387 - val_acc: 0.8876 Epoch 47/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.3239 - acc: 0.8960 - val_loss: 0.3378 - val_acc: 0.8860 Epoch 48/250 7500/7500 [==============================] - 0s 36us/step - loss: 0.3223 - acc: 0.8951 - val_loss: 0.3356 - val_acc: 0.8884 Epoch 49/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.3207 - acc: 0.8956 - val_loss: 0.3343 - val_acc: 0.8900 Epoch 50/250 7500/7500 [==============================] - 0s 51us/step - loss: 0.3194 - acc: 0.8952 - val_loss: 0.3334 - val_acc: 0.8908 Epoch 51/250 7500/7500 [==============================] - 0s 44us/step - loss: 0.3183 - acc: 0.8983 - val_loss: 0.3326 - val_acc: 0.8880 Epoch 52/250 7500/7500 [==============================] - 0s 47us/step - loss: 0.3166 - acc: 0.8985 - val_loss: 0.3318 - val_acc: 0.8900 Epoch 53/250 7500/7500 [==============================] - 0s 47us/step - loss: 0.3156 - acc: 0.8968 - val_loss: 0.3302 - val_acc: 0.8888 Epoch 54/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.3141 - acc: 0.8985 - val_loss: 0.3308 - val_acc: 0.8888 Epoch 55/250 7500/7500 [==============================] - 0s 47us/step - loss: 0.3134 - acc: 0.8987 - val_loss: 0.3292 - val_acc: 0.8900 Epoch 56/250 7500/7500 [==============================] - 0s 44us/step - loss: 0.3119 - acc: 0.9000 - val_loss: 0.3293 - val_acc: 0.8948 Epoch 57/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.3109 - acc: 0.9005 - val_loss: 0.3262 - val_acc: 0.8928 Epoch 58/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.3099 - acc: 0.9013 - 
val_loss: 0.3278 - val_acc: 0.8912 Epoch 59/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.3092 - acc: 0.9007 - val_loss: 0.3250 - val_acc: 0.8936 Epoch 60/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.3071 - acc: 0.9036 - val_loss: 0.3252 - val_acc: 0.8888 Epoch 61/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.3068 - acc: 0.9011 - val_loss: 0.3248 - val_acc: 0.8968 Epoch 62/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.3059 - acc: 0.9015 - val_loss: 0.3221 - val_acc: 0.8924 Epoch 63/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3049 - acc: 0.9024 - val_loss: 0.3219 - val_acc: 0.8916 Epoch 64/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3040 - acc: 0.9036 - val_loss: 0.3211 - val_acc: 0.8936 Epoch 65/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.3024 - acc: 0.9044 - val_loss: 0.3211 - val_acc: 0.8936 Epoch 66/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.3021 - acc: 0.9040 - val_loss: 0.3194 - val_acc: 0.8972 Epoch 67/250 7500/7500 [==============================] - 0s 43us/step - loss: 0.3009 - acc: 0.9048 - val_loss: 0.3197 - val_acc: 0.8944 Epoch 68/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.3002 - acc: 0.9040 - val_loss: 0.3176 - val_acc: 0.8980 Epoch 69/250 7500/7500 [==============================] - 0s 43us/step - loss: 0.2995 - acc: 0.9061 - val_loss: 0.3164 - val_acc: 0.8972 Epoch 70/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.2984 - acc: 0.9055 - val_loss: 0.3159 - val_acc: 0.8960 Epoch 71/250 7500/7500 [==============================] - 0s 36us/step - loss: 0.2976 - acc: 0.9063 - val_loss: 0.3154 - val_acc: 0.8952 Epoch 72/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2967 - acc: 0.9056 - val_loss: 0.3141 - val_acc: 0.8988 Epoch 73/250 7500/7500 
[==============================] - 0s 39us/step - loss: 0.2959 - acc: 0.9057 - val_loss: 0.3142 - val_acc: 0.8952 Epoch 74/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2949 - acc: 0.9087 - val_loss: 0.3144 - val_acc: 0.8976 Epoch 75/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2942 - acc: 0.9072 - val_loss: 0.3143 - val_acc: 0.8988 Epoch 76/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2937 - acc: 0.9065 - val_loss: 0.3122 - val_acc: 0.8960 Epoch 77/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2924 - acc: 0.9088 - val_loss: 0.3126 - val_acc: 0.8960 Epoch 78/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2921 - acc: 0.9059 - val_loss: 0.3107 - val_acc: 0.8980 Epoch 79/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2909 - acc: 0.9085 - val_loss: 0.3119 - val_acc: 0.8988 Epoch 80/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2904 - acc: 0.9075 - val_loss: 0.3098 - val_acc: 0.8980 Epoch 81/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2889 - acc: 0.9107 - val_loss: 0.3095 - val_acc: 0.8996 Epoch 82/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2892 - acc: 0.9091 - val_loss: 0.3083 - val_acc: 0.9004 Epoch 83/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2876 - acc: 0.9124 - val_loss: 0.3078 - val_acc: 0.8988 Epoch 84/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2875 - acc: 0.9087 - val_loss: 0.3084 - val_acc: 0.8960 Epoch 85/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2869 - acc: 0.9116 - val_loss: 0.3074 - val_acc: 0.8948 Epoch 86/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2861 - acc: 0.9113 - val_loss: 0.3069 - val_acc: 0.9024 Epoch 87/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2855 - acc: 0.9111 - 
val_loss: 0.3057 - val_acc: 0.9008 Epoch 88/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2847 - acc: 0.9115 - val_loss: 0.3085 - val_acc: 0.9024 Epoch 89/250 7500/7500 [==============================] - 0s 44us/step - loss: 0.2842 - acc: 0.9108 - val_loss: 0.3051 - val_acc: 0.9020 Epoch 90/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2833 - acc: 0.9100 - val_loss: 0.3055 - val_acc: 0.8984 Epoch 91/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2827 - acc: 0.9115 - val_loss: 0.3037 - val_acc: 0.9004 Epoch 92/250 7500/7500 [==============================] - 0s 47us/step - loss: 0.2819 - acc: 0.9125 - val_loss: 0.3032 - val_acc: 0.9008 Epoch 93/250 7500/7500 [==============================] - 0s 45us/step - loss: 0.2815 - acc: 0.9120 - val_loss: 0.3025 - val_acc: 0.8996 Epoch 94/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.2809 - acc: 0.9161 - val_loss: 0.3025 - val_acc: 0.9028 Epoch 95/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2798 - acc: 0.9129 - val_loss: 0.3025 - val_acc: 0.9036 Epoch 96/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2793 - acc: 0.9121 - val_loss: 0.3029 - val_acc: 0.8988 Epoch 97/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2787 - acc: 0.9153 - val_loss: 0.3011 - val_acc: 0.9036 Epoch 98/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.2780 - acc: 0.9119 - val_loss: 0.3009 - val_acc: 0.9024 Epoch 99/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2777 - acc: 0.9147 - val_loss: 0.2993 - val_acc: 0.8976 Epoch 100/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2769 - acc: 0.9149 - val_loss: 0.3006 - val_acc: 0.9004 Epoch 101/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2767 - acc: 0.9152 - val_loss: 0.3008 - val_acc: 0.9012 Epoch 102/250 7500/7500 
[==============================] - 0s 37us/step - loss: 0.2756 - acc: 0.9155 - val_loss: 0.2991 - val_acc: 0.9008 Epoch 103/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2751 - acc: 0.9148 - val_loss: 0.3001 - val_acc: 0.8996 Epoch 104/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2746 - acc: 0.9155 - val_loss: 0.2965 - val_acc: 0.9036 Epoch 105/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.2739 - acc: 0.9164 - val_loss: 0.2967 - val_acc: 0.9012 Epoch 106/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.2733 - acc: 0.9151 - val_loss: 0.2974 - val_acc: 0.9056 Epoch 107/250 7500/7500 [==============================] - 0s 45us/step - loss: 0.2729 - acc: 0.9169 - val_loss: 0.2976 - val_acc: 0.9084 Epoch 108/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2723 - acc: 0.9183 - val_loss: 0.2958 - val_acc: 0.9048 Epoch 109/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2720 - acc: 0.9181 - val_loss: 0.2972 - val_acc: 0.9036 Epoch 110/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2710 - acc: 0.9175 - val_loss: 0.2947 - val_acc: 0.9004 Epoch 111/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2705 - acc: 0.9167 - val_loss: 0.2953 - val_acc: 0.9076 Epoch 112/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2705 - acc: 0.9188 - val_loss: 0.2954 - val_acc: 0.9036 Epoch 113/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2695 - acc: 0.9180 - val_loss: 0.2949 - val_acc: 0.9068 Epoch 114/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2691 - acc: 0.9179 - val_loss: 0.2982 - val_acc: 0.9020 Epoch 115/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2683 - acc: 0.9191 - val_loss: 0.2942 - val_acc: 0.9024 Epoch 116/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2683 - 
acc: 0.9200 - val_loss: 0.2936 - val_acc: 0.9044 Epoch 117/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2678 - acc: 0.9177 - val_loss: 0.2936 - val_acc: 0.9028 Epoch 118/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2669 - acc: 0.9191 - val_loss: 0.2935 - val_acc: 0.9040 Epoch 119/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2670 - acc: 0.9197 - val_loss: 0.2929 - val_acc: 0.9056 Epoch 120/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2662 - acc: 0.9197 - val_loss: 0.2928 - val_acc: 0.9072 Epoch 121/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2666 - acc: 0.9197 - val_loss: 0.2913 - val_acc: 0.9076 Epoch 122/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2660 - acc: 0.9199 - val_loss: 0.2915 - val_acc: 0.9040 Epoch 123/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2650 - acc: 0.9219 - val_loss: 0.2975 - val_acc: 0.9052 Epoch 124/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2650 - acc: 0.9191 - val_loss: 0.2935 - val_acc: 0.9076 Epoch 125/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2644 - acc: 0.9199 - val_loss: 0.2931 - val_acc: 0.9068 Epoch 126/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2638 - acc: 0.9187 - val_loss: 0.2909 - val_acc: 0.9048 Epoch 127/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2634 - acc: 0.9197 - val_loss: 0.2899 - val_acc: 0.9048 Epoch 128/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2631 - acc: 0.9211 - val_loss: 0.2910 - val_acc: 0.9092 Epoch 129/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2625 - acc: 0.9199 - val_loss: 0.2905 - val_acc: 0.9072 Epoch 130/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2629 - acc: 0.9200 - val_loss: 0.2897 - val_acc: 0.9072 Epoch 131/250 
7500/7500 [==============================] - 0s 36us/step - loss: 0.2619 - acc: 0.9223 - val_loss: 0.2900 - val_acc: 0.9056 Epoch 132/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2611 - acc: 0.9213 - val_loss: 0.2903 - val_acc: 0.9088 Epoch 133/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2608 - acc: 0.9227 - val_loss: 0.2910 - val_acc: 0.9048 Epoch 134/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2606 - acc: 0.9224 - val_loss: 0.2894 - val_acc: 0.9044 Epoch 135/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2605 - acc: 0.9209 - val_loss: 0.2880 - val_acc: 0.9092 Epoch 136/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2600 - acc: 0.9213 - val_loss: 0.2881 - val_acc: 0.9056 Epoch 137/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2599 - acc: 0.9236 - val_loss: 0.2881 - val_acc: 0.9076 Epoch 138/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2596 - acc: 0.9236 - val_loss: 0.2876 - val_acc: 0.9080 Epoch 139/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2590 - acc: 0.9211 - val_loss: 0.2878 - val_acc: 0.9112 Epoch 140/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2582 - acc: 0.9252 - val_loss: 0.2908 - val_acc: 0.9088 Epoch 141/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2581 - acc: 0.9219 - val_loss: 0.2923 - val_acc: 0.9048 Epoch 142/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2580 - acc: 0.9240 - val_loss: 0.2874 - val_acc: 0.9120 Epoch 143/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2581 - acc: 0.9213 - val_loss: 0.2891 - val_acc: 0.9120 Epoch 144/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2575 - acc: 0.9239 - val_loss: 0.2922 - val_acc: 0.9072 Epoch 145/250 7500/7500 [==============================] - 0s 37us/step - loss: 
0.2573 - acc: 0.9229 - val_loss: 0.2915 - val_acc: 0.9052 Epoch 146/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2562 - acc: 0.9241 - val_loss: 0.2893 - val_acc: 0.9064 Epoch 147/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2562 - acc: 0.9248 - val_loss: 0.2866 - val_acc: 0.9100 Epoch 148/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2551 - acc: 0.9241 - val_loss: 0.2899 - val_acc: 0.9036 Epoch 149/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2555 - acc: 0.9228 - val_loss: 0.2884 - val_acc: 0.9100 Epoch 150/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2555 - acc: 0.9245 - val_loss: 0.2881 - val_acc: 0.9072 Epoch 151/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2553 - acc: 0.9244 - val_loss: 0.2864 - val_acc: 0.9128 Epoch 152/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2551 - acc: 0.9235 - val_loss: 0.2840 - val_acc: 0.9140 Epoch 153/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2543 - acc: 0.9240 - val_loss: 0.2855 - val_acc: 0.9124 Epoch 154/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2548 - acc: 0.9249 - val_loss: 0.2857 - val_acc: 0.9092 Epoch 155/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2533 - acc: 0.9236 - val_loss: 0.2868 - val_acc: 0.9104 Epoch 156/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2533 - acc: 0.9245 - val_loss: 0.2859 - val_acc: 0.9124 Epoch 157/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2537 - acc: 0.9255 - val_loss: 0.2853 - val_acc: 0.9092 Epoch 158/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2527 - acc: 0.9231 - val_loss: 0.2874 - val_acc: 0.9068 Epoch 159/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2526 - acc: 0.9233 - val_loss: 0.2860 - val_acc: 0.9124 Epoch 
160/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2526 - acc: 0.9255 - val_loss: 0.2854 - val_acc: 0.9092 Epoch 161/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2517 - acc: 0.9255 - val_loss: 0.2853 - val_acc: 0.9124 Epoch 162/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2519 - acc: 0.9249 - val_loss: 0.2844 - val_acc: 0.9140 Epoch 163/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2526 - acc: 0.9272 - val_loss: 0.2854 - val_acc: 0.9144 Epoch 164/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2521 - acc: 0.9261 - val_loss: 0.2840 - val_acc: 0.9088 Epoch 165/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2508 - acc: 0.9252 - val_loss: 0.2901 - val_acc: 0.9084 Epoch 166/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.2504 - acc: 0.9273 - val_loss: 0.2858 - val_acc: 0.9080 Epoch 167/250 7500/7500 [==============================] - 0s 47us/step - loss: 0.2508 - acc: 0.9244 - val_loss: 0.2847 - val_acc: 0.9148 Epoch 168/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2500 - acc: 0.9275 - val_loss: 0.2836 - val_acc: 0.9132 Epoch 169/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2491 - acc: 0.9263 - val_loss: 0.2836 - val_acc: 0.9108 Epoch 170/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.2500 - acc: 0.9265 - val_loss: 0.2842 - val_acc: 0.9136 Epoch 171/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2492 - acc: 0.9263 - val_loss: 0.2840 - val_acc: 0.9112 Epoch 172/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2497 - acc: 0.9244 - val_loss: 0.2865 - val_acc: 0.9080 Epoch 173/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.2489 - acc: 0.9263 - val_loss: 0.2951 - val_acc: 0.9100 Epoch 174/250 7500/7500 [==============================] - 0s 43us/step - 
loss: 0.2487 - acc: 0.9264 - val_loss: 0.2853 - val_acc: 0.9040 Epoch 175/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.2490 - acc: 0.9261 - val_loss: 0.2856 - val_acc: 0.9084 Epoch 176/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2482 - acc: 0.9272 - val_loss: 0.2904 - val_acc: 0.9072 Epoch 177/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2473 - acc: 0.9277 - val_loss: 0.2848 - val_acc: 0.9116 Epoch 178/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2481 - acc: 0.9275 - val_loss: 0.2874 - val_acc: 0.9108 Epoch 179/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2473 - acc: 0.9252 - val_loss: 0.2869 - val_acc: 0.9096 Epoch 180/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2476 - acc: 0.9268 - val_loss: 0.2855 - val_acc: 0.9092 Epoch 181/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2476 - acc: 0.9277 - val_loss: 0.2835 - val_acc: 0.9116 Epoch 182/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2471 - acc: 0.9272 - val_loss: 0.2857 - val_acc: 0.9104 Epoch 183/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2466 - acc: 0.9253 - val_loss: 0.2828 - val_acc: 0.9152 Epoch 184/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2465 - acc: 0.9261 - val_loss: 0.2827 - val_acc: 0.9164 Epoch 185/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2458 - acc: 0.9267 - val_loss: 0.2836 - val_acc: 0.9120 Epoch 186/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2448 - acc: 0.9263 - val_loss: 0.2829 - val_acc: 0.9100 Epoch 187/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2457 - acc: 0.9289 - val_loss: 0.2845 - val_acc: 0.9144 Epoch 188/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2460 - acc: 0.9284 - val_loss: 0.2917 - val_acc: 0.9080 
Epoch 189/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2454 - acc: 0.9275 - val_loss: 0.2833 - val_acc: 0.9112 Epoch 190/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2460 - acc: 0.9285 - val_loss: 0.2836 - val_acc: 0.9136 Epoch 191/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2458 - acc: 0.9281 - val_loss: 0.2846 - val_acc: 0.9156 Epoch 192/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2451 - acc: 0.9279 - val_loss: 0.2841 - val_acc: 0.9108 Epoch 193/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2447 - acc: 0.9307 - val_loss: 0.2821 - val_acc: 0.9096 Epoch 194/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2435 - acc: 0.9271 - val_loss: 0.2893 - val_acc: 0.9124 Epoch 195/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2442 - acc: 0.9289 - val_loss: 0.2884 - val_acc: 0.9112 Epoch 196/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2438 - acc: 0.9268 - val_loss: 0.2886 - val_acc: 0.9092 Epoch 197/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2444 - acc: 0.9268 - val_loss: 0.2879 - val_acc: 0.9104 Epoch 198/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2441 - acc: 0.9300 - val_loss: 0.2814 - val_acc: 0.9156 Epoch 199/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2433 - acc: 0.9275 - val_loss: 0.2822 - val_acc: 0.9148 Epoch 200/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2436 - acc: 0.9285 - val_loss: 0.2840 - val_acc: 0.9104 Epoch 201/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.2428 - acc: 0.9300 - val_loss: 0.2868 - val_acc: 0.9084 Epoch 202/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.2431 - acc: 0.9287 - val_loss: 0.2838 - val_acc: 0.9108 Epoch 203/250 7500/7500 [==============================] - 0s 
39us/step - loss: 0.2436 - acc: 0.9284 - val_loss: 0.2843 - val_acc: 0.9120 Epoch 204/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2426 - acc: 0.9309 - val_loss: 0.2828 - val_acc: 0.9116 Epoch 205/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2422 - acc: 0.9289 - val_loss: 0.2851 - val_acc: 0.9092 Epoch 206/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2422 - acc: 0.9301 - val_loss: 0.2871 - val_acc: 0.9120 Epoch 207/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2414 - acc: 0.9288 - val_loss: 0.2889 - val_acc: 0.9076 Epoch 208/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2423 - acc: 0.9320 - val_loss: 0.2853 - val_acc: 0.9132 Epoch 209/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2416 - acc: 0.9304 - val_loss: 0.2838 - val_acc: 0.9112 Epoch 210/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2418 - acc: 0.9291 - val_loss: 0.2811 - val_acc: 0.9172 Epoch 211/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2412 - acc: 0.9289 - val_loss: 0.2854 - val_acc: 0.9116 Epoch 212/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2406 - acc: 0.9299 - val_loss: 0.2847 - val_acc: 0.9144 Epoch 213/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2419 - acc: 0.9297 - val_loss: 0.2828 - val_acc: 0.9096 Epoch 214/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2407 - acc: 0.9308 - val_loss: 0.2874 - val_acc: 0.9120 Epoch 215/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2406 - acc: 0.9307 - val_loss: 0.2814 - val_acc: 0.9148 Epoch 216/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2408 - acc: 0.9296 - val_loss: 0.2855 - val_acc: 0.9076 Epoch 217/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2395 - acc: 0.9317 - val_loss: 0.2877 - 
val_acc: 0.9040 Epoch 218/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2406 - acc: 0.9315 - val_loss: 0.2862 - val_acc: 0.9136 Epoch 219/250 7500/7500 [==============================] - 0s 44us/step - loss: 0.2398 - acc: 0.9312 - val_loss: 0.2821 - val_acc: 0.9128 Epoch 220/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.2396 - acc: 0.9297 - val_loss: 0.2839 - val_acc: 0.9132 Epoch 221/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.2399 - acc: 0.9304 - val_loss: 0.2839 - val_acc: 0.9152 Epoch 222/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2388 - acc: 0.9280 - val_loss: 0.2861 - val_acc: 0.9120 Epoch 223/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2396 - acc: 0.9307 - val_loss: 0.2834 - val_acc: 0.9120 Epoch 224/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2390 - acc: 0.9297 - val_loss: 0.2849 - val_acc: 0.9136 Epoch 225/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2392 - acc: 0.9305 - val_loss: 0.2834 - val_acc: 0.9084 Epoch 226/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2392 - acc: 0.9307 - val_loss: 0.2888 - val_acc: 0.9056 Epoch 227/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2387 - acc: 0.9307 - val_loss: 0.2851 - val_acc: 0.9072 Epoch 228/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2382 - acc: 0.9303 - val_loss: 0.2852 - val_acc: 0.9124 Epoch 229/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2389 - acc: 0.9289 - val_loss: 0.2853 - val_acc: 0.9144 Epoch 230/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2379 - acc: 0.9311 - val_loss: 0.2837 - val_acc: 0.9100 Epoch 231/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2376 - acc: 0.9325 - val_loss: 0.2870 - val_acc: 0.9080 Epoch 232/250 7500/7500 
[==============================] - 0s 37us/step - loss: 0.2382 - acc: 0.9337 - val_loss: 0.2836 - val_acc: 0.9104 Epoch 233/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2383 - acc: 0.9309 - val_loss: 0.2929 - val_acc: 0.9148 Epoch 234/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2375 - acc: 0.9307 - val_loss: 0.2869 - val_acc: 0.9076 Epoch 235/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2373 - acc: 0.9311 - val_loss: 0.2855 - val_acc: 0.9128 Epoch 236/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2368 - acc: 0.9308 - val_loss: 0.2879 - val_acc: 0.9084 Epoch 237/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2380 - acc: 0.9312 - val_loss: 0.2890 - val_acc: 0.9088 Epoch 238/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2385 - acc: 0.9321 - val_loss: 0.2836 - val_acc: 0.9092 Epoch 239/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2362 - acc: 0.9316 - val_loss: 0.2846 - val_acc: 0.9120 Epoch 240/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2351 - acc: 0.9325 - val_loss: 0.2908 - val_acc: 0.9060 Epoch 241/250 7500/7500 [==============================] - 0s 38us/step - loss: 0.2374 - acc: 0.9327 - val_loss: 0.2882 - val_acc: 0.9120 Epoch 242/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2377 - acc: 0.9317 - val_loss: 0.2855 - val_acc: 0.9124 Epoch 243/250 7500/7500 [==============================] - 0s 37us/step - loss: 0.2355 - acc: 0.9323 - val_loss: 0.2919 - val_acc: 0.9092 Epoch 244/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2354 - acc: 0.9319 - val_loss: 0.2906 - val_acc: 0.9092 Epoch 245/250 7500/7500 [==============================] - 0s 42us/step - loss: 0.2354 - acc: 0.9372 - val_loss: 0.2880 - val_acc: 0.9092 Epoch 246/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.2363 - 
acc: 0.9313 - val_loss: 0.2854 - val_acc: 0.9060 Epoch 247/250 7500/7500 [==============================] - 0s 40us/step - loss: 0.2357 - acc: 0.9317 - val_loss: 0.2890 - val_acc: 0.9084 Epoch 248/250 7500/7500 [==============================] - 0s 55us/step - loss: 0.2354 - acc: 0.9339 - val_loss: 0.2839 - val_acc: 0.9096 Epoch 249/250 7500/7500 [==============================] - 0s 41us/step - loss: 0.2354 - acc: 0.9337 - val_loss: 0.2948 - val_acc: 0.9096 Epoch 250/250 7500/7500 [==============================] - 0s 39us/step - loss: 0.2356 - acc: 0.9343 - val_loss: 0.2876 - val_acc: 0.9124 10000/10000 [==============================] - 0s 11us/step ['loss', 'acc'] [0.24436279261112212, 0.9289000034332275] acc: 92.89% Confusion Matrix: [[4593 413] [ 298 4696]] Backend: tensorflow Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_1 (Dense) (None, 38) 798 _________________________________________________________________ dense_2 (Dense) (None, 25) 975 _________________________________________________________________ dense_3 (Dense) (None, 1) 26 ================================================================= Total params: 1,799 Trainable params: 1,799 Non-trainable params: 0 _________________________________________________________________ None