# ----------------------------------------------------------------------------
## How to add a Weight Regularization (l2) to a Deep Learning Model in Keras
# ----------------------------------------------------------------------------
def Learn_By_Example_303():
    """Demonstrate l2 weight regularization in a Keras binary classifier.

    Builds a small feed-forward network (two hidden layers, each with an
    l2 kernel regularizer and dropout), trains it on a simulated
    two-class dataset from scikit-learn, then prints the test accuracy,
    a confusion matrix, the active Keras backend, and the model summary.

    Returns:
        None. All results are written to stdout.
    """
    print()
    print(format('How to add a Weight Regularization (l2) to a Deep Learning Model in Keras', '*^82'))

    import warnings
    warnings.filterwarnings("ignore")

    # Function-scope imports keep this self-contained tutorial example
    # independent of the rest of the file.
    import keras as K
    from keras.regularizers import l2
    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from sklearn import datasets
    from sklearn.model_selection import train_test_split
    from sklearn.metrics import confusion_matrix

    # Simulated binary-classification data: 10000 samples, 20 features.
    # NOTE: random_state=None means the data (and results) differ per run.
    X, y = datasets.make_classification(
        n_samples=10000, n_features=20, n_informative=5,
        n_redundant=2, n_repeated=0, n_classes=2, n_clusters_per_class=2,
        weights=None, flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0,
        scale=1.0, shuffle=True, random_state=None)
    print(X.shape)
    print(y.shape)

    # Hold out a third of the data for evaluation.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    # Define the model: each hidden Dense layer carries an l2 kernel
    # regularizer (weight penalty) and is followed by dropout.
    model = Sequential()
    model.add(Dense(30, input_dim=20, kernel_regularizer=l2(0.01),  # weight regularizer
                    activation='relu'))
    model.add(Dropout(0.5))  # Dropout Layer
    model.add(Dense(18, kernel_regularizer=l2(0.01),  # weight regularizer
                    activation='relu'))
    model.add(Dropout(0.5))  # Dropout Layer
    model.add(Dense(1, activation='sigmoid'))  # single-unit sigmoid output

    # Compile the model for binary classification.
    model.compile(loss='binary_crossentropy', optimizer='adam',
                  metrics=['acc'])

    # Train the model.
    model.fit(X_train, y_train, epochs=150, batch_size=25, verbose=1)

    # Evaluate on the held-out test split.
    scores = model.evaluate(X_test, y_test)
    print()
    print(model.metrics_names)
    print(scores)
    print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1] * 100))

    # Confusion matrix: threshold the sigmoid output at 0.5.
    y_pred = model.predict(X_test) > 0.5
    cm = confusion_matrix(y_test, y_pred)
    print("\nConfusion Matrix:\n", cm)

    # More on the model.
    print("\n\nBackend: ", K.backend.backend())
    print(model.summary())


Learn_By_Example_303()
# ----------------------------------------------------------------------------
# Sample console output from one representative run, preserved verbatim below
# as a string literal so the file remains valid Python. Exact numbers vary
# between runs because the simulated data uses random_state=None.
# ----------------------------------------------------------------------------
_SAMPLE_OUTPUT = """
****How to add a Weight Regularization (l2) to a Deep Learning Model in Keras*****
Using TensorFlow backend.
(10000, 20) (10000,) Epoch 1/150 6700/6700 [==============================] - 1s 99us/step - loss: 1.0869 - acc: 0.6000 Epoch 2/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.7916 - acc: 0.7134 Epoch 3/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.6277 - acc: 0.7936 Epoch 4/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.5110 - acc: 0.8399 Epoch 5/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.4452 - acc: 0.8679 Epoch 6/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.4032 - acc: 0.8791 Epoch 7/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3780 - acc: 0.8891 Epoch 8/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3713 - acc: 0.8857 Epoch 9/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3538 - acc: 0.8969 Epoch 10/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3477 - acc: 0.8985 Epoch 11/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.3424 - acc: 0.8993 Epoch 12/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.3402 - acc: 0.9001 Epoch 13/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.3273 - acc: 0.9037 Epoch 14/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.3231 - acc: 0.9066 Epoch 15/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.3179 - acc: 0.9021 Epoch 16/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3127 - acc: 0.9124 Epoch 17/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3122 - acc: 0.9110 Epoch 18/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3158 - acc: 0.9076 Epoch 19/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3085 - acc: 0.9104 Epoch 20/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3035 - 
acc: 0.9130 Epoch 21/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3081 - acc: 0.9085 Epoch 22/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.3016 - acc: 0.9155 Epoch 23/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2966 - acc: 0.9122 Epoch 24/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2943 - acc: 0.9148 Epoch 25/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2889 - acc: 0.9163 Epoch 26/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2976 - acc: 0.9131 Epoch 27/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2971 - acc: 0.9143 Epoch 28/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2883 - acc: 0.9148 Epoch 29/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2835 - acc: 0.9164 Epoch 30/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2883 - acc: 0.9176 Epoch 31/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2937 - acc: 0.9148 Epoch 32/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2844 - acc: 0.9145 Epoch 33/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2821 - acc: 0.9152 Epoch 34/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2822 - acc: 0.9179 Epoch 35/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2809 - acc: 0.9149 Epoch 36/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2902 - acc: 0.9140 Epoch 37/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2866 - acc: 0.9149 Epoch 38/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2827 - acc: 0.9149 Epoch 39/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2870 - acc: 0.9149 Epoch 40/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2809 - 
acc: 0.9173 Epoch 41/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2797 - acc: 0.9173 Epoch 42/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2825 - acc: 0.9134 Epoch 43/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2785 - acc: 0.9139 Epoch 44/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2800 - acc: 0.9193 Epoch 45/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2747 - acc: 0.9167 Epoch 46/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2782 - acc: 0.9170 Epoch 47/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2799 - acc: 0.9152 Epoch 48/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2757 - acc: 0.9203 Epoch 49/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2725 - acc: 0.9203 Epoch 50/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2778 - acc: 0.9178 Epoch 51/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2682 - acc: 0.9176 Epoch 52/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2695 - acc: 0.9170 Epoch 53/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2735 - acc: 0.9172 Epoch 54/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2762 - acc: 0.9196 Epoch 55/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2692 - acc: 0.9176 Epoch 56/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2721 - acc: 0.9173 Epoch 57/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2689 - acc: 0.9219 Epoch 58/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2652 - acc: 0.9188 Epoch 59/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2664 - acc: 0.9194 Epoch 60/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2666 - 
acc: 0.9193 Epoch 61/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2670 - acc: 0.9200 Epoch 62/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2734 - acc: 0.9175 Epoch 63/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2706 - acc: 0.9176 Epoch 64/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2749 - acc: 0.9178 Epoch 65/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2674 - acc: 0.9148 Epoch 66/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2688 - acc: 0.9213 Epoch 67/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2728 - acc: 0.9185 Epoch 68/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2729 - acc: 0.9193 Epoch 69/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2756 - acc: 0.9164 Epoch 70/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2688 - acc: 0.9216 Epoch 71/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2707 - acc: 0.9185 Epoch 72/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2702 - acc: 0.9182 Epoch 73/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2649 - acc: 0.9193 Epoch 74/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.2639 - acc: 0.9216 Epoch 75/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.2637 - acc: 0.9158 Epoch 76/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.2694 - acc: 0.9176 Epoch 77/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.2658 - acc: 0.9178 Epoch 78/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.2628 - acc: 0.9193 Epoch 79/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.2692 - acc: 0.9201 Epoch 80/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.2587 - 
acc: 0.9212 Epoch 81/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2665 - acc: 0.9255 Epoch 82/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2621 - acc: 0.9196 Epoch 83/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2572 - acc: 0.9261 Epoch 84/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2618 - acc: 0.9222 Epoch 85/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2668 - acc: 0.9181 Epoch 86/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2595 - acc: 0.9218 Epoch 87/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2695 - acc: 0.9209 Epoch 88/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2595 - acc: 0.9225 Epoch 89/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2644 - acc: 0.9207 Epoch 90/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2600 - acc: 0.9234 Epoch 91/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2618 - acc: 0.9200 Epoch 92/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2677 - acc: 0.9207 Epoch 93/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2624 - acc: 0.9207 Epoch 94/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2581 - acc: 0.9240 Epoch 95/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2628 - acc: 0.9216 Epoch 96/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2616 - acc: 0.9199 Epoch 97/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2614 - acc: 0.9227 Epoch 98/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2654 - acc: 0.9191 Epoch 99/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2576 - acc: 0.9210 Epoch 100/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2667 
- acc: 0.9178 Epoch 101/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2593 - acc: 0.9197 Epoch 102/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2620 - acc: 0.9215 Epoch 103/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2614 - acc: 0.9233 Epoch 104/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2676 - acc: 0.9173 Epoch 105/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2639 - acc: 0.9196 Epoch 106/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2651 - acc: 0.9207 Epoch 107/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2614 - acc: 0.9212 Epoch 108/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2592 - acc: 0.9213 Epoch 109/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2590 - acc: 0.9231 Epoch 110/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2603 - acc: 0.9187 Epoch 111/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2497 - acc: 0.9233 Epoch 112/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2498 - acc: 0.9255 Epoch 113/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2575 - acc: 0.9237 Epoch 114/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2537 - acc: 0.9212 Epoch 115/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2612 - acc: 0.9252 Epoch 116/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2591 - acc: 0.9221 Epoch 117/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2434 - acc: 0.9254 Epoch 118/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2587 - acc: 0.9197 Epoch 119/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2599 - acc: 0.9225 Epoch 120/150 6700/6700 [==============================] - 0s 
35us/step - loss: 0.2544 - acc: 0.9260 Epoch 121/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2568 - acc: 0.9228 Epoch 122/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2635 - acc: 0.9188 Epoch 123/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2569 - acc: 0.9228 Epoch 124/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2651 - acc: 0.9193 Epoch 125/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2626 - acc: 0.9210 Epoch 126/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2614 - acc: 0.9210 Epoch 127/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2582 - acc: 0.9291 Epoch 128/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2451 - acc: 0.9272 Epoch 129/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2628 - acc: 0.9239 Epoch 130/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2520 - acc: 0.9237 Epoch 131/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2621 - acc: 0.9204 Epoch 132/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2559 - acc: 0.9228 Epoch 133/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2541 - acc: 0.9218 Epoch 134/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2558 - acc: 0.9240 Epoch 135/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2560 - acc: 0.9173 Epoch 136/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2524 - acc: 0.9227 Epoch 137/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2631 - acc: 0.9231 Epoch 138/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2578 - acc: 0.9213 Epoch 139/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2547 - acc: 0.9245 Epoch 140/150 6700/6700 
[==============================] - 0s 36us/step - loss: 0.2587 - acc: 0.9273 Epoch 141/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2541 - acc: 0.9230 Epoch 142/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2632 - acc: 0.9193 Epoch 143/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2520 - acc: 0.9264 Epoch 144/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2594 - acc: 0.9237 Epoch 145/150 6700/6700 [==============================] - 0s 35us/step - loss: 0.2571 - acc: 0.9221 Epoch 146/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2505 - acc: 0.9254 Epoch 147/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2625 - acc: 0.9236 Epoch 148/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2511 - acc: 0.9260 Epoch 149/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2468 - acc: 0.9206 Epoch 150/150 6700/6700 [==============================] - 0s 36us/step - loss: 0.2603 - acc: 0.9234 3300/3300 [==============================] - 0s 30us/step ['loss', 'acc'] [0.17403982801870865, 0.9575757384300232] acc: 95.76% Confusion Matrix: [[1630 61] [ 79 1530]] Backend: tensorflow Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_1 (Dense) (None, 30) 630 _________________________________________________________________ dropout_1 (Dropout) (None, 30) 0 _________________________________________________________________ dense_2 (Dense) (None, 18) 558 _________________________________________________________________ dropout_2 (Dropout) (None, 18) 0 _________________________________________________________________ dense_3 (Dense) (None, 1) 19 ================================================================= Total params: 1,207 Trainable params: 1,207 
Non-trainable params: 0 _________________________________________________________________ None
"""