# ----------------------------------------------------------------------------
## How to use l1_l2 regularization to a Deep Learning Model in Keras
# ----------------------------------------------------------------------------
def Learn_By_Example_304():
    """Train a Keras binary classifier on simulated data using l1_l2
    (elastic-net) weight regularization combined with dropout, then
    report test-set metrics and a confusion matrix.

    Side effects: prints shapes, training progress, evaluation scores,
    the confusion matrix, the Keras backend name and the model summary.
    Returns None.
    """
    print()
    print(format('How to use l1_l2 regularization to a Deep Learning Model in Keras', '*^82'))

    import warnings
    warnings.filterwarnings("ignore")

    # load libraries (kept function-local to match the tutorial style)
    import keras as K
    from keras.regularizers import l1_l2
    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from sklearn import datasets
    from sklearn.model_selection import train_test_split
    from sklearn.metrics import confusion_matrix

    # simulated data: 10k samples, 20 features (5 informative), 2 classes.
    # NOTE(review): random_state=None means a fresh dataset per run; pass an
    # int here for reproducible results.
    X, y = datasets.make_classification(
        n_samples=10000, n_features=20, n_informative=5,
        n_redundant=2, n_repeated=0, n_classes=2, n_clusters_per_class=2,
        weights=None, flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0,
        scale=1.0, shuffle=True, random_state=None)
    print(X.shape); print(y.shape)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    # Define a Deep Learning Model: two regularized hidden layers with
    # dropout, sigmoid output for binary classification.
    model = Sequential()
    model.add(Dense(45, input_dim=20,
                    kernel_regularizer=l1_l2(0.001),  # weight regularizer
                    activation='relu'))
    model.add(Dropout(0.5))  # Dropout Layer
    model.add(Dense(22,
                    kernel_regularizer=l1_l2(0.01),  # weight regularizer
                    activation='relu'))
    model.add(Dropout(0.5))  # Dropout Layer
    model.add(Dense(1, activation='sigmoid'))

    # Compile the Model
    model.compile(loss='binary_crossentropy', optimizer='adam',
                  metrics=['acc', 'mae'])

    # Train the Model
    model.fit(X_train, y_train, epochs=150, batch_size=25, verbose=1)

    # Evaluate the model on the held-out split
    scores = model.evaluate(X_test, y_test)
    print(); print(model.metrics_names); print(scores)
    print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1] * 100))

    # Confusion Matrix: threshold the sigmoid probabilities at 0.5
    y_pred = model.predict(X_test)
    y_pred = (y_pred > 0.5)
    cm = confusion_matrix(y_test, y_pred); print("\nConfusion Matrix:\n", cm)

    # More on the Model
    print("\n\nBackend: ", K.backend.backend())
    print(model.summary())
# Run the example only when executed as a script, not on import.
if __name__ == "__main__":
    Learn_By_Example_304()
********How to use l1_l2 regularization to a Deep Learning Model in Keras*********
Using TensorFlow backend.
(10000, 20) (10000,) Epoch 1/150 6700/6700 [==============================] - 0s 73us/step - loss: 2.0405 - acc: 0.6043 - mae: 0.4508 Epoch 2/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.9641 - acc: 0.7260 - mae: 0.3974 Epoch 3/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.6997 - acc: 0.7700 - mae: 0.3577 Epoch 4/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.6063 - acc: 0.7922 - mae: 0.3268 Epoch 5/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5665 - acc: 0.8134 - mae: 0.3055 Epoch 6/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5518 - acc: 0.8170 - mae: 0.3000 Epoch 7/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5315 - acc: 0.8270 - mae: 0.2880 Epoch 8/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5236 - acc: 0.8297 - mae: 0.2828 Epoch 9/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5239 - acc: 0.8258 - mae: 0.2815 Epoch 10/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5136 - acc: 0.8340 - mae: 0.2753 Epoch 11/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5081 - acc: 0.8313 - mae: 0.2734 Epoch 12/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.5105 - acc: 0.8306 - mae: 0.2742 Epoch 13/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4976 - acc: 0.8337 - mae: 0.2696 Epoch 14/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4991 - acc: 0.8324 - mae: 0.2670 Epoch 15/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4917 - acc: 0.8352 - mae: 0.2639 Epoch 16/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4903 - acc: 0.8334 - mae: 0.2626 Epoch 17/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4842 - acc: 0.8366 - mae: 0.2606 Epoch 18/150 6700/6700 
[==============================] - 0s 38us/step - loss: 0.4783 - acc: 0.8442 - mae: 0.2552 Epoch 19/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4760 - acc: 0.8461 - mae: 0.2547 Epoch 20/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4762 - acc: 0.8425 - mae: 0.2556 Epoch 21/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4715 - acc: 0.8458 - mae: 0.2513 Epoch 22/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4700 - acc: 0.8416 - mae: 0.2539 Epoch 23/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4680 - acc: 0.8472 - mae: 0.2494 Epoch 24/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4663 - acc: 0.8422 - mae: 0.2515 Epoch 25/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4616 - acc: 0.8451 - mae: 0.2473 Epoch 26/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4582 - acc: 0.8497 - mae: 0.2458 Epoch 27/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4635 - acc: 0.8490 - mae: 0.2478 Epoch 28/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4590 - acc: 0.8470 - mae: 0.2458 Epoch 29/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4531 - acc: 0.8497 - mae: 0.2426 Epoch 30/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4517 - acc: 0.8443 - mae: 0.2435 Epoch 31/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4488 - acc: 0.8482 - mae: 0.2397 Epoch 32/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4496 - acc: 0.8524 - mae: 0.2414 Epoch 33/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4452 - acc: 0.8490 - mae: 0.2386 Epoch 34/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4536 - acc: 0.8493 - mae: 0.2432 Epoch 35/150 6700/6700 [==============================] - 0s 39us/step - loss: 
0.4491 - acc: 0.8564 - mae: 0.2405 Epoch 36/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4460 - acc: 0.8579 - mae: 0.2375 Epoch 37/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4332 - acc: 0.8633 - mae: 0.2297 Epoch 38/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4370 - acc: 0.8590 - mae: 0.2308 Epoch 39/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4331 - acc: 0.8618 - mae: 0.2326 Epoch 40/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4307 - acc: 0.8597 - mae: 0.2284 Epoch 41/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4390 - acc: 0.8622 - mae: 0.2330 Epoch 42/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4337 - acc: 0.8676 - mae: 0.2259 Epoch 43/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4354 - acc: 0.8636 - mae: 0.2299 Epoch 44/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4300 - acc: 0.8633 - mae: 0.2235 Epoch 45/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4322 - acc: 0.8601 - mae: 0.2283 Epoch 46/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4331 - acc: 0.8607 - mae: 0.2264 Epoch 47/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4267 - acc: 0.8678 - mae: 0.2237 Epoch 48/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4197 - acc: 0.8712 - mae: 0.2164 Epoch 49/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.4228 - acc: 0.8684 - mae: 0.2189 Epoch 50/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4213 - acc: 0.8733 - mae: 0.2196 Epoch 51/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4250 - acc: 0.8728 - mae: 0.2208 Epoch 52/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4177 - acc: 0.8652 - mae: 0.2181 Epoch 53/150 6700/6700 
[==============================] - 0s 39us/step - loss: 0.4216 - acc: 0.8693 - mae: 0.2191 Epoch 54/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4225 - acc: 0.8669 - mae: 0.2216 Epoch 55/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4158 - acc: 0.8709 - mae: 0.2174 Epoch 56/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4185 - acc: 0.8676 - mae: 0.2157 Epoch 57/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4185 - acc: 0.8691 - mae: 0.2183 Epoch 58/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4161 - acc: 0.8709 - mae: 0.2154 Epoch 59/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4194 - acc: 0.8704 - mae: 0.2187 Epoch 60/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4186 - acc: 0.8699 - mae: 0.2175 Epoch 61/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4197 - acc: 0.8699 - mae: 0.2184 Epoch 62/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4135 - acc: 0.8664 - mae: 0.2182 Epoch 63/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4176 - acc: 0.8664 - mae: 0.2169 Epoch 64/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4198 - acc: 0.8684 - mae: 0.2189 Epoch 65/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4136 - acc: 0.8716 - mae: 0.2144 Epoch 66/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4083 - acc: 0.8739 - mae: 0.2112 Epoch 67/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4232 - acc: 0.8719 - mae: 0.2209 Epoch 68/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4101 - acc: 0.8704 - mae: 0.2134 Epoch 69/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4075 - acc: 0.8722 - mae: 0.2118 Epoch 70/150 6700/6700 [==============================] - 0s 37us/step - loss: 
0.4177 - acc: 0.8742 - mae: 0.2176 Epoch 71/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4120 - acc: 0.8751 - mae: 0.2129 Epoch 72/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4125 - acc: 0.8715 - mae: 0.2151 Epoch 73/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4157 - acc: 0.8728 - mae: 0.2150 Epoch 74/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4042 - acc: 0.8728 - mae: 0.2101 Epoch 75/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4086 - acc: 0.8770 - mae: 0.2101 Epoch 76/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4058 - acc: 0.8770 - mae: 0.2104 Epoch 77/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4082 - acc: 0.8742 - mae: 0.2114 Epoch 78/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4072 - acc: 0.8733 - mae: 0.2119 Epoch 79/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4073 - acc: 0.8785 - mae: 0.2102 Epoch 80/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4049 - acc: 0.8778 - mae: 0.2073 Epoch 81/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.4083 - acc: 0.8734 - mae: 0.2126 Epoch 82/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4132 - acc: 0.8733 - mae: 0.2148 Epoch 83/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4087 - acc: 0.8760 - mae: 0.2130 Epoch 84/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4118 - acc: 0.8734 - mae: 0.2130 Epoch 85/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4049 - acc: 0.8733 - mae: 0.2114 Epoch 86/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4034 - acc: 0.8730 - mae: 0.2097 Epoch 87/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4107 - acc: 0.8743 - mae: 0.2136 Epoch 88/150 6700/6700 
[==============================] - 0s 40us/step - loss: 0.4043 - acc: 0.8770 - mae: 0.2093 Epoch 89/150 6700/6700 [==============================] - 0s 42us/step - loss: 0.4089 - acc: 0.8757 - mae: 0.2109 Epoch 90/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4017 - acc: 0.8760 - mae: 0.2071 Epoch 91/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4079 - acc: 0.8758 - mae: 0.2113 Epoch 92/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4041 - acc: 0.8746 - mae: 0.2077 Epoch 93/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4000 - acc: 0.8754 - mae: 0.2085 Epoch 94/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4053 - acc: 0.8713 - mae: 0.2095 Epoch 95/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.3989 - acc: 0.8776 - mae: 0.2051 Epoch 96/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.4046 - acc: 0.8718 - mae: 0.2122 Epoch 97/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4109 - acc: 0.8743 - mae: 0.2150 Epoch 98/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.3984 - acc: 0.8766 - mae: 0.2068 Epoch 99/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3918 - acc: 0.8775 - mae: 0.2011 Epoch 100/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4088 - acc: 0.8755 - mae: 0.2119 Epoch 101/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.3945 - acc: 0.8763 - mae: 0.2056 Epoch 102/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4015 - acc: 0.8793 - mae: 0.2080 Epoch 103/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.3940 - acc: 0.8787 - mae: 0.2027 Epoch 104/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3975 - acc: 0.8736 - mae: 0.2065 Epoch 105/150 6700/6700 [==============================] - 0s 39us/step - loss: 
0.3978 - acc: 0.8787 - mae: 0.2056 Epoch 106/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.3995 - acc: 0.8754 - mae: 0.2085 Epoch 107/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3927 - acc: 0.8782 - mae: 0.2031 Epoch 108/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.4044 - acc: 0.8746 - mae: 0.2111 Epoch 109/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.4008 - acc: 0.8760 - mae: 0.2059 Epoch 110/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.3948 - acc: 0.8790 - mae: 0.2041 Epoch 111/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3918 - acc: 0.8748 - mae: 0.2037 Epoch 112/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4029 - acc: 0.8752 - mae: 0.2088 Epoch 113/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.3931 - acc: 0.8757 - mae: 0.2036 Epoch 114/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3972 - acc: 0.8739 - mae: 0.2066 Epoch 115/150 6700/6700 [==============================] - 0s 37us/step - loss: 0.3957 - acc: 0.8763 - mae: 0.2054 Epoch 116/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.3888 - acc: 0.8785 - mae: 0.1989 Epoch 117/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.4010 - acc: 0.8746 - mae: 0.2079 Epoch 118/150 6700/6700 [==============================] - 0s 43us/step - loss: 0.3906 - acc: 0.8752 - mae: 0.2027 Epoch 119/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3967 - acc: 0.8794 - mae: 0.2054 Epoch 120/150 6700/6700 [==============================] - 0s 44us/step - loss: 0.3948 - acc: 0.8739 - mae: 0.2040 Epoch 121/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.3952 - acc: 0.8776 - mae: 0.2061 Epoch 122/150 6700/6700 [==============================] - 0s 44us/step - loss: 0.3915 - acc: 0.8734 - mae: 0.2044 Epoch 
123/150 6700/6700 [==============================] - 0s 42us/step - loss: 0.3933 - acc: 0.8813 - mae: 0.2023 Epoch 124/150 6700/6700 [==============================] - 0s 50us/step - loss: 0.3947 - acc: 0.8772 - mae: 0.2040 Epoch 125/150 6700/6700 [==============================] - 0s 44us/step - loss: 0.3905 - acc: 0.8767 - mae: 0.2025 Epoch 126/150 6700/6700 [==============================] - 0s 51us/step - loss: 0.3843 - acc: 0.8796 - mae: 0.1995 Epoch 127/150 6700/6700 [==============================] - 0s 69us/step - loss: 0.3924 - acc: 0.8715 - mae: 0.2038 Epoch 128/150 6700/6700 [==============================] - 0s 53us/step - loss: 0.3932 - acc: 0.8751 - mae: 0.2036 Epoch 129/150 6700/6700 [==============================] - 0s 56us/step - loss: 0.3950 - acc: 0.8770 - mae: 0.2051 Epoch 130/150 6700/6700 [==============================] - 0s 47us/step - loss: 0.3995 - acc: 0.8694 - mae: 0.2085 Epoch 131/150 6700/6700 [==============================] - 0s 42us/step - loss: 0.3904 - acc: 0.8816 - mae: 0.2007 Epoch 132/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3955 - acc: 0.8763 - mae: 0.2063 Epoch 133/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.3875 - acc: 0.8761 - mae: 0.2003 Epoch 134/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3899 - acc: 0.8767 - mae: 0.2017 Epoch 135/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3844 - acc: 0.8737 - mae: 0.1999 Epoch 136/150 6700/6700 [==============================] - 0s 38us/step - loss: 0.3962 - acc: 0.8767 - mae: 0.2043 Epoch 137/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3907 - acc: 0.8827 - mae: 0.2015 Epoch 138/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.3946 - acc: 0.8734 - mae: 0.2060 Epoch 139/150 6700/6700 [==============================] - 0s 39us/step - loss: 0.3882 - acc: 0.8743 - mae: 0.2007 Epoch 140/150 6700/6700 
[==============================] - 0s 38us/step - loss: 0.3918 - acc: 0.8766 - mae: 0.2033 Epoch 141/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.3916 - acc: 0.8773 - mae: 0.2048 Epoch 142/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.3866 - acc: 0.8782 - mae: 0.1995 Epoch 143/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.3961 - acc: 0.8772 - mae: 0.2041 Epoch 144/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.3944 - acc: 0.8767 - mae: 0.2056 Epoch 145/150 6700/6700 [==============================] - 0s 55us/step - loss: 0.3909 - acc: 0.8776 - mae: 0.2022 Epoch 146/150 6700/6700 [==============================] - 0s 44us/step - loss: 0.3912 - acc: 0.8740 - mae: 0.2039 Epoch 147/150 6700/6700 [==============================] - 0s 40us/step - loss: 0.3904 - acc: 0.8740 - mae: 0.2040 Epoch 148/150 6700/6700 [==============================] - 0s 41us/step - loss: 0.3938 - acc: 0.8770 - mae: 0.2049 Epoch 149/150 6700/6700 [==============================] - 0s 46us/step - loss: 0.3907 - acc: 0.8748 - mae: 0.2054 Epoch 150/150 6700/6700 [==============================] - 0s 47us/step - loss: 0.3926 - acc: 0.8736 - mae: 0.2043 3300/3300 [==============================] - 0s 30us/step ['loss', 'acc', 'mae'] [0.30954577937270655, 0.9096969962120056, 0.16899099946022034] acc: 90.97% Confusion Matrix: [[1526 174] [ 124 1476]] Backend: tensorflow Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_1 (Dense) (None, 45) 945 _________________________________________________________________ dropout_1 (Dropout) (None, 45) 0 _________________________________________________________________ dense_2 (Dense) (None, 22) 1012 _________________________________________________________________ dropout_2 (Dropout) (None, 22) 0 
_________________________________________________________________ dense_3 (Dense) (None, 1) 23 ================================================================= Total params: 1,980 Trainable params: 1,980 Non-trainable params: 0 _________________________________________________________________ None