In [1]:
# ----------------------------------------------------------------------------
## How to use the RandomNormal initializer in a Deep Learning Model in Keras
# ----------------------------------------------------------------------------

def Learn_By_Example_305(): 

    print()
    print(format('How to use the RandomNormal initializer in a Deep Learning Model in Keras','*^82'))
    
    import warnings
    warnings.filterwarnings("ignore")
    
    # load libraries
    import keras as K
    from keras.initializers import RandomNormal
    from keras.regularizers import l1_l2
    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from sklearn import datasets
    from sklearn.model_selection import train_test_split
    from sklearn.metrics import confusion_matrix
    
    # simulated data
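    # 10,000 rows, 20 features (5 informative, 2 redundant), 2 balanced classes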
    dataset = datasets.make_classification(n_samples=10000, n_features=20, n_informative=5, 
                n_redundant=2, n_repeated=0, n_classes=2, n_clusters_per_class=2, 
                weights=None, flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0, 
                scale=1.0, shuffle=True, random_state=None)
    
    X = dataset[0];  y = dataset[1]
    print(X.shape);  print(y.shape)
    
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)    
    
    # Define a Deep Learning Model
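    # RandomNormal draws each starting weight from a normal (Gaussian)
    # distribution with the given mean and stddev; seed=None means a
    # different draw on every run (pass an int seed for reproducibility)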
    model = Sequential()
    model.add(Dense(45, input_dim=20, 
                    kernel_regularizer=l1_l2(l1=0.001), # weight regularizer (l2 keeps its 0.01 default)
                    kernel_initializer=RandomNormal(mean=0.0, stddev=0.05, seed=None), # initializer
                    activation='relu'))
    model.add(Dropout(0.5)) # Dropout Layer
    model.add(Dense(22, 
                    kernel_regularizer=l1_l2(l1=0.01), # weight regularizer (l2 keeps its 0.01 default)
                    kernel_initializer=RandomNormal(mean=0.0, stddev=0.05, seed=None), # initializer                   
                    activation='relu'))
    model.add(Dropout(0.5)) # Dropout Layer    
    model.add(Dense(1, activation='sigmoid'))
    
    # Compile the Model
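    # binary_crossentropy pairs with the single sigmoid output;
    # 'acc' and 'mae' are tracked as extra metrics during training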
    model.compile(loss='binary_crossentropy', optimizer='adam', 
                  metrics=['acc','mae'])
    
    # Train the Model
    model.fit(X_train, y_train, epochs=100, batch_size=25, verbose=1)
    
    # Evaluate the model
    scores = model.evaluate(X_test, y_test)
    print(); print(model.metrics_names); print(scores)
    print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1]*100))
    
    # Confusion Matrix
    y_pred = model.predict(X_test)
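    # threshold the sigmoid probabilities at 0.5 to get hard 0/1 class labels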
    y_pred = (y_pred > 0.5)
    cm = confusion_matrix(y_test, y_pred); print("\nConfusion Matrix:\n", cm)
    
    # More on the Model
    print("\n\nBackend: ", K.backend.backend())    
    print(model.summary())
    
Learn_By_Example_305()
****How to use the RandomNormal initializer in a Deep Learning Model in Keras*****
Using TensorFlow backend.
(10000, 20)
(10000,)
Epoch 1/100
6700/6700 [==============================] - 1s 80us/step - loss: 0.6980 - acc: 0.7667 - mae: 0.4055
Epoch 2/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.5253 - acc: 0.8428 - mae: 0.2871
Epoch 3/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.5025 - acc: 0.8513 - mae: 0.2649
Epoch 4/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.4880 - acc: 0.8603 - mae: 0.2530
Epoch 5/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4810 - acc: 0.8634 - mae: 0.2494
Epoch 6/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4727 - acc: 0.8654 - mae: 0.2449
Epoch 7/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4622 - acc: 0.8684 - mae: 0.2376
Epoch 8/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4613 - acc: 0.8703 - mae: 0.2354
Epoch 9/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4591 - acc: 0.8678 - mae: 0.2336
Epoch 10/100
6700/6700 [==============================] - 0s 44us/step - loss: 0.4486 - acc: 0.8734 - mae: 0.2282
Epoch 11/100
6700/6700 [==============================] - 0s 45us/step - loss: 0.4421 - acc: 0.8758 - mae: 0.2259
Epoch 12/100
6700/6700 [==============================] - 0s 42us/step - loss: 0.4484 - acc: 0.8712 - mae: 0.2264
Epoch 13/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.4410 - acc: 0.8746 - mae: 0.2243
Epoch 14/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.4387 - acc: 0.8761 - mae: 0.2191
Epoch 15/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4304 - acc: 0.8773 - mae: 0.2171
Epoch 16/100
6700/6700 [==============================] - 0s 37us/step - loss: 0.4316 - acc: 0.8794 - mae: 0.2168
Epoch 17/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4273 - acc: 0.8863 - mae: 0.2121
Epoch 18/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.4270 - acc: 0.8769 - mae: 0.2157
Epoch 19/100
6700/6700 [==============================] - 0s 44us/step - loss: 0.4230 - acc: 0.8787 - mae: 0.2115
Epoch 20/100
6700/6700 [==============================] - 0s 43us/step - loss: 0.4177 - acc: 0.8858 - mae: 0.2066
Epoch 21/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.4189 - acc: 0.8854 - mae: 0.2076
Epoch 22/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.4213 - acc: 0.8843 - mae: 0.2104
Epoch 23/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.4116 - acc: 0.8842 - mae: 0.2036
Epoch 24/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4170 - acc: 0.8833 - mae: 0.2071
Epoch 25/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4129 - acc: 0.8858 - mae: 0.2061
Epoch 26/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4051 - acc: 0.8882 - mae: 0.2018
Epoch 27/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4101 - acc: 0.8851 - mae: 0.2059
Epoch 28/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.4070 - acc: 0.8804 - mae: 0.2033
Epoch 29/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4057 - acc: 0.8879 - mae: 0.2006
Epoch 30/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4005 - acc: 0.8897 - mae: 0.1993
Epoch 31/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.4001 - acc: 0.8837 - mae: 0.2003
Epoch 32/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3999 - acc: 0.8858 - mae: 0.1988
Epoch 33/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3977 - acc: 0.8846 - mae: 0.1984
Epoch 34/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.4011 - acc: 0.8842 - mae: 0.2012
Epoch 35/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.4016 - acc: 0.8845 - mae: 0.1999
Epoch 36/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3990 - acc: 0.8840 - mae: 0.1982
Epoch 37/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3955 - acc: 0.8845 - mae: 0.1989
Epoch 38/100
6700/6700 [==============================] - 0s 44us/step - loss: 0.3806 - acc: 0.8821 - mae: 0.1891
Epoch 39/100
6700/6700 [==============================] - 0s 42us/step - loss: 0.3826 - acc: 0.8918 - mae: 0.1905
Epoch 40/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.3914 - acc: 0.8897 - mae: 0.1930
Epoch 41/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.3957 - acc: 0.8857 - mae: 0.1957
Epoch 42/100
6700/6700 [==============================] - 0s 41us/step - loss: 0.3926 - acc: 0.8875 - mae: 0.1973
Epoch 43/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3996 - acc: 0.8854 - mae: 0.1996
Epoch 44/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3831 - acc: 0.8879 - mae: 0.1912
Epoch 45/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.3866 - acc: 0.8839 - mae: 0.1927
Epoch 46/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3927 - acc: 0.8876 - mae: 0.1960
Epoch 47/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3817 - acc: 0.8912 - mae: 0.1895
Epoch 48/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3793 - acc: 0.8918 - mae: 0.1875
Epoch 49/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3835 - acc: 0.8875 - mae: 0.1911
Epoch 50/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3822 - acc: 0.8872 - mae: 0.1908
Epoch 51/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3829 - acc: 0.8894 - mae: 0.1898
Epoch 52/100
6700/6700 [==============================] - 0s 41us/step - loss: 0.3812 - acc: 0.8894 - mae: 0.1908
Epoch 53/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3893 - acc: 0.8888 - mae: 0.1936
Epoch 54/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3867 - acc: 0.8897 - mae: 0.1906
Epoch 55/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3931 - acc: 0.8887 - mae: 0.1978
Epoch 56/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3765 - acc: 0.8973 - mae: 0.1857
Epoch 57/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.3825 - acc: 0.8870 - mae: 0.1915
Epoch 58/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3736 - acc: 0.8904 - mae: 0.1857
Epoch 59/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3776 - acc: 0.8887 - mae: 0.1891
Epoch 60/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3772 - acc: 0.8958 - mae: 0.1860
Epoch 61/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.3814 - acc: 0.8930 - mae: 0.1874
Epoch 62/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3797 - acc: 0.8873 - mae: 0.1890
Epoch 63/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3770 - acc: 0.8894 - mae: 0.1832
Epoch 64/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3775 - acc: 0.8925 - mae: 0.1870
Epoch 65/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.3799 - acc: 0.8890 - mae: 0.1881
Epoch 66/100
6700/6700 [==============================] - 0s 40us/step - loss: 0.3701 - acc: 0.8942 - mae: 0.1828
Epoch 67/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3769 - acc: 0.8885 - mae: 0.1851
Epoch 68/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3772 - acc: 0.8943 - mae: 0.1858
Epoch 69/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3730 - acc: 0.8994 - mae: 0.1814
Epoch 70/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3798 - acc: 0.8899 - mae: 0.1871
Epoch 71/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3675 - acc: 0.8958 - mae: 0.1825
Epoch 72/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3719 - acc: 0.8958 - mae: 0.1808
Epoch 73/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3699 - acc: 0.8927 - mae: 0.1824
Epoch 74/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3666 - acc: 0.8945 - mae: 0.1793
Epoch 75/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3624 - acc: 0.8967 - mae: 0.1790
Epoch 76/100
6700/6700 [==============================] - 0s 39us/step - loss: 0.3745 - acc: 0.8921 - mae: 0.1846
Epoch 77/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3649 - acc: 0.8955 - mae: 0.1790
Epoch 78/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3689 - acc: 0.8958 - mae: 0.1835
Epoch 79/100
6700/6700 [==============================] - 0s 37us/step - loss: 0.3698 - acc: 0.8939 - mae: 0.1802
Epoch 80/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3655 - acc: 0.8951 - mae: 0.1806
Epoch 81/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3662 - acc: 0.8930 - mae: 0.1817
Epoch 82/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3728 - acc: 0.8939 - mae: 0.1847
Epoch 83/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3644 - acc: 0.8948 - mae: 0.1786
Epoch 84/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3661 - acc: 0.8924 - mae: 0.1819
Epoch 85/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3621 - acc: 0.8927 - mae: 0.1786
Epoch 86/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3603 - acc: 0.8975 - mae: 0.1770
Epoch 87/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3621 - acc: 0.8958 - mae: 0.1770
Epoch 88/100
6700/6700 [==============================] - 0s 37us/step - loss: 0.3685 - acc: 0.8885 - mae: 0.1819
Epoch 89/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3617 - acc: 0.8924 - mae: 0.1797
Epoch 90/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3672 - acc: 0.8969 - mae: 0.1808
Epoch 91/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3631 - acc: 0.8967 - mae: 0.1787
Epoch 92/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3634 - acc: 0.8978 - mae: 0.1791
Epoch 93/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3578 - acc: 0.8970 - mae: 0.1759
Epoch 94/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3653 - acc: 0.8969 - mae: 0.1797
Epoch 95/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3581 - acc: 0.8975 - mae: 0.1763
Epoch 96/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3682 - acc: 0.8949 - mae: 0.1818
Epoch 97/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3599 - acc: 0.8979 - mae: 0.1748
Epoch 98/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3635 - acc: 0.8939 - mae: 0.1778
Epoch 99/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3650 - acc: 0.8979 - mae: 0.1783
Epoch 100/100
6700/6700 [==============================] - 0s 38us/step - loss: 0.3597 - acc: 0.8963 - mae: 0.1771
3300/3300 [==============================] - 0s 26us/step

['loss', 'acc', 'mae']
[0.27804881182583896, 0.935757577419281, 0.12871871888637543]

acc: 93.58%

Confusion Matrix:
 [[1588  116]
 [  96 1500]]


Backend:  tensorflow
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 45)                945       
_________________________________________________________________
dropout_1 (Dropout)          (None, 45)                0         
_________________________________________________________________
dense_2 (Dense)              (None, 22)                1012      
_________________________________________________________________
dropout_2 (Dropout)          (None, 22)                0         
_________________________________________________________________
dense_3 (Dense)              (None, 1)                 23        
=================================================================
Total params: 1,980
Trainable params: 1,980
Non-trainable params: 0
_________________________________________________________________
None
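
A quick follow-up sketch, not executed above: because the recipe passes seed=None, the network starts from different weights on every run. The snippet below, assuming the same standalone Keras install used in the recipe, fixes the seed for a reproducible draw and uses get_weights() to confirm the kernel matches the requested N(0.0, 0.05**2); the seed value 42 is illustrative.

import numpy as np
from keras.initializers import RandomNormal
from keras.models import Sequential
from keras.layers import Dense

# same first layer as the recipe, but with a fixed (illustrative) seed
model = Sequential()
model.add(Dense(45, input_dim=20,
                kernel_initializer=RandomNormal(mean=0.0, stddev=0.05, seed=42)))

kernel, bias = model.layers[0].get_weights()
print(kernel.shape)                 # (20, 45): one row per input feature
print(kernel.mean(), kernel.std())  # should land close to 0.0 and 0.05
print(bias.min(), bias.max())       # Dense biases default to zeros

Re-running this with the same seed should reproduce the same starting kernel, up to any backend-level nondeterminism.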