In [ ]:
## How to use GridSearchCV in Deep Learning using Keras

def Learn_By_Example_314(): 

    print()
    print(format('How to use GridSearchCV in Deep Learning using Keras','*^82'))

    import warnings
    warnings.filterwarnings("ignore")

    # load libraries
    import keras as K
    #from keras.callbacks import EarlyStopping, ModelCheckpoint
    from keras.initializers import VarianceScaling
    from keras.regularizers import l2
    from keras.models import Sequential
    from keras.layers import Dense
    from sklearn import datasets    
    from sklearn.model_selection import train_test_split
    from keras.wrappers.scikit_learn import KerasClassifier
    from sklearn.model_selection import GridSearchCV
    from sklearn.metrics import confusion_matrix, classification_report, accuracy_score
    
    # simulated data
    dataset = datasets.make_classification(n_samples=10000, n_features=20, n_informative=5, 
                n_redundant=2, n_repeated=0, n_classes=2, n_clusters_per_class=2, 
                weights=None, flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0, 
                scale=1.0, shuffle=True, random_state=None)
    X = dataset[0];  y = dataset[1]
    print(X.shape);  print(y.shape)

    # Split Train and Test Datasets
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)    
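    # Note: no random_state is fixed in make_classification or train_test_split,
    # so the exact numbers printed below will vary from run to run.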

    # Define a Deep Learning Model
    def create_network(optimizer='RMSprop'):
        model = Sequential()
        model.add(Dense(units=36, input_shape=(X_train.shape[1],), 
                        kernel_regularizer=l2(0.001),           # weight regularizer
                        kernel_initializer=VarianceScaling(),   # initializer
                        activation='relu'))
        model.add(Dense(units=28, 
                        kernel_regularizer=l2(0.01),            # weight regularizer
                        kernel_initializer=VarianceScaling(),   # initializer                   
                        activation='relu'))
        model.add(Dense(units=1, activation='sigmoid'))
    
        # Compile the Model
        model.compile(loss='binary_crossentropy', optimizer = optimizer, 
                      metrics=['acc','mae'])    

        return model
    
    # Wrap Keras model so it can be used by scikit-learn
    neural_network = KerasClassifier(build_fn=create_network, verbose=1)
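    # The wrapper exposes the build_fn arguments (here 'optimizer') together with
    # fit() arguments such as 'epochs' and 'batch_size' as scikit-learn
    # hyperparameters, so all of them can be tuned by GridSearchCV.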
    
    # Create hyperparameter space
    epochs = [150, 200, 250]
    batches = [5, 10, 100]
    optimizers = ['rmsprop', 'adam', 'sgd']
    # Create hyperparameter options
    hyperparameters = dict(optimizer=optimizers, epochs=epochs, batch_size=batches)
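    # 3 optimizers x 3 epoch settings x 3 batch sizes = 27 candidates; with cv=2
    # that is 54 model fits, plus one final refit on the full training set.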

    # Create grid search
    grid = GridSearchCV(estimator=neural_network, param_grid=hyperparameters,
                        cv = 2, n_jobs=-1)
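    # n_jobs=-1 trains candidates in parallel worker processes; if the TensorFlow
    # backend runs on a GPU, the workers may contend for memory, in which case
    # n_jobs=1 is the safer choice.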

    # Fit grid search
    grid_result = grid.fit(X_train, y_train)
    print(); print(grid_result.best_params_)

    # Evaluate model
    print()
    y_pred = grid_result.predict(X_test)
    y_pred = (y_pred > 0.5)   # KerasClassifier.predict already returns 0/1 labels; this threshold is only a safeguard
    cm = confusion_matrix(y_test, y_pred); print("\nConfusion Matrix:\n", cm)
    cr = classification_report(y_test, y_pred); print("\nClassification Report:\n", cr)
    acc = accuracy_score(y_test, y_pred); print("\nAccuracy Score: \n", acc * 100.0)
    print("\n\nBackend: ", K.backend.backend())  
    
Learn_By_Example_314()
***************How to use GridSearchCV in Deep Learning using Keras***************
Using TensorFlow backend.
(10000, 20)
(10000,)
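
Note: `keras.wrappers.scikit_learn` has been removed from recent Keras/TensorFlow releases. The cell below is a minimal sketch of the same grid search using the separately installed `scikeras` package (`pip install scikeras`); the wrapper arguments and grid values shown here mirror the example above but are illustrative assumptions, not a tested drop-in replacement.

In [ ]:
## Sketch: the same grid search with SciKeras (assumes scikeras is installed)

from scikeras.wrappers import KerasClassifier      # maintained scikit-learn wrapper
from sklearn.model_selection import GridSearchCV
from keras.models import Sequential
from keras.layers import Dense, Input

def build_model():
    # Same 20 -> 36 -> 28 -> 1 architecture as create_network above, but left
    # uncompiled so that SciKeras can compile it with the loss/optimizer
    # passed through the wrapper.
    model = Sequential([
        Input(shape=(20,)),
        Dense(36, activation='relu'),
        Dense(28, activation='relu'),
        Dense(1, activation='sigmoid'),
    ])
    return model

clf = KerasClassifier(model=build_model, loss='binary_crossentropy', verbose=0)

param_grid = {
    'optimizer': ['rmsprop', 'adam', 'sgd'],   # routed to model.compile by SciKeras
    'epochs': [150, 200, 250],
    'batch_size': [5, 10, 100],
}

grid = GridSearchCV(estimator=clf, param_grid=param_grid, cv=2, n_jobs=1)
# grid_result = grid.fit(X_train, y_train)     # reuse X_train / y_train from above
# print(grid_result.best_params_)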