## How to set up an experiment in a Deep Learning model

def Snippet_332(): 

    print()
    print(format('How to set up an experiment in a Deep Learning model','*^82'))

    # load libraries
    from keras.datasets import mnist
    from keras.models import Sequential
    from keras.layers import Dense, Activation
    from keras.optimizers import SGD, RMSprop, Adam
    from keras.utils import to_categorical
    import matplotlib.pyplot as plt    

    # Hyperparameters for network building and the training process
    BATCH_SIZE = 128
    VERBOSE = 1
    NB_CLASSES = 10
    VALIDATION_SPLIT = 0.33
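    # VALIDATION_SPLIT = 0.33 holds out the last third of the training data for
    # validation during fit(); NB_CLASSES = 10 matches the ten MNIST digit classes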

    # load the MNIST data, already split into train and test sets
    (X_train, y_train), (X_test, y_test) = mnist.load_data()

    # X_train is 60000 images of 28x28 pixels --> reshaped to 60000 x 784
    RESHAPED = 784
    X_train = X_train.reshape(60000, RESHAPED)
    X_test = X_test.reshape(10000, RESHAPED)
    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')

    # normalize the datasets
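    # (raw pixel intensities are integers in [0, 255]; dividing by 255 maps them to [0, 1])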
    X_train /= 255
    X_test /= 255
    print(X_train.shape[0], 'train samples')
    print(X_test.shape[0], 'test samples')

    # convert class vectors to binary class matrices
    Y_train = to_categorical(y_train, NB_CLASSES)
    Y_test = to_categorical(y_test, NB_CLASSES)
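    # e.g. the label 3 becomes the one-hot vector [0, 0, 0, 1, 0, 0, 0, 0, 0, 0]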

    # ---------------------------------------------------------------------
    # setup an experiment for different OPTIMIZERs, Epoch Sizes & Units
    # and determine the best Accuracy
    # ---------------------------------------------------------------------
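    # 3 optimizers x 3 epoch counts x 3 layer widths = 27 models trained in total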
    accuracy = []
    # iterate over the optimizer classes so that a fresh optimizer instance is
    # created for every model (a single instance should not be shared across models)
    for OPTIMIZER in [SGD, RMSprop, Adam]:
        for NB_EPOCH in [5, 10, 20]:
            for N_Units_in_Multiple_Layers in [64, 128, 256]:
                model = Sequential()
                model.add(Dense(units=N_Units_in_Multiple_Layers, input_shape=(RESHAPED,)))
                model.add(Activation('relu'))
                model.add(Dense(units=N_Units_in_Multiple_Layers))
                model.add(Activation('relu'))
                model.add(Dense(units=NB_CLASSES))
                model.add(Activation('softmax'))
                model.summary()
                model.compile(loss='categorical_crossentropy',
                              optimizer=OPTIMIZER(), metrics=['accuracy'])
                model.fit(X_train, Y_train, batch_size=BATCH_SIZE, epochs=NB_EPOCH,
                          verbose=VERBOSE, validation_split=VALIDATION_SPLIT)
                score = model.evaluate(X_test, Y_test, verbose=VERBOSE)
                print()
                print('Optimizer: ', OPTIMIZER.__name__)
                print('Epochs: ', NB_EPOCH)
                print('Units per hidden layer: ', N_Units_in_Multiple_Layers)
                print('Test loss:', score[0])
                print('Test accuracy:', score[1])
                accuracy.append(score[1])
                print()
    
    print(accuracy)

    # bar chart of the test accuracy achieved by each of the 27 configurations
    x = range(len(accuracy))
    plt.bar(x, accuracy, width=1. / 1.5)
    plt.show()

Snippet_332()
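
# The loop above only collects the 27 test accuracies in a flat list, so the bar
# chart alone does not say which optimizer/epoch/unit combination scored best.
# The sketch below is one minimal, illustrative way to keep that mapping: the
# function name run_experiment and the results dictionary are not part of the
# original snippet, but the hyperparameter grid, data preparation and network
# shape are the same as above.

def run_experiment(optimizer_classes=None, epoch_choices=(5, 10, 20),
                   unit_choices=(64, 128, 256)):
    from keras.datasets import mnist
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.optimizers import SGD, RMSprop, Adam
    from keras.utils import to_categorical

    if optimizer_classes is None:
        optimizer_classes = (SGD, RMSprop, Adam)

    # load and preprocess MNIST exactly as in Snippet_332
    (X_train, y_train), (X_test, y_test) = mnist.load_data()
    X_train = X_train.reshape(-1, 784).astype('float32') / 255
    X_test = X_test.reshape(-1, 784).astype('float32') / 255
    Y_train = to_categorical(y_train, 10)
    Y_test = to_categorical(y_test, 10)

    results = {}  # (optimizer name, epochs, units) -> test accuracy
    for OptClass in optimizer_classes:
        for epochs in epoch_choices:
            for units in unit_choices:
                model = Sequential([
                    Dense(units, activation='relu', input_shape=(784,)),
                    Dense(units, activation='relu'),
                    Dense(10, activation='softmax'),
                ])
                model.compile(loss='categorical_crossentropy',
                              optimizer=OptClass(), metrics=['accuracy'])
                model.fit(X_train, Y_train, batch_size=128, epochs=epochs,
                          verbose=0, validation_split=0.33)
                _, test_acc = model.evaluate(X_test, Y_test, verbose=0)
                results[(OptClass.__name__, epochs, units)] = test_acc

    # the key with the highest accuracy identifies the best configuration
    best = max(results, key=results.get)
    print('Best configuration (optimizer, epochs, units):', best)
    print('Best test accuracy:', results[best])
    return results

# run_experiment()  # uncomment to run the full 27-model grid (slow)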