In [2]:
# How to predict a timeseries using Multi Layer Perceptron in Keras

def Snippet_393(): 

    print()
    print(format('How to predict a timeseries using Multi Layer Perceptron in Keras','*^92'))

    # load libraries
    import pandas, time
    import numpy as np
    from keras.layers import Dense, Dropout
    from keras.optimizers import RMSprop
    from keras.models import Sequential
    import matplotlib.pyplot as plt

    start_time = time.time()    

    # load the dataset
    dataframe = pandas.read_csv('international-airline-passengers.csv', usecols=[1], 
                                engine='python', skipfooter=3)
    dataset = dataframe.values.astype('float32')

    # split into train and test sets
    train_size = int(len(dataset) * 0.67) 
    train_dataset, test_dataset = dataset[0:train_size,:], dataset[train_size:len(dataset),:]

    # window size: how many previous timesteps form each input sample
    step_back = 3
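    # each input sample holds the previous step_back observations and the target is
    # the next value, e.g. the first training window below is [112, 118, 132] -> 129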
    X_train, Y_train = [], []
    for i in range(len(train_dataset)-step_back - 1):
        a = train_dataset[i:(i+step_back), 0]
        X_train.append(a)
        Y_train.append(train_dataset[i + step_back, 0])
    X_train, Y_train = np.array(X_train), np.array(Y_train)
    
    X_test, Y_test = [], []
    for i in range(len(test_dataset)-step_back - 1):
        a = test_dataset[i:(i+step_back), 0]
        X_test.append(a)
        Y_test.append(test_dataset[i + step_back, 0])
    X_test, Y_test = np.array(X_test), np.array(Y_test)

    print(X_train); print(Y_train)
    print(X_test); print(Y_test)

    # -------------------------------------
    # setup a MLP network in keras
    # -------------------------------------
    model = Sequential()
    model.add(Dense(units=128, input_dim=step_back, activation='relu'))
    model.add(Dropout(0.2))
    model.add(Dense(units=64, activation='relu'))
    model.add(Dropout(0.2))
    model.add(Dense(units=1, activation='linear'))
    model.summary()
    model.compile(loss='mean_squared_error', optimizer=RMSprop(lr=0.01))
    model.fit(X_train, Y_train, epochs=200, batch_size=2, verbose=2)    

    # Estimate model performance
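    # model.evaluate returns the compiled loss (mean squared error); its square
    # root gives the RMSE in the original passenger units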
    print()
    trainScore = model.evaluate(X_train, Y_train, verbose=1)
    print('Train Score: %.2f MSE (%.2f RMSE)' % (trainScore, np.sqrt(trainScore)))
    testScore = model.evaluate(X_test, Y_test, verbose=1)
    print('Test Score: %.2f MSE (%.2f RMSE)' % (testScore, np.sqrt(testScore))) 

    # generate predictions for the train and test windows
    trainPredict = model.predict(X_train)
    testPredict = model.predict(X_test)

    # shift train predictions for plotting
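    # the first prediction corresponds to timestep step_back, so pad the front of the
    # plot array with NaNs to keep predictions aligned with the original series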
    trainPredictPlot = np.empty_like(dataset)
    trainPredictPlot[:, :] = np.nan
    trainPredictPlot[step_back:len(trainPredict)+step_back, :] = trainPredict

    # shift test predictions for plotting
    testPredictPlot = np.empty_like(dataset)
    testPredictPlot[:, :] = np.nan
    testPredictPlot[len(trainPredict)+(step_back*2)+1:len(dataset)-1, :] = testPredict

    # plot baseline and predictions
    plt.plot(dataset)
    plt.plot(trainPredictPlot)
    plt.plot(testPredictPlot)
    plt.show()

    print(); print("Execution Time: %s seconds" % (time.time() - start_time))

Snippet_393()
*************How to predict a timeseries using Multi Layer Perceptron in Keras**************
[[112. 118. 132.]
 [118. 132. 129.]
 [132. 129. 121.]
 [129. 121. 135.]
 [121. 135. 148.]
 [135. 148. 148.]
 [148. 148. 136.]
 [148. 136. 119.]
 [136. 119. 104.]
 [119. 104. 118.]
 [104. 118. 115.]
 [118. 115. 126.]
 [115. 126. 141.]
 [126. 141. 135.]
 [141. 135. 125.]
 [135. 125. 149.]
 [125. 149. 170.]
 [149. 170. 170.]
 [170. 170. 158.]
 [170. 158. 133.]
 [158. 133. 114.]
 [133. 114. 140.]
 [114. 140. 145.]
 [140. 145. 150.]
 [145. 150. 178.]
 [150. 178. 163.]
 [178. 163. 172.]
 [163. 172. 178.]
 [172. 178. 199.]
 [178. 199. 199.]
 [199. 199. 184.]
 [199. 184. 162.]
 [184. 162. 146.]
 [162. 146. 166.]
 [146. 166. 171.]
 [166. 171. 180.]
 [171. 180. 193.]
 [180. 193. 181.]
 [193. 181. 183.]
 [181. 183. 218.]
 [183. 218. 230.]
 [218. 230. 242.]
 [230. 242. 209.]
 [242. 209. 191.]
 [209. 191. 172.]
 [191. 172. 194.]
 [172. 194. 196.]
 [194. 196. 196.]
 [196. 196. 236.]
 [196. 236. 235.]
 [236. 235. 229.]
 [235. 229. 243.]
 [229. 243. 264.]
 [243. 264. 272.]
 [264. 272. 237.]
 [272. 237. 211.]
 [237. 211. 180.]
 [211. 180. 201.]
 [180. 201. 204.]
 [201. 204. 188.]
 [204. 188. 235.]
 [188. 235. 227.]
 [235. 227. 234.]
 [227. 234. 264.]
 [234. 264. 302.]
 [264. 302. 293.]
 [302. 293. 259.]
 [293. 259. 229.]
 [259. 229. 203.]
 [229. 203. 229.]
 [203. 229. 242.]
 [229. 242. 233.]
 [242. 233. 267.]
 [233. 267. 269.]
 [267. 269. 270.]
 [269. 270. 315.]
 [270. 315. 364.]
 [315. 364. 347.]
 [364. 347. 312.]
 [347. 312. 274.]
 [312. 274. 237.]
 [274. 237. 278.]
 [237. 278. 284.]
 [278. 284. 277.]
 [284. 277. 317.]
 [277. 317. 313.]
 [317. 313. 318.]
 [313. 318. 374.]
 [318. 374. 413.]
 [374. 413. 405.]]
[129. 121. 135. 148. 148. 136. 119. 104. 118. 115. 126. 141. 135. 125.
 149. 170. 170. 158. 133. 114. 140. 145. 150. 178. 163. 172. 178. 199.
 199. 184. 162. 146. 166. 171. 180. 193. 181. 183. 218. 230. 242. 209.
 191. 172. 194. 196. 196. 236. 235. 229. 243. 264. 272. 237. 211. 180.
 201. 204. 188. 235. 227. 234. 264. 302. 293. 259. 229. 203. 229. 242.
 233. 267. 269. 270. 315. 364. 347. 312. 274. 237. 278. 284. 277. 317.
 313. 318. 374. 413. 405. 355.]
[[271. 306. 315.]
 [306. 315. 301.]
 [315. 301. 356.]
 [301. 356. 348.]
 [356. 348. 355.]
 [348. 355. 422.]
 [355. 422. 465.]
 [422. 465. 467.]
 [465. 467. 404.]
 [467. 404. 347.]
 [404. 347. 305.]
 [347. 305. 336.]
 [305. 336. 340.]
 [336. 340. 318.]
 [340. 318. 362.]
 [318. 362. 348.]
 [362. 348. 363.]
 [348. 363. 435.]
 [363. 435. 491.]
 [435. 491. 505.]
 [491. 505. 404.]
 [505. 404. 359.]
 [404. 359. 310.]
 [359. 310. 337.]
 [310. 337. 360.]
 [337. 360. 342.]
 [360. 342. 406.]
 [342. 406. 396.]
 [406. 396. 420.]
 [396. 420. 472.]
 [420. 472. 548.]
 [472. 548. 559.]
 [548. 559. 463.]
 [559. 463. 407.]
 [463. 407. 362.]
 [407. 362. 405.]
 [362. 405. 417.]
 [405. 417. 391.]
 [417. 391. 419.]
 [391. 419. 461.]
 [419. 461. 472.]
 [461. 472. 535.]
 [472. 535. 622.]]
[301. 356. 348. 355. 422. 465. 467. 404. 347. 305. 336. 340. 318. 362.
 348. 363. 435. 491. 505. 404. 359. 310. 337. 360. 342. 406. 396. 420.
 472. 548. 559. 463. 407. 362. 405. 417. 391. 419. 461. 472. 535. 622.
 606.]
Model: "sequential_2"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_4 (Dense)              (None, 128)               512       
_________________________________________________________________
dropout_3 (Dropout)          (None, 128)               0         
_________________________________________________________________
dense_5 (Dense)              (None, 64)                8256      
_________________________________________________________________
dropout_4 (Dropout)          (None, 64)                0         
_________________________________________________________________
dense_6 (Dense)              (None, 1)                 65        
=================================================================
Total params: 8,833
Trainable params: 8,833
Non-trainable params: 0
_________________________________________________________________
Epoch 1/200
 - 0s - loss: 6354.5844
Epoch 2/200
 - 0s - loss: 5566.5309
Epoch 3/200
 - 0s - loss: 5082.4953
Epoch 4/200
 - 0s - loss: 4449.6223
Epoch 5/200
 - 0s - loss: 3666.6545
Epoch 6/200
 - 0s - loss: 3946.5469
Epoch 7/200
 - 0s - loss: 3870.3984
Epoch 8/200
 - 0s - loss: 4043.7465
Epoch 9/200
 - 0s - loss: 3828.6764
Epoch 10/200
 - 0s - loss: 5230.2226
Epoch 11/200
 - 0s - loss: 2759.2533
Epoch 12/200
 - 0s - loss: 3356.7849
Epoch 13/200
 - 0s - loss: 2709.3018
Epoch 14/200
 - 0s - loss: 3189.2771
Epoch 15/200
 - 0s - loss: 3559.5434
Epoch 16/200
 - 0s - loss: 3806.9122
Epoch 17/200
 - 0s - loss: 3401.0855
Epoch 18/200
 - 0s - loss: 3884.6998
Epoch 19/200
 - 0s - loss: 3142.9645
Epoch 20/200
 - 0s - loss: 3779.3910
Epoch 21/200
 - 0s - loss: 3485.0579
Epoch 22/200
 - 0s - loss: 4016.6426
Epoch 23/200
 - 0s - loss: 3180.6196
Epoch 24/200
 - 0s - loss: 3118.8854
Epoch 25/200
 - 0s - loss: 2062.1820
Epoch 26/200
 - 0s - loss: 3338.2761
Epoch 27/200
 - 0s - loss: 3304.5139
Epoch 28/200
 - 0s - loss: 3372.5236
Epoch 29/200
 - 0s - loss: 3256.1886
Epoch 30/200
 - 0s - loss: 2490.0734
Epoch 31/200
 - 0s - loss: 2788.9292
Epoch 32/200
 - 0s - loss: 2349.0277
Epoch 33/200
 - 0s - loss: 3197.8806
Epoch 34/200
 - 0s - loss: 2584.1851
Epoch 35/200
 - 0s - loss: 3443.4325
Epoch 36/200
 - 0s - loss: 3202.4100
Epoch 37/200
 - 0s - loss: 1839.3865
Epoch 38/200
 - 0s - loss: 2157.3359
Epoch 39/200
 - 0s - loss: 2223.4793
Epoch 40/200
 - 0s - loss: 3215.6784
Epoch 41/200
 - 0s - loss: 2042.0434
Epoch 42/200
 - 0s - loss: 2015.3053
Epoch 43/200
 - 0s - loss: 2604.8630
Epoch 44/200
 - 0s - loss: 2562.2341
Epoch 45/200
 - 0s - loss: 2671.7176
Epoch 46/200
 - 0s - loss: 2878.0134
Epoch 47/200
 - 0s - loss: 2316.4201
Epoch 48/200
 - 0s - loss: 2554.1492
Epoch 49/200
 - 0s - loss: 2334.6548
Epoch 50/200
 - 0s - loss: 4052.9886
Epoch 51/200
 - 0s - loss: 2144.9307
Epoch 52/200
 - 0s - loss: 2234.6918
Epoch 53/200
 - 0s - loss: 3125.6838
Epoch 54/200
 - 0s - loss: 2698.3071
Epoch 55/200
 - 0s - loss: 2386.8273
Epoch 56/200
 - 0s - loss: 2927.1231
Epoch 57/200
 - 0s - loss: 2243.9478
Epoch 58/200
 - 0s - loss: 4361.3555
Epoch 59/200
 - 0s - loss: 3542.1414
Epoch 60/200
 - 0s - loss: 2399.3987
Epoch 61/200
 - 0s - loss: 3729.1421
Epoch 62/200
 - 0s - loss: 2389.5434
Epoch 63/200
 - 0s - loss: 3180.9783
Epoch 64/200
 - 0s - loss: 4080.2282
Epoch 65/200
 - 0s - loss: 3115.0781
Epoch 66/200
 - 0s - loss: 2723.7833
Epoch 67/200
 - 0s - loss: 2016.8794
Epoch 68/200
 - 0s - loss: 2286.5704
Epoch 69/200
 - 0s - loss: 3434.7671
Epoch 70/200
 - 0s - loss: 1895.5511
Epoch 71/200
 - 0s - loss: 2694.6115
Epoch 72/200
 - 0s - loss: 2081.7948
Epoch 73/200
 - 0s - loss: 2487.2454
Epoch 74/200
 - 0s - loss: 2380.0058
Epoch 75/200
 - 0s - loss: 2915.8262
Epoch 76/200
 - 0s - loss: 2285.3425
Epoch 77/200
 - 0s - loss: 2718.7222
Epoch 78/200
 - 0s - loss: 2218.0599
Epoch 79/200
 - 0s - loss: 1335.4732
Epoch 80/200
 - 0s - loss: 2687.3900
Epoch 81/200
 - 0s - loss: 1688.6945
Epoch 82/200
 - 0s - loss: 3121.3808
Epoch 83/200
 - 0s - loss: 2450.6940
Epoch 84/200
 - 0s - loss: 2584.6751
Epoch 85/200
 - 0s - loss: 1596.6207
Epoch 86/200
 - 0s - loss: 2056.3232
Epoch 87/200
 - 0s - loss: 1601.0249
Epoch 88/200
 - 0s - loss: 2195.7698
Epoch 89/200
 - 0s - loss: 1504.7397
Epoch 90/200
 - 0s - loss: 1799.5510
Epoch 91/200
 - 0s - loss: 1512.3750
Epoch 92/200
 - 0s - loss: 2063.4701
Epoch 93/200
 - 0s - loss: 2243.1225
Epoch 94/200
 - 0s - loss: 2980.0812
Epoch 95/200
 - 0s - loss: 2407.6867
Epoch 96/200
 - 0s - loss: 1849.2725
Epoch 97/200
 - 0s - loss: 1941.2531
Epoch 98/200
 - 0s - loss: 1842.1047
Epoch 99/200
 - 0s - loss: 1960.0866
Epoch 100/200
 - 0s - loss: 2458.2286
Epoch 101/200
 - 0s - loss: 1728.0503
Epoch 102/200
 - 0s - loss: 1563.5309
Epoch 103/200
 - 0s - loss: 1893.7207
Epoch 104/200
 - 0s - loss: 2153.5075
Epoch 105/200
 - 0s - loss: 1786.5785
Epoch 106/200
 - 0s - loss: 2333.3854
Epoch 107/200
 - 0s - loss: 1503.5948
Epoch 108/200
 - 0s - loss: 2481.8172
Epoch 109/200
 - 0s - loss: 1803.7798
Epoch 110/200
 - 0s - loss: 2591.3737
Epoch 111/200
 - 0s - loss: 1913.4576
Epoch 112/200
 - 0s - loss: 2102.7659
Epoch 113/200
 - 0s - loss: 3217.4230
Epoch 114/200
 - 0s - loss: 1728.4393
Epoch 115/200
 - 0s - loss: 2321.9078
Epoch 116/200
 - 0s - loss: 1825.0173
Epoch 117/200
 - 0s - loss: 2155.9577
Epoch 118/200
 - 0s - loss: 2146.6041
Epoch 119/200
 - 0s - loss: 2143.9984
Epoch 120/200
 - 0s - loss: 1919.2772
Epoch 121/200
 - 0s - loss: 1776.2389
Epoch 122/200
 - 0s - loss: 2390.7390
Epoch 123/200
 - 0s - loss: 2082.8445
Epoch 124/200
 - 0s - loss: 1882.1401
Epoch 125/200
 - 0s - loss: 1882.6851
Epoch 126/200
 - 0s - loss: 1912.8949
Epoch 127/200
 - 0s - loss: 2085.8628
Epoch 128/200
 - 0s - loss: 1745.8387
Epoch 129/200
 - 0s - loss: 2014.5907
Epoch 130/200
 - 0s - loss: 2013.0796
Epoch 131/200
 - 0s - loss: 2180.3989
Epoch 132/200
 - 0s - loss: 1967.7064
Epoch 133/200
 - 0s - loss: 2377.6408
Epoch 134/200
 - 0s - loss: 1718.1360
Epoch 135/200
 - 0s - loss: 2206.1255
Epoch 136/200
 - 0s - loss: 1927.2459
Epoch 137/200
 - 0s - loss: 1942.6245
Epoch 138/200
 - 0s - loss: 2314.1142
Epoch 139/200
 - 0s - loss: 1704.2215
Epoch 140/200
 - 0s - loss: 2008.6186
Epoch 141/200
 - 0s - loss: 2124.6220
Epoch 142/200
 - 0s - loss: 1958.3750
Epoch 143/200
 - 0s - loss: 1316.7124
Epoch 144/200
 - 0s - loss: 2499.0454
Epoch 145/200
 - 0s - loss: 2178.3681
Epoch 146/200
 - 0s - loss: 2076.4565
Epoch 147/200
 - 0s - loss: 2332.2561
Epoch 148/200
 - 0s - loss: 2396.5090
Epoch 149/200
 - 0s - loss: 1948.2209
Epoch 150/200
 - 0s - loss: 1897.5689
Epoch 151/200
 - 0s - loss: 1879.5527
Epoch 152/200
 - 0s - loss: 1691.0462
Epoch 153/200
 - 0s - loss: 1428.3973
Epoch 154/200
 - 0s - loss: 2047.7820
Epoch 155/200
 - 0s - loss: 1472.2184
Epoch 156/200
 - 0s - loss: 1339.1820
Epoch 157/200
 - 0s - loss: 2172.0253
Epoch 158/200
 - 0s - loss: 1833.3150
Epoch 159/200
 - 0s - loss: 1603.4441
Epoch 160/200
 - 0s - loss: 1848.5497
Epoch 161/200
 - 0s - loss: 1614.1222
Epoch 162/200
 - 0s - loss: 1803.2305
Epoch 163/200
 - 0s - loss: 1814.6265
Epoch 164/200
 - 0s - loss: 1290.7118
Epoch 165/200
 - 0s - loss: 2335.6294
Epoch 166/200
 - 0s - loss: 2366.2078
Epoch 167/200
 - 0s - loss: 2010.9240
Epoch 168/200
 - 0s - loss: 1987.9208
Epoch 169/200
 - 0s - loss: 1707.3889
Epoch 170/200
 - 0s - loss: 1302.8242
Epoch 171/200
 - 0s - loss: 1989.8813
Epoch 172/200
 - 0s - loss: 1560.0514
Epoch 173/200
 - 0s - loss: 1595.0122
Epoch 174/200
 - 0s - loss: 1801.6399
Epoch 175/200
 - 0s - loss: 1851.7778
Epoch 176/200
 - 0s - loss: 2110.4372
Epoch 177/200
 - 0s - loss: 1954.8976
Epoch 178/200
 - 0s - loss: 1981.0584
Epoch 179/200
 - 0s - loss: 2468.8296
Epoch 180/200
 - 0s - loss: 2028.8767
Epoch 181/200
 - 0s - loss: 2660.9672
Epoch 182/200
 - 0s - loss: 1644.5094
Epoch 183/200
 - 0s - loss: 1911.4367
Epoch 184/200
 - 0s - loss: 1390.3696
Epoch 185/200
 - 0s - loss: 1963.5426
Epoch 186/200
 - 0s - loss: 2200.4783
Epoch 187/200
 - 0s - loss: 2148.2751
Epoch 188/200
 - 0s - loss: 1898.7714
Epoch 189/200
 - 0s - loss: 1508.2441
Epoch 190/200
 - 0s - loss: 1649.5088
Epoch 191/200
 - 0s - loss: 1615.6613
Epoch 192/200
 - 0s - loss: 1747.1083
Epoch 193/200
 - 0s - loss: 1594.3914
Epoch 194/200
 - 0s - loss: 2092.5640
Epoch 195/200
 - 0s - loss: 1660.8906
Epoch 196/200
 - 0s - loss: 1948.0120
Epoch 197/200
 - 0s - loss: 2284.4878
Epoch 198/200
 - 0s - loss: 2079.4778
Epoch 199/200
 - 0s - loss: 2581.5155
Epoch 200/200
 - 0s - loss: 1963.9278

90/90 [==============================] - 0s 343us/step
Train Score: 1996.88 MSE (44.69 RMSE)
43/43 [==============================] - 0s 74us/step
Test Score: 10173.09 MSE (100.86 RMSE)
Execution Time: 8.096296072006226 seconds
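
As a possible follow-up (not part of the snippet above), the trained network can be rolled forward to forecast beyond the observed series by feeding each prediction back in as the newest value of the window. The sketch below is only an illustration: it assumes Snippet_393() is modified to return its model and dataset (both are local variables in the version above), and the 6-step horizon is arbitrary.

import numpy as np

def forecast_ahead(model, series, step_back=3, horizon=6):
    # roll the model forward: predict one step, append it to the window, repeat
    window = list(series[-step_back:, 0])           # last step_back observed values
    forecasts = []
    for _ in range(horizon):
        x = np.array(window[-step_back:], dtype='float32').reshape(1, step_back)
        next_value = float(model.predict(x)[0, 0])  # single-step prediction
        forecasts.append(next_value)
        window.append(next_value)                   # feed the prediction back in
    return forecasts

# usage (hypothetical, assuming Snippet_393 returns model and dataset):
# model, dataset = Snippet_393()
# print(forecast_ahead(model, dataset, step_back=3, horizon=6))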