For more projects visit: https://setscholars.net
# Suppress warnings in Jupyter Notebooks
import warnings
warnings.filterwarnings("ignore")

import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')

import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from sklearn.model_selection import train_test_split

import autokeras as ak
print(ak.__version__)

# Silence TensorFlow's logger as well
import logging
tf.get_logger().setLevel(logging.ERROR)
1.0.16
# OpenML dataset ID
whichDataset = 12  # provide dataset id (12 = mfeat-factors)

import openml
dataset = openml.datasets.get_dataset(whichDataset)

# Print a summary
print(
    f"This is dataset '{dataset.name}', the target feature is "
    f"'{dataset.default_target_attribute}'"
)
print(f"URL: {dataset.url}")
print(dataset.description)
This is dataset 'mfeat-factors', the target feature is 'class'
URL: https://www.openml.org/data/v1/download/12/mfeat-factors.arff

**Author**: Robert P.W. Duin, Department of Applied Physics, Delft University of Technology
**Source**: [UCI](https://archive.ics.uci.edu/ml/datasets/Multiple+Features) - 1998
**Please cite**: [UCI](https://archive.ics.uci.edu/ml/citation_policy.html)

**Multiple Features Dataset: Factors**
One of a set of 6 datasets describing features of handwritten numerals (0 - 9) extracted from a collection of Dutch utility maps. Corresponding patterns in different datasets correspond to the same original character. 200 instances per class (for a total of 2,000 instances) have been digitized in binary images.

### Attribute Information
The attributes represent 216 profile correlations. No more information is known.

### Relevant Papers
A slightly different version of the database is used in: M. van Breukelen, R.P.W. Duin, D.M.J. Tax, and J.E. den Hartog, Handwritten digit recognition by combined classifiers, Kybernetika, vol. 34, no. 4, 1998, 381-386.
The database as is is used in: A.K. Jain, R.P.W. Duin, J. Mao, Statistical Pattern Recognition: A Review, IEEE Transactions on Pattern Analysis and Machine Intelligence, Volume 22, Issue 1, January 2000.
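As a side note (not part of the original notebook), the same `openml` package can also list the dataset catalogue, which helps when you only know a dataset by name rather than by ID. A minimal sketch, assuming a recent openml release:

```python
import openml

# Sketch: browse the OpenML catalogue when you don't know a dataset ID.
# (output_format="dataframe" is supported by recent openml releases)
catalogue = openml.datasets.list_datasets(size=10, output_format="dataframe")
print(catalogue[["did", "name", "NumberOfInstances"]])
```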
X, y, categorical_indicator, attribute_names = dataset.get_data(
    dataset_format="array", target=dataset.default_target_attribute)
dataset = pd.DataFrame(X, columns=attribute_names)
dataset["target"] = y
print(); print(dataset.shape)
print(); print(dataset.head())
print(); print(dataset.columns.values)
(2000, 217)

    att1   att2   att3   att4   att5   att6  att7  att8  att9  att10  ...  \
0   98.0  236.0  531.0  673.0  607.0  647.0   2.0   9.0   3.0    6.0  ...
1  121.0  193.0  607.0  611.0  585.0  665.0   7.0   9.0   2.0    4.0  ...
2  115.0  141.0  590.0  605.0  557.0  627.0  12.0   6.0   3.0    3.0  ...
3   90.0  122.0  627.0  692.0  607.0  642.0   0.0   6.0   4.0    5.0  ...
4  157.0  167.0  681.0  666.0  587.0  666.0   8.0   6.0   1.0    4.0  ...

   att208  att209  att210  att211  att212  att213  att214  att215  att216  \
0   536.0   628.0   632.0    18.0    36.0     8.0    15.0    12.0    13.0
1   458.0   570.0   634.0    15.0    32.0    11.0    13.0    15.0    11.0
2   498.0   572.0   656.0    20.0    35.0    16.0    14.0    13.0     6.0
3   549.0   628.0   621.0    16.0    35.0     7.0    12.0    15.0     9.0
4   525.0   568.0   653.0    16.0    35.0    10.0    15.0    13.0    13.0

   target
0       0
1       0
2       0
3       0
4       0

[5 rows x 217 columns]

['att1' 'att2' 'att3' 'att4' 'att5' 'att6' 'att7' 'att8' 'att9' 'att10'
 'att11' 'att12' 'att13' 'att14' 'att15' 'att16' 'att17' 'att18' 'att19' 'att20'
 'att21' 'att22' 'att23' 'att24' 'att25' 'att26' 'att27' 'att28' 'att29' 'att30'
 'att31' 'att32' 'att33' 'att34' 'att35' 'att36' 'att37' 'att38' 'att39' 'att40'
 'att41' 'att42' 'att43' 'att44' 'att45' 'att46' 'att47' 'att48' 'att49' 'att50'
 'att51' 'att52' 'att53' 'att54' 'att55' 'att56' 'att57' 'att58' 'att59' 'att60'
 'att61' 'att62' 'att63' 'att64' 'att65' 'att66' 'att67' 'att68' 'att69' 'att70'
 'att71' 'att72' 'att73' 'att74' 'att75' 'att76' 'att77' 'att78' 'att79' 'att80'
 'att81' 'att82' 'att83' 'att84' 'att85' 'att86' 'att87' 'att88' 'att89' 'att90'
 'att91' 'att92' 'att93' 'att94' 'att95' 'att96' 'att97' 'att98' 'att99' 'att100'
 'att101' 'att102' 'att103' 'att104' 'att105' 'att106' 'att107' 'att108' 'att109' 'att110'
 'att111' 'att112' 'att113' 'att114' 'att115' 'att116' 'att117' 'att118' 'att119' 'att120'
 'att121' 'att122' 'att123' 'att124' 'att125' 'att126' 'att127' 'att128' 'att129' 'att130'
 'att131' 'att132' 'att133' 'att134' 'att135' 'att136' 'att137' 'att138' 'att139' 'att140'
 'att141' 'att142' 'att143' 'att144' 'att145' 'att146' 'att147' 'att148' 'att149' 'att150'
 'att151' 'att152' 'att153' 'att154' 'att155' 'att156' 'att157' 'att158' 'att159' 'att160'
 'att161' 'att162' 'att163' 'att164' 'att165' 'att166' 'att167' 'att168' 'att169' 'att170'
 'att171' 'att172' 'att173' 'att174' 'att175' 'att176' 'att177' 'att178' 'att179' 'att180'
 'att181' 'att182' 'att183' 'att184' 'att185' 'att186' 'att187' 'att188' 'att189' 'att190'
 'att191' 'att192' 'att193' 'att194' 'att195' 'att196' 'att197' 'att198' 'att199' 'att200'
 'att201' 'att202' 'att203' 'att204' 'att205' 'att206' 'att207' 'att208' 'att209' 'att210'
 'att211' 'att212' 'att213' 'att214' 'att215' 'att216' 'target']
# find missing values in data frame
print()
print(dataset.isnull().sum().sum())
# group by 'target'
#print()
#print(dataset.groupby('target').count())
0
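The commented-out group-by above would show per-class counts; a lighter-weight check (a sketch, not in the original run) is value_counts on the target column:

```python
# Sketch: per-class instance counts; mfeat-factors should show 200 per digit class.
print(dataset['target'].value_counts().sort_index())
```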
# split off an unseen hold-out set from the data used for modeling
data = dataset.sample(frac=0.80, random_state=1234)
data_unseen = dataset.drop(data.index)
data.reset_index(inplace=True, drop=True)
data_unseen.reset_index(inplace=True, drop=True)
print('Data for Modeling: ' + str(data.shape))
print('Unseen Data For Predictions: ' + str(data_unseen.shape))
Data for Modeling: (1600, 217)
Unseen Data For Predictions: (400, 217)
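Note that this 80/20 hold-out is a plain random sample. If you want the unseen split to preserve the 200-per-class balance, a stratified alternative (a sketch, not what the notebook actually uses) is:

```python
from sklearn.model_selection import train_test_split

# Sketch: stratified hold-out split that preserves per-class proportions.
data_s, data_unseen_s = train_test_split(
    dataset, test_size=0.20, random_state=1234, stratify=dataset['target'])
print(data_s.shape, data_unseen_s.shape)
```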
import pandas_profiling
#dataset.profile_report()

#import sweetviz as sv
#sweet_report = sv.analyze(dataset)
#sweet_report.show_notebook(layout='vertical', w=880, h=1000, scale=0.8)
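If you want to actually run the profiling step, a minimal sketch (assuming pandas-profiling v2+; the report title and filename here are arbitrary) looks like this:

```python
from pandas_profiling import ProfileReport

# Sketch: build an HTML profiling report for the modeling data and write it to disk.
# minimal=True keeps the report fast on wide datasets like this one.
profile = ProfileReport(data, title="mfeat-factors profile", minimal=True)
profile.to_file("mfeat_factors_profile.html")
```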
y = dataset.pop('target')
X = dataset
print(y.shape)
print(X.shape)
(2000,)
(2000, 216)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20, random_state=42)
print(); print("Training Dataset:")
print(); print(X_train.shape)
print(); print(X_train.head())
print(); print(y_train.shape)
print(); print(y_train.head())
print(); print("\n\nTesting Dataset:")
print(); print(X_test.shape)
print(); print(X_test.head())
print(); print(y_test.shape)
print(); print(y_test.head())
Training Dataset:

(1600, 216)

      att1   att2    att3   att4   att5   att6  att7  att8  att9  att10  ...  \
968  404.0  320.0   833.0  756.0  584.0  523.0  25.0  19.0  12.0   10.0  ...
240  289.0  469.0  1049.0  620.0  604.0  657.0  36.0  20.0   8.0   15.0  ...
819  385.0  457.0   889.0  815.0  719.0  545.0  29.0  26.0  14.0   14.0  ...
692  202.0  372.0   789.0  701.0  595.0  690.0  23.0  18.0  13.0   13.0  ...
420  199.0  269.0   745.0  826.0  625.0  697.0  13.0  19.0  25.0    4.0  ...

     att207  att208  att209  att210  att211  att212  att213  att214  att215  \
968   792.0   681.0   563.0   608.0    25.0    12.0    21.0    11.0    15.0
240  1000.0   509.0   543.0   760.0    20.0    11.0    19.0    12.0    17.0
819   816.0   732.0   660.0   556.0    19.0    13.0    19.0     9.0    12.0
692   746.0   572.0   642.0   715.0    23.0    21.0    18.0    12.0    17.0
420   704.0   653.0   636.0   692.0     5.0    24.0    28.0    15.0    14.0

     att216
968    13.0
240    14.0
819     3.0
692    17.0
420    15.0

[5 rows x 216 columns]

(1600,)

968    4
240    1
819    4
692    3
420    2
Name: target, dtype: int64


Testing Dataset:

(400, 216)

       att1   att2    att3   att4   att5   att6  att7  att8  att9  att10  ...  \
1860  335.0  213.0   922.0  582.0  614.0  656.0  21.0  21.0   7.0    3.0  ...
353   197.0  525.0   984.0  596.0  576.0  709.0  37.0  28.0  13.0   16.0  ...
1333  240.0  320.0   659.0  867.0  647.0  647.0  11.0   7.0  19.0    9.0  ...
905   406.0  330.0  1001.0  689.0  725.0  597.0  30.0  26.0  22.0   11.0  ...
1289  286.0  378.0   796.0  759.0  649.0  660.0  20.0  19.0  20.0   13.0  ...

      att207  att208  att209  att210  att211  att212  att213  att214  att215  \
1860   919.0   483.0   631.0   755.0    19.0    18.0    18.0    16.0    16.0
353    933.0   497.0   505.0   774.0    21.0    11.0    16.0     9.0    12.0
1333   574.0   810.0   634.0   660.0    19.0    26.0    16.0    14.0    17.0
905    910.0   572.0   648.0   608.0    18.0    17.0    19.0    10.0    11.0
1289   695.0   678.0   644.0   673.0    28.0    16.0    13.0    10.0    15.0

      att216
1860    19.0
353     13.0
1333    11.0
905      3.0
1289    12.0

[5 rows x 216 columns]

(400,)

1860    9
353     1
1333    6
905     4
1289    6
Name: target, dtype: int64
print(y_train.dtype)
print(y_test.dtype)
int64
int64
# Cast the integer labels to strings so AutoKeras treats them as categorical classes
y_train = y_train.astype('str')
y_test = y_test.astype('str')
print(y_train.dtype)
print(y_test.dtype)
object
object
# AutoKeras searches up to 10 different model configurations (max_trials=10).
clf = ak.StructuredDataClassifier(overwrite=True, max_trials=10)
#clf = ak.StructuredDataRegressor(overwrite=True, max_trials=10)
# Feed the structured data classifier with training data.
clf.fit(X_train, y_train, validation_split=0.15, epochs=100, batch_size=32, verbose=1)
print(); print("Model training is complete ... ... ...")
Trial 10 Complete [00h 00m 16s]
val_accuracy: 0.9598214030265808

Best val_accuracy So Far: 0.9598214030265808
Total elapsed time: 00h 03m 02s

Epoch 1/100
50/50 - 2s 3ms/step - loss: 1.9238 - accuracy: 0.3479
Epoch 2/100
50/50 - 0s 3ms/step - loss: 0.6854 - accuracy: 0.7778
Epoch 3/100
50/50 - 0s 3ms/step - loss: 0.4152 - accuracy: 0.8652
...
Epoch 99/100
50/50 - 0s 3ms/step - loss: 0.0476 - accuracy: 0.9823
Epoch 100/100
50/50 - 0s 3ms/step - loss: 0.0246 - accuracy: 0.9882

[final-fit log truncated: over 100 epochs, training loss fell from 1.92 to about 0.02 and training accuracy rose from 0.35 to about 0.99]
Model training is complete ... ... ...
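A practical aside (not from the original run): AutoKeras' `fit` accepts standard Keras callbacks, so the fixed 100-epoch budget above can be cut short once validation loss stops improving. A hedged sketch:

```python
# Sketch: early stopping via a standard Keras callback; AutoKeras' fit()
# accepts a callbacks list just like tf.keras.Model.fit.
early_stop = tf.keras.callbacks.EarlyStopping(
    monitor="val_loss", patience=5, restore_best_weights=True)
clf.fit(X_train, y_train, validation_split=0.15, epochs=100,
        callbacks=[early_stop], verbose=1)
```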
clf
<autokeras.tasks.structured_data.StructuredDataClassifier at 0x7fc0cd4c2690>
# Evaluate the best model with testing data.
print(); print();
print(clf.evaluate(X_test, y_test, verbose=1))
13/13 - 1s 3ms/step - loss: 0.2935 - accuracy: 0.9600
[0.29345089197158813, 0.9599999785423279]
# For Classification
import scikitplot as skplt
from sklearn.metrics import accuracy_score, classification_report
from sklearn.metrics import cohen_kappa_score, confusion_matrix
# Predict with the best model.
predicted_y = clf.predict(X_test, verbose=0)
# Evaluate the skill of the Trained model
acc = accuracy_score(y_test, predicted_y)
classReport = classification_report(y_test, predicted_y)
confMatrix = confusion_matrix(y_test, predicted_y)
print(); print('Testing Results of the trained model: ')
print(); print('Accuracy : ', acc)
#print(); print('Confusion Matrix :\n', confMatrix)
print(); print('Classification Report :\n',classReport)
# Confusion matrix
skplt.metrics.plot_confusion_matrix(y_test, predicted_y, figsize=(11, 11))
plt.show()
Testing Results of the trained model:

Accuracy :  0.96

Classification Report :
               precision    recall  f1-score   support

           0       0.95      1.00      0.97        36
           1       0.92      1.00      0.96        47
           2       1.00      1.00      1.00        46
           3       0.94      0.89      0.91        36
           4       0.94      1.00      0.97        34
           5       0.91      0.91      0.91        34
           6       1.00      0.94      0.97        33
           7       0.98      0.95      0.97        44
           8       1.00      0.94      0.97        48
           9       0.95      0.95      0.95        42

    accuracy                           0.96       400
   macro avg       0.96      0.96      0.96       400
weighted avg       0.96      0.96      0.96       400
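Note that `cohen_kappa_score` is imported above but never used. As a sketch (not in the original output), it gives a chance-corrected view of the same predictions, which is useful on multi-class problems:

```python
# Sketch: Cohen's kappa corrects agreement for what would be expected by chance;
# values near 1.0 indicate strong agreement beyond chance.
kappa = cohen_kappa_score(y_test, predicted_y)
print("Cohen's Kappa : ", kappa)
```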
# For Regression
#print("********************************************************************")
#print(format(' Validation using Validation dataset ','*^65'))
#print("********************************************************************")
#import numpy as np
#import seaborn as sns
#import matplotlib.pyplot as plt
#import scikitplot as skplt
#from sklearn.metrics import r2_score, mean_squared_error
# Predict with the best model
#predicted_val = clf.predict(X_test, verbose = 0)
# Evaluate the skill of the Trained model
#r2 = r2_score(y_test, predicted_val)
#mse = mean_squared_error(y_test, predicted_val)
#print(); print('Testing Results of the trained model: ')
#print(); print('R Squared (r2) Score : ', r2)
#print(); print('RMSE Score : ', np.sqrt(mse))
#print()
#plt.figure(figsize = (8,8))
#sns.regplot(y_test, predicted_val, marker = "+")
#plt.show()
model = clf.export_model()
model.summary()
Model: "model"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
input_1 (InputLayer)         [(None, 216)]             0
_________________________________________________________________
multi_category_encoding (Mul (None, 216)               0
_________________________________________________________________
normalization (Normalization (None, 216)               433
_________________________________________________________________
dense (Dense)                (None, 256)               55552
_________________________________________________________________
re_lu (ReLU)                 (None, 256)               0
_________________________________________________________________
dense_1 (Dense)              (None, 32)                8224
_________________________________________________________________
re_lu_1 (ReLU)               (None, 32)                0
_________________________________________________________________
dropout (Dropout)            (None, 32)                0
_________________________________________________________________
dense_2 (Dense)              (None, 10)                330
_________________________________________________________________
classification_head_1 (Softm (None, 10)                0
=================================================================
Total params: 64,539
Trainable params: 64,106
Non-trainable params: 433
_________________________________________________________________
from tensorflow.keras.utils import plot_model
plot_model(model)  # requires pydot and graphviz installed to render the diagram
print(type(model))
try:
    # Try saving in the TensorFlow SavedModel format
    model.save("best_keras_model", save_format="tf")
except Exception:
    # Fall back to the HDF5 format if the SavedModel export fails
    model.save("best_keras_model.h5")
<class 'tensorflow.python.keras.engine.functional.Functional'>
model = tf.keras.models.load_model('best_keras_model', custom_objects=ak.CUSTOM_OBJECTS)
model.summary()
Model: "model"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
input_1 (InputLayer)         [(None, 216)]             0
_________________________________________________________________
multi_category_encoding (Mul (None, 216)               0
_________________________________________________________________
normalization (Normalization (None, 216)               433
_________________________________________________________________
dense (Dense)                (None, 256)               55552
_________________________________________________________________
re_lu (ReLU)                 (None, 256)               0
_________________________________________________________________
dense_1 (Dense)              (None, 32)                8224
_________________________________________________________________
re_lu_1 (ReLU)               (None, 32)                0
_________________________________________________________________
dropout (Dropout)            (None, 32)                0
_________________________________________________________________
dense_2 (Dense)              (None, 10)                330
_________________________________________________________________
classification_head_1 (Softm (None, 10)                0
=================================================================
Total params: 64,539
Trainable params: 64,106
Non-trainable params: 433
_________________________________________________________________
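One loose end: the `data_unseen` split created earlier is never scored. A sketch of how the reloaded model could be applied to it, with two stated assumptions: per the AutoKeras docs, the exported model wants its numpy input cast to strings, and AutoKeras encodes the sorted string labels '0'..'9' so the argmax index maps back to the digit:

```python
from sklearn.metrics import accuracy_score

# Sketch: evaluate the reloaded model on the 400-row unseen hold-out split.
X_unseen = data_unseen.drop(columns=['target'])
y_unseen = data_unseen['target'].astype('str')

# The exported model returns softmax probabilities over the 10 classes.
probs = model.predict(X_unseen.to_numpy().astype('unicode'), verbose=0)
pred_unseen = probs.argmax(axis=1).astype(str)  # assumes sorted label order '0'..'9'

print('Unseen-data accuracy : ', accuracy_score(y_unseen, pred_unseen))
```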
In this coding recipe, we discussed how to build a deep learning model in Python with AutoKeras.

Specifically, we have learned the following:

- how to load a dataset from OpenML and wrap it in a pandas DataFrame;
- how to carve out an unseen hold-out set and split the rest into training and testing data;
- how to search for the best model with AutoKeras' StructuredDataClassifier;
- how to evaluate the trained model with accuracy, a classification report, and a confusion matrix;
- how to export, save, and reload the best Keras model.