Today, I'm going to show you how I reached 75% accuracy on the CIFAR10 dataset. The original goal was 90% or more, so I plan to continue fine-tuning in the future.
As shown in the figure below, we will go through all of the steps that a typical AI project involves; that end-to-end coverage is what makes this task a useful exercise.
VGG16 consists of five convolutional blocks followed by fully connected layers. This time, we leave the first through fourth conv blocks frozen and train only the fifth conv block and the fully connected layers.
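In Keras, this comes down to loading the VGG16 convolutional base without its classifier head and toggling each layer's trainable flag. Here is a minimal sketch of that freezing pattern (it mirrors what train.py below does):

from keras.applications import VGG16

# Load the VGG16 convolutional base pre-trained on ImageNet,
# without its fully connected classifier head.
conv_base = VGG16(weights='imagenet',
                  include_top=False,
                  input_shape=(32, 32, 3))

# Everything before block5_conv1 stays frozen; block5 is trained
# together with the new fully connected head added on top later.
set_trainable = False
for layer in conv_base.layers:
    if layer.name == 'block5_conv1':
        set_trainable = True
    layer.trainable = set_trainable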
The figure below shows how the accuracy and the loss changed during training. Training for 200 epochs gave 75% accuracy. However, the curves for the training data and the validation data diverge, so overfitting appears to be occurring.
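One common way to limit this kind of overfitting is to stop training once the validation loss stops improving. The training script below already imports Keras's EarlyStopping callback (it is left commented out); enabling it could look roughly like this:

from keras.callbacks import EarlyStopping

# Stop training once the validation loss has not improved for 20 epochs.
early_stopping = EarlyStopping(monitor='val_loss', patience=20)

# Passed to training via the callbacks argument, e.g.:
# model.fit_generator(..., callbacks=[early_stopping])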
The inference results: the average accuracy on the test data is 74.5%.
test acc: 0.7450000047683716
The confusion matrix was computed with scikit-learn's confusion_matrix function.
This is the inference result for the 5,000 test images (500 images per class × 10 classes).
The textual confusion matrix above is then plotted as a heatmap with Matplotlib.
The classification report also gives the precision, recall, and F1-score for each class. (Refer to here for an explanation of precision and recall.)
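For reference, per-class precision and recall can also be read straight off the confusion matrix. A minimal NumPy sketch, assuming y_true and y_pred are the true and predicted labels as in Inference.py below:

import numpy as np
from sklearn.metrics import confusion_matrix

cm = confusion_matrix(y_true, y_pred)   # rows: true classes, columns: predicted classes
tp = np.diag(cm)                        # true positives per class
precision = tp / cm.sum(axis=0)         # TP / (TP + FP): column sums
recall = tp / cm.sum(axis=1)            # TP / (TP + FN): row sums
f1 = 2 * precision * recall / (precision + recall)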
Program structure
train.py
##Import
import os
import keras
from keras.preprocessing.image import ImageDataGenerator
from keras import models, layers
from keras.applications import VGG16
from keras import optimizers
import numpy as np
import matplotlib.pyplot as plt
from keras.callbacks import EarlyStopping
# 1. Plot loss and accuracy
def plot_acc(hist):
    acc = hist.history['acc']
    val_acc = hist.history['val_acc']
    epochs = range(len(acc))
    plt.plot(epochs, acc, 'bo', label='Training acc')
    plt.plot(epochs, val_acc, 'b', label='Validation acc')
    plt.title('Training and Validation accuracy')
    plt.legend()

def plot_loss(hist):
    loss = hist.history['loss']
    val_loss = hist.history['val_loss']
    epochs = range(len(loss))
    plt.plot(epochs, loss, 'ro', label='Training loss')
    plt.plot(epochs, val_loss, 'r', label='Validation loss')
    plt.title('Training and Validation loss')
    plt.legend()
def main():
    # Initial Setting
    width_x, width_y = 32, 32
    batch_size = 32
    num_of_train_samples = 40000
    num_of_val_samples = 5000
    num_of_test_samples = 5000  # 500 images * 10 classes
    epochs = 1000

    # label_class
    classes = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
    nb_classes = len(classes)

    ## 01. Data Input
    # folder information
    base_dir = r'E:\Dataset\CIFAR10\cifar10_keras_training'
    train_data_dir = os.path.join(base_dir, 'train')
    val_data_dir = os.path.join(base_dir, 'val')
    test_data_dir = os.path.join(base_dir, 'test')
    print(train_data_dir)
    print(val_data_dir)
    print(test_data_dir)
    # Input Data Generation (with Data Augmentation)
    train_datagen = ImageDataGenerator(rescale=1. / 255,
                                       rotation_range=20,
                                       width_shift_range=0.1,
                                       height_shift_range=0.1,
                                       shear_range=0.1,
                                       zoom_range=0.1,
                                       horizontal_flip=True,
                                       fill_mode='nearest')
    val_datagen = ImageDataGenerator(rescale=1. / 255)
    test_datagen = ImageDataGenerator(rescale=1. / 255)

    train_generator = train_datagen.flow_from_directory(
        train_data_dir,
        target_size=(width_x, width_y),
        color_mode='rgb',
        classes=classes,
        class_mode='categorical',
        batch_size=batch_size,
        shuffle=True)  # shuffle the training batches; only the test generator needs a fixed order
    val_generator = val_datagen.flow_from_directory(
        val_data_dir,
        target_size=(width_x, width_y),
        color_mode='rgb',
        classes=classes,
        class_mode='categorical',
        batch_size=batch_size,
        shuffle=False)
    test_generator = test_datagen.flow_from_directory(
        test_data_dir,
        target_size=(width_x, width_y),
        color_mode='rgb',
        classes=classes,
        class_mode='categorical',
        batch_size=batch_size,
        shuffle=False)
    ## 02. CNN Model
    conv_base = VGG16(weights='imagenet',
                      include_top=False,
                      input_shape=(width_x, width_y, 3))

    # fine-tune the conv5 block only
    conv_base.trainable = True
    set_trainable = False
    for layer in conv_base.layers:
        if layer.name == 'block5_conv1':
            set_trainable = True
        if set_trainable:
            layer.trainable = True
        else:
            layer.trainable = False

    model = models.Sequential()
    model.add(conv_base)
    model.add(layers.Flatten())
    model.add(layers.Dropout(0.5))
    model.add(layers.Dense(512, activation='relu'))
    model.add(layers.Dense(nb_classes, activation='softmax'))

    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizers.RMSprop(lr=1e-5),
                  metrics=['acc'])
    model.summary()
    ## 03. Training
    # early_stopping = EarlyStopping(patience=20)
    history = model.fit_generator(
        train_generator,
        epochs=epochs,
        steps_per_epoch=num_of_train_samples // batch_size,
        validation_data=val_generator,
        validation_steps=num_of_val_samples // batch_size,
        # callbacks=[early_stopping],
        verbose=2)

    ## 04. Model Save
    model.save('./Model/CIFAR10_trained03_seq.h5')

    ## 05. Accuracy and Loss Plot
    plot_acc(history)
    plt.figure()
    plot_loss(history)
    plt.show()

## Run code
if __name__ == '__main__':
    main()
Program structure
Inference.py
## Import
import os
import keras
from keras.models import load_model
from keras.preprocessing.image import ImageDataGenerator
from sklearn.metrics import confusion_matrix, accuracy_score
from sklearn.metrics import classification_report
import numpy as np
import matplotlib.pyplot as plt
##Confusion matrix function
def plot_confusion_matrix(cm, classes, cmap):
    '''Display the confusion matrix as a heatmap.

    Keyword arguments:
    cm      -- confusion matrix (2-D array)
    classes -- list of class labels for the axis ticks
    cmap    -- color map to use
    '''
    plt.imshow(cm, cmap=cmap)
    plt.colorbar()
    plt.ylabel('True')
    plt.xlabel('Predicted')
    plt.title('Confusion Matrix')
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    plt.tight_layout()
##Main Function
def main():
    # 01. Initial Setting
    width_x, width_y = 32, 32
    batch_size = 32
    # label_class
    classes = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']

    # 02. Load test data
    base_dir = r'E:\Dataset\CIFAR10\cifar10_keras_training'
    test_data_dir = os.path.join(base_dir, 'test')

    # 02-01. Input Data Generation (rescaling only, no augmentation)
    test_datagen = ImageDataGenerator(rescale=1. / 255)
    test_generator = test_datagen.flow_from_directory(
        test_data_dir,
        target_size=(width_x, width_y),
        color_mode='rgb',
        classes=classes,
        class_mode='categorical',
        batch_size=batch_size,
        shuffle=False)  # for the test generator, shuffling should be turned off

    # 03. Load Trained model
    model_dir = './Model/'
    model_name = 'CIFAR10_trained03_seq.h5'
    model_dir_name = os.path.join(model_dir, model_name)
    print(model_dir_name)
    model = load_model(model_dir_name)

    # 04. Evaluating Test Data
    test_loss, test_acc = model.evaluate_generator(test_generator, steps=50)  # 50 batches * 32 images
    print('test acc:', test_acc)

    # 05. Prediction and Confusion Matrix
    test_generator.reset()  # restart from the first batch so predictions line up with test_generator.classes
    Y_pred = model.predict_generator(test_generator)
    y_pred = np.argmax(Y_pred, axis=-1)
    y_true = test_generator.classes
    cm = confusion_matrix(y_true, y_pred)
    print('Confusion Matrix')
    print(cm)
    print('Classification Report')
    print(classification_report(y_true, y_pred, target_names=classes))

    cmap = plt.cm.Blues
    plot_confusion_matrix(cm, classes=classes, cmap=cmap)
    plt.show()
## Run code
if __name__ == '__main__':
    main()
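As a quick follow-up, the saved model can also be used to classify a single image. A minimal sketch, assuming a hypothetical file single_image.png and the same preprocessing as the generators above (32×32 RGB, rescaled by 1/255):

from keras.models import load_model
from keras.preprocessing import image
import numpy as np

classes = ['airplane', 'automobile', 'bird', 'cat', 'deer',
           'dog', 'frog', 'horse', 'ship', 'truck']

model = load_model('./Model/CIFAR10_trained03_seq.h5')

# Load and preprocess one image exactly like the test generator does.
img = image.load_img('single_image.png', target_size=(32, 32))  # placeholder file name
x = image.img_to_array(img) / 255.0
x = np.expand_dims(x, axis=0)  # shape: (1, 32, 32, 3)

probs = model.predict(x)[0]
print('Predicted class:', classes[np.argmax(probs)])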