Let's apply the CNN concepts to the MNIST dataset and see whether we can push the training accuracy above 99.5% within 10 epochs, using a single convolution and max-pooling layer. The code structure and concepts remain the same as before.
In [1]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
In [3]:
mnist = tf.keras.datasets.mnist
(training_images, training_labels), (testing_images, testing_labels) = mnist.load_data()
In [4]:
def reshape_and_normalize_image(images):
    num_images, height, width = images.shape
    images = images.reshape(num_images, height, width, 1)  # Add a channel dimension at the end of the array
    max_pixel = np.max(images)
    images = np.divide(images, max_pixel)  # Normalize pixel values to the range [0, 1]
    return images
In [5]:
training_images = reshape_and_normalize_image(training_images)
testing_images = reshape_and_normalize_image(testing_images)
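As a quick sanity check (a minimal sketch, not part of the original run), the reshaped arrays should now carry a trailing channel dimension and values in the range [0, 1]:
In [ ]:
# Illustrative check: expect (60000, 28, 28, 1) and (10000, 28, 28, 1)
print(training_images.shape, testing_images.shape)
print(training_images.min(), training_images.max())  # should print 0.0 and 1.0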
In [10]:
class AccuracyCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        # Stop training once the training accuracy reaches 99.5%
        if logs and logs.get('accuracy', 0) >= 0.995:
            print('Reached 99.5%. Stopping the training')
            self.model.stop_training = True
In [11]:
model = tf.keras.models.Sequential([
    tf.keras.layers.Input(shape=(28, 28, 1)),
    tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax')
])
model.compile(optimizer='adam', loss=tf.keras.losses.SparseCategoricalCrossentropy(), metrics=['accuracy'])
model.fit(training_images, training_labels, epochs=10, callbacks=[AccuracyCallback()])
Epoch 1/10
1875/1875 ━━━━━━━━━━━━━━━━━━━━ 19s 10ms/step - accuracy: 0.9060 - loss: 0.3057
Epoch 2/10
1875/1875 ━━━━━━━━━━━━━━━━━━━━ 19s 10ms/step - accuracy: 0.9845 - loss: 0.0540
Epoch 3/10
1875/1875 ━━━━━━━━━━━━━━━━━━━━ 17s 9ms/step - accuracy: 0.9890 - loss: 0.0358
Epoch 4/10
1875/1875 ━━━━━━━━━━━━━━━━━━━━ 16s 8ms/step - accuracy: 0.9934 - loss: 0.0213
Epoch 5/10
1875/1875 ━━━━━━━━━━━━━━━━━━━━ 17s 9ms/step - accuracy: 0.9958 - loss: 0.0148
Epoch 6/10
1873/1875 ━━━━━━━━━━━━━━━━━━━━ 0s 9ms/step - accuracy: 0.9964 - loss: 0.0113
Reached 99.5%. Stopping the training
1875/1875 ━━━━━━━━━━━━━━━━━━━━ 18s 9ms/step - accuracy: 0.9964 - loss: 0.0113
Out[11]:
<keras.src.callbacks.history.History at 0x14d43d243d0>
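To see how the single Conv2D/MaxPooling2D stack feeds into the dense head and how many parameters it contains, Keras's built-in summary can be printed. This is an optional inspection step, not part of the original run:
In [ ]:
# Optional: inspect layer output shapes and parameter counts
model.summary()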
In [12]:
test_loss, test_accuracy = model.evaluate(testing_images, testing_labels, verbose=False)
In [13]:
test_loss, test_accuracy
Out[13]:
(0.04949159175157547, 0.9855999946594238)
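Since matplotlib was imported at the start but never used, here is an optional sketch (assuming the trained `model`, `testing_images`, and `testing_labels` from above) that displays one test digit alongside the model's prediction:
In [ ]:
# Illustrative sketch: visualize a test image and the predicted digit
index = 0  # arbitrary test image to inspect
probabilities = model.predict(testing_images[index:index + 1], verbose=0)
predicted_digit = np.argmax(probabilities)

plt.imshow(testing_images[index].reshape(28, 28), cmap='gray')
plt.title(f'Predicted: {predicted_digit}, Actual: {testing_labels[index]}')
plt.axis('off')
plt.show()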