In [1]:
from IPython.core.display import display, HTML
# Widen the notebook cells to 90% of the browser window
display(HTML("<style>.container {width:90% !important;}</style>"))
Import libraries¶
In [2]:
import tensorflow as tf
from tensorflow.keras import datasets, layers, models
import numpy as np
import matplotlib.pyplot as plt
import cv2 as cv
import os
print(tf.__version__)
2.3.1
Load and normalize the dataset¶
In [3]:
(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()
train_images = train_images.reshape((60000, 28, 28, 1))
test_images = test_images.reshape((10000, 28, 28, 1))
# Normalize the pixel values to the 0-1 range.
train_images, test_images = train_images / 255.0, test_images / 255.0
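As a quick sanity check (not part of the original notebook), the shapes and value range of the normalized arrays can be inspected like this:

print(train_images.shape, test_images.shape)   # (60000, 28, 28, 1) (10000, 28, 28, 1)
print(train_images.min(), train_images.max())  # 0.0 1.0 after dividing by 255
print(train_labels[:10])                       # the first ten digit labels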
Build the CNN model¶
In [4]:
model = models.Sequential()
# Three convolution blocks extract spatial features from the 28x28 grayscale input
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
# Classification head: flatten the feature maps and map them to the 10 digit classes
model.add(layers.Flatten())
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(10, activation='softmax'))
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d (Conv2D) (None, 26, 26, 32) 320 _________________________________________________________________ max_pooling2d (MaxPooling2D) (None, 13, 13, 32) 0 _________________________________________________________________ conv2d_1 (Conv2D) (None, 11, 11, 64) 18496 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 5, 5, 64) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 3, 3, 64) 36928 _________________________________________________________________ flatten (Flatten) (None, 576) 0 _________________________________________________________________ dense (Dense) (None, 64) 36928 _________________________________________________________________ dense_1 (Dense) (None, 10) 650 ================================================================= Total params: 93,322 Trainable params: 93,322 Non-trainable params: 0 _________________________________________________________________
Compile and train the model¶
In [5]:
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(train_images, train_labels, epochs=5)
Epoch 1/5
1875/1875 [==============================] - 17s 9ms/step - loss: 0.1464 - accuracy: 0.9551
Epoch 2/5
1875/1875 [==============================] - 17s 9ms/step - loss: 0.0460 - accuracy: 0.9853
Epoch 3/5
1875/1875 [==============================] - 17s 9ms/step - loss: 0.0331 - accuracy: 0.9896
Epoch 4/5
1875/1875 [==============================] - 17s 9ms/step - loss: 0.0261 - accuracy: 0.9920
Epoch 5/5
1875/1875 [==============================] - 17s 9ms/step - loss: 0.0205 - accuracy: 0.9933
Out[5]:
<tensorflow.python.keras.callbacks.History at 0x1d090408e50>
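If the return value of model.fit is kept, e.g. history = model.fit(train_images, train_labels, epochs=5), the loss and accuracy curves can be plotted with the already-imported matplotlib. A minimal sketch assuming such a history variable:

plt.plot(history.history['loss'], label='loss')
plt.plot(history.history['accuracy'], label='accuracy')
plt.xlabel('epoch')
plt.legend()
plt.show()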
Evaluate the model¶
In [6]:
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
313/313 - 1s - loss: 0.0313 - accuracy: 0.9908
In [7]:
print(test_acc)
0.9908000230789185
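To see what the model actually does on individual test images, a few predictions can be visualized with the already-imported numpy and matplotlib (a sketch, not part of the original notebook; each title shows predicted / true label):

probs = model.predict(test_images[:5])   # softmax probabilities, shape (5, 10)
preds = np.argmax(probs, axis=1)
for i, p in enumerate(preds):
    plt.subplot(1, 5, i + 1)
    plt.imshow(test_images[i].reshape(28, 28), cmap='gray')
    plt.title(f'{p} / {test_labels[i]}')
    plt.axis('off')
plt.show()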
Save the model¶
In [8]:
model.save('mnist_cnn_model.h5')
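The saved HDF5 file can be loaded back with models.load_model and should reproduce the evaluation above. A minimal sketch, assuming the file is in the working directory:

restored = models.load_model('mnist_cnn_model.h5')
loss, acc = restored.evaluate(test_images, test_labels, verbose=2)
print(acc)   # should match the test accuracy reported above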