# Fashion MNIST classification - using the Keras Functional API
# Label Description
# 0: T-shirt/top
# 1: Trouser
# 2: Pullover
# 3: Dress
# 4: Coat
# 5: Sandal
# 6: Shirt
# 7: Sneaker
# 8: Bag
# 9: Ankle boot
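# For readable predictions later on, the labels above can be kept in a list
# (a small convenience helper added here; it is not part of the dataset API):
class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
               'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']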
import tensorflow as tf
from tensorflow.keras import datasets, layers, models  # use tf.keras consistently with tf.keras.Model below
(x_train, y_train), (x_test, y_test) = datasets.fashion_mnist.load_data()
print(x_train.shape, y_train.shape, x_test.shape, y_test.shape) # (60000, 28, 28) (60000,) (10000, 28, 28) (10000,)
x_train = x_train / 255.0  # scale pixel values to [0, 1]
x_test = x_test / 255.0
# CNNs expect a channel axis, so reshape the 3-D data to 4-D
x_train = x_train.reshape((-1, 28, 28, 1))  # grayscale images have a single channel
x_test = x_test.reshape((-1, 28, 28, 1))    # e.g. x_test[3, 12, 13, 0]
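# Equivalent alternative (assuming plain NumPy arrays, which load_data returns):
# x_train = x_train[..., np.newaxis]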
import matplotlib.pyplot as plt
# Preview the first 16 training images (imshow needs 2-D data, so drop the channel axis)
# plt.figure()
# for i in range(16):
#     plt.subplot(4, 4, i + 1)
#     plt.imshow(x_train[i].squeeze(), cmap='gray')
# plt.show()
# Build the model with the Functional API
input_shape = (28, 28, 1)
img_input = layers.Input(shape=input_shape)
net = layers.Conv2D(filters=16, kernel_size=(3,3), activation='relu')(img_input)
net = layers.MaxPool2D(pool_size=(2, 2))(net)
net = layers.Conv2D(filters=32, kernel_size=(3,3), activation='relu')(net)
net = layers.MaxPool2D(pool_size=(2, 2))(net)
net = layers.Conv2D(filters=64, kernel_size=(3,3), activation='relu')(net)
net = layers.MaxPool2D(pool_size=(2, 2))(net)
net = layers.Flatten()(net)
net = layers.Dense(units=64, activation='relu')(net)
net = layers.Dense(units=32, activation='relu')(net)
outputs = layers.Dense(units=10, activation='softmax')(net)
model = tf.keras.Model(inputs=img_input, outputs=outputs)
model.summary()  # summary() already prints; wrapping it in print() just adds 'None'
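# Parameter-count check, a worked instance of the Conv2D formula
# params = (kernel_h * kernel_w * in_channels + 1) * filters:
#   conv2d   : (3*3*1  + 1) * 16 = 160
#   conv2d_1 : (3*3*16 + 1) * 32 = 4,640
#   conv2d_2 : (3*3*32 + 1) * 64 = 18,496
# Dense layers follow (inputs + 1) * units, e.g. (64 + 1) * 64 = 4,160.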
# The rest is the same as the previous exercise
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
from keras.callbacks import EarlyStopping
es = EarlyStopping(monitor='val_loss', patience=3)  # patience should be set generously
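# A common variant (an assumption here, not what the original run used):
# roll back to the best epoch's weights when training stops early.
# es = EarlyStopping(monitor='val_loss', patience=3, restore_best_weights=True)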
history = model.fit(x_train, y_train, batch_size=128, epochs=1000, verbose=0,
                    validation_split=0.2, callbacks=[es])
# Save the training history (history.history is a plain dict of lists, so pickle handles it directly)
import pickle
history = history.history
with open('cnn2_history.pickle', 'wb') as f:
    pickle.dump(history, f)
# Evaluate the model
train_loss, train_acc = model.evaluate(x_train, y_train)
test_loss, test_acc = model.evaluate(x_test, y_test)
print('train_loss : {}, train_acc : {}'.format(train_loss, train_acc))
print('test_loss : {}, test_acc : {}'.format(test_loss, test_acc))
# Save the model
model.save('cnn2_model.h5')
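# Aside (not part of the original run): .h5 is the legacy HDF5 format; recent
# Keras versions also support the native format, e.g. model.save('cnn2_model.keras').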
print()
# --- Working with the trained model ---
mymodel = tf.keras.models.load_model('cnn2_model.h5')
# predict
import numpy as np
print('predicted :', np.argmax(mymodel.predict(x_test[:1])))
print('predicted :', np.argmax(mymodel.predict(x_test[[0]])))  # same meaning as the line above
print('actual    :', y_test[0])
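# The full softmax vector can also be inspected; this uses the class_names list
# defined near the top (a convenience added here, not in the original output):
probs = mymodel.predict(x_test[:1])[0]
print('probabilities :', np.round(probs, 3))
print('predicted label :', class_names[np.argmax(probs)])  # index 9 -> 'Ankle boot'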
# Visualization (matplotlib.pyplot is already imported above)
with open('cnn2_history.pickle', 'rb') as f:
    history = pickle.load(f)
def plot_acc_func(title=None):
    plt.plot(history['accuracy'], label='accuracy')
    plt.plot(history['val_accuracy'], label='val_accuracy')
    plt.title(title)
    plt.xlabel('epochs')
    plt.ylabel(title)
    plt.legend()

plot_acc_func('accuracy')
plt.show()
def plot_loss_func(title=None):
    plt.plot(history['loss'], label='loss')
    plt.plot(history['val_loss'], label='val_loss')
    plt.title(title)
    plt.xlabel('epochs')
    plt.ylabel(title)
    plt.legend()

plot_loss_func('loss')
plt.show()
<console>
(60000, 28, 28) (60000,) (10000, 28, 28) (10000,)
Model: "model"
_________________________________________________________________
 Layer (type)                   Output Shape              Param #
=================================================================
 input_1 (InputLayer)           [(None, 28, 28, 1)]       0
 conv2d (Conv2D)                (None, 26, 26, 16)        160
 max_pooling2d (MaxPooling2D)   (None, 13, 13, 16)        0
 conv2d_1 (Conv2D)              (None, 11, 11, 32)        4640
 max_pooling2d_1 (MaxPooling2D) (None, 5, 5, 32)          0
 conv2d_2 (Conv2D)              (None, 3, 3, 64)          18496
 max_pooling2d_2 (MaxPooling2D) (None, 1, 1, 64)          0
 flatten (Flatten)              (None, 64)                0
 dense (Dense)                  (None, 64)                4160
 dense_1 (Dense)                (None, 32)                2080
 dense_2 (Dense)                (None, 10)                330
=================================================================
Total params: 29,866
Trainable params: 29,866
Non-trainable params: 0
_________________________________________________________________
1875/1875 [==============================] - 5s 3ms/step - loss: 0.2295 - accuracy: 0.9171
313/313 [==============================] - 1s 3ms/step - loss: 0.3588 - accuracy: 0.8816
train_loss : 0.2294693887233734, train_acc : 0.9171000123023987
test_loss : 0.35884204506874084, test_acc : 0.881600022315979
1/1 [==============================] - 0s 96ms/step
predicted : 9
1/1 [==============================] - 0s 14ms/step
predicted : 9
actual    : 9
image data visualization (accuracy and loss plots are displayed)