
TensorFlow Basics 27 - CNN on Fashion MNIST - using a subclassed model

코딩탕탕 2022. 12. 7. 13:00


# CNN on Fashion MNIST - using a subclassed model

import tensorflow as tf
from keras import datasets, layers, models

(x_train, y_train), (x_test, y_test) = datasets.fashion_mnist.load_data()
print(x_train.shape, y_train.shape, x_test.shape, y_test.shape) # (60000, 28, 28) (60000,) (10000, 28, 28) (10000,)

x_train = x_train / 255.0
x_test = x_test / 255.0

# CNNs expect a channel dimension, so reshape the 3-D data to 4-D
x_train = x_train.reshape((-1, 28, 28, 1)) # grayscale images have a single channel
x_test = x_test.reshape((-1, 28, 28, 1))   # e.g. x_test[3, 12, 13, 0] indexes one pixel
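
# An equivalent way to add the channel axis (an optional sketch, not part of the
# original code): np.expand_dims makes the added dimension explicit.
# import numpy as np
# x_train = np.expand_dims(x_train, axis=-1)  # (60000, 28, 28) -> (60000, 28, 28, 1)
# x_test = np.expand_dims(x_test, axis=-1)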

# model
class MyModel(models.Model):
    def __init__(self):
        super(MyModel, self).__init__()
        self.conv1 = layers.Conv2D(filters=16, kernel_size=(3, 3), activation='relu')
        self.conv2 = layers.Conv2D(filters=32, kernel_size=(3, 3), activation='relu')
        self.conv3 = layers.Conv2D(filters=64, kernel_size=(3, 3), activation='relu')
        self.pool = layers.MaxPool2D(pool_size=(2, 2)) # stateless, so one instance can be reused
        self.flatten = layers.Flatten()
        self.dropout = layers.Dropout(0.2)
        self.d1 = layers.Dense(units=64, activation='relu')
        self.d2 = layers.Dense(units=32, activation='relu')
        self.d3 = layers.Dense(units=10, activation='softmax')
        
    def call(self, x, training=False):
        # shapes: (28,28,1) -> conv 26x26 -> pool 13x13 -> conv 11x11 -> pool 5x5 -> conv 3x3 -> pool 1x1
        x = self.conv1(x)
        x = self.pool(x)
        x = self.conv2(x)
        x = self.pool(x)
        x = self.conv3(x)
        x = self.pool(x)
        x = self.flatten(x)
        x = self.d1(x)
        x = self.dropout(x, training=training)
        x = self.d2(x)
        x = self.dropout(x, training=training)
        return self.d3(x)
        
model = MyModel()
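
# A subclassed model builds its weights only when it is first called, so calling
# it once on a dummy batch lets model.summary() print the layer shapes (an
# optional inspection step, not in the original post).
model(tf.zeros((1, 28, 28, 1)))
model.summary()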

# The rest is the same as the previous exercise

model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])

from keras.callbacks import EarlyStopping
es = EarlyStopping(monitor='val_loss', patience=3) # patience should be set generously
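# Note: with these settings training stops 3 epochs after val_loss last improved,
# but the model keeps the weights from the *last* epoch, not the best one.
# Keras also supports rolling back to the best epoch (optional, not used in the run below):
# es = EarlyStopping(monitor='val_loss', patience=3, restore_best_weights=True)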

history = model.fit(x_train, y_train, batch_size=128, epochs=1000, verbose=0, validation_split=0.2,
                    callbacks=[es])
# save the training history
import pickle
history = history.history
with open('cnn2_history.pickle', 'wb') as f:
    pickle.dump(history, f)
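
# The trained model itself can also be persisted; for a subclassed model the
# TensorFlow SavedModel format works (an optional sketch, not part of the run):
# model.save('cnn2_model')                    # writes a SavedModel directory
# loaded = models.load_model('cnn2_model')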

# evaluate the model
train_loss, train_acc = model.evaluate(x_train, y_train)
test_loss, test_acc = model.evaluate(x_test, y_test)
print('train_loss : {}, train_acc : {}'.format(train_loss, train_acc))
print('test_loss : {}, test_acc : {}'.format(test_loss, test_acc))

print()

# predict
import numpy as np
print('predicted :', np.argmax(model.predict(x_test[:1])))
print('predicted :', np.argmax(model.predict(x_test[[0]]))) # same result as the line above
print('actual :', y_test[0])
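
# Fashion MNIST labels are indices 0-9; mapping them to the standard class names
# makes the prediction readable (class_names is added here following the
# dataset's documented label order; its output is not shown in the console below).
class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
               'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
pred = int(np.argmax(model.predict(x_test[:1]), axis=-1)[0])
print('predicted class name :', class_names[pred]) # label 9 -> 'Ankle boot'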

# visualization
import matplotlib.pyplot as plt

with open('cnn2_history.pickle', 'rb') as f:
    history = pickle.load(f)
    
def plot_acc_func(title=None):
    plt.plot(history['accuracy'], label='accuracy')
    plt.plot(history['val_accuracy'], label='val_accuracy')
    plt.title(title)
    plt.xlabel('epochs')
    plt.ylabel(title)
    plt.legend()
    
plot_acc_func('accuracy')
plt.show()

def plot_loss_func(title=None):
    plt.plot(history['loss'], label='loss')
    plt.plot(history['val_loss'], label='val_loss')
    plt.title(title)
    plt.xlabel('epochs')
    plt.ylabel(title)
    plt.legend()
    
plot_loss_func('loss')
plt.show()


<console>
(60000, 28, 28) (60000,) (10000, 28, 28) (10000,)

1875/1875 [==============================] - 6s 3ms/step - loss: 0.2632 - accuracy: 0.9039

313/313 [==============================] - 1s 3ms/step - loss: 0.3508 - accuracy: 0.8768
train_loss : 0.26322826743125916, train_acc : 0.9039000272750854
test_loss : 0.35077914595603943, test_acc : 0.876800000667572


1/1 [==============================] - 0s 98ms/step
predicted : 9

1/1 [==============================] - 0s 14ms/step
predicted : 9
actual : 9

This time the model was designed as a class by subclassing models.Model, and the rest of the workflow from the previous exercise was applied unchanged.