Keras : Vision models サンプル: cifar10_cnn.py
CIFAR10 スモール画像データセット上で単純な CNN をトレーニングします。
100 エポック後、データ拡張なしで 79.14 %、データ拡張ありで 78.95 % のテスト精度です。
from __future__ import print_function import keras from keras.datasets import cifar10 from keras.preprocessing.image import ImageDataGenerator from keras.models import Sequential from keras.layers import Dense, Dropout, Activation, Flatten from keras.layers import Conv2D, MaxPooling2D import numpy as np import os
# Hyperparameters and artifact paths for the CIFAR-10 CNN example.
# (Removed a stale commented-out `data_augmentation` flag: this script runs
# both the plain and the augmented training explicitly, so the flag is dead.)
batch_size = 32       # samples per gradient update
num_classes = 10      # CIFAR-10 has ten categories
epochs = 100          # full passes over the training set
num_predictions = 20  # how many samples to predict on after training
save_dir = os.path.join(os.getcwd(), 'saved_models')
model_name = 'keras_cifar10_trained_model.h5'
# Load CIFAR-10; the dataset comes pre-shuffled and pre-split.
train_split, test_split = cifar10.load_data()
x_train, y_train = train_split
x_test, y_test = test_split
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# One-hot encode the integer class labels into binary class matrices.
y_train, y_test = (keras.utils.to_categorical(labels, num_classes)
                   for labels in (y_train, y_test))
# Build the CNN: two conv blocks (32 then 64 filters, each conv-conv-pool
# with dropout) followed by a dense classifier with softmax output.
model = Sequential([
    Conv2D(32, (3, 3), padding='same', input_shape=x_train.shape[1:]),
    Activation('relu'),
    Conv2D(32, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),

    Conv2D(64, (3, 3), padding='same'),
    Activation('relu'),
    Conv2D(64, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),

    Flatten(),
    Dense(512),
    Activation('relu'),
    Dropout(0.5),
    Dense(num_classes),
    Activation('softmax'),
])

model.summary()
Layer (type) Output Shape Param # ================================================================= conv2d_1 (Conv2D) (None, 32, 32, 32) 896 _________________________________________________________________ activation_1 (Activation) (None, 32, 32, 32) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 30, 30, 32) 9248 _________________________________________________________________ activation_2 (Activation) (None, 30, 30, 32) 0 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 15, 15, 32) 0 _________________________________________________________________ dropout_1 (Dropout) (None, 15, 15, 32) 0 _________________________________________________________________ conv2d_3 (Conv2D) (None, 15, 15, 64) 18496 _________________________________________________________________ activation_3 (Activation) (None, 15, 15, 64) 0 _________________________________________________________________ conv2d_4 (Conv2D) (None, 13, 13, 64) 36928 _________________________________________________________________ activation_4 (Activation) (None, 13, 13, 64) 0 _________________________________________________________________ max_pooling2d_2 (MaxPooling2 (None, 6, 6, 64) 0 _________________________________________________________________ dropout_2 (Dropout) (None, 6, 6, 64) 0 _________________________________________________________________ flatten_1 (Flatten) (None, 2304) 0 _________________________________________________________________ dense_1 (Dense) (None, 512) 1180160 _________________________________________________________________ activation_5 (Activation) (None, 512) 0 _________________________________________________________________ dropout_3 (Dropout) (None, 512) 0 _________________________________________________________________ dense_2 (Dense) (None, 10) 5130 _________________________________________________________________ activation_6 (Activation) (None, 10) 0 
================================================================= Total params: 1,250,858 Trainable params: 1,250,858 Non-trainable params: 0 _________________________________________________________________
# RMSprop with a small learning rate and mild decay.
opt = keras.optimizers.rmsprop(decay=1e-6, lr=0.0001)

# Multi-class classification objective; track accuracy during training.
model.compile(optimizer=opt,
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# Scale pixel values from [0, 255] into [0, 1] as float32.
# (astype already copies, so dividing the copy matches the original
# astype-then-in-place-divide exactly.)
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# First run: train directly on the normalized images, no augmentation.
print('Not using data augmentation.')
model.fit(x_train, y_train,
          epochs=epochs,
          batch_size=batch_size,
          shuffle=True,
          validation_data=(x_test, y_test))
Not using data augmentation. Train on 50000 samples, validate on 10000 samples Epoch 1/100 50000/50000 [==============================] - 27s 542us/step - loss: 1.8227 - acc: 0.3329 - val_loss: 1.6068 - val_acc: 0.4253 Epoch 2/100 50000/50000 [==============================] - 23s 463us/step - loss: 1.4986 - acc: 0.4602 - val_loss: 1.3667 - val_acc: 0.5093 Epoch 3/100 50000/50000 [==============================] - 23s 463us/step - loss: 1.3429 - acc: 0.5215 - val_loss: 1.2526 - val_acc: 0.5627 Epoch 4/100 50000/50000 [==============================] - 23s 463us/step - loss: 1.2421 - acc: 0.5578 - val_loss: 1.1588 - val_acc: 0.5889 Epoch 5/100 50000/50000 [==============================] - 23s 464us/step - loss: 1.1646 - acc: 0.5890 - val_loss: 1.0919 - val_acc: 0.6194 Epoch 6/100 50000/50000 [==============================] - 23s 466us/step - loss: 1.1047 - acc: 0.6107 - val_loss: 1.0132 - val_acc: 0.6467 Epoch 7/100 50000/50000 [==============================] - 23s 467us/step - loss: 1.0511 - acc: 0.6312 - val_loss: 0.9798 - val_acc: 0.6591 Epoch 8/100 50000/50000 [==============================] - 23s 467us/step - loss: 1.0043 - acc: 0.6477 - val_loss: 0.9472 - val_acc: 0.6696 Epoch 9/100 50000/50000 [==============================] - 23s 466us/step - loss: 0.9694 - acc: 0.6590 - val_loss: 0.9134 - val_acc: 0.6830 Epoch 10/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.9373 - acc: 0.6746 - val_loss: 0.8924 - val_acc: 0.6868 Epoch 11/100 50000/50000 [==============================] - 23s 466us/step - loss: 0.9072 - acc: 0.6844 - val_loss: 0.8416 - val_acc: 0.7087 Epoch 12/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.8787 - acc: 0.6923 - val_loss: 0.8738 - val_acc: 0.6964 Epoch 13/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.8585 - acc: 0.7009 - val_loss: 0.8511 - val_acc: 0.7043 Epoch 14/100 50000/50000 [==============================] - 23s 468us/step - loss: 0.8408 - acc: 
0.7094 - val_loss: 0.8406 - val_acc: 0.7088 Epoch 15/100 50000/50000 [==============================] - 23s 468us/step - loss: 0.8217 - acc: 0.7144 - val_loss: 0.7943 - val_acc: 0.7242 Epoch 16/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.8066 - acc: 0.7198 - val_loss: 0.8001 - val_acc: 0.7284 Epoch 17/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7932 - acc: 0.7236 - val_loss: 0.7945 - val_acc: 0.7292 Epoch 18/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7816 - acc: 0.7291 - val_loss: 0.7900 - val_acc: 0.7294 Epoch 19/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7681 - acc: 0.7363 - val_loss: 0.8079 - val_acc: 0.7260 Epoch 20/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7657 - acc: 0.7346 - val_loss: 0.7332 - val_acc: 0.7497 Epoch 21/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7537 - acc: 0.7404 - val_loss: 0.7724 - val_acc: 0.7423 Epoch 22/100 50000/50000 [==============================] - 23s 466us/step - loss: 0.7467 - acc: 0.7426 - val_loss: 0.7368 - val_acc: 0.7480 Epoch 23/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7368 - acc: 0.7456 - val_loss: 0.7392 - val_acc: 0.7475 Epoch 24/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7312 - acc: 0.7522 - val_loss: 0.7236 - val_acc: 0.7575 Epoch 25/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7269 - acc: 0.7501 - val_loss: 0.7224 - val_acc: 0.7564 Epoch 26/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7221 - acc: 0.7540 - val_loss: 0.7437 - val_acc: 0.7539 Epoch 27/100 50000/50000 [==============================] - 23s 468us/step - loss: 0.7174 - acc: 0.7537 - val_loss: 0.6990 - val_acc: 0.7654 Epoch 28/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7117 - acc: 0.7571 - val_loss: 0.7156 
- val_acc: 0.7572 Epoch 29/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7122 - acc: 0.7587 - val_loss: 0.7026 - val_acc: 0.7653 Epoch 30/100 50000/50000 [==============================] - 23s 467us/step - loss: 0.7046 - acc: 0.7604 - val_loss: 0.7148 - val_acc: 0.7668 Epoch 31/100 50000/50000 [==============================] - 23s 466us/step - loss: 0.7012 - acc: 0.7611 - val_loss: 0.7158 - val_acc: 0.7653 Epoch 32/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6958 - acc: 0.7636 - val_loss: 0.6800 - val_acc: 0.7718 Epoch 33/100 50000/50000 [==============================] - 23s 460us/step - loss: 0.6896 - acc: 0.7652 - val_loss: 0.7154 - val_acc: 0.7618 Epoch 34/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6843 - acc: 0.7684 - val_loss: 0.6794 - val_acc: 0.7723 Epoch 35/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6820 - acc: 0.7703 - val_loss: 0.7220 - val_acc: 0.7669 Epoch 36/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6855 - acc: 0.7677 - val_loss: 0.6755 - val_acc: 0.7767 Epoch 37/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6765 - acc: 0.7710 - val_loss: 0.6761 - val_acc: 0.7806 Epoch 38/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6740 - acc: 0.7721 - val_loss: 0.6686 - val_acc: 0.7764 Epoch 39/100 50000/50000 [==============================] - 23s 460us/step - loss: 0.6729 - acc: 0.7734 - val_loss: 0.6895 - val_acc: 0.7771 Epoch 40/100 50000/50000 [==============================] - 23s 463us/step - loss: 0.6692 - acc: 0.7736 - val_loss: 0.7017 - val_acc: 0.7804 Epoch 41/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6672 - acc: 0.7747 - val_loss: 0.6708 - val_acc: 0.7752 Epoch 42/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6698 - acc: 0.7741 - val_loss: 0.6807 - val_acc: 0.7760 Epoch 
43/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6619 - acc: 0.7758 - val_loss: 0.7031 - val_acc: 0.7696 Epoch 44/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6630 - acc: 0.7773 - val_loss: 0.6922 - val_acc: 0.7691 Epoch 45/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6657 - acc: 0.7753 - val_loss: 0.6880 - val_acc: 0.7759 Epoch 46/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6589 - acc: 0.7762 - val_loss: 0.6611 - val_acc: 0.7845 Epoch 47/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6512 - acc: 0.7797 - val_loss: 0.6467 - val_acc: 0.7852 Epoch 48/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6562 - acc: 0.7790 - val_loss: 0.6576 - val_acc: 0.7864 Epoch 49/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6543 - acc: 0.7801 - val_loss: 0.7988 - val_acc: 0.7727 Epoch 50/100 50000/50000 [==============================] - 23s 459us/step - loss: 0.6481 - acc: 0.7799 - val_loss: 0.6851 - val_acc: 0.7747 Epoch 51/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6543 - acc: 0.7810 - val_loss: 0.8068 - val_acc: 0.7375 Epoch 52/100 50000/50000 [==============================] - 23s 464us/step - loss: 0.6513 - acc: 0.7814 - val_loss: 0.6482 - val_acc: 0.7891 Epoch 53/100 50000/50000 [==============================] - 23s 464us/step - loss: 0.6508 - acc: 0.7833 - val_loss: 0.6891 - val_acc: 0.7751 Epoch 54/100 50000/50000 [==============================] - 23s 463us/step - loss: 0.6480 - acc: 0.7832 - val_loss: 0.6766 - val_acc: 0.7781 Epoch 55/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6464 - acc: 0.7830 - val_loss: 0.6954 - val_acc: 0.7816 Epoch 56/100 50000/50000 [==============================] - 23s 460us/step - loss: 0.6454 - acc: 0.7839 - val_loss: 0.6974 - val_acc: 0.7749 Epoch 57/100 50000/50000 
[==============================] - 23s 461us/step - loss: 0.6477 - acc: 0.7839 - val_loss: 0.7029 - val_acc: 0.7678 Epoch 58/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6431 - acc: 0.7856 - val_loss: 0.6629 - val_acc: 0.7883 Epoch 59/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6469 - acc: 0.7845 - val_loss: 0.6843 - val_acc: 0.7839 Epoch 60/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6382 - acc: 0.7851 - val_loss: 0.6917 - val_acc: 0.7848 Epoch 61/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6424 - acc: 0.7842 - val_loss: 0.6622 - val_acc: 0.7942 Epoch 62/100 50000/50000 [==============================] - 23s 457us/step - loss: 0.6396 - acc: 0.7867 - val_loss: 0.6774 - val_acc: 0.7786 Epoch 63/100 50000/50000 [==============================] - 23s 458us/step - loss: 0.6387 - acc: 0.7859 - val_loss: 0.6649 - val_acc: 0.7860 Epoch 64/100 50000/50000 [==============================] - 23s 457us/step - loss: 0.6370 - acc: 0.7873 - val_loss: 0.7143 - val_acc: 0.7823 Epoch 65/100 50000/50000 [==============================] - 23s 456us/step - loss: 0.6362 - acc: 0.7876 - val_loss: 0.7169 - val_acc: 0.7684 Epoch 66/100 50000/50000 [==============================] - 23s 459us/step - loss: 0.6384 - acc: 0.7862 - val_loss: 0.7613 - val_acc: 0.7735 Epoch 67/100 50000/50000 [==============================] - 23s 459us/step - loss: 0.6361 - acc: 0.7876 - val_loss: 0.6679 - val_acc: 0.7862 Epoch 68/100 50000/50000 [==============================] - 23s 457us/step - loss: 0.6410 - acc: 0.7870 - val_loss: 0.6876 - val_acc: 0.7793 Epoch 69/100 50000/50000 [==============================] - 23s 457us/step - loss: 0.6378 - acc: 0.7874 - val_loss: 0.7004 - val_acc: 0.7809 Epoch 70/100 50000/50000 [==============================] - 23s 457us/step - loss: 0.6366 - acc: 0.7892 - val_loss: 0.7142 - val_acc: 0.7753 Epoch 71/100 50000/50000 
[==============================] - 23s 459us/step - loss: 0.6396 - acc: 0.7897 - val_loss: 0.6872 - val_acc: 0.7759 Epoch 72/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6331 - acc: 0.7884 - val_loss: 0.6699 - val_acc: 0.7807 Epoch 73/100 50000/50000 [==============================] - 23s 463us/step - loss: 0.6369 - acc: 0.7889 - val_loss: 0.6521 - val_acc: 0.7930 Epoch 74/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6385 - acc: 0.7872 - val_loss: 0.7298 - val_acc: 0.7771 Epoch 75/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6392 - acc: 0.7891 - val_loss: 0.6874 - val_acc: 0.7811 Epoch 76/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6383 - acc: 0.7860 - val_loss: 0.6748 - val_acc: 0.7886 Epoch 77/100 50000/50000 [==============================] - 23s 459us/step - loss: 0.6429 - acc: 0.7850 - val_loss: 0.7190 - val_acc: 0.7711 Epoch 78/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6437 - acc: 0.7867 - val_loss: 0.7070 - val_acc: 0.7745 Epoch 79/100 50000/50000 [==============================] - 23s 463us/step - loss: 0.6387 - acc: 0.7868 - val_loss: 0.6576 - val_acc: 0.7888 Epoch 80/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6333 - acc: 0.7897 - val_loss: 0.6937 - val_acc: 0.7824 Epoch 81/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6334 - acc: 0.7887 - val_loss: 0.6653 - val_acc: 0.7859 Epoch 82/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6373 - acc: 0.7893 - val_loss: 0.6599 - val_acc: 0.7949 Epoch 83/100 50000/50000 [==============================] - 23s 460us/step - loss: 0.6349 - acc: 0.7908 - val_loss: 0.7904 - val_acc: 0.7715 Epoch 84/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6442 - acc: 0.7865 - val_loss: 0.6862 - val_acc: 0.7753 Epoch 85/100 50000/50000 
[==============================] - 23s 462us/step - loss: 0.6427 - acc: 0.7874 - val_loss: 0.6689 - val_acc: 0.7893 Epoch 86/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6427 - acc: 0.7891 - val_loss: 0.7285 - val_acc: 0.7749 Epoch 87/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6419 - acc: 0.7876 - val_loss: 0.6623 - val_acc: 0.7914 Epoch 88/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6397 - acc: 0.7880 - val_loss: 0.7575 - val_acc: 0.7760 Epoch 89/100 50000/50000 [==============================] - 23s 460us/step - loss: 0.6434 - acc: 0.7871 - val_loss: 0.7873 - val_acc: 0.7601 Epoch 90/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6379 - acc: 0.7887 - val_loss: 0.7023 - val_acc: 0.7784 Epoch 91/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6461 - acc: 0.7862 - val_loss: 0.7355 - val_acc: 0.7754 Epoch 92/100 50000/50000 [==============================] - 23s 463us/step - loss: 0.6428 - acc: 0.7903 - val_loss: 0.7525 - val_acc: 0.7717 Epoch 93/100 50000/50000 [==============================] - 23s 462us/step - loss: 0.6448 - acc: 0.7858 - val_loss: 0.6510 - val_acc: 0.7915 Epoch 94/100 50000/50000 [==============================] - 23s 465us/step - loss: 0.6411 - acc: 0.7862 - val_loss: 0.6908 - val_acc: 0.7783 Epoch 95/100 50000/50000 [==============================] - 23s 461us/step - loss: 0.6506 - acc: 0.7881 - val_loss: 0.7145 - val_acc: 0.7693 Epoch 96/100 50000/50000 [==============================] - 23s 469us/step - loss: 0.6505 - acc: 0.7863 - val_loss: 0.8005 - val_acc: 0.7448 Epoch 97/100 50000/50000 [==============================] - 23s 468us/step - loss: 0.6499 - acc: 0.7881 - val_loss: 0.7509 - val_acc: 0.7629 Epoch 98/100 50000/50000 [==============================] - 23s 470us/step - loss: 0.6643 - acc: 0.7824 - val_loss: 0.7633 - val_acc: 0.7628 Epoch 99/100 50000/50000 
[==============================] - 23s 469us/step - loss: 0.6575 - acc: 0.7824 - val_loss: 0.7434 - val_acc: 0.7656 Epoch 100/100 50000/50000 [==============================] - 23s 468us/step - loss: 0.6650 - acc: 0.7821 - val_loss: 0.6553 - val_acc: 0.7914
# Score the trained model on the held-out test set.
scores = model.evaluate(x_test, y_test, verbose=1)
test_loss, test_acc = scores[0], scores[1]
print('Test loss:', test_loss)
print('Test accuracy:', test_acc)
10000/10000 [==============================] - 2s 162us/step Test loss: 0.655318251133 Test accuracy: 0.7914
# Fresh copy of the same architecture for the data-augmented run:
# two conv blocks (32 then 64 filters) plus a dense softmax classifier.
model = Sequential([
    Conv2D(32, (3, 3), padding='same', input_shape=x_train.shape[1:]),
    Activation('relu'),
    Conv2D(32, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),

    Conv2D(64, (3, 3), padding='same'),
    Activation('relu'),
    Conv2D(64, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),

    Flatten(),
    Dense(512),
    Activation('relu'),
    Dropout(0.5),
    Dense(num_classes),
    Activation('softmax'),
])
# Compile the fresh model with the same RMSprop optimizer and objective.
model.compile(optimizer=opt,
              loss='categorical_crossentropy',
              metrics=['accuracy'])
print('Using real-time data augmentation.')
# Real-time augmentation: only small random shifts and horizontal flips
# are enabled; all normalization/whitening options stay off.
datagen = ImageDataGenerator(
    featurewise_center=False,             # no dataset-wide mean centering
    featurewise_std_normalization=False,  # no dataset-wide std scaling
    samplewise_center=False,              # no per-sample mean centering
    samplewise_std_normalization=False,   # no per-sample std scaling
    zca_whitening=False,                  # no ZCA whitening
    rotation_range=0,                     # no random rotation
    width_shift_range=0.1,                # shift up to 10% horizontally
    height_shift_range=0.1,               # shift up to 10% vertically
    horizontal_flip=True,                 # random left-right flips
    vertical_flip=False)                  # no upside-down flips
# Fit computes feature-wise statistics (mean/std/ZCA components); with the
# options above disabled this is effectively a no-op but kept for parity.
datagen.fit(x_train)
# Train on augmented batches streamed by datagen.flow(); one step per batch,
# enough steps to cover the whole training set each epoch.
steps = int(np.ceil(x_train.shape[0] / float(batch_size)))
model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
                    steps_per_epoch=steps,
                    epochs=epochs,
                    validation_data=(x_test, y_test),
                    workers=4)
Epoch 1/100 1563/1563 [==============================] - 26s 16ms/step - loss: 1.8610 - acc: 0.3122 - val_loss: 1.5732 - val_acc: 0.4244 Epoch 2/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.5770 - acc: 0.4206 - val_loss: 1.3804 - val_acc: 0.5065 Epoch 3/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.4533 - acc: 0.4725 - val_loss: 1.2990 - val_acc: 0.5354 Epoch 4/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.3698 - acc: 0.5069 - val_loss: 1.2224 - val_acc: 0.5694 Epoch 5/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.3005 - acc: 0.5342 - val_loss: 1.1519 - val_acc: 0.5890 Epoch 6/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.2464 - acc: 0.5527 - val_loss: 1.0802 - val_acc: 0.6246 Epoch 7/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.1966 - acc: 0.5759 - val_loss: 1.0926 - val_acc: 0.6171 Epoch 8/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.1629 - acc: 0.5879 - val_loss: 1.0064 - val_acc: 0.6466 Epoch 9/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.1252 - acc: 0.6017 - val_loss: 0.9886 - val_acc: 0.6542 Epoch 10/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.0889 - acc: 0.6145 - val_loss: 0.9484 - val_acc: 0.6721 Epoch 11/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.0624 - acc: 0.6265 - val_loss: 0.9133 - val_acc: 0.6801 Epoch 12/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.0377 - acc: 0.6335 - val_loss: 0.9082 - val_acc: 0.6829 Epoch 13/100 1563/1563 [==============================] - 25s 16ms/step - loss: 1.0137 - acc: 0.6406 - val_loss: 0.8746 - val_acc: 0.7024 Epoch 14/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.9931 - acc: 0.6495 - val_loss: 0.8481 - val_acc: 0.7057 Epoch 15/100 1563/1563 [==============================] - 25s 16ms/step - 
loss: 0.9724 - acc: 0.6580 - val_loss: 0.8380 - val_acc: 0.7071 Epoch 16/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.9582 - acc: 0.6635 - val_loss: 0.8242 - val_acc: 0.7113 Epoch 17/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.9386 - acc: 0.6698 - val_loss: 0.8123 - val_acc: 0.7173 Epoch 18/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.9255 - acc: 0.6756 - val_loss: 0.7884 - val_acc: 0.7272 Epoch 19/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.9112 - acc: 0.6814 - val_loss: 0.7918 - val_acc: 0.7254 Epoch 20/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8973 - acc: 0.6871 - val_loss: 0.7634 - val_acc: 0.7374 Epoch 21/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8855 - acc: 0.6898 - val_loss: 0.7619 - val_acc: 0.7395 Epoch 22/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8756 - acc: 0.6945 - val_loss: 0.7541 - val_acc: 0.7405 Epoch 23/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8676 - acc: 0.6993 - val_loss: 0.7338 - val_acc: 0.7463 Epoch 24/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8593 - acc: 0.7037 - val_loss: 0.7153 - val_acc: 0.7519 Epoch 25/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8483 - acc: 0.7061 - val_loss: 0.7282 - val_acc: 0.7512 Epoch 26/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8437 - acc: 0.7080 - val_loss: 0.7305 - val_acc: 0.7470 Epoch 27/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8399 - acc: 0.7086 - val_loss: 0.7451 - val_acc: 0.7498 Epoch 28/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8406 - acc: 0.7106 - val_loss: 0.7120 - val_acc: 0.7558 Epoch 29/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8304 - acc: 0.7142 - val_loss: 0.7142 - val_acc: 0.7568 
Epoch 30/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8261 - acc: 0.7147 - val_loss: 0.6896 - val_acc: 0.7675 Epoch 31/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8190 - acc: 0.7185 - val_loss: 0.7089 - val_acc: 0.7604 Epoch 32/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8219 - acc: 0.7179 - val_loss: 0.6832 - val_acc: 0.7711 Epoch 33/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8119 - acc: 0.7218 - val_loss: 0.6821 - val_acc: 0.7699 Epoch 34/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8103 - acc: 0.7224 - val_loss: 0.7050 - val_acc: 0.7640 Epoch 35/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8079 - acc: 0.7230 - val_loss: 0.6796 - val_acc: 0.7713 Epoch 36/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8001 - acc: 0.7275 - val_loss: 0.6692 - val_acc: 0.7716 Epoch 37/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7988 - acc: 0.7262 - val_loss: 0.6659 - val_acc: 0.7723 Epoch 38/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7945 - acc: 0.7307 - val_loss: 0.6610 - val_acc: 0.7763 Epoch 39/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.8000 - acc: 0.7252 - val_loss: 0.6737 - val_acc: 0.7741 Epoch 40/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7937 - acc: 0.7295 - val_loss: 0.6884 - val_acc: 0.7756 Epoch 41/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7874 - acc: 0.7318 - val_loss: 0.6521 - val_acc: 0.7801 Epoch 42/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7897 - acc: 0.7296 - val_loss: 0.6480 - val_acc: 0.7838 Epoch 43/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7820 - acc: 0.7340 - val_loss: 0.6472 - val_acc: 0.7845 Epoch 44/100 1563/1563 [==============================] - 25s 
16ms/step - loss: 0.7826 - acc: 0.7329 - val_loss: 0.6822 - val_acc: 0.7774 Epoch 45/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7781 - acc: 0.7355 - val_loss: 0.6527 - val_acc: 0.7820 Epoch 46/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7782 - acc: 0.7366 - val_loss: 0.6574 - val_acc: 0.7828 Epoch 47/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7776 - acc: 0.7358 - val_loss: 0.6903 - val_acc: 0.7778 Epoch 48/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7706 - acc: 0.7383 - val_loss: 0.6850 - val_acc: 0.7710 Epoch 49/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7712 - acc: 0.7382 - val_loss: 0.6479 - val_acc: 0.7818 Epoch 50/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7690 - acc: 0.7390 - val_loss: 0.6697 - val_acc: 0.7799 Epoch 51/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7692 - acc: 0.7390 - val_loss: 0.6334 - val_acc: 0.7933 Epoch 52/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7690 - acc: 0.7393 - val_loss: 0.6687 - val_acc: 0.7731 Epoch 53/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7651 - acc: 0.7436 - val_loss: 0.6549 - val_acc: 0.7858 Epoch 54/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7647 - acc: 0.7421 - val_loss: 0.6297 - val_acc: 0.7928 Epoch 55/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7619 - acc: 0.7423 - val_loss: 0.6644 - val_acc: 0.7845 Epoch 56/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7559 - acc: 0.7433 - val_loss: 0.6390 - val_acc: 0.7877 Epoch 57/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7578 - acc: 0.7444 - val_loss: 0.6450 - val_acc: 0.7833 Epoch 58/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7570 - acc: 0.7437 - val_loss: 0.6214 - 
val_acc: 0.7929 Epoch 59/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7537 - acc: 0.7457 - val_loss: 0.6394 - val_acc: 0.7916 Epoch 60/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7460 - acc: 0.7471 - val_loss: 0.6387 - val_acc: 0.7837 Epoch 61/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7523 - acc: 0.7471 - val_loss: 0.6561 - val_acc: 0.7975 Epoch 62/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7484 - acc: 0.7484 - val_loss: 0.6156 - val_acc: 0.7949 Epoch 63/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7483 - acc: 0.7471 - val_loss: 0.6420 - val_acc: 0.7897 Epoch 64/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7487 - acc: 0.7479 - val_loss: 0.6298 - val_acc: 0.7875 Epoch 65/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7491 - acc: 0.7469 - val_loss: 0.6222 - val_acc: 0.7882 Epoch 66/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7471 - acc: 0.7478 - val_loss: 0.6394 - val_acc: 0.7931 Epoch 67/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7489 - acc: 0.7479 - val_loss: 0.6529 - val_acc: 0.7902 Epoch 68/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7426 - acc: 0.7501 - val_loss: 0.6213 - val_acc: 0.7909 Epoch 69/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7437 - acc: 0.7501 - val_loss: 0.6180 - val_acc: 0.7933 Epoch 70/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7478 - acc: 0.7486 - val_loss: 0.6419 - val_acc: 0.7888 Epoch 71/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7422 - acc: 0.7490 - val_loss: 0.6822 - val_acc: 0.7796 Epoch 72/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7437 - acc: 0.7498 - val_loss: 0.6280 - val_acc: 0.7957 Epoch 73/100 1563/1563 
[==============================] - 25s 16ms/step - loss: 0.7416 - acc: 0.7505 - val_loss: 0.6498 - val_acc: 0.7816 Epoch 74/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7377 - acc: 0.7506 - val_loss: 0.6571 - val_acc: 0.7923 Epoch 75/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7389 - acc: 0.7509 - val_loss: 0.6123 - val_acc: 0.7921 Epoch 76/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7370 - acc: 0.7534 - val_loss: 0.6300 - val_acc: 0.7922 Epoch 77/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7442 - acc: 0.7512 - val_loss: 0.6151 - val_acc: 0.7938 Epoch 78/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7427 - acc: 0.7526 - val_loss: 0.6207 - val_acc: 0.7931 Epoch 79/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7453 - acc: 0.7494 - val_loss: 0.6349 - val_acc: 0.7914 Epoch 80/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7406 - acc: 0.7512 - val_loss: 0.6376 - val_acc: 0.7951 Epoch 81/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7455 - acc: 0.7510 - val_loss: 0.6520 - val_acc: 0.7926 Epoch 82/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7358 - acc: 0.7521 - val_loss: 0.5933 - val_acc: 0.8035 Epoch 83/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7413 - acc: 0.7516 - val_loss: 0.6643 - val_acc: 0.7861 Epoch 84/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7387 - acc: 0.7532 - val_loss: 0.6446 - val_acc: 0.7905 Epoch 85/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7438 - acc: 0.7504 - val_loss: 0.6167 - val_acc: 0.7975 Epoch 86/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7474 - acc: 0.7518 - val_loss: 0.6305 - val_acc: 0.7971 Epoch 87/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7397 - 
acc: 0.7516 - val_loss: 0.6774 - val_acc: 0.7870 Epoch 88/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7410 - acc: 0.7524 - val_loss: 0.6310 - val_acc: 0.7921 Epoch 89/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7388 - acc: 0.7525 - val_loss: 0.6814 - val_acc: 0.7882 Epoch 90/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7389 - acc: 0.7539 - val_loss: 0.6460 - val_acc: 0.7850 Epoch 91/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7446 - acc: 0.7526 - val_loss: 0.6775 - val_acc: 0.7788 Epoch 92/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7454 - acc: 0.7516 - val_loss: 0.6384 - val_acc: 0.7938 Epoch 93/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7440 - acc: 0.7518 - val_loss: 0.6413 - val_acc: 0.7900 Epoch 94/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7457 - acc: 0.7502 - val_loss: 0.6443 - val_acc: 0.7951 Epoch 95/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7434 - acc: 0.7515 - val_loss: 0.7209 - val_acc: 0.7839 Epoch 96/100 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7495 - acc: 0.7516 - val_loss: 0.6507 - val_acc: 0.7944 Epoch 97/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7461 - acc: 0.7513 - val_loss: 0.6454 - val_acc: 0.7877 Epoch 98/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7513 - acc: 0.7484 - val_loss: 0.7052 - val_acc: 0.7872 Epoch 99/100 1563/1563 [==============================] - 27s 17ms/step - loss: 0.7483 - acc: 0.7510 - val_loss: 0.6265 - val_acc: 0.7919 Epoch 100/100 1563/1563 [==============================] - 24s 16ms/step - loss: 0.7458 - acc: 0.7523 - val_loss: 0.6516 - val_acc: 0.7895
# Save the trained model (architecture + weights) under save_dir.
# exist_ok=True makes directory creation race-free, replacing the
# check-then-create (`isdir` + `makedirs`) pattern, which could raise if the
# directory appeared between the check and the call. Requires Python >= 3.2.
os.makedirs(save_dir, exist_ok=True)
model_path = os.path.join(save_dir, model_name)
model.save(model_path)
print('Saved trained model at %s ' % model_path)
Saved trained model at /home/ubuntu/ws.keras/notebook/saved_models/keras_cifar10_trained_model.h5
# Final score of the augmentation-trained model on the test set.
scores = model.evaluate(x_test, y_test, verbose=1)
test_loss, test_acc = scores[0], scores[1]
print('Test loss:', test_loss)
print('Test accuracy:', test_acc)
10000/10000 [==============================] - 1s 150us/step Test loss: 0.651550699043 Test accuracy: 0.7895
以上