{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "## Transfer Learning with InceptionV3 (From ImageNet to Cifar-10)\n", "- https://gogul09.github.io/software/flower-recognition-deep-learning" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2.0.0-alpha0\n" ] } ], "source": [ "# boilerplate code\n", "import tensorflow as tf\n", "print(tf.__version__)" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "scrolled": true }, "outputs": [], "source": [ "from tensorflow import keras\n", "\n", "import cv2 #python -m pip install opencv-python\n", "import numpy as np\n", "from tensorflow.keras.datasets import cifar10\n", "from tensorflow.keras.utils import to_categorical\n", "from tensorflow.keras.models import Model\n", "from tensorflow.keras.layers import Dense, GlobalAveragePooling2D\n", "\n", "from tensorflow.keras.optimizers import SGD\n", "from tensorflow.keras.callbacks import LearningRateScheduler\n", "\n", "import math" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "scrolled": true }, "outputs": [], "source": [ "num_classes = 10" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "scrolled": true }, "outputs": [], "source": [ "def load_cifar10_data(img_rows, img_cols):\n", " # Load cifar10 training and test sets\n", " (X_train, Y_train), (X_test, Y_test) = cifar10.load_data()\n", "\n", " # Resize training images\n", " X_train = np.array([cv2.resize(img, (img_rows, img_cols)) for img in X_train[:, :, :, :]])\n", " X_test = np.array([cv2.resize(img, (img_rows, img_cols)) for img in X_test[:, :, :, :]])\n", "\n", "# X_train = X_train.astype('float16') / 255.0\n", "# X_test = X_test.astype('float16') / 255.0\n", "\n", " # Transform targets to keras compatible format\n", " Y_train = to_categorical(Y_train, num_classes)\n", " Y_test = to_categorical(Y_test, num_classes)\n", "\n", " print(\"X_train: {0}\".format(X_train.shape))\n", " print(\"Y_train: {0}\".format(Y_train.shape))\n", " print(\"X_test: {0}\".format(X_test.shape))\n", " print(\"Y_test: {0}\".format(Y_test.shape))\n", "\n", " return X_train, Y_train, X_test, Y_test" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "X_train: (50000, 299, 299, 3)\n", "Y_train: (50000, 10)\n", "X_test: (10000, 299, 299, 3)\n", "Y_test: (10000, 10)\n" ] } ], "source": [ "X_train, y_train, X_test, y_test = load_cifar10_data(299, 299)" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "scrolled": true }, "outputs": [], "source": [ "from tensorflow.keras.applications.inception_v3 import InceptionV3\n", "\n", "def build_model(nb_classes):\n", " base_model = InceptionV3(weights='imagenet', include_top=False, input_shape=[299, 299, 3])\n", "\n", " # add a global spatial average pooling layer\n", " x = base_model.output\n", " x = GlobalAveragePooling2D()(x)\n", " \n", " # let's add a fully-connected layer\n", " x = Dense(1024, activation='relu')(x)\n", " # and a logistic layer\n", "\n", " predictions = Dense(nb_classes, activation='softmax')(x)\n", "\n", " # this is the model we will train\n", " model = Model(inputs=base_model.input, outputs=predictions)\n", "\n", " # first: train only the top layers (which were randomly initialized)\n", " # i.e. freeze all convolutional InceptionV3 layers\n", " for layer in base_model.layers:\n", " layer.trainable = False\n", "\n", " return model" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "scrolled": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"model_1\"\n", "__________________________________________________________________________________________________\n", "Layer (type) Output Shape Param # Connected to \n", "==================================================================================================\n", "input_2 (InputLayer) [(None, 299, 299, 3) 0 \n", "__________________________________________________________________________________________________\n", "conv2d_94 (Conv2D) (None, 149, 149, 32) 864 input_2[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_94 (Batc (None, 149, 149, 32) 96 conv2d_94[0][0] \n", "__________________________________________________________________________________________________\n", "activation_94 (Activation) (None, 149, 149, 32) 0 batch_normalization_v1_94[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_95 (Conv2D) (None, 147, 147, 32) 9216 activation_94[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_95 (Batc (None, 147, 147, 32) 96 conv2d_95[0][0] \n", "__________________________________________________________________________________________________\n", "activation_95 (Activation) (None, 147, 147, 32) 0 batch_normalization_v1_95[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_96 (Conv2D) (None, 147, 147, 64) 18432 activation_95[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_96 (Batc (None, 147, 147, 64) 192 conv2d_96[0][0] \n", "__________________________________________________________________________________________________\n", "activation_96 (Activation) (None, 147, 147, 64) 0 batch_normalization_v1_96[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_4 (MaxPooling2D) (None, 73, 73, 64) 0 activation_96[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_97 (Conv2D) (None, 73, 73, 80) 5120 max_pooling2d_4[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_97 (Batc (None, 73, 73, 80) 240 conv2d_97[0][0] \n", "__________________________________________________________________________________________________\n", "activation_97 (Activation) (None, 73, 73, 80) 0 batch_normalization_v1_97[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_98 (Conv2D) (None, 71, 71, 192) 138240 activation_97[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_98 (Batc (None, 71, 71, 192) 576 conv2d_98[0][0] \n", "__________________________________________________________________________________________________\n", "activation_98 (Activation) (None, 71, 71, 192) 0 batch_normalization_v1_98[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_5 (MaxPooling2D) (None, 35, 35, 192) 0 activation_98[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_102 (Conv2D) (None, 35, 35, 64) 12288 max_pooling2d_5[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_102 (Bat (None, 35, 35, 64) 192 conv2d_102[0][0] \n", "__________________________________________________________________________________________________\n", "activation_102 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_102[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_100 (Conv2D) (None, 35, 35, 48) 9216 max_pooling2d_5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_103 (Conv2D) (None, 35, 35, 96) 55296 activation_102[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_100 (Bat (None, 35, 35, 48) 144 conv2d_100[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_103 (Bat (None, 35, 35, 96) 288 conv2d_103[0][0] \n", "__________________________________________________________________________________________________\n", "activation_100 (Activation) (None, 35, 35, 48) 0 batch_normalization_v1_100[0][0] \n", "__________________________________________________________________________________________________\n", "activation_103 (Activation) (None, 35, 35, 96) 0 batch_normalization_v1_103[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_9 (AveragePoo (None, 35, 35, 192) 0 max_pooling2d_5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_99 (Conv2D) (None, 35, 35, 64) 12288 max_pooling2d_5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_101 (Conv2D) (None, 35, 35, 64) 76800 activation_100[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_104 (Conv2D) (None, 35, 35, 96) 82944 activation_103[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_105 (Conv2D) (None, 35, 35, 32) 6144 average_pooling2d_9[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_99 (Batc (None, 35, 35, 64) 192 conv2d_99[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_101 (Bat (None, 35, 35, 64) 192 conv2d_101[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_104 (Bat (None, 35, 35, 96) 288 conv2d_104[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_105 (Bat (None, 35, 35, 32) 96 conv2d_105[0][0] \n", "__________________________________________________________________________________________________\n", "activation_99 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_99[0][0] \n", "__________________________________________________________________________________________________\n", "activation_101 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_101[0][0] \n", "__________________________________________________________________________________________________\n", "activation_104 (Activation) (None, 35, 35, 96) 0 batch_normalization_v1_104[0][0] \n", "__________________________________________________________________________________________________\n", "activation_105 (Activation) (None, 35, 35, 32) 0 batch_normalization_v1_105[0][0] \n", "__________________________________________________________________________________________________\n", "mixed0 (Concatenate) (None, 35, 35, 256) 0 activation_99[0][0] \n", " activation_101[0][0] \n", " activation_104[0][0] \n", " activation_105[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_109 (Conv2D) (None, 35, 35, 64) 16384 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_109 (Bat (None, 35, 35, 64) 192 conv2d_109[0][0] \n", "__________________________________________________________________________________________________\n", "activation_109 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_109[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_107 (Conv2D) (None, 35, 35, 48) 12288 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_110 (Conv2D) (None, 35, 35, 96) 55296 activation_109[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_107 (Bat (None, 35, 35, 48) 144 conv2d_107[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_110 (Bat (None, 35, 35, 96) 288 conv2d_110[0][0] \n", "__________________________________________________________________________________________________\n", "activation_107 (Activation) (None, 35, 35, 48) 0 batch_normalization_v1_107[0][0] \n", "__________________________________________________________________________________________________\n", "activation_110 (Activation) (None, 35, 35, 96) 0 batch_normalization_v1_110[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_10 (AveragePo (None, 35, 35, 256) 0 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_106 (Conv2D) (None, 35, 35, 64) 16384 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_108 (Conv2D) (None, 35, 35, 64) 76800 activation_107[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_111 (Conv2D) (None, 35, 35, 96) 82944 activation_110[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_112 (Conv2D) (None, 35, 35, 64) 16384 average_pooling2d_10[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_106 (Bat (None, 35, 35, 64) 192 conv2d_106[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_108 (Bat (None, 35, 35, 64) 192 conv2d_108[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_111 (Bat (None, 35, 35, 96) 288 conv2d_111[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_112 (Bat (None, 35, 35, 64) 192 conv2d_112[0][0] \n", "__________________________________________________________________________________________________\n", "activation_106 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_106[0][0] \n", "__________________________________________________________________________________________________\n", "activation_108 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_108[0][0] \n", "__________________________________________________________________________________________________\n", "activation_111 (Activation) (None, 35, 35, 96) 0 batch_normalization_v1_111[0][0] \n", "__________________________________________________________________________________________________\n", "activation_112 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_112[0][0] \n", "__________________________________________________________________________________________________\n", "mixed1 (Concatenate) (None, 35, 35, 288) 0 activation_106[0][0] \n", " activation_108[0][0] \n", " activation_111[0][0] \n", " activation_112[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_116 (Conv2D) (None, 35, 35, 64) 18432 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_116 (Bat (None, 35, 35, 64) 192 conv2d_116[0][0] \n", "__________________________________________________________________________________________________\n", "activation_116 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_116[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_114 (Conv2D) (None, 35, 35, 48) 13824 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_117 (Conv2D) (None, 35, 35, 96) 55296 activation_116[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_114 (Bat (None, 35, 35, 48) 144 conv2d_114[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_117 (Bat (None, 35, 35, 96) 288 conv2d_117[0][0] \n", "__________________________________________________________________________________________________\n", "activation_114 (Activation) (None, 35, 35, 48) 0 batch_normalization_v1_114[0][0] \n", "__________________________________________________________________________________________________\n", "activation_117 (Activation) (None, 35, 35, 96) 0 batch_normalization_v1_117[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_11 (AveragePo (None, 35, 35, 288) 0 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_113 (Conv2D) (None, 35, 35, 64) 18432 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_115 (Conv2D) (None, 35, 35, 64) 76800 activation_114[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_118 (Conv2D) (None, 35, 35, 96) 82944 activation_117[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_119 (Conv2D) (None, 35, 35, 64) 18432 average_pooling2d_11[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_113 (Bat (None, 35, 35, 64) 192 conv2d_113[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_115 (Bat (None, 35, 35, 64) 192 conv2d_115[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_118 (Bat (None, 35, 35, 96) 288 conv2d_118[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_119 (Bat (None, 35, 35, 64) 192 conv2d_119[0][0] \n", "__________________________________________________________________________________________________\n", "activation_113 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_113[0][0] \n", "__________________________________________________________________________________________________\n", "activation_115 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_115[0][0] \n", "__________________________________________________________________________________________________\n", "activation_118 (Activation) (None, 35, 35, 96) 0 batch_normalization_v1_118[0][0] \n", "__________________________________________________________________________________________________\n", "activation_119 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_119[0][0] \n", "__________________________________________________________________________________________________\n", "mixed2 (Concatenate) (None, 35, 35, 288) 0 activation_113[0][0] \n", " activation_115[0][0] \n", " activation_118[0][0] \n", " activation_119[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_121 (Conv2D) (None, 35, 35, 64) 18432 mixed2[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_121 (Bat (None, 35, 35, 64) 192 conv2d_121[0][0] \n", "__________________________________________________________________________________________________\n", "activation_121 (Activation) (None, 35, 35, 64) 0 batch_normalization_v1_121[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_122 (Conv2D) (None, 35, 35, 96) 55296 activation_121[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_122 (Bat (None, 35, 35, 96) 288 conv2d_122[0][0] \n", "__________________________________________________________________________________________________\n", "activation_122 (Activation) (None, 35, 35, 96) 0 batch_normalization_v1_122[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_120 (Conv2D) (None, 17, 17, 384) 995328 mixed2[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_123 (Conv2D) (None, 17, 17, 96) 82944 activation_122[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_120 (Bat (None, 17, 17, 384) 1152 conv2d_120[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_123 (Bat (None, 17, 17, 96) 288 conv2d_123[0][0] \n", "__________________________________________________________________________________________________\n", "activation_120 (Activation) (None, 17, 17, 384) 0 batch_normalization_v1_120[0][0] \n", "__________________________________________________________________________________________________\n", "activation_123 (Activation) (None, 17, 17, 96) 0 batch_normalization_v1_123[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_6 (MaxPooling2D) (None, 17, 17, 288) 0 mixed2[0][0] \n", "__________________________________________________________________________________________________\n", "mixed3 (Concatenate) (None, 17, 17, 768) 0 activation_120[0][0] \n", " activation_123[0][0] \n", " max_pooling2d_6[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_128 (Conv2D) (None, 17, 17, 128) 98304 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_128 (Bat (None, 17, 17, 128) 384 conv2d_128[0][0] \n", "__________________________________________________________________________________________________\n", "activation_128 (Activation) (None, 17, 17, 128) 0 batch_normalization_v1_128[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_129 (Conv2D) (None, 17, 17, 128) 114688 activation_128[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_129 (Bat (None, 17, 17, 128) 384 conv2d_129[0][0] \n", "__________________________________________________________________________________________________\n", "activation_129 (Activation) (None, 17, 17, 128) 0 batch_normalization_v1_129[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_125 (Conv2D) (None, 17, 17, 128) 98304 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_130 (Conv2D) (None, 17, 17, 128) 114688 activation_129[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_125 (Bat (None, 17, 17, 128) 384 conv2d_125[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_130 (Bat (None, 17, 17, 128) 384 conv2d_130[0][0] \n", "__________________________________________________________________________________________________\n", "activation_125 (Activation) (None, 17, 17, 128) 0 batch_normalization_v1_125[0][0] \n", "__________________________________________________________________________________________________\n", "activation_130 (Activation) (None, 17, 17, 128) 0 batch_normalization_v1_130[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_126 (Conv2D) (None, 17, 17, 128) 114688 activation_125[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_131 (Conv2D) (None, 17, 17, 128) 114688 activation_130[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_126 (Bat (None, 17, 17, 128) 384 conv2d_126[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_131 (Bat (None, 17, 17, 128) 384 conv2d_131[0][0] \n", "__________________________________________________________________________________________________\n", "activation_126 (Activation) (None, 17, 17, 128) 0 batch_normalization_v1_126[0][0] \n", "__________________________________________________________________________________________________\n", "activation_131 (Activation) (None, 17, 17, 128) 0 batch_normalization_v1_131[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_12 (AveragePo (None, 17, 17, 768) 0 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_124 (Conv2D) (None, 17, 17, 192) 147456 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_127 (Conv2D) (None, 17, 17, 192) 172032 activation_126[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_132 (Conv2D) (None, 17, 17, 192) 172032 activation_131[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_133 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_12[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_124 (Bat (None, 17, 17, 192) 576 conv2d_124[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_127 (Bat (None, 17, 17, 192) 576 conv2d_127[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_132 (Bat (None, 17, 17, 192) 576 conv2d_132[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_133 (Bat (None, 17, 17, 192) 576 conv2d_133[0][0] \n", "__________________________________________________________________________________________________\n", "activation_124 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_124[0][0] \n", "__________________________________________________________________________________________________\n", "activation_127 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_127[0][0] \n", "__________________________________________________________________________________________________\n", "activation_132 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_132[0][0] \n", "__________________________________________________________________________________________________\n", "activation_133 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_133[0][0] \n", "__________________________________________________________________________________________________\n", "mixed4 (Concatenate) (None, 17, 17, 768) 0 activation_124[0][0] \n", " activation_127[0][0] \n", " activation_132[0][0] \n", " activation_133[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_138 (Conv2D) (None, 17, 17, 160) 122880 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_138 (Bat (None, 17, 17, 160) 480 conv2d_138[0][0] \n", "__________________________________________________________________________________________________\n", "activation_138 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_138[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_139 (Conv2D) (None, 17, 17, 160) 179200 activation_138[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_139 (Bat (None, 17, 17, 160) 480 conv2d_139[0][0] \n", "__________________________________________________________________________________________________\n", "activation_139 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_139[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_135 (Conv2D) (None, 17, 17, 160) 122880 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_140 (Conv2D) (None, 17, 17, 160) 179200 activation_139[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_135 (Bat (None, 17, 17, 160) 480 conv2d_135[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_140 (Bat (None, 17, 17, 160) 480 conv2d_140[0][0] \n", "__________________________________________________________________________________________________\n", "activation_135 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_135[0][0] \n", "__________________________________________________________________________________________________\n", "activation_140 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_140[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_136 (Conv2D) (None, 17, 17, 160) 179200 activation_135[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_141 (Conv2D) (None, 17, 17, 160) 179200 activation_140[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_136 (Bat (None, 17, 17, 160) 480 conv2d_136[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_141 (Bat (None, 17, 17, 160) 480 conv2d_141[0][0] \n", "__________________________________________________________________________________________________\n", "activation_136 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_136[0][0] \n", "__________________________________________________________________________________________________\n", "activation_141 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_141[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_13 (AveragePo (None, 17, 17, 768) 0 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_134 (Conv2D) (None, 17, 17, 192) 147456 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_137 (Conv2D) (None, 17, 17, 192) 215040 activation_136[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_142 (Conv2D) (None, 17, 17, 192) 215040 activation_141[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_143 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_13[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_134 (Bat (None, 17, 17, 192) 576 conv2d_134[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_137 (Bat (None, 17, 17, 192) 576 conv2d_137[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_142 (Bat (None, 17, 17, 192) 576 conv2d_142[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_143 (Bat (None, 17, 17, 192) 576 conv2d_143[0][0] \n", "__________________________________________________________________________________________________\n", "activation_134 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_134[0][0] \n", "__________________________________________________________________________________________________\n", "activation_137 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_137[0][0] \n", "__________________________________________________________________________________________________\n", "activation_142 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_142[0][0] \n", "__________________________________________________________________________________________________\n", "activation_143 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_143[0][0] \n", "__________________________________________________________________________________________________\n", "mixed5 (Concatenate) (None, 17, 17, 768) 0 activation_134[0][0] \n", " activation_137[0][0] \n", " activation_142[0][0] \n", " activation_143[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_148 (Conv2D) (None, 17, 17, 160) 122880 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_148 (Bat (None, 17, 17, 160) 480 conv2d_148[0][0] \n", "__________________________________________________________________________________________________\n", "activation_148 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_148[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_149 (Conv2D) (None, 17, 17, 160) 179200 activation_148[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_149 (Bat (None, 17, 17, 160) 480 conv2d_149[0][0] \n", "__________________________________________________________________________________________________\n", "activation_149 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_149[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_145 (Conv2D) (None, 17, 17, 160) 122880 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_150 (Conv2D) (None, 17, 17, 160) 179200 activation_149[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_145 (Bat (None, 17, 17, 160) 480 conv2d_145[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_150 (Bat (None, 17, 17, 160) 480 conv2d_150[0][0] \n", "__________________________________________________________________________________________________\n", "activation_145 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_145[0][0] \n", "__________________________________________________________________________________________________\n", "activation_150 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_150[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_146 (Conv2D) (None, 17, 17, 160) 179200 activation_145[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_151 (Conv2D) (None, 17, 17, 160) 179200 activation_150[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_146 (Bat (None, 17, 17, 160) 480 conv2d_146[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_151 (Bat (None, 17, 17, 160) 480 conv2d_151[0][0] \n", "__________________________________________________________________________________________________\n", "activation_146 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_146[0][0] \n", "__________________________________________________________________________________________________\n", "activation_151 (Activation) (None, 17, 17, 160) 0 batch_normalization_v1_151[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_14 (AveragePo (None, 17, 17, 768) 0 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_144 (Conv2D) (None, 17, 17, 192) 147456 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_147 (Conv2D) (None, 17, 17, 192) 215040 activation_146[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_152 (Conv2D) (None, 17, 17, 192) 215040 activation_151[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_153 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_14[0][0] \n", "__________________________________________________________________________________________________\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "batch_normalization_v1_144 (Bat (None, 17, 17, 192) 576 conv2d_144[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_147 (Bat (None, 17, 17, 192) 576 conv2d_147[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_152 (Bat (None, 17, 17, 192) 576 conv2d_152[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_153 (Bat (None, 17, 17, 192) 576 conv2d_153[0][0] \n", "__________________________________________________________________________________________________\n", "activation_144 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_144[0][0] \n", "__________________________________________________________________________________________________\n", "activation_147 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_147[0][0] \n", "__________________________________________________________________________________________________\n", "activation_152 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_152[0][0] \n", "__________________________________________________________________________________________________\n", "activation_153 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_153[0][0] \n", "__________________________________________________________________________________________________\n", "mixed6 (Concatenate) (None, 17, 17, 768) 0 activation_144[0][0] \n", " activation_147[0][0] \n", " activation_152[0][0] \n", " activation_153[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_158 (Conv2D) (None, 17, 17, 192) 147456 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_158 (Bat (None, 17, 17, 192) 576 conv2d_158[0][0] \n", "__________________________________________________________________________________________________\n", "activation_158 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_158[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_159 (Conv2D) (None, 17, 17, 192) 258048 activation_158[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_159 (Bat (None, 17, 17, 192) 576 conv2d_159[0][0] \n", "__________________________________________________________________________________________________\n", "activation_159 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_159[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_155 (Conv2D) (None, 17, 17, 192) 147456 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_160 (Conv2D) (None, 17, 17, 192) 258048 activation_159[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_155 (Bat (None, 17, 17, 192) 576 conv2d_155[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_160 (Bat (None, 17, 17, 192) 576 conv2d_160[0][0] \n", "__________________________________________________________________________________________________\n", "activation_155 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_155[0][0] \n", "__________________________________________________________________________________________________\n", "activation_160 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_160[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_156 (Conv2D) (None, 17, 17, 192) 258048 activation_155[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_161 (Conv2D) (None, 17, 17, 192) 258048 activation_160[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_156 (Bat (None, 17, 17, 192) 576 conv2d_156[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_161 (Bat (None, 17, 17, 192) 576 conv2d_161[0][0] \n", "__________________________________________________________________________________________________\n", "activation_156 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_156[0][0] \n", "__________________________________________________________________________________________________\n", "activation_161 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_161[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_15 (AveragePo (None, 17, 17, 768) 0 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_154 (Conv2D) (None, 17, 17, 192) 147456 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_157 (Conv2D) (None, 17, 17, 192) 258048 activation_156[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_162 (Conv2D) (None, 17, 17, 192) 258048 activation_161[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_163 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_15[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_154 (Bat (None, 17, 17, 192) 576 conv2d_154[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_157 (Bat (None, 17, 17, 192) 576 conv2d_157[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_162 (Bat (None, 17, 17, 192) 576 conv2d_162[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_163 (Bat (None, 17, 17, 192) 576 conv2d_163[0][0] \n", "__________________________________________________________________________________________________\n", "activation_154 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_154[0][0] \n", "__________________________________________________________________________________________________\n", "activation_157 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_157[0][0] \n", "__________________________________________________________________________________________________\n", "activation_162 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_162[0][0] \n", "__________________________________________________________________________________________________\n", "activation_163 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_163[0][0] \n", "__________________________________________________________________________________________________\n", "mixed7 (Concatenate) (None, 17, 17, 768) 0 activation_154[0][0] \n", " activation_157[0][0] \n", " activation_162[0][0] \n", " activation_163[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_166 (Conv2D) (None, 17, 17, 192) 147456 mixed7[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_166 (Bat (None, 17, 17, 192) 576 conv2d_166[0][0] \n", "__________________________________________________________________________________________________\n", "activation_166 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_166[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_167 (Conv2D) (None, 17, 17, 192) 258048 activation_166[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_167 (Bat (None, 17, 17, 192) 576 conv2d_167[0][0] \n", "__________________________________________________________________________________________________\n", "activation_167 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_167[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_164 (Conv2D) (None, 17, 17, 192) 147456 mixed7[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_168 (Conv2D) (None, 17, 17, 192) 258048 activation_167[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_164 (Bat (None, 17, 17, 192) 576 conv2d_164[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_168 (Bat (None, 17, 17, 192) 576 conv2d_168[0][0] \n", "__________________________________________________________________________________________________\n", "activation_164 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_164[0][0] \n", "__________________________________________________________________________________________________\n", "activation_168 (Activation) (None, 17, 17, 192) 0 batch_normalization_v1_168[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_165 (Conv2D) (None, 8, 8, 320) 552960 activation_164[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_169 (Conv2D) (None, 8, 8, 192) 331776 activation_168[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_165 (Bat (None, 8, 8, 320) 960 conv2d_165[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_169 (Bat (None, 8, 8, 192) 576 conv2d_169[0][0] \n", "__________________________________________________________________________________________________\n", "activation_165 (Activation) (None, 8, 8, 320) 0 batch_normalization_v1_165[0][0] \n", "__________________________________________________________________________________________________\n", "activation_169 (Activation) (None, 8, 8, 192) 0 batch_normalization_v1_169[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_7 (MaxPooling2D) (None, 8, 8, 768) 0 mixed7[0][0] \n", "__________________________________________________________________________________________________\n", "mixed8 (Concatenate) (None, 8, 8, 1280) 0 activation_165[0][0] \n", " activation_169[0][0] \n", " max_pooling2d_7[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_174 (Conv2D) (None, 8, 8, 448) 573440 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_174 (Bat (None, 8, 8, 448) 1344 conv2d_174[0][0] \n", "__________________________________________________________________________________________________\n", "activation_174 (Activation) (None, 8, 8, 448) 0 batch_normalization_v1_174[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_171 (Conv2D) (None, 8, 8, 384) 491520 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_175 (Conv2D) (None, 8, 8, 384) 1548288 activation_174[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_171 (Bat (None, 8, 8, 384) 1152 conv2d_171[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_175 (Bat (None, 8, 8, 384) 1152 conv2d_175[0][0] \n", "__________________________________________________________________________________________________\n", "activation_171 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_171[0][0] \n", "__________________________________________________________________________________________________\n", "activation_175 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_175[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_172 (Conv2D) (None, 8, 8, 384) 442368 activation_171[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_173 (Conv2D) (None, 8, 8, 384) 442368 activation_171[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_176 (Conv2D) (None, 8, 8, 384) 442368 activation_175[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_177 (Conv2D) (None, 8, 8, 384) 442368 activation_175[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_16 (AveragePo (None, 8, 8, 1280) 0 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_170 (Conv2D) (None, 8, 8, 320) 409600 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_172 (Bat (None, 8, 8, 384) 1152 conv2d_172[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_173 (Bat (None, 8, 8, 384) 1152 conv2d_173[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_176 (Bat (None, 8, 8, 384) 1152 conv2d_176[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_177 (Bat (None, 8, 8, 384) 1152 conv2d_177[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_178 (Conv2D) (None, 8, 8, 192) 245760 average_pooling2d_16[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_170 (Bat (None, 8, 8, 320) 960 conv2d_170[0][0] \n", "__________________________________________________________________________________________________\n", "activation_172 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_172[0][0] \n", "__________________________________________________________________________________________________\n", "activation_173 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_173[0][0] \n", "__________________________________________________________________________________________________\n", "activation_176 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_176[0][0] \n", "__________________________________________________________________________________________________\n", "activation_177 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_177[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_178 (Bat (None, 8, 8, 192) 576 conv2d_178[0][0] \n", "__________________________________________________________________________________________________\n", "activation_170 (Activation) (None, 8, 8, 320) 0 batch_normalization_v1_170[0][0] \n", "__________________________________________________________________________________________________\n", "mixed9_0 (Concatenate) (None, 8, 8, 768) 0 activation_172[0][0] \n", " activation_173[0][0] \n", "__________________________________________________________________________________________________\n", "concatenate_2 (Concatenate) (None, 8, 8, 768) 0 activation_176[0][0] \n", " activation_177[0][0] \n", "__________________________________________________________________________________________________\n", "activation_178 (Activation) (None, 8, 8, 192) 0 batch_normalization_v1_178[0][0] \n", "__________________________________________________________________________________________________\n", "mixed9 (Concatenate) (None, 8, 8, 2048) 0 activation_170[0][0] \n", " mixed9_0[0][0] \n", " concatenate_2[0][0] \n", " activation_178[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_183 (Conv2D) (None, 8, 8, 448) 917504 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_183 (Bat (None, 8, 8, 448) 1344 conv2d_183[0][0] \n", "__________________________________________________________________________________________________\n", "activation_183 (Activation) (None, 8, 8, 448) 0 batch_normalization_v1_183[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_180 (Conv2D) (None, 8, 8, 384) 786432 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_184 (Conv2D) (None, 8, 8, 384) 1548288 activation_183[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_180 (Bat (None, 8, 8, 384) 1152 conv2d_180[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_184 (Bat (None, 8, 8, 384) 1152 conv2d_184[0][0] \n", "__________________________________________________________________________________________________\n", "activation_180 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_180[0][0] \n", "__________________________________________________________________________________________________\n", "activation_184 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_184[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_181 (Conv2D) (None, 8, 8, 384) 442368 activation_180[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_182 (Conv2D) (None, 8, 8, 384) 442368 activation_180[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_185 (Conv2D) (None, 8, 8, 384) 442368 activation_184[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_186 (Conv2D) (None, 8, 8, 384) 442368 activation_184[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_17 (AveragePo (None, 8, 8, 2048) 0 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_179 (Conv2D) (None, 8, 8, 320) 655360 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_181 (Bat (None, 8, 8, 384) 1152 conv2d_181[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_182 (Bat (None, 8, 8, 384) 1152 conv2d_182[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_185 (Bat (None, 8, 8, 384) 1152 conv2d_185[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_186 (Bat (None, 8, 8, 384) 1152 conv2d_186[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_187 (Conv2D) (None, 8, 8, 192) 393216 average_pooling2d_17[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_179 (Bat (None, 8, 8, 320) 960 conv2d_179[0][0] \n", "__________________________________________________________________________________________________\n", "activation_181 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_181[0][0] \n", "__________________________________________________________________________________________________\n", "activation_182 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_182[0][0] \n", "__________________________________________________________________________________________________\n", "activation_185 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_185[0][0] \n", "__________________________________________________________________________________________________\n", "activation_186 (Activation) (None, 8, 8, 384) 0 batch_normalization_v1_186[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_v1_187 (Bat (None, 8, 8, 192) 576 conv2d_187[0][0] \n", "__________________________________________________________________________________________________\n", "activation_179 (Activation) (None, 8, 8, 320) 0 batch_normalization_v1_179[0][0] \n", "__________________________________________________________________________________________________\n", "mixed9_1 (Concatenate) (None, 8, 8, 768) 0 activation_181[0][0] \n", " activation_182[0][0] \n", "__________________________________________________________________________________________________\n", "concatenate_3 (Concatenate) (None, 8, 8, 768) 0 activation_185[0][0] \n", " activation_186[0][0] \n", "__________________________________________________________________________________________________\n", "activation_187 (Activation) (None, 8, 8, 192) 0 batch_normalization_v1_187[0][0] \n", "__________________________________________________________________________________________________\n", "mixed10 (Concatenate) (None, 8, 8, 2048) 0 activation_179[0][0] \n", " mixed9_1[0][0] \n", " concatenate_3[0][0] \n", " activation_187[0][0] \n", "__________________________________________________________________________________________________\n", "global_average_pooling2d_1 (Glo (None, 2048) 0 mixed10[0][0] \n", "__________________________________________________________________________________________________\n", "dense_2 (Dense) (None, 1024) 2098176 global_average_pooling2d_1[0][0] \n", "__________________________________________________________________________________________________\n", "dense_3 (Dense) (None, 10) 10250 dense_2[0][0] \n", "==================================================================================================\n", "Total params: 23,911,210\n", "Trainable params: 2,108,426\n", "Non-trainable params: 21,802,784\n", "__________________________________________________________________________________________________\n" ] } ], "source": [ "model = build_model(10)\n", "model.summary()" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "scrolled": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Train on 50000 samples, validate on 10000 samples\n", "\n", "Epoch 00001: LearningRateScheduler reducing learning rate to 0.01.\n", "Epoch 1/35\n", "50000/50000 [==============================] - 173s 3ms/sample - loss: 0.9602 - accuracy: 0.6899 - val_loss: 75.5155 - val_accuracy: 0.1106\n", "\n", "Epoch 00002: LearningRateScheduler reducing learning rate to 0.01.\n", "Epoch 2/35\n", "50000/50000 [==============================] - 164s 3ms/sample - loss: 0.6719 - accuracy: 0.7707 - val_loss: 79.6195 - val_accuracy: 0.1105\n", "\n", "Epoch 00003: LearningRateScheduler reducing learning rate to 0.01.\n", "Epoch 3/35\n", "50000/50000 [==============================] - 167s 3ms/sample - loss: 0.6184 - accuracy: 0.7882 - val_loss: 99.9694 - val_accuracy: 0.1073\n", "\n", "Epoch 00004: LearningRateScheduler reducing learning rate to 0.01.\n", "Epoch 4/35\n", "50000/50000 [==============================] - 168s 3ms/sample - loss: 0.5885 - accuracy: 0.7982 - val_loss: 95.5520 - val_accuracy: 0.1095\n", "\n", "Epoch 00005: LearningRateScheduler reducing learning rate to 0.01.\n", "Epoch 5/35\n", "50000/50000 [==============================] - 168s 3ms/sample - loss: 0.5680 - accuracy: 0.8039 - val_loss: 96.9085 - val_accuracy: 0.1118\n", "\n", "Epoch 00006: LearningRateScheduler reducing learning rate to 0.01.\n", "Epoch 6/35\n", "50000/50000 [==============================] - 167s 3ms/sample - loss: 0.5489 - accuracy: 0.8102 - val_loss: 120.0123 - val_accuracy: 0.1062\n", "\n", "Epoch 00007: LearningRateScheduler reducing learning rate to 0.01.\n", "Epoch 7/35\n", "50000/50000 [==============================] - 168s 3ms/sample - loss: 0.5322 - accuracy: 0.8161 - val_loss: 120.5379 - val_accuracy: 0.1116\n", "\n", "Epoch 00008: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 8/35\n", "50000/50000 [==============================] - 167s 3ms/sample - loss: 0.5186 - accuracy: 0.8216 - val_loss: 137.5567 - val_accuracy: 0.1023\n", "\n", "Epoch 00009: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 9/35\n", "50000/50000 [==============================] - 168s 3ms/sample - loss: 0.5067 - accuracy: 0.8250 - val_loss: 135.9308 - val_accuracy: 0.1071\n", "\n", "Epoch 00010: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 10/35\n", "50000/50000 [==============================] - 168s 3ms/sample - loss: 0.4931 - accuracy: 0.8290 - val_loss: 135.0156 - val_accuracy: 0.1075\n", "\n", "Epoch 00011: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 11/35\n", "50000/50000 [==============================] - 166s 3ms/sample - loss: 0.4871 - accuracy: 0.8297 - val_loss: 119.6107 - val_accuracy: 0.1249\n", "\n", "Epoch 00012: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 12/35\n", "50000/50000 [==============================] - 167s 3ms/sample - loss: 0.4720 - accuracy: 0.8368 - val_loss: 133.6296 - val_accuracy: 0.1099\n", "\n", "Epoch 00013: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 13/35\n", "50000/50000 [==============================] - 166s 3ms/sample - loss: 0.4627 - accuracy: 0.8401 - val_loss: 137.7038 - val_accuracy: 0.1115\n", "\n", "Epoch 00014: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 14/35\n", "50000/50000 [==============================] - 165s 3ms/sample - loss: 0.4529 - accuracy: 0.8440 - val_loss: 139.3622 - val_accuracy: 0.1120\n", "\n", "Epoch 00015: LearningRateScheduler reducing learning rate to 0.0096.\n", "Epoch 15/35\n", "50000/50000 [==============================] - 164s 3ms/sample - loss: 0.4383 - accuracy: 0.8471 - val_loss: 159.8944 - val_accuracy: 0.1095\n", "\n", "Epoch 00016: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 16/35\n", "50000/50000 [==============================] - 164s 3ms/sample - loss: 0.4269 - accuracy: 0.8524 - val_loss: 156.2141 - val_accuracy: 0.1092\n", "\n", "Epoch 00017: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 17/35\n", "50000/50000 [==============================] - 165s 3ms/sample - loss: 0.4172 - accuracy: 0.8556 - val_loss: 141.9600 - val_accuracy: 0.1108\n", "\n", "Epoch 00018: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 18/35\n", "50000/50000 [==============================] - 164s 3ms/sample - loss: 0.4067 - accuracy: 0.8596 - val_loss: 158.9483 - val_accuracy: 0.1139\n", "\n", "Epoch 00019: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 19/35\n", "50000/50000 [==============================] - 165s 3ms/sample - loss: 0.3942 - accuracy: 0.8645 - val_loss: 157.3088 - val_accuracy: 0.1111\n", "\n", "Epoch 00020: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 20/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.3854 - accuracy: 0.8674 - val_loss: 171.4200 - val_accuracy: 0.1055\n", "\n", "Epoch 00021: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 21/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.3739 - accuracy: 0.8720 - val_loss: 149.3375 - val_accuracy: 0.1153\n", "\n", "Epoch 00022: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 22/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.3622 - accuracy: 0.8768 - val_loss: 157.7161 - val_accuracy: 0.1247\n", "\n", "Epoch 00023: LearningRateScheduler reducing learning rate to 0.009216.\n", "Epoch 23/35\n", "50000/50000 [==============================] - 163s 3ms/sample - loss: 0.3507 - accuracy: 0.8794 - val_loss: 157.6633 - val_accuracy: 0.1165\n", "\n", "Epoch 00024: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 24/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.3390 - accuracy: 0.8849 - val_loss: 167.3696 - val_accuracy: 0.1098\n", "\n", "Epoch 00025: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 25/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.3278 - accuracy: 0.8920 - val_loss: 166.1665 - val_accuracy: 0.1176\n", "\n", "Epoch 00026: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 26/35\n", "50000/50000 [==============================] - 163s 3ms/sample - loss: 0.3165 - accuracy: 0.8953 - val_loss: 162.0232 - val_accuracy: 0.1153\n", "\n", "Epoch 00027: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 27/35\n", "50000/50000 [==============================] - 161s 3ms/sample - loss: 0.3059 - accuracy: 0.9012 - val_loss: 158.9673 - val_accuracy: 0.1154\n", "\n", "Epoch 00028: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 28/35\n", "50000/50000 [==============================] - 163s 3ms/sample - loss: 0.2946 - accuracy: 0.9040 - val_loss: 168.1538 - val_accuracy: 0.1211\n", "\n", "Epoch 00029: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 29/35\n", "50000/50000 [==============================] - 161s 3ms/sample - loss: 0.2847 - accuracy: 0.9083 - val_loss: 162.2235 - val_accuracy: 0.1226\n", "\n", "Epoch 00030: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 30/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.2734 - accuracy: 0.9118 - val_loss: 171.6387 - val_accuracy: 0.1206\n", "\n", "Epoch 00031: LearningRateScheduler reducing learning rate to 0.008847359999999999.\n", "Epoch 31/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.2630 - accuracy: 0.9166 - val_loss: 185.3488 - val_accuracy: 0.1155\n", "\n", "Epoch 00032: LearningRateScheduler reducing learning rate to 0.008493465599999998.\n", "Epoch 32/35\n", "50000/50000 [==============================] - 161s 3ms/sample - loss: 0.2500 - accuracy: 0.9231 - val_loss: 187.1196 - val_accuracy: 0.1176\n", "\n", "Epoch 00033: LearningRateScheduler reducing learning rate to 0.008493465599999998.\n", "Epoch 33/35\n", "50000/50000 [==============================] - 161s 3ms/sample - loss: 0.2411 - accuracy: 0.9261 - val_loss: 175.4048 - val_accuracy: 0.1178\n", "\n", "Epoch 00034: LearningRateScheduler reducing learning rate to 0.008493465599999998.\n", "Epoch 34/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.2306 - accuracy: 0.9304 - val_loss: 177.7775 - val_accuracy: 0.1156\n", "\n", "Epoch 00035: LearningRateScheduler reducing learning rate to 0.008493465599999998.\n", "Epoch 35/35\n", "50000/50000 [==============================] - 162s 3ms/sample - loss: 0.2187 - accuracy: 0.9365 - val_loss: 173.0359 - val_accuracy: 0.1231\n" ] } ], "source": [ "initial_lrate = 0.01\n", "\n", "def decay(epoch, steps=100):\n", " drop = 0.96\n", " epochs_drop = 8\n", " lrate = initial_lrate * math.pow(drop, math.floor((1 + epoch) / epochs_drop))\n", " return lrate\n", "\n", "lr_sc = LearningRateScheduler(decay, verbose=1)\n", "\n", "sgd = SGD(lr=initial_lrate, momentum=0.9, nesterov=True)\n", "\n", "model.compile(\n", " loss='categorical_crossentropy',\n", " optimizer=sgd,\n", " metrics=['accuracy']\n", ")\n", "\n", "epochs = 35\n", "\n", "history = model.fit(\n", " x=X_train,\n", " y=y_train,\n", " validation_data=(X_test, y_test),\n", " epochs=epochs, batch_size=256, callbacks=[lr_sc]\n", ")" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "scrolled": true }, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.3" } }, "nbformat": 4, "nbformat_minor": 2 }