{ "cells": [ { "cell_type": "markdown", "metadata": { "ExecuteTime": { "end_time": "2018-01-19T02:50:58.408908Z", "start_time": "2018-01-19T02:50:58.405402Z" } }, "source": [ "# Convolutional Neural Networks\n", "\n", "\n", "\n", "\n", "## Setup\n", "\n", "Using the Keras library to import the [MNIST](http://yann.lecun.com/exdb/mnist/) dataset due to the simplicity over other libraries (e.x. [CNTK](https://cntk.ai/pythondocs/CNTK_103A_MNIST_DataLoader.html#) and [TensorFlow](https://www.tensorflow.org/get_started/mnist/pros))\n", "" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "ExecuteTime": { "end_time": "2018-04-29T01:31:22.215183Z", "start_time": "2018-04-29T01:31:08.349411Z" } }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using TensorFlow backend.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "X_train shape: (60000, 28, 28, 1)\n", "60000 Training samples\n", "10000 Testing samples\n" ] } ], "source": [ "import keras\n", "from keras import backend as K\n", "import matplotlib.pyplot as plt\n", "\n", "%matplotlib inline\n", "\n", "# Loading the data, shuffled and split between train and test sets\n", "(X_train, y_train), (X_test, y_test) = keras.datasets.mnist.load_data()\n", "\n", "# Input image dimensions\n", "img_rows, img_cols = 28, 28\n", "num_channels = 1\n", "\n", "# Ensuring the channels are in the correct \n", "if K.image_data_format() == 'channels_first':\n", " X_train = X_train.reshape(X_train.shape[0], num_channels, img_rows, img_cols)\n", " X_test = X_test.reshape(X_test.shape[0], num_channels, img_rows, img_cols)\n", " input_shape = (num_channels, img_rows, img_cols)\n", "else:\n", " X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, num_channels)\n", " X_test = X_test.reshape(X_test.shape[0], img_rows, img_cols, num_channels)\n", " input_shape = (img_rows, img_cols, num_channels)\n", " \n", "print('X_train shape:', X_train.shape)\n", "print(X_train.shape[0], 'Training samples')\n", "print(X_test.shape[0], 'Testing samples')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Keras" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "ExecuteTime": { "end_time": "2018-04-29T01:34:29.906686Z", "start_time": "2018-04-29T01:31:49.693994Z" }, "scrolled": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2018/04/28 20:31\n", "OS: win32\n", "Python: 3.6.2 |Anaconda custom (64-bit)| (default, Jul 20 2017, 12:30:02) [MSC v.1900 64 bit (AMD64)]\n", "NumPy: 1.12.1\n", "Keras: 2.0.6\n", "Backend: TensorFlow 1.3.0\n", "[name: \"/gpu:0\"\n", "device_type: \"GPU\"\n", "memory_limit: 1524796620\n", "locality {\n", " bus_id: 1\n", "}\n", "incarnation: 17807082595901432932\n", "physical_device_desc: \"device: 0, name: GeForce GTX 680, pci bus id: 0000:01:00.0\"\n", "]\n", "X_train shape: (60000, 28, 28, 1)\n", "60000 Training samples\n", "10000 Testing samples\n", "_________________________________________________________________\n", "Layer (type) Output Shape Param # \n", "=================================================================\n", "conv2d_1 (Conv2D) (None, 24, 24, 32) 832 \n", "_________________________________________________________________\n", "leaky_re_lu_1 (LeakyReLU) (None, 24, 24, 32) 0 \n", "_________________________________________________________________\n", "conv2d_2 (Conv2D) (None, 20, 20, 32) 25632 \n", "_________________________________________________________________\n", "leaky_re_lu_2 (LeakyReLU) (None, 20, 20, 32) 0 \n", 
"_________________________________________________________________\n", "max_pooling2d_1 (MaxPooling2 (None, 10, 10, 32) 0 \n", "_________________________________________________________________\n", "dropout_1 (Dropout) (None, 10, 10, 32) 0 \n", "_________________________________________________________________\n", "conv2d_3 (Conv2D) (None, 8, 8, 64) 18496 \n", "_________________________________________________________________\n", "leaky_re_lu_3 (LeakyReLU) (None, 8, 8, 64) 0 \n", "_________________________________________________________________\n", "conv2d_4 (Conv2D) (None, 6, 6, 64) 36928 \n", "_________________________________________________________________\n", "leaky_re_lu_4 (LeakyReLU) (None, 6, 6, 64) 0 \n", "_________________________________________________________________\n", "max_pooling2d_2 (MaxPooling2 (None, 3, 3, 64) 0 \n", "_________________________________________________________________\n", "dropout_2 (Dropout) (None, 3, 3, 64) 0 \n", "_________________________________________________________________\n", "flatten_1 (Flatten) (None, 576) 0 \n", "_________________________________________________________________\n", "dense_1 (Dense) (None, 1024) 590848 \n", "_________________________________________________________________\n", "dense_2 (Dense) (None, 1024) 1049600 \n", "_________________________________________________________________\n", "dense_3 (Dense) (None, 10) 10250 \n", "=================================================================\n", "Total params: 1,732,586\n", "Trainable params: 1,732,586\n", "Non-trainable params: 0\n", "_________________________________________________________________\n", "Train on 54000 samples, validate on 6000 samples\n", "Epoch 1/12\n", "54000/54000 [==============================] - 14s - loss: 0.4236 - acc: 0.8607 - val_loss: 0.0711 - val_acc: 0.9762\n", "Epoch 2/12\n", "54000/54000 [==============================] - 13s - loss: 0.1124 - acc: 0.9646 - val_loss: 0.0471 - val_acc: 0.9848\n", "Epoch 3/12\n", "54000/54000 [==============================] - 12s - loss: 0.0829 - acc: 0.9743 - val_loss: 0.0425 - val_acc: 0.9877\n", "Epoch 4/12\n", "54000/54000 [==============================] - 13s - loss: 0.0687 - acc: 0.9779 - val_loss: 0.0324 - val_acc: 0.9900\n", "Epoch 5/12\n", "54000/54000 [==============================] - 13s - loss: 0.0605 - acc: 0.9812 - val_loss: 0.0292 - val_acc: 0.9917\n", "Epoch 6/12\n", "54000/54000 [==============================] - 12s - loss: 0.0519 - acc: 0.9836 - val_loss: 0.0300 - val_acc: 0.9917\n", "Epoch 7/12\n", "54000/54000 [==============================] - 12s - loss: 0.0487 - acc: 0.9846 - val_loss: 0.0344 - val_acc: 0.9903\n", "Epoch 8/12\n", "54000/54000 [==============================] - 12s - loss: 0.0464 - acc: 0.9854 - val_loss: 0.0283 - val_acc: 0.9920\n", "Epoch 9/12\n", "54000/54000 [==============================] - 12s - loss: 0.0424 - acc: 0.9869 - val_loss: 0.0256 - val_acc: 0.9927\n", "Epoch 10/12\n", "54000/54000 [==============================] - 12s - loss: 0.0405 - acc: 0.9874 - val_loss: 0.0269 - val_acc: 0.9933\n", "Epoch 11/12\n", "54000/54000 [==============================] - 12s - loss: 0.0370 - acc: 0.9883 - val_loss: 0.0283 - val_acc: 0.9920\n", "Epoch 12/12\n", "54000/54000 [==============================] - 12s - loss: 0.0354 - acc: 0.9895 - val_loss: 0.0265 - val_acc: 0.9932\n", "10000/10000 [==============================] - 0s \n", "\n", "Test score: 0.0188353575279\n", "Test accuracy: 0.9942\n", "Total training time for 12 epochs: 157 seconds\n" ] } ], "source": [ 
"import sys\n", "import time\n", "import numpy as np\n", "import keras\n", "from keras.models import Sequential\n", "from keras.layers import Dense, Dropout, Flatten\n", "from keras.layers import Conv2D, MaxPooling2D\n", "from keras.layers import advanced_activations\n", "from keras.layers.normalization import BatchNormalization\n", "from keras import callbacks\n", "from keras import backend as K\n", "\n", "print(time.strftime('%Y/%m/%d %H:%M'))\n", "print('OS:', sys.platform)\n", "print('Python:', sys.version)\n", "print('NumPy:', np.__version__)\n", "print('Keras:', keras.__version__)\n", "\n", "# Printing backend and GPU information\n", "if keras.backend.backend() == 'tensorflow':\n", " import tensorflow as tf\n", " from tensorflow.python.client import device_lib\n", " print('Backend: TensorFlow', tf.__version__)\n", " local_device_protos = device_lib.list_local_devices()\n", " print([x for x in local_device_protos if x.device_type == 'GPU'])\n", "\n", " # Avoiding memory issues with the GPU\n", " config = tf.ConfigProto()\n", " config.gpu_options.allow_growth = True\n", " sess = tf.Session(config=config)\n", " K.set_session(sess)\n", "\n", "elif keras.backend.backend() == 'cntk':\n", " import cntk as C\n", " print('Backend: CNTK', C.__version__)\n", " print('GPU: ', C.gpu(0))\n", "\n", "# Setting data types and normalizing the images\n", "X_train = X_train.astype('float32')\n", "X_test = X_test.astype('float32')\n", "X_train /= 255\n", "X_test /= 255\n", "print('X_train shape:', X_train.shape)\n", "print(X_train.shape[0], 'Training samples')\n", "print(X_test.shape[0], 'Testing samples')\n", "\n", "# Model settings\n", "batch_size = 128\n", "num_classes = 10\n", "epochs = 12\n", "\n", "# Converting class vectors to binary class matrices\n", "y_train = keras.utils.to_categorical(y_train, num_classes)\n", "y_test = keras.utils.to_categorical(y_test, num_classes)\n", "\n", "\n", "# Beginning model building\n", "model = Sequential()\n", "\n", "# Layer 1 - Conv (5x5)\n", "model.add(Conv2D(32, kernel_size=(5, 5), input_shape=input_shape))\n", "model.add(advanced_activations.LeakyReLU(alpha=0.03))\n", "\n", "# Layer 2 - Conv (5x5) & Max Pooling\n", "model.add(Conv2D(32, kernel_size=(5, 5)))\n", "model.add(advanced_activations.LeakyReLU(alpha=0.03))\n", "model.add(MaxPooling2D(pool_size=(2, 2)))\n", "model.add(Dropout(0.5))\n", "\n", "# Layer 3 - Conv (3x3)\n", "model.add(Conv2D(64, kernel_size=(3, 3)))\n", "model.add(advanced_activations.LeakyReLU(alpha=0.03))\n", "\n", "# Layer 4 - Conv (3x3) & Max Pooling\n", "model.add(Conv2D(64, kernel_size=(3, 3), activation='relu'))\n", "model.add(advanced_activations.LeakyReLU(alpha=0.03))\n", "model.add(MaxPooling2D(pool_size=(2, 2)))\n", "model.add(Dropout(0.5))\n", "\n", "# Layer 5 - FC 1024\n", "model.add(Flatten())\n", "model.add(Dense(1024, activation='relu'))\n", "\n", "# Layer 6 - FC 1024\n", "model.add(Dense(1024, activation='relu'))\n", "\n", "# Output Layer\n", "model.add(Dense(num_classes, activation='softmax'))\n", "\n", "# Defining loss function, optimizer, and metrics to report\n", "model.compile(loss=keras.losses.categorical_crossentropy,\n", " optimizer=keras.optimizers.Adadelta(),\n", " metrics=['accuracy'])\n", "\n", "# Summary of the model before fitting\n", "model.summary()\n", "\n", "\n", "### Setting up callbacks\n", "# Early stopping\n", "earlystop = callbacks.EarlyStopping(monitor='val_loss',\n", " min_delta=0.0001, # Amount counting as an improvement\n", " patience=5, # Number of epochs before stopping\n", " verbose=1, \n", " 
mode='auto')\n", "\n", "# Tracking the training time for each epoch\n", "class TimeHistory(callbacks.Callback):\n", " '''\n", " Tracks training time on individual epochs for a Keras model\n", " '''\n", " def on_train_begin(self, logs={}):\n", " self.times = []\n", "\n", " def on_epoch_begin(self, batch, logs={}):\n", " self.epoch_time_start = time.time()\n", "\n", " def on_epoch_end(self, batch, logs={}):\n", " self.times.append(time.time() - self.epoch_time_start)\n", "\n", " \n", "time_callback = TimeHistory() # Gives training time for all epochs\n", "\n", "\n", "# Model checkpoints - saves the model with the best validation loss\n", "model_filepath = 'model.val_loss{val_loss:.5f}_epoch{epoch:04d}-.h5'\n", "checkpoint = callbacks.ModelCheckpoint(model_filepath, monitor='val_loss',\n", " save_best_only=True)\n", "\n", "# Reducing the learning rate if training loss does not increase\n", "learning_rate_redux = callbacks.ReduceLROnPlateau(monitor='loss', \n", " patience=3, # Reduce after 3 epochs\n", " verbose=1, \n", " factor=0.3, # Reduce to 1/3\n", " min_lr=0.00001)\n", "\n", "\n", "# Fitting the model\n", "model_info = model.fit(X_train, y_train,\n", " epochs=epochs,\n", " batch_size=batch_size, verbose=1,\n", " validation_split=0.1, # Uses last 10% of data (not shuffled) for validation\n", " callbacks=[earlystop, checkpoint, learning_rate_redux, time_callback])\n", "\n", "# Getting test information\n", "score, acc = model.evaluate(X_test, y_test,\n", " batch_size=batch_size)\n", "print()\n", "print('Test score:', score)\n", "print('Test accuracy:', acc)\n", "\n", "# Saving the model\n", "# model.save('model.h5')\n", "\n", "# Reporting total training time\n", "total_training_time = round(sum(time_callback.times))\n", "print('Total training time for {0} epochs: {1} seconds'.format(epochs, total_training_time))" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "ExecuteTime": { "end_time": "2018-04-29T01:35:55.231650Z", "start_time": "2018-04-29T01:35:53.918650Z" } }, "outputs": [ { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA4IAAAFNCAYAAABVKNEpAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xl4XHd5//33rX0ZLbakkXfLsSaLszlgnBRCIKWhSVhS\nwgNJCoSmgJ+0bKHQkvKj0FKgoaUttIXmZ9JQKIQ8YQkN1CFASQhLgCTESZzEieQtXmJttmWN9uV+\n/jhn5LEsWWNbozMjfV7XpWtmzjK6pVzx0WfO93t/zd0RERERERGR+aMg6gJERERERERkdikIioiI\niIiIzDMKgiIiIiIiIvOMgqCIiIiIiMg8oyAoIiIiIiIyzygIioiIiIiIzDMKgiJZYmZNZuZmVpTB\nsX9kZj+fjbpERETyla6tIjNHQVAEMLOdZjZkZvUTtj8WXnCaoqnsqFpiZpY0s3ujrkVERGQ6uXxt\nPZFAKTJXKQiKHLEDuC71wszOBSqiK+cYbwQGgcvMbNFsfmNdKEVE5CTl+rVVZN5SEBQ54r+A69Ne\nvx34avoBZlZjZl81sw4z22VmHzWzgnBfoZl91sw6zWw78JpJzv0PM3vBzPaa2SfNrPAE6ns7cCvw\nBPDWCe+93My+E9bVZWb/lrbvXWb2jJn1mNnTZvaicLubWXPacf9pZp8Mn7/SzPaY2YfNbD/wZTNb\nYGbfD7/HwfD5srTzF5rZl81sX7j/u+H2LWb2urTjisPf0QUn8LOLiEh+yvVr6zHMrNTMPhdez/aF\nz0vDffXh9e+QmR0ws5+l1frhsIYeM3vWzF51KnWIZJuCoMgRvwKqzeys8CJyLfC1Ccf8K1ADnAa8\nguDidkO4713Aa4ELgHXA/zPh3P8ERoDm8JhXA+/MpDAzWwm8Evh6+HV92r5C4PvALqAJWArcGe57\nE/DX4fHVwOuBrky+J7AIWAisBDYQ/Hvx5fD1CqAf+Le04/+L4FPes4E48M/h9q9ydHC9EnjB3R/L\nsA4REclfOXttPY7/A1wErAXOB9YDHw33fRDYAzQAjcBHADezM4D3AC9x9yrg94Gdp1iHSFYpCIoc\nLfXJ5WXAM8De1I60C9hfunuPu+8E/hF4W3jIm4HPuftudz8A/F3auY0EAegmd+9193aCoHRthnW9\nDXjC3Z8mCHlnp91RWw8sAf48fO8Bd09Njn8n8Pfu/rAHWt19V4bfcwz4uLsPunu/u3e5+7fdvc/d\ne4BPEVywMbPFwBXAje5+0N2H3f2n4ft8DbjSzKrTfpb/yrAGERHJf7l6bZ3KW4BPuHu7u3cAf5NW\nzzCwGFgZXut+5u4OjAKlwBozK3b3ne6+7RTrEMkqzfsROdp/AQ8Cq5gwdAWoB4oJ7ryl7CK4AwdB\nGNs9YV/KyvDcF8wsta1gwvHHcz3wJQB332tmPyUYXvMYsBzY5e4jk5y3HDjZC1GHuw+kXphZBcEF\n9nJgQbi5KryILwcOuPvBiW/i7vvM7BfAG83sboLA+P6TrElERPJPrl5bp7JkknqWhM//gWCkzQ/D\n77nR3W9x91Yzuyncd7aZ3Qf8mbvvO8VaRLJGdwRF0oR3y3YQfML4nQm7Owk+CVyZtm0FRz7ZfIEg\nEKXvS9lN0Oil3t1rw69qdz97uprM7KVAAvhLM9sfztm7EPjDsInLbmDFFA1ddgOrp3jrPo6esD+x\nAY1PeP1B4AzgQnevBi5JlRh+n4VmVjvF9/oKwfDQNwEPufveKY4TEZE5JhevrdPYN0k9+8Kfpcfd\nP+jupxFMt/iz1FxAd7/D3S8Oz3XgM6dYh0hWKQiKHOsdwO+6e2/6RncfBe4CPmVmVeG8vT/jyFyH\nu4D3mdkyM1sA3Jx27gvAD4F/NLNqMysws9Vm9ooM6nk78CNgDcF8hbXAOUA5wd213xBcKG8xs0oz\nKzOzl4Xn3gZ8yMxebIHmsG6AzQRhstDMLicc5nkcVQTzAg+Z2ULg4xN+vnuBL4ZNZYrN7JK0c78L\nvIjgTuDET4NFRGTuy7Vra0ppeN1MfRUA3wA+amYNFix98bFUPWb22vBaakA3wZDQMTM7w8x+N2wq\nM0BwvRw7wd+RyKxSEBSZwN23ufsjU+x+L9ALbAd+DtwB3B7u+xJwH/A48FuO/dTzeqAEeBo4CHyL\nYJ7BlMysjGB+xL+6+/60rx0EQ23eHl5EX0cwUf55gkns14Q/yzcJ5vLdAfQQBLKF4du/PzzvEMF8\niO8erxbgcwThs5Ng8v8PJux/G8GnuluBduCm1A537we+TTAsaOLvRURE5rhcurZOkCQIbamv3wU+\nCTxC0KX7yfD7fjI8PgH8ODzvIeCL7n4/wfzAWwiukfsJmqb95QnUITLrLJjfKiKSXWb2MeB0d3/r\ntAeLiIiISFapWYyIZF04lPQdHOm6JiIiIiIRytrQUDO73czazWzLFPvNzP7FzFrN7AkLF7kO910e\nLsTZamY3T3a+iOQHM3sXwYT+e939wajrEREREZEsDg0NG0Ukga+6+zmT7L+SYEz4lQQdED/v7heG\nreifI1hrZg/wMHBduH6aiIiIiIiInKKs3REMP/k/cJxDriIIie7uvwJqw0Wp1wOt7r7d3YcIFs++\nKlt1ioiIiIiIzDdRdg1dytELfu4Jt021XURERERERGZA3jeLMbMNwAaAysrKF5955pkRVyQiItn2\n6KOPdrp7Q9R15Iv6+npvamqKugwREZkFmV4jowyCe4Hlaa+XhduKp9g+KXffCGwEWLdunT/yyFRL\n1IiIyFxhZruiriGfNDU1oeujiMj8kOk1MsqhofcA14fdQy8Cut39BYLmMAkzW2VmJcC14bEiIiIi\nIiIyA7J2R9DMvgG8Eqg3sz3Axwnu9uHutwKbCDqGtgJ9wA3hvhEzew9wH1AI3O7uT2WrThERERER\nkfkma0HQ3a+bZr8D755i3yaCoCgiIiIiIiIzLO+bxYiIiIiIiAAMDw+zZ88eBgYGoi4l68rKyli2\nbBnFxcUndb6CoIiIiIiIzAl79uyhqqqKpqYmzCzqcrLG3enq6mLPnj2sWrXqpN4jymYxIiIiIiIi\nM2ZgYIC6uro5HQIBzIy6urpTuvOpICgiIiIiInPGXA+BKaf6cyoIioiIiIiIzIBDhw7xxS9+8YTP\nu/LKKzl06FAWKpqagqCIiIiIiMgMmCoIjoyMHPe8TZs2UVtbm62yJqVmMSIiM8UdRodguA+G+mC4\nP3g+2WNRGVTWQUXaV3F51D9Bdo2NwkA3FBRBWXXU1cgJeL6rj/ufbefN65ZTXlIYdTkiIjnr5ptv\nZtu2baxdu5bi4mLKyspYsGABW7du5bnnnuMP/uAP2L17NwMDA7z//e9nw4YNADQ1NfHII4+QTCa5\n4ooruPjii/nlL3/J0qVL+e///m/Ky2f+bwQFQRGZH8bGYGQgDGJThbT+tBA3Ydsxj1Oc62MnX2Nx\nBVTUQ8XCI+GwcsLrivojz8sXQOEs/zPuHvysA4eg/2D4
lf784BT7DsFgd/Aev/tXcMmHZrduOSVP\n7evm4/c8xYtWLODcZTVRlyMikrNuueUWtmzZwubNm3nggQd4zWtew5YtW8Y7e95+++0sXLiQ/v5+\nXvKSl/DGN76Rurq6o96jpaWFb3zjG3zpS1/izW9+M9/+9rd561vfOuO1KgiKSPRGR6YOVsP9MNx7\nnH3h49BxwtlwP4z0n3hdVgDFlcGduuLyIKgVl0NJJVQ2HL1t/HHiton7K6C4DIYHoK8L+jrDxy7o\nOxA89obbulqDbUM9UxUI5bVH31WsWHh0WJwYJkurwezI3bmpQtuUge4gjA4e53dWGNRUviD4qmyA\n+tPD1+H2FRed+H8LiVSiMQZAa0ePgqCI5I2/+d5TPL3v8Iy+55ol1Xz8dWdnfPz69euPWt7hX/7l\nX7j77rsB2L17Ny0tLccEwVWrVrF27VoAXvziF7Nz585TL3wSCoIyP42NBX+A9+yH3vbgj+KoFJUe\n/Ud7UWl0tcyUsTHoPwA9L0BPGyT3B7/rnv3h83DbQHcQ4MaGT/x7FJZMCFtpQax8wbHhrKTy6G1F\nZcdumxjcCouD0BS14YHg9zkeGLugt+vo132dcOh52PdYECSn+p0WFAc/2+BhwKf+nsWVR8JceS3U\nNwfPy2qP3p56ntpeWpUbvzOZUSvrKikqMFraklGXIiKSVyorK8efP/DAA/z4xz/moYceoqKigle+\n8pWTLv9QWnrkb8HCwkL6+0/iw+wMKAjK3DI6Ar0dR8JGzwuQbDs2hPS2w9jxJ+1GpiQ2xV2dCfPJ\nUsMEy2uhYJbm7IyNBr/fnv3h7zU96KX9vpNtk/9+y2ogtgiqFsGK3wnCQ0nF5Hfd0h9LJmwrKp/9\nIZFRKi6D4iVQvSSz491hKBneWTxwdFjs6wrCd3nt8UNdUUl2fybJK8WFBTTVV9LSriAoIvnjRO7c\nzZSqqip6eiYfydPd3c2CBQuoqKhg69at/OpXv5rl6o42j/6Skrw2OhwGj1TYmCToJduCkDLZHK2K\n+iB8xBohfjZUNYaBpDHYVlg8+z9TynD/hD/WJ9zl6Xg2eD7cO8UbWPDH+/gQwLqp55SNDw+ccNdm\ndBiS7WlheYqg19s+xe+37sjvs+HM4Hed+n2nP5/rzVByhVnw37i0Chaumv54kQwk4jGe3T/VMGUR\nEQGoq6vjZS97Geeccw7l5eU0NjaO77v88su59dZbOeusszjjjDO46KJop0ooCEq03IPQcej5qcNd\nzwtBEDqGQSx+JGwsWXskjFQtPvK8Mj437m6MB8bOyeeTpb4ObIc9DwfPp7rrWVhyZL5Y6rxjhgla\nML8rFZoXnTch3C0Onsca58bvV0SOKxGPcd9T+xkcGaW0SJ1DRUSmcscdd0y6vbS0lHvvvXfSfal5\ngPX19WzZsmV8+4c+lL3magqCkn2jI9D9PBzYAQd3BI/pzyc28bDCMGw0Qs1yWPaStACy+EgwqWyY\nZ8MDy6FmafCVCfdgHthUwwP7umDgcHCX8KgAHQa9yvj8+v1K1oyOOT0DwxzqG+ZQ/zANVaUsrdXd\n4XyzOh5jzGFHZy9nLtLyHyIi+U5/5cnMGO6HgzvDkLc9LfBth+7dR9+ZKiqDBU2w8DQ47dJg6Frt\nyiNDCCvqZm/O21xmFszJK6uButVRVyNzwPDoGN39QaDr7h8Kgl0Y7rr7hjjUP/nrwwPDeNoN5w9e\ndjrvfVUiuh9ETkoiXgVAa3tSQVBEZA5QEJTM9R86OuAd3AEHdgbPe/YdfWxpTRDwlqyFc66GBauC\n1wtPC+4+FRRE8iOICAwMj44HukNhYOvuG+ZQKtxNfN03THf/MMnBqRssmUFNeTG15cXUVJRQW1FC\nU33lkdflxdRWBF+pQCH55bSGSgoMdQ4VEZkjFATlCPegYcjBMOiND98Mn/cfOPr4WGMQ8E57ZRDy\nFoRBb+GqoHmJWsiLzJjBkVGSAyP0Do7SMzhM7+AoycFhkoOp7SP0DAaPyYERkkNpzweDr97wcXh0\n6mUjigqM2oriINRVlLCouowzFlVRW14yHuRS+8bDXXkJVWVFFBTo//m5rKy4kOULK2hV51ARkTlB\nQXA+GhsNhnF2bIX2Z4LHjq3Qtf3ozpRWADXLgnC35qojd/QWrAqGdpbGovoJRHKOuzM86gyOjDIw\nPMbgyCiDI2MMDB/9OBg+9g6OThreUqEuOXh0gDteeEtXWVJIrKyIytIiqkqDx+WVFePPY2VFxEqL\nxgPckWAXhLvKkkJMH+LIFBLxmIKgiMgcoSA4l42NwaGdwfIDqcDX/gx0PgcjaYtX1iwPWv43vfzo\nIZw1y9UNUuaMsTGnu3+YzuQgnckhDvYNMTB8JLRlEt4Gh8cYGBmd9HFwZJSxzLLaMdLDWyz8miy8\nxUqPPibYXkistJjK0kIqS3RXLl+Y2eXA54FC4DZ3v2WK414CPARc6+7fOpFzs2F1PMZPn+tgZHSM\nokIN8RcRyWcKgnPB2FjQlbN9K3Q8c+Sx47mjO3JWLw0C36pLgsf4WdBwRrDWmEgeGhwZ5UDvEJ09\nQ3T2DtLZM0hX79CRxzD0dSUHOdA7xEgGSa240CgrKqS0uIDStMey4gJKiwpYWFlCaVEBZcWFlBal\n7zv2Mf09yiY8VpYUKbzNU2ZWCHwBuAzYAzxsZve4+9OTHPcZ4Icnem62JOJVDI86uw70sbpBo0JE\nRE5VLBYjmYxmpIWCYD5xDzpwThb40od0Vi2B+Jmw7oajA19ZTXS1i2TA3UkOjoyHt1SQ60wO0jXh\nsTM5yOGByZuXlBUXUB8rpS5WytLaMs5bWkN9VQl1laXUxUpoiJWyoLKE8uLCI4EuDG2FCmWSfeuB\nVnffDmBmdwJXARPD3HuBbwMvOYlzsyIRD8Jfa3tSQVBEJM8pCOYidzi8d5LA9ywMpX1iEFsUBL4X\nXR88NoSBr7w2utpl3nJ3BobH6B0aoX9olN6hoLFJX/jYP3z0676hEQ71DY/fuetKDtGRHGRoZGzS\n96+tKA7CXWUJZy2ppr6yZDzs1cWC5/XhY4XmuUluWwrsTnu9B7gw/QAzWwq8AbiUo4PgtOdm0+q0\nIPj7Z8/WdxURyR8333wzy5cv593vfjcAf/3Xf01RURH3338/Bw8eZHh4mE9+8pNcddVVEVeqIJg7\ndv4CnrgznMv3bLAQeEplPAh6a99ydOCrWBhdvZLXRsec3qERegZG6BscoS8Mbn2D4ePQaPA1OELv\n0PHDXOrY3qGRo9aKm055cSE15cXjd+oS8SrqYyXjoa4uLdgtrCyhWPORZH75HPBhdx872Q81zGwD\nsAFgxYoVM1JUrLSIJTVltLT1zMj7iYjMNddccw033XTTeBC86667uO+++3jf+95HdXU1nZ2dXHTR\nRbz+9a+P/ENrBcGo7X4Y7v8kbH8gGLq56Dw475ojgS9+lgKfjHN3BkfGODwwTHIgCHLJwRF6Bobp\nCV8H28LXg6l
tRx9/vPXgJiopKqCypJCKkiIqSgqpKC2isqSQJbUlVJQUUlka7KsM91WUHP26sqSQ\n8pJgLlxFOCeuvLhQ8+JkPtsLLE97vSzclm4dcGf4R0I9cKWZjWR4LgDuvhHYCLBu3bqTbGV0rObG\nKlo71DlURPLAvTfD/idn9j0XnQtXTN2j64ILLqC9vZ19+/bR0dHBggULWLRoER/4wAd48MEHKSgo\nYO/evbS1tbFo0aKZre0EKQhGZd9muP/T0HIfVNTDqz8FL3kHFJdHXZnMgrExp61ngJ2dfbT3DHB4\nYCQMasNhsDsS4NLD3nRrwKWUFxdSVRZ0lawqK6a6rIhF1WXBttJiqsqKxr8qwqYlQXgLwlpFWvDT\nnTiRGfcwkDCzVQQh7lrgD9MPcPdVqedm9p/A9939u2ZWNN252dbcEOOOHV2Mjbk+0BERmcSb3vQm\nvvWtb7F//36uueYavv71r9PR0cGjjz5KcXExTU1NDAwMTP9GWaYgONvanoYHPg3PfA/KauFVH4f1\nG7Qm3xyUCns7OnvZ1dXHzs5ednb1srOzj10HehkYPnYuXGGBhWEtCHBVZUUsrik7KtRVlQXrw1WV\nFYfHpW0Pz1Vbd5Hc5e4jZvYe4D6CJSBud/enzOzGcP+tJ3rubNSdkmiMMTA8xt5D/SxfWDGb31pE\n5MQc585dNl1zzTW8613vorOzk5/+9KfcddddxONxiouLuf/++9m1a1ckdU2kIDhbOlvggb+DLd8J\nlmt45V/CRX+iTp55bmzM2X944EjA6+odD34Tw15JYQHLF5azqr6SixP1NNVX0lRXweKacqrDoFde\nrCYnIvOBu28CNk3YNmkAdPc/mu7c2ZTeOVRBUETkWGeffTY9PT0sXbqUxYsX85a3vIXXve51nHvu\nuaxbt44zzzwz6hIBBcHsO7ADfvr3QSOYojK4+APw0vdq3l8eGQ97nb3s7OoLQ19wd29XVx+DI0eH\nvRV1FTTVVfDyRD0r6ytZVVfJyroKltSWa2kCEcl7zWEQbGnv4dIz4xFXIyKSm5588sjcxPr6eh56\n6KFJj4tqDUHIchA0s8uBzxMMX7nN3W+ZsH8BcDuwGhgA/tjdt4T7PgC8E3DgSeAGd49+MG2muvfA\ng/8Aj30NCorgoj+Fl90EsYaoK5NJnFzYq+SSREN4Z6+Spvrg7p7CnojMZbUVQTff1nY1jBERyWdZ\nC4JmVgh8AbiMYJ2jh83sHndPX/T2I8Bmd3+DmZ0ZHv+qcP2k9wFr3L3fzO4imBD/n9mqd8b07Ief\n/RM8+uVgPcAX3wAv/yBUL466MgklB0d4et9hntzbzVN7u3n6hcPs6Ow9OuwVFbByYQUr6yp5xekN\nrKyrZFV9cGdPYU9E5rtEPEaLgqCISF7L5h3B9UCru28HMLM7gauA9CC4BrgFwN23mlmTmTWm1VZu\nZsNABbAvi7Weut5O+MXn4De3wegQXPBWuOTPoXb59OdK1nT3DfPUvm627Ovmyb2HeWpvNzu6esfX\nu4tXlXL2kmpePj5nr5Km+koWVZcp7ImITKE5HuO7j+3F3TWvWUQkT2UzCC4Fdqe93gNcOOGYx4Gr\ngZ+Z2XpgJbDM3R81s88CzwP9wA/d/YdZrPXk9R+EX/4b/PpWGO4L1gB8xV/AwtOirmze6UoOsmXf\nYbbs7Q7C397DPH+gb3z/0tpyzl5SzR9csJRzllZzzpIa4tVlEVYsIpKfEo0xegZHaO8ZpFH/jopI\njpkvH1K5n9oSsVE3i7kF+LyZbSaYB/gYMBrOHbwKWAUcAr5pZm91969NfAMz2wBsAFixYsWsFc7A\n4SD8/fLfYLAbzr466ATacPrs1TCPtR8eYEsY9lJDPPd1H5lCumJhBecureHa9cs5Z0kNZy+ppi5W\nGmHFIiJzx3jDmLakgqCI5JSysjK6urqoq6ub02HQ3enq6qKs7OT/Dc5mENwLpI+LXBZuG+fuh4Eb\nACz4L7UD2A78PrDD3TvCfd8BXgocEwTdfSOwEWDdunWnFoszMdQLv/lSMAy0/yCc+dogAC46J+vf\nej5yd/Z1DwR3+fZ2syWc29fRMwiAGayqr2Rd00LOXVrD2UurOXtxDTUVxRFXLiIyd6V3Dr04UR9x\nNSIiRyxbtow9e/bQ0dERdSlZV1ZWxrJly076/GwGwYeBhJmtIgiA1wJ/mH6AmdUCfe4+RNAh9EF3\nP2xmzwMXmVkFwdDQVwGPZLHW6Q0PwCO3w8//CXo7oPkyuPQjsPRFkZY1l7g7uw/08+Te7vBuXzdP\n7TvMgd4hAAoMEvEqXp6o55wlNZy7rIazFlcTK436xraIyPzSECulprxYnUNFJOcUFxezatWqqMvI\nC1n7C9rdR8zsPcB9BMtH3O7uT5nZjeH+W4GzgK+YmQNPAe8I9/3azL4F/BYYIRgyujFbtR7XyBA8\n9lV48B+hZx+sugQu/TqsmDjdUU7U/u4BNu8+yGO7D/HE7mBe3+GBEQCKCozTG6u47KxGzllazdlL\nazhrUTXlJYURVy0iImamzqEiInkuq7dS3H0TsGnCtlvTnj8ETDqpzt0/Dnw8m/Ud1+gIPP6NYDH4\n7udhxe/A1Rth1csjKymf9Q2N8MSebjbvPsTm5w+xefch9h8O5vQVFxpnLa7mtecv4dylNZyzpIbT\nF8UoLVLoExHJVc3xGD98ui3qMkRE5CRpTN1EY6Ow5dvwwC1wYBsseRG87p9h9auCCWkyrdExp7U9\nyebdB9m8+xCPPX+I59p6GAtncK5YWMH6VQtZu7yWtStqWbO4mrJihT4RkXzSHI9x58O76UoOqhmX\niEgeUhBMGRuDZ+6BB/4OOrZC47lw7TfgjCsUAKfR3jPA5ucP8Vh4t+/Jvd0kB4MhntVlRZy/vJZX\nr2lk7Ypazl9Wqz8YRETmgERjFQCt7Un9uy4ikocUBFPGRuBHfwVF5fCmr8BZr4eCgqiryjn9Q6Ns\n2dc9Prxz8+5D7D3UDwTz+s5aXM0bLlg6frdvVV0lBVqYXURkzjnSOTTJhafVRVyNiIicKAXBlKIS\nePv3oGY5FGiYIsDYmLO9M8ljaaFv6/4eRsMxnssWlHPBilpueFkTF6yo5ewlNRriKSIyTyypKaOy\npFCdQ0VE8pSCYLoFTVFXEKnO5OBRd/oe33OInrCLZ1VpEectr+FPXrGatctrOX95LQ1VGgokIjJf\nmRnN8ZiCoIhInlIQnOfcnf99pp1/uO9Znm3rAaCwwDijsYrXnb+EtctruWB5LasbYhriKSIiR1kd\nj/GL1s6oyxARkZOgIDiPPdfWw99+/2l+1tLJ6oZKPnLlmaxdvoBzl9ZovT4REZlWIl7Fd367l8MD\nw1SXFUddjoiInAAFwXnoUN8Q//yj5/jar5+nsqSQj712DW/7nZUUF6o5joiIZC4RNoxpbU/yohUL\nIq5GREROhILgPDIyOsbXf/08//Sj5+gZGOYtF67kA5edzsLKkqhLExGR
PJTqHNrapiAoIpJvFATn\niZ+1dPCJ7z1NS3uSl66u42OvW8OZi6qjLktERPLY8oUVlBQV0NqhhjEiIvlGQXCO29HZy6f+52l+\n/Ew7KxZW8H/f9mJevaYRMzV+ERGRU1NYYKxuiNESNhsTEZH8oSA4Rx0eGObfftLKl3+xg5LCAm6+\n4kxueFkTpUVqAiMiIjOnOR7jsecPRl2GiIicIAXBOWZ0zPnmI7v57A+fpat3iDe9eBkf+v0ziFeV\nRV2aiIiEzOxy4PNAIXCbu98yYf9VwN8CY8AIcJO7/zzctxPoAUaBEXdfN4ulHyMRj/H9J/bRNzRC\nRYn+rBARyRf6F3sO+fX2Lj7x/ad5at9h1q1cwJf/aD3nLquJuiwREUljZoXAF4DLgD3Aw2Z2j7s/\nnXbY/wL3uLub2XnAXcCZafsvdfecWMAvEY/hDts7ejlnqa45IiL5QkFwDthzsI+/27SV/3nyBZbU\nlPEv113A685brHmAIiK5aT3Q6u7bAczsTuAqYDwIunt695VKwGe1whOQ6hza0t6jICgikkcUBPNY\n39AI//7ANjY+uB0z+MDvnc6GS07TYvAiIrltKbA77fUe4MKJB5nZG4C/A+LAa9J2OfBjMxsF/q+7\nb8xirdPREqP0AAAgAElEQVRaWVdJUYHR2q7OoSIi+URBMA+NjTn//fhePnPvs+w/PMBVa5fw4cvP\nZEltedSliYjIDHH3u4G7zewSgvmCvxfuutjd95pZHPiRmW119wcnnm9mG4ANACtWrMhanSVFBTTV\nV9LSpiAoIpJPFATzzObdh/ib7z3FY88f4rxlNXzhLRfw4pULoy5LREQytxdYnvZ6WbhtUu7+oJmd\nZmb17t7p7nvD7e1mdjfBUNNjgmB4p3AjwLp167I6tLS5IcZzWkJCRCSvKAjmibbDA3zm3q1857G9\nNFSV8tk3nc/VFyyloEDzAEVE8szDQMLMVhEEwGuBP0w/wMyagW1hs5gXAaVAl5lVAgXu3hM+fzXw\nidkt/1iJxhg/eqaNwZFRLVMkIpInFARz3MDwKLf9bDtffGAbI6POn75yNX96aTOxUv2nExHJR+4+\nYmbvAe4jWD7idnd/ysxuDPffCrwRuN7MhoF+4JowFDYSDBeF4Bp+h7v/IJIfJE1zPMbomLOzs48z\nFlVFXY6IiGRAaSJHuTv3btnPpzc9w56D/Vx+9iI+cuVZrKiriLo0ERE5Re6+Cdg0Ydutac8/A3xm\nkvO2A+dnvcATlOoc2tqeVBAUEckTCoI56Kl93Xzie0/z6x0HOHNRFXe860Jeuro+6rJEREQmtboh\nhlmwhAQsjrocERHJgIJgDulMDvKPP3yOOx9+ngUVJXzqDedw7UtWUKh5gCIiksPKigtZsbCCFi0h\nISKSNxQEc8Tjuw/x1tt+Tf/wKH/8slW871UJasqLoy5LREQkI80NMbYpCIqI5A0FwRyx6ckXGBwd\n4wc3XTI+10JERCRfNDfG+FlLJyOjYxQVFkRdjoiITEP/UueIlvYkp9VXKgSKiEheSsSrGBod4/kD\nfVGXIiIiGVAQzBEt7T0kGtVpTURE8lN651AREcl9CoI5oH9olD0H+2lu0N1AERHJT6kgqIYxIiL5\nQUEwB2zrSOIOiUYFQRERyU+x0iIW15TpjqCISJ7IahA0s8vN7FkzazWzmyfZv8DM7jazJ8zsN2Z2\nTtq+WjP7lpltNbNnzOx3sllrlLZ1BBdNzQ8UEZF81hyPKQiKiOSJrAVBMysEvgBcAawBrjOzNRMO\n+wiw2d3PA64HPp+27/PAD9z9TOB84Jls1Rq1lrYkhQVGU11l1KWIiIictES8itb2JGNjHnUpIiIy\njWzeEVwPtLr7dncfAu4ErppwzBrgJwDuvhVoMrNGM6sBLgH+I9w35O6HslhrpFrae2iqq6CkSCN1\nRUQkfzXHY/QPj7L3UH/UpYiIyDSymTyWArvTXu8Jt6V7HLgawMzWAyuBZcAqoAP4spk9Zma3mdmk\nt8vMbIOZPWJmj3R0dMz0zzArWtuTGhYqIiJ5LzXXvbVDw0NFRHJd1LegbgFqzWwz8F7gMWCUYKH7\nFwH/7u4XAL3AMXMMAdx9o7uvc/d1DQ0Ns1T2zBkaGWNnVx+JuJaOEBGR/Jbqft3apiAoIpLrirL4\n3nuB5Wmvl4Xbxrn7YeAGADMzYAewHagA9rj7r8NDv8UUQTDf7erqZXTMdUdQRETy3oLKEupjJbS0\n90RdioiITCObdwQfBhJmtsrMSoBrgXvSDwg7g5aEL98JPOjuh919P7DbzM4I970KeDqLtUYmtd6S\ngqCIiMwF6hwqIpIfsnZH0N1HzOw9wH1AIXC7uz9lZjeG+28FzgK+YmYOPAW8I+0t3gt8PQyK2wnv\nHM41LW1JzGC1FpMXEZE5IBGv4rub9+LuBIN9REQkF2VzaCjuvgnYNGHbrWnPHwJOn+LczcC6bNaX\nC1o7kixbUE55SWHUpYiIiJyy5niMnoER2nsGaawui7ocERGZQtTNYua9lrYeNYoREZE5IxFOddDw\nUBGR3KYgGKHRMWd7Z6/mB4qIyJzRHC4h0dKmhjEiIrlMQTBCuw/0MTQypiAoIiJzRkOslOqyovFm\naCIikpsUBCOUukgmFARFRGSOMDMSjVUaGioikuMUBCOUukiuVhAUEZE5JKElJEREcp6CYIRa2ntY\nVF1GdVlx1KWIiMgsMrPLzexZM2s1s5sn2X+VmT1hZpvN7BEzuzjTc3NBczxGV+8QB3qHoi5FRESm\noCAYoW3tSc0PFBGZZ8ysEPgCcAWwBrjOzNZMOOx/gfPdfS3wx8BtJ3Bu5JrVOVREJOcpCEbE3WlR\nEBQRmY/WA63uvt3dh4A7gavSD3D3pLt7+LIS8EzPzQWJxmBZpJZ2dQ4VEclVCoIR2dc9QN/QKIlG\nBUERkXlmKbA77fWecNtRzOwNZrYV+B+Cu4IZnxu1JTVlVJQU0tKmO4IiIrlKQTAiqeEyzQ0KgiIi\ncix3v9vdzwT+APjbEz3fzDaE8wsf6ejomPkCj/+9aY7H2NahICgikqsUBCOSWmg3NXxGRETmjb3A\n8rTXy8Jtk3L3B4HTzKz+RM51943uvs7d1zU0NJx61SeoOR7THUERkRymIBiRbR1JFlaWsLCyJOpS\nRERkdj0MJMxslZmVANcC96QfYGbNZmbh8xcBpUBXJufmiuZ4jP2HBzg8MBx1KSIiMomiqAuYr1ra\n1ChGRGQ+cvcRM3sPcB9QCNzu7k+Z2Y3h/luBNwLXm9kw0A9cEzaPmfTcSH6QaSTiwYiXbe1JLlix\nIOJqRERkIgXBCKQ6hr72vMVRlyIiIhFw903Apgnbbk17/hngM5mem4sS4YedLQqCIiI5SUNDI9CZ\nHKK7f1h3BEVEZM5avrCCkqICrSUoIpKjFAQjkFpXKTVsRkREZK4pLDBOq69UEBQRyVEKghHYllo6\nQncERURkDks0VmlReRGRHKU
gGIGW9iRVpUU0VpdGXYqIiEjWNDfE2HOwn/6h0ahLERGRCRQEI9DS\nlqS5MUbYGVxERGROSjTGcEcLy4uI5CAFwQi0diRpbtCwUBERmdtSnUM1T1BEJPcoCM6y7r5hOnoG\nSTQqCIqIyNy2sq6SwgJTEBQRyUEKgrOstSOYNK9GMSIiMteVFBXQVFehhjEiIjlIQXCWtbQFn4pq\n6QgREZkPEvEqWnRHUEQk5ygIzrKW9iRlxQUsrS2PuhQREZGsa47H2NXVx9DIWNSliIhIGgXBWdba\nnmR1Q4yCAnUMFRGRuS/RGGN0zNnZ1Rt1KSIikkZBcJa1tifHu6iJiIjMdak58ampESIikhsUBGdR\n7+AIew/1q1GMiIjMG6sbYphpCQkRkVyjIDiLUgvqNqtRjIiIzBNlxYUsX6DOoSIiuSarQdDMLjez\nZ82s1cxunmT/AjO728yeMLPfmNk5E/YXmtljZvb9bNY5W8Y7hmoNQRERmUcS8ZjuCIqI5JisBUEz\nKwS+AFwBrAGuM7M1Ew77CLDZ3c8Drgc+P2H/+4FnslXjbGvtSFJcaKxcWBF1KSIiIrOmOR5je2cv\nI6PqHCoikiuyeUdwPdDq7tvdfQi4E7hqwjFrgJ8AuPtWoMnMGgHMbBnwGuC2LNY4q1rakqyqr6So\nUCNyRURk/miOxxgaGWP3wf6oSxERkVA2E8lSYHfa6z3htnSPA1cDmNl6YCWwLNz3OeAvgDnz8eG2\njqQaxYiIyLyTaAzmxre0aZ6giEiuiPrW1C1ArZltBt4LPAaMmtlrgXZ3f3S6NzCzDWb2iJk90tHR\nkeVyT97A8Ci7unrVKEZEROad1Q2VQDBFQkREckNRFt97L7A87fWycNs4dz8M3ABgZgbsALYD1wCv\nN7MrgTKg2sy+5u5vnfhN3H0jsBFg3bp1noWfY0bs6OxlzNEdQRERmXeqyopZXFNGq9YSFBHJGdm8\nI/gwkDCzVWZWAlwL3JN+gJnVhvsA3gk86O6H3f0v3X2ZuzeF5/1kshCYT1Ld0rSYvIiIzEfN8Rgt\n6hwqIpIzpg2CZvZeM1twom/s7iPAe4D7CDp/3uXuT5nZjWZ2Y3jYWcAWM3uWoLvo+0/0++SLlvYk\nBQar6iujLkVERGTWNcdjbOtIMjaWs4N3RETmlUyGhjYCD5vZb4HbgfvcPaN/xd19E7BpwrZb054/\nBJw+zXs8ADyQyffLZdvak6xYWEFZcWHUpYiISMTM7HKCJZMKgdvc/ZYJ+98CfBgwoAf4E3d/PNy3\nM9w2Coy4+7pZLP2kJeJV9A2Nsq+7n2ULtIySiEjUpr0j6O4fBRLAfwB/BLSY2afNbHWWa5tTWtp7\n1ChGREQyXWd3B/AKdz8X+FvCufBpLnX3tfkSAuHIHHkNDxURyQ0ZzREM7wDuD79GgAXAt8zs77NY\n25wxMjrGjs5eNYoRERHIYJ1dd/+lux8MX/6KI0sr5a3UHPltCoIiIjkhkzmC7zezR4G/B34BnOvu\nfwK8GHhjluubE3Yd6GN41NUoRkREILN1dtO9A7g37bUDPzazR81sQxbqy4oFlSXUx0poUedQEZGc\nkMkcwYXA1e6+K32ju4+F6/3JNFIXvUSjgqCIiGTOzC4lCIIXp22+2N33mlkc+JGZbXX3Byc5dwOw\nAWDFihWzUu90VjfEaGnXovIiIrkgk6Gh9wIHUi/MrNrMLgRw92eyVdhcsi1cQHd1g4KgiIhMv84u\ngJmdB9wGXOXuXant7r43fGwH7iYYanoMd9/o7uvcfV1DQ8MMln/yEo0xWtuTZNhzTkREsiiTIPjv\nQPo4jmS4TTLU0tbD0tpyKkszuQErIiJzXCbr7K4AvgO8zd2fS9teaWZVqefAq4Ets1b5KUrEqzg8\nMEJHz2DUpYiIzHuZJBNLXy4iHBKqRHMCWtqTrNb8QBERIVhn18xS6+wWAren1tkN998KfAyoA75o\nZnBkmYhG4O5wWxFwh7v/IIIf46Skdw6NV5dFXI2IyPyWSaDbbmbv48hdwD8FtmevpLllbMzZ1pHk\notPqoi5FRERyRAbr7L4TeOck520Hzs96gVmSaprW2p7kZc31EVcjIjK/ZTI09EbgpQTzF/YAFxJO\nPpfp7T3Uz8DwmDqGiojIvNdQVUp1WZEaxoiI5IBp7wiGk9GvnYVa5qTWcL0krSEoIiLznZnRHI9p\nCQkRkRwwbRA0szKC1tVnA+MD+t39j7NY15yR+tRTQVBEZG4ys9XAHncfNLNXAucBX3X3Q9FWlpsS\n8Sr+d2tb1GWIiMx7mQwN/S9gEfD7wE8J2lxrTEeGWtqS1MdKqa0oiboUERHJjm8Do2bWDGwkWBri\njmhLyl2JxhidySEO9A5FXYqIyLyWSRBsdve/Anrd/SvAawjmCUoGWjuSmh8oIjK3jbn7CPAG4F/d\n/c+BxRHXlLNWpzWMERGR6GQSBIfDx0Nmdg5QA8SzV9Lc4e60tiVJNCoIiojMYcNmdh3wduD74bbi\nCOvJaQkFQRGRnJBJENxoZguAjxIsePs08JmsVjVHtPcM0jM4ovmBIiJz2w3A7wCfcvcdZraKYFqF\nTGJJTTkVJYXqHCoiErHjNosxswLgsLsfBB4ETpuVquaIVFc0BUERkbnL3Z8G3gcQfnBa5e76wHQK\nBQXG6oaY7giKiETsuHcE3X0M+ItZqmXOUcdQEZG5z8weMLNqM1sI/Bb4kpn9U9R15bJEXEFQRCRq\nmQwN/bGZfcjMlpvZwtRX1iubA1rbk9SUF9MQK426FBERyZ4adz8MXE2wbMSFwO9FXFNOa26M8UL3\nAD0Dw9MfLCIiWTHtOoLANeHju9O2ORomOq2W9qBjqJlFXYqIiGRPkZktBt4M/J+oi8kHzQ3BSJlt\nHb2sXV4bcTUiIvPTtHcE3X3VJF8KgRnY1p7UsFARkbnvE8B9wDZ3f9jMTgNaIq4ppyUaqwBoaVPD\nGBGRqEx7R9DMrp9su7t/debLmTu6koN09Q4pCIqIzHHu/k3gm2mvtwNvjK6i3Ld8QTklRQWaJygi\nEqFMhoa+JO15GfAqgsnwCoLHkbq4KQiKiMxtZrYM+FfgZeGmnwHvd/c90VWV24oKCzitvlJBUEQk\nQtMGQXd/b/prM6sF7sxaRXNEa0dwcUsNfxERkTnry8AdwJvC128Nt10WWUV5oDke44k93VGXISIy\nb2XSNXSiXmDVTBcy17S0JaksKWRJTVnUpYiISHY1uPuX3X0k/PpPoCHqonJdIl7F7oN9DAyPRl2K\niMi8lMkcwe8RdAmFIDiuAe7KZlFzwbaOJKvVMVREZD7oMrO3At8IX18HdEVYT15ojsdwD66XZy+p\nibocEZF5J5M5gp9Nez4C7NK8h+m1tCV5aXNd1GWIiEj2/THBHMF/Jvjg9JfAH0VZUD5INAZz6Fvb\nFQRFRKKQSRB8HnjB3QcAzKzczJrcfWdWK8tjhweG2X94QI1iRETmAXffBbw+fZuZ3QR8
LpqK8kNT\nXSWFBUZLmxrGiIhEIZM5gt8ExtJej5LWJluOtS3sgpaIq1GMiMg89WdRF5DrSooKWFlXoc6hIiIR\nySQIFrn7UOpF+Lwkkzc3s8vN7FkzazWzmyfZv8DM7jazJ8zsN2Z2Trh9uZndb2ZPm9lTZvb+TH+g\nXNAyHgR1R1BEZJ7SBPEMJOIxWtq1qLyISBQyCYIdZjY+5MXMrgI6pzvJzAqBLwBXEDSYuc7M1kw4\n7CPAZnc/D7ge+Hy4fQT4oLuvAS4C3j3JuTlrW3uSkqICli+siLoUERGJhk9/iCTiVezs6mNoZGz6\ng0VEZEZlEgRvBD5iZs+b2fPAh4H/N4Pz1gOt7r49vIt4J3DVhGPWAD8BcPetQJOZNbr7C+7+23B7\nD/AMsDSjnygHtLQnOa0+mPsgIiJzk5n1mNnhSb56gCXTnDvdiJm3hKNlnjSzX5rZ+Zmem0+a4zFG\nx5xdXb1RlyIiMu9ksqD8NuAiM4uFrzMdzL8U2J32eg9w4YRjHgeuBn5mZuuBlcAyoC11gJk1ARcA\nv87w+0aupb2H85fVRl2GiIhkkbuf1ETwtBEzlxFcGx82s3vc/em0w3YAr3D3g2Z2BbARuDDDc/NG\nqqlaS3uSRKPm1YuIzKZp7wia2afNrNbdk+6eDOf1fXKGvv8tQK2ZbQbeCzxG0Iwm9b1jwLeBm9z9\n8BT1bTCzR8zskY6Ojhkq6+T1D42y52C/GsWIiMhUph0x4+6/dPeD4ctfEXxImtG5+WR1Qwwz1DlU\nRCQCmQwNvcLdD6VehBemKzM4by+wPO31snDbOHc/7O43uPtagjmCDcB2ADMrJgiBX3f370z1Tdx9\no7uvc/d1DQ0NGZSVXds6krgfWR9JRERkgslGzBxv+sM7gHtP8tycVl5SyLIF5bR2KAiKiMy2TIJg\noZmVpl6YWTlQepzjUx4GEma2ysxKgGuBe9IPMLPacB/AO4EH3f2wmRnwH8Az7v5PmfwguWJbeDHT\nGoIiInKqzOxSgiD44ZM4N6dGzEwlEa+ipU2dQ0VEZlsmQfDrwP+a2TvM7J3Aj4CvTHeSu48A7wHu\nI2j2cpe7P2VmN5rZjeFhZwFbzOxZgu6iqWUiXga8DfhdM9scfmVyFzJyLW1JCguMprrKqEsREZHc\nNO2IGQAzOw+4DbjK3btO5FzIvREzU0nEY2zv7GV0TI1WRURmUybNYj5jZo8Dv0fQDvs+gqYu03L3\nTcCmCdtuTXv+EHD6JOf9nDxdg6mlvYeVdRWUFGWSsUVEZB4aHzFDEOKuBf4w/QAzWwF8B3ibuz93\nIufmm9XxGEMjY+w+0EdTvT5EFRGZLdMGwVAbQQh8E0Ens29nraI819qe1ELyIiIyJXcfMbPUiJlC\n4PbUiJlw/63Ax4A64IvBbAlGwrt7k54byQ8yQxJpnUMVBEVEZs+UQdDMTgeuC786gf8PMHe/dJZq\nyztDI2Ps7OrjinMWR12KiIjksAxGzLyTYO58RufmsyNLSPRw2ZrGiKsREZk/jndHcCvwM+C17t4K\nYGYfmJWq8tSurmCOgxrFiIiIZKaqrJhF1WW0tqtzqIjIbDreRLargReA+83sS2b2KvJ03t5saWlX\nx1AREZETlWiMKQiKiMyyKYOgu3/X3a8FzgTuB24C4mb272b26tkqMJ+0tCUxCxbIFRERkcw0x4Mg\nOKbOoSIis2ba1pbu3uvud7j76wjaVD/GSaxnNB+0diRZtqCc8pLCqEsRERHJG83xGH1Do7xweCDq\nUkRE5o0TWuPA3Q+G6xK9KlsF5bOWth4S8aqoyxAREckrqWunFpYXEZk9WuxuhoyOOds7ezU/UERE\n5ASlrp2aJygiMnsUBGfI7gN9DI2MKQiKiIicoIWVJdRVligIiojMIgXBGaKOoSIiIievOR4bv5aK\niEj2KQjOkFYFQRERkZOW6hzqrs6hIiKzQUFwhrS097CouozqsuKoSxEREck7iXiM7v5hOpKDUZci\nIjIvKAjOkG3tSd0NFBEROUmJxqBzaGubhoeKiMwGBcEZ4O60KAiKiIictPHOoR0KgiIis0FBcAbs\n6x6gb2hUQVBEROQkxatKqSorokV3BEVEZoWC4AxINYpJKAiKiIicFDMjEY/R0q5F5UVEZoOC4Axo\naQsuWqn5DSIiInLigs6hvVGXISIyLygIzoBtHUkWVpawsLIk6lJERETyViJeRWdykIO9Q1GXIiIy\n5ykIzoCWNjWKEREROVXNjWoYIyIyWxQET5E6hoqIiMyM5oYwCLYrCIqIZJuC4CnqTA7R3T+sRjEi\nIiKnaGltOeXFheocKiIyCxQET1Gqu1kirkYxIiIip6KgwGhW51ARkVmhIHiKtoXDVzQ0VERE5NQ1\nx2Pj11YREckeBcFT1NKepKq0iMbq0qhLERERyXvN8Rj7ugfoGRiOuhQRkTlNQfAUtbQlWR2PYWZR\nlyIiInnCzC43s2fNrNXMbp5k/5lm9pCZDZrZhybs22lmT5rZZjN7ZPaqnh2pOffbOrSeoIhINikI\nnqLWjqQaxYiISMbMrBD4AnAFsAa4zszWTDjsAPA+4LNTvM2l7r7W3ddlr9JopKZaqHOoiEh2KQie\ngu6+YTp6Bkk0KgiKiEjG1gOt7r7d3YeAO4Gr0g9w93Z3fxiYd+MjVyysoKSwQA1jRESyTEHwFLR2\nBBcpNYoREZETsBTYnfZ6T7gtUw782MweNbMNM1pZDigqLOC0hkpatYSEiEhWZTUIZjAHYoGZ3W1m\nT5jZb8zsnEzPzQWpdY60dISIiMyii919LcHQ0neb2SWTHWRmG8zsETN7pKOjY3YrPEWr4zFaOxQE\nRUSyKWtBMMM5EB8BNrv7ecD1wOdP4NzItbQnKSsuYGltedSliIhI/tgLLE97vSzclhF33xs+tgN3\nEww1ney4je6+zt3XNTQ0nEK5sy8Rj/H8gT4GhkejLkVEZM7K5h3BaedAEIS8nwC4+1agycwaMzw3\ncq3tSVY3xCgoUMdQERHJ2MNAwsxWmVkJcC1wTyYnmlmlmVWlngOvBrZkrdKIJOJVuMM23RUUEcma\nbAbBTOZAPA5cDWBm64GVBJ+Mnur8iVnR2q6OoSIicmLcfQR4D3Af8Axwl7s/ZWY3mtmNAGa2yMz2\nAH8GfNTM9phZNdAI/NzMHgd+A/yPu/8gmp8ke9Q5VEQk+4oi/v63AJ83s83Ak8BjwAmNAwknym8A\nWLFixYwXOJXewRH2Hurnuvjy6Q8WERFJ4+6bgE0Ttt2a9nw/wQejEx0Gzs9uddFrqq+gsMAUBEVE\nsiibQXDaORDufhi4AcCCFdl3ANuB8unOTXuPjcBGgHXr1vkM1T6t1HCVZjWKERERmVGlRYWsrKsY\nb8omIiIzL5tDQ6edA2FmteE+gHcCD4bh8KTnT8yW1MVJS0eIiIjMvOYGdQ4VEcmmrAXBTOZAAGcB\nW8zsWYIOoe8/3rnZqvVktHYkKS40VtZVRF2
KiIjInJNojLGzs5ehkbGoSxERmZOyOkcwgzkQDwGn\nZ3puLmlpS7KqvpLiwqwuxSgiIjIvJeJVjIw5u7p6STRqGoaIyExTijlJ2zqSGhYqIiKSJeocKiKS\nXQqCJ2FgeJRdXb1qFCMiIpIlqxtimEGLgqCISFYoCJ6EHZ29jLkaxYiIiGRLeUkhyxaUKwiKiGSJ\nguBJSA1T0WLyIiIi2dPcENPQUBGRLFEQPAkt7UkKDFbVV0ZdioiIyJyVaKxiW0eS0bFZWyZYRGTe\nUBA8Cdvak6xYWEFZcWHUpYiIiMxZzfEYQyNj7D7QF3UpIiJzjoLgSWhp71GjGBERkSxT59D/v707\nD4+jutM9/v31pn2xtdlYNja2bGNsMMEQVrNNAiQBkhAwGZInYTJhyEAgJGTi3Jm5yZ3hmSEzZGMg\nECCE5LINYRlghj2xDVzMYsAEs1m28SIvkixvau3LuX9USWq1Jduy1V2S+v08Tz1dfaq6+1eyrKNX\np+qUiEjqKAgOUWdXNx9vb9JEMSIiIinW09dqwhgRkeGnIDhEG3Y009HlNFGMiIhIihVmR5lQmK0R\nQRGRFFAQHKLqWn/G0AoFQRERkVSbUZ7PmrrGoMsQERlzFASHaG29FwSnlykIioiIpJoXBOM4p5lD\nRUSGk4LgEFXXNjKpOIe8rEjQpYiIiIx5VRX5NLV3sXV3a9CliIiMKQqCQ1RdF2e6rg8UERFJixll\nmjBGRCQVFASHoLvbsbY+roliRERE0qSqwrtdU3WtrhMUERlOCoJDsHlXC60d3QqCIiIiaTI+L8aE\nwmzufGkdj6/crGsFRUSGiYLgEPRMX617CIqIiKTPr796HGUFWVz74Eou+fVyVm3eHXRJIiKjnoLg\nEFT701crCIqIiKTPMZOLefyqU7nxi/NYV9/E+be8zA8f/TMN8bagSxMRGbUUBIegujZOaX4Wxbmx\noEsRERHJKOGQcekJU1jy/TP4xinT+MOKGs64aSl3vbSOjq7uoMsTERl1FASHYI0mihERkWFgZuea\n2UdmtsbMFg+wfbaZLTezNjO7fiivHesKs6P8w+fm8Mx3FvKJKeO44X8+4NxfvMiy1fVBlyYiMqoo\nCB4g5xxrauNUVSgIiojIwTOzMHArcB4wB/iymc1J2m0HcA1w00G8NiPMKM/nnsuP5zdfW0BXt+Nr\ndy/W41AAACAASURBVL/OX//uDdZvbwq6NBGRUUFB8ADVNbbR2Nap6wNFRORQnQCscc6tc861Aw8C\nFybu4Jyrc869AXQM9bWZxMw4+8gKnr1uIYvPm83ytQ186ufL+NenPyDe1hl0eSIiI5qC4AGqrtWM\noSIiMiwmAZsSntf4bal+7ZiVFQlz5enTWfL9M7hw/iR+vWwdZ960lIffrKG7W7ebEBEZiILgAdKM\noSIiMpqY2RVmtsLMVtTXZ8b1c+UF2dx08TH811WnMKk4h+v/8A5fuO0V3t64M+jSRERGHAXBA7Sm\nLk5RTpSy/KygSxERkdFtMzA54Xml3zasr3XO3eGcW+CcW1BWVnZQhY5W8ycX8+i3TuanFx/Dll0t\nfOFXr/C9h96hbk9r0KWJiIwYCoIHqLrOmzHUzIIuRURERrc3gCozm2ZmMeBS4Ik0vDajhELGRcdV\nsuT6M/jWGdN58p0tnHnTUm5bupa2zq6gyxMRCZyC4AFaWxfXaaEiInLInHOdwNXAs8AHwEPOuffM\n7EozuxLAzCaYWQ3wXeAfzKzGzAoHe20wRzI65GdF+MG5s3nuuoWcNL2UnzzzIef8/EVeeL8W53T9\noIhkrkjQBYwGDfE2GpraFQRFRGRYOOeeAp5Kars9YX0b3mmfB/Ra2b+ppXnc9bUFvLi6nn/67/f5\n69+v4LSqUn50/hxmlBcEXZ6ISNppRPAArKnTjKEiIiJjwcKZZTx97Wn878/NYeWmXZz7i5f4pyff\nZ3dL8p06RETGNgXBA7Cm3guCVRX6i6GIiMhoFw2H+KtTp7H0+jO4eMFkfvvKx5x101IeeH0jXbrd\nhIhkiJQGQTM718w+MrM1ZrZ4gO1FZvakmb1jZu+Z2eUJ267z21aZ2QNmlp3KWvelujZObizMYUWB\nlSAiIiLDrCQ/i3/94jyevPpUppfl88NH3+WCW17mjfU7gi5NRCTlUhYEzSwM3AqcB8wBvmxmc5J2\nuwp43zl3DHAG8FMzi5nZJOAaYIFzbi4QxpsZLRBr672JYjRjqIiIyNgzd1IR//k3J/IfXz6WnU3t\nXHz7cr79wNts2dUSdGkiIimTyhHBE4A1zrl1zrl24EHgwqR9HFBgXsLKB3YAnf62CJBjZhEgF9iS\nwlr3qbpWM4aKiIiMZWbG+cccxh+/dwbXnF3Fc+9t46yfLuXmP1bT2qHbTYjI2JPKIDgJ2JTwvMZv\nS3QLcCReyHsXuNY51+2c2wzcBGwEtgK7nXPPDfQhZnaFma0wsxX19fXDfQzsae1g255WBUEREZEM\nkBML891PzeSF757O2bMr+Nnzq1n4b0v44aPv8syqrexp1aQyIjI2BH37iHOAlcBZwHTgeTN7Ce9U\n0AuBacAu4A9m9hXn3L3Jb+CcuwO4A2DBggXDfoX3Wn/G0CpNLS0iIpIxJo/P5dbLPsFX1jZwzysf\n89/vbOGB1zcSDhnHTRnH6bPKOH1mGXMmFhIK6dIRERl9UhkENwOTE55X+m2JLgdudN4dXdeY2cfA\nbOBw4GPnXD2AmT0KnAzsFQRTrVq3jhAREclYJ00v4aTpJXR0dfP2xl0sW13HstX1/PuzH/Hvz35E\naX6M06q8UHhaVSkl+VlBlywickBSGQTfAKrMbBpeALwU+MukfTYCZwMvmVkFMAtYBxhwopnlAi3+\nPitSWOug1tbFiUVCTB6XE8THi4iIyAgQDYc4Ydp4Tpg2nu+fM5v6xjZeXlPPso/qWba6nsfe3owZ\nzJtUxMKqMk6fVcaxk4uJhHWnLhEZmVIWBJ1znWZ2NfAs3qmedzvn3jOzK/3ttwP/DNxjZu/ihb8f\nOOe2A9vN7GHgLbzJY97GP/0z3arr4hxRmqcf5CIiItKrrCCLLxxbyReOraS727Fqy+7eUHjbsrXc\nsmQNBdkRTp1Ryukzy1g4s4zDivVHZREZOVJ6jaBz7ingqaS22xPWtwCfHuS1PwJ+lMr6DkR1XSPH\nVBYHXYaIiIiMUKGQcXRlMUdXFvPts6vY3dLBK2u2s2y1FwyfXrUNgKryfE6f6Y0WHj91PNnRcMCV\ni0gmC3qymBGtpb2Lmp0tfOkTk/e/s4iIiAhQlBPlvHkTOW/eRJxzVNfFWfZRPS9W1/P75Ru46+WP\nyY6GOOmIEhbO9K4vnFaap/sVi0haKQjuw9r6OM5pohgREclwuzbC0p/AKddC2cygqxlVzIyZFQXM\nrCjgmwuPoLm9k9fW7egdLVzy5PsATB6f451CWlXGyTNKyc/Sr2giklr6KbMPa+v9W0dUKAiKiEgG\n2/
I2rHoEVt4HR54Pp30XDjs26KpGpdxYhDNnl3Pm7HIANjY0s6zam3Tm0bc2c++rG4mGjeMOH8fp\nM8tZOLOU2RMKCesWFSIyzBQE96G6Nk44ZEwtyQu6FBERkeDMuRAOPwVevQ1evxM+eAKmnwWnfc9r\n1ymNB21KSS5fLTmcr554OO2d3azYsIMXV3vXF/7kmQ/5yTOQFQlxRFk+VeXeMqM8n6qKfA4vySOq\nyexE5CApCO5DdV0jh5fkEovoh6yIiGS4vFI4+x+900NX/AaW3wr3fBYmfxJO/S7MPEeB8BDFIiFO\nnl7KydNLWXzebGr3tPJy9XY+3LaH6ro4b27YyRPvbOndPxIyppbm9QXEigJmlOVzRFmeJqIRkf1S\nENyHNXVxqnR9oIiMQB0dHdTU1NDa2hp0KSmVnZ1NZWUl0Wg06FKkR3YhnHodfPJKePte+H83wwOL\noGKu1z7n8xDWrxfDoaIwm4uOq+zX1tTWybr6JqrrGllTF6e6Ls6H2xp59r1tdDtvn5DBlPG5zCjP\nZ0Z5Qe8o4ozyfPJ07aGI+PTTYBDtnd2sb2jm3LkTgi5FRGQvNTU1FBQUMHXq1DE706BzjoaGBmpq\napg2bVrQ5UiyaA6c8E047uve9YMv/Qwe+Qb86QY49TtwzJchkhV0lWNOXlaEeZVFzKss6tfe2tHF\n+oYmqmvjrKmL+yGxkWWr6+nocr37TSrO6Q2FVf4ppjPKCijK1R9bRDKNguAgNjQ00dXtqCovCLoU\nEZG9tLa2jukQCN5siyUlJdTX1wddiuxLOArHXArzLoGP/gde+ik8eS0svRFOutoLilk6uybVsqNh\nZk8oZPaEwn7tHV3dbNzRTHVtnLX1caprG6mui/Paxw20dnT37ldWkNXvGsQZ5QVUVeRTkhcb0z9n\nRDKZguAgquu8GUN16wgRGaky4ZezTDjGMSMU8mYUnf05WLfUC4TP/T28dJN3GukJV0Du+KCrzDjR\ncIjpZflML+v/+0x3t2Pzrpa+U0xrvdNMH31rM41tnb37jcuNUlVRwMyKfGZVFPjrBYzPi6X7UERk\nmCkIDqK6No4Ze/3gFBER2LVrF/fffz9/+7d/O6TXfeYzn+H++++nuLg4RZVJ4Mxg+pnesukNePln\nsPRfvWsJF1zujRIWTgy6yowXChmTx+cyeXwuZ82u6G13zlG7p603IK6u9UYRH1+5hcbWvoBYmp/F\nzIr83nskzqzIp6qigKIcnWIqMlooCA5iTX2cynE55MQ065aISLJdu3bxq1/9aq8g2NnZSSQyeNfy\n1FNPpbo0GUkmHw9ffgBq34eXfw6v/gpevwPm/6U3++j4I4KuUJKYGROKsplQlM1pVWW97T0B8aPa\nRqprG1ld28hHtXH+sGITTe1dvftNKMymyh89nFnhnV5aVVFAviapERlx9L9yENW1jczQaKCIyIAW\nL17M2rVrmT9/PtFolOzsbMaNG8eHH37I6tWr+fznP8+mTZtobW3l2muv5YorrgBg6tSprFixgng8\nznnnncepp57KK6+8wqRJk3j88cfJyckJ+MjSw8zOBX4JhIG7nHM3Jm03f/tngGbg6865t/xt64FG\noAvodM4tSGPpB6diDlx0J5z5v+CVm73ZRt/6PRz1Re/m9BVHBV2h7EdiQDx9Zl9ATDzFdHVtnNXb\nGlld18j/fXUDbZ191yBOKs5h1gQvGM4sL2DWhAJmlOfrNhciAVIQHEBXt2Pd9iYWJvygExEZqf7P\nk+/x/pY9w/qecw4r5EfnD/7L+Y033siqVatYuXIlS5cu5bOf/SyrVq3qnd3z7rvvZvz48bS0tHD8\n8cdz0UUXUVJS0u89qqureeCBB7jzzju55JJLeOSRR/jKV74yrMcxEplZGLgV+BRQA7xhZk84595P\n2O08oMpfPgnc5j/2ONM5tz1NJQ+f8dPgcz+HhX8Hr94KK34Lqx6Gmed5gXDyCUFXKEM02CmmXd2O\nTTuaE0YQ46yubeSl6r5ZTM2/zUXPqaU9p5keUZZHVkQBUSTVFAQHsGlHM+2d3ZooRkTkAJ1wwgn9\nbvFw880389hjjwGwadMmqqur9wqC06ZNY/78+QAcd9xxrF+/Pm31BuwEYI1zbh2AmT0IXAgkBsEL\ngd875xzwqpkVm9lE59zW9JebAoUT4dM3eDeif/1OeO02+M2nYOpp3r0Ip5+lm9OPcmH/ZvdTS/M4\n56i+W3F1dHWzoaGJ1bVxPtrW2DuS+KcP6+jyb4QYDhlTS3KpKi+gtCBGYXaUwpyo/xhJeB7pbY9F\nQkEdqsiopSA4AM0YKiKjyb5G7tIlLy+vd33p0qW88MILLF++nNzcXM4444wBb3yfldV3j7lwOExL\nS0taah0BJgGbEp7X0H+0b7B9JgFbAQe8YGZdwK+dc3eksNbUyh0PZ/wATroK3vodvPIfcO8XYeJ8\nOO173gykIf2CP5ZEwyFmlBcwo7yAz8zrmzSorbOLj7c3eeHQHz1cXdfI6+s72N3S0RsSB5MdDQ0Y\nEAtzIhT1rg8cJgsUJCVDKQgOYI2CoIjIPhUUFNDY2Djgtt27dzNu3Dhyc3P58MMPefXVV9Nc3Zh3\nqnNus5mVA8+b2YfOuReTdzKzK4ArAKZMmZLuGocmK98Lg8f/NbzzoDexzENfhdKZ3gjhvIu9+xXK\nmJUVGfg+iOBNVNPS0cWelk72tHawp6XDf0x83tmvfUdTO+u3N/W2d+4nSOZEw3sFxPzsKHmxMDmx\nMHmxiP8YJjcrQq7flhsLkxuLkJvVt09uLEw0rGApI5+C4ACq6xqpKMyiMFudjojIQEpKSjjllFOY\nO3cuOTk5VFT0XRt07rnncvvtt3PkkUcya9YsTjzxxAArHZE2A5MTnlf6bQe0j3Ou57HOzB7DO9V0\nryDojxTeAbBgwYJ9/xY8UkSy4LivwfzL4P3/8gLhf30LlvwLzPgLyK+A/DLIK4d8f8kr1w3rxzgz\n88JWLMKEouwhv/5ggmRDUzvrG5ppbu+kub2L5vau/Y5KJoqFQ/3CYV5PYPSD5P4CZl5WhHG5Ucbl\nxRiXGyMc0qnSMvwUBAewti5OVXlB0GWIiIxo999//4DtWVlZPP300wNu67kOsLS0lFWrVvW2X3/9\n9cNe3wj2BlBlZtPwwt2lwF8m7fMEcLV//eAngd3Oua1mlgeEnHON/vqngX9KY+3pEY7AvC/B3Iug\n+jnvlNEPnoDmhoH3j+ZCXpkfDiv61gdqi+Xr+sMMc6hBErww2dbZTUt7F00J4bC5rZOm9q7ewNjU\n1unv00VLe/9tzW1dbNvT2vcebd7j/vKlGRTlRBmfF2N8bsx7TFrG5cUo8UNjSX6MnGgY0/e57IeC\nYBLnHNV1cS5ZMHn/O4uIiAyRc67TzK4GnsW7fcTdzrn3zOxKf/vtwFN4t45Yg3f7iMv9l1cAj/m/\n4EWA+51zz6T5ENLHDGae4y0AXR3QtB2a6iBe7z/WJqzXwY5
1sPFVPzQO8Bt2JKf/aOJAI4w96wqN\n4jMzsqNhsqNhxuXFhu19ewJmc2JgbO8i3trJzuZ2djQlLM3t7Ii3s3FHM29v2sXOpvZBT3nNioS8\nYJgcGnNjjM/vC5Ql+V54LNaoY0ZSEEyyZXcrze1duj5QRERSxjn3FF7YS2y7PWHdAVcN8Lp1wDEp\nL3CkCke9GUcLJ+5/365OaN7uhcOe4Bivhab6vrad66HmdS9cDhoay6BwEkw4Gg6b701kUzrTG7UU\nOUSJAXP8EAOmc449rZ3sbGqnoamdnYmBsan/sqGhmZ1N7TS2dQ5SBxT3jDrmxSjIjpITDZMVDZHj\n1+c9hnrr7W2PhciOhMmOhcmOeKe8Zie8LisS0ujkCKWfYkl6JoqpUhAUEREZvcIRKJjgLfvT1emN\nIPaMKjb5obFnfecGePteeP3X3v6RHJgwry8YHjYfSmcpHEpamRlFOVGKcqJMLc3b/wvwZmfd1dxB\nQ7ydnc19ATIxSDY0tVHf2EZLRxetvUs3LR1Du04yUXZSoMyKhsnxQ2VPe7YfNHNjYe+4cmMU50Qp\nzvWOsTgnRlFulIKsCCGNXg4L/cRKUl3rzYKnEUEREZEMEY5AQYW3DKa7CxrWwJaVsHWl97jyfnjd\nv3tHJAcmzO0LhhPnQ9lshUMZUbIiYSoKw1QUHty1kh1d3X0Bsb2b1s4uWtq95y1+YOxb79/W2uHv\n619r2ea/dk9rh/8e3n7N7d7rBhPyr5kcOCwO0JYbpSgnRlGObhOSTD+dkqytj/vnTGftf2cRERHJ\nDKEwlM3ylmMWeW3d3V447AmGW1fCOw/AG3d62yPZUDG3/8hh2WzdCkNGrWg4RDQcSvnM+m2dXexu\n6WB3s3cfyV3NHexq6WBXc7vXntS2vqGJXc3erK9uH4OWeYOMNhbleiOOxblRcmNhsiLeabFZkZC3\nHgmRHe1b79keC4dG9eikgmCS6tq4RgNFRERk/0IhKJvpLUdf4rV1d8OOtf1HDt/5T3jjLm97OGvv\nkcPyIxUORRJkRcKUF4QpLxjayGV3t6OxtZNdLe29QdELlO0JwdFva2lnTV3c26e5g/au7oOqNRb2\nA2NCUIxFQmRFe0JjqPdayYECZla/gBni88dOStt9KBUEE/TMGPrZow/gInQRETlg+fn5xOPxoMsQ\nSb1QCEqrvOXoi7227m5vNtOtK2HL27D1HXj3D7DiN972cBZUHJU0cngkRIZvdkpJ0tnmz0Bbn/BY\n3/+5GRRVQtFkbyme7D0vmOiNEMuIEwoZRbneCN/hJQf+OuccrR3d7Gxup6Wji7aObto6u2jr7PaW\njoT1zp7tCfsMun8X8bZOGuLtg75fsgvmHzaMX5F9UxBMsD3uDTdrohgREREZNqEQlM7wlnlf8tq6\nu2Hnx34w9EcO330EVtztbQ/HvHA4cb43a2l20eBLLE+3uejugpadAwe6vdYboG33wO8TzvLuOZlX\nCq4balZAy47++1jY+zcpquwLh8lhMXZgk7fIyGBm5MTC5MRy0vq5zjnau7r7hclYmkYDQUGwn+o6\nTRQjInIgFi9ezOTJk7nqKu8OBz/+8Y+JRCIsWbKEnTt30tHRwQ033MCFF14YcKUiI1QoBCXTvSU5\nHCZec/jeY9C6a9/vZeF9B8Xs4tERJJ3zAl13p7d0tfvhbrBAl/C8ZYcX3JJZCHJL+sLdYcf2reeV\nJSz+84HuHdneBLtrYPcm2LWpb313DWxYDns2g0ua3CRnvB8Up/hBMSks5pWNjK+5BMrM/NNCw3Bw\n8/ccEgXBBGt7bx1REHAlIiJD8PRi2Pbu8L7nhHlw3o2Dbl60aBHf+c53eoPgQw89xLPPPss111xD\nYWEh27dv58QTT+SCCy7Q/aNEDlRiOJx7UV97Zzu07YHW3V4obN29/2V7bd96R/O+P3dfQTKa2xfM\nEkPaXm0d+9k+wPOupH2Tw9Rgsor6glvJdJhy4t6BrmfJKT700zhjeX0TBQ2kuwsat3rBcNcmPyT6\nQbFhLaxbCu1Jp8aHs/oCYvHkvpDYGxorIaKJCyW1UhoEzexc4JdAGLjLOXdj0vYi4F5gil/LTc65\n3/rbioG7gLl4d3n9K+fc8lTWW10XpyArQkWh/uOJiOzLscceS11dHVu2bKG+vp5x48YxYcIErrvu\nOl588UVCoRCbN2+mtraWCRMO4D5uIjK4SAwipV7IORiHEiQ7miEUhVDEX8IJ60nPw1FvieYc2P57\ntUX33id3fP9wl1sy8gJSKNwX3qacuPd257yve29QrIHdG/ueV78A8W17vy6SA1kFSUthwnr+AG1J\n+8XyvX+PkfwHOee80d/ONujqgK42f729/2Nn695tXW3e93fPY799krb1vO+g2/xHC0M02/v69zxG\nsryvYyR772373Hc/j5HsQG8xk7JPNrMwcCvwKaAGeMPMnnDOvZ+w21XA+865882sDPjIzO5zzrXj\nBchnnHNfMrMYkJuqWntU18aZXp6vv16LyOiyj5G7VLr44ot5+OGH2bZtG4sWLeK+++6jvr6eN998\nk2g0ytSpU2ltbQ2kNhFJcKhBUg6NGeSM85YJ8wbep7PNO8W0Jxzu2eJdx9jW2H/ZtSHh+R5vJHW/\nnx/eR2AcZLGwF6iSA1pXu7feE8Z69+lZb0vYPtBrEtp6XtPdMXxf61DEG22NxJIe/SWcBbFcCI/r\nvy0c80NZ1DvFuKPFC5X9HtugeTt0tEJnS//HrrZDqzkxSF71mldjGqQygp4ArHHOrQMwsweBC4HE\nIOiAAvOSVz6wA+j0RwoXAl8H8INhewprBWBNfZwzZpal+mNERMaERYsW8c1vfpPt27ezbNkyHnro\nIcrLy4lGoyxZsoQNGzYEXaKIyOgQyYLxR3jLgXLOCyc9obAnILbH925LXpobYOf6vucdTUOrtydw\nhaN9QapnifSsZ3kjksltA70mua03nGUNHugGCntBzeba3e0FxoHCY3Jo3NdjZ1taR7xTGQQnAZsS\nntcAn0za5xbgCWALUAAscs51m9k0oB74rZkdA7wJXOucG+J36dA89DcnMYrvCSkiklZHHXUUjY2N\nTJo0iYkTJ3LZZZdx/vnnM2/ePBYsWMDs2bODLlFEZOwy80aRotmQf4gDGd1d/YOi6x48wIWi3vWs\n0icU8kbx0jSSN1yCnizmHGAlcBYwHXjezF7Cq+sTwLedc6+Z2S+BxcA/Jr+BmV0BXAEwZcqUQypm\nWqmm+hURGYp33+2bpKa0tJTlywe+lFv3EBQRGcFCYW9inZzioCuRNEplnN8MTE54Xum3JboceNR5\n1gAfA7PxRg9rnHOv+fs9jBcM9+Kcu8M5t8A5t6CsTKd1ioiIiIiI7E8qg+AbQJWZTfMne7kU7zTQ\nRBuBswHMrAKYBaxzzm0DNplZzzy9Z9P/2kIRERERERE5SCk7NdQ512lmVwPP4t0+4m7n3HtmdqW/\n/Xbgn4F7zOxdwI
AfOOe2+2/xbeA+P0Suwxs9FBERERERkUOU0msEnXNPAU8ltd2esL4F+PQgr10J\nLEhlfSIio5lzbszf7sY5F3QJIiIiY5Km/BERGYWys7NpaGgY00HJOUdDQwPZ2dlBlyIiIjLmBD1r\nqIiIHITKykpqamqor68PupSUys7OprKyMugyRERExhwFQRGRUSgajTJt2rSgyxAREZFRSqeGioiI\niIiIZBgFQRERERERkQyjICgiIiIiIpJhbCzNOGdm9cCGQ3ybImD3MJRzMKYAGwP6bNCx69jTL8hj\nD/K4Qcd+qMd+uHOubDiKyQRjoH8E/Z/Rsaefjj0Y+r3o0B1QHzmmguBwMLM7nHNXBPTZ9UH+YqNj\n17EH8NmBHXuQx+1/vo5dRpVM/r7RsevYA/r8jDz2oPuITDp2nRq6tycD/OxdAX426NiDomMPRpDH\nDTp2GX0y+ftGxx4cHXswMvV3A8igY1cQTOKcC/IfP8jTD3TswdGxByDg4wYdu4wymfx9o2MPlI49\nAJn6uwFk1rErCI4sdwRdQIB07JlJx56ZMvnY5eBl8veNjj0zZeqxZ+pxQ5qPXdcIioiIiIiIZBiN\nCIqIiIiIiGQYBcERwMwmm9kSM3vfzN4zs2uDrimdzCxsZm+b2X8HXUu6mVmxmT1sZh+a2QdmdlLQ\nNaWDmV3nf6+vMrMHzCw76JpSyczuNrM6M1uV0DbezJ43s2r/cVyQNabCIMf97/73+5/N7DEzKw6y\nRhnZMr1/hMztIzO1f4TM6iMztX+EkdFHKgiODJ3A95xzc4ATgavMbE7ANaXTtcAHQRcRkF8Czzjn\nZgPHkAFfBzObBFwDLHDOzQXCwKXBVpVy9wDnJrUtBv7onKsC/ug/H2vuYe/jfh6Y65w7GlgN/DDd\nRcmokun9I2RuH5lx/SNkZB95D5nZP8II6CMVBEcA59xW59xb/noj3g+7ScFWlR5mVgl8Frgr6FrS\nzcyKgIXAbwCcc+3OuaCnTE6XCJBjZhEgF9gScD0p5Zx7EdiR1Hwh8Dt//XfA59NaVBoMdNzOueec\nc53+01eByrQXJqNGJvePkLl9ZIb3j5BBfWSm9o8wMvpIBcERxsymAscCrwVbSdr8Avg7oDvoQgIw\nDagHfuuf9nOXmeUFXVSqOec2AzcBG4GtwG7n3HPBVhWICufcVn99G1ARZDEB+Svg6aCLkNEhA/tH\nyNw+MiP7R1Af6VP/6El5H6kgOIKYWT7wCPAd59yeoOtJNTP7HFDnnHsz6FoCEgE+AdzmnDsWaGLs\nnv7Qyz/X/0K8jv4wIM/MvhJsVcFy3vTNGTWFs5n9Pd5pf/cFXYuMfJnWP0LG95EZ2T+C+shkmdg/\nQvr6SAXBEcLMonid3H3OuUeDridNTgEuMLP1wIPAWWZ2b7AlpVUNUOOc6/nr9sN4Hd9Y9xfAx865\neudcB/AocHLANQWh1swmAviPdQHXkzZm9nXgc8BlTvcwkv3I0P4RMruPzNT+EdRHQgb3j5DePlJB\ncAQwM8M7D/4D59zPgq4nXZxzP3TOVTrnpuJdCP0n51zG/NXLObcN2GRms/yms4H3AywpXTYCJ5pZ\nrv+9fzYZMglAkieAr/nrXwMeD7CWtDGzc/FOdbvAOdccdD0ysmVq/wiZ3UdmcP8I6iMhQ/tHSH8f\nqSA4MpwCfBXvr30r/eUzQRclafFt4D4z+zMwH/iXgOtJOf8vvA8DbwHv4v0cuiPQolLMzB4AoEFg\nlQAAAnVJREFUlgOzzKzGzL4B3Ah8ysyq8f4CfGOQNabCIMd9C1AAPO//rLs90CJlpFP/mLkyrn+E\nzOsjM7V/hJHRR5rOyhEREREREcksGhEUERERERHJMAqCIiIiIiIiGUZBUEREREREJMMoCIqIiIiI\niGQYBUEREREREZEMoyAoMkKYWVfC9OgrzWzxML73VDNbNVzvJyIiki7qH0VSIxJ0ASLSq8U5Nz/o\nIkREREYY9Y8iKaARQZERzszWm9m/mdm7Zva6mc3w26ea2Z/M7M9m9kczm+K3V5jZY2b2jr+c7L9V\n2MzuNLP3zOw5M8sJ7KBEREQOkfpHkUOjICgycuQknfqyKGHbbufcPOAW4Bd+238Av3POHQ3cB9zs\nt98MLHPOHQN8AnjPb68CbnXOHQXsAi5K8fGIiIgMB/WPIilgzrmgaxARwMzizrn8AdrXA2c559aZ\nWRTY5pwrMbPtwETnXIffvtU5V2pm9UClc64t4T2mAs8756r85z8Aos65G1J/ZCIiIgdP/aNIamhE\nUGR0cIOsD0VbwnoXukZYRERGP/WPIgdJQVBkdFiU8LjcX38FuNRfvwx4yV//I/AtADMLm1lRuooU\nERFJM/WPIgdJf/EQGTlyzGxlwvNnnHM9U2SPM7M/4/3V8st+27eB35rZ94F64HK//VrgDjP7Bt5f\nNr8FbE159SIiIqmh/lEkBXSNoMgI518DscA5tz3oWkREREYK9Y8ih0anhoqIiIiIiGQYjQiKiIiI\niIhkGI0IioiIiIiIZBgFQRERERERkQyjICgiIiIiIpJhFARFREREREQyjIKgiIiIiIhIhlEQFBER\nERERyTD/HydGnc1NStsUAAAAAElFTkSuQmCC\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "def plot_model_loss_acc(model_history):\n", " '''\n", " Plots the accuracy and the loss for the training and\n", " validation sets by epoch\n", " '''\n", " fig, axs = plt.subplots(1,2,figsize=(15,5))\n", " \n", " # Accuracy plot\n", " axs[0].plot(range(1, len(model_history.history['acc'])+1),\n", " model_history.history['acc'])\n", " axs[0].plot(range(1, len(model_history.history['val_acc'])+1), \n", " model_history.history['val_acc'])\n", " axs[0].set_title('Model Accuracy')\n", " axs[0].set_ylabel('Accuracy')\n", " axs[0].set_xlabel('Epoch')\n", " axs[0].set_xticks(np.arange(1, len(model_history.history['acc'])+1), \n", " len(model_history.history['acc'])/10)\n", " axs[0].legend(['train', 'val'], loc='best')\n", " \n", " # Loss plot\n", " axs[1].plot(range(1, 
len(model_history.history['loss'])+1),\n", " model_history.history['loss'])\n", " axs[1].plot(range(1, len(model_history.history['val_loss'])+1),\n", " model_history.history['val_loss'])\n", " axs[1].set_title('Model Loss')\n", " axs[1].set_ylabel('Loss')\n", " axs[1].set_xlabel('Epoch')\n", " axs[1].set_xticks(np.arange(1, len(model_history.history['loss'])+1),\n", " len(model_history.history['loss'])/10)\n", " axs[1].legend(['train', 'val'], loc='best')\n", " plt.show()\n", " \n", "\n", "plot_model_loss_acc(model_info)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Data Augmentation\n", "\n", "Using Keras' ImageDataGenerator functionality for training on augmented data" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "from keras.preprocessing.image import ImageDataGenerator\n", "\n", "# Note: flips and large rotations can turn some digits into other digits (e.g. 6 vs. 9),\n", "# so these settings may need to be toned down for MNIST\n", "datagenerator = ImageDataGenerator(\n", " rotation_range=360,\n", " width_shift_range=0.2,\n", " height_shift_range=0.2,\n", " rescale=None,\n", " shear_range=0.2,\n", " zoom_range=0.2,\n", " horizontal_flip=True,\n", " vertical_flip=True,\n", " fill_mode='nearest')\n", "\n", "datagenerator.fit(X_train)\n", "\n", "# Fit the model on the batches generated by datagenerator.flow()\n", "# (requires X_validation/y_validation from the Additional Setup cell below)\n", "model_info = model.fit_generator(generator=datagenerator.flow(X_train, y_train,\n", " batch_size=batch_size,\n", " shuffle=True),\n", " steps_per_epoch=10*round(X_train.shape[0] / batch_size),\n", " epochs=epochs,\n", " validation_data=(X_validation, y_validation),\n", " verbose=1,\n", " callbacks=[time_callback, # Gives epoch training times\n", " earlystop,\n", " callbacks.ModelCheckpoint('model.h5', save_best_only=True)])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Additional Setup\n", "\n", "Splitting the training dataset into training and validation sets manually, since fit_generator does not support the automatic validation_split that fit provides" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "ExecuteTime": { "end_time": "2018-03-22T23:00:06.563570Z", "start_time": "2018-03-22T23:00:04.559751Z" }, "collapsed": true }, "outputs": [], "source": [ "from sklearn.model_selection import train_test_split\n", "X_train, X_validation, y_train, y_validation = train_test_split(X_train, y_train, test_size=.2)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## CNTK\n", "\n", "**To-do:**\n", "- Finish CNTK script to train the model\n", "- Add script for loading trained model and generating predictions" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "ExecuteTime": { "end_time": "2018-03-22T23:00:06.619571Z", "start_time": "2018-03-22T23:00:06.597071Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2018/03/22 18:00\n", "OS: win32\n", "Python: 3.6.2 |Anaconda custom (64-bit)| (default, Jul 20 2017, 12:30:02) [MSC v.1900 64 bit (AMD64)]\n", "NumPy: 1.13.3\n", "CNTK: 2.3.1\n", "GPU: GPU[0] GeForce GTX 680\n" ] } ], "source": [ "import sys\n", "import time\n", "import numpy as np\n", "import cntk as C\n", "from cntk.layers import Convolution2D, MaxPooling, Dropout, Dense\n", "\n", "print(time.strftime('%Y/%m/%d %H:%M'))\n", "print('OS:', sys.platform)\n", "print('Python:', sys.version)\n", "print('NumPy:', np.__version__)\n", "print('CNTK:', C.__version__)\n", "print('GPU:', C.gpu(0))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Reference [1](https://github.com/Microsoft/CNTK/blob/master/Examples/Image/Classification/ConvNet/Python/ConvNet_MNIST.py) | 
[2](https://cntk.ai/pythondocs/CNTK_103D_MNIST_ConvolutionalNeuralNetwork.html) | [3](https://medium.com/@tuzzer/building-a-deep-handwritten-digits-classifier-using-microsoft-cognitive-toolkit-6ae966caec69)" ] }, { "cell_type": "code", "execution_count": 26, "metadata": { "ExecuteTime": { "end_time": "2018-01-20T00:04:40.286310Z", "start_time": "2018-01-20T00:04:40.134283Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Output Shape of the first convolution layer: (8, 14, 14)\n", "Bias value of the last dense layer: [ 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1]\n", "Training 1072922 parameters in 8 parameter tensors.\n" ] } ], "source": [ "# Define the data dimensions\n", "input_dim_model = (1, 28, 28) # Images are 28 x 28 with 1 channel of color (gray)\n", "input_dim = 28*28 # Used by readers to treat the input data as a vector\n", "num_output_classes = 10\n", "\n", "X = C.input_variable(input_dim_model)\n", "y = C.input_variable(num_output_classes)\n", "\n", "def create_model(input_vars, out_dims=10, dropout_prob=0.0):\n", " with C.layers.default_options(init=C.glorot_uniform(), activation=C.relu):\n", "\n", " conv1 = Convolution2D(filter_shape=(5, 5),\n", " num_filters=8,\n", " strides=(2, 2),\n", " pad=True,\n", " init_bias=0.1,\n", " name='conv1')(input_vars)\n", " pooling1 = MaxPooling((2, 2),\n", " strides=(2, 2),\n", " pad=True)(conv1)\n", "\n", " conv2 = Convolution2D(filter_shape=(5, 5),\n", " num_filters=64,\n", " strides=1,\n", " pad=True,\n", " init_bias=0.1,\n", " name='conv2')(pooling1)\n", " pooling2 = MaxPooling((2, 2),\n", " strides=(2, 2),\n", " pad=True)(conv2)\n", "\n", " fc1 = Dense(1024,\n", " init_bias=0.1,\n", " name='fc1')(pooling2)\n", " \n", " dropout1 = Dropout(dropout_prob)(fc1)\n", " \n", " output_layer = Dense(out_dims,\n", " activation=None,\n", " init_bias=0.1,\n", " name='classify')(dropout1)\n", "\n", " return output_layer\n", " \n", "# Defining the input to the neural network\n", "input_vars = C.input_variable(input_dim, np.float32) # Currently unused; the model below is built on X\n", " \n", "# Create the model\n", "output = create_model(X)\n", "\n", "# Print the output shapes / parameters of different components\n", "print('Output Shape of the first convolution layer:', output.conv1.shape)\n", "print('Bias value of the last dense layer:', output.classify.b.value)\n", "\n", "# Number of parameters in the network\n", "C.logging.log_number_of_parameters(output)\n", "\n", "### Setting up the trainer\n", "# Define the label as the other input parameter of the trainer\n", "labels = C.input_variable(num_output_classes, np.float32)\n", "\n", "# Initialize the parameters for the trainer\n", "train_minibatch_size = 50\n", "# lr_schedule = C.learners.learning_parameter_schedule_per_sample(lr_per_sample, epoch_size=epoch_size)\n", "momentum = 0.9\n", "\n", "# Define the loss function\n", "loss = C.cross_entropy_with_softmax(output, labels)\n", "\n", "# Define the function that calculates classification error\n", "label_error = C.classification_error(output, labels)\n", "\n", "# Instantiate the trainer object to drive the model training\n", "# learner = C.learners.momentum_sgd(output.parameters, lr_schedule, momentum)\n", "# trainer = C.Trainer(output, loss, label_error, [learner])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Testing\n", "\n", "Below is non-working code being saved here so I can pull this notebook on other machines" ] }, { "cell_type": "code", "execution_count": 72, "metadata": { "ExecuteTime": { "end_time": "2018-03-23T00:06:24.177900Z", "start_time": 
"2018-03-23T00:06:24.129899Z" }, "collapsed": true, "scrolled": false }, "outputs": [], "source": [ "LR = 0.01\n", "MOMENTUM = 0.9\n", "batch_size = 128\n", "\n", "\n", "def shuffle_data(X, y):\n", " s = np.arange(len(X))\n", " np.random.shuffle(s)\n", " X = X[s]\n", " y = y[s]\n", " return X, y\n", "\n", "def yield_mb(X, y, batchsize=128, shuffle=False):\n", " if shuffle:\n", " X, y = shuffle_data(X, y)\n", " # Only complete batches are submitted\n", " for i in range(len(X) // batchsize):\n", " yield X[i * batchsize:(i + 1) * batchsize], y[i * batchsize:(i + 1) * batchsize]\n", "\n", "\n", "# def create_symbol(n_classes=10):\n", "# # Weight initialiser from uniform distribution\n", "# # Activation (unless states) is None\n", "# with C.layers.default_options(init = C.glorot_uniform(), activation = C.relu):\n", "# x = Convolution2D(filter_shape=(5, 5), num_filters=32, pad=True)(features)\n", "# x = Convolution2D(filter_shape=(5, 5), num_filters=32, pad=True)(x)\n", "# x = MaxPooling((2, 2), strides=(2, 2), pad=False)(x)\n", "# x = Dropout(0.25)(x)\n", "\n", "# x = Convolution2D(filter_shape=(3, 3), num_filters=100, pad=True)(x)\n", "# x = Convolution2D(filter_shape=(3, 3), num_filters=100, pad=True)(x)\n", "# x = MaxPooling((2, 2), strides=(2, 2), pad=False)(x)\n", "# x = Dropout(0.25)(x) \n", " \n", "# x = Dense(512)(x)\n", "# x = Dropout(0.5)(x)\n", "# x = Dense(n_classes, activation=None)(x)\n", "# return x\n", " \n", " \n", "# X = C.input_variable(input_dim_model)\n", "# y = C.input_variable(num_output_classes)\n", "\n", "def create_symbol(out_dims=10, dropout_prob=0.0):\n", " with C.layers.default_options(init=C.glorot_uniform(), activation=C.relu):\n", " conv1 = Convolution2D(filter_shape=(5, 5),\n", " num_filters=8,\n", " strides=(2, 2),\n", " pad=True,\n", " init_bias=0.1,\n", " name='conv1')(features)\n", " pooling1 = MaxPooling((2, 2),\n", " strides=(2, 2),\n", " pad=True)(conv1)\n", "\n", " conv2 = Convolution2D(filter_shape=(5, 5),\n", " num_filters=64,\n", " strides=1,\n", " pad=True,\n", " init_bias=0.1,\n", " name='conv2')(pooling1)\n", " pooling2 = MaxPooling((2, 2),\n", " strides=(2, 2),\n", " pad=True)(conv2)\n", "\n", " fc1 = Dense(1024,\n", " init_bias=0.1,\n", " name='fc1')(pooling2)\n", " \n", " dropout1 = Dropout(dropout_prob)(fc1)\n", " \n", " output_layer = Dense(out_dims,\n", " activation=None,\n", " init_bias=0.1,\n", " name='classify')(dropout1)\n", "\n", " return output_layer\n", " \n", "def init_model(m, labels, lr=LR, momentum=MOMENTUM):\n", " # Loss (dense labels); check if support for sparse labels\n", " loss = C.cross_entropy_with_softmax(m, labels) \n", " # Momentum SGD\n", " # https://github.com/Microsoft/CNTK/blob/master/Manual/Manual_How_to_use_learners.ipynb\n", " # unit_gain=False: momentum_direction = momentum*old_momentum_direction + gradient\n", " # if unit_gain=True then ...(1-momentum)*gradient\n", " learner = C.momentum_sgd(m.parameters,\n", " lr=C.learning_rate_schedule(lr, C.UnitType.minibatch),\n", " momentum=C.momentum_schedule(momentum), \n", " unit_gain=False)\n", " trainer = C.Trainer(m, (loss, C.classification_error(m, labels)), [learner])\n", " return trainer\n", "\n", "\n", "# Placeholders\n", "# [128 x 28 x 28 x 1\n", "# features = C.input_variable((3, 32, 32), np.float32)\n", "features = C.input_variable((28, 28), np.float32)\n", "labels = C.input_variable(10, np.float32)\n", "# Load symbol\n", "sym = create_symbol()\n", "\n", "trainer = init_model(sym, labels)\n", "\n", "y_train_onehot = np.eye(len(np.unique(y_train)))[y_train]\n", 
"\n", "# Main training loop: 53s\n", "for j in range(3):\n", " for data, label in yield_mb(X_train.astype('float32'), y_train_onehot.astype('float32'), batch_size, shuffle=True):\n", " trainer.train_minibatch({features: data, labels: label})\n", " # Log (this is just last batch in epoch, not average of batches)\n", " eval_error = trainer.previous_minibatch_evaluation_average\n", " print(\"Epoch %d | Accuracy: %.6f\" % (j+1, (1-eval_error)))\n", " \n", " \n", "# learner = C.sgd(model.parameters,\n", "# C.learning_parameter_schedule(0.1))\n", "# progress_writer = C.logging.ProgressPrinter(0)\n", "\n", "# train_summary = loss.train((X_train_lr, Y_train_lr), parameter_learners=[learner],\n", "# callbacks=[progress_writer])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## MXNet\n", "\n", "**To-do:**\n", "- Add script for training a model in MXNet\n", "- Add script for loading a trained model and generating predictions" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2017-12-20T00:37:05.203151Z", "start_time": "2017-12-20T00:37:05.190651Z" }, "collapsed": true }, "outputs": [], "source": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## PyTorch\n", "\n", "**To-do:**\n", "- Conform model architecture to other models\n", "- Add script for loading a trained model and generating predictions\n", "- Add train/validation accuracy" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2018/02/06 13:20\n", "OS: linux\n", "Python: 3.5.2 (default, Aug 18 2017, 17:48:00) \n", "[GCC 5.4.0 20160609]\n", "NumPy: 1.14.0\n", "PyTorch: 0.3.0.post4\n", "Epoch [1/5], Iter [100/600] Loss: 0.1559\n", "Epoch [1/5], Iter [200/600] Loss: 0.2541\n", "Epoch [1/5], Iter [300/600] Loss: 0.1078\n", "Epoch [1/5], Iter [400/600] Loss: 0.0419\n", "Epoch [1/5], Iter [500/600] Loss: 0.0725\n", "Epoch [1/5], Iter [600/600] Loss: 0.1366\n", "Epoch [2/5], Iter [100/600] Loss: 0.0406\n", "Epoch [2/5], Iter [200/600] Loss: 0.0233\n", "Epoch [2/5], Iter [300/600] Loss: 0.0631\n", "Epoch [2/5], Iter [400/600] Loss: 0.0380\n", "Epoch [2/5], Iter [500/600] Loss: 0.0049\n", "Epoch [2/5], Iter [600/600] Loss: 0.0495\n", "Epoch [3/5], Iter [100/600] Loss: 0.0103\n", "Epoch [3/5], Iter [200/600] Loss: 0.0652\n", "Epoch [3/5], Iter [300/600] Loss: 0.0153\n", "Epoch [3/5], Iter [400/600] Loss: 0.0650\n", "Epoch [3/5], Iter [500/600] Loss: 0.0205\n", "Epoch [3/5], Iter [600/600] Loss: 0.0374\n", "Epoch [4/5], Iter [100/600] Loss: 0.0126\n", "Epoch [4/5], Iter [200/600] Loss: 0.0348\n", "Epoch [4/5], Iter [300/600] Loss: 0.0123\n", "Epoch [4/5], Iter [400/600] Loss: 0.0465\n", "Epoch [4/5], Iter [500/600] Loss: 0.0342\n", "Epoch [4/5], Iter [600/600] Loss: 0.0205\n", "Epoch [5/5], Iter [100/600] Loss: 0.0088\n", "Epoch [5/5], Iter [200/600] Loss: 0.0023\n", "Epoch [5/5], Iter [300/600] Loss: 0.0108\n", "Epoch [5/5], Iter [400/600] Loss: 0.0845\n", "Epoch [5/5], Iter [500/600] Loss: 0.0307\n", "Epoch [5/5], Iter [600/600] Loss: 0.0223\n", "Test Accuracy of the model on the 10000 test images: 98 %\n" ] } ], "source": [ "import sys\n", "import time\n", "import numpy as np\n", "import torch\n", "import torch.nn as nn\n", "import torchvision.datasets as dsets\n", "import torchvision.transforms as transforms\n", "from torch.autograd import Variable\n", "\n", "print(time.strftime('%Y/%m/%d %H:%M'))\n", "print('OS:', sys.platform)\n", "print('Python:', sys.version)\n", "print('NumPy:', 
np.__version__)\n", "print('PyTorch:', torch.__version__)\n", "if torch.cuda.is_available() == True:\n", " print('GPU:', torch.cuda.current_device())\n", " \n", " \n", "# Checking if there is a GPU and assigning it to a variable\n", "if torch.cuda.is_available() == True:\n", " gpu = True\n", "else:\n", " gpu = False\n", "\n", "# Hyper Parameters\n", "num_epochs = 5\n", "batch_size = 100\n", "learning_rate = 0.001\n", "\n", "# MNIST Dataset\n", "train_dataset = dsets.MNIST(root='./data/',\n", " train=True, \n", " transform=transforms.ToTensor(),\n", " download=True)\n", "\n", "test_dataset = dsets.MNIST(root='./data/',\n", " train=False, \n", " transform=transforms.ToTensor())\n", "\n", "# Data Loader (Input Pipeline)\n", "train_loader = torch.utils.data.DataLoader(dataset=train_dataset,\n", " batch_size=batch_size, \n", " shuffle=True)\n", "\n", "test_loader = torch.utils.data.DataLoader(dataset=test_dataset,\n", " batch_size=batch_size, \n", " shuffle=False)\n", "\n", "# CNN Model (2 conv layer)\n", "class CNN(nn.Module):\n", " def __init__(self):\n", " super(CNN, self).__init__()\n", " self.layer1 = nn.Sequential(\n", " nn.Conv2d(1, 16, kernel_size=5, padding=2),\n", " nn.BatchNorm2d(16),\n", " nn.ReLU(),\n", " nn.MaxPool2d(2))\n", " self.layer2 = nn.Sequential(\n", " nn.Conv2d(16, 32, kernel_size=5, padding=2),\n", " nn.BatchNorm2d(32),\n", " nn.ReLU(),\n", " nn.MaxPool2d(2))\n", " self.fc = nn.Linear(7*7*32, 10)\n", " \n", " def forward(self, x):\n", " out = self.layer1(x)\n", " out = self.layer2(out)\n", " out = out.view(out.size(0), -1)\n", " out = self.fc(out)\n", " return out\n", " \n", "cnn = CNN()\n", "\n", "if gpu == True:\n", " cnn.cuda()\n", "\n", "\n", "# Loss and Optimizer\n", "criterion = nn.CrossEntropyLoss()\n", "optimizer = torch.optim.Adam(cnn.parameters(), lr=learning_rate)\n", "\n", "# Train the Model\n", "for epoch in range(num_epochs):\n", " for i, (images, labels) in enumerate(train_loader):\n", " \n", " if gpu == True:\n", " images = Variable(images).cuda()\n", " labels = Variable(labels).cuda()\n", " else:\n", " images = Variable(images)\n", " labels = Variable(labels)\n", " \n", " # Forward + Backward + Optimize\n", " optimizer.zero_grad()\n", " outputs = cnn(images)\n", " loss = criterion(outputs, labels)\n", " loss.backward()\n", " optimizer.step()\n", " \n", " if (i+1) % 100 == 0:\n", " print ('Epoch [%d/%d], Iter [%d/%d] Loss: %.4f' \n", " %(epoch+1, num_epochs, i+1, len(train_dataset)//batch_size, loss.data[0]))\n", "\n", "# Test the Model\n", "cnn.eval() # Change model to 'eval' mode (BN uses moving mean/var).\n", "correct = 0\n", "total = 0\n", "for images, labels in test_loader:\n", " if gpu == True:\n", " images = Variable(images).cuda()\n", " else:\n", " images = Variable(images)\n", " outputs = cnn(images)\n", " _, predicted = torch.max(outputs.data, 1)\n", " total += labels.size(0)\n", " correct += (predicted == labels).sum()\n", "\n", "print('Test Accuracy of the model on the 10000 test images: %d %%' % (100.0 * correct / total))\n", "\n", "# Save the Trained Model\n", "# torch.save(cnn.state_dict(), 'cnn.pkl')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## TensorFlow\n", "\n", "**To-do:**\n", "- Conform architecture to others in the notebook" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "ExecuteTime": { "end_time": "2018-03-20T22:36:50.981175Z", "start_time": "2018-03-20T22:36:22.386624Z" }, "scrolled": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2018/03/20 17:36\n", "OS: 
win32\n", "Python: 3.6.2 |Anaconda custom (64-bit)| (default, Jul 20 2017, 12:30:02) [MSC v.1900 64 bit (AMD64)]\n", "NumPy: 1.13.3\n", "TensorFlow: 1.3.0\n", "[name: \"/gpu:0\"\n", "device_type: \"GPU\"\n", "memory_limit: 1524796620\n", "locality {\n", " bus_id: 1\n", "}\n", "incarnation: 12763196243698338072\n", "physical_device_desc: \"device: 0, name: GeForce GTX 680, pci bus id: 0000:01:00.0\"\n", "]\n", "Reformatted data shapes:\n", "Training set (48000, 784) (48000, 10)\n", "Validation set (12000, 784) (12000, 10)\n", "Test set (10000, 784) (10000, 10)\n", "\n", "Initialized\n", "\n", "Beginning Epoch 1 -\n", "Epoch 1 Step 0 (0.00% epoch, 0.00% total)\n", "------------------------------------\n", "Minibatch loss: 62305.937500\n", "Minibatch accuracy: 10.2%\n", "Validation accuracy: 10.2%\n", "2018-03-20 17:36:26.054341\n", "Total execution time: 0.01 minutes\n", "\n", "Epoch 1 Step 1000 (2.08% epoch, 0.16% total)\n", "------------------------------------\n", "Minibatch loss: 2.296496\n", "Minibatch accuracy: 13.3%\n", "Validation accuracy: 10.5%\n", "2018-03-20 17:36:29.352841\n", "Total execution time: 0.06 minutes\n", "\n", "Epoch 1 Step 2000 (4.17% epoch, 0.48% total)\n", "------------------------------------\n", "Minibatch loss: 2.312936\n", "Minibatch accuracy: 6.2%\n", "Validation accuracy: 10.8%\n", "2018-03-20 17:36:32.736199\n", "Total execution time: 0.12 minutes\n", "\n", "Epoch 1 Step 3000 (6.25% epoch, 0.96% total)\n", "------------------------------------\n", "Minibatch loss: 2.294709\n", "Minibatch accuracy: 15.6%\n", "Validation accuracy: 10.8%\n", "2018-03-20 17:36:36.377200\n", "Total execution time: 0.18 minutes\n", "\n", "Epoch 1 Step 4000 (8.33% epoch, 1.60% total)\n", "------------------------------------\n", "Minibatch loss: 2.309429\n", "Minibatch accuracy: 8.6%\n", "Validation accuracy: 10.2%\n", "2018-03-20 17:36:39.849201\n", "Total execution time: 0.24 minutes\n", "\n", "Epoch 1 Step 5000 (10.42% epoch, 2.40% total)\n", "------------------------------------\n", "Minibatch loss: 2.294353\n", "Minibatch accuracy: 14.1%\n", "Validation accuracy: 10.8%\n", "2018-03-20 17:36:43.187700\n", "Total execution time: 0.29 minutes\n", "\n", "Epoch 1 Step 6000 (12.50% epoch, 3.37% total)\n", "------------------------------------\n", "Minibatch loss: 2.296662\n", "Minibatch accuracy: 17.2%\n", "Validation accuracy: 9.8%\n", "2018-03-20 17:36:46.425175\n", "Total execution time: 0.34 minutes\n", "\n", "Epoch 1 Step 7000 (14.58% epoch, 4.49% total)\n", "------------------------------------\n", "Minibatch loss: 2.302536\n", "Minibatch accuracy: 10.9%\n", "Validation accuracy: 10.8%\n", "2018-03-20 17:36:49.893675\n", "Total execution time: 0.40 minutes\n", "\n", "Training manually ended\n" ] } ], "source": [ "import sys\n", "import time\n", "from datetime import datetime\n", "import numpy as np\n", "import tensorflow as tf\n", "\n", "print(time.strftime('%Y/%m/%d %H:%M'))\n", "print('OS:', sys.platform)\n", "print('Python:', sys.version)\n", "print('NumPy:', np.__version__)\n", "print('TensorFlow:', tf.__version__)\n", "\n", "# Checking tensorflow processing devices\n", "from tensorflow.python.client import device_lib\n", "local_device_protos = device_lib.list_local_devices()\n", "print([x for x in local_device_protos if x.device_type == 'GPU'])\n", "\n", "# Avoiding memory issues with the GPU\n", "config = tf.ConfigProto()\n", "config.gpu_options.allow_growth = True\n", "sess = tf.Session(config=config)\n", "\n", "image_size = 28\n", "num_labels = 10\n", "num_channels = 
1\n", "\n", "\n", "def reformat(dataset, labels):\n", " dataset = dataset.reshape((-1, image_size * image_size)).astype(np.float32)\n", " labels = (np.arange(num_labels) == labels[:, None]).astype(np.float32)\n", " return dataset, labels\n", "\n", "\n", "def accuracy(predictions, labels):\n", " return (100.0 * np.sum(np.argmax(predictions, 1) == np.argmax(labels, 1))\n", " / predictions.shape[0])\n", "\n", "\n", "X_train, y_train = reformat(X_train, y_train)\n", "X_validation, y_validation = reformat(X_validation, y_validation)\n", "X_test, y_test = reformat(X_test, y_test)\n", "print('Reformatted data shapes:')\n", "print('Training set', X_train.shape, y_train.shape)\n", "print('Validation set', X_validation.shape, y_validation.shape)\n", "print('Test set', X_test.shape, y_test.shape)\n", "\n", "# Training Parameters\n", "learning_rate = 0.001\n", "num_steps = y_train.shape[0] + 1 # 200,000 per epoch\n", "batch_size = 128\n", "epochs = 12\n", "display_step = 250 # To print progress\n", "\n", "# Network Parameters\n", "num_input = 784 # Data input (image shape: 28x28)\n", "num_classes = 10 # Total classes (10 characters)\n", "\n", "num_hidden = 1024\n", "\n", "# Defining the model and graph\n", "sgd_hidden_graph = tf.Graph()\n", "with sgd_hidden_graph.as_default():\n", " tf_train_dataset = tf.placeholder(\n", " tf.float32, shape=(batch_size, image_size * image_size))\n", " tf_train_labels = tf.placeholder(\n", " tf.float32, shape=(batch_size, num_labels))\n", " tf_valid_dataset = tf.constant(X_validation.astype('float32'))\n", " tf_test_dataset = tf.constant(X_test, name='test_data')\n", "\n", " w0 = tf.Variable(tf.truncated_normal(\n", " [image_size * image_size, num_hidden]), name='W0')\n", " w1 = tf.Variable(tf.truncated_normal([num_hidden, num_labels]), name='W1')\n", "\n", " b0 = tf.Variable(tf.zeros([num_hidden]), name='b0')\n", " b1 = tf.Variable(tf.zeros([num_labels]), name='b1')\n", "\n", " def reluLayer(dataset):\n", " return tf.nn.relu(tf.matmul(dataset, w0) + b0)\n", "\n", " def logitLayer(dataset):\n", " return tf.matmul(reluLayer(dataset), w1) + b1\n", "\n", " sgd_hidden_loss = tf.reduce_mean(\n", " tf.nn.softmax_cross_entropy_with_logits(\n", " labels=tf_train_labels,\n", " logits=logitLayer(tf_train_dataset)))\n", " sgd_hidden_optimizer = tf.train.GradientDescentOptimizer(\n", " 0.5).minimize(sgd_hidden_loss)\n", "\n", " sgd_hidden_train_prediction = tf.nn.softmax(\n", " logitLayer(tf_train_dataset), name='train_predictor')\n", " sgd_hidden_valid_prediction = tf.nn.softmax(\n", " logitLayer(tf_valid_dataset), name='validate_predictor')\n", " sgd_hidden_test_prediction = tf.nn.softmax(\n", " logitLayer(tf_test_dataset), name='test_predictor')\n", "\n", "\n", "# Creating the graph and running the model\n", "with tf.Session(graph=sgd_hidden_graph) as sgd_hidden_session:\n", " saver = tf.train.Saver()\n", " tf.global_variables_initializer().run()\n", "\n", " # For tracking execution time and progress\n", " start_time = time.time()\n", " total_steps = 0\n", "\n", " print('\\nInitialized\\n')\n", " try:\n", " for epoch in range(1, epochs + 1):\n", " print('Beginning Epoch {0} -'.format(epoch))\n", " for step in range(num_steps):\n", " offset = (step * batch_size) % (y_train.shape[0] - batch_size)\n", " batch_data = X_train[offset:(offset + batch_size), :]\n", " batch_labels = y_train[offset:(\n", " offset + batch_size), :].reshape(batch_size, num_labels)\n", " feed_dict = {tf_train_dataset: batch_data,\n", " tf_train_labels: batch_labels}\n", " _, l, sgd_hidden_predictions = 
sgd_hidden_session.run(\n", " [sgd_hidden_optimizer, sgd_hidden_loss, sgd_hidden_train_prediction],\n", " feed_dict=feed_dict)\n", " if (step % 1000 == 0) or (step == num_steps):\n", " # Calculating percentage of completion\n", " total_steps += step\n", " pct_epoch = (step / float(num_steps)) * 100\n", " pct_total = (total_steps / float(num_steps *\n", " (epochs + 1))) * 100 # Fix this line\n", "\n", " # Printing progress\n", " print('Epoch %d Step %d (%.2f%% epoch, %.2f%% total)' %\n", " (epoch, step, pct_epoch, pct_total))\n", " print('------------------------------------')\n", " print('Minibatch loss: %f' % l)\n", " print(\"Minibatch accuracy: %.1f%%\" %\n", " accuracy(sgd_hidden_predictions, batch_labels))\n", " print(\"Validation accuracy: %.1f%%\" % accuracy(sgd_hidden_valid_prediction.eval(),\n", " y_validation))\n", " print(datetime.now())\n", " print('Total execution time: %.2f minutes' %\n", " ((time.time() - start_time) / 60.))\n", " print()\n", " print(\"Test accuracy: %.1f%%\" % accuracy(\n", " sgd_hidden_test_prediction.eval(), y_test.astype('int32')))\n", " except KeyboardInterrupt:\n", " print('Training manually ended')\n", "# saver.save(sgd_hidden_session, '{}/model'.format(Path.cwd()))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Loading in a trained model and generating predictions\n", "\n", "Most of this script is from a competition submission script and needs to be cleaned up. Additionally, it contains code for creating a pickle file from images within subdirectories." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "from six.moves import cPickle as pickle\n", "from scipy import ndimage\n", "import math\n", "import numpy as np\n", "import os\n", "from pathlib import Path\n", "from __future__ import print_function\n", "import tensorflow as tf\n", "from pathlib import Path\n", "\n", "# Change this line to the saved model\n", "modelFile = 'trainedModel.meta'\n", "\n", "image_size = 28 # Pixel width and height.\n", "num_labels = 10\n", "pixel_depth = 255.0 # Number of levels per pixel.\n", "\n", "total_images = sum([len(files) for r, d, files in os.walk(\"./data/\")])\n", "\n", "\n", "def accuracy(predictions, labels):\n", " return (100.0 * np.sum(np.argmax(predictions, 1) == np.argmax(labels, 1))\n", " / predictions.shape[0])\n", "\n", "\n", "def load_letter(folder, min_num_images):\n", " \"\"\"Load the data for a single letter label.\"\"\"\n", " image_files = list(folder.iterdir())\n", " dataset = np.ndarray(shape=(len(image_files), image_size, image_size),\n", " dtype=np.float32)\n", " print(folder)\n", " num_images = 0\n", " for image in image_files:\n", " image_file = folder.joinpath(image)\n", " try:\n", " image_data = (ndimage.imread(image_file).astype(float) -\n", " pixel_depth / 2) / pixel_depth\n", " if image_data.shape != (image_size, image_size):\n", " raise Exception('Unexpected image shape: %s' %\n", " str(image_data.shape))\n", " dataset[num_images, :, :] = image_data\n", " num_images = num_images + 1\n", " except IOError as e:\n", " print('Could not read:', image_file,\n", " ':', e, '- it\\'s ok, skipping.')\n", "\n", " dataset = dataset[0:num_images, :, :]\n", " if num_images < min_num_images:\n", " raise Exception('Many fewer images than expected: %d < %d' %\n", " (num_images, min_num_images))\n", "\n", " print('Full dataset tensor:', dataset.shape)\n", " print('Mean:', np.mean(dataset))\n", " print('Standard deviation:', np.std(dataset))\n", " return dataset\n", "\n", 
"\n", "def maybe_pickle(data_folders, min_num_images_per_class, force=False):\n", " dataset_names = []\n", " data_folders = (i for i in data_folders if i.is_dir())\n", " for folder in data_folders:\n", " set_filename = str(folder) + '.pickle'\n", " dataset_names.append(set_filename)\n", " if os.path.exists(set_filename) and not force:\n", " # You may override by setting force=True.\n", " print('%s already present - Skipping pickling.' % set_filename)\n", " else:\n", " print('Pickling %s.' % set_filename)\n", " dataset = load_letter(folder, min_num_images_per_class)\n", " try:\n", " with open(set_filename, 'wb') as f:\n", " pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)\n", " except Exception as e:\n", " print('Unable to save data to', set_filename, ':', e)\n", " return dataset_names\n", "\n", "\n", "def make_arrays(nb_rows, img_size):\n", " if nb_rows:\n", " dataset = np.ndarray((nb_rows, img_size, img_size), dtype=np.float32)\n", " labels = np.ndarray(nb_rows, dtype=np.int32)\n", " else:\n", " dataset, labels = None, None\n", " return dataset, labels\n", "\n", "\n", "def randomize(dataset, labels):\n", " permutation = np.random.permutation(labels.shape[0])\n", " shuffled_dataset = dataset[permutation, :, :]\n", " shuffled_labels = labels[permutation]\n", " return shuffled_dataset, shuffled_labels\n", "\n", "\n", "def reformat(dataset, labels):\n", " dataset = dataset.reshape(\n", " labels.shape[0], image_size, image_size, 1).astype(np.float32)\n", " labels = (np.arange(num_labels) == labels[:, None]).astype(np.float32)\n", " return dataset, labels\n", "\n", "\n", "def merge_datasets(pickle_files, dataset_size):\n", " \"\"\"\n", " Merge multiple glyph pickle files into nd-array dataset and nd-array labels\n", " for model evaluation.\n", " Simplification from https://github.com/udacity/deep-learning\n", " \"\"\"\n", " num_classes = len(pickle_files)\n", " dataset, labels = make_arrays(dataset_size, image_size)\n", " size_per_class = dataset_size // num_classes\n", "\n", " start_t = 0\n", " for label, pickle_file in enumerate(pickle_files):\n", " try:\n", " with open(pickle_file, 'rb') as f:\n", " letter_set = pickle.load(f)\n", " end_t = start_t + size_per_class\n", " np.random.shuffle(letter_set)\n", " dataset[start_t:end_t, :, :] = letter_set[0:size_per_class]\n", " labels[start_t:end_t] = label\n", " start_t = end_t\n", " except Exception as e:\n", " print('Unable to process data from', pickle_file, ':', e)\n", " raise\n", "\n", " return dataset, labels\n", "\n", "\n", "dir_name = 'data'\n", "\n", "glyph_dir = Path.cwd().joinpath(dir_name)\n", "test_folders = [glyph_dir.joinpath(i) for i in glyph_dir.iterdir()]\n", "test_datasets = maybe_pickle(test_folders, 2) # provide only 2 samples for now\n", "test_dataset, test_labels = merge_datasets(test_datasets, total_images)\n", "test_dataset, test_labels = randomize(test_dataset, test_labels)\n", "test_dataset, test_labels = reformat(test_dataset, test_labels)\n", "print('Testing size', test_dataset.shape, test_labels.shape)\n", "\n", "\n", "def classify(test_dataset, model_filename=modelFile, checkpoint_path=None):\n", " \"\"\"\n", " A sample classifier to unpickle a TensorFlow model and label the dataset.\n", " There are magic strings in this function derived from to the model to evaluate.\n", " Your implementation will likely have different tags that depend upon the model\n", " implementation.\n", "\n", " We pad the input test_dataset to make it at least as large as the model batch,\n", " and repeat prediction on chunks of the input if 
it is larger than the model batch.\n", "\n", " Args:\n", " test_dataset: Expect an input of N*28*28*1 shape numpy array, where N is number of images and\n", " 28*28 is pixel width and hieght.\n", " model_filename: optional file name stored by a previous TensorFlow session.\n", " checkpoint_path: optional path for previous TensorFlow session.\n", "\n", " Returns:\n", " The #observations by #labels nd-array labelings.\n", " \"\"\"\n", " # Re-scaling the dataset to a similar scale as the training data in the notMNIST.pickle file\n", " pixel_depth = 255.0\n", " test_dataset = (test_dataset - 255.0 / 2) / 255\n", "\n", " num_classes = 10\n", " n = int(test_dataset.shape[0])\n", " result = np.ndarray([n, num_classes], dtype=np.float32)\n", "\n", " with tf.Session() as session:\n", " saver = tf.train.import_meta_graph('./model/' + model_filename)\n", " saver.restore(session, './model/' + model_filename.split('.')[0])\n", " graph_predict = tf.get_default_graph()\n", " test_predict = graph_predict.get_tensor_by_name(\n", " 'Softmax_1:0') # string from model\n", " m = int(graph_predict.get_tensor_by_name(\n", " 'Placeholder:0').shape[0]) # string from model\n", " for i in range(0, int(math.ceil(n / m))):\n", " start = i * m\n", " end = min(n, ((i + 1) * m))\n", " x = np.zeros((128, 28, 28, 1)).astype(np.float32)\n", " x[0:(end - start)] = test_dataset[start:end]\n", " result[start:end] = test_predict.eval(\n", " feed_dict={\"Placeholder:0\": x})[0:(end - start)]\n", " return result\n", "\n", "\n", "def testing_accuracy(data, labels):\n", " \"\"\"\n", " Generates predictions and returns the accuracy\n", " \"\"\"\n", " predictions = classify(data)\n", " print(\"Test accuracy: %.1f%%\" % accuracy(predictions, labels))\n", "\n", "\n", "testing_accuracy((test_dataset), test_labels)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "varInspector": { "cols": { "lenName": 16, "lenType": 16, "lenVar": 40 }, "kernels_config": { "python": { "delete_cmd_postfix": "", "delete_cmd_prefix": "del ", "library": "var_list.py", "varRefreshCmd": "print(var_dic_list())" }, "r": { "delete_cmd_postfix": ") ", "delete_cmd_prefix": "rm(", "library": "var_list.r", "varRefreshCmd": "cat(var_dic_list()) " } }, "types_to_exclude": [ "module", "function", "builtin_function_or_method", "instance", "_Feature" ], "window_display": false } }, "nbformat": 4, "nbformat_minor": 2 }