{ "cells": [ { "cell_type": "markdown", "metadata": { "collapsed": true }, "source": [ "### Imports" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": true }, "outputs": [], "source": [ "import numpy as np\n", "%matplotlib inline\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from IPython import display\n", "plt.style.use('seaborn-white')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Read and process data" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": true }, "outputs": [], "source": [ "data = open('input.txt', 'r').read()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Process data and calculate indexes" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "data has 1115402 characters, 65 unique\n" ] } ], "source": [ "chars = list(set(data))\n", "data_size, X_size = len(data), len(chars)\n", "print(\"data has %d characters, %d unique\" % (data_size, X_size))\n", "char_to_idx = {ch:i for i,ch in enumerate(chars)}\n", "idx_to_char = {i:ch for i,ch in enumerate(chars)}" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Constants and Hyperparameters" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "collapsed": true }, "outputs": [], "source": [ "H_size = 100 # Size of the hidden layer\n", "T_steps = 25 # Number of time steps (length of the sequence) used for training\n", "learning_rate = 1e-1 # Learning rate\n", "weight_sd = 0.1 # Standard deviation of weights for initialization\n", "z_size = H_size + X_size # Size of concatenate(H, X) vector" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Activation Functions and Derivatives\n", "\n", "#### Sigmoid\n", "\n", "\\begin{align}\n", "\\sigma(x) &= \\frac{1}{1 + e^{-x}}\\\\\n", "\\frac{d\\sigma(x)}{dx} &= \\sigma(x) \\cdot (1 - \\sigma(x))\n", "\\end{align}\n", "\n", "#### Tanh\n", "\n", "\\begin{align}\n", "\\frac{d\\text{tanh}(x)}{dx} &= 1 - \\text{tanh}^2(x)\n", "\\end{align}" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def sigmoid(x):\n", " return 1 / (1 + np.exp(-x))\n", "\n", "\n", "def dsigmoid(y):\n", " return y * (1 - y)\n", "\n", "\n", "def tanh(x):\n", " return np.tanh(x)\n", "\n", "\n", "def dtanh(y):\n", " return 1 - y * y" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Parameters" ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "collapsed": true }, "outputs": [], "source": [ "class Param:\n", " def __init__(self, name, value):\n", " self.name = name\n", " self.v = value #parameter value\n", " self.d = np.zeros_like(value) #derivative\n", " self.m = np.zeros_like(value) #momentum for AdaGrad" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We use random weights with normal distribution (`0`, `weight_sd`) for $tanh$ activation function and (`0.5`, `weight_sd`) for $sigmoid$ activation function.\n", "\n", "Biases are initialized to zeros." 
] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "class Parameters:\n", " def __init__(self):\n", " self.W_f = Param('W_f', \n", " np.random.randn(H_size, z_size) * weight_sd + 0.5)\n", " self.b_f = Param('b_f',\n", " np.zeros((H_size, 1)))\n", "\n", " self.W_i = Param('W_i',\n", " np.random.randn(H_size, z_size) * weight_sd + 0.5)\n", " self.b_i = Param('b_i',\n", " np.zeros((H_size, 1)))\n", "\n", " self.W_C = Param('W_C',\n", " np.random.randn(H_size, z_size) * weight_sd)\n", " self.b_C = Param('b_C',\n", " np.zeros((H_size, 1)))\n", "\n", " self.W_o = Param('W_o',\n", " np.random.randn(H_size, z_size) * weight_sd + 0.5)\n", " self.b_o = Param('b_o',\n", " np.zeros((H_size, 1)))\n", "\n", " #For final layer to predict the next character\n", " self.W_v = Param('W_v',\n", " np.random.randn(X_size, H_size) * weight_sd)\n", " self.b_v = Param('b_v',\n", " np.zeros((X_size, 1)))\n", " \n", " def all(self):\n", " return [self.W_f, self.W_i, self.W_C, self.W_o, self.W_v,\n", " self.b_f, self.b_i, self.b_C, self.b_o, self.b_v]\n", " \n", "parameters = Parameters()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Forward pass\n", "\n", "![LSTM](http://blog.varunajayasiri.com/ml/lstm.svg)\n", "\n", "*Operation $z$ is the concatenation of $x$ and $h_{t-1}$*\n", "\n", "#### Concatenation of $h_{t-1}$ and $x_t$\n", "\\begin{align}\n", "z & = [h_{t-1}, x_t] \\\\\n", "\\end{align}\n", "\n", "#### LSTM functions\n", "\\begin{align}\n", "f_t & = \\sigma(W_f \\cdot z + b_f) \\\\\n", "i_t & = \\sigma(W_i \\cdot z + b_i) \\\\\n", "\\bar{C}_t & = tanh(W_C \\cdot z + b_C) \\\\\n", "C_t & = f_t * C_{t-1} + i_t * \\bar{C}_t \\\\\n", "o_t & = \\sigma(W_o \\cdot z + b_t) \\\\\n", "h_t &= o_t * tanh(C_t) \\\\\n", "\\end{align}\n", "\n", "#### Logits\n", "\\begin{align}\n", "v_t &= W_v \\cdot h_t + b_v \\\\\n", "\\end{align}\n", "\n", "#### Softmax\n", "\\begin{align}\n", "\\hat{y_t} &= \\text{softmax}(v_t)\n", "\\end{align}\n", "\n", "$\\hat{y_t}$ is `y` in code and $y_t$ is `targets`.\n" ] }, { "cell_type": "code", "execution_count": 8, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def forward(x, h_prev, C_prev, p = parameters):\n", " assert x.shape == (X_size, 1)\n", " assert h_prev.shape == (H_size, 1)\n", " assert C_prev.shape == (H_size, 1)\n", " \n", " z = np.row_stack((h_prev, x))\n", " f = sigmoid(np.dot(p.W_f.v, z) + p.b_f.v)\n", " i = sigmoid(np.dot(p.W_i.v, z) + p.b_i.v)\n", " C_bar = tanh(np.dot(p.W_C.v, z) + p.b_C.v)\n", "\n", " C = f * C_prev + i * C_bar\n", " o = sigmoid(np.dot(p.W_o.v, z) + p.b_o.v)\n", " h = o * tanh(C)\n", "\n", " v = np.dot(p.W_v.v, h) + p.b_v.v\n", " y = np.exp(v) / np.sum(np.exp(v)) #softmax\n", "\n", " return z, f, i, C_bar, C, o, h, v, y" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Backward pass\n", "\n", "#### Loss\n", "\n", "\\begin{align}\n", "L_k &= -\\sum_{t=k}^T\\sum_j y_{t,j} log \\hat{y_{t,j}} \\\\\n", "L &= L_1 \\\\\n", "\\end{align}\n", "\n", "#### Gradients\n", "\n", "\\begin{align}\n", "dv_t &= \\hat{y_t} - y_t \\\\\n", "dh_t &= dh'_t + W_y^T \\cdot dv_t \\\\\n", "do_t &= dh_t * \\text{tanh}(C_t) \\\\\n", "dC_t &= dC'_t + dh_t * o_t * (1 - \\text{tanh}^2(C_t))\\\\\n", "d\\bar{C}_t &= dC_t * i_t \\\\\n", "di_t &= dC_t * \\bar{C}_t \\\\\n", "df_t &= dC_t * C_{t-1} \\\\\n", "\\\\\n", "df'_t &= f_t * (1 - f_t) * df_t \\\\\n", "di'_t &= i_t * (1 - i_t) * di_t \\\\\n", "d\\bar{C}'_{t-1} &= (1 - \\bar{C}_t^2) * d\\bar{C}_t \\\\\n", "do'_t &= o_t * (1 - o_t) * do_t 
\\\\\n", "dz_t &= W_f^T \\cdot df'_t \\\\\n", " &+ W_i^T \\cdot di_t \\\\\n", " &+ W_C^T \\cdot d\\bar{C}_t \\\\\n", " &+ W_o^T \\cdot do_t \\\\\n", "\\\\\n", "[dh'_{t-1}, dx_t] &= dz_t \\\\\n", "dC'_t &= f_t * dC_t\n", "\\end{align}\n", "\n", "* $dC'_t = \\frac{\\partial L_{t+1}}{\\partial C_t}$ and $dh'_t = \\frac{\\partial L_{t+1}}{\\partial h_t}$\n", "* $dC_t = \\frac{\\partial L}{\\partial C_t} = \\frac{\\partial L_t}{\\partial C_t}$ and $dh_t = \\frac{\\partial L}{\\partial h_t} = \\frac{\\partial L_{t}}{\\partial h_t}$\n", "* All other derivatives are of $L$\n", "* `target` is target character index $y_t$\n", "* `dh_next` is $dh'_{t}$ (size H x 1)\n", "* `dC_next` is $dC'_{t}$ (size H x 1)\n", "* `C_prev` is $C_{t-1}$ (size H x 1)\n", "* $df'_t$, $di'_t$, $d\\bar{C}'_t$, and $do'_t$ are *also* assigned to `df`, `di`, `dC_bar`, and `do` in the **code**.\n", "* *Returns* $dh_t$ and $dC_t$\n", "\n", "#### Model parameter gradients\n", "\n", "\\begin{align}\n", "dW_v &= dv_t \\cdot h_t^T \\\\\n", "db_v &= dv_t \\\\\n", "\\\\\n", "dW_f &= df'_t \\cdot z^T \\\\\n", "db_f &= df'_t \\\\\n", "\\\\\n", "dW_i &= di'_t \\cdot z^T \\\\\n", "db_i &= di'_t \\\\\n", "\\\\\n", "dW_C &= d\\bar{C}'_t \\cdot z^T \\\\\n", "db_C &= d\\bar{C}'_t \\\\\n", "\\\\\n", "dW_o &= do'_t \\cdot z^T \\\\\n", "db_o &= do'_t \\\\\n", "\\\\\n", "\\end{align}" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "def backward(target, dh_next, dC_next, C_prev,\n", " z, f, i, C_bar, C, o, h, v, y,\n", " p = parameters):\n", " \n", " assert z.shape == (X_size + H_size, 1)\n", " assert v.shape == (X_size, 1)\n", " assert y.shape == (X_size, 1)\n", " \n", " for param in [dh_next, dC_next, C_prev, f, i, C_bar, C, o, h]:\n", " assert param.shape == (H_size, 1)\n", " \n", " dv = np.copy(y)\n", " dv[target] -= 1\n", "\n", " p.W_v.d += np.dot(dv, h.T)\n", " p.b_v.d += dv\n", "\n", " dh = np.dot(p.W_v.v.T, dv) \n", " dh += dh_next\n", " do = dh * tanh(C)\n", " do = dsigmoid(o) * do\n", " p.W_o.d += np.dot(do, z.T)\n", " p.b_o.d += do\n", "\n", " dC = np.copy(dC_next)\n", " dC += dh * o * dtanh(tanh(C))\n", " dC_bar = dC * i\n", " dC_bar = dtanh(C_bar) * dC_bar\n", " p.W_C.d += np.dot(dC_bar, z.T)\n", " p.b_C.d += dC_bar\n", "\n", " di = dC * C_bar\n", " di = dsigmoid(i) * di\n", " p.W_i.d += np.dot(di, z.T)\n", " p.b_i.d += di\n", "\n", " df = dC * C_prev\n", " df = dsigmoid(f) * df\n", " p.W_f.d += np.dot(df, z.T)\n", " p.b_f.d += df\n", "\n", " dz = (np.dot(p.W_f.v.T, df)\n", " + np.dot(p.W_i.v.T, di)\n", " + np.dot(p.W_C.v.T, dC_bar)\n", " + np.dot(p.W_o.v.T, do))\n", " dh_prev = dz[:H_size, :]\n", " dC_prev = f * dC\n", " \n", " return dh_prev, dC_prev" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Forward Backward Pass" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Clear gradients before each backward pass" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "def clear_gradients(params = parameters):\n", " for p in params.all():\n", " p.d.fill(0)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Clip gradients to mitigate exploding gradients" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "def clip_gradients(params = parameters):\n", " for p in params.all():\n", " np.clip(p.d, -1, 1, out=p.d)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Calculate and store the values in forward pass. 
{ "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "def backward(target, dh_next, dC_next, C_prev,\n", "             z, f, i, C_bar, C, o, h, v, y,\n", "             p = parameters):\n", "    \n", "    assert z.shape == (X_size + H_size, 1)\n", "    assert v.shape == (X_size, 1)\n", "    assert y.shape == (X_size, 1)\n", "    \n", "    for param in [dh_next, dC_next, C_prev, f, i, C_bar, C, o, h]:\n", "        assert param.shape == (H_size, 1)\n", "    \n", "    dv = np.copy(y)\n", "    dv[target] -= 1\n", "\n", "    p.W_v.d += np.dot(dv, h.T)\n", "    p.b_v.d += dv\n", "\n", "    dh = np.dot(p.W_v.v.T, dv)\n", "    dh += dh_next\n", "    do = dh * tanh(C)\n", "    do = dsigmoid(o) * do\n", "    p.W_o.d += np.dot(do, z.T)\n", "    p.b_o.d += do\n", "\n", "    dC = np.copy(dC_next)\n", "    dC += dh * o * dtanh(tanh(C))\n", "    dC_bar = dC * i\n", "    dC_bar = dtanh(C_bar) * dC_bar\n", "    p.W_C.d += np.dot(dC_bar, z.T)\n", "    p.b_C.d += dC_bar\n", "\n", "    di = dC * C_bar\n", "    di = dsigmoid(i) * di\n", "    p.W_i.d += np.dot(di, z.T)\n", "    p.b_i.d += di\n", "\n", "    df = dC * C_prev\n", "    df = dsigmoid(f) * df\n", "    p.W_f.d += np.dot(df, z.T)\n", "    p.b_f.d += df\n", "\n", "    dz = (np.dot(p.W_f.v.T, df)\n", "          + np.dot(p.W_i.v.T, di)\n", "          + np.dot(p.W_C.v.T, dC_bar)\n", "          + np.dot(p.W_o.v.T, do))\n", "    dh_prev = dz[:H_size, :]\n", "    dC_prev = f * dC\n", "    \n", "    return dh_prev, dC_prev" ] },
{ "cell_type": "markdown", "metadata": {}, "source": [ "### Forward Backward Pass" ] },
{ "cell_type": "markdown", "metadata": {}, "source": [ "Clear gradients before each backward pass" ] },
{ "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "def clear_gradients(params = parameters):\n", "    for p in params.all():\n", "        p.d.fill(0)" ] },
{ "cell_type": "markdown", "metadata": {}, "source": [ "Clip gradients to mitigate exploding gradients" ] },
{ "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "def clip_gradients(params = parameters):\n", "    for p in params.all():\n", "        np.clip(p.d, -1, 1, out=p.d)" ] },
{ "cell_type": "markdown", "metadata": {}, "source": [ "Calculate and store the values in the forward pass. Accumulate gradients in the backward pass and clip them to avoid exploding gradients.\n", "\n", "* `inputs` and `targets` are lists of integers (character indices).\n", "* `h_prev` is the initial `h`, $h_{-1}$ (size H x 1)\n", "* `C_prev` is the initial `C`, $C_{-1}$ (size H x 1)\n", "* *Returns* loss, final $h_T$ and $C_T$" ] },
{ "cell_type": "code", "execution_count": 12, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def forward_backward(inputs, targets, h_prev, C_prev):\n", "    global parameters\n", "    \n", "    # To store the values for each time step\n", "    x_s, z_s, f_s, i_s = {}, {}, {}, {}\n", "    C_bar_s, C_s, o_s, h_s = {}, {}, {}, {}\n", "    v_s, y_s = {}, {}\n", "    \n", "    # Values at t - 1\n", "    h_s[-1] = np.copy(h_prev)\n", "    C_s[-1] = np.copy(C_prev)\n", "    \n", "    loss = 0\n", "    # Loop through time steps\n", "    assert len(inputs) == T_steps\n", "    for t in range(len(inputs)):\n", "        x_s[t] = np.zeros((X_size, 1))\n", "        x_s[t][inputs[t]] = 1 # Input character\n", "        \n", "        (z_s[t], f_s[t], i_s[t],\n", "         C_bar_s[t], C_s[t], o_s[t], h_s[t],\n", "         v_s[t], y_s[t]) = \\\n", "            forward(x_s[t], h_s[t - 1], C_s[t - 1]) # Forward pass\n", "        \n", "        loss += -np.log(y_s[t][targets[t], 0]) # Loss at t\n", "        \n", "    clear_gradients()\n", "\n", "    dh_next = np.zeros_like(h_s[0]) #dh from the next character\n", "    dC_next = np.zeros_like(C_s[0]) #dC from the next character\n", "\n", "    for t in reversed(range(len(inputs))):\n", "        # Backward pass\n", "        dh_next, dC_next = \\\n", "            backward(target = targets[t], dh_next = dh_next,\n", "                     dC_next = dC_next, C_prev = C_s[t-1],\n", "                     z = z_s[t], f = f_s[t], i = i_s[t], C_bar = C_bar_s[t],\n", "                     C = C_s[t], o = o_s[t], h = h_s[t], v = v_s[t],\n", "                     y = y_s[t])\n", "\n", "    clip_gradients()\n", "    \n", "    return loss, h_s[len(inputs) - 1], C_s[len(inputs) - 1]" ] },
{ "cell_type": "markdown", "metadata": {}, "source": [ "### Sample the next character" ] },
{ "cell_type": "code", "execution_count": 13, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def sample(h_prev, C_prev, first_char_idx, sentence_length):\n", "    x = np.zeros((X_size, 1))\n", "    x[first_char_idx] = 1\n", "\n", "    h = h_prev\n", "    C = C_prev\n", "\n", "    indexes = []\n", "    \n", "    for t in range(sentence_length):\n", "        _, _, _, _, C, _, h, _, p = forward(x, h, C)\n", "        idx = np.random.choice(range(X_size), p=p.ravel())\n", "        x = np.zeros((X_size, 1))\n", "        x[idx] = 1\n", "        indexes.append(idx)\n", "\n", "    return indexes" ] },
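{ "cell_type": "markdown", "metadata": {}, "source": [ "For example (an illustrative usage sketch), we can draw a short sample from the current model, starting from zero states and the first character of the data:" ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Illustrative: sample 50 characters from the (possibly untrained) model\n", "sample_idxs = sample(np.zeros((H_size, 1)), np.zeros((H_size, 1)),\n", "                     char_to_idx[data[0]], 50)\n", "print(''.join(idx_to_char[i] for i in sample_idxs))" ] },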
dw_{\\tau}^2} \\\\\n", "d\\theta_i &= \\frac{\\partial L}{\\partial \\theta_i}\n", "\\end{align}" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [], "source": [ "def update_paramters(params = parameters):\n", " for p in params.all():\n", " p.m += p.d * p.d # Calculate sum of gradients\n", " #print(learning_rate * dparam)\n", " p.v += -(learning_rate * p.d / np.sqrt(p.m + 1e-8))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "To delay the keyboard interrupt to prevent the training \n", "from stopping in the middle of an iteration " ] }, { "cell_type": "code", "execution_count": 16, "metadata": { "collapsed": true }, "outputs": [], "source": [ "import signal\n", "\n", "class DelayedKeyboardInterrupt(object):\n", " def __enter__(self):\n", " self.signal_received = False\n", " self.old_handler = signal.signal(signal.SIGINT, self.handler)\n", "\n", " def handler(self, sig, frame):\n", " self.signal_received = (sig, frame)\n", " print('SIGINT received. Delaying KeyboardInterrupt.')\n", "\n", " def __exit__(self, type, value, traceback):\n", " signal.signal(signal.SIGINT, self.old_handler)\n", " if self.signal_received:\n", " self.old_handler(*self.signal_received)" ] }, { "cell_type": "code", "execution_count": 17, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Exponential average of loss\n", "# Initialize to a error of a random model\n", "smooth_loss = -np.log(1.0 / X_size) * T_steps\n", "\n", "iteration, pointer = 0, 0\n", "\n", "# For the graph\n", "plot_iter = np.zeros((0))\n", "plot_loss = np.zeros((0))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Training loop" ] }, { "cell_type": "code", "execution_count": 18, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXEAAAD0CAYAAABtjRZ7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XtAlFXCBvBnGO4MiHgnFEElNcTCWUxD1DQxzc3Mwsva\n7uq2aUZRa6GmKIm3TLbUrdy23b4V3fLSVmsXTdQQCVASFbxfUJGL3FRmBAZmzvfHwMgwg6DNDLzy\n/P5R3ut5GeaZM+c957wyIYQAERFJkl1LF4CIiO4dQ5yISMIY4kREEsYQJyKSMIY4EZGE2dvqRJWV\nlcjKykKnTp0gl8ttdVoiIknTarUoKipCYGAgnJ2dTdbbLMSzsrIwffp0W52OiOi+snnzZiiVSpPl\nNgvxTp06GQrStWtXW52WiEjSCgoKMH36dEOGNmSzEK9rQunatSt8fHxsdVoiovtCY83QvLFJRCRh\nDHEiIgljiBMRSRhDnIhIwhjiREQSxhAnIpKwNh3iNVodes7/Fv86eLGli0JEdE/adIjfqtYCAOJ3\nn2nhkhAR3Zs2HeJERFLHEAfA59MRkVS16RCXtXQBiIh+pTYd4nX4rGgikqo2HeIyGeviRCRtbTrE\n67AeTkRSxRAnIpIwhjh4g5OIpKtZD4XYuHEj9u7di+rqakydOhUhISGYP38+ZDIZ+vTpgyVLlsDO\nzg4bNmzA/v37YW9vj4ULFyIoKMja5bcINqcQkVQ1WRNPS0vDkSNH8J///AebNm1CQUEBVq5ciaio\nKGzZsgVCCCQmJiI7Oxvp6enYtm0b4uPjERsba4vy/yqsgROR1DUZ4snJyQgICMDcuXMxe/ZsjBgx\nAtnZ2QgJCQEAhIWFISUlBRkZGQgNDYVMJoO3tze0Wi1KS0utfgGWwB6GRCRVTTanlJWVIS8vDx9/\n/DFyc3MxZ84cCCEM3fPc3NxQXl4OlUoFT09Pw351y728vKxX+l+JPQyJSOqaDHFPT0/4+/vD0dER\n/v7+cHJyQkFBgWG9Wq2Gh4cHFAoF1Gq10XJ3d3frlJqIiAA0ozll0KBBOHDgAIQQKCwsREVFBYYM\nGYK0tDQAQFJSEpRKJYKDg5GcnAydToe8vDzodLpWXQsnIrofNFkTHzlyJA4dOoTJkydDCIGYmBj4\n+Phg8eLFiI+Ph7+/P8LDwyGXy6FUKhEREQGdToeYmBhblJ+IqE1rVhfDt956y2RZQkKCybLIyEhE\nRkb++lLZmGAnQyKSqDY92EfGToZEJHFtOsSJiKSOIQ72Eyci6WKIExFJWJsOcd7QJCKpa9MhTkQk\ndQxxcBZDIpIuhjgRkYQxxImIJIwhDrA9hYgkiyFORCRhDHGwqyERSVebDnGO1CQiqWvTIV6HE2ER\nkVQxxMHmFCKSLoY4EZGEMcTBtnEiki6GOBGRhDHEwbE+RCRdbTrEGd5EJHVtOsTrsIMhEUkVQxys\nkRORdDHEiYgkjCEOQLCPIRFJlH1zNnrmmWegUCgAAD4+PoiIiMDy5cshl8sRGhqKV155BTqdDkuX\nLsXp06fh6OiIuLg4+Pr6WrXwRERtXZMhXlVVBSEENm3aZFj29NNPY/369ejevTv+/Oc/48SJE8jN\nzYVGo8EXX3yBzMxMrFq1Ch999JFVC09E1NY1GeKnTp1CRUUFZs6ciZqaGkRGRkKj0aBHjx4AgNDQ\nUKSkpKCoqAjDhg0DADz88MPIysqybsktiI0pRCRVTYa4s7MzZs2aheeeew45OTl48
cUX4eHhYVjv\n5uaGK1euQKVSGZpcAEAul6Ompgb29s1qsWkRbAsnIqlrMmH9/Pzg6+sLmUwGPz8/uLu74/r164b1\narUaHh4eqKyshFqtNizX6XStOsCJiO4HTfZO2b59O1atWgUAKCwsREVFBVxdXXH58mUIIZCcnAyl\nUong4GAkJSUBADIzMxEQEGDdkhMRUdM18cmTJ2PBggWYOnUqZDIZVqxYATs7O8ybNw9arRahoaEY\nOHAgBgwYgIMHD2LKlCkQQmDFihW2KL9FsFWFiKSqyRB3dHTE2rVrTZZv3brV6Gc7Ozu88847lisZ\nERE1iYN9iIgkjCFORCRhbTrE2RRORFLXpkOciEjqGOJERBLGECcikjCGOBGRhDHEiYgkjCFORCRh\nbTrEOdyeiKSuTYc4EZHUMcSJiCSMIU5EJGEMcSIiCWOIExFJGEOciEjCGOJERBLWtkOc/cSJSOLa\ndogTEUkcQ5yISMIY4kREEsYQJyKSMIY4EZGEMcSJiCSsWSFeUlKC4cOH4/z587h06RKmTp2KadOm\nYcmSJdDpdACADRs2YPLkyZgyZQqOHTtm1UJbimAfQyKSuCZDvLq6GjExMXB2dgYArFy5ElFRUdiy\nZQuEEEhMTER2djbS09Oxbds2xMfHIzY21uoFJyKiZoT46tWrMWXKFHTu3BkAkJ2djZCQEABAWFgY\nUlJSkJGRgdDQUMhkMnh7e0Or1aK0tNS6JSciojuH+JdffgkvLy8MGzbMsEwIAZlMBgBwc3NDeXk5\nVCoVFAqFYZu65UREZF32d1q5Y8cOyGQy/Pzzzzh58iSio6ONathqtRoeHh5QKBRQq9VGy93d3a1X\naiIiAtBETXzz5s1ISEjApk2b0K9fP6xevRphYWFIS0sDACQlJUGpVCI4OBjJycnQ6XTIy8uDTqeD\nl5eXTS6AiKgtu2NN3Jzo6GgsXrwY8fHx8Pf3R3h4OORyOZRKJSIiIqDT6RATE2ONshIRUQPNDvFN\nmzYZ/p+QkGCyPjIyEpGRkZYplY3wafdEJHUc7ENEJGEMcSIiCWOIExFJGEOciEjCGOJERBLGECci\nkjCGOBGRhLXpEGc3cSKSujYd4kREUscQJyKSMIY4EZGEMcSJiCSMIU5EJGEMcSIiCWvTIS44Fy0R\nSVybDnEiIqljiBMRSRhDnIhIwhjiREQSxhAnIpIwhjgRkYQxxImIJKxNhzh7iROR1LXpECcikjr7\npjbQarVYtGgRLl68CJlMhtjYWDg5OWH+/PmQyWTo06cPlixZAjs7O2zYsAH79++Hvb09Fi5ciKCg\nIFtcAxFRm9VkiO/btw8A8PnnnyMtLQ1//etfIYRAVFQUBg8ejJiYGCQmJsLb2xvp6enYtm0b8vPz\nERkZiR07dlj9AoiI2rImQ3z06NEYMWIEACAvLw8eHh5ISUlBSEgIACAsLAwHDx6En58fQkNDIZPJ\n4O3tDa1Wi9LSUnh5eVn1AoiI2rJmtYnb29sjOjoay5Ytw4QJEyCEgEwmAwC4ubmhvLwcKpUKCoXC\nsE/dciIisp5m39hcvXo1du3ahcWLF6OqqsqwXK1Ww8PDAwqFAmq12mi5u7u7ZUtLRERGmgzxr776\nChs3bgQAuLi4QCaTITAwEGlpaQCApKQkKJVKBAcHIzk5GTqdDnl5edDpdK2+KYUz0RKR1DXZJj5m\nzBgsWLAA06dPR01NDRYuXIhevXph8eLFiI+Ph7+/P8LDwyGXy6FUKhEREQGdToeYmBhblJ+IqE1r\nMsRdXV3xwQcfmCxPSEgwWRYZGYnIyEjLlIyIiJrEwT5ERBLGECcikjCGOBGRhDHEiYgkrE2HuOA8\nhkQkcW06xImIpI4hTkQkYQxxIiIJY4gTEUkYQ5yISMIY4kREEsYQJyKSMEmEuBACG386j2JVVdMb\n39WBLXs4IiJbk0SIH796Ayu/P4XXv8hs6aIQEbUqkgjxaq2+yqyqqmnhkhARtS6SCPE6fBIPEZEx\nSYU4EREZY4gTEUmYJEJcJmvpEhARtU6SCHFrYRM7EUldmw5xIiKpk1SIs+ZMRGRMEiHOJnEiIvMk\nEeJERGSe/Z1WVldXY+HChbh69So0Gg3mzJmD3r17Y/78+ZDJZOjTpw+WLFkCOzs7bNiwAfv374e9\nvT0WLlyIoKAgW10DEVGbdccQ/+abb+Dp6Yk1a9bg+vXrmDhxIvr27YuoqCgMHjwYMTExSExMhLe3\nN9LT07Ft2zbk5+cjMjISO3bssHxpOWSTiMjIHUN87NixCA8PB6CfSVAulyM7OxshISEAgLCwMBw8\neBB+fn4IDQ2FTCaDt7c3tFotSktL4eXlZf0rICJqw+7YJu7m5gaFQgGVSoVXX30VUVFREEJAVjv6\nxs3NDeXl5VCpVFAoFEb7lZeXW7fkFsCKPRFJXZM3NvPz8/HCCy/g6aefxoQJE2Bnd3sXtVoNDw8P\nKBQKqNVqo+Xu7u4WK6SMQzaJiMy6Y4gXFxdj5syZePPNNzF58mQAQP/+/ZGWlgYASEpKglKpRHBw\nMJKTk6HT6ZCXlwedTsemFCIiG7hjm/jHH3+Mmzdv4sMPP8SHH34IAHj77bcRFxeH+Ph4+Pv7Izw8\nHHK5HEqlEhEREdDpdIiJibFJ4YmI2ro7hviiRYuwaNEik+UJCQkmyyIjIxEZGWm5kpnBJmwiImMc\n7ENEJGEMcSIiCZNEiFurb4pgAw0RSZwkQrwO+3UTERmTVIgTEZExhjgRkYRJIsQ5YJOIyDxJhLgt\n1Gh1LV0EIqK7xhCvdezqjZYuAhHRXZNUiFu6SyB7uxCR1EkqxK2pvLKmpYtARHTXJBXiMis+MnnJ\n11lWOzYRkbVIKsStOcKy4Gal1Y5NRGQtkghxa9bA61RWs3cKEUmPJEKciIjMY4jX87d951q6CERE\nd0VSIV6/S+DGn87j6JXrFj3+ml2nLXo8IiJru+OTfVqzld+fAgDkrBp/z8dgN3EikjpJ1cSJiMiY\nJEKcE2AREZkniRCvY4th8lodG1mISDokEeK62vS2s0FpJ314EADw4r8PY9i7e61/QiKiX0EiIa7/\n184G7SpHc/WzGf54ohBXSiugqdGh5/xvkZB6yernJiK6WxIJcX2Ky1qgcfxmZTUAIP7HMzY/NxFR\nU5oV4kePHsWMGTMAAJcuXcLUqVMxbdo0LFmyBDqdfrj6hg0bMHnyZEyZMgXHjh2zaCFFXXOKhTNc\nNNLIfr5IVW8b/b+aGh2mfZIKdZXtZzs8d60cmhpOC0BEppoM8U8++QSLFi1CVVUVAGDlypWIiorC\nli1bIIRAYmIisrOzkZ6ejm3btiE+Ph6xsbEWLaS4Q3NKtRWeyDNq7U+G/6tqQ1tVVYOU8yUIe3ef\nYV3+jQrM+DTNUFu3hoIblRgdn4R3dmZb7RxEJF1NhniPHj2wfv16w8/Z2dkICQkBAISFhSElJQUZ\nGRkIDQ2FTCaDt7c3tFotSktLLVbI3p0V
AIAgn3Ym6z4/dMVi5zFn5Hv7jX4uUWvw3McpGPt+Eh5/\n7yccOFuMHRm5Vjt/2S0NAODQxTKrnYOIpKvJEA8PD4e9/e2BnUIIQ9u0m5sbysvLoVKpoFAoDNvU\nLbcUV0f9+TsqnEzWVWhs37xxKKcMpwrKUVGtBQAUq6qsdq66byFS7iu/K7sA+05da+liEN2X7vrG\npl29fn5qtRoeHh5QKBRQq9VGy93d3S1TwnrMtWHbYprapvzrYA6qarRWOXbdHOotcVPXUl7alIE/\nfnaopYtBdF+66xDv378/0tLSAABJSUlQKpUIDg5GcnIydDod8vLyoNPp4OXlZbFC1uXXe7tNe4hc\nLr11x317zv8WC/973GJlMeeWRouNP11AiaoKNY200e87fQ3PfpSCyuq7C3tDTfweynWmsBy5ZXf+\n/bRV+TcqGr2xTSQldx3i0dHRWL9+PSIiIlBdXY3w8HAEBgZCqVQiIiICkZGRiImJsUZZAQCfp19G\nheZ2EG5qRv/tLWmXrVaeOn/bdw6D4vbgrR23e+ZUVmsRvf0Yrl6vwMzPDiHjUhme+/hns/tX1WjN\nhsqvaU4Z89ckhK7e1/SGbczx3BsYsnKv1e+nENlCs2Yx9PHxwdatWwEAfn5+SEhIMNkmMjISkZGR\nli1drfr5Nf/L4/jkwAWTbS6X3EJ3L5e7anawZEWsqrYL4Je/XEX88w+j4EYlHl2ZCADIu1FhONfx\nqzfQc/63GDegKz6cPggAcKOiGgNjd2PemAC88ngf4zIamlOaV44arQ72ckl0/28xpwv192sO5ZRi\nakiPFi4N0a8jiXd7w2A+X6Q2+vlQTinC1uzD1sON16x0OoG1u0+j4EYlMq9cR5laY5WyAvomnLoA\nB4ADZ4tNtvnueIHh/9dre6A0bC4qUVUh6+pNAMZt/0cul+HT5ItIvVBitP2V0lvo/fb32J6Ri00/\n5xiWl9d2gbxxqxqzPjuEghuVRtd/9Mp1XC5pvNll+Jp9+OKQ9b/N2Iqudgiw3Ib3Gc5dK8cbX2Q2\n2txGdzb9H6kYGLu7pYvRJFVVDX65bNueZJKYT1zexCifuiaK6B3HMbC7J25W1CDEz7hNPj2nFOv3\nnkP6xVKkXdR3f4x8vDcAoFcnN5MPBlt4f88ZdGvnjOgdt9vsf7lchuAe7QEAg+L2GG1/OKcUXds5\n45kPUwzL6s+nfvaavoaZkHoJmfUemLHtcC5mhvphS/plJJ66hsTaD5i6fZ/+20GTY9URQuBSyS1E\n7ziOiN/crrUu/SYbj/ftjJ4d3PDTmWuYMaSnyb5pF0rg19Gteb+MBl7Z8gt+O9AbYx7qek/7N0YI\ngbe/0v++m/q7spRiVRVmfJqO/BuVmBnqh8AHTLvK0p0dPFfS9EYtLPVCCeJ3n0F6TimOLx0Dd2cH\nAMDV6xXwcLY3/GxpkgjxuzH2/QMAgH3zRhgFyJS/pwIAyitvd0lcv1f/OLZu7VxaKMTPmiyb9GEK\nOioc8ewgH6Plx6/ewGQz7en/PZKLB7t4wF4uw+ZUfW05s8ETj97ZeQLTBvfAP8w0Q/18/vab49rN\nSmi0Ovi0dzUsqz+pY9DSXTi2NBwA8FlKDj5LyTGsK1FrEDU6wPBzyrliTPtHmrnLNivvegWGrtqL\nj6YH48kB3bDzWD52HsvHgbdGoruXKwpvVqJGJ/CAp0uzjne+SIUTeTcxYaA3AP0Hm5ebI/afvoZq\nrf6iNFasFZ+7psLXmVfxxhMBUDb4MG7ol8tluFikNnrNs/NuoFcnBR5btRclag12vx6GgC7me3zd\nqKiGvZ0Mbk7WeztP/0cqfDu4YcUzA37Vcf518CJG9+uC7l6uTW9cK/2i5caclKk1kMtleP/Hs3hr\n7INwdpBb5LgFNyoNGQPAaIT1Y6v2ondnBfa8Mdwi52pIMiGe/vYohCxPbHrDWg0H6dQ5kX/TZFl4\nYFe0c3HAt8fz77V4FlWs0mDjT6aBa87rXxxt1nZ9F/9gsuzfP+cg5uvbI0FDVuh/v9+9OgwCApXV\nOjz70e1a/83KGox9PwlO9qatcO/vOYvhAZ3wSI/2UFXVmA3wovIq/HnTYax9biD8OylwprAc87Yd\nxZzhvXChWP8hOmfzL/j+tWGGfYa9uw/tXR1QdkvfJJSzajyulVfCyV4OV0c5Us6X4GEfT5wquAlv\nTxeUqjX4/NBl/Cdd37Tm19ENgQ+0w6KvskzKU1Mb5j+eKETfru5Y/cMpOMjtMGOIL25VaTG0Vwdc\nKFahu5crDpwpxuj+XYz2V1fVILesAg921YdrZbUW+Tcq4dfRDaPj9aN+fz+0p8l5G5pU+81qfFA3\nODvIcb5IhfHrkjG6X2eU1DZ7/ffIVUSP7QsAGLV2PyYP6o45I3phTkIGvs8qMPxu7taV0lvwcHbA\npVI1BjzQrtF7SgfPleDguRIsnxiIm5U1aOdiWqvMKVZDKwR6dbo9ZuRQTikG9WgPOzsZvj+ej9j/\nnUDs/05g/dRHDB+wTXl+o/nOAHerWFVl9IHa3csFf3zMr8n9hBDQCf20H439ftQNxqvohH7epX/X\nVnTOXVOZ2csyJBPind2drXZsZ3s7/G16MEZm5GLetqOIGt3HbC35flM/wOsbt+5Ao/ucKmh8ENeG\nvefw8sjeRsFf3zs7T+DI5et4fO1P2BkZiqfWJwPQB3d9T35gfP66AAeAD/efw7s/6J+FOqpvZyQ2\nMYjoqfXJjYbbN0fzsG7qI3jx34eNlv/3yFUAMPwd+Hdyw4UiNR7198LfX1DCw9kBo9buN3x7O79i\nHI5fvYGJtc1SZ5c/aTiWrsH89Cu+OwlvTxe899xAk/J8/NN5RI0OwPHamTT3nLx9bcXlVTh3TYXe\nnRU4X6TG6h9OYc6IXoYAB/S1v02pl/CQtwfyb1RA6etlUuPdd+oaLhSr8bzSB+7ODhhWbxqJt8f1\nw/LvTuJ5pQ9iJjyEwCW78KdQPyx6qr9hm0+TLyLu25NIjh5p9I1tz4lC/Kn291j3LfjguWJMr/0w\nz1k13uh1/uTABYT4eeF/R/MwK9TPJBwb66jw0f7zeHbQA4Y8qKrRwlFuB5lMhsSThbil0eIvW49C\no9Vh++whUPa83axaeLPS6Fib0y7j2UE+qKrWwUEuQ2W1DtP+kYq1zw3EIz3ao7Jaize3H8Ppgps4\nU6gyXIc5DTtJXCpRm/3mbA2SCXFAX1P59pjla8s7j+XjOWV3TB7kg8mDfFCh0ZqE+PkV41Ct1Zmt\n0ZJe4qlrdwzV/x3NM/y/LsDvVl2A152vOXrO/7bRdXcapFX3N3ChNqxTL5QiaKnpzbWT+TcNAQ7A\nqBtpZbVxk01KbfPVqL6d0c7FAXHfnjSs256Ri4E+noj6ItPkHNsycrEtI9coRBo+wCRg0fcm+z3c\n3ROf/l4JrRDYe/Ia5n+pvx+QcanU0DuqzvLv9GXZejgXzyu7AwD+kXzR6N5BXXlzyyrg094VV0pv\
nYf3es9h6+PbUE3Xfgkc82MmwrOEcR8dyb2Du5l9w+FIZPjlwATsjh6GTu35Edt0HwtIJ/Y3uwwDA\n6h9OYfUPpxDk0w4rJw3A+HXJWDS+H14Y0hOz/s/4w3jx19mGb3U5xWqMX2f8N3fumsrs6/nMhyk4\nu/xJjH0/CTkNbvhfLFab3Ocx1zXYljc3ZcJGIx5yc3MxatQoJCYmwsfHp+kdGvH+njNWqSU3/ISt\n/zW1/vq6QHjjiQBsy7iCdi4Ohh4kRNaWMGswfvepvnYb0EVhqCHeix1zhuDZj+6ttrhsYiDkMlmz\nB9K9OzkIb21venbTPwztabjX8lSQ/t7Ir/X0w974OjOv6Q3riX9+IN7Yar6p8uu5j+FCsQoTgryx\nbu85rEs0zSM3RznUGuMKwpvhD2LuyN53VQ6g6eyUXIhXaLR45sODGPNQV7O/vHvVMMRvVFTjpU2H\nkXqhFF/NfQwPd/cEoG9DdHaQG2oNtzQ1CFyyC3yqG9H944+P9cS/DuZY/Lj3ct+iqeyUVHMKALg4\nyvFDVBgAWCzE63/tq9POxQGf/3mIyfKGbYyujva4sHI89pwoREAXd4St4QhJIqmzRoBbiyQG+zTm\n5wWPW+Q4Yy3QF3l0/y7o0cEVOavGI23hKHw99zELlIyI6M4kVxOvr1s7F/xtWjAOnC36VfNgPDmg\nmwVLBXTxcEYXD+PeNBMGemPO8F7ILbuFqC8ycUujNfpqlXGpDM9+lIJxA7riu+MFeDbYB1NDuhvu\ncOesGo+9pwrh4mAPdVWNoScAEbVtkg5xQN9jZXxQt3sO8eXPBJrt82pJJ98ZC0d7O8jtZOjv7YHE\nvwxHicp42P8g3/ZIXTAKXTycjLpVebdzRt4Nfdeox/ve7qf8ZviDWLPrNO7W4UWjzQ4+edTfC6kX\nGh9U8ZcnArC2wXNGd8wZilf/cwRXr1fc9fGIyDIkH+J1zi5/EsWqKgxZuddk3ae/V6Jaq8NjvTti\nS9plrPz+lGHdUwOaN+DgXmyfPQSd3J3g4mg8KqxbOxd0a2c68rBrO9O+8LvfGI5bZh58MW5AN6MQ\n93JzxA+vDYOXmyNuVWshl8ng7CBHr4XfAQA6uzvhq7mPoaPCCedXjDMsB4A9b4Shd2d37Dt1Dcnn\nivFp8kWjc23502AM9u+A77IKcLLeYKlBvu1xcP7j+MO/0rH/dJFheb9uHvj8z0Pu2LWvzqpJAwzd\n3ojuZ/Of7GuV4943Ie4gtzN68k/OqvHY9HMOMq/cwKh+t2uwLw3vhSG9OiDQux3srDx3Rv2BBvdK\n4WQPhZnh1D07uOK1UX0ghMDMUD94ujoa1nnUm8XwwS7ueGm4PyYF376rXdfv18neDqfjbg9MGdm3\nM0b27YzFT/VH4slCvPjvw9jzxnD4147Ae21UH8xOyAAAHF0yxrCfS72hy0eXjDF8s9n9ehj++uMZ\ntHNxQKlag1H9OmPHL1eNhlE3/OAKfMDDqMumJQdemev2RWQrU37T3SrHvW9CHNAH+fGlYwwhNWNI\nT8ww7WCCIB9PG5fM8mQyGV5/IqDJ7Xa9HmZ2+bKJgRji3/iHzKh+XXBhpfnuUGP6dzFqgnrUvwO+\nzyrAa6P6GC0P6OKOj35nPKDkxxOFAIBF4/vhD0N7GjUdvR/xMCY+8gCO5V7HbzccNMyb8srI3vho\n/3ns+CXXZPBFY+Y/2Rerar9xLZsYiKpqLX65XGY0e+TzSh+jQSrNda9NWdbm6eqA67ea99DuhqNd\nRz7YCfvqfZuyhddG9cEHFuwmvOeNMIyOT2p0/SM9PHHk8vVG11tL+tujcKFIbVTRsqT7KsQBWG2m\nsPvNjEd973of3w767pUNZ4h8YYgvBvi0M8y+eCcrJg1Aj/0X8IehPQ3znn/58lDszi7ExEceAKD/\nkK1/09debofIUX0QOaoPTheUw6e9Cx5asuuO5xnaq4Ph/xOCusHT1RGR/zlitM2qSUFYNjEQldU6\nzPrsEA5fuvMouy9fHgpVZY3ZpzO9Gf4gjly+jj0nCw3LPJztcbPSNs+Arf/7ak4z1gtDexpCPGfV\neKs//aq+dycH4dlgH8jtZNhzshDZeZYZLFc3sZk5nq4O+HLOUPgt0DcjZiwajVK1BqVqDSLqTVxl\nDZ3dna06bYikuxiSbfXr5oEDb43ErFDjSYNkMlmzAhzQ/0HHTOhv9OCK4B7tm91e+GBXd7g52WP/\nvBEm6xJmDcaMR32RHD3SaMa/uhrQkgn98btHbw/jtrOTwclejnYuDtg+ZygurhyHb155DDmrxuOt\nsQ+aHD+4R3uEBXRCBzMP7J75mJ/hwR1vhj+In94cgcyYMUhdMAqA6QdfS9MJgR1zhiBlvnE33WUT\nA+84IOXyXXrwAAAKQElEQVQhbw+TZQfeGolXH2/eSMT+3TzwvLK74duyJad0v9O0x0snPGT0ra+D\nwgl9urhjsH8HvD2uX5PHrhvsB+ibI9eamfvGHGcH60csQ5zuSncv11bx0OaeHd3wzSuPYc3kIIwf\n0A3xzw9EaJ+OWDYxED7tXeHsIEdXD2ejN1FHhRPiJg6AwskefzAzu6BMJjM0tb08ojdyVo03OxBs\nkG97JMwaDAB4vG9npMx/HC6O+lkVAf19CN8ObrCzk6FrO2ecjhuLz198FMeX6u8juDub/wLs0sS0\nqNtmD8G0wT3g36l5c7R7NHIeAIAABvl6wbt2at9RfTsDAB7pfuemxge7uhuNgTi2dAy6e7ka3XOp\n08Xj9ofdb3rWzpHva/xh35wHnTfnz235M4FwdpAjZ9V4OMhNdxgeoH8de3dWYM3kIKN1zbk35tPe\nBTvm6Ntm7WS2m4u+Oe675hRqO4J8PBHk44nnlOZvGCVHjzS7PCs2vNnnWP1sEFb/cMpkHu3QPh1N\naqyxv30Ivh3cMLI2EOs42evD2d3ZwbDP7/+Zjp/O6NugXR3luKXRYlS/zujVSYEH2rsYzTPygKcL\nrl6vgNK3PX5Te7N8XeJZXCuvRELqZbg5mg//3p0V+KW2DbiHl6vhoeJPBXXDsD4djbYd1a8LzsQ9\nCcfaaYY3zhiEuZt/wce/G4Tf+HkZnqoz8zH9Qy1OvjMW1TodPGqbL3t2dMOGaY/g7f9mYdyAbng2\n+AH06OCKEWv2498zQ6Ds6YXMK9fRv5tpTb6hCGV3fFHvKV3nlhv3pvpgysN47fNMdGvnjPza7rf1\nj6v/YNA3rfzvlVAM8Ln9EA5zc3pPH9wDF4tV6KhwwrbDuXhpuD/idp40mm/+Ie92GPCA/gPOQW5n\n+Kb3gKcL/m9miGHqYUD/wb63mZOzWYSwkStXroiAgABx5coVW52SqNW6WaERf/3xtFi354zYlZUv\nfKN3igVfHjOsv1p2SySfLRJb0i6JvOu3xLfH8kyOcehiifCN3iliv8k2Wu4bvVP4Ru8Usz47ZFhW\noakRT607IH65VHpP5V27+7Q4nFNyT/veSdqFEjHyvX2G
Mo9au19UaGqEEPrrGPt+kmHbGxUakXy2\nSFRoaoRv9E6x9dBlw3719Vn4nfCN3il2ZeXfc7kOni0ScxIOi8slavF/KReFpkYrhBDigz1nxKn8\nm0IIIUpVVYbtY746Lnyjd4qFta9hmbpK+C/4ViSk5txzGeo0lZ0McaIWptXqxCdJ54Wqsvqu991z\nokBUVtcYLTtbWC6e3pAsCm9WWKqIVnezQmMUikIIcTinxGRZQ3/8V7pJiC+uDdTq2uC1hUvFajF0\nZaLIv27533lT2Sm5WQyJiOrUaHWo1gqjAXU6nUBVjc5kkJ1U3XezGBIR1bGX28G+QVbb2cnumwBv\nDvZOISKSMIvWxHU6HZYuXYrTp0/D0dERcXFx8PW9+0ElRETUPBatie/ZswcajQZffPEF/vKXv2DV\nqlWWPDwRETVg0RDPyMjAsGH6B5M+/PDDyMrKsuThiYioAYuGuEqlgkKhMPwsl8tRU2ObuSOIiNoi\ni4a4QqGAWq02/KzT6WBvzw4wRETWYtGEDQ4Oxr59+zBu3DhkZmYiIOD2VKlarX7mt4KCgsZ2JyKi\nBuoysy5DG7JoiD/xxBM4ePAgpkyZAiEEVqxYYVhXVKSfJ2L69OmWPCURUZtQVFRktrefzUZsVlZW\nIisrC506dYJc3nY64hMR/RparRZFRUUIDAyEs7PpvOQ2C3EiIrI8jtgkIpKwVt91RCqjQI8ePYr3\n3nsPmzZtwqVLlzB//nzIZDL06dMHS5YsgZ2dHTZs2ID9+/fD3t4eCxcuRFBQUKPb2lJ1dTUWLlyI\nq1evQqPRYM6cOejdu7ekrkGr1WLRokW4ePEiZDIZYmNj4eTkJKlrqFNSUoJJkybhn//8J+zt7SV3\nDc8884yhq7GPjw8iIiKwfPlyyOVyhIaG4pVXXmn0fZ2ZmWmybUvYuHEj9u7di+rqakydOhUhISGt\n93Ww+LyJFrZr1y4RHR0thBDiyJEjYvbs2S1cIlN///vfxVNPPSWee+45IYQQL730kkhNTRVCCLF4\n8WKxe/dukZWVJWbMmCF0Op24evWqmDRpUqPb2tr27dtFXFycEEKIsrIyMXz4cMldw48//ijmz58v\nhBAiNTVVzJ49W3LXIIQQGo1GvPzyy2LMmDHi3LlzkruGyspK8fTTTxst++1vfysuXbokdDqd+NOf\n/iSys7MbfV+b29bWUlNTxUsvvSS0Wq1QqVRi3bp1rfp1aPXNKVIYBdqjRw+sX7/e8HN2djZCQkIA\nAGFhYUhJSUFGRgZCQ0Mhk8ng7e0NrVaL0tJSs9va2tixY/Haa68BAIQQkMvlkruG0aNHY9myZQCA\nvLw8eHh4SO4aAGD16tWYMmUKOnfWPx1Iatdw6tQpVFRUYObMmXjhhRdw6NAhaDQa9OjRAzKZDKGh\noYZraPi+VqlUZre1teTkZAQEBGDu3LmYPXs2RowY0apfh1Yf4lIYBRoeHm40qEkIYXgOpZubG8rL\ny02uo265uW1tzc3NDQqFAiqVCq+++iqioqIkdw0AYG9vj+joaCxbtgwTJkyQ3DV8+eWX8PLyMoQb\nIL2/JWdnZ8yaNQuffvopYmNjsWDBAri4uJiU1dz7urHrsrWysjJkZWXhgw8+QGxsLObNm9eqX4dW\n3yYuxVGg9du/1Go1PDw8TK5DrVbD3d3d7LYtIT8/H3PnzsW0adMwYcIErFmzxqRcrf0aAH1Ndt68\neXj++edRVVVlUq7WfA07duyATCbDzz//jJMnTyI6OhqlpaUm5WrN1+Dn5wdfX1/IZDL4+fnB3d0d\n169fNylXZWWlyfva3HW1xDV4enrC398fjo6O8Pf3h5OTk9Egxdb2OrT6mnhwcDCSkpIAwGQUaGvV\nv39/pKWlAQCSkpKgVCoRHByM5ORk6HQ65OXlQafTwcvLy+y2tlZcXIyZM2fizTffxOTJkyV5DV99\n9RU2btwIAHBxcYFMJkNgYKCkrmHz5s1ISEjApk2b0K9fP6xevRphYWGSuobt27cbZi8tLCxERUUF\nXF1dcfnyZQghkJycbLiGhu9rhUIBBwcHk21tbdCgQThw4ACEEIZrGDJkSKt9HVp9P/G6u9hnzpwx\njALt1atXSxfLRG5uLt544w1s3boVFy9exOLFi1FdXQ1/f3/ExcVBLpdj/fr1SEpKgk6nw4IFC6BU\nKhvd1pbi4uLw/fffw9/f37Ds7bffRlxcnGSu4datW1iwYAGKi4tRU1ODF198Eb169ZLU61DfjBkz\nsHTpUtjZ2UnqGjQaDRYsWIC8vDzIZDLMmzcPdnZ2WLFiBbRaLUJDQ/H66683+r7OzMw02bYlvPvu\nu0hLS4MQAq+//jp8fHxa7evQ6kOciIga1+qbU4iIqHEMcSIiCWOIExFJGEOciEjCGOJERBLGECci\nkjCGOBGRhDHEiYgk7P8B0dYmu+FLckoAAAAASUVORK5CYII=\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "----\n", " .\n", "\n", "MENENUS:\n", "Rothy firertoud a to saken; brours Vulls hindss\n", "Serrserd be claus kist yous surd your dondsther ums.\n", "\n", "GMRUS:\n", "with\n", "And my itenten.\n", "\n", "CORUDnd Toh ReDIUS:\n", "Hald lim nod praki pat, lif haw wiviT \n", "----\n", "iter 6231, loss 51.562062\n" ] } ], "source": [ "while True:\n", " try:\n", " with DelayedKeyboardInterrupt():\n", " # Reset\n", " if pointer + T_steps >= len(data) or iteration == 0:\n", " g_h_prev = np.zeros((H_size, 1))\n", " g_C_prev = np.zeros((H_size, 1))\n", " pointer = 0\n", "\n", "\n", " inputs = ([char_to_idx[ch] \n", " for ch in data[pointer: pointer + T_steps]])\n", " targets = ([char_to_idx[ch] \n", " for ch in data[pointer + 1: pointer + T_steps + 1]])\n", "\n", " loss, g_h_prev, g_C_prev = \\\n", " forward_backward(inputs, targets, g_h_prev, g_C_prev)\n", " smooth_loss = smooth_loss * 0.999 + loss * 0.001\n", "\n", 
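{ "cell_type": "markdown", "metadata": {}, "source": [ "As a quick check of this initialization: a random model assigns probability $1/65$ to each character, so the loss over 25 time steps is $-25 \\log(1/65) \\approx 104.4$." ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# The starting smooth_loss corresponds to a uniform random model\n", "print(-np.log(1.0 / X_size) * T_steps) # ~104.36 for X_size = 65, T_steps = 25" ] },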
" # Print every hundred steps\n", " if iteration % 100 == 0:\n", " update_status(inputs, g_h_prev, g_C_prev)\n", "\n", " update_paramters()\n", "\n", " plot_iter = np.append(plot_iter, [iteration])\n", " plot_loss = np.append(plot_loss, [loss])\n", "\n", " pointer += T_steps\n", " iteration += 1\n", " except KeyboardInterrupt:\n", " update_status(inputs, g_h_prev, g_C_prev)\n", " break" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Gradient Check\n", "\n", "Approximate the numerical gradients by changing parameters and running the model. Check if the approximated gradients are equal to the computed analytical gradients (by backpropagation).\n", "\n", "Try this on `num_checks` individual paramters picked randomly for each weight matrix and bias vector." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "from random import uniform" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Calculate numerical gradient" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def calc_numerical_gradient(param, idx, delta, inputs, target, h_prev, C_prev):\n", " old_val = param.v.flat[idx]\n", " \n", " # evaluate loss at [x + delta] and [x - delta]\n", " param.v.flat[idx] = old_val + delta\n", " loss_plus_delta, _, _ = forward_backward(inputs, targets,\n", " h_prev, C_prev)\n", " param.v.flat[idx] = old_val - delta\n", " loss_mins_delta, _, _ = forward_backward(inputs, targets, \n", " h_prev, C_prev)\n", " \n", " param.v.flat[idx] = old_val #reset\n", "\n", " grad_numerical = (loss_plus_delta - loss_mins_delta) / (2 * delta)\n", " # Clip numerical error because analytical gradient is clipped\n", " [grad_numerical] = np.clip([grad_numerical], -1, 1) \n", " \n", " return grad_numerical" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Check gradient of each paramter matrix/vector at `num_checks` individual values" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def gradient_check(num_checks, delta, inputs, target, h_prev, C_prev):\n", " global parameters\n", " \n", " # To calculate computed gradients\n", " _, _, _ = forward_backward(inputs, targets, h_prev, C_prev)\n", " \n", " \n", " for param in parameters.all():\n", " #Make a copy because this will get modified\n", " d_copy = np.copy(param.d)\n", "\n", " # Test num_checks times\n", " for i in range(num_checks):\n", " # Pick a random index\n", " rnd_idx = int(uniform(0, param.v.size))\n", " \n", " grad_numerical = calc_numerical_gradient(param,\n", " rnd_idx,\n", " delta,\n", " inputs,\n", " target,\n", " h_prev, C_prev)\n", " grad_analytical = d_copy.flat[rnd_idx]\n", "\n", " err_sum = abs(grad_numerical + grad_analytical) + 1e-09\n", " rel_error = abs(grad_analytical - grad_numerical) / err_sum\n", " \n", " # If relative error is greater than 1e-06\n", " if rel_error > 1e-06:\n", " print('%s (%e, %e) => %e'\n", " % (param.name, grad_numerical, grad_analytical, rel_error))" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "scrolled": false }, "outputs": [], "source": [ "gradient_check(10, 1e-5, inputs, targets, g_h_prev, g_C_prev)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": 
"python", "pygments_lexer": "ipython3", "version": "3.6.1" } }, "nbformat": 4, "nbformat_minor": 2 }