{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Vanilla Recurrent Neural Network\n", "
\n", "Character level implementation of vanilla recurrent neural network" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Import dependencies" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": true, "nbpresent": { "id": "c14f5fc3-7ae4-4775-aca6-4e89462d83df" } }, "outputs": [], "source": [ "import numpy as np\n", "import matplotlib.pyplot as plt" ] }, { "cell_type": "markdown", "metadata": { "nbpresent": { "id": "812c485b-1558-4c42-bf28-17cf898d8049" } }, "source": [ "## Parameters Initialization" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": true, "nbpresent": { "id": "a1126408-fd54-4d9e-a169-b248b7656a36" } }, "outputs": [], "source": [ "def initialize_parameters(hidden_size, vocab_size):\n", " '''\n", " Returns:\n", " parameters -- a tuple of network parameters\n", " adagrad_mem_vars -- a tuple of mem variables required for adagrad update\n", " '''\n", " Wxh = np.random.randn(hidden_size, vocab_size) * 0.01\n", " Whh = np.random.randn(hidden_size, hidden_size) * 0.01\n", " Why = np.random.randn(vocab_size, hidden_size) * 0.01\n", " bh = np.zeros([hidden_size, 1])\n", " by = np.zeros([vocab_size, 1])\n", "\n", " mWxh, mWhh, mWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why)\n", " mbh, mby = np.zeros_like(bh), np.zeros_like(by) # memory variables for Adagrad\n", " parameter = (Wxh, Whh, Why, bh, by)\n", " adagrad_mem_vars = (mWxh, mWhh, mWhy, mbh, mby)\n", " \n", " return (parameter, adagrad_mem_vars)" ] }, { "cell_type": "markdown", "metadata": { "nbpresent": { "id": "fd3b570f-140d-4209-98cb-105a062f0cd7" } }, "source": [ "## Forward Propogation" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def softmax(X):\n", " t = np.exp(X)\n", " return t / np.sum(t, axis=0)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "collapsed": true, "nbpresent": { "id": "965c3245-fdbd-4049-8905-98a0c913f525" } }, "outputs": [], "source": [ "def forward_propogation(X, parameters, seq_length, hprev):\n", " '''\n", " Implement the forward propogation in the network\n", "\n", " Arguments:\n", " X -- input to the network\n", " parameters -- a tuple containing weights and biases of the network\n", " seq_length -- length of sequence of input\n", " hprev -- previous hidden state\n", "\n", " Returns:\n", " caches -- tuple of activations and hidden states for each step of forward prop\n", " '''\n", "\n", " caches = {}\n", " caches['h0'] = np.copy(hprev)\n", " Wxh, Whh, Why, bh, by = parameters\n", " for i in range(seq_length):\n", " x = X[i].reshape(vocab_size, 1) \n", " ht = np.tanh(np.dot(Whh, caches['h' + str(i)]) + np.dot(Wxh, x) + bh)\n", " Z = np.dot(Why, ht) + by\n", " A = softmax(Z)\n", " caches['A' + str(i+1)] = A\n", " caches['h' + str(i+1)] = ht\n", " return caches" ] }, { "cell_type": "markdown", "metadata": { "nbpresent": { "id": "c4713b4b-8f80-42e8-b0e4-658123b3a73f" } }, "source": [ "## Cost Computation" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "collapsed": true, "nbpresent": { "id": "938d86d0-a5a7-4a5f-9ef8-8e349fb78f04" } }, "outputs": [], "source": [ "def compute_cost(Y, caches):\n", " \"\"\"\n", " Implement the cost function for the network\n", "\n", " Arguments:\n", " Y -- true \"label\" vector, shape (vocab_size, number of examples)\n", " caches -- tuple of activations and hidden states for each step of forward prop\n", "\n", " Returns:\n", " cost -- cross-entropy cost\n", " \"\"\"\n", "\n", " seq_length = len(caches) // 2\n", " cost = 0\n", " for i in range(seq_length):\n", " y = Y[i].reshape(vocab_size, 1)\n", " cost += - np.sum(y * np.log(caches['A' + str(i+1)]))\n", " return np.squeeze(cost)" ] }, { "cell_type": "markdown", "metadata": { "nbpresent": { "id": "4c54d363-8bad-4c18-8882-3b1ed3c4b7f1" } }, "source": [ "## Backward Propogation" ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "collapsed": true, "nbpresent": { "id": "9d525583-9a97-44aa-940d-94b28dd39f29" } }, "outputs": [], "source": [ "def backward_propogation(X, Y, caches, parameters):\n", " '''\n", " Implement Backpropogation\n", "\n", " Arguments:\n", " Al -- Activations of last layer\n", " Y -- True labels of data\n", " caches -- tuple containing values of `A` and `h` for each char in forward prop\n", " parameters -- tuple containing parameters of the network\n", "\n", " Returns\n", " grads -- tuple containing gradients of the network parameters\n", " '''\n", "\n", " Wxh, Whh, Why, bh, by = parameters\n", "\n", " dWhh, dWxh, dWhy = np.zeros_like(Whh), np.zeros_like(Wxh), np.zeros_like(Why)\n", " dbh, dby = np.zeros_like(bh), np.zeros_like(by)\n", " dhnext = np.zeros_like(caches['h0']) \n", "\n", " seq_length = len(caches) // 2\n", "\n", " for i in reversed(range(seq_length)):\n", " y = Y[i].reshape(vocab_size, 1)\n", " x = X[i].reshape(vocab_size, 1)\n", " dZ = np.copy(caches['A' + str(i+1)]) - y\n", " dWhy += np.dot(dZ, caches['h' + str(i+1)].T)\n", " dby += dZ \n", " dht = np.dot(Why.T, dZ) + dhnext\n", " dhraw = dht * (1 - caches['h' + str(i+1)] * caches['h' + str(i+1)])\n", " dbh += dhraw\n", " dWhh += np.dot(dhraw, caches['h' + str(i)].T)\n", " dWxh += np.dot(dhraw, x.T)\n", " dhnext = np.dot(Whh.T, dhraw)\n", "\n", " for dparam in [dWxh, dWhh, dWhy, dbh, dby]:\n", " np.clip(dparam, -5, 5, out=dparam) # clip to mitigate exploding gradients\n", "\n", " grads = (dWxh, dWhh, dWhy, dbh, dby)\n", " return grads" ] }, { "cell_type": "markdown", "metadata": { "nbpresent": { "id": "1736f426-5cad-44ad-92bc-fc6ccc6758db" } }, "source": [ "## Parameters Update" ] }, { "cell_type": "code", "execution_count": 7, "metadata": { "collapsed": true, "nbpresent": { "id": "ef33b53a-de6b-4fb5-9111-508c9183f35f" } }, "outputs": [], "source": [ "def update_parameters(parameters, grads, adagrad_mem_vars, learning_rate):\n", " '''\n", " Update parameters of the network using Adagrad update\n", "\n", " Arguments:\n", " paramters -- tuple containing weights and biases of the network\n", " grads -- tuple containing the gradients of the parameters\n", " learning_rate -- rate of adagrad update\n", "\n", " Returns\n", " parameters -- tuple containing updated parameters\n", " '''\n", "\n", " a = np.copy(parameters[0])\n", " for param, dparam, mem in zip(parameters, grads, adagrad_mem_vars):\n", " mem += dparam * dparam\n", " param -= learning_rate * dparam / np.sqrt(mem + 1e-8) # adagrad update\n", "\n", " return (parameters, adagrad_mem_vars)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Sample text from model" ] }, { "cell_type": "code", "execution_count": 8, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def print_sample(ht, seed_ix, n, parameters):\n", " \"\"\" \n", " Samples a sequence of integers from the model.\n", " \n", " Arguments\n", " ht -- memory state\n", " seed_ix --seed letter for first time step\n", " n -- number of chars to extract\n", " parameters -- tuple containing network weights and biases\n", " \"\"\"\n", " Wxh, Whh, Why, bh, by = parameters\n", " x = np.eye(vocab_size)[seed_ix].reshape(vocab_size, 1)\n", " ixes = []\n", " for t in range(n):\n", " ht = np.tanh(np.dot(Wxh, x) + np.dot(Whh, ht) + bh)\n", " y = np.dot(Why, ht) + by\n", " p = np.exp(y) / np.sum(np.exp(y))\n", " ix = np.random.choice(range(vocab_size), p=p.ravel()) ### why not argmax of p??\n", " x = np.eye(vocab_size)[ix].reshape(vocab_size, 1)\n", " ixes.append(ix)\n", " \n", " txt = ''.join(ix_to_char[ix] for ix in ixes)\n", " print('----\\n %s \\n----' % txt)" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def get_one_hot(p, char_to_ix, data, vocab_size):\n", " '''\n", " Gets indexes of chars of `seq_length` from `data`, returns them in one hot representation\n", " '''\n", " inputs = [char_to_ix[ch] for ch in data[p:p+seq_length]]\n", " targets = [char_to_ix[ch] for ch in data[p+1:p+seq_length+1]]\n", " X = np.eye(vocab_size)[inputs]\n", " Y = np.eye(vocab_size)[targets]\n", " return X, Y" ] }, { "cell_type": "markdown", "metadata": { "nbpresent": { "id": "399cefc4-a924-41c2-994a-43902692ff76" } }, "source": [ "## Model" ] }, { "cell_type": "code", "execution_count": 27, "metadata": { "nbpresent": { "id": "237535d0-fd38-4421-baaf-592076b72e67" } }, "outputs": [], "source": [ "def Model(data, seq_length, lr, char_to_ix, ix_to_char, num_of_iterations):\n", " '''\n", " Train RNN model and return trained parameters\n", " '''\n", " parameters, adagrad_mem_vars = initialize_parameters(hidden_size, vocab_size)\n", " costs = []\n", " n, p = 0, 0\n", " smooth_loss = -np.log(1.0 / vocab_size) * seq_length\n", " while n < num_of_iterations:\n", " if p + seq_length + 1 >= len(data) or n == 0: \n", " hprev = np.zeros((hidden_size, 1)) # reset RNN memory\n", " p = 0 # go from start of data\n", "\n", " X, Y = get_one_hot(p, char_to_ix, data, vocab_size)\n", " caches = forward_propogation(X, parameters, seq_length, hprev)\n", " cost = compute_cost(Y, caches)\n", " grads = backward_propogation(X, Y, caches, parameters)\n", " parameters, adagrad_mem_vars = update_parameters(parameters, grads, adagrad_mem_vars, lr)\n", " smooth_loss = smooth_loss * 0.999 + cost * 0.001\n", "\n", " if n % 1000 == 0:\n", " print_sample(hprev, char_to_ix['a'], 200, parameters)\n", " print('Iteration: %d -- Cost: %0.3f' % (n, smooth_loss))\n", "\n", " costs.append(cost)\n", " hprev = caches['h' + str(seq_length)]\n", " n+=1\n", " p+=seq_length\n", "\n", " plt.plot(costs)\n", " return parameters" ] }, { "cell_type": "markdown", "metadata": { "nbpresent": { "id": "b12fa819-ef1c-468d-ac07-184adeb85519" } }, "source": [ "## Implementing the model on a text" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "nbpresent": { "id": "5457c250-fd02-466b-8286-02b5749b757b" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "data has 748 characters, 42 unique.\n" ] } ], "source": [ "data = open('data/text-data.txt', 'r').read() # read a text file\n", "chars = list(set(data)) # vocabulary\n", "data_size, vocab_size = len(data), len(chars)\n", "print ('data has %d characters, %d unique.' % (data_size, vocab_size))\n", "char_to_ix = { ch:i for i,ch in enumerate(chars) } # maps char to it's index in vocabulary\n", "ix_to_char = { i:ch for i,ch in enumerate(chars) } # maps index in vocabular to corresponding character" ] }, { "cell_type": "code", "execution_count": 29, "metadata": { "collapsed": true, "nbpresent": { "id": "c551d248-6228-48ec-83cf-d6ec46639245" } }, "outputs": [], "source": [ "# hyper-parameters\n", "learning_rate = 0.1\n", "hidden_size = 100\n", "seq_length = 25\n", "num_of_iterations = 20000" ] }, { "cell_type": "code", "execution_count": 30, "metadata": { "nbpresent": { "id": "b737c4a1-2309-4dbf-9897-4b564bb2497f" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "----\n", " Schu\n", "kwoj!wi nRA—— wj w .ycHrrhqagT:noh:ahvyqkSAnwpNtNTfpnk;hnnN\n", "YIuprpNto:ThHYmcwdwYRYldcTaNmR\n", "fkm!swem!\n", "jnBqb iTIp,g,,v\n", " qvw fkwS;g;!qmcBbicAlY;nbbIkwHYO:IsfhT :wl\n", "—e!eI\n", ".wwuoI—esSq—BOOcS\n", ",mOyBIT;;u \n", "----\n", "Iteration: 0 -- Cost: 93.442\n", "----\n", " s wheng thert s dot bn,\n", "B. wor TtoTar;\n", "Had .asshThanb,\n", "And as tha Telethabgas wberg penmerg te th;A,\n", "\n", "Iy Sar me:\n", "Thoudd herg d aberas and Then\n", "Tnt ps waaverornasiactham!,\n", "And bastevemer,r wing thitmth \n", "----\n", "Iteration: 1000 -- Cost: 68.503\n", "----\n", " veler anotrgr, ben,\n", "I d agep had I clent there thallther iak lecI stok.\n", "\n", "kere anis I shotheredint thavelest aseme way lealling per ais ay I dad\n", "Ind baas tha siverged ia,\n", "mar tredeelay,\n", "And shadow tod \n", "----\n", "Iteration: 2000 -- Cost: 39.597\n", "----\n", " ksithet I bethathen betherusd we panted in a ore that fornind lowked bowd if bether keother waAna s wond themod the bess be he in trould woore as fir aiTh\n", "And that Tas gre thathen boastougheramou the \n", "----\n", "Iteration: 3000 -- Cost: 22.733\n", "----\n", " gevelgarsing thes\n", "Theniem\n", "Thotroa to way,\n", "Iedt bthe ind to ssy,\n", "I shadowe kno dnas marsi\n", "Tsher win oon toould now trn took the passing there\n", "Had worn them really about the sas morn loodstoowing the te \n", "----\n", "Iteration: 4000 -- Cost: 11.318\n", "----\n", " d woads ind hewinr eng\n", "I d way leavelena t—ng in the one in thewother, as just as for that the passingrt wen toows ay,\n", "Iedt ow morn wt pe the better claim,\n", "Back.\n", "Oha d took the one less traved both\n", "An \n", "----\n", "Iteration: 5000 -- Cost: 6.830\n", "----\n", " delling the traveled by len woore it lent that the passing there\n", "Had worn them rhavesuho ben wit len wood,\n", "And sorry I could notstr asshaithen the on the baps the bether corh bent in themu juh way lea \n", "----\n", "Iteration: 6000 -- Cost: 3.523\n", "----\n", " rre besep took beavellea by,\n", "I sherdent the pit Inclaps sre pas graveler, the be owassed lot I soo\n", "Thoughthom enghpa wavilge aive\n", "Hac pin banr,\n", "And he Tan\n", "Tad gas owe in then ae\n", "I s gratshacops theh a \n", "----\n", "Iteration: 7000 -- Cost: 4.619\n", "----\n", " nd ages hence:\n", "Tso rowllong s morhaps the better claim,\n", "Becaus in theh the one thalllouvish the in the aook the on the undergrowth;\n", "\n", "shewans wans weer in ates hads wiverged in a yellow wood,\n", "And tha \n", "----\n", "Iteration: 8000 -- Cost: 4.595\n", "----\n", " ss len len as trot troddyow yreasI wantes rnat hassy,\n", "And en t ans ay In leakessharhads ohr cn bear;\n", "And eors in bend t—e herher stous that mornith as for thaviverged in a yellow wood,\n", "And sorry I do \n", "----\n", "Iteration: 9000 -- Cost: 2.303\n", "----\n", " g toode orl that morning equallyhea—ss way leads oir could notr;\n", "And that has marsing this with a sigh\n", "Somewhere ages and ages hence:\n", "Two roads diverged in a yellow wood,\n", "And sorry I coubd\n", "Tore agss i \n", "----\n", "Iteration: 10000 -- Cost: 1.423\n", "----\n", " st as fair,\n", "And having perhaps the better claim,\n", "Because it was grassy and wanted wear;\n", "Thoubd by,\n", "And looked down one as far as I could\n", "To where it bent in the undergrowth;\n", "\n", "Then took the other, as j \n", "----\n", "Iteration: 11000 -- Cost: 0.915\n", "----\n", " share I—\n", "I took the one less traveled by,\n", "And that has mareishen\n", "\n", "I challlas lasen ino ste traveler, look the one lllenithen has mirsin ans lookes dn way\n", "In leaves n thet the pnd worn akep had tre th \n", "----\n", "Iteration: 12000 -- Cost: 0.672\n", "----\n", " get as I cops the passing thing in she beate\n", "Talen that morrent ing hanted wear;\n", "Though as for that the passing there\n", "Had worn them really about the same,\n", "\n", "And both that morning equally lay\n", "In leaves \n", "----\n", "Iteration: 13000 -- Cost: 0.545\n", "----\n", " s firr ads that yh wanted waarn tookethea ben wood,\n", "And having pe traveled by,\n", "And that has marsing thirhaps the better claim,\n", "Because it was grassy and worked dged wanteigrlenges I corhabem\n", "The on to \n", "----\n", "Iteration: 14000 -- Cost: 0.471\n", "----\n", " nt be teelsherhept the passing that good the one less traveled by,\n", "And that has maisith\n", "And that has hais len bead wink on tood,eaubour air,\n", "And wayd\n", "Iges hadcld told,eduslero boad way lec way!\n", "Yet Tw \n", "----\n", "Iteration: 15000 -- Cost: 0.426\n", "----\n", " s mareass woore in way\n", "\n", "And both that morning equally lro the und th tond woollin with a sigh\n", "Somewhere ages and ages hence:\n", "Twough as fan traveled by,\n", "And that has mar ing this with a sigh\n", "Somewhere \n", "----\n", "Iteration: 16000 -- Cost: 6.138\n", "----\n", " ing and ages hence:\n", "Two roads diverged in a yellow wood,\n", "And sorn th wbeagel and way,\n", "I doubted if I should ever aly Ih th w ynl and both that marling there\n", "Had worn them really about the same,\n", "\n", "And b \n", "----\n", "Iteration: 17000 -- Cost: 2.683\n", "----\n", " s mas merhith t morning equr way I could\n", "Tore and aing ing thass ond both thet the better claim,\n", "Because ing thaveler bing pass anin b abe the und worn woorsing horn lookst heages\n", "Tokept the first for \n", "----\n", "Iteration: 18000 -- Cost: 1.299\n", "----\n", " d looked bout the pes it pe that morning equally lay\n", "In leaves no step had trodden black.\n", "Oh, I kept traver dtep had traveled by,\n", "And that has marsithit the one traveler, long I stood\n", "And looked down \n", "----\n", "Iteration: 19000 -- Cost: 1.071\n" ] }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4xLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvAOZPmwAAHLFJREFUeJzt3XmYXHWZ9vHvk04Iq1lICJiAzSoGlC0gCAoIsiRodEQEfQURZRZ4X5kZ5YrKDIyYlyAjIC5sCoQZJGwK0bCFDCRAIKGBEBKWrJ2NLJ2E7El3uvuZP+p0qO5U9an91Dl1f66rrj71q7M8dbr67lO/s5m7IyIiydUj6gJERKS8FPQiIgmnoBcRSTgFvYhIwinoRUQSTkEvIpJwCnoRkYRT0IuIJJyCXkQk4XpGXQDAgAEDvL6+PuoyRERi5fXXX1/t7gPDxquKoK+vr6ehoSHqMkREYsXMFuUynrpuREQSTkEvIpJwCnoRkYRT0IuIJJyCXkQk4RT0IiIJp6AXEUm4WAe9u/NIwxJaWtujLkVEpGrFOuifmrWCHz86k19PmhN1KSIiVSvWQb9+63YA1mxqibgSEZHqFeugFxGRcAp6EZGEU9CLiCScgl5EJOESEfTuUVcgIlK9EhH0IiKSXSKC3izqCkREqldo0JvZ/mb2vJm9Y2azzeyHQXt/M5toZnODn/2CdjOz28xsnpnNNLNjy/0m1HUjIpJdLlv0rcC/uvtQ4ETgCjMbCowCJrn7ocCk4DnAucChweNy4PaSVx3QhryISLjQoHf35e7+RjC8EXgXGAyMBMYGo40FvhoMjwTu95RXgb5mtl/JKwe0IS8iEi6vPnozqweOAaYBg9x9efDSCmBQMDwYWJI22dKgTUREIpBz0JvZnsBjwFXuviH9NXd38tzANrPLzazBzBqamprymfSjeRQ0lYhIbckp6M2sF6mQf8Dd/xw0r+zokgl+rgralwH7p00+JGjrxN3vcvdh7j5s4MCBhdYvIiIhcjnqxoA/Au+6+81pL40HLgmGLwGeSGu/ODj65kRgfVoXj4iIVFjPHMY5GfgO8LaZzQjafgqMAR42s8uARcAFwWtPAsOBecAW4NKSViwiInkJDXp3f4ns3eFnZBjfgSuKrEtEREokEWfGiohIdgp6EZGEU9CLiCRcIoLedY6siEhWiQh6ERHJLhFBbzpHVkQkq0QEvbpuRESyi3XQ64YjIiLhYh30uuGIiEi4WAe9iIiEi3XQq+tGRCRcrINeRETCKehFRBJOQS8iknAKehGRhFPQi4gknIJeRCThFPQiIgmXiKDXGbIiItklIuhFRCS7RAS9zpAVEckuEUGvrhsRkexiHfS64YiISLhYB71uOCIiEi7WQS8iIuFiHfTquhERCRfroFfXjYhIuFgHvYiIhIt10KvrRkQkXKyDXkREwsU66NVHLyISLtZBLyIi4WId9OqjFxEJF+ugFxGRcAp6EZGEU9CLiCScgl5EJOFCg97M7jGzVWY2K63tOjNbZmYzgsfwtNd+YmbzzOx9Mzu7XIWn00GWIiLZ5bJFfx9wTob2W9z96ODxJICZDQUuBI4Ipvm9mdWVqlgREclfaNC7+xRgbY7zGwmMc/dmd18IzANOKKK+nOggSxGR7Irpo7/SzGYGXTv9grbBwJK0cZYGbWXRcWbsvKZN5VqEiEjsFRr0twMHA0cDy4Ff5TsDM7vczBrMrKGpqamgIj5Ytw2ANxevK2h6EZFaUFDQu/tKd29z93bgbj7qnlkG7J826pCgLdM87nL3Ye4+bODAgYWUoZ2wIiI5KCjozWy/tKdfAzqOyBkPXGhmvc3sQOBQYHpxJXZTR7lmLCKSID3DRjCzB4HTgAFmthS4FjjNzI4mtVHdCPw9gLvPNrOHgXeAVuAKd28rT+lgSnoRkVChQe/uF2Vo/mM3448GRhdTlIiIlI7OjBURSbhYB70uUywiEi7eQa+cFxEJFeugFxGRcAp6EZGEU9CLiCRcrINeXfQiIuFiHfQiIhJOQS8iknCxDnodXikiEi7mQa+kFxEJE+ugFxGRcAp6EZGEU9CLiCScgl5EJOEU9CIiCRfroNdBNyIi4eId9LoIgohIqFgHvYiIhIt10KvrRkQkXKyDXkREwinoRUQSTkEvIpJwCnoRkYSLddBrX6yISLh4B72SXkQkVKyDXkREwsU66HVmrIhIuHgHvXJeRCRUrIPePeoKRESqX6yDfpeesS5fRKQiYp2UQ/rtFnUJIiJVL9ZBLyIi4WId9OqjFxEJF+ugFxGRcAp6EZGEi3XQq+dGRCRcrINeRETChQa9md1jZqvMbFZaW38zm2hmc4Of/YJ2M7PbzGyemc00s2PLWbyIiITLZYv+PuCcLm2jgEnufigwKXgOcC5waPC4HLi9NGWKiEihQoPe3acAa7s0jwTGBsNjga+mtd/vKa8Cfc1sv1IVm6G2cs1aRCQxCu2jH+Tuy4PhFcCgYHgwsCRtvKVB207M7HIzazCzhqampgLLEBGRMEXvjPXUZnXem9bufpe7D3P3YQMHDiy2DLa0tBY9DxGRJCo06Fd2dMkEP1cF7cuA/dPGGxK0ld3/f/LdSixGRCR2Cg368cAlwfAlwBNp7RcHR9+cCKxP6+IpufSvEVPnrynXYkSkBO55aSG3Pjcn6jJqUs+wEczsQeA0YICZLQWuBcYAD5vZZcAi4IJg9CeB4cA8YAtwaRlqzmhB0+ZKLUpECvDzv70DwFVnHhZxJbUnNOjd/aIsL52RYVwHrii2qFwtX7e1UosSEYmtWJ8Z+8bidVGXICJS9WId9LpnrIhIuFgHvYiIhIt10HfdoH953upI6hARqWaxDvoBe/bu9Pzbf5imyyKIiHQR66D/0dmf3KntzSXaQSsiki7WQb9rr7qd2v7u91MjqEREpHrFOuhFRCScgl5EJOEU9CIiCaegFxFJOAW9iEjCJTLoH3ptcdQliIhUjUQG/d0vLoy6BBGRqpHIoG/X2bEiIjskMuh1ExIRkY8kMuilOv3Xq4u44M5Xoi5DpObEPuivOP3gjO1t7eq+qTb/9vgspi9cG3UZIjUn9kFfl+XuI3dMnl/hSkREqlPsg/64+v4Z25d+qPvJiohAAoL+1MMGZmx/cPpiNje3VrgaEZHqE/ug786EmcujLkFEJHKJCPoD+u+esf3qx2ZWuBIRCfPM7BVRl1BzEhH0u++y8w1IOqzd3FLBSkQkzBUPvBF1CTUnEUHfb/ddsr527PUTK1iJ5GLVhm1RlyARatWhzxWXiKCXeFm8dkvUJYjUlEQE/dEH9I26BOnGS3NXs37L9qjLEKlZiQj6f/3SYd2+PnXe6gpVIl1t297G//njNC65d/qONn1xF6msRAR9z7ru38a3/jCN1rZ2rn1iFsvX60SqKMxYsi7qEkRqViKCPhfXPD6Lsa8s4upHdciliNSWxAT9Pnv17vb1ca8tAaBpYzMtre2VKElEpCokJugf+P5ncxrvvRUbOeyap3hxblOZKxKRbK4a92bUJdSUxAR9vz2yH0ufyZQ5Cvqo6AZg8viMD6IuoaYkJugLsWZTM00bm6MuI9EU6iLR6xl1AaVSSKAc94vnAGgcM6LE1YiIVI+a3aLXlqaI1IrEBH3/PPvoJTrb23TUk0glFRX0ZtZoZm+b2Qwzawja+pvZRDObG/zsV5pSu1fXw7hmxKdyHr9Nm/SRuebxWVGXIFJTSrFFf7q7H+3uw4Lno4BJ7n4oMCl4XhH79tk153HvfbmxfIVItxau3hx1CSI1pRxdNyOBscHwWOCrZVhGRr17Zr8uvYhIrSo26B141sxeN7PLg7ZB7t5xD78VwKAil5GzLx6+T6UWJTlyXcJMJHLFHl55irsvM7N9gIlm9l76i+7uZpbxLz34x3A5wAEHHFBkGSl1PYxjDujLm4t1AS0RkQ5FbdG7+7Lg5yrgL8AJwEoz2w8g+Lkqy7R3ufswdx82cODAYsro5Nuf/UTJ5iUi5dOuO01VTMFBb2Z7mNleHcPAWcAsYDxwSTDaJcATxRaZj68fOzjvad5c/GEZKqms9Vt1Yw+Jl22tbVGXUDOK2aIfBLxkZm8B04EJ7v40MAb4kpnNBc4MnleMmYXeiKSrSnb1NG1s5r0VG0o6zydmLOOo/3iWWcvWl3S+IpIMBQe9uy9w96OCxxHuPjpoX+PuZ7j7oe5+pruvLV25ubni9EPyGv/W5+bs1NbW7oyd2khzaxvuzrbtxW993P7CfI4f/Rzn3Ppi0fNKNzm4QNt7KzaWdL4ikgyJOTM2XY8exrVfHprz+Bu2te7U9tgbS7l2/Gx+//x8xk5t5PB/e5qVG7YVVdeNT78XPlLC6Lw0keglMugBLj35wKKm3xSE//qt23nirdQlVZd+uKXoukREKi2xQV9KHQcHmFm0hYRwbT6LSAaJDvqLT8r9UMv6UROoHzUh4yFfHQHao0qD3qjOukSkOiQ66P/jK0fkPc3itanumfS4b98R9KWoSkSkshId9IV0tYx/a+dbnLUHV9XtukU/df5q2nTSh4hUuUQHPcCDPzgxr/FvnjiH+U2bOrV1bNGn5/zL81bzrbun8bvn5xVdY7F0PRkR6U7ig/6kg/fOe5ozfjW50/OO49OXrN26o63jUMtCL7m7uXnnQzqLVY07i/UvSCR6iQ96gO9+rr4k81m2bmv4SDm6/m/vlGxeIiLdqYmgv2bEp/jyUR/Pa5qOIE7fSC7l9vLri+J/fR2RYjRtbI66hJpRE0Hfs64Hv7noGJ77ly+UbJ7FHrKuCzpJrfvxIzOjLqFm1ETQdzhkn73ynib9qJpMXeCFbuWX8tymdz7YEMxTPeJSvQbsuUun59rYqZyaCvpC3P/Koh3DmUK90GgtZSbrYmYSD53/gmYu1dVWK6Xmgn7hDcO5/AsHFTRtKY9qKeWO3Q5zV20KH6nC9C1DJHo1F/Rmxk+Hf6qgaddubtl5fsUWVEJ3TVnAf7+6KHxEkUjon35Uai7oO8wbfW7e0/x60twdw9X6kb3m8VlRlyCSkb7cRadmg75nXQ9Gf+3IqMuoWRu26daHtaZOF4uKTM0GPaRuJL5fn13zmmbc9MW0t3tVddnEUUtre9QlSIVV69Vfa0FNBz1A7575rYJRf36bR99YWtWHhv38rzrrVqrPms06QSoqNR/0rQVcffLqR2fys7+k+sL/OvMDrn70rVKXVZR7Xl4YdQmhbnyq9m6rWOu2tyWnk37b9jY2xqj7seaD/vqRR3LggD0YuFfvgqbf3uY83LC0xFUlR7Y/7Ude1zqT+Dr9P1/g09c9G3UZOav5oD/98H14/ken8f1TirvHrIjUjuXrt0VdQl5qPug7FPulsuMyBACbmls565bJvK0z/0R26Lt7r53aHm5Yogv8VYCCPnDQgD2Kmn74bS8y8Z2VtLc7DY1rmbNyEzc9+3630yxes6WoZYrESab7MV/96Ey+fvvUCKqpLQr6wFlH7Fv0PH5wfwMPTFu040JoPUOOG5703sqil1nttuswSgkkZ1ds/Cjo0yy8YXjR81i2bhuT5zQB8D/vrer02v79d+v0vBaOKi70DlySQEr6yCjo05gZt3zzKO699PiC5+Huna54efsL8z+af5dob66Brd3uDl9ds0nHVdcS5Xx0FPRdfO2YIZz+yX0Knv7OKQs6Pb/x6ezHi99QA8eSt3UT9L+Y8G4FK5Go6Uqm0VHQZ/HWtWeVbF6zltXu0TfdbdG36w+/pjgwuO9uoeNJ6Snos+izWy8Gfaywk6i6Ou83L/HK/DUZX5u+cG3o9O8u35DxiIXuVMu1ZNraq6MOiZ479Ntj50MspfwU9N14/IqT+cZxQ3jwBydyQn3/ouZ10d2vsnjtzodTXnDnK2xpac063ZuLP+TcX7/IXS8uyDpOJk/PXpF3jeXQ2s1p791160jytLtz8sEDoi6jJinou7Ffn9246RtHcdLBe/PwP5xUtuUM/fdnmBIcqdPV1OCbwFtL1uU1z3y/AZRLephfP/KITq/9bebySpcjEXKojUPNqpCCPg/fHLZ/2eZ98T3TOemGSdw8cU6no1FueiZ10tWm5sxb/XNWZr5fbLX0f6f30X/npProCpHo+c5HnsVd/agJrNpY/ZdDUNDn4cbzP8Pc0edy2icHctJBe5d8/svXb+O2SXM57hfPMXX+auY3fXQP2Bfnrs64U3fy+5m/CVRLt0ir+ugl4DjZLkm/OcuGTBy8trD6L+GgoM9Tr7oe3HfpCdz3veM5/7ghBc3jT9//bOg437p7Gmf8anKntvN+8xIfbm5hSVpff7Y/nDfz7Oopl+bt3Qf9B2W4SXq+6kdN4EePVNelppPIPdVzc1mGCwgece0zLP0wnpcEmf1B9R9Vp6AvUO+edfznN47iyf/3ed6+7iym/Pj0nKf93CGF75A65vqJfP6Xz/OP//06H25uyXp7tj9NW1zQ/D/c3ML4tz4ouL6uwk4K+9yY/ynZsorxqC6bXHZO6i5TPzn38IyvP/TaksoWVCK/f2H+jrPhq1XPqAuIu6Ef/xgAe+3ai8YxI1i3pYWjfz4xdLpTDxtY1IfjqVkreGpW90fW1I+awKCP9eZ7Jx/IyKMHs28Ot0085vpU7SfU989p/DC5HOZ52X2vcffFw+hR4XuKbtveVtCNZ6Qw7Z7quulZl3n7smlj+c6UnrdqE+Acss9eZZn/4jWbgYFlmXcplG2L3szOMbP3zWyemY0q13KqTd/dd6FxzAgax4zgDxcP2+n1c49MXTztviIus5CPlRuaueGp9zjxhknUj5qw4/GV377E3VMW8OqCNSxcvZmX5q7m1Jue3zHdHZPndzPX3LW0dQ76v/3fU3YaZ9J7qzjop09y1i2TWVbBrpwv3TKZI699ZsfzJ2Ysq9iya1FH1002415bUrazZ8+8eTJn3jylJPPK9DmZnXaZ8mpUli16M6sDfgd8CVgKvGZm4929pm5meubQQTSOGYG7M/uDDTRtaubUQ1P/9c2MxjEj+O6903khyw7VfP3y/M9w9aMzcxp35tL1zOzmevn3TW3kvqmNOc3rrKGDaG139tmrN73qevDCnFWcPXRf/vDSR7c07NiXcOTgPnzx8H12uuAbwJyVmzg5Q1fOofvsyQkH9ufo/fty4IA96LfHLuzaq47de9XRs87o2aMHu/bqQcfGuUHotwN3Z8nazv9UfjhuBiOPHpzTe5b87Dg4IPggXHn6Ifz2+Xk7jXf2rVMY/bVPM3/VJhzot3svDhu0F/1234W+u/fCCrjB+Pa0jY22du/U3enu3DllAd84bgh779mbVRu3ccLoSdx76fFZL4WSfv2qDuNeW8KYr38m79oqxcrxH9TMTgKuc/ezg+c/AXD3GzKNP2zYMG9oaCh5HXGyfst2bp00h3tfbix4Ho1jRjBu+mJG/fnt0hVWQo1jRuwYrh81IcJKJCqHDdqTZ//5VBat2cypN70QdTmdnPeZ/TKe23Hh8fuzdnML5x83hCMG98m4MVKMyT8+jU/sXdj9MMzsdXffueug63hlCvrzgXPc/fvB8+8An3X3KzONr6DPjbuzbst2HntjKX+atpgFwSWAP95nV14e9cUdWzvL12/lm3dmPhM3Kp8e3Ie/dum2mbVsPef95qWIKpKodPzDb2hcy/l3vBJxNdUhfSMoH1Uf9GZ2OXA5wAEHHHDcokWLMs5LotfS2k5LWzvbW9sxgzWbW9i0rZV2d7a2tLFu63ZWb2pm47ZW1m/dTktrOx9uaWHXnnXs22dXPj24D2cOHVTQsjc3t7JxWyubW1rZ0tzGpuZW1m9tYUtLG6s3NbNuy3Y2bmvl/ZUbwWHR2s2s2dRCuzu57Gf97ufqGdJvN9xh9JO6mma5vXXtWfTZrfP1btydDVtb+evMD3hm9gpenLs6oury89Phh/PcO6uY3hh+varuXHpyPdd++YjwETOIOujVdSMiUma5Bn25jrp5DTjUzA40s12AC4HxZVqWiIh0oyxH3bh7q5ldCTwD1AH3uPvscixLRES6V7YTptz9SeDJcs1fRERyo0sgiIgknIJeRCThFPQiIgmnoBcRSTgFvYhIwpXlhKm8izBrAgo9NXYAUI2n0lVrXVC9tamu/Kiu/CSxrk+4e+j1kasi6IthZg25nBlWadVaF1RvbaorP6orP7Vcl7puREQSTkEvIpJwSQj6u6IuIItqrQuqtzbVlR/VlZ+arSv2ffQiItK9JGzRi4hIN2Id9JW+AbmZ7W9mz5vZO2Y228x+GLRfZ2bLzGxG8BieNs1PgvreN7Ozy1W7mTWa2dvB8huCtv5mNtHM5gY/+wXtZma3BcueaWbHps3nkmD8uWZ2SZE1fTJtncwwsw1mdlUU68vM7jGzVWY2K62tZOvHzI4L1v+8YNqcbm6apa6bzOy9YNl/MbO+QXu9mW1NW293hC0/23sssK6S/d4sdQnzaUH7Q5a6nHmhdT2UVlOjmc2IYH1ly4bIP2NA6u4ucXyQuvzxfOAgYBfgLWBomZe5H3BsMLwXMAcYClwH/CjD+EODunoDBwb11pWjdqARGNCl7ZfAqGB4FHBjMDwceIrUfbRPBKYF7f2BBcHPfsFwvxL+vlYAn4hifQFfAI4FZpVj/QDTg3EtmPbcIuo6C+gZDN+YVld9+nhd5pNx+dneY4F1lez3BjwMXBgM3wH8Y6F1dXn9V8C/R7C+smVD5J8xd4/1Fv0JwDx3X+DuLcA4YGQ5F+juy939jWB4I/AuMLibSUYC49y92d0XAvOCuitV+0hgbDA8FvhqWvv9nvIq0NfM9gPOBia6+1p3/xCYCJxTolrOAOa7e3cnxpVtfbn7FKDrPd9Ksn6C1z7m7q966i/y/rR55V2Xuz/r7q3B01eBId3NI2T52d5j3nV1I6/fW7Al+kXg0VLWFcz3AuDB7uZRpvWVLRsi/4xBvLtuBgNL0p4vpfvQLSkzqweOAaYFTVcGX8HuSfu6l63GctTuwLNm9rql7scLMMjdO25rvwLouHFrJevqcCGd/wCjXl9QuvUzOBgudX0A3yO19dbhQDN708wmm9nn0+rNtvxs77FQpfi97Q2sS/tnVqr19XlgpbvPTWur+Prqkg1V8RmLc9BHxsz2BB4DrnL3DcDtwMHA0cByUl8fK+0Udz8WOBe4wsy+kP5isBUQySFWQf/rV4BHgqZqWF+dRLl+sjGznwGtwANB03LgAHc/BvgX4E9m9rFc51eC91h1v7cuLqLzxkTF11eGbChqfqUS56BfBuyf9nxI0FZWZtaL1C/yAXf/M4C7r3T3NndvB+4m9ZW1uxpLXru7Lwt+rgL+EtSwMvjK1/F1dVWl6wqcC7zh7iuDGiNfX4FSrZ9ldO5eKbo+M/sucB7w7SAgCLpG1gTDr5Pq/z4sZPnZ3mPeSvh7W0Oqq6Jnl/aCBfP6O+ChtHorur4yZUM386vsZyzXzvxqe5C6DeICUjt/Onb0HFHmZRqpvrFbu7Tvlzb8z6T6KwGOoPNOqgWkdlCVtHZgD2CvtOGppPrWb6LzjqBfBsMj6LwjaLp/tCNoIamdQP2C4f4lWG/jgEujXl902TlXyvXDzjvKhhdR1znAO8DALuMNBOqC4YNI/aF3u/xs77HAukr2eyP17S59Z+w/FVpX2jqbHNX6Ins2VMdnrNg/4igfpPZczyH1n/pnFVjeKaS+es0EZgSP4cB/AW8H7eO7/EH8LKjvfdL2kpey9uBD/FbwmN0xP1J9oZOAucBzaR8YA34XLPttYFjavL5HamfaPNLCuYja9iC1Bdcnra3i64vUV/rlwHZS/ZuXlXL9AMOAWcE0vyU4GbHAuuaR6qft+IzdEYz79eD3OwN4A/hy2PKzvccC6yrZ7y34zE4P3usjQO9C6wra7wP+ocu4lVxf2bIh8s+Yu+vMWBGRpItzH72IiORAQS8iknAKehGRhFPQi4gknIJeRCThFPQiIgmnoBcRSTgFvYhIwv0vuLP1FEReYjYAAAAASUVORK5CYII=\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "parameters = Model(data, seq_length, learning_rate, char_to_ix, ix_to_char, num_of_iterations)" ] } ], "metadata": { "kernelspec": { "display_name": "Python [conda env:ML]", "language": "python", "name": "conda-env-ML-py" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.5.4" }, "widgets": { "state": {}, "version": "1.1.2" } }, "nbformat": 4, "nbformat_minor": 2 }