{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# View mnist.pkl.gz" ] }, { "cell_type": "code", "execution_count": 8, "metadata": { "collapsed": true }, "outputs": [], "source": [ "import matplotlib.cm as cm\n", "import matplotlib.pyplot as plt\n", "import gzip\n", "import pickle" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "collapsed": true }, "outputs": [], "source": [ "%matplotlib inline" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "collapsed": true }, "outputs": [], "source": [ "with gzip.open('mnist.pkl.gz', 'rb') as f:\n", " train_set, valid_set, test_set = pickle.load(f) #mnist.pkl is a tuple with length 3" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "collapsed": true }, "outputs": [], "source": [ "train_x, train_y = train_set" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "array([[ 0., 0., 0., ..., 0., 0., 0.],\n", " [ 0., 0., 0., ..., 0., 0., 0.],\n", " [ 0., 0., 0., ..., 0., 0., 0.],\n", " ..., \n", " [ 0., 0., 0., ..., 0., 0., 0.],\n", " [ 0., 0., 0., ..., 0., 0., 0.],\n", " [ 0., 0., 0., ..., 0., 0., 0.]], dtype=float32)" ] }, "execution_count": 11, "metadata": {}, "output_type": "execute_result" } ], "source": [ "train_x" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "array([5, 0, 4, ..., 8, 4, 8])" ] }, "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "train_y" ] }, { "cell_type": "code", "execution_count": 13, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "50000" ] }, "execution_count": 13, "metadata": {}, "output_type": "execute_result" } ], "source": [ "len(train_y)" ] }, { "cell_type": "code", "execution_count": 16, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "50000" ] }, "execution_count": 16, 
"metadata": {}, "output_type": "execute_result" } ], "source": [ "len(train_x)" ] }, { "cell_type": "code", "execution_count": 17, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "784" ] }, "execution_count": 17, "metadata": {}, "output_type": "execute_result" } ], "source": [ "len(train_x[0])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "由此可以看出,train_x是$50000\\times 784$的矩阵,表明有50000个样本,每个样本长度为$784=28^2$,是像素尺度为$28\\times 28$的图象。" ] }, { "cell_type": "code", "execution_count": 18, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 18, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWEAAAFfCAYAAACfj30KAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJztvX2MfF1W1/vd3dVV1b/+veFgniFyI4OjEWIkOihOcGS8\nY4LMH4D/QNBkHI0xiNwYEnVCMnFGxoSIwYwB54YY7yBRSCYBL2iYebggvowIoyjKS3Ay3BkB4Xkc\neGae30vXW3dv/+he9Xxr1dr7nOqu6lNV/f0kO2efU9V19qnu/p511l5r7ZRzhhBCiG446HoAQghx\nl5EICyFEh0iEhRCiQyTCQgjRIRJhIYToEImwEEJ0iERYCCE6RCIshBAdIhEWQogO6XU9gJTS6wB8\nJYBPARh3OxohhFgLQwBfAODFnPNv1964MRFOKf1VAH8dwOsB/FcA/1fO+T8Gb/1KAP9sU+MQQogO\n+XMAvr/2ho24I1JKXw/gOwG8B8AfwqUIv5hS+tzg7Z/axBiEEGIL+FTTGzblE/4WAN+Tc/6+nPMv\nA/hGAKcA/mLwXrkghBD7SqO+rV2EU0pHAN4E4CfsWL4s1fbjAN687vMJIcQuswlL+HMBHAJ42R1/\nGZf+YSGEEFcoRE0IITpkEyL8WwDOAbzgjr8A4KUNnE8IIXaWtYtwznkG4GcBvM2OpZTS1f5Prft8\nQgixy2wqTvjvA/jelNLPAvgYLqMl7gH43g2dTwghdpKNiHDO+UNXMcHfhks3xM8B+Mqc86c3cT4h\nhNhVUtcLfaaU/jAu3RdCCLFvvCnn/J9rb1B0hBBCdIhEWAghOkQiLIQQHSIRFkKIDpEICyFEh0iE\nhRCiQyTCQgjRIRJhIYToEImwEEJ0iERYCCE6RCIshBAdIhEWQogOkQgLIUSHSISFEKJDJMJCCNEh\nEmEhhOgQibAQQnSIRFgIITpEIiyEEB0iERZCiA6RCAshRIdIhIUQokMkwkII0SESYSGE6BCJsBBC\ndIhEWAghOkQiLIQQHSIRFkKIDpEICyFEh0iEhRCiQyTCQgjRIRJhIYToEImwEEJ0iERYCCE6RCIs\nhBAdIhEWQogOkQgLIUSHSISFEKJDel0PQIhdI6XUauv7q3x2m+M55/m2qW+fwWOMxt009lWvJ4LH\n13TMHy9d2y4jERY7y6ZEos15U0o4ODgobq3fVpCj9zVtLy4uWjceV1OLxLom3m0x4cw54+LiYmHf\nH7O+vw5/bB/EWCIsdo6SsN3EAl31/IeHhzg4O
KhurV8bfzTukvD54+fn5zg7O1vYRsfOzs5weHiI\nXq83H5ff5z4LMd9cov02sPXa9qZh11Jr9h3sOhJhsVO0tRJvQ4RNvEzAeMui5sdT6/vmBZDb2dkZ\nZrNZ2M7OznBwcIDZbIacM3q9Ho6OjubbUt/G7S16bylbv0RknXpxbRLbs7Oz+TVa35qdw6zhXUYi\nLHaC0uN6za+5KSE2EfbiFYmbCVo09mhbc3H47WQywXQ6xXQ6XehbA14TKhtTv99vbGzRc/PH7AbD\n1ATRW+e1/tnZGabTKWaz2Xw7m82WXDG34X7aNBJhsdXUJqqafJebgq3gfr+/IG6+3+v1ijcLv2Xr\nt+Sr5f3xeDxv/X4f4/EYvV5vbqGaAJ+fn89FeDAYLLThcLi0zy4Ldq1Erhb+niMB5mMmsmbZ1rZ8\nc+n1ephMJvNz5ZwXXBK7ztpFOKX0HgDvcYd/Oef8xes+l9hvShNvbXymmxRhEyIWtn6/Pxcy7h8d\nHS2MMerzsZLVGR0bjUY4PT1Fv9/HaDRa8EGbAJuo2Q2j3+9jOBxiOBzi+PgYx8fH875tvXul5Edm\nK79JgAEsCKxZtqX+ZDKZ31T8eey6/E1gV9mUJfwLAN4GwL6hsw2dR+wpbf+5an7UTXFwcDD3+5rl\naxalCZzt9/v9laINvMVZ2z5//nzB4ubrNgt4Npst3TCGwyHu3bs3b8fHxwv77Bv2Pm+/n1JaEtvS\nvrkYai4UazUBNsue3RO7zKZE+Czn/OkNfba4gzRZvaUJrE1gQmh+X7N82Zq0xiLcxoIvuQF8Ozg4\nmAuwj8JgC9he9yJswntycoL79+/j5ORk3vr9/oLgmij7dnR0tCTCNUE2C7dNOzo6WripeMve+9p3\nmU2J8O9NKf1PAGMA/wHAt+acf21D5xJ7hv/nauOWKAnyJmB3BLseIgtzOBy2dp+klFq5AKyxALMF\nzI/9k8lkYaxmtR8fH88F+MGDB/PtgwcP5iJci6JgfzdTEuScM2azGcbj8dzVwM0fY1+6F2ATYbkj\nyvw0gHcC+O8APg/AewH825TSH8g5P9/A+cQdI/Kj+n4tfOqmsDiyO8KsX7MwT05OlkS46eZRCnUr\n+WkjobLHfp6oK7kjTHwfPnw4b+bL9s0sb26libmob2MajUYYjUZh31w4dm08Ccf+YolwhZzzi7T7\nCymljwH4HwC+DsAH130+cX1u8gdcsk7XcazW9xNYtf6m/kF7vd6C4HLjx/r79++3EmEeb5MAswhz\nnC3H1HJ0gQkf+6pt7DxOaw8ePJiLsBdd3re+n5irpSRbpEMpKYSz5c7OzuZuCXaH3Mbv97bZeIha\nzvnVlNLHAbxx0+cSi7DF6I/5ftNn+M9resT2x5reUxpzJMKrpN9ugl6vtyS+pdbWEgZec3O0cUWY\nO8LcIcPhcB5dYDG3nMhgroaHDx/OBZfHyBOJLHyR1e2F1/pN9Sv8DcPfNPzNgxNPrO1TurKxcRFO\nKd3HpQB/36bPJV6jJHS+H/1c7fNqj/+112v9aEylbS1xwPc3JcKHh4cLk2/czBdsbTAYLH1/tRtT\nKR43ippgn/RwOCwK8MHBAU5OTua+X5uIY781h9T56Aj+bv1N04stN388StaoCbEXYM608+fZZTYR\nJ/z3APwLXLogfheAvw1gBuAH1n0uEdP0j+5ft/dEn8O0tUBrCQZR4zFGfT7GQhuFbnF/kyLMj/e1\nZtERfA21302bLDXrm2AOBoNQgO3zDg8Pl1wPPHloNwufZOKjMfj3BWB+jlpRHi/CXoB96nVkCc9m\nswUBtrYvbMIS/nwA3w/gdQA+DeCjAP5Yzvm3N3Au0UBkcXqLxotVSZzbxq+23bJYNrkwrN8UuhUl\nEqybw8PDpaSMUuv3+wvfY9MTSunmFe2bJTybzTAYDEIL2L4v9ldbny1hu2FwunUkwJElHAlwtF/y\nX5d82d4aj
izhfWATE3PfsO7PFKtTEl6/tffyz/m+bSOhq/ksVxHLto/rPoKgNI4ofGpdWIxuU/MR\nBLUtu1vauHIAzH3CJlAsTOzasInEyGft3REWnhY9YbRxRXC5SS/GTe6IKHMuEuB98wurdsQeEllX\nJcvKv5+3/lg0a9+m3xR2VfMPl3ymTeewz90E7ArguNmo9Xqv/YutIsY1F5K9h8PO2AK23y0nVZR8\n2CzCPvuu5F6KLOGoBrDv1ybmSlawLGGxszT5Gn0EQZMA2+NvLYuq7TH/WkloInGOBD1qmxRh/11E\n6by8X/te/bHo9dL7jo6Oqi4I9hlzplzJf+1jdGtPT0DZEi4VYG/jE/YTc+zrZktYIiy2npIARz4+\ne7/f+mPe6ouswNp7Sq9FYlsT4bZiv0kR9j5T72LhY/5na/urcH5+jn6/P58g43FxNbfpdFqtnuYt\nYb5x1CxxYDkqoqlQu3dHRD5h3paiI/ahjrAhEd5Dokfa0qx77VHYf1ZT4H50rM2+t7RKIswWHgtv\nabtJEY6+09Kx6HdTOlZKeoi2R0dHSy4Is8Q5RXk6nc73eUKxdCz6/XPftpH4lsQ4KtZeSzCJhDgS\n4H0QYonwnuKFgvtsrdXEN3JHtCkMHr2n9HNc+NwLb9SvWdb+2KZEmL+T6IYX7belTbytf837gCMr\n098g/Y2Tt03jq403Et9oBY3aqiA1d4RNQMoSFluFt1b84zFHDvjJK/uZNmLC4smWVLTvW+11f6Pw\nwsvHmibCuG1ShNvCsbQA5v3SthZhYLGx9j52QbAg8Q3Xu2v8DcvfkP3Y+Xz+GIAFkWWxjY5ZQaFa\nCUt+3S/XxJ8nn7DohJKbgPv2j1fyx/pj/Bk1K9jcEaUVJJqsK7/lfsn6jfb9pFctm2sbYAEGmt0M\nkQVZE7looc9o4U8ra2kF3r0Pnfd5rDVr3MZbEl7/2ng8xrNnz/D8+fP51trp6SlOT0/nFdWsRRES\n+zY5JxHeEaLJkVIixiougrauCO8TjlpUZatNGFdNhCOf8C4J8aoiwYkN0arJJdEtLZRp/V6vNxfg\npsiStq4QHmubG8dkMlkQXS/AVk3NhLjkltgnAQYkwjtD9Ige9S2bq2ktMZ6E4c/nfsknfJ1t7Vjb\nG4z3fZYEeJtE2FvCNTipobYeW+QnbVrBuOaS8seaRLetCHtLeDKZzAU3alzW0qxgX5RoH2OFJcI7\nBAtuqfV6vYVVHrh8oe8PBoNGC5j7beJiV31PbYKwZg1HIWGRX7NrrmsJ8+SajxYwCzFyBZSs0FUy\nGEuiG/XbuE/sPSbCJrbWIgE2EY6WvFeImuiEyAr2MamWyeWXsCltucyinaPWL1lPbful10qWd7Qf\nXXsto6trVrGEgUWfMEcL2OM5T1x5izOyQm3r/058LDMfazNBGCVi1M5/cXGB6XS6UMC9tp1Op0uh\nbKUwtV1HIrwD1ETICxyLMBdq4WLjtn98fLyyCDclJkTH/Ot+n8/TNI7I+i9Z09vAdSxhdkdw1IBf\nBqgUDhYdLyXrRN9lLf042i+Jr+9bgfnSkka8X3O5cHTEPgixRHiHiB7J/eN+ZAnzygncTk5OFj7X\n+nwsEsDoH7ntseh4dO7StuQP3wdLmC1QdkewOPFjey07zbfSHELULwmtb3bDaCvEtYU+fVSED0mL\nLO19EGBAIrwzeAHyQmyTXbzyL6+ma6sq2AoLVuCbP7upX/rHbfrHbjpmn8/X6vuRENcm8raFVYXC\nuyPYEmYf6unpadVC9a+1+c5sfxVx99Z3bd9EuLTsPffZ91tr+4BEeEcouSN81AGvtMAr6vJCjo8e\nPZoLMn92m35twqzpPbXmr7W03zSmbWNVn3AUHcHWooV2nZ6etvLZ2jb6nkrHbiK0tX1zr/isuGjr\nIyCiyAy5I8St490RPtjei3C0ou7jx4/x+PHjuRCv4o/d5Da61tr34N/T9Hl
dcZPoCJ6YY1fE6ekp\nnj171iqKgUXYqH1/AEIBrUU9RPvRsVKqcnScr4G/xyiLb9eRCO8IkTsisorZJeHdErzY4+PHj+ci\n3NYK3jV26Z+UBdMEzCxH9glzkkOThXhda9GL7U2E2IuwL9wTRUDMZrOd+t3dFInwjhAFzfMf/8HB\nQRjQHk2mRBYGPzab4Oacd1J8I/ifutQvvb/EKtZl28+P3BEla7hJeG8iwm0m2lZtPouv9nd5l5AI\n7xjRHzZbxb72akmQoz94Fo19E+BVHmub9oF6FIm16DusiQzfXH2yBvuF24gwX9t1RbjNRBz7nZv2\nfTq1f/0uCjAgEd4Z2ljCbA17i6P0R89WL7NvAhwJU02ca32gPFFp0R72nuhn/fj8WNlq5EksdklE\nIly73lWIhLQmvFE/2pbSrGUJi50hEmAW4pQuU4vbWMD+D35fRLfEqo/tNXEGlkP2bHtxcbEkxqXx\nlI57dwRbwuaOeP78+cJ1NfVX/a7aCCz/PfnXoz7/rcod8RoS4R3CC4YJL7ebuCNK7LJAR8JUEoqa\ny4J/ngXX0nw53tmORd9bTXxZrEyEo8k5c0fUxhi9tsp3tqrAlm5qNSGWO+ISifCOwP9Q9sdrMZ0m\nxgCubQkDZbEtCcq2Ez2i16y7NhYlizDXWTD4GH9vkcCUjnl3BFvDbUTYf/Z1RLgkpLXj0c/699f8\nxnfVGpYI7xD+DzqlhPPzcwCvCeh1LOFIKHZRdCNKQly7Ofn3+32O1Y4EwydotBFgHhtbwSUBNhH2\n11g7tsp3toq4tvnOSjfC6CZ415AI7wClP2Tvhsg54/DwsNWEHAtKJL67av1G1MTXvqM2wsLfmS0N\n5b87+z5LVl1t3wuxt4IjS7jNZ17n++LxrNJvOtYkzHdRiCXCO4QX4IuLi4XXALS2gL1IROK7L0Ic\n/cP7G1TNrxmJsPV9Kc5SCnUbaxhYjNGNoiM4Tvg6n7/Kd1bb3uRYkwV915AI7wiRkESvHx4etnZH\n2M/xthTfuqvUHoWjJ4WSddbm6YEn5ezcvOUxRfv2+bXoiMgSjq55HbQR9tK5mn62JtZ3DYnwjuHF\nF1gUGh8QX3NL2Oy+fYbfsiBv8h9kFcH3N4u2PxOFSPmbVBsBtqcQL8Lmn7c+R060FTN/c4gy5tga\nFvuBRHiH8K4C+6e1f3YWmNqkzvPnzzEYDOaLbLap+RtZeNyPLOq27brfhR+L3+fvoWlb8gtHYmw+\n4aZlnaKVQ6LMOhZxLmLjbxR3/bF9X5EI7wjeF2nHbGtiHFlRUaD/YDBYWiyzqd/WZ5pzbi3s6xLh\nSJR9Td5ai6zhkhBbUkxbEa6tZMHt4uKiusT7XY6l3WckwjuIFxwW4GhSx1vCttJyae230rHIr+pF\nyvq1hSR7vcs/u3UIcK3P3wEvn+NXchiPx/PokiZXRCTCvHp0SYjbLrDp11YruUvE/iAR3iG8H5L3\nffIGW4G+FGK/30ev18PBwUEoGKVjtWB7/5pfcsn67K5oSu1t+i6ixq/Zd8A3IfapcrPyiU1CXLOE\nm8S46XUAS0va7+viluI1JMI7SumfsSm8yYq/swizOET9o6Ojpcm9pr6d5+joCOfn5zg6OloS4E2I\nMAuovxFxooOtTmH96XS6kk+4JKzRvm/9fj88nlJaKnZeCqMT+4NEeIco/fOZMLAlHLkj7J+fJ4xY\naH0z8WRxjaIKon1e5cNbcJxtZmO/zvdQEkvre3eE+cOfPXs231qbTCathN3GWxNf37fvod/vzxvv\n26RbSmm+xE/NHSH2C4nwjsBixf+I/nhJhPv9Pkaj0cJkEYC5MPO2NCkUhb+V9geDwcLPA69ZwOba\nuI6glCYl/ZazztgdYeL79OlTPH36FE+ePMHTp08XRLgpcaNkCfMNjY8Nh8P5klO24on/jgHg8PAw\nnJiLYpnF/iAR3jFMBPw/oh0viTBbwBy
VYNbYbDabC7BZZ/4xmIUh6kevsQvCQrvOzs4W3BPX+Q5s\ny8LpRZirkHEd3mfPnuHJkyd48uQJXn31Vbz66qsYj8cr+4S96EYi3O/354uuTiYTHB8fL7ka7Pvp\n9XoS4DuIRHiHaBJg20YiHAnwxcXF3DJregRmEW6zTpgXYF6c1FwdNxGUmtvAW8KRO+Lp06f47Gc/\nO2+j0aiVK8K7I5q2/X4f9+7dw2Qywb179xYEmD/PbqDmjuD3SYj3G4nwjlHzCwMIRdi7IOwfmZMD\n/CNylBzgV8at9dnPaZNwLMDsprju91ATX7OEI3fE06dP5xbwZz7zGbzyyisYjUaNbojIEo7El/v9\nfh/j8RgnJydLFrC/SeWcl9wR0U1R7BcS4R0m+oeM3BHeAo7e1xQKxZNGTW06nc5D5rwAm8vjJoLi\n3RGlkDlvCVskhFnCLMJ+uaAmd0RJdH2/3+/PY39ZTA1+QgCwJMKl2hZif5AI7yF+csqEtibGUaad\nCdhgMMBkMlkKn/Liy8f4MZuz7tglYVsrC+lTev0xuzaz4nlSkEXO+qenp/M44MlkMg/X4/FG4V9N\nzb43ngg9ODiY147w4/buoOiJxHzxr776Kp4+fYpnz57h9PR0nlTinzDE/iAR3jPYkuNJstlsNhfg\nKJoiKhhjAmzhVKXaC9ExAEspy9b8xFWpHKQfr/m9/cRbaUwmwpwdxyFgkdslEty233WpRkSTS8jG\ne3R0NI/asDhmG7+N21vSYveRCO8hkSVcE2AWMpuks8doC28zN0JpIs4LIoCqFcyP65Y4wu4LE1y2\nHHlCkW8cJlBs5U6n0wURY4uSr6HpUb8kxizAXLODnzLs53q9HiaTycJ1sQDbeI+OjhZil238dvOQ\nJbyfSIT3EG/hzmazJQH2j/RsBfu4YetHERCl/TYCfHR0NE/miIr8MLyShV0fj5ndDXbzsJA0E2B2\nR9QmICNBbhJiroTmf4Yt/ejmZ+Pq9XpzC5gtYYnwfiMR3kO8pRUJMAvB0dHRXHzNIoua97/W/LFe\nhCML2HzNFxcX8/dx3+CwvJILxRc+NxE2nzALcWki0ouw9fl79d+xCXD0/dvn8g0limEeDAYYj8fo\n9XpLNS3Mipc7Yn+RCO8h3h1Rc0FYgkBT5levt7iKc6mxCPsiPmz9mmAOBgPk/FrBHy++HL7lhZgF\nmOtDcLEiL8BtRTja8vdrWxuLf93E1yYoSy4IC2Ez3zhXeWNftibm9peVRTil9BYAfwPAmwB8HoCv\nzTn/iHvPtwH4SwAeA/j3AP5KzvkTNx+uaCKaLLLj3k9sIlyrnMb7pToR0T6HXvmQLZvoMzHOOc8r\nrLHA+FUq+Dq8JczCxeuw1XzCJXeEnYe3te/ZH/c1goHlkMDZbLZ00zs8PJzfTKKaxz7LTuwH17GE\nTwD8HIB/DOCH/IsppXcB+GYA7wDwKQB/B8CLKaUvyjlPrz9U0RZ+TD47O1sSYF+svW09YQ5n86Ft\n/jWODfY+Zou4MCvPxuwF2ETMvxbFQpeWhecQNe+OiCbm/PcY9f14TRTZ6uU4aR8FEX2/NjnZlAij\nhI39Y2URzjl/BMBHACDF5a/+GoD35Zz/5dV73gHgZQBfC+BD1x+qaAtbaCYWNmnkoxDarqrBReNL\nrSbCLMDj8Xhe0GY6nS6tGMLj42WV+HoiIS6JMPtVI3dEyQcc7Zde4xA1nqhjSz6afOS+/Y5KLh62\n3MX+sFafcErpDQBeD+An7FjO+UlK6WcAvBkS4VuBLTQvCFEsbm35If9ozVZ1bd/igSMLeDweYzgc\nzq1Tvpf7G4TP4CuF1kUC7KMjOMqglA7cVuD4/aXEEtv6GGIOF+RjfH3R98t9sT+se2Lu9QAyLi1f\n5uWr18QtUIptBZaXFGLL2G/9MZ7xZwsyOsYizAJsFjBPlnEcMAuwuU4if61PS/aWMId4+YSHmiW8\n6vf
M2xLxA2Pz8k6rWOVid1F0xB3D/yNziBUnSJjQmQXMLgMvuKV976KI3BfR636izG+b0qG5PKdF\nflj4nX/8bxLCTXznTcfF3WLdIvwSgATgBSxawy8A+C9rPpdYIyxwpQmpkuA2WWz+vb5Fn1eyMk04\nI+H1JTl9NEhUVe62hFiIEmsV4ZzzJ1NKLwF4G4D/BgAppYcAvgzAP1znucRmKFnK3hKuNf95JeH1\nlm+TuBvssmARns1mGA6HSxEcnBjB4WBcy0GIrrhOnPAJgDfi0uIFgC9MKX0JgFdyzr8G4P0A3p1S\n+gQuQ9TeB+DXAfzwWkYsNkKbR+M2Aln6zDauizbi7l0RvkQm+3mjxIhohRFZwqJLrmMJfymAn8Tl\nBFwG8J1Xx/8JgL+Yc/6OlNI9AN+Dy2SNfwfgq7JihLcSFjd/LHpv5C5ocklE720jviWibDxeksl+\nPpq4kwiLbeM6ccL/BkD1GS7n/F4A773ekERX1PzCvn6DbUv+Yz5W8wM3TchFPuHIHRGtWHF+fr4k\nwCV3hL9mIW4LRUcIAOX16/x7SttoIq3JGm7rguDPKbkjOImBK5r5cpychl2qVyzEbSIRFgsC7Lcl\nK5j7TcJdaz4RoST0jK/O5hMYTKTPz88xmUzmS82zT9jShP3EnIRY3DYSYQGgbgmXXmvyA0evl1wQ\ntTA1/uzIEvYTdybQZ2dn8xRpL8L2nsgnLCEWt4lEWMxp44pY5Wfs9baWcNtJP1481AswW8lnZ2cY\njUYLAuwn5vyae4aEWNwWEmERsk4BahLhVaIjvCUcCbCV6Dw/P8doNArdEdGip0J0gURYbAQTWF5i\niVe/GI1Gc0EEsOAPjlwO9jneQuVoiZzzwsrNpRVCOLWZ188rRWaUJgmFWAcSYbERTISjUpNHR0dL\n5RstzZgFz4ei2WRaVHiHa/fafqmovC8qZFl23jov+ayFWCcSYbERcs6hFWyuAE6FjurksgDbGnj9\nfn9pYs+IykTWBNgLcVSs3p+HCxkJsS4kwmIjRJbweDxeqtdQElQfB2wJGVZj2PuOfU1eFuFojTtf\nWrO0ZJNfIkqIdSMRFhshEmEWYH7dRLVUIc1WJD47O5v/rG0j/7C9Fgmwt4Jt0s6vGO1D12y8soTF\nupEIi43AbgarYMbRCFzhrOQDjkpUAnEx9GhVC79YqV9myVvCVm2NV7wwuO6yEOtEIiw2grkYzBJm\nH7AdN3E2K7hUI9hWxPDLBLHgRvur+ITPzs7CYu82XoWziU0hERYbgUPUZrNZeMxWvQAwT77wAuwL\ntfvJNy7A4yfneDn5NtERnMbsk0n4BiDEOpEIi41g7ghvUZoA93q9ebQEgHkUBAuwrcbMKyRbGBrH\nBrMFzAuTRj7hyB1hlnBUvN5cKlGdCSHWgURYbAQWXe5bhAOnF5tgRiJsi4GaCFsyBrDoG/YlLtm1\n0cYSNn+zHzuPUZaw2AQSYbERzIo0q9WsSXYXsNXqxff4+HhJgC0dGcCCxWr7vnZEkwB7n7Dhw9Si\naAkh1oVEWGwEE0cLP2PXAa9bl1KapzJbs2XrR6MRTk9P52Fkg8EAFxcXC75e6xt8nlq0hQk9R13Y\n+9iSjtpsNmusIOf7Ueqz0qEFIBEWt0RNZDiKgoX49PR0XnTHXALHx8dza5aXNbJsOhNfS/6IRHg4\nHM7F124Sh4eHGAwGC6Uv2SUyHo/nW64zUar6xsd8zeRSX0J8N5EIi1ujJDLsL7bMutFotFD1zFwY\ns9lsQST9kkYsusBivLBZweba4FRpmyjs9/uYTCYLAswiPJlM5pZwVAUuqjcRZeP5vupS3F0kwmLj\nsN+W960fFfqxKms+zdmWtT8+Pl7KtuNaE5EoW8qyF2CruGbi690jk8lk7qM2EY7WyCsds/OZ+Nu+\nTwYRdxOJsNgoHELGosdCzKFrZgl7C9h+3uKLSxYwrzfHfmG/KrOvt
mYCbZOBJrjW562JsC/0ExUB\nMvG1cc89J+5FAAAgAElEQVRmMxwcHCykYPN3JO4eEmFxK5jQRI/cLFRcbc0LsL0vEmBeddnXJmZ3\nRMl10e/3F1Zmjvocs8zREz6awu/bz/nVPNhqlgjfXSTCYmOw8NYK33hLuFTox1vBwKIla9ast4Tt\n9Ui4vQC3aSzCXPjH73NYHS8sygLsK7WJu4dEWGwUL8CRKPtaEixUZgGbQLMrgQWWJ92iqmy2Hp2N\ngf3E9tm2bepz3HLk6+V9yw70NZTZVaG6FHcbibDYOJFfmGFLOKq0xm4KDj1rCjuz9/m16FiAvc+W\nhbbU90JsPx/tz2azuQDb9fvsQSWC3G0kwuJW8HV//cQcW4S+0prFD5tPN7KALWqBV+lgS9jOawLs\nQ8T85FnUOHvPi3FpfzqdFgXYBFqW8N1GIixuHW8N+2prvtKaTdSZX9fH/Vqhn5IlDLwmwKV14/h8\nkSDXRNpbybw/mUzm1+wFmFd9FncXibDoHF8u0peRtMkt21pK83g8rrbSRKAvg2ljMAvb3ATsM/Zu\nBhZam9iL9i1pBIjLY/IkHt88VklxVpLHbiMRFluBT/m1sC0T5vPzcwAIV2/2NSYs046Lyfs6xNEx\nLihkfuRSkaCoapsvGGT1kksrgfhm56yt9sxb/u6a+mJ7kQiLrcIX9zEBNjGKRNiE+Pnz5wupzuxz\n9VvrA1jwGXsRtuNefDn8zVvObDFzcSEjEmAT4dKqz77PfvXIQm4KCxTbg0RYbAUlS5hfB7Dgd2VL\nmC1g87WyOFrjfWBxTTpvkfZ6vSWrmctyegs4ClPr9/sL11kSYPu8KPbYH/M3Jm+tl9wWYjuRCIvO\niQS49B5vCVudicFgsFDe0lKD/aoaXFfCxy9bGUwAC2FlXoTZqu71etWkDZtsLImvF2IOc+NmJTQ5\n0qLUou9ObC8SYbE1lESYBSZyR/T7fZyeni4IcEoJZ2dncws5qhnBLoloss6O2aSdRWbYtk3aMouw\n/9zIHVGKwIgWII38xvydid1AIiy2AhYSrgfsj3P2momwWbh+GSITQVvI0wuwz6JjEeb9aFKsVryH\n9/2qHCUL2G4InB7tl1ay74OXjeJj/L3VkmPEdiERFp0TpTGzaNnrUclL74JgweLQL19rwizZyB1h\n57Jz23HvMimVr/Thdfb5tagM2/I1lQSYRd6Ol743sf1IhMVWEAkGLzNvrSTCNcHyKcyc+MEibESP\n/TxG7wYohZOZ+ySyhKN19iwLcDweL1n1XoA53dm7b3yEicR4u5EIi60gCq8CsLRlES5VJ4ssU/YB\nWxgZW5N2Dhbj0vhs3wt0dCy6kZTC3fwK1DwWL8Dsu7br4yZ2B4mw2Bp8qJUnpbQQouark/FnROUu\nTYCn0+l80VDvE+ZzNfXbJElEIsw3DbaEOfKCrVi7HvNx23X78/mbl8R4N5AIi62kFGrFMbilQukc\nKdCU1WZFgIBlV0FtQs1oEjpfKMi7Mfx5oygI23oLnyuxWeyyzzCsha6J7UAiLHYOtgy5poN/jI8s\nS/t5L2TD4XDJGi31vTjXttaPVn0uLfIZRU1EN5NSTLG1yGpvY72L20UiLHaKSEAjAWY/cJMAWxgb\nW8rWj7ZRIge7DyJB9iLsl2gq+Yu9q4LHUqv05r8L23rXhR3TBF53SITFzsGxsiak0+l06XUTMj4e\nWdHT6RTD4XDunvDuCjt2cXExz7gruS5KAhxFZngR9qLrBZibibDVUbbY4tJNpzSRyN+N6AaJsNgp\n2DdqQur9tPYasDyR5sXbBGw4HM5rT1idYsu0sxoQkcvAW9wm+v69LMKlqA2eZCz5stmnbSF6k8mk\nVWIHC3Lt+xW3i0RY7BzenWB4cfaC7YWXs+6sDKZtTZgt9ZljjVl0LZnDXitFKrCYsgvAC23J6vUC\nbDeLpjhpX6vY4or9BKbEtztWF
uGU0lsA/A0AbwLweQC+Nuf8I/T6BwH8efdjH8k5v/0mAxUCWLaE\nvaVrYmsWZ8kH7OtPHB8fYzgcYjKZ4Pj4eKEgj0/2YEs1gsPHgEVLmMXOC7D5crlAUE2ER6PRUnSI\nfQ8+eoLFF8BSerN8wt1xHUv4BMDPAfjHAH6o8J4PA3gnAPsPmVzjPEKEmNjyPicycNhWyQfMVdgG\ngwHG4zHu3buH6XS6JMDmIvDWqhctnuiKws9YnL11bBXf2A/sxdf7qmvheX7S0s7JySmcms3uFXG7\nrCzCOeePAPgIAKRykOQk5/zpmwxMiAh+hPbHLFaWa/OWXBFmAQ8Gg7kI25JEHLnAVq+JoLd0DS9k\nfgKPfb42PiuFyTeRKEqjJsIlATZXhVns0fcm8e2eTfmE35pSehnAZwD8KwDvzjm/sqFziTtE5Mv0\nUQrWN0H14suTbrxaM6cy+6iFw8PD+SSd+ZsNH17mIw/YbWGCzpNknIARhaKVIjZ4EjCygH1WIX+H\nPFZl13XLJkT4wwB+EMAnAfweAN8O4EdTSm/Out2KNcCP0UA5UcIe802QbCLLogo4GmI8Hs9F2PuA\nbWVns5RtySIfK2ylJHmCzl73PlufPGEiGlm+0YRcv9+vuiCm0+nCpJ0/l22jsDpxu6xdhHPOH6Ld\nX0wp/TyAXwHwVgA/ue7zibtJm1Rcvy4bL2tvNSRMqLjkZC08zCxQcyOwW+Hi4mIh+gHAkh+Yz+GP\nsyi2CVXzKczs9+ZMQu/nPjg4WIgqsXN7d4W4HTYeopZz/mRK6bcAvBESYdEB/pHfhIgtP5ussyLx\nHPbFn3F+fr7wnsha5T77bZu2RhS6FiVccKZcKQPPPsdn1HHfVv/g+Gpxe2xchFNKnw/gdQB+c9Pn\nEsLDwsVhbf7R+/DwMKxPbJ/Bj/u8mof31Ub7vkSl7wOvxR9HFrAXYXuPWbs+ASNKk55MJgurdnB2\nnV2jX8VZ3A7XiRM+waVVa3/FX5hS+hIAr1y19+DSJ/zS1fv+LoCPA3hxHQMWYhVYgM2XHMUXHxwc\nzAuq+7Av729lEWbfsm/9fj8MM2O3glESYBt39F4Oo4smEtkiH4/H84gQKxofCbC4fa5jCX8pLt0K\n+ap959XxfwLgmwD8QQDvAPAYwG/gUnz/Vs55duPRCrECLMAmWgAW+va+g4PLpYV8Ld9owsvqTPDE\nHvejrW/edVCyZGtFfkqRHBxjbGMYjUYYjUahlR8Vihe3x3XihP8NgNpv609ffzhCrJ9IjL2FDGDB\nAgYQCrDFFnOIW9s2GAzCGGRO/PDWcGQB1wSa/ci+Dob5qP312bVJhLtBtSPEXuPjin2YmIWVAai6\nIHxyBwux9UvHBoPBwoKjLJxeTG0cOeelyAoTZrNcIwH2FrAlovh4YbOALYPQfNfyCd8+EmGx1/iJ\nOTtm8bycPMGvRUke3qr1QsvNkkI4PIx9t2bNWmZbyeXAx3wmoL0W+YA5CcVqJZcs4CihQ9weEmGx\nt9gjP7si+LgJMddUKGWd+Qk3FlyrvOb3fRq0waJZCy3zgsyTdLyiso+EsPH5SAi7draALSJEPuHu\nkAiLvcZnh0VZYuYn9gLsIxlY6IbD4UL5S+7bQqJWCpOtVnYbzGaz+TJH0cScTy/2zV7zFjBXiYvi\ngFmAR6ORRLhjJMJir2HBsq3BffO1np2dhbG8vGURtmalMM3yHA6H86QIPwnHgunjfDkyw8bPW+57\nAbbz2U2E+94CHo/HGI1G8zA6X49Y3B4SYXEniMSM4XRejpLgrfV7vd7Co35keXImWyTA9h4fLWG0\nEUO+WZyfn+Po6Gh+Tt5yCU+bYByNRgtREzwxx24bsXkkwkIElArsmGvAl8f04W32Mznn0FfLAuyz\n4VYhSvKwyAqOsCitzBFNLFpdCZ8m7d0hYj1IhIW4gt0WAEK3gF+lw+oyRAJs4hoJsFmnPmpiHUJ
s\nn8chbr62BYswi7G5UXj83PdbcXMkwkIQLIIsMmYFW5+FuCTAJrAscLY6slnCPjrCj6FGSXy9OEYx\nxDVrmCMxfOEju37FE68PibAQjpK4cFhYtNIzZ+DxApsswsfHx/PYYU7guC6REPvXrdRmJL5RuB2n\nRNuNwq9eLQFeHxJhIRxNAuPLYZpVyCLMK3BYBIVVMmuyhG0Mbd0SUYKHP16zgr0QW4U2f418/Yqi\nWB8SYSGIkjvCv2ZCy6+x5Wi+4ouLi7kFbAuL1nzCq8AiWxNgFuE2Qmzui9lsFk40mrtCLon1IBEW\n4oqmiTnej1Z7tlAxm7CzuhDHx8fzUpIW1saF2EuTXG2sYZ9RxxmA/FpbAR4Oh0sZe/46/XFxMyTC\nQgTULDyuQcEWMCd0cLGde/fuLYhwbUWM62CCyJNznJxiYXI+s85XWTMhjsTfX6dYHxJhIYjICo5e\ns31Og44SPM7PzzEajZZEuI0l3IQXSvZNs+uAY5Xb+IS9CHsBliW8XiTCQgTUQsZYfP1ySb5/dna2\nJMJeiGsC3OSS4HN5lwlvVwlR88kZJsCcyi0RXh8SYSGuQdvJtJzzUoozr3x8XUvY+675WORK8MWI\nSpbxYDBYig9mHzdP9on1IBEWew+LVNM2atFrbT/v6OgIjx8/xqNHj/Dw4UPcv38f9+7dm1ddswy2\nkqiVjkcFfbjvj3G8Lxf2sRsD15Uo+a7Zf62oiPUhERZ7TUlYS80X7yn1m4TbWr/fx+d8zufg0aNH\nePDgAU5OTpZE+CZlJCPXQ3TMCzAvec+WuoXRect9HT5sESMRFnuNnzSLJtCsz5ENpTKW3Np8rhdh\ns4SPj48X1n2LRLjtI39TkR0fw8ylLlmEmwRYlvBmkAiLvcVbqyUR5ZKQvFBm6Zj/nOjzrLE7omYJ\nR5EOTXgx9ALMrckSNgH2CSUsxOwnlgivD4mw2GsiAS5ZvDxpVWscJRBZybzf7/fx8OHDeWOfsBVU\nt1q+Nt5VYbFlS9Uf95aw9wmzGLNYR5awWB8SYbHXeDeBXzGDm48WiCIIvBBHn8X7R0dHczdEkyV8\n3YiDkvXLZSfb+ITZHeGLw0fRHBLj9SARFntNyRIuCTBnknGfj3G4V5Mb4+joCCcnJwutrU+4DZEv\nOKr9GwlxTYSjKmryCW8GibDYW9gnHAkwW7VHR0fz9F1O5Y36ZhFHi4D6/tHREY6Pj3F8fDwXX1uP\nruYTXoUmK7iNALMQ+1U/fE1huSTWi0RY7DU+/MwLsLVSbV2/9I8Jsv95dlX447wSs/9cXt9tVZom\n5rx4loR4NpstTcqVVtKQJbx+JMJir4lC0LwQ+4wxv5KyX9KexbO2tb5fRsi7OW663HxNfL0Q+9WY\nvTtiPB7P6yBHny0BXj8SYbG31PzBpToKZqGay4DdB9Y3EW7bmqxm747wxYOa9kvREW0s4cgdUapn\nIfHdDBJhsTV4ISr127YousGLLvfN4jXRrVnENcvXb73gclyxj4rwAgvExeV9v8kn7GN8axauBPh2\nkQiLraBUp8HvR9ltpeZr55YEmEXY+4OjY+YTLvmB2eKNIiZ8goddI7Ca2DW5DCI/7nXcChLgzSIR\nFltBUz0HL8BReFgt7tdbvaWQND8BF/V9vHCtXxPfyBJelShlueSSWMUa5s8Xm0UiLDonSi8ubU3c\natEIJX9vTYB9kfNaK1m4TTeHyIK/qQgbbSbn2govf6bYPBJhsRVEk2hRLYZVXAwloa1ta6Jt++zb\nLQltTXi5MLp3R7SlJqhN7oko1EwWcHdIhMVWUCop6UWMoxj88jw1y7UpEy5KVS5NvFlcb61mRBuf\n9TrcEUZbS9hbxPaz0VbcDhJh0TlRZlsU1+tFuJZYYQVySuIbpSKvUsCnZKmXLPmam2UVEY4s1lV8\nwqu6JMTmkQiLraBmBfvJtiipgmN5OZSsJrxR0kTJt+v3m/z
Xvh/tr+qOKLkMVo2OiKzh0jnE5pEI\ni62gKbGiJMK+LoOvz1CacPPHvZ+3ZImzFczjbtOi99uxJpomz9YZJyxuF4mw6JwoOqKWYuxF2CqT\n2db6NT+xb7bOW1vXgh+zvw7etn3tutQs4XXFCovNIREWW0HkjogqnkUifO/ePdy/f39eKtL67Bdu\nK8JNrgNf42Edk2pNREJZEtMmt0RpYk6C3B0SYXEjIhFqk/nG+z6utxah4C3fyApmv3BTJISv37CK\na6ENJQEtHWsbYpZzXlp+qNZ/9dVX8fTpUzx79gzPnz/HaDRaqJpm9SIkxLePRFisTJvH7WjCqtT3\nlcxq9R6Gw+FcbFl0uVi6r/EQFdNhP+9NhbYNLG610LBShhvX8rXGS9JzWUreZxH+7Gc/Oxfi09PT\nuRDb+y4uLtZ6zaIdEmGxEiVfqBfh2iSX3zZlsPExnozjCbioYPpgMChWLSsV0vE3lnUKcunxn/f9\nSha++hnv+3XifOPjT58+xZMnT/DkyZMla5iLuIvbRyIsVqbNrH8U2RDVVGA/b5tWqvcbVT6rFdgp\nCbG/Fr7m67KK79YvsOnXhON9v0inX7iTX3v+/PncCjZLeDweL6wpJxHuBomwaM0qPtNSjQdf28Gv\nauGz3/yxaKWL0ioY9vmlWN9S5lpJfNdhETdFMnjXQsmyjWoB19rz589xenq6sI3cEfIJ3z4riXBK\n6VsB/BkAvx/ACMBPAXhXzvnj7n3fBuAvAXgM4N8D+Cs550+sZcRiK6hFEng3Q5taviVxjcQ3so6j\nfW/xRkV1anG7q8TxNtEmmYJFOBJSs1r9MT5ufX9sNBphPB5jNBotNLkjumdVS/gtAL4LwH+6+tlv\nB/BjKaUvyjmPACCl9C4A3wzgHQA+BeDvAHjx6j3TdQ1cdEMUshVtfUhZk4uhVLvX96Moh1Lfajw0\n1XmI0ofXLcC2LcXw8uSbiTCv+2ZLD/ljvkU/1+Zn5I7ojpVEOOf8dt5PKb0TwP8C8CYAH706/NcA\nvC/n/C+v3vMOAC8D+FoAH7rheMUW4H2nUdQDuyPaWLvRWm7RMa7zEIWw8WtNSRe36YpoK8Ds7zXh\ntWbWrDUvzF6keb/mM5Y7oltu6hN+DCADeAUAUkpvAPB6AD9hb8g5P0kp/QyAN0MivNOUIge8sJml\nGSVX2PptbSfY/D77eZu2UT2HaL8kwtF134SSH9gvSc+WMLsQLKzMWiTIpb6JOws9H5M7ojuuLcLp\n8i/z/QA+mnP+pavDr8elKL/s3v7y1Wtix4nEK7I4IxE2AfaLZ5YW1PThaCbCTXUdvL+Xx13r8zWu\nkygqIor/rYkwT6pZi6zjqI1Go1ZxxxLhbriJJfwBAF8M4MvXNBaxI5R8wl6ArZnvl63bqOAOt9Jx\n9vN6F0hUNrI0/i5YxRKezWahCHNji7hpW0sQ4a24fa4lwiml7wbwdgBvyTn/Jr30EoAE4AUsWsMv\nAPgv1x2kuDltBKnmF+XY36blfHq93pLYthHYyP3gIyK4xkOpbOQ66vQ2HavF/PpjpWSLaBl6DiHz\nfb8fWb0+WsIEXWwvK4vwlQB/DYCvyDn/Kr+Wc/5kSuklAG8D8N+u3v8QwJcB+Ic3H65YhTYC6x/L\nIx+ptSjWtxQD7AW15GqIJuA4CmLVFON1TKDVttaPVqwoHYv8sVF/NpvNrV7z/7L7gfejSTebYGM3\ngyzc7WfVOOEPAPgGAF8N4HlK6YWrl17NOY+v+u8H8O6U0idwGaL2PgC/DuCH1zJi0YqS/zN6rY1F\naatatFnXzdKLo0iHaGKuFBvsQ82abhLrFOBalTHvx+WJrajvM938PrsgeCKuqe/jgf1Emwry7Aar\nWsLfiMuJt3/tjv8FAN8HADnn70gp3QPwPbiMnvh3AL5KMcK3RyRQfr80sRZNtNmxprXc2mS2lWJ/\nS4XXm1KM102tilmUXuy
jDUoWbqnAjt+vRTj4Y5y0IUt4d1k1Tvig+V1Azvm9AN57jfGINVES3JIA\nNyUzHB4erpQyvEoacs2q5nCzUlzvuq3hUunIKLOtVLWM931MbpSGbH2fcOGz4LgfpTbzDUACvBuo\ndsSeE4ku7zct1c77kQ+31EpLCUVrvJWSLnzpyZo7wl/zdShltvmtr2BWKqRT2q+91vSZfKxU6EeW\n8G4hEd5TSlav37aNdjg8PAzLSJaiHmp1I2o1fnlbKrhj1+e363BRRFZwFF9rFm5koUb9Wl0H3rLP\n2FvX/hj7f9n9IRHeLSTCe0gkUKVYWr98UKnf6/XmQtumRdXSSv0mC9y7I0rXeFNqcbzceDKNU4Rr\nvtzpdLrQr9Vw8IJa8jVHE4F8TBNzu4FEeE9pMwHnBbht2BkvLVRqtoR8TditH2XclZpdG1+nv+7r\n4EPQmjLb2B3BSRVRFIOfWCttp9NpUVD9sdINIgqfE9uNRHjPaRLjaCHN0npvLMD379+vtiixo9Rq\nrpM2E2/rjJQo+YO9KLI7gtODfTxvm7RitpyjAj+1bVMT249EeE9ZJQzNxLDJd+st4fv37+PBgwdh\ni1wJtTKSNmZ/DVF/U7Sp8cBuAbaEuciOL6AeWceR5TyZTObnbtp6gW3aF9uLRHjLaYrx9ZECkdBF\nghgJr9V5KC28+eDBg7mlyysd+/XdhsNho1vB+6fb0ja9GMDC43gt3dje61OLfZqx7U+n06WCOl54\nefUKtoZLlrH5hdtes9gfJMJbQpMV2EZUr7ON3A7c530W35OTk7nocl2HyPKNXAk3jWZok14cPcLX\n+k1JF1zr15eXLG1NdHl5eU6qaJpAkwDvPxLhjokeuf02qkxWimCwia42pR5NhGuRC3yMLV9eXp4T\nLvxEW8m/exPaFs7h4jlNk11tFtb0IlybjONtKb3YIhxKJSQlwHcDifAWUAu7MhHj2rylDDO/rlpJ\niLlfi4zwURKlmr9+Tbc2ArzOzLZSlpuJay3My1u5tSy4qNZvm4LqPumijSUsAb47SIQ7pPR47gXL\nRNivyRbVabDQsDaxt23ig7lfKsjDBXdKKcbrTKqoiS73fe0GL6ZREkSbVY45PjiK+/WJGtHn1JIq\nJMB3C4lwx5REykc2mDXqRdCXi+z3+9VwsKYsuahv21JNCF5e3hdd30RGm21rqcVc36FpfbXrbFdJ\nSfbujZIIS3zvJhLhLaIUI8uRDCbCvDIF94fD4Uoxuqu0phWO/dpuNUv4JqJcckNE6cVsuUZpwr4g\nTklQfWuynrmVst+U2SYAiXDntElS8D5hs3otSoGjFo6Pj6uWbpN1XOrX/Me+/kPNCl6XRQwsF1f3\nmW3eh+tXIY5Si2v1HVisS8VzvNXrV9DwE4PKbBMS4Q6JfMKRIEfuCEuYsNhdS5K4d+9eo/j6Gr1R\nqFsprritkNcmG/21r0qtyI5fNLOU2Rb12Y/LUQ3e32vpxdESRdHxUooxN3F3kQhvAdGjui+yYxNz\nJsJmCd+/fx8PHz7Ew4cP8ejRI5ycnDT6enl/lboNpYy7aL+W7bYu33BNgKNFM31mG/dL1rG3ns0S\n9uIaia0da4rikDvibiMR7phaFhwLG7sjzP9rFvCjR4/w6NEjPH78eJ4y3DTJxlZrk7Ba38brt+u0\ncpto8gV7K7TN8vGW2RaFmUX7s9msMTqD93nsTX1x95AIr0DJuquFmtWOtbU6B4PBQtEc7wfmZu6I\nkuXr9yPLu7RdhVWExVuCtSy4Wsyv708mk1apxVxop03FMxbhpmQRIZqQCLegzSx/JGa1/iqt3+/P\nXQ7WTIQtIsLH6tZ8t37yb1MJFW2pPaL747xwZlP872QyaUwrZreEn4Tj6IYonCy6eUh4xapIhFtQ\nshYj0W2KMPCvRZNgfr/f7y9Mvln//v37C+nDJsJtoh02nVDRBrYe20xeRbG/tXhe9gHXU
outglkU\n5+uXDGojvhJisQoS4QYi/2yped9rqb6D7ZeiEfyxqHgOW8K2hDyLcC3SoSTAJfFdtyBHKbq12g6+\nmLoXzCisjFeyaOPrjdZy82FmkQhLfMVNkQi3wAtxFN7FQhutoRYdq1nOvO33+0uFc3jfuyNWjXTg\na2wjytclEik/wVZbN83CzaKlgUrRDKX3l9KLo3XcuNBOqTymv0YJsmiLRLiBkgBHk1xc3yFqftXh\nNu4Ks4Sjwjl+a0V0mqIcIkuYr3UTrohagRq2hH3RdJ8E4a3ZWp3eyMVQSjWuJWBElnB0DUJcB4lw\nC1iIffaYt4C5mE6t+Qm0ksDb53OxHO7zPlvCNd81H7Pr4+v0/ZvSVCHMLyNUSwlm0W1qXL+3qThP\nm2LuUY0HCbG4KRLhlrAFWUrdtZTiNq2p0I6P8W2yrLmwelPkQy0SYtMxv6VHd5/lFhXMmc1mS+u3\nWd83E+Gm+sCR37dpcc3SdZT2haghEW7AC5oXSvbzcjZbUxsMBo0xvP480eoX/hiLsB9/ze1Q6t+U\ntskKtUw3noCL1nHjxsc4vTgqnOP3fRU27weuuSOifSHaIBFuQeQTjibhWIR54oybRTSYCPtMtjZZ\nbk37NuY2W9+vHVsXUXgXZ7yxJcyTZ+PxeCHJ4vnz53j27Nm82b5tp9NpaM2Wlo5vilcuhacJcRMk\nwi2IJueixTCttgOvSBxVOjs5OcFwOAwFuCTKtegGfywaf+m6botSZIRtvQCXVrEYjUYLAvz06dP5\nlvsswiyspTXm2kY6SHTFupEIN1CblGMR9v5gczt4K9gEeRURborrZd/vqjQ9Ul9XdJosSRY4H/FQ\nWpF4PB4vuB7M6i216XTaahwSVtElEuEWsLXpBdhHKfjIBV6BwhdDj9wJJUu3zeRaRFNomG1LAnld\noWqbAXdxcbGUWFHbnp6ezsXXr2ZsFrRWqxC7hES4gZI/mC1gv+RQJMQc0cAiXApNK8XytqX2WO1F\nNvKHliaj/GeXiOrqlmrw+qy3WvMRERwPzJEOtZuJxFlsExLhFkTuiJII+3jgKKzMLwUUZctFVrCN\npa0wl0SX+6tYrE3nYdqsOuGLrpfSkXk/ihG2tGO/dlskwk3jFuK2kQg34CflSu6IyBJmV0TkjijV\ndKhZwlFomRfjKPqgZBXWIgf8sSbR4tfbrlocxQLX+lEtCG8JR6FkUd+PWYgukAi3wFvCfkKutAx8\nSSIE/MYAAAnpSURBVIhZhEsRDlF6sY3Ftk2+4DaNoxKaluzxn186L4AwXbi0bltJoEtC7ms/+NTj\nyBL2Y5b4im1BItxAFJrG7oimibmSAPuoh9L2ppltJeFlS7ipODr7WaPPj/Z5ss0X2PH9Uk3gUp3g\nUulKrv3rrd9orEJsAxLhFpR8wlGqsq/tUEo3Pjo6KoptKRqCx8PbiJL/1/f9UkAl8Ts7O1v4bH8u\nv2+TaKXFNXnf+4j9DaCNb7lU+9ePUdaw2DYkwg2UfMJcz6HmjiiFp5kI1ybdIgvY96N9o+aC8Ouy\nsZhF7ezsrCpgfj9aTNPXfbD9NinFbV0mbQrtRPtCdIVEuAWRJcwCHE3M1cTYmn2237ZxNbSNDY5C\nz5oEOJoUs3XVSufw5+fMtqjWAx+LCujUirv7bLdSa+M+EaJrJMIN+Iw5Xy+izcRcFB1hhXb8uaLz\nX4cmC5gn5SIhjmrx2ufyOUp9E9mopoOv9eCLpjcJayn0LNoXYtuRCLegVnvB+2sj0YtWCN5U3YZV\nYn/Pz88b43J5v8mvyn2fXlxrUQUzb73bVoh9QyLcAE9e2SP6eDxeiHAwQfUrP/ii67zt9Tbz1ddc\nD9Gxpvhc3udzNPW966GU2VYqFanIBnFXkAg3YOLgq3pZxpsJcM55vvKDjxH28cKDwWCjIhwJcanf\nNqliNpvNP9+fL+r7ibmmzLamBAsh9hWJcAM+oWE6n
S6kHPN7ptNpddULblb3dxPU6j94ES7F4kbb\nNqFphl9WnmODp9PpQuyxt4B9E2KfkQg3EFnC7ILghIfJZLK0qrKPiODiPZses/dPR/tt4nF9nDCf\np7TvEzNq7ggJr7jLSIQbYEt4NpvNXRAppQUBns1m8zXe/PJD0bGo+Po6x9zUOFmjtMilj9/154jO\na9RSlUs+4SjKQaIs9p2VRDil9K0A/gyA3w9gBOCnALwr5/xxes8HAfx596MfyTm//YZj7QyzhHnl\nCi/A5qYoFWb3/U2JcE3IvKhxlIRPdoiO+XOUzg2g0cdcsoT92JvOKcSus6ol/BYA3wXgP1397LcD\n+LGU0hflnEf0vg8DeCcAi8Oa3HCcneF9wgAW9nmyrrZ6clS2cpNjrsXOcp+TI3z4WpvkB39eo00J\nS/+5mpQTd5GVRNhbsymldwL4XwDeBOCj9NIk5/zpG49uCzCL13zALFKz2az1WnBRicpNjrntNprE\nK/XbnhdoX9Q9ukmUrkOIfeSmPuHHADKAV9zxt6aUXgbwGQD/CsC7c87+PTsBW4smyCbAUT1gX4Cn\n1N/0IpulR3nfX6Wtes5VsuCiz4j2hdg3ri3C6VJF3g/goznnX6KXPgzgBwF8EsDvwaXL4kdTSm/O\nO/gfxVZi0zpvTXV/21Q/2+R11I6XtrWfbfr8mpjL7yvEJTexhD8A4IsBfDkfzDl/iHZ/MaX08wB+\nBcBbAfzkDc7XGZqhF0JsimvNDqWUvhvA2wG8Nef8m7X35pw/CeC3ALzxOucSQoh9ZmVL+EqAvwbA\nV+Scf7XF+z8fwOsAVMVaCCHuIitZwimlDwD4cwD+LIDnKaUXrtrw6vWTlNJ3pJS+LKX0u1NKbwPw\n/wL4OIAX1z14IYTYdVZ1R3wjgIcA/jWA36D2dVevnwP4gwB+GMB/B/CPAPxHAH8i5zxbw3iFEGKv\nWDVOuCraOecxgD99oxEJIcQdYnNpW0IIIRqRCAshRIdIhIUQokMkwkII0SESYSGE6BCJsBBCdIhE\nWAghOkQiLIQQHSIRFkKIDpEICyFEh0iEhRCiQyTCQgjRIRJhIYToEImwEEJ0iERYCCE6RCIshBAd\nIhEWQogOkQgLIUSHSISFEKJDJMJCCNEhEmEhhOiQbRDhYdcDEEKIDdGob9sgwl/Q9QCEEGJDfEHT\nG1LO+RbGURlASq8D8JUAPgVg3OlghBBiPQxxKcAv5px/u/bGzkVYCCHuMtvgjhBCiDuLRFgIITpE\nIiyEEB0iERZCiA7ZShFOKf3VlNInU0qjlNJPp5T+SNdjWgcppfeklC5c+6Wux3UdUkpvSSn9SErp\nf15dx1cH7/m2lNJvpJROU0r/X0rpjV2M9To0XV9K6YPB7/JHuxpvW1JK35pS+lhK6UlK6eWU0j9P\nKf2+4H07+btrc33b9rvbOhFOKX09gO8E8B4AfwjAfwXwYkrpczsd2Pr4BQAvAHj9Vfvj3Q7n2pwA\n+DkA3wRgKcQmpfQuAN8M4C8D+KMAnuPy99i/zUHegOr1XfFhLP4uv+F2hnYj3gLguwB8GYA/BeAI\nwI+llI7tDTv+u2u8viu253eXc96qBuCnAfwD2k8Afh3A3+x6bGu4tvcA+M9dj2MD13UB4Kvdsd8A\n8C20/xDACMDXdT3eNV3fBwH8UNdjW8O1fe7V9f3xPf3dRde3Vb+7rbKEU0pHAN4E4CfsWL781n4c\nwJu7Gtea+b1Xj7i/klL6pyml/6PrAa2blNIbcGld8O/xCYCfwf78HgHgrVePvL+cUvpASul3dD2g\na/AYl5b+K8Be/u4Wro/Ymt/dVokwLu9ahwBedsdfxuUfxq7z0wDeicsMwW8E8AYA/zaldNLloDbA\n63H5h7+vv0fg8nH2HQD+TwB/E8BXAPjRlFLqdFQrcDXW9wP4aM7Z5ib25ndXuD5gy353vS5OelfJ\nOb9Iu7+QUvoYg
P8B4Otw+YgkdoSc84do9xdTSj8P4FcAvBXAT3YyqNX5AIAvBvDlXQ9kQ4TXt22/\nu22zhH8LwDkuHebMCwBeuv3hbJac86sAPg5gJ2aeV+AlXPry78TvEQByzp/E5d/vTvwuU0rfDeDt\nAN6ac/5NemkvfneV61ui69/dVolwznkG4GcBvM2OXT0ivA3AT3U1rk2RUrqPy1989Y9k17j6o34J\ni7/Hh7icsd673yMApJQ+H8DrsAO/yyuB+hoAfzLn/Kv82j787mrXV3h/p7+7bXRH/H0A35tS+lkA\nHwPwLQDuAfjeLge1DlJKfw/Av8ClC+J3AfjbAGYAfqDLcV2HKz/2G3FpNQHAF6aUvgTAKznnX8Ol\nL+7dKaVP4LJC3vtwGeXywx0Md2Vq13fV3gPgB3EpWG8E8Hdx+VTz4vKnbQ8ppQ/gMhzrqwE8TymZ\nxftqztmqGO7s767p+q5+r9v1u+s6PKMQVvJNuPzljwD8BwBf2vWY1nRdP4DLP+YRgF8F8P0A3tD1\nuK55LV+By9Cfc9f+H3rPe3EZ7nSKyz/wN3Y97nVcHy7LFH4El//EYwD/P4D/G8Dv7HrcLa4ruqZz\nAO9w79vJ313T9W3j706lLIUQokO2yicshBB3DYmwEEJ0iERYCCE6RCIshBAdIhEWQogOkQgLIUSH\nSISFEKJDJMJCCNEhEmEhhOgQibAQQnSIRFgIITpEIiyEEB3yvwHGFZtYaWxMwgAAAABJRU5ErkJg\ngg==\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "plt.imshow(train_x[0].reshape((28, 28)), cmap = cm.Greys_r)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# rbm code" ] }, { "cell_type": "code", "execution_count": 20, "metadata": { "collapsed": true }, "outputs": [], "source": [ "from __future__ import print_function\n", "\n", "import timeit\n", "\n", "try:\n", " import PIL.Image as Image\n", "except ImportError:\n", " import Image\n", "\n", "import numpy\n", "\n", "import theano\n", "import theano.tensor as T\n", "import os\n", "\n", "from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams\n", "\n", "from utils import tile_raster_images\n", "from logistic_sgd import load_data" ] }, { "cell_type": "code", "execution_count": 21, "metadata": { "collapsed": true }, "outputs": [], "source": [ "class RBM(object):\n", " \"\"\"Restricted Boltzmann Machine (RBM) \"\"\"\n", " def __init__(\n", " self,\n", " input=None,\n", " n_visible=784,\n", " n_hidden=500,\n", " W=None,\n", " hbias=None,\n", " vbias=None,\n", " numpy_rng=None,\n", " theano_rng=None\n", " ):\n", " \"\"\"\n", " RBM constructor. 
Defines the parameters of the model along with\n", " basic operations for inferring hidden from visible (and vice-versa),\n", " as well as for performing CD updates.\n", "\n", " :param input: None for standalone RBMs or symbolic variable if RBM is\n", " part of a larger graph.\n", "\n", " :param n_visible: number of visible units\n", "\n", " :param n_hidden: number of hidden units\n", "\n", " :param W: None for standalone RBMs or symbolic variable pointing to a\n", " shared weight matrix in case RBM is part of a DBN network; in a DBN,\n", " the weights are shared between RBMs and layers of a MLP\n", "\n", " :param hbias: None for standalone RBMs or symbolic variable pointing\n", " to a shared hidden units bias vector in case RBM is part of a\n", " different network\n", "\n", " :param vbias: None for standalone RBMs or a symbolic variable\n", " pointing to a shared visible units bias\n", " \"\"\"\n", "\n", " self.n_visible = n_visible\n", " self.n_hidden = n_hidden\n", "\n", " if numpy_rng is None:\n", " # create a number generator\n", " numpy_rng = numpy.random.RandomState(1234)\n", "\n", " if theano_rng is None:\n", " theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))\n", "\n", " if W is None:\n", " # W is initialized with `initial_W` which is uniformely\n", " # sampled from -4*sqrt(6./(n_visible+n_hidden)) and\n", " # 4*sqrt(6./(n_hidden+n_visible)) the output of uniform if\n", " # converted using asarray to dtype theano.config.floatX so\n", " # that the code is runable on GPU\n", " initial_W = numpy.asarray(\n", " numpy_rng.uniform(\n", " low=-4 * numpy.sqrt(6. / (n_hidden + n_visible)),\n", " high=4 * numpy.sqrt(6. 
/ (n_hidden + n_visible)),\n", " size=(n_visible, n_hidden)\n", " ),\n", " dtype=theano.config.floatX\n", " )\n", " # theano shared variables for weights and biases\n", " W = theano.shared(value=initial_W, name='W', borrow=True)\n", "\n", " if hbias is None:\n", " # create shared variable for hidden units bias\n", " hbias = theano.shared(\n", " value=numpy.zeros(\n", " n_hidden,\n", " dtype=theano.config.floatX\n", " ),\n", " name='hbias',\n", " borrow=True\n", " )\n", "\n", " if vbias is None:\n", " # create shared variable for visible units bias\n", " vbias = theano.shared(\n", " value=numpy.zeros(\n", " n_visible,\n", " dtype=theano.config.floatX\n", " ),\n", " name='vbias',\n", " borrow=True\n", " )\n", "\n", " # initialize input layer for standalone RBM or layer0 of DBN\n", " self.input = input\n", " if not input:\n", " self.input = T.matrix('input')\n", "\n", " self.W = W\n", " self.hbias = hbias\n", " self.vbias = vbias\n", " self.theano_rng = theano_rng\n", " # **** WARNING: It is not a good idea to put things in this list\n", " # other than shared variables created in this function.\n", " self.params = [self.W, self.hbias, self.vbias]\n", " # end-snippet-1\n", "\n", " def free_energy(self, v_sample):\n", " ''' Function to compute the free energy '''\n", " wx_b = T.dot(v_sample, self.W) + self.hbias\n", " vbias_term = T.dot(v_sample, self.vbias)\n", " hidden_term = T.sum(T.log(1 + T.exp(wx_b)), axis=1)\n", " return -hidden_term - vbias_term\n", "\n", " def propup(self, vis):\n", " '''This function propagates the visible units activation upwards to\n", " the hidden units\n", "\n", " Note that we return also the pre-sigmoid activation of the\n", " layer. 
As it will turn out later, due to how Theano deals with\n", " optimizations, this symbolic variable will be needed to write\n", " down a more stable computational graph (see details in the\n", " reconstruction cost function)\n", "\n", " '''\n", " pre_sigmoid_activation = T.dot(vis, self.W) + self.hbias\n", " return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]\n", "\n", " def sample_h_given_v(self, v0_sample):\n", " ''' This function infers state of hidden units given visible units '''\n", " # compute the activation of the hidden units given a sample of\n", " # the visibles\n", " pre_sigmoid_h1, h1_mean = self.propup(v0_sample)\n", " # get a sample of the hiddens given their activation\n", " # Note that theano_rng.binomial returns a symbolic sample of dtype\n", " # int64 by default. If we want to keep our computations in floatX\n", " # for the GPU we need to specify to return the dtype floatX\n", " h1_sample = self.theano_rng.binomial(size=h1_mean.shape,\n", " n=1, p=h1_mean,\n", " dtype=theano.config.floatX)\n", " return [pre_sigmoid_h1, h1_mean, h1_sample]\n", "\n", " def propdown(self, hid):\n", " '''This function propagates the hidden units activation downwards to\n", " the visible units\n", "\n", " Note that we return also the pre_sigmoid_activation of the\n", " layer. 
As it will turn out later, due to how Theano deals with\n", " optimizations, this symbolic variable will be needed to write\n", " down a more stable computational graph (see details in the\n", " reconstruction cost function)\n", "\n", " '''\n", " pre_sigmoid_activation = T.dot(hid, self.W.T) + self.vbias\n", " return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]\n", "\n", " def sample_v_given_h(self, h0_sample):\n", " ''' This function infers state of visible units given hidden units '''\n", " # compute the activation of the visible given the hidden sample\n", " pre_sigmoid_v1, v1_mean = self.propdown(h0_sample)\n", " # get a sample of the visible given their activation\n", " # Note that theano_rng.binomial returns a symbolic sample of dtype\n", " # int64 by default. If we want to keep our computations in floatX\n", " # for the GPU we need to specify to return the dtype floatX\n", " v1_sample = self.theano_rng.binomial(size=v1_mean.shape,\n", " n=1, p=v1_mean,\n", " dtype=theano.config.floatX)\n", " return [pre_sigmoid_v1, v1_mean, v1_sample]\n", "\n", " def gibbs_hvh(self, h0_sample):\n", " ''' This function implements one step of Gibbs sampling,\n", " starting from the hidden state'''\n", " pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h0_sample)\n", " pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v1_sample)\n", " return [pre_sigmoid_v1, v1_mean, v1_sample,\n", " pre_sigmoid_h1, h1_mean, h1_sample]\n", "\n", " def gibbs_vhv(self, v0_sample):\n", " ''' This function implements one step of Gibbs sampling,\n", " starting from the visible state'''\n", " pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v0_sample)\n", " pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h1_sample)\n", " return [pre_sigmoid_h1, h1_mean, h1_sample,\n", " pre_sigmoid_v1, v1_mean, v1_sample]\n", "\n", " # start-snippet-2\n", " def get_cost_updates(self, lr=0.1, persistent=None, k=1):\n", " \"\"\"This functions implements one 
step of CD-k or PCD-k\n", "\n", " :param lr: learning rate used to train the RBM\n", "\n", " :param persistent: None for CD. For PCD, shared variable\n", " containing old state of Gibbs chain. This must be a shared\n", " variable of size (batch size, number of hidden units).\n", "\n", " :param k: number of Gibbs steps to do in CD-k/PCD-k\n", "\n", " Returns a proxy for the cost and the updates dictionary. The\n", " dictionary contains the update rules for weights and biases but\n", " also an update of the shared variable used to store the persistent\n", " chain, if one is used.\n", "\n", " \"\"\"\n", "\n", " # compute positive phase\n", " pre_sigmoid_ph, ph_mean, ph_sample = self.sample_h_given_v(self.input)\n", "\n", " # decide how to initialize persistent chain:\n", " # for CD, we use the newly generate hidden sample\n", " # for PCD, we initialize from the old state of the chain\n", " if persistent is None:\n", " chain_start = ph_sample\n", " else:\n", " chain_start = persistent\n", " # end-snippet-2\n", " # perform actual negative phase\n", " # in order to implement CD-k/PCD-k we need to scan over the\n", " # function that implements one gibbs step k times.\n", " # Read Theano tutorial on scan for more information :\n", " # http://deeplearning.net/software/theano/library/scan.html\n", " # the scan will return the entire Gibbs chain\n", " (\n", " [\n", " pre_sigmoid_nvs,\n", " nv_means,\n", " nv_samples,\n", " pre_sigmoid_nhs,\n", " nh_means,\n", " nh_samples\n", " ],\n", " updates\n", " ) = theano.scan(\n", " self.gibbs_hvh,\n", " # the None are place holders, saying that\n", " # chain_start is the initial state corresponding to the\n", " # 6th output\n", " outputs_info=[None, None, None, None, None, chain_start],\n", " n_steps=k,\n", " name=\"gibbs_hvh\"\n", " )\n", " # start-snippet-3\n", " # determine gradients on RBM parameters\n", " # note that we only need the sample at the end of the chain\n", " chain_end = nv_samples[-1]\n", "\n", " cost = 
T.mean(self.free_energy(self.input)) - T.mean(\n", " self.free_energy(chain_end))\n", " # We must not compute the gradient through the gibbs sampling\n", " gparams = T.grad(cost, self.params, consider_constant=[chain_end])\n", " # end-snippet-3 start-snippet-4\n", " # constructs the update dictionary\n", " for gparam, param in zip(gparams, self.params):\n", " # make sure that the learning rate is of the right dtype\n", " updates[param] = param - gparam * T.cast(\n", " lr,\n", " dtype=theano.config.floatX\n", " )\n", " if persistent:\n", " # Note that this works only if persistent is a shared variable\n", " updates[persistent] = nh_samples[-1]\n", " # pseudo-likelihood is a better proxy for PCD\n", " monitoring_cost = self.get_pseudo_likelihood_cost(updates)\n", " else:\n", " # reconstruction cross-entropy is a better proxy for CD\n", " monitoring_cost = self.get_reconstruction_cost(updates,\n", " pre_sigmoid_nvs[-1])\n", "\n", " return monitoring_cost, updates\n", " # end-snippet-4\n", "\n", " def get_pseudo_likelihood_cost(self, updates):\n", " \"\"\"Stochastic approximation to the pseudo-likelihood\"\"\"\n", "\n", " # index of bit i in expression p(x_i | x_{\\i})\n", " bit_i_idx = theano.shared(value=0, name='bit_i_idx')\n", "\n", " # binarize the input image by rounding to nearest integer\n", " xi = T.round(self.input)\n", "\n", " # calculate free energy for the given bit configuration\n", " fe_xi = self.free_energy(xi)\n", "\n", " # flip bit x_i of matrix xi and preserve all other bits x_{\\i}\n", " # Equivalent to xi[:,bit_i_idx] = 1-xi[:, bit_i_idx], but assigns\n", " # the result to xi_flip, instead of working in place on xi.\n", " xi_flip = T.set_subtensor(xi[:, bit_i_idx], 1 - xi[:, bit_i_idx])\n", "\n", " # calculate free energy with bit flipped\n", " fe_xi_flip = self.free_energy(xi_flip)\n", "\n", " # equivalent to e^(-FE(x_i)) / (e^(-FE(x_i)) + e^(-FE(x_{\\i})))\n", " cost = T.mean(self.n_visible * T.log(T.nnet.sigmoid(fe_xi_flip -\n", " fe_xi)))\n", 
"\n", " # increment bit_i_idx % number as part of updates\n", " updates[bit_i_idx] = (bit_i_idx + 1) % self.n_visible\n", "\n", " return cost\n", "\n", " def get_reconstruction_cost(self, updates, pre_sigmoid_nv):\n", " \"\"\"Approximation to the reconstruction error\n", "\n", " Note that this function requires the pre-sigmoid activation as\n", " input. To understand why this is so you need to understand a\n", " bit about how Theano works. Whenever you compile a Theano\n", " function, the computational graph that you pass as input gets\n", " optimized for speed and stability. This is done by changing\n", " several parts of the subgraphs with others. One such\n", " optimization expresses terms of the form log(sigmoid(x)) in\n", " terms of softplus. We need this optimization for the\n", " cross-entropy since sigmoid of numbers larger than 30. (or\n", " even less than that) turn to 1. and numbers smaller than\n", " -30. turn to 0 which in turn will force theano to compute\n", " log(0) and therefore we will get either -inf or NaN as\n", " cost. If the value is expressed in terms of softplus we do not\n", " get this undesirable behaviour. This optimization usually\n", " works fine, but here we have a special case. The sigmoid is\n", " applied inside the scan op, while the log is\n", " outside. Therefore Theano will only see log(scan(..)) instead\n", " of log(sigmoid(..)) and will not apply the wanted\n", " optimization. We can not go and replace the sigmoid in scan\n", " with something else also, because this only needs to be done\n", " on the last step. 
Therefore the easiest and more efficient way\n", " is to get also the pre-sigmoid activation as an output of\n", " scan, and apply both the log and sigmoid outside scan such\n", " that Theano can catch and optimize the expression.\n", "\n", " \"\"\"\n", "\n", " cross_entropy = T.mean(\n", " T.sum(\n", " self.input * T.log(T.nnet.sigmoid(pre_sigmoid_nv)) +\n", " (1 - self.input) * T.log(1 - T.nnet.sigmoid(pre_sigmoid_nv)),\n", " axis=1\n", " )\n", " )\n", "\n", " return cross_entropy\n" ] }, { "cell_type": "code", "execution_count": 22, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def test_rbm(learning_rate=0.1, training_epochs=15,\n", " dataset='mnist.pkl.gz', batch_size=20,\n", " n_chains=20, n_samples=10, output_folder='rbm_plots',\n", " n_hidden=500):\n", " \"\"\"\n", " Demonstrate how to train and afterwards sample from it using Theano.\n", "\n", " This is demonstrated on MNIST.\n", "\n", " :param learning_rate: learning rate used for training the RBM\n", "\n", " :param training_epochs: number of epochs used for training\n", "\n", " :param dataset: path to the pickled dataset\n", "\n", " :param batch_size: size of a batch used to train the RBM\n", "\n", " :param n_chains: number of parallel Gibbs chains to be used for sampling\n", "\n", " :param n_samples: number of samples to plot for each chain\n", "\n", " \"\"\"\n", " datasets = load_data(dataset)\n", "\n", " train_set_x, train_set_y = datasets[0]\n", " test_set_x, test_set_y = datasets[2]\n", "\n", " # compute number of minibatches for training, validation and testing\n", " n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size\n", "\n", " # allocate symbolic variables for the data\n", " index = T.lscalar() # index to a [mini]batch\n", " x = T.matrix('x') # the data is presented as rasterized images\n", "\n", " rng = numpy.random.RandomState(123)\n", " theano_rng = RandomStreams(rng.randint(2 ** 30))\n", "\n", " # initialize storage for the persistent chain (state = 
hidden\n", " # layer of chain)\n", " persistent_chain = theano.shared(numpy.zeros((batch_size, n_hidden),\n", " dtype=theano.config.floatX),\n", " borrow=True)\n", "\n", " # construct the RBM class\n", " rbm = RBM(input=x, n_visible=28 * 28,\n", " n_hidden=n_hidden, numpy_rng=rng, theano_rng=theano_rng)\n", "\n", " # get the cost and the gradient corresponding to one step of CD-15\n", " cost, updates = rbm.get_cost_updates(lr=learning_rate,\n", " persistent=persistent_chain, k=15)\n", "\n", " #################################\n", " # Training the RBM #\n", " #################################\n", " if not os.path.isdir(output_folder):\n", " os.makedirs(output_folder)\n", " os.chdir(output_folder)\n", "\n", " # start-snippet-5\n", " # it is ok for a theano function to have no output\n", " # the purpose of train_rbm is solely to update the RBM parameters\n", " train_rbm = theano.function(\n", " [index],\n", " cost,\n", " updates=updates,\n", " givens={\n", " x: train_set_x[index * batch_size: (index + 1) * batch_size]\n", " },\n", " name='train_rbm'\n", " )\n", "\n", " plotting_time = 0.\n", " start_time = timeit.default_timer()\n", "\n", " # go through training epochs\n", " for epoch in range(training_epochs):\n", "\n", " # go through the training set\n", " mean_cost = []\n", " for batch_index in range(n_train_batches):\n", " mean_cost += [train_rbm(batch_index)]\n", "\n", " print('Training epoch %d, cost is ' % epoch, numpy.mean(mean_cost))\n", "\n", " # Plot filters after each training epoch\n", " plotting_start = timeit.default_timer()\n", " # Construct image from the weight matrix\n", " image = Image.fromarray(\n", " tile_raster_images(\n", " X=rbm.W.get_value(borrow=True).T,\n", " img_shape=(28, 28),\n", " tile_shape=(10, 10),\n", " tile_spacing=(1, 1)\n", " )\n", " )\n", " image.save('filters_at_epoch_%i.png' % epoch)\n", " plotting_stop = timeit.default_timer()\n", " plotting_time += (plotting_stop - plotting_start)\n", "\n", " end_time = 
timeit.default_timer()\n", "\n", " pretraining_time = (end_time - start_time) - plotting_time\n", "\n", " print ('Training took %f minutes' % (pretraining_time / 60.))\n", " # end-snippet-5 start-snippet-6\n", " #################################\n", " # Sampling from the RBM #\n", " #################################\n", " # find out the number of test samples\n", " number_of_test_samples = test_set_x.get_value(borrow=True).shape[0]\n", "\n", " # pick random test examples, with which to initialize the persistent chain\n", " test_idx = rng.randint(number_of_test_samples - n_chains)\n", " persistent_vis_chain = theano.shared(\n", " numpy.asarray(\n", " test_set_x.get_value(borrow=True)[test_idx:test_idx + n_chains],\n", " dtype=theano.config.floatX\n", " )\n", " )\n", " # end-snippet-6 start-snippet-7\n", " plot_every = 1000\n", " # define one step of Gibbs sampling (mf = mean-field) define a\n", " # function that does `plot_every` steps before returning the\n", " # sample for plotting\n", " (\n", " [\n", " presig_hids,\n", " hid_mfs,\n", " hid_samples,\n", " presig_vis,\n", " vis_mfs,\n", " vis_samples\n", " ],\n", " updates\n", " ) = theano.scan(\n", " rbm.gibbs_vhv,\n", " outputs_info=[None, None, None, None, None, persistent_vis_chain],\n", " n_steps=plot_every,\n", " name=\"gibbs_vhv\"\n", " )\n", "\n", " # add to updates the shared variable that takes care of our persistent\n", " # chain :.\n", " updates.update({persistent_vis_chain: vis_samples[-1]})\n", " # construct the function that implements our persistent chain.\n", " # we generate the \"mean field\" activations for plotting and the actual\n", " # samples for reinitializing the state of our persistent chain\n", " sample_fn = theano.function(\n", " [],\n", " [\n", " vis_mfs[-1],\n", " vis_samples[-1]\n", " ],\n", " updates=updates,\n", " name='sample_fn'\n", " )\n", "\n", " # create a space to store the image for plotting ( we need to leave\n", " # room for the tile_spacing as well)\n", " image_data = 
numpy.zeros(\n", " (29 * n_samples + 1, 29 * n_chains - 1),\n", " dtype='uint8'\n", " )\n", " for idx in range(n_samples):\n", " # generate `plot_every` intermediate samples that we discard,\n", " # because successive samples in the chain are too correlated\n", " vis_mf, vis_sample = sample_fn()\n", " print(' ... plotting sample %d' % idx)\n", " image_data[29 * idx:29 * idx + 28, :] = tile_raster_images(\n", " X=vis_mf,\n", " img_shape=(28, 28),\n", " tile_shape=(1, n_chains),\n", " tile_spacing=(1, 1)\n", " )\n", "\n", " # construct image\n", " image = Image.fromarray(image_data)\n", " image.save('samples.png')\n", " # end-snippet-7\n", " os.chdir('../')" ] }, { "cell_type": "code", "execution_count": 23, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "... loading data\n", "Training epoch 0, cost is -90.590108121\n", "Training epoch 1, cost is -80.306465234\n", "Training epoch 2, cost is -74.3871721077\n", "Training epoch 3, cost is -72.2911275348\n", "Training epoch 4, cost is -68.7493255098\n", "Training epoch 5, cost is -63.3319654523\n", "Training epoch 6, cost is -65.6836096852\n", "Training epoch 7, cost is -67.5699326992\n", "Training epoch 8, cost is -68.1104919343\n", "Training epoch 9, cost is -64.2462694866\n", "Training epoch 10, cost is -61.3355699009\n", "Training epoch 11, cost is -62.0303931597\n", "Training epoch 12, cost is -64.0581166301\n", "Training epoch 13, cost is -63.4414152005\n", "Training epoch 14, cost is -62.7701948813\n", "Training took 71.232035 minutes\n", " ... plotting sample 0\n", " ... plotting sample 1\n", " ... plotting sample 2\n", " ... plotting sample 3\n", " ... plotting sample 4\n", " ... plotting sample 5\n", " ... plotting sample 6\n", " ... plotting sample 7\n", " ... plotting sample 8\n", " ... 
plotting sample 9\n" ] } ], "source": [ "test_rbm()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "![](rbm_plots/samples.png)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Basic about theano" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": true }, "outputs": [], "source": [ "from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "collapsed": true }, "outputs": [], "source": [ "import numpy" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "collapsed": true }, "outputs": [], "source": [ "numpy_rng = numpy.random.RandomState(1234)" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Help on class MRG_RandomStreams in module theano.sandbox.rng_mrg:\n", "\n", "class MRG_RandomStreams(__builtin__.object)\n", " | Module component with similar interface to numpy.random\n", " | (numpy.random.RandomState).\n", " | \n", " | Parameters\n", " | ----------\n", " | seed : int or list of 6 int\n", " | A default seed to initialize the random state.\n", " | If a single int is given, it will be replicated 6 times.\n", " | The first 3 values of the seed must all be less than M1 = 2147483647,\n", " | and not all 0; and the last 3 values must all be less than\n", " | M2 = 2147462579, and not all 0.\n", " | \n", " | Methods defined here:\n", " | \n", " | __init__(self, seed=12345, use_cuda=None)\n", " | \n", " | binomial(self, size=None, n=1, p=0.5, ndim=None, dtype='int64', nstreams=None)\n", " | \n", " | get_substream_rstates(self, n_streams, dtype, inc_rstate=True)\n", " | Initialize a matrix in which each row is a MRG stream state,\n", " | and they are spaced by 2**72 samples.\n", " | \n", " | inc_rstate(self)\n", " | Update self.rstate to be skipped 2^134 steps forward to the next stream\n", " | start.\n", " | \n", " | 
multinomial(self, size=None, n=1, pvals=None, ndim=None, dtype='int64', nstreams=None)\n", " | Sample `n` (`n` needs to be >= 1, default 1) times from a multinomial\n", " | distribution defined by probabilities pvals.\n", " | \n", " | Example : pvals = [[.98, .01, .01], [.01, .49, .50]] and n=1 will\n", " | probably result in [[1,0,0],[0,0,1]]. When setting n=2, this\n", " | will probably result in [[2,0,0],[0,1,1]].\n", " | \n", " | Notes\n", " | -----\n", " | -`size` and `ndim` are only there keep the same signature as other\n", " | uniform, binomial, normal, etc.\n", " | TODO : adapt multinomial to take that into account\n", " | \n", " | -Does not do any value checking on pvals, i.e. there is no\n", " | check that the elements are non-negative, less than 1, or\n", " | sum to 1. passing pvals = [[-2., 2.]] will result in\n", " | sampling [[0, 0]]\n", " | \n", " | multinomial_wo_replacement(self, size=None, n=1, pvals=None, ndim=None, dtype='int64', nstreams=None)\n", " | Sample `n` times *WITHOUT replacement* from a multinomial distribution\n", " | defined by probabilities pvals, and returns the indices of the sampled\n", " | elements.\n", " | `n` needs to be in [1, m], where m is the number of elements to select\n", " | from, i.e. m == pvals.shape[1]. By default n = 1.\n", " | \n", " | Example : pvals = [[.98, .01, .01], [.01, .49, .50]] and n=1 will\n", " | probably result in [[0],[2]]. When setting n=2, this\n", " | will probably result in [[0,1],[2,1]].\n", " | \n", " | Notes\n", " | -----\n", " | -`size` and `ndim` are only there keep the same signature as other\n", " | uniform, binomial, normal, etc.\n", " | TODO : adapt multinomial to take that into account\n", " | \n", " | -Does not do any value checking on pvals, i.e. there is no\n", " | check that the elements are non-negative, less than 1, or\n", " | sum to 1. 
passing pvals = [[-2., 2.]] will result in\n", " | sampling [[0, 0]]\n", " | \n", " | n_streams(self, size)\n", " | \n", " | normal(self, size, avg=0.0, std=1.0, ndim=None, dtype=None, nstreams=None)\n", " | Parameters\n", " | ----------\n", " | size\n", " | Can be a list of integers or Theano variables (ex: the shape\n", " | of another Theano Variable).\n", " | dtype\n", " | The output data type. If dtype is not specified, it will be\n", " | inferred from the dtype of low and high, but will be at\n", " | least as precise as floatX.\n", " | nstreams\n", " | Number of streams.\n", " | \n", " | pretty_return(self, node_rstate, new_rstate, sample, size, nstreams)\n", " | \n", " | seed(self, seed=None)\n", " | Re-initialize each random stream.\n", " | \n", " | Parameters\n", " | ----------\n", " | seed : None or integer in range 0 to 2**30\n", " | Each random stream will be assigned a unique state that depends\n", " | deterministically on this value.\n", " | \n", " | Returns\n", " | -------\n", " | None\n", " | \n", " | set_rstate(self, seed)\n", " | \n", " | uniform(self, size, low=0.0, high=1.0, ndim=None, dtype=None, nstreams=None)\n", " | Sample a tensor of given size whose element from a uniform\n", " | distribution between low and high.\n", " | \n", " | If the size argument is ambiguous on the number of dimensions,\n", " | ndim may be a plain integer to supplement the missing information.\n", " | \n", " | Parameters\n", " | ----------\n", " | low\n", " | Lower bound of the interval on which values are sampled.\n", " | If the ``dtype`` arg is provided, ``low`` will be cast into\n", " | dtype. This bound is excluded.\n", " | high\n", " | Higher bound of the interval on which values are sampled.\n", " | If the ``dtype`` arg is provided, ``high`` will be cast into\n", " | dtype. This bound is excluded.\n", " | size\n", " | Can be a list of integer or Theano variable (ex: the shape\n", " | of other Theano Variable).\n", " | dtype\n", " | The output data type. 
If dtype is not specified, it will be\n", " | inferred from the dtype of low and high, but will be at\n", " | least as precise as floatX.\n", " | \n", " | updates(self)\n", " | \n", " | ----------------------------------------------------------------------\n", " | Data descriptors defined here:\n", " | \n", " | __dict__\n", " | dictionary for instance variables (if defined)\n", " | \n", " | __weakref__\n", " | list of weak references to the object (if defined)\n", "\n" ] } ], "source": [ "help(RandomStreams)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "collapsed": false }, "outputs": [], "source": [ "theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))" ] }, { "cell_type": "code", "execution_count": 7, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "theano_rng" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "Elemwise{Cast{int64}}.0" ] }, "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "theano_rng.binomial(size=(100,10))# size is tuple of int or theano variables" ] }, { "cell_type": "code", "execution_count": 14, "metadata": { "collapsed": false }, "outputs": [], "source": [ "test = theano_rng.binomial(size=(100,10), p=0.4)" ] }, { "cell_type": "code", "execution_count": 15, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Elemwise{Cast{int64}}.0\n" ] } ], "source": [ "print(test)" ] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { "display_name": "Python [conda root]", "language": "python", "name": "conda-root-py" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 2 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", "version": 
"2.7.12" } }, "nbformat": 4, "nbformat_minor": 1 }