{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Linear Regression cost 최소화의 TensorFlow 구현"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Simplified hypothesis\n",
    "\n",
    "\\begin{equation}\n",
    "H(x) = Wx\n",
    "\\end{equation}\n",
    "\n",
    "\\begin{equation}\n",
    "Cost(W) = \\frac{1}{m} \\sum_{i=1}^{m}(Wx^{(i)}-y^{(i)})^{2}\n",
    "\\end{equation}"
   ]
  },
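  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Before bringing in TensorFlow, here is a minimal plain-Python sketch of what the next cell computes: it evaluates $Cost(W)$ by hand for a few values of $W$, using the same data `X = Y = [1, 2, 3]` as the cell below."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# A hand-rolled cost curve (no TensorFlow needed)\n",
    "X = [1, 2, 3]\n",
    "Y = [1, 2, 3]\n",
    "\n",
    "def cost(W):\n",
    "    # mean of the squared errors (W*x - y)^2 over the m data points\n",
    "    return sum((W * x - y) ** 2 for x, y in zip(X, Y)) / len(X)\n",
    "\n",
    "for W in [-1.0, 0.0, 1.0, 2.0]:\n",
    "    print(W, cost(W))  # the minimum (0.0) is at W = 1\n",
    "\n",
    "# The TensorFlow version below traces this same curve over W in [-3.0, 4.9]."
   ]
  },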
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXEAAAEACAYAAABF+UbAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XeYlOXVx/HvoahURVGwQrCANSoqWBIXISY2hCgIUYOC\nGgugwQKWCBqjoDEYC0rEvNh7VzSouFFRSgQUkCaCoBQrIoIK7Hn/uAezgYWd3Z2Ze57Z3+e69mJ2\nmWeew8Ie7jl3OebuiIhIMtWIHYCIiFSekriISIIpiYuIJJiSuIhIgimJi4gkmJK4iEiCpZXEzeyP\nZjbNzN43swfNbDMza25m48xslpk9bGa1sh2siIj8r3KTuJntAPQBDnT3/YBaQHdgCHCzu7cElgG9\nshmoiIhsKN1ySk2gXmq0XQdYBLQDnkz9/r1A58yHJyIim1JuEnf3RcDNwALgU+AbYBKwzN1LUk/7\nBNghW0GKiEjZ0imnbAWcCDQjJOp6wDFlPFX790VEciydycgOwEfu/hWAmT0NHAZsZWY1UqPxnQgl\nlg2YmZK7iEgluLuV95x0auILgLZmtoWZGdAemA68DnRJPacH8OwmAsn7j4EDB0aPoVDiTEKMilNx\n5vtHutKpiU8AngAmA+8BBvwDGAD0M7PZwNbAPWnfVUREMiKttd3ufg1wzXpfnge0yXhEIiKSNu3Y\nTCkqKoodQlqSEGcSYgTFmWmKMw6rSO2lUjcw82zfQ0Sk0JgZnqGJTRERyVNK4iIiCaYkLiKSYEri\nIiIJpiQuIpJgSuIiIgmmJC4ikmBK4iIiCaYkLiKSYEriIiIJlpMkPnlyLu4iIlL95CSJDx+ei7uI\niFQ/OTkAa6utnAULoEGDrN5KRKRg5NUBWEcdBQ8+mIs7iYhULzlJ4ueeC3fdBTqRVkQks3KSxNu3\nhxUrYPz4XNxNRKT6yEkSr1EjjMbvvDMXdxMRqT7Kndg0sz2ARwEnNEluAfwJuD/19WbAfKCru39T\nxvXu7nzxBey2G3z0EWy9dWb/ECIihSZjE5vuPtvdD3D3A4HWwHfA04Ru96+6e0tgDHD5pl6ncWM4\n/ngYOTKd8EVEqodvv4WOHWHNmspdX9FySgdgrrsvBE4E7k19/V6gU3kXn3deKKmUlFTwriIiBeqB\nB6B2bahVq3LXVzSJnwI8lHrcxN2XArj7EmDb8i4+7DCoVw9eeaWCdxURKUDucMcdcMEFlX+NtJO4\nmdUGOgKPr7t/RW9mFoK9446KXikiUnjeeCNUJtq1q/xrVGQAfwzwrrt/kfp8qZk1cfelZtYU+Gxj\nFw4aNOinx23aFDF2bBHz50Pz5hUPWESkUNxxB5x/fhjgFhcXU1xcXOHXSHvbvZk9DLzs7vemPh8C\nfOXuQ8ysP9DI3QeUcZ2vf48//hG22AJuuKHC8YqIFIRFi2CffWD+fGjYcMPfT3d1SlpJ3MzqAAuA\nFu7+beprWwOPATunfq+Luy8r49oNkvicOXDEEfDxxyGZi4hUN4MGwWefwbBhZf9+RpN4VZSVxAF+\n/Ws47TQ4/fSs3l5EJO+sXg3NmoVFHnvvXfZz8uoArLKcf74mOEWkenr6adhjj40n8IqIlsSPPx4W\nL4Z3340VgYhIHOsmNDMhWhKvWTOcp6LRuIhUJ9OmhXnBzp0z83rRauIAn38e3lLMmRO25YuIFLo/\n/AF23BGuvnrTz8v7ic11evYMiXzABosTRUQKy9dfQ4sWMHMmNGmy6efm/cTmOn36hJJKZQ9/ERFJ\ninvuCfOB5SXwioiexA84IOzcfOaZ2JGIiGTP2rVhwNq3b2ZfN3oSh/CHuvXW2FGIiGTPCy9A06Zw\n8MGZfd28SOKdOsG8eTBlSuxIRESy49ZbMz8KhzxJ4rVrh7PGb7stdiQiIpk3bRrMmAEnnZT5146+\nOmUdLTcUkUJ17rmwww7lLyssLTFLDEvr2RN23x0u32SjNxGR5KjIssLSErPEsLQ+fcKJXqtXx45E\nRCQzsrGssLS8SuIHHBD+x3ryydiRiIhU3Zo1Ya7voouyd4+8SuIQGkYMHRp6z4mIJNlTT4UjZ1u3\nzt498i6Jn3ACfPEFjBsXOxIRkaq55ZYwMM2mvEviNWuGtZRDh8aORESk8saPhyVLoGPH7N4nr1an\nrPPtt2Er/qRJ4a2IiEjSdOsGbdtWvh6eyCWGpV18cegA/de/ZiEoEZEsWrAgLNSYN6/sJsjpyHSj\n5C2BEcA+QAnQE5gNPAo0A+YDXd39mzKurVQSnz8/TAbMnw8NGlT4chGRaPr3hx9/rFpZONPrxP8O\njHL3PYGfAzOBAcCr7t4SGANkdItO8+bQrh2MHJnJVxURya4VK8La8Gyck1KWckfiZtYAmOLuu673\n9ZnAke6+1MyaAsXu3qqM6ys1EgcYOxZ69IBZs8KEp4hIvrvjDhgzpur7XTI5Em8BfGFm/2dmk8zs\nH2ZWF2ji7ksB3H0JsG3VQt7QYYfBNtvA889n+pVFRDJv7drcLCssrVaazzkQuMDd/2NmQwmllLSH\n14MGDfrpcVFREUVFRWldZwaXXBImNzt1SvduIiJxPPdcGHgefnjFry0uLqa4uLjC16VTTmkCvOPu\nLVKfH0FI4rsCRaXKKa+naubrX1/pcgqEbau77w4PPQSHHlrplxERybrDDw+j8JNPrvprZayckiqZ\nLDSzPVJfag9MB54Dzkh9rQfwbOVC3bRatcI35eabs/HqIiKZ8fbbsHgxdO6c2/umu8Tw54QlhrWB\nj4AzgZrAY8DOwAKgi7svK+PaKo3EIcz2Nm8edkDtumu5TxcRybmTTgor6nr3zszrJX6zz/quuCLs\n5FT3HxHJNx9+GMq98+dDvXqZec2CS+KLF8Pee4fOP9tsk4HAREQy5IILYKut4C9/ydxrFlwSh9D5\nZ9dd4corM/JyIiJV9uWXYfHF9Omw/faZe92CTOLTp0OHDuEty+abZ+QlRUSq5Lrrwhkp99yT2ddN\nZHu28uy9dzhU5v77Y0ciIgKrVsHtt0O/fvFiSFQSh3CwzE03hZ1RIiIxjRwJhxwSBpixJC6J//KX\n0KgRPJuVVekiIulZsybsJu/fP24ciUviZuGbNniw+nCKSDxPPBEmMiuzxT6TEpfEAU48EZYvh0oc\nMyAiUmXuMGRI/FE4JDSJ16gBl10WvokiIrn2yiuwejUcd1zsSBKaxAFOPRWmTYPJk2NHIiLVzeDB\nYSBZIw8yaB6EUDmbbx4akN54Y+xIRKQ6mTgxbLPv3j12JEGiNvusb/lyaNECJkwIv4qIZNvJJ8MR\nR1S+i326CnLHZlmuvBK++gruvDNrtxARAWDmTPjFL8IOzfr1s3uvapPEP/8cWrYM9fEddsjabURE\nOOOM8K7/6quzf69qk8QhvK2pWVONI0Qke+bPh9atQz28UaPs369aJfFPPoH99oPZs6Fx46zeSkSq\nqfPPh4YNw8qUXKhWSRzgnHOgS
RP485+zfisRqWYWL4a99go18SZNcnPPapfE586FNm3go4/C/5Yi\nIply6aXwww9w6625u2e1S+IAp50G++wDAwbk5HYiUg2sa/rw3nuw8865u29Gk7iZzQe+AUqA1e5+\niJk1Ah4FmgHzga7u/k0Z1+YsiU+fDu3bh9F43bo5uaWIFLiBA+HTT2HEiNzeN9NJ/COgtbt/Xepr\nQ4Av3f1GM+sPNHL3DcbAuUziAJ07h47Tffvm7JYiUqCWLw8tId9+O4zGcynTSXwecJC7f1nqazOB\nI919qZk1BYrdvVUZ1+Y0ib/7bjjl8MMPYYstcnZbESlAN9wAU6fCQw/l/t7ZGIl/BTgw3N1HmNnX\n7t6o1HO+dPcN+tDnOokDHH88HHtsWBIkIlIZK1aEjT3FxWFlSq6lm8Rrpfl6h7n7EjPbFhhtZrMI\nCT0tgwYN+ulxUVERRUVF6V5aKVdfHc436NVLDZVFpHKGDQul2Vwl8OLiYoor0SShwqtTzGwgsAI4\nCygqVU553d33LOP5OR+JAxxzDHTqBH/4Q85vLSIJ9913oRb+6qthxVsMGet2b2Z1zax+6nE94Ghg\nKvAccEbqaT2AvOp6efXVoZ7144+xIxGRpLnrrnDQVawEXhHljsTN7GfA04TySS3gQXcfbGZbA48B\nOwMLgC7uvqyM66OMxAGOPhq6doWzzopyexFJoJUrwyj8X/8Kx3nEUi03+6xv7Fg4/XSYNQtq144S\ngogkzC23wBtvwFNPxY1DSTylffuwk/PMM6OFICIJsWpVGIW/+CIccEDcWDJWE0+6gQPhuutCU1MR\nkU25+244+OD4CbwiCn4kDtChQ+iH16tX1DBEJI+tXAm77QajRsH++8eORuWU//H22/C734Xzxjfb\nLGooIpKn/vpXGDcOnngidiSBkvh6jjkmbMc/99zYkYhIvlmxItTCX3stf5YVKomvZ+JE+O1vYc4c\nnakiIv/rhhvg/ffh4YdjR/JfSuJl6Ngx1Md1wqGIrPPNN+GEwjffDE3X84WSeBmmTAkHY334oc4b\nF5Hg2mtDZ7B7740dyf9SEt+Ik0+GQw+Fiy+OHYmIxPb112EUPm5cWJmST5TEN2LatFBSmTMHGjSI\nHY2IxHTVVbBkSe679qRDSXwTTj0VWrWCP/0pdiQiEsvSpeGY2cmTYZddYkezISXxTZg7F9q0CWeq\nbLNBGwsRqQ769oUaNcJZKflISbwc550H9evDTTfFjkREcu3jj+HAA2HGDNhuu9jRlE1JvByLFsG+\n+4a1oTvuGDsaEcmlM8+EnXcOK1PylZJ4Gvr3h2XLYPjw2JGISK588AEUFYXFDVtuGTuajVMST8OX\nX4bF/e+8E5YZiUjhO+kkaNsWLr00diSbpiSepuuug+nT82u7rYhkx8SJ0LlzGIXXqRM7mk1TEk/T\nihVhFD5qVLLOEBaRinGHX/0KunRJRgN1NYVIU/36YcF///6xIxGRbBo9GhYuhJ49Y0eSWWkncTOr\nYWaTzOy51OfNzWycmc0ys4fNrFb2wsyuc86B+fPhlVdiRyIi2bB2bRioDR5ceP12KzISvxD4oNTn\nQ4Cb3b0lsAxIbN+c2rXh+uvhssugpCR2NCKSaQ8+CPXqQadOsSPJvLSSuJntBBwLlD5h4CjgydTj\ne4HOmQ0tt046CTbfHB56KHYkIpJJ338fjti48UawcivMyZPuSHwocCngAGa2DfC1u68bt34C7JD5\n8HLHLPwlX3VV+EsXkcJw221hd+bhh8eOJDvKrWOb2XHAUnefYmZF676c+ihto0tQBg0a9NPjoqIi\nioqKNvbUqH75S9hvP7jjDh1VK1IIvvoqDM7efDN2JOUrLi6muLi4wteVu8TQzK4HTgPWAHWABsAz\nwNFAU3cvMbO2wEB3P6aM6/N6ieH61u3mmjULGjWKHY2IVMUll4RlxHfdFTuSisvKOnEzOxK42N07\nmtmjwFPu/qiZ3Qm85+4bfKuSlsQhrFZp2DB0vxaRZJo3Dw46KPQQ2H772NFUXC6S+M+AR4BGwGTg\nNHdfXcY1iUviS5aEjtfjx4cO2CKSPF27hkPukto3QDs2q+j662HSJHjiidiRiEhFjR0L3bvDzJnJ\n7aerJF5Fq1aF7j/33x8mPEUkGUpKQh/dPn3gtNNiR1N52nZfRXXqhN1d/fppA5BIkjz8cPiZ/d3v\nYkeSG0rim9CtG9SqBQ88EDsSEUnHypVw+eUwdGhovVYdqJxSjnfeCRMkM2eGbbsikr/+8heYMgUe\nfzx2JFWnmngGdesW6uOl9iyJSJ5ZvDisKpswoTBWlSmJZ9C6pqqTJkGzZrGjEZGy9OgR1oMPHhw7\nksxQEs+wa64JHYAeeyx2JCKyvnfegZNPDmXPBg1iR5MZWp2SYZdeGt6mVeJoAxHJopIS6NsXhgwp\nnAReEUriaapbN2zD79sX1qyJHY2IrDNyZOgJcOqpsSOJQ+WUCnCHo44KPfrOPz92NCKybBnsuSc8\n/3w4J6WQqCaeJVOnQvv2MGMGbLNN7GhEqrd+/eDbb+Huu2NHknlK4lnUu3eoww0bFjsSkeprxoxw\nJMb06bDddrGjyTwl8Sz66ivYay948UVo3Tp2NCLVj3t4R9ypU5inKkRanZJFW28NN9wQ6uI6V0Uk\n9x55BL7+WnNToCReaT16hHNVRowo/7kikjnffBM69gwbFn4GqzuVU6rg/fehQ4dQk9t229jRiFQP\nF10UWq4V+gBKNfEc+eMfYflyuOee2JGIFL733oOjjw4Dp8aNY0eTXUriObJ8eVin+vjjcNhhsaMR\nKVwlJfCLX4RS5jnnxI4m+zSxmSPrGiqff752copk08iR4WfsrLNiR5Jfyk3iZra5mY03s8lmNtXM\nBqa+3tzMxpnZLDN72Myq7RRDt25hneott8SORKQwffZZaPZw113Vp9lDutIqp5hZXXdfaWY1gbHA\nhUA/4Al3f9zM7gSmuPvwMq4t6HLKOnPnQps2MHEi/OxnsaMRKSynnRaOmb3pptiR5E5GyynuvjL1\ncHOgFuBAO+DJ1NfvBTpXIs6CseuuYdnT+eeHjQgikhmvvBK616spS9nSSuJmVsPMJgNLgFeAucAy\nd1+31eUTYIfshJgcF18Mn34Kjz4aOxKRwrByJZx7blgTrvaIZUurjp1K1geYWUPgaWDPsp62sesH\nlfovtKioiKKiogoFmRS1a4eDeDp1Csugtt46dkQiyXbttXDIIXDMMbEjyb7i4mKKK9GwoMJLDM3s\namAlcBnQ1N1LzKwtMNDdN/hWV5eaeGm9e8MPPxTmyWoiubJuM93UqdCkSexoci9jNXEza2xmW6Ye\n1wE6AB8ArwNdUk/rATxb+XALy/XXw8svqwuQSGWtWQNnnx2611fHBF4R6dTEtwdeN7MpwHjgX+4+\nChgA9DOz2cDWgPYspjRsCHfeGdazfvdd7GhEkueWW6B+fa0JT4d2bGbR6aeHM1X+9rfYkY
gkx+zZ\nYffzhAnQokXsaOLRtvs88OWXsO++8OSTcOihsaMRyX8lJXDkkaEFYqGeE54ubbvPA9tsA7fdBj17\nwvffx45GJP8NGxb2WfTuHTuS5NBIPAe6dIHddw8TniJStnnz4OCDw8aeli1jRxOfyil5ZOlS2G+/\n0M6t0Dpyi2RCSUnYW3H00XDZZbGjyQ8qp+SRJk3CbHuPHiqriJTlzjtDo4d+/WJHkjwaieeIO5xy\nCuyySzi6VkSCOXPCxL/KKP9L5ZQ89MUX8POfw0MPhRl4kepuzZrQ6KF7d61GWZ/KKXmocWMYPhzO\nPBO+/TZ2NCLx3XQT1Kmj1ShVoZF4BL16Qc2a8I9/xI5EJJ733gtno/znP9CsWexo8o9G4nls6FAY\nPTqsVhGpjn74AX7/+zASVwKvGo3EI/n3v0MdcPJkHfAj1c/FF8NHH8FTT4GVO9asnjSxmQBXXBHe\nUr7wgv4hS/UxenTYxfzee2FXs5RN5ZQEuOYa+PxzuP322JGI5MYXX4SJ/XvvVQLPFI3EI1u3Rra4\nGPbZJ3Y0ItnjHrpetWwJN94YO5r8p5F4Quy+e/gH3b27dnNKYRs+HBYuhOuuix1JYdFIPA+4Q9eu\nsP32cOutsaMRybwPPggb3N58E1q1ih1NMmgkniBmYc3488/D00/HjkYks777LpzkOWSIEng2aCSe\nR8aPhxNOCL/+7GexoxHJjJ49YfVquO8+rcKqCI3EE6hNGxgwIByU9eOPsaMRqbr774e33w6nFCqB\nZ0e5I3Ez2wm4D2gKrAXudvdbzawR8CjQDJgPdHX3b8q4XiPxCnCHE0+EXXcNOztFkmrmzHC41Wuv\nhfP0pWIyORJfA/Rz972AQ4ELzKwVodv9q+7eEhgDXF6VgCUwg5EjQ2382WdjRyNSOStXhjr49dcr\ngWdbhWviZvYMcHvq40h3X2pmTYFid99g2kIj8cp5550wIn/nnTAqF0kK91AH//FHeOABlVEqKys1\ncTNrDuwPjAOauPtSAHdfAmxb8TBlYw49FP70JzjppDCqEUmKf/wDJk4M68KVwLOvVrpPNLP6wBPA\nhe6+wszSHl4PGjTop8dFRUUUFRVVIMTqq3fvsFLlD3/QzL4kw/jxYfDx1ltQv37saJKluLiY4uLi\nCl+XVjnFzGoBLwAvufvfU1+bARSVKqe87u57lnGtyilVsHJlGJWfcw5ccEHsaEQ27rPPQiPw224L\npUCpmkyXU/4JfLAugac8B5yRetwD0DRcFtStG47rvPbasFRLJB+tWROWxp5+uhJ4rqWzxPBw4A1g\nKuCpjyuACcBjwM7AAqCLuy8r43qNxDPgxRdDWWXCBNhhh9jRiPyvSy6B99+Hl14KXauk6nSeeAG6\n7rqwNb+4OPQlFMkH994Lf/5zqIfreNnMURIvQO7htMPatTXRKfnhnXegY8fQqWqvvWJHU1i07b4A\nmcE//wkzZoTehCIxLVwIJ58cNqcpgceT9hJDyQ9168Izz4RzVvbaC44/PnZEUh2tXBkaPFx0ERx3\nXOxoqjeVUxJq3LjwNnbMGHUEktwqKYFu3WCLLUI9XGW97FA5pcC1bQt//3sYBS1aFDsaqU6uuAIW\nLw47M5XA41M5JcG6d4d580JJ5Y03tENOsm/48HA429tvh5G4xKdySsK5h92cixaFUw9r6b9lyZJR\no6BXr9BibbfdYkdT+FROqSbMYNiwsGOuT5+Q1EUybfJkOOOMsHtYCTy/KIkXgNq14fHHw1vcwYNj\nRyOFZv780DZw2LBwjo/kF735LhANG4a3u0ccAdtuC2edFTsiKQRLl8LRR0P//mFNuOQfJfECsuOO\nMHo0HHkkbL01/Pa3sSOSJPvmGzjmmDCB3qdP7GhkYzSxWYAmTYLf/AYefRTatYsdjSTR99+Hf0N7\n7w23366lhDHo7JRq7vXXw9GgL70ErVvHjkaSZM2a0B9z883hwQd1KmEsWp1SzbVrFzZjHHccTJ0a\nOxpJirVrwyqUVavCIWtK4PlPNfEC1qlTeFv861/Da6/Bnhv0XRL5r5KSMCG+eDG88AJstlnsiCQd\nSuIFrls3WL0afvWrcM7KHnvEjkjykTucdx7MnRtKcDqvPjmUxKuB00+HH3+EDh1CQ4kWLWJHJPnE\nHfr2DZ15Ro+GevViRyQVoSReTfTqFUbk7dqF0op23QmEEspFF4XmDq++Cg0axI5IKqrciU0zu8fM\nlprZ+6W+1sjMRpvZLDP7l5ltmd0wJRPOPReuvBKKikJjCaneSkrCv4mJE0MC32qr2BFJZaSzOuX/\ngF+v97UBwKvu3hIYA1ye6cAkO845B264Adq3h/feix2NxLJmTViFMmtWKKEogSdXWuvEzawZ8Ly7\n75f6fCZwpLsvNbOmQLG7t9rItVonnoceeyzswnvxRTjooNjRSC6tXg2nngrLloUuUXXrxo5IypLu\nOvHK1sS3c/elAO6+xMy2reTrSCRdu4bNHMceq52d1cnKleHvvkYNeO45nQleCLTZpxo78cSQwE85\nBZ54InY0km1ffhnKaI0bw5NPKoEXisqOxJeaWZNS5ZTPNvXkQYMG/fS4qKiIoqKiSt5WMq1du1AT\nPf74cGLdBRfEjkiy4eOPw1konTrB9dfrLJR8VFxcTHFxcYWvS7cm3pxQE9839fkQ4Ct3H2Jm/YFG\n7j5gI9eqJp4A8+aFnZ1dusB11+mHvJBMnRrKZpdcAhdeGDsaSVfGDsAys4eAImAbYCkwEHgGeBzY\nGVgAdHH3ZRu5Xkk8IT7/PIzId9sN7rlHb7cLwUsvQY8ecOutYfeuJIdOMZRKWbUqLD1bsCCsXGjS\nJHZEUhnuIXEPGRLmOw47LHZEUlE6xVAqpU4deOSRUFpp0yZsxZZkWb06nIMyYkRo2acEXtiUxGUD\nZjBoUOjX2aEDPP107IgkXZ9/HiYwP/kExo6F5s1jRyTZpiQuG9WtWziS9KKLYMCAsMtP8teECWHj\n1sEHw7PPhr6rUvhUE5dyffFF6LO4dm0otWy3XeyIpDR3uOsuGDgwNALp1Cl2RJIJqolLxjRuDC+/\nHGqrrVuHOqvkhxUrwuqTYcNC+UQJvPpREpe01KwZ1o/feSf89rdwzTUqr8T2n//AgQeGLfTjxsHu\nu8eOSGJQOUUqbNEi+P3vQ+u3Bx7Q5FmulZTAX/8aPm67LRybIIVH5RTJmh12CFv1O3WCQw4JiVz/\nT+fGwoWh1d7zz4dzwJXARUlcKqVGjbCN++WXw4aSjh3h009jR1W4SkrC5OWBB8JRR4U2e82axY5K\n8oGSuFTJgQfCu++GCc/99w8bTDQqz6wPPwyJe+RI+Pe/Q3emmjVjRyX5QklcqmyzzcLmoDFjYPjw\nsEFI7d+q7ocfwructm3DO52xY2GvvWJHJflGSVwyZt99Q8PdE06AX/4ylFuWL48dVTK99FL4fr71\nFowfD/36afQtZVMSl4yqVSvs8Jw2Db76Clq1g
vvuCzVdKd/cuaFZR9++MHRomMDcddfYUUk+UxKX\nrGjSBP75T3jqKbjjjlA7HzVK9fKNWbw4NOQ45JBQPpk2DY47LnZUkgRK4pJVbduGjSgDB4byypFH\nhtquBMuWwRVXwD77hJ6nM2fC5ZeHxyLpUBKXrDODzp1Dh5levUKn9Q4d4NVXq+/IfMmSkKx32w0+\n+wymTIG//Q22VctxqSAlccmZmjXDOR+zZ8Ppp4dWYQcdBI89Fg7Xqg4+/BDOPTesMvn227BhZ8QI\n2Hnn2JFJUmnbvURTUhKOuh0yJJx/ffbZ0LNn2BFaSNasCfMBd90Vjos97zzo00enQcqmqT2bJMrk\nyWGN+aOPho0tvXqF7eW1a8eOrPLmzg1HEowYATvtFEbgXbuG7kki5clJEjez3wC3EMoy97j7kDKe\noyQuafv2W3jwwbAscfbscGLiKadAUVEy1kkvXBjKQ488Ah9/HJL22WfDz38eOzJJmqwfgGVmNYDb\ngV8DewPdzaxVZV8vtuLi4tghpCUJcVYlxgYNwoj17bfDdv7dd4fLLoPtt4fTToP774elS+PHuc7q\n1fDmm3DVVaG+v//+Ybfq9deH0x5vv73qCTwJf+egOGOpysTmIcAcd//Y3VcDjwAnZias3EvKX2wS\n4sxUjM2TilhAAAAE7UlEQVSawaWXhmQ+cWLYBfr009CyZVh33rt3KFd8+GHlVrlUJs7ly+G11+Av\nfwk7U7fdNkzQlpTAzTeH9d4jRoRSUK1aFY8pU3HGoDjjqMo/sx2BhaU+/4SQ2EUyrlkzOOec8LF6\ndZggHDcu9JK8/HJYtSqstW7ZMuwSbdUKdtkljOAbNQrLHNP1/fdhCeCiRTBnTli7PWtWGGEvXBhG\n223bhpU2d98NTZtm788tUp6qJPGyfixU/Jasq10bDj88fKyzaFFIsjNnho9Ro8KKlyVLYOXKsIO0\nUSPYYov/fnz0EbzxRkja338P330XSjWrVoXnN20ayjmtWoUeoy1bwp57JnuyVQpPpSc2zawtMMjd\nf5P6fADg609umpkSu4hIJWR1dYqZ1QRmAe2BxcAEoLu76xBSEZEcqXQ5xd3XmllvYDT/XWKoBC4i\nkkNZ3+wjIiLZk5OzU8zsWjN7z8wmm9nLZpZ38/lmdqOZzTCzKWb2pJk1jB1TWczsZDObZmZrzezA\n2PGsz8x+Y2YzzWy2mfWPHU9ZzOweM1tqZu/HjmVTzGwnMxtjZh+Y2VQz6xs7prKY2eZmNj718z3V\nzAbGjmljzKyGmU0ys+dix7IxZja/VL6cUO7zczESN7P67r4i9bgPsJe7n5f1G1eAmXUAxrh7iZkN\nJkzSXh47rvWZWUugBBgOXOLukyKH9JPUBrDZhHmSRcBEoJu7z4wa2HrM7AhgBXCfu+8XO56NSQ12\nmrr7FDOrD7wLnJhv308AM6vr7itTc2Vjgb7uXm4CyjUz+yPQGmjo7h1jx1MWM/sIaO3uX6fz/JyM\nxNcl8JR6hCSUV9z9VXdfF9c4YKeY8WyMu89y9zmUvcQztkRsAHP3t4C0fkBicvcl7j4l9XgFMIOw\nPyPvuPvK1MPNCXNteVenNbOdgGOBEbFjKYdRgdycs6Nozew6M1sA/A64Olf3raSewEuxg0igsjaA\n5WXSSRozaw7sD4yPG0nZUmWKycAS4BV3nxg7pjIMBS4lD/+DWY8D/zKziWZ2dnlPzlgSN7NXzOz9\nUh9TU7+eAODuV7n7LsCDQJ9M3TeTMaaecyWw2t0fihFjunHmKW0Ay4JUKeUJ4ML13tXmDXcvcfcD\nCO9g25jZXrFjKs3MjgOWpt7ZGPn5Tnadw9z9IMK7hgtS5b+NytDpDuDuv0rzqQ8DLwKDMnXvdJUX\no5n1IHzjjspNRGWrwPcy33wC7FLq850ItXGpJDOrRUjg97v7s7HjKY+7LzezYuA3wAeRwyntcKCj\nmR0L1AEamNl97v77yHFtwN2XpH793MyeJpQp39rY83O1OmW3Up+eSKjt5ZXUsbqXAR3d/YfY8aQp\n30YTE4HdzKyZmW0GdAPydRVAvo/G1vkn8IG7/z12IBtjZo3NbMvU4zpAByCvJl/d/Qp338XdWxD+\nXY7JxwRuZnVT77wws3rA0cC0TV2Tq5r44FQ5YArhL/jCHN23Im4D6gOvpJYgDYsdUFnMrJOZLQTa\nAi+YWd7U7t19LbBuA9h04JF83ABmZg8BbwN7mNkCMzszdkxlMbPDgVOBo1LLzSalBhv5Znvg9dTP\n93jgX+4+KnJMSdUEeCs1vzAOeN7dR2/qAm32ERFJMDVKFhFJMCVxEZEEUxIXEUkwJXERkQRTEhcR\nSTAlcRGRBFMSFxFJMCVxEZEE+38PBkrcEpPemgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x10dfbf1d0>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "%matplotlib inline\n",
    "# Lab 3 Minimizing Cost\n",
    "import tensorflow as tf\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "X = [1, 2, 3]\n",
    "Y = [1, 2, 3]\n",
    "\n",
    "W = tf.placeholder(tf.float32)\n",
    "\n",
    "# Our hypothesis for linear model X * W\n",
    "hypothesis = X * W\n",
    "\n",
    "# cost/loss function\n",
    "cost = tf.reduce_mean(tf.square(hypothesis - Y))\n",
    "\n",
    "# Launch the graph in a session.\n",
    "sess = tf.Session()\n",
    "# Initializes global variables in the graph.\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "# Variables for plotting cost function\n",
    "# cost 함수를 그리기 위한 변수\n",
    "W_history = []\n",
    "cost_history = []\n",
    "\n",
    "for i in range(-30, 50):\n",
    "    curr_W = i * 0.1\n",
    "    curr_cost = sess.run(cost, feed_dict={W: curr_W})\n",
    "    W_history.append(curr_W)\n",
    "    cost_history.append(curr_cost)\n",
    "\n",
    "# Show the cost function\n",
    "plt.plot(W_history, cost_history)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Gradient Descent\n",
    "\n",
    "\\begin{equation}\n",
    "Cost(W) = \\frac{1}{m} \\sum_{i=1}^{m}(Wx^{(i)}-y^{(i)})^{2}\n",
    "\\end{equation}\n",
    "\n",
    "\\begin{equation}\n",
    "W := W - \\alpha \\frac{1}{m} \\sum_{i=1}^{m} (Wx^{(i)} - y^{(i)})x^{(i)}\n",
    "\\end{equation}\n",
    "\n",
    "위 수식은 다음처럼 코딩할 수 있음."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# Minimize: Gradient Descent using derivative: W -= learning_rate * derivative\n",
    "learning_rate = 0.1\n",
    "gradient = tf.reduce_mean((W * X - Y) * X)\n",
    "descent = W - learning_rate * gradient\n",
    "update = W.assign(descent)"
   ]
  },
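  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a hand check of a single `update` step (assuming $W = -3$, learning rate $0.1$, and the data $x = y = (1, 2, 3)$):\n",
    "\n",
    "\\begin{equation}\n",
    "\\text{gradient} = \\frac{1}{3}\\left((-3 \\cdot 1 - 1) \\cdot 1 + (-3 \\cdot 2 - 2) \\cdot 2 + (-3 \\cdot 3 - 3) \\cdot 3\\right) = -\\frac{56}{3} \\approx -18.67\n",
    "\\end{equation}\n",
    "\n",
    "\\begin{equation}\n",
    "W \\leftarrow -3 - 0.1 \\cdot (-18.67) \\approx -1.13\n",
    "\\end{equation}\n",
    "\n",
    "Repeating the update moves $W$ toward the minimum at $W = 1$."
   ]
  },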
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 0.000196199 [ 1.00648403]\n",
      "1 5.58083e-05 [ 1.00345814]\n",
      "2 1.58727e-05 [ 1.00184429]\n",
      "3 4.51458e-06 [ 1.0009836]\n",
      "4 1.28461e-06 [ 1.00052464]\n",
      "5 3.65236e-07 [ 1.00027978]\n",
      "6 1.03953e-07 [ 1.00014925]\n",
      "7 2.95924e-08 [ 1.00007963]\n",
      "8 8.40479e-09 [ 1.00004244]\n",
      "9 2.39406e-09 [ 1.00002265]\n",
      "10 6.79378e-10 [ 1.00001204]\n",
      "11 1.93381e-10 [ 1.00000644]\n",
      "12 5.66018e-11 [ 1.00000346]\n",
      "13 1.44998e-11 [ 1.00000179]\n",
      "14 4.24431e-12 [ 1.00000095]\n",
      "15 1.06108e-12 [ 1.00000048]\n",
      "16 2.65269e-13 [ 1.00000024]\n",
      "17 9.9476e-14 [ 1.00000012]\n",
      "18 0.0 [ 1.]\n",
      "19 0.0 [ 1.]\n",
      "20 0.0 [ 1.]\n"
     ]
    }
   ],
   "source": [
    "# Lab 3 Minimizing Cost\n",
    "import tensorflow as tf\n",
    "tf.set_random_seed(777)  # for reproducibility\n",
    "\n",
    "x_data = [1, 2, 3]\n",
    "y_data = [1, 2, 3]\n",
    "\n",
    "# Try to find values for W and b to compute y_data = W * x_data + b\n",
    "# We know that W should be 1 and b should be 0\n",
    "# But let's use TensorFlow to figure it out\n",
    "W = tf.Variable(tf.random_normal([1]), name='weight')\n",
    "\n",
    "X = tf.placeholder(tf.float32)\n",
    "Y = tf.placeholder(tf.float32)\n",
    "\n",
    "# Our hypothesis for linear model X * W\n",
    "hypothesis = X * W\n",
    "\n",
    "# cost/loss function\n",
    "cost = tf.reduce_mean(tf.square(hypothesis - Y))\n",
    "\n",
    "# Minimize: Gradient Descent using derivative: W -= learning_rate * derivative\n",
    "learning_rate = 0.1\n",
    "gradient = tf.reduce_mean((W * X - Y) * X)\n",
    "descent = W - learning_rate * gradient\n",
    "update = W.assign(descent)\n",
    "\n",
    "# Launch the graph in a session.\n",
    "sess = tf.Session()\n",
    "# Initializes global variables in the graph.\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "for step in range(21):\n",
    "    sess.run(update, feed_dict={X: x_data, Y: y_data})\n",
    "    print(step, sess.run(cost, feed_dict={X: x_data, Y: y_data}), sess.run(W))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Gradient Descent를 다음처럼 간단하게 할 수 있음."
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# Minimize: Gradient Descent Magic\n",
    "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)\n",
    "train = optimizer.minimize(cost)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "W = 5.0을 초기값을 주었을 때 \n",
    "\n",
    "cost가 최소가 되는 W를 찾는 프로그램"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 -3.0\n",
      "1 0.733334\n",
      "2 0.982222\n",
      "3 0.998815\n",
      "4 0.999921\n",
      "5 0.999995\n",
      "6 1.0\n",
      "7 1.0\n",
      "8 1.0\n",
      "9 1.0\n"
     ]
    }
   ],
   "source": [
    "# Lab 3 Minimizing Cost\n",
    "import tensorflow as tf\n",
    "\n",
    "# tf Graph Input\n",
    "X = [1, 2, 3]\n",
    "Y = [1, 2, 3]\n",
    "\n",
    "# Set wrong model weights\n",
    "W = tf.Variable(5.0)\n",
    "\n",
    "# Linear model\n",
    "hypothesis = X * W\n",
    "\n",
    "# cost/loss function\n",
    "cost = tf.reduce_mean(tf.square(hypothesis - Y))\n",
    "\n",
    "# Minimize: Gradient Descent Magic\n",
    "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)\n",
    "train = optimizer.minimize(cost)\n",
    "\n",
    "# Launch the graph in a session.\n",
    "sess = tf.Session()\n",
    "# Initializes global variables in the graph.\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "#for step in range(100):\n",
    "for step in range(10):\n",
    "    print(step, sess.run(W))\n",
    "    sess.run(train)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "W = 5.0을 초기값을 주었을 때\n",
    "\n",
    "cost가 최소가 되는 W를 찾는 프로그램"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 -3.0\n",
      "1 0.733334\n",
      "2 0.982222\n",
      "3 0.998815\n",
      "4 0.999921\n",
      "5 0.999995\n",
      "6 1.0\n",
      "7 1.0\n",
      "8 1.0\n",
      "9 1.0\n"
     ]
    }
   ],
   "source": [
    "# Lab 3 Minimizing Cost\n",
    "import tensorflow as tf\n",
    "\n",
    "# tf Graph Input\n",
    "X = [1, 2, 3]\n",
    "Y = [1, 2, 3]\n",
    "\n",
    "# Set wrong model weights\n",
    "W = tf.Variable(-3.0)\n",
    "\n",
    "# Linear model\n",
    "hypothesis = X * W\n",
    "\n",
    "# cost/loss function\n",
    "cost = tf.reduce_mean(tf.square(hypothesis - Y))\n",
    "\n",
    "# Minimize: Gradient Descent Magic\n",
    "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)\n",
    "train = optimizer.minimize(cost)\n",
    "\n",
    "# Launch the graph in a session.\n",
    "sess = tf.Session()\n",
    "# Initializes global variables in the graph.\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "#for step in range(100):\n",
    "for step in range(10):\n",
    "    print(step, sess.run(W))\n",
    "    sess.run(train)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "수동으로 구한 gradient와 텐서플로 optimizer에서 구한 gvs(gradient)가 같은지 확인."
   ]
  },
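  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For reference, the `optimizer.minimize(cost)` call used above is simply the composition of the two explicit steps used below, so both paths should apply the same updates:"
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "# minimize() = compute_gradients() followed by apply_gradients()\n",
    "gvs = optimizer.compute_gradients(cost, [W])\n",
    "train = optimizer.apply_gradients(gvs)"
   ]
  },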
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 [37.333332, 5.0, [(37.333336, 5.0)]]\n",
      "1 [33.848888, 4.6266665, [(33.848888, 4.6266665)]]\n",
      "2 [30.689657, 4.2881775, [(30.689657, 4.2881775)]]\n",
      "3 [27.825287, 3.9812808, [(27.825287, 3.9812808)]]\n",
      "4 [25.228262, 3.703028, [(25.228264, 3.703028)]]\n",
      "5 [22.873621, 3.4507453, [(22.873623, 3.4507453)]]\n",
      "6 [20.738752, 3.2220092, [(20.738752, 3.2220092)]]\n",
      "7 [18.803137, 3.0146217, [(18.803137, 3.0146217)]]\n",
      "8 [17.048176, 2.8265903, [(17.048176, 2.8265903)]]\n",
      "9 [15.457013, 2.6561086, [(15.457014, 2.6561086)]]\n",
      "10 [14.014359, 2.5015385, [(14.01436, 2.5015385)]]\n",
      "11 [12.706352, 2.3613949, [(12.706352, 2.3613949)]]\n",
      "12 [11.520427, 2.2343314, [(11.520427, 2.2343314)]]\n",
      "13 [10.445186, 2.119127, [(10.445186, 2.119127)]]\n",
      "14 [9.4703016, 2.0146751, [(9.4703016, 2.0146751)]]\n",
      "15 [8.5864067, 1.9199722, [(8.5864067, 1.9199722)]]\n",
      "16 [7.7850089, 1.8341081, [(7.7850089, 1.8341081)]]\n",
      "17 [7.0584083, 1.756258, [(7.0584083, 1.756258)]]\n",
      "18 [6.3996239, 1.685674, [(6.3996239, 1.685674)]]\n",
      "19 [5.8023257, 1.6216778, [(5.8023257, 1.6216778)]]\n",
      "20 [5.260776, 1.5636545, [(5.260776, 1.5636545)]]\n",
      "21 [4.7697697, 1.5110468, [(4.7697697, 1.5110468)]]\n",
      "22 [4.3245912, 1.4633491, [(4.3245912, 1.4633491)]]\n",
      "23 [3.9209633, 1.4201032, [(3.9209635, 1.4201032)]]\n",
      "24 [3.5550067, 1.3808936, [(3.5550067, 1.3808936)]]\n",
      "25 [3.2232056, 1.3453435, [(3.2232056, 1.3453435)]]\n",
      "26 [2.9223735, 1.3131114, [(2.9223738, 1.3131114)]]\n",
      "27 [2.6496189, 1.2838877, [(2.6496186, 1.2838877)]]\n",
      "28 [2.4023216, 1.2573916, [(2.4023218, 1.2573916)]]\n",
      "29 [2.1781051, 1.2333684, [(2.1781051, 1.2333684)]]\n",
      "30 [1.9748148, 1.2115873, [(1.9748147, 1.2115873)]]\n",
      "31 [1.7904993, 1.1918392, [(1.7904994, 1.1918392)]]\n",
      "32 [1.623386, 1.1739342, [(1.6233861, 1.1739342)]]\n",
      "33 [1.4718695, 1.1577003, [(1.4718695, 1.1577003)]]\n",
      "34 [1.3344955, 1.1429816, [(1.3344957, 1.1429816)]]\n",
      "35 [1.2099417, 1.1296366, [(1.2099419, 1.1296366)]]\n",
      "36 [1.0970144, 1.1175373, [(1.0970144, 1.1175373)]]\n",
      "37 [0.9946267, 1.1065671, [(0.9946267, 1.1065671)]]\n",
      "38 [0.90179497, 1.0966209, [(0.90179503, 1.0966209)]]\n",
      "39 [0.81762749, 1.087603, [(0.81762755, 1.087603)]]\n",
      "40 [0.74131513, 1.0794266, [(0.74131513, 1.0794266)]]\n",
      "41 [0.67212623, 1.0720135, [(0.67212629, 1.0720135)]]\n",
      "42 [0.60939401, 1.0652922, [(0.60939401, 1.0652922)]]\n",
      "43 [0.55251688, 1.0591983, [(0.55251688, 1.0591983)]]\n",
      "44 [0.50094914, 1.0536731, [(0.50094914, 1.0536731)]]\n",
      "45 [0.45419374, 1.0486636, [(0.45419377, 1.0486636)]]\n",
      "46 [0.41180158, 1.0441216, [(0.41180158, 1.0441216)]]\n",
      "47 [0.37336722, 1.0400037, [(0.37336725, 1.0400037)]]\n",
      "48 [0.33851996, 1.03627, [(0.33851999, 1.03627)]]\n",
      "49 [0.30692515, 1.0328848, [(0.30692515, 1.0328848)]]\n",
      "50 [0.27827826, 1.0298156, [(0.27827829, 1.0298156)]]\n",
      "51 [0.25230527, 1.0270327, [(0.25230527, 1.0270327)]]\n",
      "52 [0.2287569, 1.0245097, [(0.2287569, 1.0245097)]]\n",
      "53 [0.20740573, 1.022222, [(0.20740573, 1.022222)]]\n",
      "54 [0.18804836, 1.020148, [(0.18804836, 1.020148)]]\n",
      "55 [0.17049654, 1.0182675, [(0.17049655, 1.0182675)]]\n",
      "56 [0.15458433, 1.0165626, [(0.15458435, 1.0165626)]]\n",
      "57 [0.14015675, 1.0150168, [(0.14015675, 1.0150168)]]\n",
      "58 [0.12707591, 1.0136153, [(0.12707591, 1.0136153)]]\n",
      "59 [0.11521538, 1.0123445, [(0.11521538, 1.0123445)]]\n",
      "60 [0.10446167, 1.0111923, [(0.10446167, 1.0111923)]]\n",
      "61 [0.094712019, 1.0101477, [(0.094712019, 1.0101477)]]\n",
      "62 [0.085872017, 1.0092006, [(0.085872017, 1.0092006)]]\n",
      "63 [0.077858053, 1.0083419, [(0.077858053, 1.0083419)]]\n",
      "64 [0.070591293, 1.0075634, [(0.070591293, 1.0075634)]]\n",
      "65 [0.064002357, 1.0068574, [(0.064002357, 1.0068574)]]\n",
      "66 [0.05802846, 1.0062174, [(0.05802846, 1.0062174)]]\n",
      "67 [0.052612226, 1.005637, [(0.052612226, 1.005637)]]\n",
      "68 [0.047702473, 1.005111, [(0.047702473, 1.005111)]]\n",
      "69 [0.043249767, 1.0046339, [(0.043249767, 1.0046339)]]\n",
      "70 [0.039213181, 1.0042014, [(0.039213181, 1.0042014)]]\n",
      "71 [0.035553534, 1.0038093, [(0.035553537, 1.0038093)]]\n",
      "72 [0.032236177, 1.0034539, [(0.032236181, 1.0034539)]]\n",
      "73 [0.029227654, 1.0031315, [(0.029227655, 1.0031315)]]\n",
      "74 [0.02649951, 1.0028392, [(0.02649951, 1.0028392)]]\n",
      "75 [0.024025917, 1.0025742, [(0.024025917, 1.0025742)]]\n",
      "76 [0.021783749, 1.002334, [(0.021783751, 1.002334)]]\n",
      "77 [0.01975123, 1.0021162, [(0.019751232, 1.0021162)]]\n",
      "78 [0.017907381, 1.0019187, [(0.017907381, 1.0019187)]]\n",
      "79 [0.016236702, 1.0017396, [(0.016236704, 1.0017396)]]\n",
      "80 [0.014720838, 1.0015773, [(0.014720838, 1.0015773)]]\n",
      "81 [0.01334699, 1.00143, [(0.013346991, 1.00143)]]\n",
      "82 [0.012100856, 1.0012965, [(0.012100856, 1.0012965)]]\n",
      "83 [0.010971785, 1.0011755, [(0.010971785, 1.0011755)]]\n",
      "84 [0.0099481745, 1.0010659, [(0.0099481754, 1.0010659)]]\n",
      "85 [0.009018898, 1.0009663, [(0.009018898, 1.0009663)]]\n",
      "86 [0.0081768828, 1.0008761, [(0.0081768837, 1.0008761)]]\n",
      "87 [0.0074131489, 1.0007943, [(0.0074131489, 1.0007943)]]\n",
      "88 [0.0067215762, 1.0007201, [(0.0067215762, 1.0007201)]]\n",
      "89 [0.0060940585, 1.0006529, [(0.0060940585, 1.0006529)]]\n",
      "90 [0.0055252709, 1.000592, [(0.0055252714, 1.000592)]]\n",
      "91 [0.0050098896, 1.0005368, [(0.0050098896, 1.0005368)]]\n",
      "92 [0.0045425892, 1.0004867, [(0.0045425892, 1.0004867)]]\n",
      "93 [0.0041189194, 1.0004413, [(0.0041189194, 1.0004413)]]\n",
      "94 [0.0037339528, 1.0004001, [(0.003733953, 1.0004001)]]\n",
      "95 [0.0033854644, 1.0003628, [(0.0033854644, 1.0003628)]]\n",
      "96 [0.0030694802, 1.0003289, [(0.0030694804, 1.0003289)]]\n",
      "97 [0.0027837753, 1.0002983, [(0.0027837753, 1.0002983)]]\n",
      "98 [0.0025234222, 1.0002704, [(0.0025234222, 1.0002704)]]\n",
      "99 [0.0022875469, 1.0002451, [(0.0022875469, 1.0002451)]]\n"
     ]
    }
   ],
   "source": [
    "# Lab 3 Minimizing Cost\n",
    "# This is optional\n",
    "import tensorflow as tf\n",
    "\n",
    "# tf Graph Input\n",
    "X = [1, 2, 3]\n",
    "Y = [1, 2, 3]\n",
    "\n",
    "# Set wrong model weights\n",
    "W = tf.Variable(5.)\n",
    "\n",
    "# Linear model\n",
    "hypothesis = X * W\n",
    "\n",
    "# Manual gradient\n",
    "gradient = tf.reduce_mean((W * X - Y) * X) * 2\n",
    "\n",
    "# cost/loss function\n",
    "cost = tf.reduce_mean(tf.square(hypothesis - Y))\n",
    "\n",
    "# Minimize: Gradient Descent Magic\n",
    "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)\n",
    "train = optimizer.minimize(cost)\n",
    "\n",
    "# Get gradients\n",
    "gvs = optimizer.compute_gradients(cost, [W])\n",
    "# Optional: modify gradient if necessary\n",
    "# gvs = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in gvs]\n",
    "# Apply gradients\n",
    "apply_gradients = optimizer.apply_gradients(gvs)\n",
    "\n",
    "# Launch the graph in a session.\n",
    "sess = tf.Session()\n",
    "# Initializes global variables in the graph.\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "for step in range(100):\n",
    "    print(step, sess.run([gradient, W, gvs]))\n",
    "    sess.run(apply_gradients)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [Root]",
   "language": "python",
   "name": "Python [Root]"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}