{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/mathlab115/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", " from ._conv import register_converters as _register_converters\n", "Using TensorFlow backend.\n" ] } ], "source": [ "%matplotlib inline\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from time import time\n", "\n", "from keras.models import Model, Sequential\n", "from keras.optimizers import Adam\n", "import keras.backend as K\n", "from keras.utils.generic_utils import Progbar\n", "\n", "from model import *" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Limit GPU memory usage: allow_growth makes TensorFlow allocate GPU memory on demand\n", "import tensorflow as tf\n", "config = tf.ConfigProto()\n", "config.gpu_options.allow_growth=True\n", "sess = tf.Session(config=config)\n", "K.set_session(sess)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "collapsed": true }, "outputs": [], "source": [ "from keras.datasets import cifar100, cifar10\n", "\n", "(x_train, y_train), (x_test, y_test) = cifar10.load_data()" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(60000, 32, 32, 3)" ] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ "X = np.concatenate((x_test,x_train))\n", "X.shape" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 5, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHOdJREFUeJztnWmsZVd15//rTm+uqleDy1V24bKNCW3RxKCSRRSCSFAi\nN0ICvlihEXIkROVDghop/cGipYb+RkcNER8ipCJYcSLCoADCSqxugZvIQkkTCjyVKeOx7Kryq8E1\nveHO96x8uNed8qv9X+++6b6y9/8nleq+vc4+Z599zrrD/p+1lrk7hBD5UdrqAQghtgY5vxCZIucX\nIlPk/EJkipxfiEyR8wuRKXJ+ITJFzi9Epsj5hciUyno6m9k9AL4KoAzgr9z9S9H245NTPr1jNm30\nHu/oRbK5ZLxLOXhbK9lan2pMH9CCcWBTHqDkO+UPbAYDCcfIjRacODPFfaKJ5BTBU6o9cu/0iqBP\nYIuOFc5jcGrG7qtod+TmX7iyhGa9OdRErtn5zawM4C8B/D6AUwB+bmYPufuvWJ/pHbP4yGf+NGnz\n1gIfpNeT7ZNVPr7t4/zNZLyyNo8slcqkPbihyc0HALbGd4YiuDm73W66Ty94cy3WNsZqlb/DVqvp\nuRqr8FuuVuYXtFvi46j3OtS20Gok2+ebbdrn0hKfq0azRW0eXJfoHimX03NSMz6/tVot2f79B/+R\n9rlmTENveS13A3je3V909zaAbwP46Dr2J4QYIetx/psAnLzq71ODNiHEm4BNX/Azs8NmdtTMjjbr\nS5t9OCHEkKzH+U8DOHDV3zcP2t6Aux9x90Pufmh8cmodhxNCbCTrcf6fA7jDzG41sxqAPwTw0MYM\nSwix2ax5td/du2b2pwD+D/pS3wPu/nTUx7yDSu9M0lYbS69SA0CtlF7NnRpLrygDwERljNq8y1e3\nu9GqeG/1K+m9Ll9VLgp+ztGKfiSJFWzlPtpfIDqU+RSjXQSr/UW6Y5PMIQBYma/a9wLxqhmt9jfT\nStFCvUn71Jf4hLRb/FhAoJoEK/dVstpfrnD1w5kQGFzn5axL53f3hwE8vJ59CCG2Bj3hJ0SmyPmF\nyBQ5vxCZIucXIlPk/EJkyrpW+1eLoUC1l5ZYSoHs5UjLZS3eBT0iDwJAEal5gRTFArp6kdTX4YEg\nLFoRAKzE35dLgc3JPq0ItLIgUq0An48egoksp/dplUA7rPLbkc8U0AkiQlvd9H3QbkXXjB/Lg2tN\nZVYA7oGsW0rvs0TaAcA6aZsHY7hm/0NvKYR4SyHnFyJT5PxCZIqcX4hMkfMLkSkjXe33wtFdSi+l\ndoLVULP0ymalzFewS+VglT0gCtJha+LhCmsUUBOkyFrruzIN+IhSz/WCQJZAGmk7D1pystoP56v9\nHqzaR+pHFMpSkICaciW4d6pRWrbgWMF9EClC7D7oBspTQVSdMMfgMvTJL0SmyPmFyBQ5vxCZIucX\nIlPk/EJkipxfiEwZqdRXFI5GI11BpYhkHkvLGuUKf+8qB/W6oqpQkdTHZJSwGkugDVlUFooEpABx\nDj8W9FOOij9FFYACFbMT7JOVyYrmtxScVzj+SN5iZcM8uD/43lAKrOF1IXn6AB6MFZYUI6FOq6kB\npU9+ITJFzi9Epsj5hcgUOb8QmSLnFyJT5PxCZMq6pD4zOwFgAUAPQNfdD0Xbu/fQ6i0kbUWUWI+8\nRZXBI8TKPX5qpeA9LyrXxaK2IomqFEh9URSbBzJPOaihxWTHmvH5KAcRc1HOvUBNhRNjcChYiR8r\nkmejuSp5umM52mEg3Xow991uVGKNH44FVUZpF0tsHJFOuYyN0Pl/191f24D9CCFGiL72C5Ep63V+\nB/BjM/uFmR3eiAEJIUbDer/2v9/dT5vZDQB+ZGbPuPujV28weFM4DACTUxPrPJwQYqNY1ye/u58e\n/H8OwA8A3J3Y5oi7H3L3Q2PjtfUcTgixgazZ+c1sysxmXn8N4
A8AHNuogQkhNpf1fO3fC+AHg0im\nCoC/c/f/HXUo4OiQ0lsGnnDTiebRK/g3iUqYyJDrId2gH6uuVY7ydwbVqbqBbGQlfm4WJNwst9PR\ngN3gSlerUYRbUFKMJekEMD6Wbq8Ex4rkNyOS3aAnN5F+RS8orRWccyWQATu2RhmQtAfKJ6q1arpP\npCkuY83O7+4vAvjNtfYXQmwtkvqEyBQ5vxCZIucXIlPk/EJkipxfiEwZaQLPvojCItkiiS2teXjB\nh98Joul6pPYfAHgQdsaUrYnxcX6sEtG8ALSN95tv8ASejXqd2pjEWSUSKwBMjXN5aHomLSkBwEyN\nn9sYqYVXC+Z+PKpdGMiikXRbkIi/TsHr4HVqQX1CBPXzAsm3FEUsIj3HURLaGpH6ogjNa8Y09JZC\niLcUcn4hMkXOL0SmyPmFyBQ5vxCZMtLVfgfPV9bt8VVl75FV8YLnByiqfLXcq0HJqGDleJzks9u9\nYyftM7/I31/PXuAr8BfrfIyXlqIgqGayveJ8lXqylu4DALva09R2oDrLx0HrZPHrUi3x+QhSEMIC\nZYctwXuwNN8N9hcFfnWDnIxRbj0W0BTlfyyvIlcfQ5/8QmSKnF+ITJHzC5Epcn4hMkXOL0SmyPmF\nyJQRS32Gjqdz07XAg0Quz6clFG9y+WpqG39fm6rw/Hiz27ZRW6WZlqJumN5D+4x1ufxzYuEVatu9\nbQe1dXr8vOttMldBbrcl47JXsbRIbZWLfJ+7ZqbSx0KD9rlcXqK2sTEuBVuJS2IFkd86wXXpeiA7\nB6XBeoF8GJVmY4peNVAwxzrpezEaw3L0yS9Epsj5hcgUOb8QmSLnFyJT5PxCZIqcX4hMWVHqM7MH\nAHwEwDl3f9egbSeA7wA4COAEgHvd/dJK+3KU0cL2pK2Y3EX7LV5eSLZfvsQPOXGZR77deus+anvH\nXdfUGv3/nHr218n2yUm+v9tvuZHaGgXP4ffM3Blqu2GWR9O9ei49V90Kv9TdQCprFlyiOn1xntrq\nrbQcObuTR2JWJrgtSuFXOI8ULEiNNQty3Vk3kPqCqL4iKPNVBLn/jOQurAShgA0SHhtUcruGYT75\n/xrAPcva7gfwiLvfAeCRwd9CiDcRKzq/uz8K4OKy5o8CeHDw+kEAH9vgcQkhNpm1/ubf6+5zg9dn\n0K/YK4R4E7HuBT/v/wiivzTM7LCZHTWzo50m/x0uhBgta3X+s2a2DwAG/59jG7r7EXc/5O6Hqqxo\nuxBi5KzV+R8CcN/g9X0AfrgxwxFCjIphpL5vAfgggN1mdgrAFwB8CcB3zezTAF4GcO9QB6tOYuf+\ndydtnUkeGVeaTctNu/deoX2KBW5rs7pbAF68yOWa1tjuZPtLPBgNc0tz1Ha+ziWqF07SL1O49V3/\ngdqqkyQKj0heADA+lo7AA1ZIFBmUvGqTUlM2mZ5DAJjeMUltnR6PBux2efmybid9cYogoSmCCMgo\nOq9X8ASkvEwdl/q6HpSOI9pnsYpyXSs6v7t/gpg+NPRRhBDXHXrCT4hMkfMLkSlyfiEyRc4vRKbI\n+YXIlJEm8LRSBdWpdJSbTXMJaMd0OspqbA+X7KqBTLLU5LLRJQ8kpam0vHJpKR1JBwBL5y9Q29xp\nLue1Z26gtvLuA9R2+01vS7Zb8HDlZJUnLTUi2QFAN5DLSqSu4USVR8yNBTJVUfAT6AYyYIfIgM0O\nl4LrveWhLP9Oo8113XZgs6BGITvtbpm7p5XScmQRyJTXHHfoLYUQbynk/EJkipxfiEyR8wuRKXJ+\nITJFzi9EpoxU6mu32zj90otJW2n8JO03sT0tezWMR6OVekHCyi6XXSZneGTZOEsyOrmT9pnaxhN4\nzt5yB7VNb5vhtmk+xiqLWCyCS11w+a3d43PV6nCpjyWS7AWJOFvgUXFe8FwQJeOJUGuevjY151Lq\nNuP1CZfaXFZcXOQSYf0Sv
7+79XQi2mKcS9mlKonSDBKMXrOPobcUQrylkPMLkSlyfiEyRc4vRKbI\n+YXIlJGu9neaDZx69njSVg9WSve8Lb0qXpm5mfYpI8gUXPB8dkuTfCW9MlZLH2uMr5bXxrltfCJY\nwQ7GaG2+Kt5lq/MVPo7p7bxM1ljBA0XGWnyMVRLAMzPNz7lS5upBqxmoDg0+H42ldEBQo8H3V5vY\nQW0zMzzXZHmM2wx8ji+fTStg3XkeFFYqp8/Le/yaXLOPobcUQrylkPMLkSlyfiEyRc4vRKbI+YXI\nFDm/EJkyTLmuBwB8BMA5d3/XoO2LAD4D4Pxgs8+7+8Mr7ssdNRJUM7uTB8dcPJfOdVda5O9dlRoP\njHFw+WphYZ7arJI+HpMAAWBinNvGqnz6q0GdrJkpHsjSWEznE/QgF9+Bgwepbf4izzN44cwpatt/\nQzon4zvufi/tc/CW/dTW7fKAlcVFXq7ruWdfSLeffJX26QSBWjbF8x1Wp6apbWYPD+KqjM8m21tn\n0hIgALQb6TJwJRtevR/mk/+vAdyTaP8Ld79r8G9FxxdCXF+s6Pzu/igA/gSOEOJNyXp+83/WzJ40\nswfMLP29RQhx3bJW5/8agNsA3AVgDsCX2YZmdtjMjprZ0V6HP4YphBgta3J+dz/r7j13LwB8HcDd\nwbZH3P2Qux8qV/nilxBitKzJ+c1s31V/fhzAsY0ZjhBiVAwj9X0LwAcB7DazUwC+AOCDZnYXAAdw\nAsAfD3Owshm2V9PvN+++8x2032O/Skser148n2wHgBpP74dGm0d0db1HbeVaOlJtPIgE7I7zKLZ2\nmSe0KxmPzuoE+yyX0pJYpcaP9dKxx6jthWOPU1v9Mp//F8fJ8c5x+ertn/zP1Hbz/puorTvNI+Ym\nu81kuzV4ibUXXj5LbZcCebO2i+cFnNzDczneeOOtyXaf5tLh+TPpe7FcGf7b9YrO7+6fSDR/Y+gj\nCCGuS/SEnxCZIucXIlPk/EJkipxfiEyR8wuRKSNN4FkyYIooQDuDSLW929OSx8lTvARSOYjMqhqP\nmOt1udTXXkpLhO0GL+FUD+S8ahBpVzYexbYQRANum0lrnDtmuAR04pknqK11+TVqO7ifS1tLV9LJ\nJ//5J/+X9tlLxg4An/yjP6I2q3Lpc//+vcn27TNcRrv9xpep7fjzXKo8EURA1pr8/t5+Y3oeF3pc\nQl4gcnVP5bqEECsh5xciU+T8QmSKnF+ITJHzC5Epcn4hMmWkUl+5ZNi+LS15FB0ul5VL6RpuBYnY\nAvp1ARlj41xSmpriddqqJFFnUM4O9UAGbDX4+FvtdC02AGgs8Yi0ZnMxPY4LPJFK/TxPZvkbv8Gj\nLXfN8rma83RUYqvO5+ORR39Kbf/x7t+itgO3v53a6qR23XiQW+Id73wntR28/TZqe/UKT/760jyf\n//PNtO3px47SPnOvPJNsbwf31HL0yS9Epsj5hcgUOb8QmSLnFyJT5PxCZMpIV/utZDSXXBHkrGs0\n0ivYjfpl3qfDT809KJM1wYNEJmfSJcBmd/Egot07+Yp4bYIHbtQqPCBofp6f94XX0nn1LsylyzsB\nwGSFyxW33fI2ausFMSS79uxLtjfaaeUGAE7N8fx4/+8JniN2Yj8f45lLl5Lt5vxzb8bS+fEAYJLl\nJgTQAp+QRouf99PHnk22//pff0773LwrPcYzCuwRQqyEnF+ITJHzC5Epcn4hMkXOL0SmyPmFyJRh\nynUdAPA3APaiX57riLt/1cx2AvgOgIPol+y6193TusqAoudYWkoHMbzy8hnab+FSOmCi1uNBIr32\naWprd7js0rgcyDVn05LY4kmen21s5x5qm957M7Xt2sNtszfw0lXb9t6ebH/VeYmyymUeRNQFn6uZ\nILBn983p3HmvzfOcgMWrfO6fev45apt98SVqe20+fY8UfDowHciAaKZzEwLA/CVevuzU+Tq
1vXAy\nPSd7pniewffcmQ4+OvUElweXM8wnfxfAn7n7nQDeB+BPzOxOAPcDeMTd7wDwyOBvIcSbhBWd393n\n3P2Xg9cLAI4DuAnARwE8ONjsQQAf26xBCiE2nlX95jezgwDeA+BnAPa6++uPjZ1B/2eBEOJNwtDO\nb2bTAL4H4HPu/oYf4e7uQPrZRjM7bGZHzexoO0hQIYQYLUM5v5lV0Xf8b7r79wfNZ81s38C+D0Cy\nYoG7H3H3Q+5+qFbjz80LIUbLis5vZgbgGwCOu/tXrjI9BOC+wev7APxw44cnhNgshonq+20AnwLw\nlJk9Pmj7PIAvAfiumX0awMsA7h3ukOn3m4sXr/BBltKRVO98+620T6fDtZzFxXSUIAAsBbZGM50f\nrdXkCufiaR6Bt3D2FWq7MDVLbTO791Pbrn0Hk+3e4nn/KkH5r/EJLmPOL/Bz27E7PX5WTgwAxko8\nuvDkSyeo7djxdFQcADSKdK6+5iK/P8Z63DZV4vLy7BTPC/jaGV7K6+wraVl69x030j433pheYqsG\n13I5K27p7j8FwK7Kh4Y+khDiukJP+AmRKXJ+ITJFzi9Epsj5hcgUOb8QmTLSBJ6FO9hTfv3HCdJM\nTqYTXVaqPNGiUYEC2LmTJ9ys13n0FbNFfZoLvIRTPZIVLwUy4AVuu/LyE8n2YHqxe5bLihPBg1lz\nZ3gkZqWWLgEW3XDlXo/vL/ic2jt7A7Ut9dLjP9cO5NkgOm+py6+1Vfj9WNvO77mp7WkZdmnxIu1z\n/mxaHuwGEvdy9MkvRKbI+YXIFDm/EJki5xciU+T8QmSKnF+ITBmp1OfuYbQdo1xZ/TCrgQxYq/Ho\nq1KJvx+OjaVlo6kpHqnWnUrX9wOAZn2J2hZJfcK+jfdbaqZtnTaX0S6+xm2L8zwacO8enpz0zNmz\nyfZyoDlaEUh9gVRZC2Td8cl0EsyxPfz+uDLO74F6nffrGU9AOrNtG7Xt2J4eY9FcfU3GbpcnXF2O\nPvmFyBQ5vxCZIucXIlPk/EJkipxfiEwZ6Wo/4CiKIm1xvlLK+nSDleNIVagE6sFabYxSmQfG2AQv\nx1TpBME2HZ5HbqaVtrUb6fyDAFCf58rCE489Tm2/88EPUNvkLWkF5MlfHKV95pcCheNMOlAIAB7+\n+29T2/jk7mR7tcZzE6LGVQcPype58371Kzwg6Mq59Mr92/ZyFanort6PlqNPfiEyRc4vRKbI+YXI\nFDm/EJki5xciU+T8QmTKitqVmR0A8Dfol+B2AEfc/atm9kUAnwHwuk7xeXd/ONyZc9kugvXpBTnf\nogCHcjld/gsAxse5BMSkvnI5kAdrwftrldtKFW6r1nhwycRYWh7qjHF5cCqQvS5d5Hnk/ukn/0Rt\n09vSAU3PP/cc7dMLpqrT4sFMZ175NbWV/aVkuxWBtFzl907Bbx1YcB+MG++4cyod9LN9ggcDNZfa\nyXYPzms5wwjXXQB/5u6/NLMZAL8wsx8NbH/h7v9r6KMJIa4bhqnVNwdgbvB6wcyOA7hpswcmhNhc\nVvWb38wOAngPgJ8Nmj5rZk+a2QNmxh9JE0Jcdwzt/GY2DeB7AD7n7vMAvgbgNgB3of/N4Muk32Ez\nO2pmRzvd9O8UIcToGcr5zayKvuN/092/DwDuftbde+5eAPg6gLtTfd39iLsfcvdD1QrPoCOEGC0r\nOr/1S+l8A8Bxd//KVe37rtrs4wCObfzwhBCbxTCr/b8N4FMAnjKz10O8Pg/gE2Z2F/ry3wkAf7zS\njhwADToKopFYKa9yicsnXub7iyKfGg0uiTGJkOX2A4BqIPVVg5xv1UBSisScguSza3X5DkuRvBnk\nQmw0eaTgAilFNjHNIxmnZrm0VQQ5/Cy4ntVOWg4uWumycQDQLvNr1otyPE5MUNvsJI/Qm6mm+5VK\n3D0b9fRP6GIjpT53/ymQvKNiTV8IcV2jJ/yEyBQ5vxC
ZIucXIlPk/EJkipxfiEwZaQJPA1Au0u83\na4n2izSvciDJFIHEFsmAzNYKZKOoelKgVKISGEsWXLZSeoyVsaBPIG2VgsjJShBdyKaxF1y0bpAA\n00qB1hclrayR+2qCS28Twf3RixLNBrZOm9sWyJOvpQ5PaFoj8nLhw/uRPvmFyBQ5vxCZIucXIlPk\n/EJkipxfiEyR8wuRKaOt1eeg4VneW73EFtb3K3HJw4kcthIsujAaRy84ry6ptwYA7aAmXKXK+zGJ\nsxQkl6wGtkjqW4s8G0ll3WJtx4oSuTIZsxQkT40oB1FzHshs0fhb5B7xDk9a2mylIyqLaC6WoU9+\nITJFzi9Epsj5hcgUOb8QmSLnFyJT5PxCZMpopT5wOSSKRmL1xyL5JNxfFAUWwCSlqPYfUQcBAMUm\nSFtsLJUKP+dymUfnRecW2RjRDVcOJVMufUZ1Gdm1ZrLtSrZY3lz9fAA8KLHTXr10uJo7W5/8QmSK\nnF+ITJHzC5Epcn4hMkXOL0SmrLjab2bjAB4FMDbY/u/d/QtmthPAdwAcRL9c173ufmnNIwmWKdnK\nZrjaH+Rhi3K+rSWHX0S02r+6tdl/J1rdZrZOJwgUCgqohqXIglJeJXLiRbTKHsxHOcozGOVrXEtu\nyIiobFhkDOj10mOM5pfdi2zek9sOsU0LwO+5+2+iX477HjN7H4D7ATzi7ncAeGTwtxDiTcKKzu99\nXk8jWh38cwAfBfDgoP1BAB/blBEKITaFoX7zm1l5UKH3HIAfufvPAOx197nBJmcA7N2kMQohNoGh\nnN/de+5+F4CbAdxtZu9aZneQH7BmdtjMjprZ0Q7JTy6EGD2rWu1398sAfgLgHgBnzWwfAAz+P0f6\nHHH3Q+5+qBosLAkhRsuKzm9me8xsx+D1BIDfB/AMgIcA3DfY7D4AP9ysQQohNp5hAnv2AXjQzMro\nv1l8193/wcz+BcB3zezTAF4GcO8wB2RqWSTJMPkqlOUC2SiU7ELTWqS5KMBobTLUWgJqul0eDNTt\npvPBAXEQUa3Gv8kxmapcCfIFVoIAqUDOiwJx1kKYG3KN0mHUzyw9x05K2/X3R8a4irlY0fnd/UkA\n70m0XwDwoaGPJIS4rtATfkJkipxfiEyR8wuRKXJ+ITJFzi9Eptha89mt6WBm59GXBQFgN4DXRnZw\njsbxRjSON/JmG8ct7r5nmB2O1PnfcGCzo+5+aEsOrnFoHBqHvvYLkStyfiEyZSud/8gWHvtqNI43\nonG8kbfsOLbsN78QYmvR134hMmVLnN/M7jGzX5vZ82a2Zbn/zOyEmT1lZo+b2dERHvcBMztnZseu\nattpZj8ys+cG/89u0Ti+aGanB3PyuJl9eATjOGBmPzGzX5nZ02b2XwbtI52TYBwjnRMzGzezfzWz\nJwbj+B+D9o2dD3cf6T/0C5q9AOA2ADUATwC4c9TjGIzlBIDdW3DcDwB4L4BjV7X9OYD7B6/vB/A/\nt2gcXwTwX0c8H/sAvHfwegbAswDuHPWcBOMY6ZygnyN4evC6CuBnAN630fOxFZ/8dwN43t1fdPc2\ngG+jnww0G9z9UQAXlzWPPCEqGcfIcfc5d//l4PUCgOMAbsKI5yQYx0jxPpueNHcrnP8mACev+vsU\ntmCCBziAH5vZL8zs8BaN4XWup4SonzWzJwc/Czb958fVmNlB9PNHbGmS2GXjAEY8J6NImpv7gt/7\nvZ+Y9D8B+BMz+8BWDwiIE6KOgK+h/5PsLgBzAL48qgOb2TSA7wH4nLvPX20b5ZwkxjHyOfF1JM0d\nlq1w/tMADlz1982DtpHj7qcH/58D8AP0f5JsFUMlRN1s3P3s4MYrAHwdI5oTM6ui73DfdPfvD5pH\nPiepcWzVnAyOveqkucOyFc7/cwB3mNmtZlYD8IfoJwMdKWY2ZWYzr78G8AcAjsW9NpXrIiHq6zfX\ngI9jBHNi/SR83wB
w3N2/cpVppHPCxjHqORlZ0txRrWAuW838MPorqS8A+G9bNIbb0FcangDw9CjH\nAeBb6H997KC/5vFpALvQL3v2HIAfA9i5ReP4WwBPAXhycLPtG8E43o/+V9gnATw++PfhUc9JMI6R\nzgmAdwN4bHC8YwD++6B9Q+dDT/gJkSm5L/gJkS1yfiEyRc4vRKbI+YXIFDm/EJki5xciU+T8QmSK\nnF+ITPk3aAKCHnWhsY0AAAAASUVORK5CYII=\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "plt.imshow(X[9487])" ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "collapsed": true }, "outputs": [], "source": [ "#Hyperperemeter\n", "BATCHSIZE=64\n", "LEARNING_RATE = 0.0002\n", "TRAINING_RATIO = 1\n", "BETA_1 = 0.0\n", "BETA_2 = 0.9\n", "EPOCHS = 500\n", "BN_MIMENTUM = 0.9\n", "BN_EPSILON = 0.00002\n", "SAVE_DIR = 'img/generated_img_CIFAR10_ResNet/'\n", "\n", "GENERATE_ROW_NUM = 8\n", "GENERATE_BATCHSIZE = GENERATE_ROW_NUM*GENERATE_ROW_NUM" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Generator\n", "_________________________________________________________________\n", "Layer (type) Output Shape Param # \n", "=================================================================\n", "input_1 (InputLayer) (None, 128) 0 \n", "_________________________________________________________________\n", "dense_1 (Dense) (None, 4096) 528384 \n", "_________________________________________________________________\n", "reshape_1 (Reshape) (None, 4, 4, 256) 0 \n", "_________________________________________________________________\n", "Generator_resblock_1 (Model) (None, 8, 8, 256) 1248000 \n", "_________________________________________________________________\n", "Generator_resblock_2 (Model) (None, 16, 16, 256) 1248000 \n", "_________________________________________________________________\n", "Generator_resblock_3 (Model) (None, 32, 32, 256) 1248000 \n", "_________________________________________________________________\n", "batch_normalization_7 (Batch (None, 32, 32, 256) 1024 \n", 
"_________________________________________________________________\n", "activation_7 (Activation) (None, 32, 32, 256) 0 \n", "_________________________________________________________________\n", "conv2d_10 (Conv2D) (None, 32, 32, 3) 6915 \n", "=================================================================\n", "Total params: 4,280,323\n", "Trainable params: 4,276,739\n", "Non-trainable params: 3,584\n", "_________________________________________________________________\n", "Discriminator\n", "Spectral Normalization: True\n", "_________________________________________________________________\n", "Layer (type) Output Shape Param # \n", "=================================================================\n", "input_5 (InputLayer) (None, 32, 32, 3) 0 \n", "_________________________________________________________________\n", "Discriminator_resblock_Down_ (None, 16, 16, 128) 151680 \n", "_________________________________________________________________\n", "Discriminator_resblock_Down_ (None, 8, 8, 128) 311680 \n", "_________________________________________________________________\n", "Discriminator_resblock_1 (Mo (None, 8, 8, 128) 311680 \n", "_________________________________________________________________\n", "Discriminator_resblock_2 (Mo (None, 8, 8, 128) 311680 \n", "_________________________________________________________________\n", "activation_16 (Activation) (None, 8, 8, 128) 0 \n", "_________________________________________________________________\n", "global_average_pooling2d_1 ( (None, 128) 0 \n", "_________________________________________________________________\n", "dense_sn_1 (DenseSN) (None, 1) 129 \n", "=================================================================\n", "Total params: 1,086,849\n", "Trainable params: 1,086,849\n", "Non-trainable params: 0\n", "_________________________________________________________________\n", "model_for_training_generator\n", "_________________________________________________________________\n", "Layer (type) 
Output Shape Param # \n", "=================================================================\n", "input_10 (InputLayer) (None, 128) 0 \n", "_________________________________________________________________\n", "Generator (Model) (None, 32, 32, 3) 4280323 \n", "_________________________________________________________________\n", "Discriminator (Model) (None, 1) 1086849 \n", "=================================================================\n", "Total params: 5,367,172\n", "Trainable params: 4,276,739\n", "Non-trainable params: 1,090,433\n", "_________________________________________________________________\n" ] } ], "source": [ "def wasserstein_loss(y_true, y_pred):\n", " return K.mean(y_true*y_pred)\n", "\n", "generator = BuildGenerator(bn_momentum=BN_MIMENTUM, bn_epsilon=BN_EPSILON)\n", "discriminator = BuildDiscriminator()\n", "\n", "Noise_input_for_training_generator = Input(shape=(128,))\n", "Generated_image = generator(Noise_input_for_training_generator)\n", "Discriminator_output = discriminator(Generated_image)\n", "model_for_training_generator = Model(Noise_input_for_training_generator, Discriminator_output)\n", "print(\"model_for_training_generator\")\n", "discriminator.trainable = False\n", "model_for_training_generator.summary()\n", "model_for_training_generator.compile(optimizer=Adam(LEARNING_RATE, beta_1=BETA_1, beta_2=BETA_2), loss=wasserstein_loss)" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "model_for_training_discriminator\n", "____________________________________________________________________________________________________\n", "Layer (type) Output Shape Param # Connected to \n", "====================================================================================================\n", "input_12 (InputLayer) (None, 128) 0 \n", "____________________________________________________________________________________________________\n", "input_11 (InputLayer) 
(None, 32, 32, 3) 0 \n", "____________________________________________________________________________________________________\n", "Generator (Model) (None, 32, 32, 3) 4280323 input_12[0][0] \n", "____________________________________________________________________________________________________\n", "Discriminator (Model) (None, 1) 1086849 input_11[0][0] \n", " Generator[2][0] \n", "====================================================================================================\n", "Total params: 5,367,172\n", "Trainable params: 1,086,849\n", "Non-trainable params: 4,280,323\n", "____________________________________________________________________________________________________\n" ] } ], "source": [ "Real_image = Input(shape=(32,32,3))\n", "Noise_input_for_training_discriminator = Input(shape=(128,))\n", "Fake_image = generator(Noise_input_for_training_discriminator)\n", "Discriminator_output_for_real = discriminator(Real_image)\n", "Discriminator_output_for_fake = discriminator(Fake_image)\n", "\n", "model_for_training_discriminator = Model([Real_image,\n", " Noise_input_for_training_discriminator],\n", " [Discriminator_output_for_real,\n", " Discriminator_output_for_fake])\n", "print(\"model_for_training_discriminator\")\n", "generator.trainable = False\n", "discriminator.trainable = True\n", "model_for_training_discriminator.compile(optimizer=Adam(LEARNING_RATE, beta_1=BETA_1, beta_2=BETA_2), loss=[wasserstein_loss, wasserstein_loss])\n", "model_for_training_discriminator.summary()" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "collapsed": true }, "outputs": [], "source": [ "real_y = np.ones((BATCHSIZE, 1), dtype=np.float32)\n", "fake_y = -real_y" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "collapsed": true }, "outputs": [], "source": [ "X = X/255*2-1" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "epoch 1 of 500\n", "number 
of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.94880771636963\n", "32/64 [==============>...............] - ETA: 0s1.4451154470443726\n", "32/64 [==============>...............] - ETA: 0s-1.4451155066490173\n", "wasserstein_loss: -5.960464477539063e-08\n", "plot generated_image\n", "epoch 2 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 132.20580124855042\n", "32/64 [==============>...............] - ETA: 0s-0.1759192794561386\n", "32/64 [==============>...............] - ETA: 0s0.17591925710439682\n", "wasserstein_loss: -2.2351741790771484e-08\n", "plot generated_image\n", "epoch 3 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 131.93839740753174\n", "32/64 [==============>...............] - ETA: 0s3.2876545190811157\n", "32/64 [==============>...............] - ETA: 0s-3.2876555919647217\n", "wasserstein_loss: -1.0728836059570312e-06\n", "plot generated_image\n", "epoch 4 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 133.41933369636536\n", "32/64 [==============>...............] - ETA: 0s-0.14881636202335358\n", "32/64 [==============>...............] - ETA: 0s0.14881636202335358\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 5 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.3112678527832\n", "32/64 [==============>...............] - ETA: 0s-0.028686107136309147\n", "32/64 [==============>...............] - ETA: 0s0.028686104342341423\n", "wasserstein_loss: -2.7939677238464355e-09\n", "plot generated_image\n", "epoch 6 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.40845894813538\n", "32/64 [==============>...............] 
- ETA: 0s-0.09372971951961517\n", "32/64 [==============>...............] - ETA: 0s0.09372971206903458\n", "wasserstein_loss: -7.450580596923828e-09\n", "plot generated_image\n", "epoch 7 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.31502103805542\n", "32/64 [==============>...............] - ETA: 0s-0.04031790792942047\n", "32/64 [==============>...............] - ETA: 0s0.04031790792942047\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 8 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.19004201889038\n", "32/64 [==============>...............] - ETA: 0s-0.03722946532070637\n", "32/64 [==============>...............] - ETA: 0s0.03722946345806122\n", "wasserstein_loss: -1.862645149230957e-09\n", "plot generated_image\n", "epoch 9 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.34114503860474\n", "32/64 [==============>...............] - ETA: 0s-0.07468081265687943\n", "32/64 [==============>...............] - ETA: 0s0.07468081265687943\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 10 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.26677441596985\n", "32/64 [==============>...............] - ETA: 0s-0.06679900735616684\n", "32/64 [==============>...............] - ETA: 0s0.06679900735616684\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 11 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.4104232788086\n", "32/64 [==============>...............] - ETA: 0s-0.07402388378977776\n", "32/64 [==============>...............] - ETA: 0s0.07402388378977776\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 12 of 500\n", "number of batches: 937\n", "936/937 [============================>.] 
- ETA: 0s\n", "epoch time: 135.35877871513367\n", "32/64 [==============>...............] - ETA: 0s-0.06437696516513824\n", "32/64 [==============>...............] - ETA: 0s0.06437696143984795\n", "wasserstein_loss: -3.725290298461914e-09\n", "plot generated_image\n", "epoch 13 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.47368812561035\n", "32/64 [==============>...............] - ETA: 0s-0.06496286764740944\n", "32/64 [==============>...............] - ETA: 0s0.06496286764740944\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 14 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.35207867622375\n", "32/64 [==============>...............] - ETA: 0s-0.06923170387744904\n", "32/64 [==============>...............] - ETA: 0s0.06923170387744904\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 15 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.4222342967987\n", "32/64 [==============>...............] - ETA: 0s-0.07500187680125237\n", "32/64 [==============>...............] - ETA: 0s0.07500187307596207\n", "wasserstein_loss: -3.725290298461914e-09\n", "plot generated_image\n", "epoch 16 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.20164108276367\n", "32/64 [==============>...............] - ETA: 0s-0.07157521322369576\n", "32/64 [==============>...............] - ETA: 0s0.07157521322369576\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 17 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.54905438423157\n", "32/64 [==============>...............] - ETA: 0s-0.06725597754120827\n", "32/64 [==============>...............] 
- ETA: 0s0.06725598871707916\n", "wasserstein_loss: 1.1175870895385742e-08\n", "plot generated_image\n", "epoch 18 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.36335039138794\n", "32/64 [==============>...............] - ETA: 0s-0.06717506051063538\n", "32/64 [==============>...............] - ETA: 0s0.06717505678534508\n", "wasserstein_loss: -3.725290298461914e-09\n", "plot generated_image\n", "epoch 19 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.7008762359619\n", "32/64 [==============>...............] - ETA: 0s-0.06350045651197433\n", "32/64 [==============>...............] - ETA: 0s0.06350045651197433\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 20 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.35547423362732\n", "32/64 [==============>...............] - ETA: 0s-0.06133383326232433\n", "32/64 [==============>...............] - ETA: 0s0.061333831399679184\n", "wasserstein_loss: -1.862645149230957e-09\n", "plot generated_image\n", "epoch 21 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.38607931137085\n", "32/64 [==============>...............] - ETA: 0s-0.05890892446041107\n", "32/64 [==============>...............] - ETA: 0s0.05890892446041107\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 22 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.52250981330872\n", "32/64 [==============>...............] - ETA: 0s-0.05670524388551712\n", "32/64 [==============>...............] - ETA: 0s0.05670524388551712\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 23 of 500\n", "number of batches: 937\n", "936/937 [============================>.] 
- ETA: 0s\n", "epoch time: 135.33417344093323\n", "32/64 [==============>...............] - ETA: 0s-0.053800102323293686\n", "32/64 [==============>...............] - ETA: 0s0.053800102323293686\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 24 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.33403158187866\n", "32/64 [==============>...............] - ETA: 0s-0.05269063822925091\n", "32/64 [==============>...............] - ETA: 0s0.05269063822925091\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 25 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.1869478225708\n", "32/64 [==============>...............] - ETA: 0s-0.052681947126984596\n", "32/64 [==============>...............] - ETA: 0s0.05268194153904915\n", "wasserstein_loss: -5.587935447692871e-09\n", "plot generated_image\n", "epoch 26 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.18537831306458\n", "32/64 [==============>...............] - ETA: 0s-0.05041646212339401\n", "32/64 [==============>...............] - ETA: 0s0.05041646212339401\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 27 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.37776684761047\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "32/64 [==============>...............] - ETA: 0s-0.05045543052256107\n", "32/64 [==============>...............] - ETA: 0s0.05045543052256107\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 28 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.38412523269653\n", "32/64 [==============>...............] - ETA: 0s-0.04955735802650452\n", "32/64 [==============>...............] 
- ETA: 0s0.04955735802650452\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 29 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.40204763412476\n", "32/64 [==============>...............] - ETA: 0s-0.0485646091401577\n", "32/64 [==============>...............] - ETA: 0s0.04856461472809315\n", "wasserstein_loss: 5.587935447692871e-09\n", "plot generated_image\n", "epoch 30 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.36450219154358\n", "32/64 [==============>...............] - ETA: 0s-0.04919157549738884\n", "32/64 [==============>...............] - ETA: 0s0.04919157549738884\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 31 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.43105340003967\n", "32/64 [==============>...............] - ETA: 0s-0.04786474257707596\n", "32/64 [==============>...............] - ETA: 0s0.04786474257707596\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 32 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.46417713165283\n", "32/64 [==============>...............] - ETA: 0s-0.04680074378848076\n", "32/64 [==============>...............] - ETA: 0s0.04680074378848076\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 33 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.5649073123932\n", "32/64 [==============>...............] - ETA: 0s-0.046741463243961334\n", "32/64 [==============>...............] - ETA: 0s0.04674146696925163\n", "wasserstein_loss: 3.725290298461914e-09\n", "plot generated_image\n", "epoch 34 of 500\n", "number of batches: 937\n", "936/937 [============================>.] 
- ETA: 0s\n", "epoch time: 135.27696180343628\n", "32/64 [==============>...............] - ETA: 0s-0.046080177649855614\n", "32/64 [==============>...............] - ETA: 0s0.046080177649855614\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 35 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.30296730995178\n", "32/64 [==============>...............] - ETA: 0s-0.04417840763926506\n", "32/64 [==============>...............] - ETA: 0s0.04417840950191021\n", "wasserstein_loss: 1.862645149230957e-09\n", "plot generated_image\n", "epoch 36 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.5395781993866\n", "32/64 [==============>...............] - ETA: 0s-0.04403984174132347\n", "32/64 [==============>...............] - ETA: 0s0.04403984174132347\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 37 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.24879908561707\n", "32/64 [==============>...............] - ETA: 0s-0.043628064915537834\n", "32/64 [==============>...............] - ETA: 0s0.043628064915537834\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 38 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.36790990829468\n", "32/64 [==============>...............] - ETA: 0s-0.043931981548666954\n", "32/64 [==============>...............] - ETA: 0s0.043931981548666954\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 39 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.54706454277039\n", "32/64 [==============>...............] - ETA: 0s-0.043369974941015244\n", "32/64 [==============>...............] 
- ETA: 0s0.043369974941015244\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 40 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.45085310935974\n", "32/64 [==============>...............] - ETA: 0s-0.04330463334918022\n", "32/64 [==============>...............] - ETA: 0s0.04330463334918022\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 41 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.13854098320007\n", "32/64 [==============>...............] - ETA: 0s-0.04290587082505226\n", "32/64 [==============>...............] - ETA: 0s0.04290587082505226\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 42 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.40039730072021\n", "32/64 [==============>...............] - ETA: 0s-0.04214660823345184\n", "32/64 [==============>...............] - ETA: 0s0.04214660823345184\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 43 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.34528827667236\n", "32/64 [==============>...............] - ETA: 0s-0.04079541377723217\n", "32/64 [==============>...............] - ETA: 0s0.04079541377723217\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 44 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.2051465511322\n", "32/64 [==============>...............] - ETA: 0s-0.04228292405605316\n", "32/64 [==============>...............] - ETA: 0s0.04228292591869831\n", "wasserstein_loss: 1.862645149230957e-09\n", "plot generated_image\n", "epoch 45 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.26617288589478\n", "32/64 [==============>...............] 
- ETA: 0s-0.0420759841799736\n", "32/64 [==============>...............] - ETA: 0s0.0420759841799736\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 46 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.5399990081787\n", "32/64 [==============>...............] - ETA: 0s-0.0409205537289381\n", "32/64 [==============>...............] - ETA: 0s0.04092055559158325\n", "wasserstein_loss: 1.862645149230957e-09\n", "plot generated_image\n", "epoch 47 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.26917552947998\n", "32/64 [==============>...............] - ETA: 0s-0.04017970524728298\n", "32/64 [==============>...............] - ETA: 0s0.04017970524728298\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 48 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.30178689956665\n", "32/64 [==============>...............] - ETA: 0s-0.03962242044508457\n", "32/64 [==============>...............] - ETA: 0s0.03962242044508457\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 49 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.654381275177\n", "32/64 [==============>...............] - ETA: 0s-0.03883266821503639\n", "32/64 [==============>...............] - ETA: 0s0.03883267007768154\n", "wasserstein_loss: 1.862645149230957e-09\n", "plot generated_image\n", "epoch 50 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.28032326698303\n", "32/64 [==============>...............] - ETA: 0s-0.038484446704387665\n", "32/64 [==============>...............] - ETA: 0s0.038484446704387665\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 51 of 500\n", "number of batches: 937\n", "936/937 [============================>.] 
- ETA: 0s\n", "epoch time: 135.2165331840515\n", "32/64 [==============>...............] - ETA: 0s-0.03752958960831165\n", "32/64 [==============>...............] - ETA: 0s0.03752958960831165\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 52 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.60667252540588\n", "32/64 [==============>...............] - ETA: 0s-0.03707117587327957\n", "32/64 [==============>...............] - ETA: 0s0.03707117587327957\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 53 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.48254561424255\n", "32/64 [==============>...............] - ETA: 0s-0.03780117630958557\n", "32/64 [==============>...............] - ETA: 0s0.03780117630958557\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 54 of 500\n", "number of batches: 937\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.3475034236908\n", "32/64 [==============>...............] - ETA: 0s-0.03709513880312443\n", "32/64 [==============>...............] - ETA: 0s0.03709513694047928\n", "wasserstein_loss: -1.862645149230957e-09\n", "plot generated_image\n", "epoch 55 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.36211466789246\n", "32/64 [==============>...............] - ETA: 0s-0.035701872780919075\n", "32/64 [==============>...............] - ETA: 0s0.035701872780919075\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 56 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.38477110862732\n", "32/64 [==============>...............] - ETA: 0s-0.03565170802175999\n", "32/64 [==============>...............] 
- ETA: 0s0.03565170802175999\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 57 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.4325511455536\n", "32/64 [==============>...............] - ETA: 0s-0.03558463603258133\n", "32/64 [==============>...............] - ETA: 0s0.03558463603258133\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 58 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.16646671295166\n", "32/64 [==============>...............] - ETA: 0s-0.035412872210145\n", "32/64 [==============>...............] - ETA: 0s0.03541287034749985\n", "wasserstein_loss: -1.862645149230957e-09\n", "plot generated_image\n", "epoch 59 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.04189801216125\n", "32/64 [==============>...............] - ETA: 0s-0.03521161526441574\n", "32/64 [==============>...............] - ETA: 0s0.03521161526441574\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 60 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.45459127426147\n", "32/64 [==============>...............] - ETA: 0s-0.03505341894924641\n", "32/64 [==============>...............] - ETA: 0s0.03505341894924641\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 61 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.5339035987854\n", "32/64 [==============>...............] - ETA: 0s-0.034686196595430374\n", "32/64 [==============>...............] - ETA: 0s0.034686196595430374\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 62 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.39927077293396\n", "32/64 [==============>...............] 
- ETA: 0s-0.03436245024204254\n", "32/64 [==============>...............] - ETA: 0s0.03436245210468769\n", "wasserstein_loss: 1.862645149230957e-09\n", "plot generated_image\n", "epoch 63 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.6504819393158\n", "32/64 [==============>...............] - ETA: 0s-0.033955931663513184\n", "32/64 [==============>...............] - ETA: 0s0.033955931663513184\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 64 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.54109740257263\n", "32/64 [==============>...............] - ETA: 0s-0.034003784880042076\n", "32/64 [==============>...............] - ETA: 0s0.034003784880042076\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 65 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.44626569747925\n", "32/64 [==============>...............] - ETA: 0s-0.03345862403512001\n", "32/64 [==============>...............] - ETA: 0s0.03345862403512001\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 66 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.22644710540771\n", "32/64 [==============>...............] - ETA: 0s-0.03287150710821152\n", "32/64 [==============>...............] - ETA: 0s0.03287150710821152\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 67 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.53603744506836\n", "32/64 [==============>...............] - ETA: 0s-0.02548270206898451\n", "32/64 [==============>...............] - ETA: 0s0.025482701137661934\n", "wasserstein_loss: -9.313225746154785e-10\n", "plot generated_image\n", "epoch 247 of 500\n", "number of batches: 937\n", "936/937 [============================>.] 
- ETA: 0s\n", "epoch time: 135.54178643226624\n", "32/64 [==============>...............] - ETA: 0s-0.025010794401168823\n", "32/64 [==============>...............] - ETA: 0s0.025010794401168823\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 248 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.50311303138733\n", "32/64 [==============>...............] - ETA: 0s-0.02553188055753708\n", "32/64 [==============>...............] - ETA: 0s0.02553188055753708\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 249 of 500\n", "number of batches: 937\n", "936/937 [============================>.] - ETA: 0s\n", "epoch time: 135.25214552879333\n", "32/64 [==============>...............] - ETA: 0s-0.025545545853674412\n", "32/64 [==============>...............] - ETA: 0s0.025545545853674412\n", "wasserstein_loss: 0.0\n", "plot generated_image\n", "epoch 250 of 500\n", "number of batches: 937\n", "418/937 [============>.................] 
- ETA: 75s" ] }, { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0mgenerator\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainable\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 26\u001b[0m discriminator_loss.append(model_for_training_discriminator.train_on_batch([image_batch, noise],\n\u001b[0;32m---> 27\u001b[0;31m [real_y, fake_y]))\n\u001b[0m\u001b[1;32m 28\u001b[0m \u001b[0mdiscriminator\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainable\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 29\u001b[0m \u001b[0mgenerator\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainable\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/home/mathlab115/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mtrain_on_batch\u001b[0;34m(self, x, y, sample_weight, class_weight)\u001b[0m\n\u001b[1;32m 1760\u001b[0m \u001b[0mins\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0msample_weights\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1761\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_train_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1762\u001b[0;31m \u001b[0moutputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1763\u001b[0m \u001b[0;32mif\u001b[0m 
\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1764\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0moutputs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/home/mathlab115/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2271\u001b[0m updated = session.run(self.outputs + [self.updates_op],\n\u001b[1;32m 2272\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2273\u001b[0;31m **self.session_kwargs)\n\u001b[0m\u001b[1;32m 2274\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mupdated\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2275\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/home/mathlab115/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 776\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 777\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 778\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 779\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 780\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/home/mathlab115/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 980\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 981\u001b[0m results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m--> 982\u001b[0;31m feed_dict_string, options, run_metadata)\n\u001b[0m\u001b[1;32m 983\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 984\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/home/mathlab115/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1030\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1031\u001b[0m return self._do_call(_run_fn, self._session, feed_dict, fetch_list,\n\u001b[0;32m-> 1032\u001b[0;31m target_list, options, run_metadata)\n\u001b[0m\u001b[1;32m 1033\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1034\u001b[0m return self._do_call(_prun_fn, self._session, handle, feed_dict,\n", "\u001b[0;32m/home/mathlab115/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, 
*args)\u001b[0m\n\u001b[1;32m 1037\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1038\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1039\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1040\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1041\u001b[0m \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/home/mathlab115/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(session, feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m 1019\u001b[0m return tf_session.TF_Run(session, options,\n\u001b[1;32m 1020\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1021\u001b[0;31m status, run_metadata)\n\u001b[0m\u001b[1;32m 1022\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1023\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msession\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "test_noise = np.random.randn(GENERATE_BATCHSIZE, 128)\n", "W_loss = []\n", "discriminator_loss = []\n", "generator_loss = []\n", "for epoch in range(EPOCHS):\n", " np.random.shuffle(X)\n", " \n", " print(\"epoch {} of {}\".format(epoch+1, EPOCHS))\n", " num_batches = int(X.shape[0] // BATCHSIZE)\n", " \n", " print(\"number of batches: {}\".format(int(X.shape[0] // (BATCHSIZE))))\n", " \n", " progress_bar = Progbar(target=int(X.shape[0] // (BATCHSIZE * TRAINING_RATIO)))\n", " minibatches_size = BATCHSIZE * TRAINING_RATIO\n", " \n", " start_time = time()\n", " for index in range(int(X.shape[0] // (BATCHSIZE * TRAINING_RATIO))):\n", " progress_bar.update(index)\n", " discriminator_minibatches = X[index * minibatches_size:(index + 1) * minibatches_size]\n", " \n", " for j in range(TRAINING_RATIO):\n", " image_batch = discriminator_minibatches[j * BATCHSIZE : (j + 1) * BATCHSIZE]\n", " noise = np.random.randn(BATCHSIZE, 128).astype(np.float32)\n", " discriminator.trainable = True\n", " generator.trainable = False\n", " discriminator_loss.append(model_for_training_discriminator.train_on_batch([image_batch, noise],\n", " [real_y, fake_y]))\n", " discriminator.trainable = False\n", " generator.trainable = True\n", " generator_loss.append(model_for_training_generator.train_on_batch(np.random.randn(BATCHSIZE, 128), real_y))\n", " \n", " print('\\nepoch time: {}'.format(time()-start_time))\n", " \n", " W_real = model_for_training_generator.evaluate(test_noise, real_y)\n", " print(W_real)\n", " W_fake = model_for_training_generator.evaluate(test_noise, fake_y)\n", " print(W_fake)\n", " W_l = W_real+W_fake\n", " print('wasserstein_loss: {}'.format(W_l))\n", " W_loss.append(W_l)\n", " #Generate image\n", " generated_image = generator.predict(test_noise)\n", " 
generated_image = (generated_image+1)/2\n", " for i in range(GENERATE_ROW_NUM):\n", " new = generated_image[i*GENERATE_ROW_NUM:i*GENERATE_ROW_NUM+GENERATE_ROW_NUM].reshape(32*GENERATE_ROW_NUM,32,3)\n", " if i!=0:\n", " old = np.concatenate((old,new),axis=1)\n", " else:\n", " old = new\n", " print('plot generated_image')\n", " plt.imsave('{}/SN_epoch_{}.png'.format(SAVE_DIR, epoch), old)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.1" } }, "nbformat": 4, "nbformat_minor": 2 }