{ "cells": [ { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 0s 3ms/step - loss: -3.6097 - acc: 0.1938 - val_loss: -10.6586 - val_acc: 0.3333\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 38us/step - loss: -11.2532 - acc: 0.3000 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 31us/step - loss: -13.1812 - acc: 0.2750 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.3629 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.2910 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4224 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2757 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.0510 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2660 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.5742 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1695 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 13/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1968 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8271 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2362 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2199 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2608 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9018 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1941 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2706 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1315 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 50us/step - loss: 
-15.2877 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2801 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3005 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2052 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2696 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3136 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3186 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.9745 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2938 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2420 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3008 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3129 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3070 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2527 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2482 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1595 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3005 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2182 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3290 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3188 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3333 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2557 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0660 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3377 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 31us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2442 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2798 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2290 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2024 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - 
val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2769 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2889 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 
[==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3377 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2768 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1844 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 100us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3143 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", 
"160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3305 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2540 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1789 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3252 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1838 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 
44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2552 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - 
val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2571 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2745 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 
213/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2036 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1698 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2972 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2336 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 69us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3006 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2824 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2726 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2844 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2960 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3077 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3146 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3244 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1443 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2429 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3181 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2986 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 
[==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 81us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 
[==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n" ] } ], "source": [ "import numpy as np\n", "import pandas as pd\n", "import tensorflow as tf\n", "import matplotlib\n", "from matplotlib import pyplot as plt\n", "from keras.models import Sequential\n", "from keras.layers import Dense, Dropout\n", "from sklearn.model_selection import train_test_split\n", "\n", "# drowac: plot training vs. validation accuracy curves from a Keras History object\n", "# (reads history keys 'acc' / 'val_acc', as produced by this Keras version)\n", "def drowac(h) :\n", " plt.figure(figsize=(9,6))\n", " plt.grid(True)\n", " plt.plot(h.history['acc'])\n", " plt.plot(h.history['val_acc'])\n", " plt.title('accuracy')\n", " plt.ylabel('accuracy')\n", " plt.xlabel('epoch')\n", " plt.legend(['train', 'test'], loc='upper left')\n", " plt.show()\n", "\n", "# drowls: plot training vs. validation loss curves from a Keras History object\n", "def drowls(h) :\n", " plt.figure(figsize=(9,6))\n", " plt.grid(True)\n", " plt.plot(h.history['loss'])\n", " plt.plot(h.history['val_loss'])\n", " 
plt.title('loss')\n", " plt.ylabel('loss')\n", " plt.xlabel('epoch')\n", " plt.legend(['train', 'test'], loc='upper left')\n", " plt.show()\n", "\n", "# Build and train the classifier. The wine dataset has THREE classes (1/2/3),\n", "# so the head must be a 3-unit softmax trained with categorical cross-entropy;\n", "# the previous Dense(1, sigmoid) + binary_crossentropy on raw labels 1/2/3\n", "# produced the negative loss and the val_acc frozen at 0.3333 seen above.\n", "def neural_net (model):\n", " model.add(Dense(25, input_dim=13, activation='relu'))\n", " model.add(Dense(15, activation='relu'))\n", " model.add(Dropout(.2))\n", " model.add(Dense(8, activation='relu'))\n", " model.add(Dense(3, activation='softmax'))\n", " model.compile(loss=\"categorical_crossentropy\", optimizer=\"adam\", metrics=['accuracy'])\n", " hist = model.fit(x_train, y_train, epochs=500, validation_data=(x_test,y_test))\n", " return hist\n", "data = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/wine/wine.data', sep = ',', names = ['Class','Alcohol', 'Malic acid','Ash','Alcalinity of ash','Magnesium','Total phenols','Flavanoids','Nonflavanoid phenols','Proanthocyanins','Color intensity','Hue','OD280/OD315 of diluted wines','Proline'])\n", "X_col = [col for col in data.columns if col != 'Class']\n", "X = data[X_col]\n", "# one-hot encode the class labels (1/2/3) to match the 3-unit softmax output\n", "Y = pd.get_dummies(data['Class'])\n", "x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size = 0.1, random_state = 10)\n", "model = Sequential()\n", "hist = neural_net (model)" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjMAAAGDCAYAAADecJEqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XmUnVWd7//3tyqpzCQQJErCECBtG4FOMOKAQ2mjgnaDLlsbkRZtu6Ot/MTr1StcZ/q6ro23HfonDqh0OzQi4sTVIINS2A7IIFEJiAQEKVACYUgKMlZ97x/nqcqpSiXnpFLPOU8l79daLM4z77NLqz7svZ+9IzORJEmaqDraXQBJkqTdYZiRJEkTmmFGkiRNaIYZSZI0oRlmJEnShGaYkSRJE5phRpIkTWiGGUmSNKEZZiRNeFHj7zNpL+X/+SWNm4g4KyLuiIj1EXFLRLyi7tg/RsStdceOKfYfFBHfiogHImJtRHyq2P/BiPhq3fWHRkRGxKRiuyciPhwRPwUeBw6LiDfUPePOiHjTiPKdHBErI2JdUc4TIuJVEXHjiPP+e0R8p7yakjSeDDOSxtMdwHOB2cCHgK9GxJMi4lXAB4HXAfsAJwFrI6IT+B5wN3AoMB+4aBee93fAcmBWcY81wF8Vz3gD8PG60HQs8GXgXcAc4HnAXcClwMKIeErdfU8DvrJL31xS2xhmJI2bzPxGZt6XmQOZ+XXgduBY4B+AczPz+qxZnZl3F8cOBN6VmY9l5sbM/MkuPPI/MnNVZm7NzC2Z+f3MvKN4xjXAFdTCFcAbgQsy88qifPdm5m8zcxPwdWoBhoh4KrVg9b1xqBJJLWCYkTRuIuJ1RTfOIxHxCHAksD9wELVWm5EOAu7OzK1jfOQ9I55/YkRcGxEPFc9/afH8wWeNVgaALwGnRkRQa+25uAg5kiYAw4ykcRERhwCfB84A5mbmHOBmIKiFjsNHuewe4ODBcTAjPAZMr9t+4ijnZN3zpwDfBP4PMK94/ori+YPPGq0MZOa1wGZqrTinYheTNKEYZiSNlxnUwsUDABHxBmotMwBfAN4ZEU8r3jw6ogg/1wF/BD4SETMiYmpEHFdcsxJ4XkQcHBGzgbMbPL8LmFI8f2tEnAi8uO74F4E3RMRfRkRHRMyPiD+vO/5l4FPA1l3s6pLUZoYZSeMiM28B/hX4OXA/cBTw0+LYN4APAxcC64HvAPtlZj/w18ARwB+AXuBvi2uupDaW5dfAjTQYw5KZ64G3ARcDD1NrYbm07vh1FIOCgUeBa4BD6m7xFWrhy1YZaYKJzGx8liTt4SJiGrW3oY7JzNvbXR5JzbNlRpJq/gm43iAjTTyjDbqTpL1KRNxFbaDwy9tcFEljUGrLTDG75m0RsToizhrl+Jsj4jfFq5w/iYjFI44fHBF9EfHOMsspae+WmYdm5iGZeVO7yyJp15U2ZqaY2fN3wIuoDeq7HnhNMUhw8Jx9MnNd8fkk4C2ZeULd8W8CA8AvMvP/lFJQSZI0oZXZMnMssDoz78zMzdSmKD+5/oTBIFMYfK0TgIh4OXAnsKrEMkqSpAmuzDEz8xk+O2cv8IyRJ0XEW4F3UJsj4oXFvhnAu6m16uywiykillNbl4Vp06Y97aCDDhqvsg8zMDBAR4djpVvF+m4d67q1rO/Wsa5bq6z6/t3vfvdgZj6h0XllhpkYZd92fVqZeR5wXkScCrwXOJ3aAnUfz8y+2uzio8vM84HzAZYtW5Y33HDDeJR7Oz09PXR3d5dyb23P+m4d67q1rO/Wsa5bq6z6joi7mzmvzDDTS20tlEELgPt2cv5FwGeKz88A/iYizqW2uu1ARGzMzE+VUlJJkjRhlRlmrgcWRcRC4F7gFGozcg6JiEV1czq8jNoKu2Tmc+vO+SDQZ5CRJEmjKS3MZObWiDgDuBzoBC7IzFURcQ5wQ2ZeCpwREccDW6hNP356WeWRJEl7plInzcvMFdRWra3
f9/66z2c2cY8PjvX5W7Zsobe3l40bN471FgDMnj2bW2+9dbfuUbapU6eyYMECJk+e3O6iSJLUUnv0DMC9vb3MmjWLQw89lJ0NJG5k/fr1zJo1axxLNr4yk7Vr19Lb28vChQvbXRxJklpqj35vbePGjcydO3e3gsxEEBHMnTt3t1ugJEmaiPboMAPs8UFm0N7yPSVJGmmPDzPt9sgjj/DpT396l6976UtfyiOPPFJCiSRJ2rMYZkq2ozDT39+/0+tWrFjBnDlzyiqWJEl7jD16AHAVnHXWWdxxxx0sWbKEyZMnM3PmTJ70pCexcuVKbrnlFl7+8pdzzz33sHHjRs4880yWL18OwKGHHsoNN9xAX18fJ554Is95znP42c9+xvz58/nud7/LtGnT2vzNJEmqhr0mzHzo/67ilvvWNT5xFP39/XR2dm63f/GB+/CBv37qTq/9yEc+ws0338zKlSvp6enhZS97GTfffPPQW0cXXHAB++23Hxs2bODpT386r3zlK5k7d+6we9x+++187Wtf4/Of/zyvfvWr+eY3v8lpp502pu8iSdKeZq8JM2PWv4mOgX4YGGWA7YYBePD27ffXe6gX+jfXznu0l2OXHsXCWVuHrvu3c/+Nb6+4EoB7/nAvt1//I+YuWwIDW2HtHfDY4yw8eAFLFsyAB2/naX9+CHetuhEe3G7NTuhbA/++w3U5J4wljzwCv7eLrRWs69ayvlvHum6hJx4F005oaxH2mjDTqAVlhx7tZevGPiaN0jIzFjOmb+se6vnpL7jqmp/x8xUXM336NLpPPo2NmzZtd82UKV1Dnzs7O9ngK9iSJA3Za8LMmM1ewIaOsU+aNyv2Y/3jm2D/RTD7XuiaUfsMPMot7HvAgUw/+Gh++9vfcu2Nv4LZC2rHOybB3MNhSh90dg1dw4z9Iadu2673wFZ4w/fH+k0rY6Wr3baMdd1a1nfrWNct1tPT1scbZko2d+5cjjvuOI488kimTZvGvHnzho6dcMIJfPazn+Xoo4/myU9+Ms985jPbWFJJkiYmw0wLXHjhhaPunzJlCpdddtmox+666y4A9t9/f26++eah/e9858QfEyNJ0ngyzIzRXQ8+xuxpk1n72Ga2DgwMOzajaxIH7TcdgHseepzHNm/d5ft3RDBv1hTuX7eJAXLYsbkzuth/5hR+/+BjbO7f9uw/PbqRN3/06jF8m10XwNv+chEX/uIPPNC3/Tif3bFhwwamXb/j7/HKYxZw19rHuPHuh8d0/5lTJvGW7iP41ytvo38gG1/QQqPV634zunj9sw/lE1fdzkCOb3kb1bXGl/XdOtZ167xi6XyWtDlNGGbGIDNZt3EL6zZuAWDGlElM7qzNP7hhcz/rNmwZOvfRDVvomtTB1MnNDyAeGKjd/8G+zWzc2s/saZOHlivo27iVdRu3Mmd6F32btjK9axJdk2rP7prUwdKDWjN6//JV93PR9fdww90P8/RD92X+nPGb9+b++zcxb97o3+MXv3+Iq269n9/dv56D95vO4ifts0v3fujxLfz4dw/wpZ/fxZ0PPMbJSw6kSgtBXL7qfi66blu9dkTwi98/xOSOP3DvIxt46ZFPHNfn7ayuNf6s79axrltnwb7TYX17y2CYGYOR/zH/xH2mMmNKrSr/+OgGHuzbXDtvIBnIZM60yRywz9Sm77+lf4B1f9wy1Opy0H7T6SjCzF1Fa8xgi8L+M7uYM732ttNj93fxiVOeslvfrVnHf+wa7l77GABvfcERdD/5gHG7d09PD93dS0c99vaLbuJnd6xl45YBTl4yn7e+4Ihduvdtf1rPj3/3AHevfYyZUybxyVNGf067vOhj13BXUa9vecERTOns4NQv/IK71j7Gk2ZP5RPjXN6d1bXGn/XdOtZ1a/X0rG7r813OYAxyRFN/Z8e2/7bviCAzycyhwFF/vBmD52/pH6AjYijIDB7rHxj7vcfLnGmTuX9drRtkMEy15LnTu1izvvbc2dMmj+H
62jX3r9s0puvLNmf65KHvN2faZGYX5V2zfhNzKlheSaoCW2bGYGTLTH2gGMwdAwn9ObbAMRhgBjK3u3ZSVcLM9G1/WFv5R7Y+gNSXoVXXl232tG3BcM70rqEuRIDZLQyNkjSR2DIzBtu1zMTwlpnBc7buRuAYvGbktZ0dtZCzpRh03K4ws09dKGhlC8fs3Xzu1MmdTCkCQhVbZkZ+v939vpK0NzDMjEF9y0xHBB3Dupm2ndM/kKx79FG+eP5nd/kZnR3BV7/wGTZv3LDdfoDNW4swE+3qZtrWSrBPC//IDm8RGltLxeA9qtgyU1+mfaZOYkZXJ5OKn7ndTJI0OsPMGNS3zIxsGYm6lpn+gWT9ukf5/Pmf2+VnTOoI/vOLn2HLpgZhps3dTPtMndTSMgwLM2MMI4MhaPYYw1CZBgPLrKmTmNTZQURUOnxJUhU4ZmYM6ltmRv4hH9ky88n//UHuvOMOlixZwote9CIOOOAALr74YjZt2sQrXvEKPvShD/HYY4/x6le/mt7eXvr7+3nf+97HrXf+gTX3/4nXvPylPGneAVx99dXDnrd56wCdHTEUnlptsMtjdov/wNZ3tYy1RWio7BVs6Risz5Hf88G+zZUsryRVwd4TZi47C/70mzFdOq1/K3Ruq6qpAwMctmWADXMXs677n4edG8WsJQNFy8zbz/4gvXf+jpUrV3LFFVdwySWXcN1115GZnHTSSfz4xz/mgQce4MADD+T736+tq/Too49y3MBkPv/p/59LvvcDjjzsoKH7Dwszne2bIWWotaDFrRuDrSkdAbOmjO1/vrMr3NIxGFhGG2BtmJGk0dnNtJtGjlkZbJmpdTMNDGu5ueKKK7jiiitYunQpxxxzDL/97W+5/fbbOeqoo7jqqqt497vfzX/9138xe/bsbQOAY/sBwFB7U6pdXUww+h/dVphT13LRMcbvPxgOqjgGZfA19zkj3mqq/7ckabi9p2XmxI+M+dIN64evmr3+8c3c89DjAOy7gzEzg91M9X9wM5Ozzz6bN73pTds948Ybb2TFihWcffbZvPjFL+bN/+1/AKOEmfo5Z9rUxQTbwkwrB//WP3d3Wikq3c00StmqXF5JqgJbZsagfgDwpO3GzBQDgKm9mr3PrFmsX1+b5/klL3kJF1xwAX19fQDce++9rFmzhvvuu4/p06dz2mmn8c53vpNf/vKXdEYwfcZMNjzeN+z+9a0xI5/dSttaEFr7B3ZyZwczujp3a86VodadCnYzzRllLFK7WsEkaaLYe1pmxkHfpq2Q2dQA4HUbttK3aSv7778/xx13HEceeSQnnngip556Ks961rMAmDlzJl/96ldZvXo173rXu+jo6GDy5Ml85jOfYVJH8MrXvp5XveIk5h944NAA4IgYmgW4nd1Mc9r4B3bO9K7dClGzR+nKqYptY5G2f2urit1iklQFhpldcOcDtVaSJ82urbM0vauT6SMGoQ52Mz38eG19pplTOrnwwguHnXPmmWcO2z788MN5yUteMmzf5q39vPFN/8Q//893Di1iOWjWlMms37RlaD2odpg9bTLPXbQ/xy6c2/Jnv2jxPA4uViUfi6cdvC9LD57DIXPHfo+y7DN1Ms/7syfwjMO21euxC/fj2YfPZb8Z1QtfklQFhpkxGGyZOfwJM7d7Nbq+sWT2tMk8YVbzC0zW65rUyaIDZo167OAK/BHu6Ai+8sZntOXZHzzpqbt1/eID9+HbbzlunEozvjo6gi///bHD9j378P159uH7t6lEklR9jpkZg4FMgtHneKnf184xLZIk7S0MM2OQObwFpl79/naOaZEkaW+xx4eZkYtCjoeBzB3OvFu/v5VhpozvKUnSRLBHh5mpU6eydu3acf9Dv7OWmXqtCjOZydq1a5k6dWzjcyRJmsj26AHACxYsoLe3lwceeGC37rNx40amTp3K/Q/XFn1c39XJlv4B8pHRw8PgeVvXdnF/V+duPbtZU6dOZcGCBS15liRJVVJ
qmImIE4BPAp3AFzLzIyOOvxl4K9AP9AHLM/OWiHgR8BGgC9gMvCszf7Srz588eTILFy7czW8BPT09LF26lBPPqq2d9NxF+7O2bzMrzlw66vmD5120/Jkcc1jrX12WJGlvUlo3U0R0AucBJwKLgddExOIRp12YmUdl5hLgXOBjxf4Hgb/OzKOA04GvlFXOsXh0wxamTm5cdU4/L0lS+cocM3MssDoz78zMzcBFwMn1J2TmurrNGUAW+2/KzPuK/auAqRExpcSy7pJamGncfeT085Ikla/MMDMfuKduu7fYN0xEvDUi7qDWMvO2Ue7zSuCmzNxUSinHoOkwU8Hp8iVJ2tOUOWZmtFd5tnutKDPPA86LiFOB91LrVqrdIOKpwL8ALx71ARHLgeUA8+bNo6enZ/dLPYq+vr5h937k8S2se3htw+dd+9Mf7/AVbu3YyPpWeazr1rK+W8e6bq1213eZYaYXOKhuewFw3w7OhVo31GcGNyJiAfBt4HWZecdoF2Tm+cD5AMuWLcvu7u7dLPLoenp66O7uhh98f2jfQQc+ke7uJaNfUJz3ghe8oJTy7OmG6luls65by/puHeu6tdpd32V2M10PLIqIhRHRBZwCXFp/QkQsqtt8GXB7sX8O8H3g7Mz8aYllHLOddTMd9oQZLSyJJEl7t9JaZjJza0ScAVxO7dXsCzJzVUScA9yQmZcCZ0TE8cAW4GG2dTGdARwBvC8i3lfse3FmrimrvLtqZ28qrXjbc9k64Iy8kiS1QqnzzGTmCmDFiH3vr/t85g6u+1/A/yqzbLtrZ28qNTM4WJIkjY89ejmDMs1xDhlJkirBMDNGTognSVI1GGZ2Qf26kbOdEE+SpEowzOyCrknbqssJ8SRJqgbDzC7o6txWXbbMSJJUDYaZXTCps75lxjAjSVIVGGZ2Qea2uWOmd/n6tSRJVWCY2QX18+C55pIkSdVgmNkFA+msvpIkVY1hZhcMuESBJEmVY5jZBWYZSZKqxzCzCwYyedZhc7nqHc9rd1EkSVLBMLMLMmHJwXM44oBZ7S6KJEkqGGZ2QX/msCUNJElS+xlmdsFAJh2+ki1JUqUYZpqUmWRimJEkqWIMM00anGLGMCNJUrUYZpo0OGGeY2YkSaoWw0yT+gfDjGlGkqRKMcw0yW4mSZKqyTDTJLuZJEmqJsNMkwZsmZEkqZIMM00abJkxy0iSVC2GmSYNrphty4wkSdVimGnSYDdTp4NmJEmqFMNMkxwALElSNRlmmrRtzIxpRpKkKjHMNMl5ZiRJqibDTJP6B+xmkiSpigwzTRpwOQNJkirJMNMku5kkSaomw0yTfJtJkqRqMsw0qd9J8yRJqiTDTJOG1mayaUaSpEopNcxExAkRcVtErI6Is0Y5/uaI+E1ErIyIn0TE4rpjZxfX3RYRLymznM1Iu5kkSaqk0sJMRHQC5wEnAouB19SHlcKFmXlUZi4BzgU+Vly7GDgFeCpwAvDp4n5t46rZkiRVU5ktM8cCqzPzzszcDFwEnFx/Qmauq9ucARSRgZOBizJzU2b+Hlhd3K8t1m4Y4CerHwRsmZEkqWomlXjv+cA9ddu9wDNGnhQRbwXeAXQBL6y79toR184f5drlwHKAefPm0dPTMx7l3s57f/I4G/pvAWDVqlVMffC2Up6jmr6+vtJ+lhrOum4t67t1rOvWand9lxlmRmvDyO12ZJ4HnBcRpwLvBU7fhWvPB84HWLZsWXZ3d+9OeXdoww++P/T5L446iu7F80p5jmp6enoo62ep4azr1rK+W8e6bq1213eZ3Uy9wEF12wuA+3Zy/kXAy8d4bct0+P6XJEmVUuaf5uuBRRGxMCK6qA3ovbT+hIhYVLf5MuD24vOlwCkRMSUiFgKLgOtKLGvTXDVbkqRqKa2bKTO3RsQZwOVAJ3BBZq6KiHOAGzLzUuCMiDge2AI8TK2LieK8i4FbgK3AWzOzv6yy7grfZpIkqVrKHDNDZq4AVozY9/66z2fu5No
PAx8ur3Rj49tMkiRViyNAdlGnLTOSJFWKYWYXOWZGkqRqMczsIruZJEmqFsPMLnKhSUmSqsUw04T6+OLbTJIkVYthpgn1jTE2zEiSVC2GmSZ0DgszphlJkqrEMNOEDsOMJEmVZZhpQn1+MctIklQthpkm1HczdTpoRpKkSjHMNMFuJkmSqssw04T6AGPDjCRJ1WKYaULHsDEzphlJkqrEMNME55mRJKm6DDNNcACwJEnVZZhpQjgAWJKkyjLMNKHTeWYkSaosw0wThr/NZJqRJKlKDDNNcJ4ZSZKqyzDThGFhxhqTJKlS/NPchPpKsmVGkqRqMcw0obOulgwzkiRVi2GmCfXxxWlmJEmqFsNME4aPmTHNSJJUJYaZJnT6arYkSZVlmGmCazNJklRdhpkmuJyBJEnVZZhpQn18MctIklQthpkmZN3nTtOMJEmVYphpQn2YsZtJkqRqMcw0oy7NmGUkSaoWw0wT6ltmwjQjSVKlGGaakI1PkSRJbVJqmImIEyLitohYHRFnjXL8HRFxS0T8OiJ+GBGH1B07NyJWRcStEfFv0c4mEdOMJEmVVVqYiYhO4DzgRGAx8JqIWDzitJuAZZl5NHAJcG5x7bOB44CjgSOBpwPPL6usjaRpRpKkyiqzZeZYYHVm3pmZm4GLgJPrT8jMqzPz8WLzWmDB4CFgKtAFTAEmA/eXWFZJkjRBlRlm5gP31G33Fvt25I3AZQCZ+XPgauCPxT+XZ+atJZWzIdtlJEmqrkkl3nu0MS6j5oKIOA1YRtGVFBFHAE9hW0vNlRHxvMz88YjrlgPLAebNm0dPT8/4lHyErVv7Gfw6ZT1D2/T19VnPLWJdt5b13TrWdWu1u77LDDO9wEF12wuA+0aeFBHHA+8Bnp+Zm4rdrwCuzcy+4pzLgGcCw8JMZp4PnA+wbNmy7O7uHuevUPMv110GDABQ1jO0TU9Pj/XcItZ1a1nfrWNdt1a767vMbqbrgUURsTAiuoBTgEvrT4iIpcDngJMyc03doT8Az4+ISRExmVqLjd1MkiRpO6WFmczcCpwBXE4tiFycmasi4pyIOKk47aPATOAbEbEyIgbDziXAHcBvgF8Bv8rM/1tWWRtJ04wkSZVVZjcTmbkCWDFi3/vrPh+/g+v6gTeVWTZJkrRncAbgJtgwI0lSdRlmJEnShGaYaYJjZiRJqi7DTBPMMpIkVZdhRpIkTWiGmSbYzSRJUnUZZpqQwHOO2J/VHz6x3UWRJEkjGGaa1NERTOq0uiRJqhr/OjchGX3VTEmS1H6GmWYkhGlGkqRKairMRMQ3I+JlEbFXhh9bZiRJqq5mw8lngFOB2yPiIxHx5yWWqXISCJtmJEmqpKbCTGZelZmvBY4B7gKujIifRcQbImJymQWsig6zjCRJldR0t1FEzAVeD/wDcBPwSWrh5spSSlYhAwl2NEmSVE2TmjkpIr4F/DnwFeCvM/OPxaGvR8QNZRWuSuxlkiSpmpoKM8CnMvNHox3IzGXjWJ5KykzbZSRJqqhmu5meEhFzBjciYt+IeEtJZaokW2YkSaqmZsPMP2bmI4Mbmfkw8I/lFKl6aq9mm2YkSaqiZsNMR9S9mxwRnUBXOUWqntqr2e0uhSRJGk2zY2YuBy6OiM9S+9v+ZuAHpZWqapwBWJKkymo2zLwbeBPwT9TeUb4C+EJZhaoau5kkSaqupsJMZg5QmwX4M+UWp5qcZkaSpOpqdp6ZRcD/BhYDUwf3Z+ZhJZWrWtIsI0lSVTU7APjfqbXKbAVeAHyZ2gR6ewXXZpIkqbqaDTPTMvOHQGTm3Zn5QeCF5RWreowykiRVU7MDgDdGRAe1VbPPAO4FDiivWNXiq9mSJFVXsy0zbwemA28DngacBpxeVqGqJh0zI0lSZTVsmSkmyHt1Zr4L6APeUHqpKsgxM5IkVVPDlpnM7AeeFnvxX/PaPDOSJKmKmh0zcxPw3Yj4BvDY4M7M/FYppaqYNM1IklRZzYaZ/YC1DH+DKYG
9IsyAMwBLklRVzc4AvFeOkxnk20ySJFVXszMA/zvFrP71MvPvx71EFWWWkSSpmprtZvpe3eepwCuA+8a/ONWUrpotSVJlNdvN9M367Yj4GnBVo+si4gTgk0An8IXM/MiI4+8A/oHaMgkPAH+fmXcXxw6mtjL3QdRahV6amXc1U97x5qrZkiRVV7OT5o20CDh4ZycU89OcB5xIbYHK10TE4hGn3QQsy8yjgUuAc+uOfRn4aGY+BTgWWDPGso4LW2YkSaqmZsfMrGf4mJk/Ae9ucNmxwOrMvLO4x0XAycAtgydk5tV1519LbWZhitAzKTOvLM7ra6acZXEAsCRJ1dVsN9OsMdx7PnBP3XYv8IydnP9G4LLi858Bj0TEt4CF1Lq0ziom8BsSEcuB5QDz5s2jp6dnDMVsbGBggD/e90d6eh4q5f4arq+vr7SfpYazrlvL+m4d67q12l3fzbbMvAL4UWY+WmzPAboz8zs7u2yUfdu9EVXc7zRgGfD8unI9F1gK/AH4OvB64IvDbpZ5PnA+wLJly7K7u7uZr7PL4kff58D5B9LdfVQp99dwPT09lPWz1HDWdWtZ361jXbdWu+u72TEzHxgMMgCZ+QjwgQbX9FIbvDtoAaO8ARURxwPvAU7KzE11196UmXdm5lbgO8AxTZZ13GVCh91MkiRVUrNhZrTzGrXqXA8sioiFEdEFnAJcWn9CRCwFPkctyKwZce2+EfGEYvuF1I21aTXfZpIkqbqaDTM3RMTHIuLwiDgsIj4O3LizC4oWlTOAy4FbgYszc1VEnBMRJxWnfRSYCXwjIlZGxKXFtf3AO4EfRsRvqHVZfX6Xv904cQCwJEnV1eykef8f8D5qY1cArgDe2+iizFwBrBix7/11n4/fybVXAkc3Wb7SmWUkSaqmZt9megw4q+SyVFZtBmDjjCRJVdRUN1NEXFm8wTS4vW9EXF5esapl1FewJElSJTQ7Zmb/4g0mADLzYeCAcopUTTbMSJJUTc2GmYFirSQAIuJQ9qIGi0zfZpIkqaqaHQD8HuAnEXFNsf08ipl39xa2zEiSVE3NDgD+QUQsoxZgVgLfBTaUWbAqqc0zI0mSqqjZ5Qz+ATiT2iy+K4FnAj8ON1n5AAAQ90lEQVSnNpndHs95ZiRJqq5mx8ycCTwduDszX0BtzaQHSitV1fhqtiRJldVsmNmYmRsBImJKZv4WeHJ5xaoWu5kkSaquZgcA9xbzzHwHuDIiHmaURSP3VAmmGUmSKqrZAcCvKD5+MCKuBmYDPyitVFXjq9mSJFVWsy0zQzLzmsZn7VkcACxJUnU1O2Zmr2eWkSSpmgwzTbBlRpKk6jLMNMHlDCRJqi7DTJNsmZEkqZoMM01wnhlJkqrLMNMsm2YkSaokw0wDmQnYMiNJUlUZZhoosowNM5IkVZRhpoEiy/g2kyRJFWWYaWCom8ksI0lSJRlmGhhsmekwzEiSVEmGmQYGhlpmTDOSJFWRYaaBwQHAkiSpmgwzTbJhRpKkajLMNDD0arZvM0mSVEmGmQYS32aSJKnKDDMNbGuZkSRJVWSYaWBo0jzTjCRJlWSYaWDb2kymGUmSqsgw04AtM5IkVZthpgHnmZEkqdpKDTMRcUJE3BYRqyPirFGOvyMibomIX0fEDyPikBHH94mIeyPiU2WWc6eGVs22aUaSpCoqLcxERCdwHnAisBh4TUQsHnHaTcCyzDwauAQ4d8TxfwauKauMzRh6NbudhZAkSTtUZsvMscDqzLwzMzcDFwEn15+QmVdn5uPF5rXAgsFjEfE0YB5wRYllbGjo1WzTjCRJlVRmmJkP3FO33Vvs25E3ApcBREQH8K/Au0orXZOGBgC3tRSSJGlHJpV479H+/o86nDYiTgOWAc8vdr0FWJGZ9+xsrEpELAeWA8ybN4+enp7dKe+o1m2uFXn16tX0bLl73O+v7fX19ZXys9T2rOvWsr5bx7purXbXd5lhphc4qG57AXDfyJMi4njgPcDzM3N
TsftZwHMj4i3ATKArIvoyc9gg4sw8HzgfYNmyZdnd3T3uX+LBvk3wo6v4sz9bRPezDh33+2t7PT09lPGz1Pas69ayvlvHum6tdtd3mWHmemBRRCwE7gVOAU6tPyEilgKfA07IzDWD+zPztXXnvJ7aIOHt3oZqBZczkCSp2kobM5OZW4EzgMuBW4GLM3NVRJwTEScVp32UWsvLNyJiZURcWlZ5xipxBLAkSVVWZssMmbkCWDFi3/vrPh/fxD3+A/iP8S5b02yZkSSp0pwBuAGXM5AkqdoMMw1sGzNjmpEkqYoMMw0MzQBslpEkqZIMMw34NpMkSdVmmGnAMTOSJFWbYaaBzMGFJk0zkiRVkWGmgXRxJkmSKs0w08BgmOmwn0mSpEoyzDQw9DZTm8shSZJGZ5hpIF3NQJKkSjPMNODbTJIkVZthpgHfZpIkqdoMMw3YMiNJUrUZZhoYejVbkiRVkmGmocG1mWyakSSpigwzDbg2kyRJ1WaYacAxM5IkVZthpoFtLTOmGUmSqsgw08DQDMBmGUmSKskw04BjZiRJqjbDTAMuZyBJUrUZZhrIbUOA21oOSZI0OsNMA7bMSJJUbYaZJpllJEmqJsNMA9taZowzkiRVkWGmgaFXs9tcDkmSNDrDTAOOmZEkqdoMMw24nIEkSdVmmGkgc7CbyTQjSVIVGWYaGGyZMctIklRNhpkGXM5AkqRqM8w0NLjQpHFGkqQqMsw0YMuMJEnVZphpYHDMTIctM5IkVVKpYSYiToiI2yJidUScNcrxd0TELRHx64j4YUQcUuxfEhE/j4hVxbG/LbOcOzMwMNjN1K4SSJKknSktzEREJ3AecCKwGHhNRCwecdpNwLLMPBq4BDi32P848LrMfCpwAvCJiJhTVll3xjWzJUmqtjJbZo4FVmfmnZm5GbgIOLn+hMy8OjMfLzavBRYU+3+XmbcXn+8D1gBPKLGsO5SmGUmSKm1SifeeD9xTt90LPGMn578RuGzkzog4FugC7hjl2HJgOcC8efPo6enZjeKO7ta1/QD8auWv2HxP57jfX9vr6+sr5Wep7VnXrWV9t4513Vrtru8yw8xobRk5yj4i4jRgGfD8EfufBHwFOD0zB7a7Web5wPkAy5Yty+7u7t0s8va6Vj8I1/+CpUuX8MzD5o77/bW9np4eyvhZanvWdWtZ361jXbdWu+u7zDDTCxxUt70AuG/kSRFxPPAe4PmZualu/z7A94H3Zua1JZZzp+xlkiSp2socM3M9sCgiFkZEF3AKcGn9CRGxFPgccFJmrqnb3wV8G/hyZn6jxDI2tG3VbOOMJElVVFqYycytwBnA5cCtwMWZuSoizomIk4rTPgrMBL4RESsjYjDsvBp4HvD6Yv/KiFhSVll3JvHVbEmSqqzMbiYycwWwYsS+99d9Pn4H130V+GqZZWuWMwBLklRtzgDcwNCYGdOMJEmVZJhpIJ1oRpKkSjPMNGDLjCRJ1WaYacQxM5IkVZphpoFtbzMZZyRJqiLDTAO+zSRJUrUZZhrYNmlee8shSZJGZ5hpYNu7TKYZSZKqyDDTwOCr2bbMSJJUTYaZBkZd5luSJFWGYaYBx8xIklRthpmGim4mx8xIklRJhpkGbJmRJKnaDDMNuJyBJEnVZphpYNukeaYZSZKqyDDTwLblDNpcEEmSNCrDTAMuZyBJUrUZZhoYSBealCSpygwzTTLLSJJUTYaZBuxmkiSp2gwzDWwbAGyckSSpigwzDdgyI0lStRlmGnAGYEmSqs0w08DQDMC2zUiSVEmGmQYynTRPkqQqM8w0kI1PkSRJbWSYacQxM5IkVZphpgFfzZYkqdoMMw34arYkSdVmmGlg6G0m04wkSZVkmGlgW8uMaUaSpCoyzDSwbcxMmwsiSZJGZZhpwDEzkiRVW6lhJiJOiIjbImJ1RJw1yvF3RMQtEfHriPhhRBxSd+z0iLi9+Of0Msu5M0PzzJhmJEmqpNLCTER0AucBJwKLgddExOI
Rp90ELMvMo4FLgHOLa/cDPgA8AzgW+EBE7FtWWXdqcAZg04wkSZVUZsvMscDqzLwzMzcDFwEn15+QmVdn5uPF5rXAguLzS4ArM/OhzHwYuBI4ocSy7pBvM0mSVG1lhpn5wD11273Fvh15I3DZGK8tjWNmJEmqtkkl3nu0v/+jLnUUEacBy4Dn78q1EbEcWA4wb948enp6xlTQnfnd3VsA+NnPfsasLiNNK/T19ZXys9T2rOvWsr5bx7purXbXd5lhphc4qG57AXDfyJMi4njgPcDzM3NT3bXdI67tGXltZp4PnA+wbNmy7O7uHnnKbvv9T38Pt97Cc447jn1ndI37/bW9np4eyvhZanvWdWtZ361jXbdWu+u7zG6m64FFEbEwIrqAU4BL60+IiKXA54CTMnNN3aHLgRdHxL7FwN8XF/taLl1oUpKkSiutZSYzt0bEGdRCSCdwQWauiohzgBsy81Lgo8BM4BvFQo5/yMyTMvOhiPhnaoEI4JzMfKissu7M0ABgR81IklRJZXYzkZkrgBUj9r2/7vPxO7n2AuCC8krXnHQEsCRJleYMwE2ym0mSpGoyzDQw2DDTYZqRJKmSDDMNDAzNACxJkqrIMNOAMwBLklRthpkGto3/Nc1IklRFhpkGsmibsWVGkqRqMsw0kKMuwCBJkqrCMNMkW2YkSaomw0wDC/adxpFzO301W5Kkiip1BuA9wclL5jP7kduZ3GnukySpivwLLUmSJjTDjCRJmtAMM5IkaUIzzEiSpAnNMCNJkiY0w4wkSZrQDDOSJGlCM8xIkqQJzTAjSZImNMOMJEma0AwzkiRpQjPMSJKkCc0wI0mSJrTIzHaXYVxExAPA3SXdfn/gwZLure1Z361jXbeW9d061nVrlVXfh2TmExqdtMeEmTJFxA2Zuazd5dhbWN+tY123lvXdOtZ1a7W7vu1mkiRJE5phRpIkTWiGmeac3+4C7GWs79axrlvL+m4d67q12lrfjpmRJEkTmi0zkiRpQjPMNBARJ0TEbRGxOiLOand5JrqIuCAi1kTEzXX79ouIKyPi9uLf+xb7IyL+raj7X0fEMe0r+cQUEQdFxNURcWtErIqIM4v91vk4i4ipEXFdRPyqqOsPFfsXRsQvirr+ekR0FfunFNuri+OHtrP8E1FEdEbETRHxvWLbui5JRNwVEb+JiJURcUOxrzK/RwwzOxERncB5wInAYuA1EbG4vaWa8P4DOGHEvrOAH2bmIuCHxTbU6n1R8c9y4DMtKuOeZCvw3zPzKcAzgbcW/xu2zsffJuCFmfkXwBLghIh4JvAvwMeLun4YeGNx/huBhzPzCODjxXnaNWcCt9ZtW9flekFmLql7Bbsyv0cMMzt3LLA6M+/MzM3ARcDJbS7ThJaZPwYeGrH7ZOBLxecvAS+v2//lrLkWmBMRT2pNSfcMmfnHzPxl8Xk9tV/887HOx11RZ33F5uTinwReCFxS7B9Z14M/g0uAv4yIaFFxJ7yIWAC8DPhCsR1Y161Wmd8jhpmdmw/cU7fdW+zT+JqXmX+E2h9f4IBiv/U/joqm9aXAL7DOS1F0e6wE1gBXAncAj2Tm1uKU+vocquvi+KPA3NaWeEL7BPA/gIFiey7WdZkSuCIiboyI5cW+yvwemVTmzfcAoyV3X/9qHet/nETETOCbwNszc91O/qPUOt8NmdkPLImIOcC3gaeMdlrxb+t6jCLir4A1mXljRHQP7h7lVOt6/ByXmfdFxAHAlRHx252c2/L6tmVm53qBg+q2FwD3takse7L7B5sgi3+vKfZb/+MgIiZTCzL/mZnfKnZb5yXKzEeAHmrjlOZExOB/ONbX51BdF8dns30XrEZ3HHBSRNxFrfv/hdRaaqzrkmTmfcW/11AL6sdSod8jhpmdux5YVIyQ7wJOAS5tc5n2RJcCpxefTwe+W7f/dcXI+GcCjw42aao5xbiALwK3ZubH6g5Z5+MsIp5QtMgQEdOA46mNUboa+JvitJF1Pfg
z+BvgR+nEX03JzLMzc0FmHkrt9/KPMvO1WNeliIgZETFr8DPwYuBmKvR7xEnzGoiIl1JL/J3ABZn54TYXaUKLiK8B3dRWWL0f+ADwHeBi4GDgD8CrMvOh4g/xp6i9/fQ48IbMvKEd5Z6oIuI5wH8Bv2Hb2IL/SW3cjHU+jiLiaGqDIDup/YfixZl5TkQcRq31YD/gJuC0zNwUEVOBr1Abx/QQcEpm3tme0k9cRTfTOzPzr6zrchT1+u1icxJwYWZ+OCLmUpHfI4YZSZI0odnNJEmSJjTDjCRJmtAMM5IkaUIzzEiSpAnNMCNJkiY0w4ykPU5EdA+upCxpz2eYkSRJE5phRlLbRMRpEXFdRKyMiM8VCzX2RcS/RsQvI+KHEfGE4twlEXFtRPw6Ir4dEfsW+4+IiKsi4lfFNYcXt58ZEZdExG8j4j9dJVnacxlmJLVFRDwF+FtqC9gtAfqB1wIzgF9m5jHANdRmiQb4MvDuzDya2ozGg/v/EzgvM/8CeDYwOG36UuDtwGLgMGrr+UjaA7lqtqR2+UvgacD1RaPJNGoL1Q0AXy/O+SrwrYiYDczJzGuK/V8CvlGsFzM/M78NkJkbAYr7XZeZvcX2SuBQ4Cflfy1JrWaYkdQuAXwpM88etjPifSPO29maKzvrOtpU97kff99Jeyy7mSS1yw+Bv4mIAwAiYr+IOITa76XBlY9PBX6SmY8CD0fEc4v9fwdck5nrgN6IeHlxjykRMb2l30JS2/lfKpLaIjNviYj3AldERAewBXgr8Bjw1Ii4EXiU2rgagNOBzxZh5U7gDcX+vwM+FxHnFPd4VQu/hqQKcNVsSZUSEX2ZObPd5ZA0cdjNJEmSJjRbZiRJ0oRmy4wkSZrQDDOSJGlCM8xIkqQJzTAjSZImNMOMJEma0AwzkiRpQvt/mlcLX6tUegEAAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAGDCAYAAAAxsvoUAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xl8nWWd9/HvL0uTpmlTmrYBWqCFCrJaISAIzqTsICrKMi44ysxYZ14OD/qIAoOoOOCgPq4zilatIyogiyyyW23YobSlhbSldKEladombZp9z7meP8590qRJ73PS5pz7Ounn/Xrxas59tt/5JaTfXtd135c55wQAAJCNcqIuAAAAYF8RZAAAQNYiyAAAgKxFkAEAAFmLIAMAALIWQQYAAGQtggyAyJnZJjM7N+o6AGQfggwAAMhaBBkAAJC1CDIAvGFmBWb2IzOrDf77kZkVBPdNNbNHzazRzBrM7Dkzywnuu97MtphZi5mtNbNzov0kADIlL+oCAGCAmySdLmmuJCfpYUlfk3SzpC9LqpE0LXjs6ZKcmR0j6d8lneqcqzWzWZJyM1s2gKgwIgPAJ5+S9C3nXJ1zrl7SLZI+HdzXI+kQSUc453qcc8+5+GZxfZIKJB1nZvnOuU3OuQ2RVA8g4wgyAHxyqKTNA25vDo5J0vckrZf0tJltNLMbJMk5t17SFyV9U1Kdmd1jZocKwAGBIAPAJ7WSjhhw+/DgmJxzLc65LzvnjpT0IUn/N7EWxjl3l3PurOC5TtJ3Mls2gKgQZAD45G5JXzOzaWY2VdLXJf1ekszsEjObY2YmqVnxKaU+MzvGzM4OFgV3SuoI7gNwACDIAPDJrZKWSnpd0huSlgfHJOldkhZJapX0kqSfOecqFV8fc7ukHZK2SZou6T8yWjWAyFh8rRwAAED2YUQGAABkLYIMAADIWgQZAACQtQgyAAAgaxFkAABA1hoTey1NnTrVzZo1Ky2v3dbWpgkTJqTltTEU/c4cep1Z9Dtz6HVmpavfy5Yt2+Gcm5bscWMiyMyaNUtLly5Ny2tXVlaqoqIiLa+Noeh35tDrzKLfmUOvMytd/TazzckfxdQSAADIYgQZAACQtQgyAAAga42JNTLD6enpUU1NjTo7O/frdUpKSrRmzZpRqio9CgsLNXPmTOXn50ddCgAAGTVmg0xNTY0mTpyoWbNmKb5Z7r5paWnRxIkTR7Gy0eWc086dO1VTU6PZs2dHXQ4AABk1ZqeWOjs7VVpaul8hJhuYmUpLS/d75AkAgGw0ZoOMpDEfYhIOlM8JAMCexnSQiVpjY6N+9rOfjfh5F198sRobG9NQEQAAYwtBJo32FmT6+vpCn/f4449r8uTJ6SoLAIAxY8wu9vXBDTfcoA0bNmju3LnKz89XcXGxDjnkEK1YsUKrV6/WpZdequrqanV2duraa6/V/PnzJe2+UnFra6suuuginXXWWXrxxRc1Y8YMPfzwwxo/fnzEnwwAAD8cEEHmlj+v0ura5n16bl9fn3Jzc4ccP+7QSfrGh44Pfe7tt9+uqqoqrVixQpWVlfrgBz+oqqqq/rOLFi5cqClTpqijo0OnnnqqLrvsMpWWlg56jXXr1unuu+/WL3/5S1155ZV64IEHdNVVV+3TZwEAYKw5IILMvoo5p5iThsaYfXPaaacNOkX6Jz/5iR588EFJUnV1tdatWzckyMyePVtz586VJJ1yyinatGnTKFUDAED2OyCCTLKRk72p2dWupvYeHT+jZFTqGLg7aGVlpRYtWqSXXnpJRUVFqqioGPYU6oKCgv6vc3Nz1dHRMSq1AAAwFrDYN40mTpyolpaWYe9ramrSQQcdpKKiIr355pt6+eWXM1wdAADZ74AYkdk/bp+fWVpaqjPPPFMnnHCCxo8fr7
Kysv77LrzwQv385z/XSSedpGOOOUann376aBQLAMABhSATYjQuM3fXXXcNe7ygoEBPPPHEsPcl1sFMnTpVVVVV/cevu+66UagIAICxg6mlUFwxFwAAnxFkwtj+TCwBAIB0I8gAAICs5W2QMbPrzMyZ2dTIaojqjQEAQEq8DDJmdpik8yS9E3UtAADAX14GGUk/lPRVsUQFAACEMOf8ygpm9mFJ5zjnrjWzTZLKnXM7hnncfEnzJamsrOyUe+65Z9D9JSUlmjNnzn7V0tARU0uP0xGT9m2TgsbGRt1333363Oc+N+Ln/vSnP9XVV1+toqKilB6/fv16NTU1jfh9fNPa2qri4uKoyzgg0OvMot+ZQ68zK139njdv3jLnXHmyx0USZMxskaSDh7nrJkn/Iel851xTWJAZqLy83C1dunTQsTVr1ujYY4/drzprGzvU0NalE2ZM3qfnb9q0SZdccsmga8GkKrED9tSpqS0RGo3P64PKykpVVFREXcYBgV5nFv3OHHqdWenqt5mlFGQiuSCec+7c4Y6b2YmSZktaaWaSNFPScjM7zTm3LYMlBvXs39zWDTfcoA0bNmju3Lk677zzNH36dN17773q6urSRz/6Ud1yyy1qa2vTlVdeqZqaGvX19enmm2/W9u3bVVtbq3nz5mnq1KlavHjxqH0mAADGEq+u7Ouce0PS9MTtVEdkknriBmnbGyN+2pS+Pk3sdVLBMG06+ETpottDn3/77berqqpKK1as0NNPP637779fS5YskXNOH/7wh/Xss8+qvr5ehx56qB577DFJ8T2YSkpK9IMf/ECLFy9OeUQGAIADka+Lfcecp59+Wk8//bTe+9736uSTT9abb76pdevW6cQTT9SiRYt0/fXX67nnnlNJyejstA0AwIHAqxGZPTnnZo3KCyUZOdmbhqYO7Wjp0okz922NzEDOOd144436/Oc/P+S+ZcuW6fHHH9eNN96o888/X1//+tf3+/0AADgQMCKTRhMnTlRLS4sk6YILLtDChQvV2toqSdqyZYvq6upUW1uroqIiXXXVVbruuuu0fPnyIc8FAADD83pEJmr7e2Xf0tJSnXnmmTrhhBN00UUX6ZOf/KTOOOMMSVJxcbF+//vfa/369frKV76inJwc5efn64477pAkzZ8/XxdddJEOOeQQFvsCALAXBJlQtt9X5LvrrrsG3b722msH3T7qqKN0wQUXDHneNddco2uuuWY/3x0AgLGNqSUAAJC1CDIAACBrEWRCWLBIxrdtHAAAQNyYDjIHSgA5UD4nAAB7GrNBprCwUDt37hzzf8k757Rz504VFhZGXQoAABk3Zs9amjlzpmpqalRfX7/Pr9Hc2aPmjl7lNRfKbH9Pxk6fwsJCzZw5M+oyAADIuDEbZPLz8zV79uz9eo3//us6ff8vb2ndbRcpP3fMDl4BAJC1+Ns5RE5OfBRmjM9OAQCQtQgyKYiRZAAA8BJBJoTHy2IAAIAIMqFMTC0BAOAzgkyI/gvi7feOSwAAIB0IMiFy+q/sG20dAABgeASZEImpJRb7AgDgJ4JMiN1TSwAAwEcEmRQwIAMAgJ8IMiGMIRkAALxGkAmRw1lLAAB4jSATInE9vBg5BgAALxFkQiSmlhyLZAAA8BJBJgRLZAAA8BtBJsTuEZmICwEAAMMiyIRIrJFhagkAAD8RZEIwtQQAgN8IMiHY/RoAAL8RZEKw+zUAAH4jyIRg92sAAPxGkAnB7tcAAPiNIBOGERkAALxGkAlhyR8CAAAiRJAJwQXxAADwG0EmBLtfAwDgN4JMiMTp1+x+DQCAnwgyIXZfEI8kAwCAjwgyIdiiAAAAvxFkQrDYFwAAvxFkQrD7NQAAfiPIhGBqCQAAv3kZZMzsGjNba2arzOy7kdXB7tcAAHgtL+oC9mRm8yR9RNJJzrkuM5seXS3xP7mODAAAfvJxRObfJN3unOuSJOdcXVSFsPs1AAB+8zHIHC3pA2b2ipk9Y2anRlcKu1
8DAOCzSKaWzGyRpIOHuesmxWs6SNLpkk6VdK+ZHen2OHXIzOZLmi9JZWVlqqysHPU6V23vlSQtXbpU9ZNyR/31MVRra2tavpcYil5nFv3OHHqdWVH3O5Ig45w7d2/3mdm/SfpTEFyWmFlM0lRJ9Xu8xgJJCySpvLzcVVRUjHqd3au2Sa8t0ymnlOuEGSWj/voYqrKyUun4XmIoep1Z9Dtz6HVmRd1vH6eWHpJ0tiSZ2dGSxknaEUUhXBAPAAC/eXfWkqSFkhaaWZWkbkmf2XNaKVPY/RoAAL95F2Scc92Sroq6DondrwEA8J2PU0veYPdrAAD8RpAJwxYFAAB4jSATYvemkZGWAQAA9oIgEyLH+qNMpHUAAIDhEWRCsNgXAAC/EWRCsPs1AAB+I8iE6N/9miQDAICXCDIhjLOWAADwGkEmhLH7NQAAXiPIhOCkJQAA/EaQCUGOAQDAbwSZEOx+DQCA3wgyIdj9GgAAvxFkQnBBPAAA/EaQCcXu1wAA+IwgE4LryAAA4DeCTIjEWUskGQAA/ESQCZHY/ZrFvgAA+IkgE6J/sW8s2joAAMDwCDIh+ne/jrgOAAAwPIJMCHa/BgDAbwSZEJy1BACA3wgyIYzryAAA4DWCTIjdU0vR1gEAAIZHkAnB1BIAAH4jyITYPbUUcSEAAGBYBJkQ7H4NAIDfCDIh2P0aAAC/EWRCcdYSAAA+I8iEMEv+GAAAEB2CTIhEjmFABgAAPxFkQrD7NQAAfiPIhGD3awAA/EaQCcHu1wAA+I0gE4LdrwEA8BtBJgXEGAAA/ESQCZGTw2ZLAAD4jCATInH6dYypJQAAvESQCcHu1wAA+I0gE4LdrwEA8BtBJgS7XwMA4DeCTBh2vwYAwGveBRkzm2tmL5vZCjNbamanRVaL+i8kE1UJAAAghHdBRtJ3Jd3inJsr6evB7Uiw2BcAAL/5GGScpEnB1yWSaqMqhN2vAQDwm/l2+X0zO1bSU4rniBxJ73fObR7mcfMlzZeksrKyU+65555Rr6W12+nf/9auTx07TucdkT/qr4+hWltbVVxcHHUZBwR6nVn0O3PodWalq9/z5s1b5pwrT/a4vFF/5xSY2SJJBw9z102SzpH0JefcA2Z2paRfSzp3zwc65xZIWiBJ5eXlrqKiYtTrbGzvlv72Fx111BxVnDV71F8fQ1VWViod30sMRa8zi35nDr3OrKj7HUmQcc4NCSYJZnanpGuDm/dJ+lVGihquFna/BgDAaz6ukamV9PfB12dLWhdZJex+DQCA1yIZkUnic5J+bGZ5kjoVrIOJQuKsJQAA4Cfvgoxz7nlJp0RdhyTlGFsUAADgMx+nlrzB7tcAAPiNIBOCC+IBAOA3gkwIdr8GAMBvBJkQxu7XAAB4jSATwtgzEgAArxFkQuyeWiLJAADgI4JMCEZkAADwG0EmRP/u15FWAQAA9oYgE4IL4gEA4DeCTIjE1BIXxAMAwE8EmRBm7H4NAIDPCDKpYEQGAAAvEWSSMDEiAwCArwgySZgxIAMAgK8IMkmYWOwLAICvCDIpIMYAAOAngkwSJqaWAADwFUEmGWP3awAAfEWQSSJHYm4JAABPEWSSMRb7AgDgK4JMEqyRAQDAXwSZJLggHgAA/iLIJMEF8QAA8BdBJgWskQEAwE8EmSQs6gIAAMBepRRkzOxaM5tkcb82s+Vmdn66i/NBfGqJERkAAHyU6ojMPznnmiWdL2mapKsl3Z62qjxDjAEAwE+pBpnEDMvFkn7jnFupA2TWJUcs9gUAwFepBpllZva04kHmKTObKCmWvrI8wgXxAADwVl6Kj/tnSXMlbXTOtZvZFMWnl8Y8riMDAIC/Uh2ROUPSWudco5ldJelrkprSV5ZPjKklAAA8lWqQuUNSu5m9R9JXJW2WdGfaqvKImcSYDAAAfko1yPS6+DnIH5H0Y+fcjyVNTF9Z/mCvJQAA/JXqGpkWM7tR0qclfcDMciXlp68sf5hY7AsAgK9SHZH5B0
ldil9PZpukGZK+l7aqPMJeSwAA+CulIBOElz9IKjGzSyR1OucOiDUyEitkAADwVapbFFwpaYmkKyRdKekVM7s8nYX5gjUyAAD4K9U1MjdJOtU5VydJZjZN0iJJ96erMF+w1xIAAP5KdY1MTiLEBHaO4LlZjQviAQDgr1RHZJ40s6ck3R3c/gdJj6enJP8wIgMAgJ9SCjLOua+Y2WWSzlR8kGKBc+7BtFbmCTNGZAAA8FXK00POuQecc//XOfel/Q0xZnaFma0ys5iZle9x341mtt7M1prZBfvzPqOBxb4AAPgrdETGzFo0/IBE/O935ybt4/tWSfqYpF/s8X7HSfq4pOMlHSppkZkd7Zzr28f32W9cEA8AAH+FBhnnXFq2IXDOrZEki29kNNBHJN3jnOuS9LaZrZd0mqSX0lFHSphaAgDAW76deTRDUvWA2zXBsciwZyQAAP5K9aylETOzRZIOHuaum5xzD+/tacMcGzZGmNl8SfMlqaysTJWVlftSZlIuFtP2urq0vT4Ga21tpdcZQq8zi35nDr3OrKj7nbYg45w7dx+eViPpsAG3Z0qq3cvrL5C0QJLKy8tdRUXFPrxdcrnPP65p06apouKUtLw+BqusrFS6vpcYjF5nFv3OHHqdWVH327eppUckfdzMCsxstqR3Kb41QmRY7AsAgL8iCTJm9lEzq5F0hqTHgovtyTm3StK9klZLelLSF6I8YymBHAMAgJ/SNrUUJrgOzbDXonHO3SbptsxWtHdmxlpfAAA85dvUkpcYkQEAwE8EmSRyOP8aAABvEWRSECPHAADgJYJMEsFeDFGXAQAAhkGQScLExBIAAL4iyCRhxmJfAAB8RZBJARfEAwDATwSZJIbb/AkAAPiBIJMEU0sAAPiLIJMCx3JfAAC8RJAJ07xVs101IzIAAHiKIBOm8tu6ves2FvsCAOApgkyY/AkqVBcjMgAAeIogE2ZcURBkSDIAAPiIIBMmv0i5iinP9URdCQAAGAZBJkx+kSSpwHVGXAgAABgOQSbMuHiQyY8RZAAA8BFBJkz+BEnSOIIMAABeIsiEGcfUEgAAPiPIhEmskWFEBgAALxFkwgRBZpzriLgQAAAwHIJMmP6ppa6ICwEAAMMhyITJJ8gAAOAzgkyYcfGzlgqYWgIAwEsEmTAs9gUAwGsEmTBMLQEA4DWCTJjcPPUoj+vIAADgKYJMEp1WoIIYa2QAAPARQSaJLhVqHFNLAAB4iSCTRLcVaBxTSwAAeIkgk0SXFXDWEgAAniLIJNFtBSoUQQYAAB/lRV2A77pzCjSrd5P0xPVRl3JAmFNTI3U8EXUZBwR6nVn0O3PodQYdc3HUFRBkktmQf6wO71ovrbw76lIOCGW9vdJOfiwzgV5nFv3OHHqdQVOOknR0pCXwnU7iqUmX6fqmS7X2houiLuWA8EJlpSoqKqIu44BArzOLfmcOvc6wyspI3541MknkmtQbc1GXAQAAhkGQSSLXpL6Yk3OEGQAAfEOQSSI36BCjMgAA+Icgk0Suxf/s7SPIAADgG4JMErkWTzI9sVjElQAAgD0RZJLon1piRAYAAO8QZJLYPbXEiAwAAL6JJMiY2RVmtsrMYmZWPuD4eWa2zMzeCP48O4r6BuoPMiz2BQDAO1FdEK9K0sck/WKP4zskfcg5V2tmJ0h6StKMTBc3EFNLAAD4K5Ig45xbI0kWLKQdcPy1ATdXSSo0swLnXFcGyxuExb4AAPjL5y0KLpP02t5CjJnNlzRfksrKylSZpksk93R3SjK99PISVU9kSVG6tba2pu17icHodWbR78yh15kVdb/TFmTMbJGkg4e56ybn3MNJnnu8pO9IOn9vj3HOLZC0QJLKy8tduvbVWPbHRZK6NPfkU3TCjJK0vAd2q2SPlIyh15lFvzOHXmdW1P1OW5Bxzp27L88zs5mSHpT0j865DaNb1cglFvv2sdgXAADveDVXYmaTJT0m6Ubn3AtR1yMNPGuJNTIAAPgmqt
OvP2pmNZLOkPSYmT0V3PXvkuZIutnMVgT/TY+ixoTcnGCxL2ctAQDgnajOWnpQ8emjPY/fKunWzFe0d+y1BACAv7yaWvJR4joynH4NAIB/CDJJMCIDAIC/CDJJ7D5riREZAAB8Q5BJgsW+AAD4iyCTBKdfAwDgL4JMEokgw4gMAAD+Icgkwe7XAAD4iyCTRGL3axb7AgDgH4JMEkwtAQDgL4JMEv1TS4zIAADgHYJMEozIAADgL4JMElzZFwAAfxFkksjhyr4AAHiLIJOEmSk/19QTY0QGAADfEGRSkJeTo94+RmQAAPANQSYFebnGYl8AADxEkElBfm4Op18DAOAhgkwKcnOMs5YAAPAQQSYF+TmmXhb7AgDgHYJMCvJyWewLAICPCDIpyOP0awAAvESQSUE+p18DAOAlgkwKWOwLAICfCDIpyM9lsS8AAD4iyKSguDBPje3dUZcBAAD2QJBJwbEHT9KabS3qYZ0MAABeIcik4MSZJerujekr963UiurGqMsBAAABgkwKTphRIkl6aEWtbn6oKuJqAABAAkEmBbNLJ/R/fdS0CSGPBAAAmUSQSUFOjun2j50YdRkAAGAPBJkUffy0w/Wu6cXq6mXBLwAAviDIjEBBfg5BBgAAjxBkRqAwL1ddvX1RlwEAAAIEmREoyM9RVw8jMgAA+IIgMwIFeblMLQEA4BGCzAgU5OWos4epJQAAfEGQGYGCPBb7AgDgE4LMCBSw2BcAAK8QZEagkNOvAQDwCkFmBAryczlrCQAAjxBkRqAgL0edvX1yzkVdCgAAEEFmRArycuSc1NNHkAEAwAeRBBkzu8LMVplZzMzKh7n/cDNrNbProqhvbwryciWJBb8AAHgiqhGZKkkfk/TsXu7/oaQnMldOagrz4+1iwS8AAH7Ii+JNnXNrJMnMhtxnZpdK2iipLcNlJbV7RIYgAwCADyzKhatmVinpOufc0uD2BEmLJJ0n6TpJrc65/7eX586XNF+SysrKTrnnnnvSUmNra6uKi4slSS/V9uoXr3fpv84ar+rWmE6enqu8nKFhDPtuYL+RXvQ6s+h35tDrzEpXv+fNm7fMOTdk+cme0jYiY2aLJB08zF03Oece3svTbpH0Q+dc63CjNQM55xZIWiBJ5eXlrqKiYj+q3bvKykolXruzaqv0+nIVHHq0fnbvSi38bLkq3l2Wlvc9UA3sN9KLXmcW/c4cep1ZUfc7bUHGOXfuPjztfZIuN7PvSposKWZmnc65/xnd6vZNYmqpoa1bktTWxaJfAACiFMkamb1xzn0g8bWZfVPxqSUvQowUP/1akpo7eiSxVgYAgKhFdfr1R82sRtIZkh4zs6eiqGOkCvLjIzLNnb2SpG6CDAAAkYrqrKUHJT2Y5DHfzEw1qesfkelMjMgwtQQAQJS4su8IJK4j09wRH5FhagkAgGgRZEYgsdg3MSLD1BIAANEiyIzA0MW+TC0BABAlgswIJBb7tgSLfbt6GJEBACBKBJkR2HNEpruPIAMAQJQIMiNQkJcjM6mlixEZAAB8QJAZATNTUTC9JLFGBgCAqBFkRmj8uN1BprMnpi2NHRFWAwDAgY0gM0IDg8yTq7bp77+7WHXNnRFWBADAgYsgM0JF+YMvhtwbc9re3BVRNQAAHNgIMiM0cEQmoSW4QB4AAMgsgswIFQ0TZJoJMgAARIIgM0LDB5neCCoBAAAEmREaP27ohuGJC+QBAIDMIsiM0Pj8oS3bc0Tm1kdX60/LazJVEgAAByyCzAgVDTMis+di3z++Wq0nq7ZlqiQAAA5YBJkRGu6speaO3SMyHd19aunq1Y7WwadkL9u8Sxf+6Fm1drGeBgCA0UKQGaGBWxQkDByRqWuJXxxvZ1v3oMe8uqlBb25r0Ya61qTv0dnD1gcAAKSCIDNCw47IDAoy8ZGYHS2DR2S2B1f/rdkVvqXB0k0NOumbT7P1AQAAKSDIjNDwa2R2TxfVBVf5bevuU0d335DjNbvaQ19/VW
2zuvtiemt7y2iUCwDAmEaQGaFkF8RLTC1JGrROJtURmW3B47Y2Zv/+TT19MX3hruWq2tIUdSkAgDGKIDNChcEaGbPdxwaNyAyYUhoUZIKAs6WxQ929MbV3D7/oNxF4tjZl/9TS2zva9NjrW7VozfaoSwEAjFEEmRFKjMjkDEgyzR09cs5J2j2FJEk7W+MLfp1zg6aWvvHIKl1+x0vDvn4iyNTuw4jM+rpWraxuHPHz0qW6IT6N9k5D+HQaskd7d6+6elmMHqVvPFylR1+vjboMwBsEmRHaHWTitycW5CnmpF3t8emlupZOTS0ukLR7RKa5o1ddvTHl55qqGzr0l9XbtXprc/801Pq6lv4Rmm1N+z4i881HVunzv1vWH6qilggw7+wc/SDjnNOLG3YoFvPjsyZTs6tdV/9miXbtcTbbSF39myW686VNauro0T8uXKIN9cnPghstr25q0HFff0rfeWJtxt4Tg3V09+nOlzfrgWVccBNIIMiMUOKsJQtGZN5z2GRJ0us18ZGQjfVtmhsc21Dfqkv++zk9uWqrJOnMOVPV0dPXH3CWbdqlhrZuffAnz+v6B96QJG0PRm7e2t6iZZsbBr13Z0+fVtU2qXJt3ZBTtJ1zer2mUduaO/X2jraUPstX71+pWx9dPbIGjEB1QzyMjdaITHVDu+pbuvStP6/Wr59/W5/85St6IksuPPhk1TYtXluvZ9fVj/i5zjk551Tf0qXFa+v16Mqteuatej37Vr2eeGNrGqod3vUPvC5JemRlZkcDVlY36uEVWzL6nr56a3uLnJPe3BbtyQCra5vVxNYs8ARBZoQSZy3lB0Myp86aohyTlm/epeqGdm1p7NBZc0o1fWKB7nm1WlVbmvXTxRskSf9y1pH9r2MmffuJNfr242vU1RvTn1fWavHaOrV29So3x7SjtVuX3fFS/8hMY3u3zvn+M/rgT57XZ3/zqu6o3DCorppdHf1bJby4YWfSz9Ha1asHX9uiu5a8M+Lr1tTsalf5rYv0XJK/lKuDM7TqWroGncG1L7p7Yzrn+8/o1NsWaeELb+vWx9ZIkp5fvyPl19jV1q3fvby5f8orE2Ixp8dejwcPKX5hxJG64YE39NGfvajX3ok/9/UtjXpuwOv9eWWtTr1tkRr2YbTn/9z9mr720BtDjte3dGnh82+rLxjxqm3s0Mb6NhUX5KmhrWvhYzJGAAAN5UlEQVSva7zS4dbHVuu6+1YO+YszEd4To3JVW5r0u5c399+/aUdbRr/XmbBma7MkaWtTpxrb9290b181tHXr0p++oNufWBPJ+wN7IsiMUNmkAh1dVqyffOK9uuKUmfqXD8zWuw+epOXvNGrJ2/ERlPcdWaoLjj+4fxHwOw3tmliQp7mHT9aPPz5XH5l7qM44slTVDR26f1mNjpo2QYdNGa+rf/OqJGn21An97/d3312sy+54UZ9ZuETbmzv1vctP0hlHluquJe+ouzfW/7jEmUF5OaYnq7YlnV56Yf0O9fQ5tXf3qXJtnZ6s2to/rTWcWMz1/4Vx1yvvaEdrlxY8uzH0Paob2vun4JKddj7c+w28CvIzb9Wruy825HEvb0we2hL+87HVuvmhKp3/w2e1MUNTMo+srNUX7lqu59bFA1dYkGnv7tWvntuo2gHXENq0o033LavWiupG/fr5tyVJnT0x3RdMLSx/p1F3VG5QfUuX7l9WPaLaaltjemRlre5eUt2/Nivhv55Yo289uloPvRYfCXkpCMf/+vdHKubilwlIZkdrly7+8XP61XPhPydh6po7tXTzLvX0Of3tze39r/uJBS/rmrtf04f/5wX94tmNisWcvnL/67r5oSotf2eXtjd36pL/fl4f+O5i/fAvbw153adWbdN/PbFGPcP8TCVs3tmm/33h7SFrgtq7e9Ub8ryRfr6RTCMPHIlZs3X/RmV6+mL79A+Mx97Yqu6+mB59fWv/P4LqWjr18Iotof2UpMVv1unCHz2rN7cl//kBUm
W+rKfYH+Xl5W7p0qVpee3KykpVVFSEPuabj6zSnS9tUsxJJePz9drN52nJpgZ9fMHLmlSYp+bOXv3TmbP19Q8d1/+c+pYu1exq1ytvN+jUWVNUXJCnT/7yZTV29OiP80/XjtZufe2hKu1o7dLMg8YrPzdH//KB2frU+47Q4rV1uvo3r2pWaZEOmjBOUnyR8fbmTn3pvKP1vafW6t0HTxz24n0J25o61dLZq4K8HDV39qinz2liYZ7mTC8e8ljn4tNk43JzdHhpkd7a1qKemFN3b0xzD5s86Ayugd6oadLxM0q0srpRR06boJLx+Un73dzUrEklk1TX3KXapg4dd8gkjcvLUW1jh3r7nF644WxVrq3Xv/5+maZMGKeGtu7QGgZ+hpU1jTr/uDK9tGGnisbl6ZDJhZKkmIuPOEwen6/iwqHXCRpOT19Mb9e36bApRaF93ljf1j+SMLEgT23dvf3TkXuqa+7SlsYOTS7K7w+z1Q3tau7sVX6Oqa27T2WTCvqnH98zs0Qra+IBdlxejgrzcnTUMN+/vamub1JTt9TT5wb9LEnqXzQ+sTBfR06boNrgbLsnv/h3et+3/6ojSos0ZcDjwz6PmfSemcm/R8Np6ujRxvo2TRiXq6KCPM08aLy2N3WqdkDoLszP0VHTirWqtlk5Jk2bWKD83BzVNXfp746epkVrtg/6GXEuPhUcc9KRUyeopGj4n8v121vV0tWrw6cUqbQ4/lljMafVW5tVMn6cDpsyfkSfJfGzneBcfIqmzzmdMKOkP/SH2Vjfpknj81Td0JHS9yDM5p3tau3s1bGHTkrpvQc+r6O7Tx09ff2/Z97e0abG9p6kNa3d1qL27j6VThinw0uL9rn2ZPbsNdLns++fpZLGdUn/ntwXZrbMOVee9HEEmXCpBJmm9h797Jn1Wl3brLPfPV1XnzlbfTGn/3x0tS44/mA9sLxGX7ngGJVNKgx9HeecevqcxuXFB8pe2rBTz62r15fPP0a5A37TOOf0wPIteiL4l1HCew8/SF8691368V/XpTSFUXHMdJ1+5BT9/uV3NGd6sV57Z9de570PLRmv7r6YdrR2KTfHdPWZs3XXK5vVHvIvutwc0xfmzdEza+u1qrZJvSkszN3VsEsHTTlIReNyNat0gtZsa+kfXfrQew7VleWHqbOnT7f8eZUunTtDv3r+7ZSnxqZNLNA3PnS8VlQ3auHzbys24Ge/bFKhGtt7Uj4jJ8dMMw8aHw9YIZ9rXG6OPnvmLD2yolaXnzJTC57dOOzIkhQfTTvvuIP17Fv1agumbiYXjdOnTz9C6+pa9GTVNl1RfphW1zbrkJJCXXziIfraQ28oLzdHl508Q799cfOgz5TMroZd+sj7jlZbd++Qn5cJ4/J0RflM3fnS7tc899gyfeb9s/StP6/WurrkowFmpstOnqEX1u/Q1pDRvmSOmlasE2eU6KFgnUyOmT528gy98naDLjz+YP3x1Wo1d/bokJJCnTlnqu4PRqsuP2WmLjj+YN3y51VDrt8086DxOu6QSXp69d4vDTBpfL5OP7JUi1ZvH9TXOdOLtaO1e8RTO4mf7YFmlU5Qfm5OSv1MuLL8MC3bvGu/F3pPmTBOBxWN26fX+dT7Dtff3qzr/74WF+TptNlT9Mxb9f3TkcOZVJivS046RPcvq9nr/wejYbheIz0+cdrhKtq5liCzv6IOMhg99Dtz6HVm0e/ModeZla5+pxpkWCMDAACyFkEGAABkLYIMAADIWgQZAACQtQgyAAAgaxFkAABA1iLIAACArEWQAQAAWYsgAwAAshZBBgAAZC2CDAAAyFoEGQAAkLUIMgAAIGuNid2vzaxe0uY0vfxUSTvS9NoYin5nDr3OLPqdOfQ6s9LV7yOcc9OSPWhMBJl0MrOlqWwjjtFBvzOHXmcW/c4cep1ZUfebqSUAAJC1CDIAACBrEWSSWxB1AQcY+p059Dqz6Hfm0OvMirTfrJEBAABZixEZAACQtQgyIc
zsQjNba2brzeyGqOvJdma20MzqzKxqwLEpZvYXM1sX/HlQcNzM7CdB7183s5Ojqzz7mNlhZrbYzNaY2SozuzY4Tr/TwMwKzWyJma0M+n1LcHy2mb0S9PuPZjYuOF4Q3F4f3D8ryvqzkZnlmtlrZvZocJtep4mZbTKzN8xshZktDY5587uEILMXZpYr6aeSLpJ0nKRPmNlx0VaV9f5X0oV7HLtB0l+dc++S9NfgthTv+7uC/+ZLuiNDNY4VvZK+7Jw7VtLpkr4Q/PzS7/ToknS2c+49kuZKutDMTpf0HUk/DPq9S9I/B4//Z0m7nHNzJP0weBxG5lpJawbcptfpNc85N3fAadbe/C4hyOzdaZLWO+c2Oue6Jd0j6SMR15TVnHPPSmrY4/BHJP02+Pq3ki4dcPxOF/eypMlmdkhmKs1+zrmtzrnlwdctiv/CnyH6nRZB31qDm/nBf07S2ZLuD47v2e/E9+F+SeeYmWWo3KxnZjMlfVDSr4LbJnqdad78LiHI7N0MSdUDbtcExzC6ypxzW6X4X76SpgfH6f8oCYbS3yvpFdHvtAmmOlZIqpP0F0kbJDU653qDhwzsaX+/g/ubJJVmtuKs9iNJX5UUC26Xil6nk5P0tJktM7P5wTFvfpfkpfPFs9xwiZ1TvDKH/o8CMyuW9ICkLzrnmkP+IUq/95Nzrk/SXDObLOlBSccO97DgT/q9j8zsEkl1zrllZlaRODzMQ+n16DnTOVdrZtMl/cXM3gx5bMb7zYjM3tVIOmzA7ZmSaiOqZSzbnhh2DP6sC47T//1kZvmKh5g/OOf+FBym32nmnGuUVKn42qTJZpb4B+PAnvb3O7i/REOnXTG8MyV92Mw2KT7lf7biIzT0Ok2cc7XBn3WKh/TT5NHvEoLM3r0q6V3BSvhxkj4u6ZGIaxqLHpH0meDrz0h6eMDxfwxWwJ8uqSkxjInkgjUAv5a0xjn3gwF30e80MLNpwUiMzGy8pHMVX5e0WNLlwcP27Hfi+3C5pL85LuqVEufcjc65mc65WYr/Xv6bc+5TotdpYWYTzGxi4mtJ50uqkke/S7ggXggzu1jxpJ8raaFz7raIS8pqZna3pArFd0rdLukbkh6SdK+kwyW9I+kK51xD8Bfx/yh+llO7pKudc0ujqDsbmdlZkp6T9IZ2ryP4D8XXydDvUWZmJym+4DFX8X8g3uuc+5aZHan4qMEUSa9Juso512VmhZJ+p/japQZJH3fObYym+uwVTC1d55y7hF6nR9DXB4ObeZLucs7dZmal8uR3CUEGAABkLaaWAABA1iLIAACArEWQAQAAWYsgAwAAshZBBgAAZC2CDIAxxcwqEjsiAxj7CDIAACBrEWQARMLMrjKzJWa2wsx+EWy62Gpm3zez5Wb2VzObFjx2rpm9bGavm9mDZnZQcHyOmS0ys5XBc44KXr7YzO43szfN7A/sdgyMXQQZABlnZsdK+gfFN6ObK6lP0qckTZC03Dl3sqRnFL/6syTdKel659xJil+tOHH8D5J+6px7j6T3S0pcCv29kr4o6ThJRyq+Pw+AMYjdrwFE4RxJp0h6NRgsGa/4pnMxSX8MHvN7SX8ysxJJk51zzwTHfyvpvmD/lxnOuQclyTnXKUnB6y1xztUEt1dImiXp+fR/LACZRpABEAWT9Fvn3I2DDprdvMfjwvZQCZsu6hrwdZ/4XQeMWUwtAYjCXyVdbmbTJcnMppjZEYr/TkrsYPxJSc8755ok7TKzDwTHPy3pGedcs6QaM7s0eI0CMyvK6KcAEDn+lQIg45xzq83sa5KeNrMcST2SviCpTdLxZrZMUpPi62gk6TOSfh4ElY2Srg6Of1rSL8zsW8FrXJHBjwHAA+x+DcAbZtbqnCuOug4A2YOpJQAAkLUYkQEAAFmLERkAAJC1CDIAACBrEWQAAEDWIsgAAICsRZABAABZiyADAACy1v8Hx65talRMw2gAAAAASUVORK5CYII=\n", 
"text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "drowac(hist)\n", "drowls(hist)" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "part 0.33\n", "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 0s 3ms/step - loss: 21.4363 - acc: 0.0813 - val_loss: 27.6097 - val_acc: 0.0000e+00\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 38us/step - loss: 18.7510 - acc: 0.0563 - val_loss: 19.8380 - val_acc: 0.0000e+00\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 38us/step - loss: 11.6125 - acc: 0.0938 - val_loss: -6.2836 - val_acc: 0.3333\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 31us/step - loss: -0.5331 - acc: 0.2063 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 44us/step - loss: -8.9086 - acc: 0.2750 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.0381 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.2040 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.4977 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.5465 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.7172 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.1750 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 
44us/step - loss: -15.1499 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.2705 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4722 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.5355 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7037 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4223 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4782 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0651 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7765 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2639 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9294 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3088 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1222 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7823 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3416 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2898 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.7763 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2260 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2534 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3008 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1660 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2468 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2725 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3249 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0673 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3103 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0702 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 38us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1860 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2657 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2901 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2046 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1258 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0847 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1109 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.2913 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9958 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1548 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9535 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1777 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3302 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2754 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3443 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2860 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2542 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3409 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2651 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3213 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 63us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3204 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2478 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2969 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3158 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9326 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3377 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "part 0.665\n", "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 1s 3ms/step - loss: 23.4892 - acc: 0.0750 - val_loss: 13.2206 - val_acc: 0.0000e+00\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 44us/step - loss: 0.3965 - acc: 0.1625 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 38us/step - loss: -5.3782 - acc: 0.2250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 38us/step - loss: -11.8080 - acc: 0.2875 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 
38us/step - loss: -12.7663 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 31us/step - loss: -13.1360 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.2521 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.4396 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 44us/step - loss: -12.1816 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.7901 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.3461 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.2828 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.1755 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.8554 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0016 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2194 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.4928 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7775 - acc: 0.3250 - val_loss: -11.5139 - 
val_acc: 0.3333\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0924 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6479 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1606 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1264 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1181 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0855 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.1430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3071 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3206 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2470 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1780 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2419 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 38us/step - loss: 
-15.2419 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0266 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1433 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2420 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1435 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1435 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0412 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2384 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3375 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2423 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1400 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0319 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1333 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0420 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.7359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2389 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1371 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2427 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1446 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2391 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.2428 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3409 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2356 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2429 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7419 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3410 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2395 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2387 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3410 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9425 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 38us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2433 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2398 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2433 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9467 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3411 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7482 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2435 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2436 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2436 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2369 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2436 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8470 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1464 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0426 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8448 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1435 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2409 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1470 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3381 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0473 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0444 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2446 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2415 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8451 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1480 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2448 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2448 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1453 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2420 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1484 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2391 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1412 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9499 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3416 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2452 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1489 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3417 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1491 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3417 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1464 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3417 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3389 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9513 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1467 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.9547 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2457 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9522 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7610 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8572 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1504 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 50us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9569 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9571 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1509 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2466 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1487 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3395 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2467 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1514 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1465 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2443 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0541 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3421 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2470 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3421 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3421 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2446 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1521 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3421 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1522 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2472 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3422 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2473 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3422 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2473 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1525 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2450 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.1503 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9610 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1529 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1531 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3423 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0566 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2456 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3423 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1513 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3423 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1537 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0573 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1539 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3424 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1520 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3424 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3424 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2463 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0543 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3424 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1527 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1548 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2487 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0552 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9641 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1555 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.2491 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1557 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1500 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7753 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2476 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1546 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1565 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0602 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2499 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2499 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1554 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9685 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2502 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2468 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2503 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9696 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1547 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2489 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2507 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3429 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2507 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2491 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3429 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2493 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3413 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1574 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2494 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9722 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2512 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1566 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2514 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2515 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1585 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1602 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1603 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0661 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0664 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8844 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0685 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9754 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3418 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9791 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0700 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2528 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9779 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0712 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3421 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0718 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0722 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.2522 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1625 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2536 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1640 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1643 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0737 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 31us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2540 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2541 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1648 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1638 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3424 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9847 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1634 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2546 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2526 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1659 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0762 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3426 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3436 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3436 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0759 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3436 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0763 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1650 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9894 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1663 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -14.9004 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9914 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2560 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2552 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1687 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1671 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0799 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0810 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0813 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3438 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9068 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1700 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2563 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2572 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2573 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0837 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8244 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9983 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0836 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2568 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2582 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2582 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3439 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2583 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.2583 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2584 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0007 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3433 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2586 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2587 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1729 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1738 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.2589 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1735 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1725 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2592 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1746 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2594 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1748 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2590 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2596 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8367 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1752 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1761 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2601 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3441 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1765 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0083 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3441 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0925 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1769 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1771 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0937 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0944 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0948 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0118 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1787 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3442 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2614 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1791 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 31us/step - 
loss: -15.2616 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2620 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1799 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3442 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1798 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3442 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2623 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9345 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1804 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1810 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2628 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1815 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3442 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2627 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1820 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2632 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1816 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2633 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2628 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1013 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0213 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1832 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1837 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1837 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1839 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2643 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2641 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1846 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3443 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3443 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1849 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1052 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9458 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.1852 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2648 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2653 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3441 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1072 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1866 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1081 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7940 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2659 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "part 1.0\n", "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 1s 4ms/step - loss: 2.3241 - acc: 0.1687 - val_loss: -10.8308 - val_acc: 0.3333\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 38us/step - loss: -6.3352 - acc: 0.2938 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 44us/step - loss: -11.6561 - acc: 0.3000 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.4265 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4993 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6396 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0150 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2526 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2996 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2448 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - 
val_acc: 0.3333\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3215 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2547 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2647 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2923 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3345 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2801 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1703 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2621 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2943 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1867 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2840 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2716 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2140 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2644 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2531 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 63us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1356 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2535 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3333 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3201 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3302 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 50us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2687 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2723 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2973 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3005 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2991 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.0214 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3435 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2135 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3023 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3345 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.1422 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1726 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.2133 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2193 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1255 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3016 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3333 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1874 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2401 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n" ] } ], "source": [ "hist_list = []\n", "\n", "for part in np.linspace(0.33,1,num=3) :\n", " print('part %s' % part)\n", " element_cnt = int(x_train.shape[0] * part)\n", " x_part_train = x_train[ :element_cnt]\n", " y_part_train = y_train[ :element_cnt]\n", " model = Sequential()\n", " hist = neural_net (model)\n", " hist_list.append(hist.history)" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAmoAAAFNCAYAAACwk0NsAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XmUpXV95/H3p6qaRmkUcCkRiY1bIjqK2kM0Jlq4xWgSl2jUqHGMJ20SPdEczYya5GRxTDJzEs1JdBzb0SMxKhqXyLjEIFK4xA2QKIgLg6As0kFAKY1NL9/54z7VfWkb+j7Qz2LX+3VOWfc+91Y936rvsfj07/f8fk+qCkmSJI3P3NAFSJIkad8MapIkSSNlUJMkSRopg5okSdJIGdQkSZJGyqAmSZI0UgY1SZKkkTKoSTqoJHlLkv8+43svTvKoDmqoJPfYz3tmrlPS2mVQkyRJGimDmiRJ0kgZ1CT1rply/P0kX0zy/SRvSrKY5MNJrkvy0SRHTr3/l5Ocn+TaJMtJ7j312gOSnNN83TuBQ/c61y8mObf52n9Ncr8Z6ntwkm8nmZ869qQkX2wen5jk0833vCLJa5Mccgt/J7+Z5MIkVyc5Ncmdm+NJ8pokW5N8t/md3bd57XFJvtz87JcleektqUHS+BjUJA3lV4BHA/cCfgn4MPAK4PZM/jb9LkCSewHvAF4M3AH4EPB/kxzShKN/At4KHAX8Y/N9ab72gcCbgecDtwPeAJyaZP1NFVZVnwG+Dzxi6vCvAW9vHu8Efq+p9SHAI4HfuRm/g9U6HwH8BfCrwNHAJcApzcuPAR7G5Pd0BPA04DvNa28Cnl9VhwP3BT52c2uQNE4GNUlD+buqurKqLgM+AXy2qr5QVduA9wEPaN73NOCDVXVaVW0H/gq4FfAzwIOBdcDfVNX2qno38Pmpc/wm8Iaq+mxV7ayqk4FtzdftzzuAZwAkORx4XHOMqjq7qj5TVTuq6mImAfDhN/9XwTOBN1fVOc3P/3LgIUk2AtuBw4GfAlJVF1TVFc3XbQeOT3Kbqrqmqs65BTVIGiGDmqShXDn1+D/28XxD8/jOTEaYAKiqXcC3gGOa1y6rqpr62kumHt8VeEkzRXltkmuBY5uv25+3A09uRt+eDJxTVZfAZJQvyQea6dHvAX/OZHTt5tr7Z1xhMmp2TFV9DHgt8DrgyiRbktymeeuvMAmQlyQ5M8lDbkENkkbIoCZp7C5nEriAyTVbTMLWZcAVwDHNsVU/MfX4W8CrquqIqY9bV9U79nfSqvoyk/D0C9xw2hPg9cBXgHtW1W2YTNnmR77J7Pb+GQ9jMlV7WVPL31bVg4D7MJkC/f3m+Oer6gnAHZlMAb/rFtQgaYQMapLG7l3A45M8Msk64CVMpi//Ffg0sAP43SQLSZ4MnDj1tW8EfivJTzcX5R+W5PHNVOYs3s7kWrmHMbn+bdXhwPeAlSQ/Bfz2LfkBm/M8N8kJzQjenzOZCr44yX9u6l/H5Lq5HwI7m2v0npnkts2U8PeYXDsn6SBiUJM0alX1VeBZwN8BVzFZePBLVXV9VV3PZFryvwDXMLme7b1TX3sWk+vUXtu8fmHz3lm9A1gCPlZVV00dfymTUbbrmITBd7b/yfaoqtOBPwLew2SU8O7A05uXb9Oc4xomI3zfYXKdHsCzgYub6dffYvJ7knQQyQ0v7ZAkSdJYOKImSZI0UgY1SepIs0nvyj4+njl0bZJ+PDj1KUmSNFKOqEmSJI3UwtAFHCi3v/3ta+PGjZ2e4/vf/z6HHXZYp+dQe/ZlnOzL+NiTcbIv49R1X84+++yrquoO+3vfQRPUNm7cyFlnndXpOZaXl1laWur0HGrPvoyTfRkfezJO9mWcuu5Lkkv2/y6nPiVJkkbLoCZJkjRSBjVJkqSRMqhJkiSNlEFNkiRppAxqkiRJI2VQkyRJGimDmiRJ0kgZ1CRJkkbKoDajy6/9D5a/tZ2rVrY
NXYokSVojDGoz+tqV1/GW86/nku/8YOhSJEnSGmFQm9G6+cmvasfOXQNXIkmS1gqD2owW5gLAjl01cCWSJGmtMKjNaN3C5Fd1vSNqkiSpJwa1Ga2bW536dERNkiT1w6A2o4X5ZurTETVJktQTg9qMVhcTOPUpSZL6YlCb0brdI2pOfUqSpH4Y1Ga0sLo9xy5H1CRJUj8MajNaHVG73hE1SZLUE4PajPas+nRETZIk9cOgNqMFr1GTJEk9M6jNaHXV53avUZMkST0ZNKglOTbJGUkuSHJ+khc1x/8kyWVJzm0+HjdknTAV1HY4oiZJkvqxMPD5dwAvqapzkhwOnJ3ktOa111TVXw1Y2w3Mz4Xgqk9JktSfQYNaVV0BXNE8vi7JBcAxQ9Z0U+bnYLvXqEmSpJ6M5hq1JBuBBwCfbQ69MMkXk7w5yZGDFTZlIbDdVZ+SJKknqRp+hCjJBuBM4FVV9d4ki8BVQAGvBI6uqt/Yx9dtBjYDLC4uPuiUU07ptM7f+egKD7nzOp59/PpOz6N2VlZW2LBhw9BlaC/2ZXzsyTjZl3Hqui8nnXTS2VW1aX/vG/oaNZKsA94DvK2q3gtQVVdOvf5G4AP7+tqq2gJsAdi0aVMtLS11WuvCxz7E4tF3ZmnpP3V6HrWzvLxM171Xe/ZlfOzJONmXcRpLX4Ze9RngTcAFVfXqqeNHT73tScB5fde2LwtzsH2HU5+SJKkfQ4+oPRR4NvClJOc2x14BPCPJCUymPi8Gnj9MeTc0H9ixa/ipYkmStDYMverzk0D28dKH+q5lFpNVn46oSZKkfoxm1eePA1d9SpKkPhnUWpifi/f6lCRJvTGotTAf2O41apIkqScGtRZc9SlJkvpkUGthsurToCZJkvphUGthPvFen5IkqTcGtRbcnkOSJPXJoNbCwhyu+pQkSb0xqLUwWfXpiJokSeqHQa0Fpz4lSVKfDGotLMQNbyVJUn8Mai3MB1d9SpKk3hjUWpifcx81SZLUH4NaCwvxzgSSJKk/BrUW5ufivT4lSVJvDGotzM/BDld9SpKknhjUWlgI7CrY6aiaJEnqgUGthflMPruXmiRJ6oNBrYX5uUlS2+GImiRJ6oFBrYWF1RE1V35KkqQeGNRamG9+W97vU5Ik9cGg1sJqUPM2UpIkqQ8GtRYWXEwgSZJ6NGhQS3JskjOSXJDk/CQvao4fleS0JF9vPh85ZJ2r5jNJat7vU5Ik9WHoEbUdwEuq6t7Ag4EXJDkeeBlwelXdEzi9eT643VOfXqMmSZJ6MGhQq6orquqc5vF1wAXAMcATgJObt50MPHGYCm9oYXUxwQ5H1CRJUvdSNY7QkWQj8HHgvsA3q+qIqdeuqaofmf5MshnYDLC4uPigU045pdMaP/vNFV7/5fCHDz6Uexwx3+m5NLuVlRU2bNgwdBnai30ZH3syTvZlnLruy0knnXR2VW3a3/sWOqughSQbgPcAL66q76W5Fmx/qmoLsAVg06ZNtbS01FmNAOe/+3Tgh9zv/g/gxOOO6vRcmt3y8jJd917t2ZfxsSfjZF/GaSx9GfoaNZKsYxLS3lZV720OX5nk6Ob1o4GtQ9U3bTU/jmUUUpIkHdyGXvUZ4E3ABVX16qmXTgWe0zx+DvD+vmvbl9VxPu8gJUmS+jD01OdDgWcDX0pybnPsFcBfAu9K8jzgm8BTB6rvBhxRkyRJfRo0qFXVJ9kzULW3R/ZZyyxWCzWmSZKkPgx+jdqPk9URtV2OqEmSpB4Y1FrYPaJmTpMkST0wqLWwZzGBSU2SJHXPoNbC7sUEw5YhSZLWCINaC676lCRJfTKotbB76tN7skuSpB4Y1Fpwew5JktQng1oLbs8hSZL6ZFBrwe05JElSnwxqLaQZUnMxgSRJ6oNBrQWvUZMkSX0yqLXgNWqSJKlPBrUW9tyZYNAyJEnSGmFQa2HPYgKTmiRJ6p5BrYU9dyYYtg5JkrQ2GNRa2LOYwKQmSZK6Z1BrYfdiAm8hJUm
SemBQa8HtOSRJUp8Mai24PYckSeqTQa0FV31KkqQ+GdRacNWnJEnqk0GtBTe8lSRJfRo0qCV5c5KtSc6bOvYnSS5Lcm7z8bgha5zm9hySJKlPQ4+ovQV47D6Ov6aqTmg+PtRzTTdu92KCYcuQJElrw6BBrao+Dlw9ZA1tzDVJzcUEkiSpD0OPqN2YFyb5YjM1euTQxaxyMYEkSepThh4dSrIR+EBV3bd5vghcxWRf2VcCR1fVb9zI124GNgMsLi4+6JRTTum01m9fs8LLPhue+VOH8OiN6zo9l2a3srLChg0bhi5De7Ev42NPxsm+jFPXfTnppJPOrqpN+3vfQmcV3ExVdeXq4yRvBD5wE+/dAmwB2LRpUy0tLXVa2wdPOwP4AXe/xz1Y+tnjOj2XZre8vEzXvVd79mV87Mk42ZdxGktfRjf1meToqadPAs67sfcOxTsTSJKkPgw6opbkHcAScPsklwJ/DCwlOYHJ1OfFwPMHK3Avc9n/eyRJkg6UQYNaVT1jH4ff1HshM9qz4a0japIkqXujm/ocM+9MIEmS+mRQa8HtOSRJUp8MajeDU5+SJKkPBrUWXEwgSZL6ZFC7GXZ5kZokSeqBQW1WV13IPb7xVo7mOxjTJElSHwxqs7rmYjZ+6z3cKVd7jZokSeqFQW1WzZLPUG7PIUmSemFQm1UT1Oazy/05JElSLwxqs8rkVzWHG95KkqR+GNRm1QS1+RTlcgJJktQDg9rMJlOfc/EaNUmS1A+D2qxWpz7jJWqSJKkfBrVZrU59UpRJTZIk9cCgNqtm1ecc5T5qkiSpFwa1WU0vJjCnSZKkHhjUZjV1jZqLCSRJUh8MajPbs+rT7TkkSVIfDGqzmrpGzalPSZLUB4ParKauUXMxgSRJ6oNBbVa77/XpPmqSJKkfBrVZ7b7XpyNqkiSpHzMFtSRPTXJ48/gPk7w3yQO7LW1kpoKaMU2SJPVh1hG1P6qq65L8LPDzwMnA6w9EAUnenGRrkvOmjh2V5LQkX28+H3kgznXLTK36dERNkiT1YNagtrP5/Hjg9VX1fuCQA1TDW4DH7nXsZcDpVXVP4PTm+bCmR9TMaZIkqQezBrXLkrwB+FXgQ0nWt/jam1RVHweu3uvwE5iM2tF8fuKBONctsnvVJ16jJkmSejFr2PpV4CPAY6vqWuAo4Pc7qwoWq+oKgObzHTs812x276O2yzsTSJKkXizM+L6jgQ9W1bYkS8D9gL/vrKoZJdkMbAZYXFxkeXm5s3Pd6geX89PAzh3bueKKb7O8fE1n51I7KysrnfZeN499GR97Mk72ZZzG0pdZg9p7gE1J7gG8CTgVeDvwuI7qujLJ0VV1RZKjga37elNVbQG2AGzatKmWlpY6Kge4+iL4HKxft8Di4iJLSyd0dy61sry8TKe9181iX8bHnoyTfRmnsfRl1qnPXVW1A3gy8DdV9XtMRtm6cirwnObxc4D3d3iuGa1ueOv2HJIkqR+zBrXtSZ4B/DrwgebYugNRQJJ3AJ8GfjLJpUmeB/wl8OgkXwce3TwfVrOYIG54K0mSejLr1Odzgd8CXlVV30hyHPAPB6KAqnrGjbz0yAPx/Q+YqVWf5jRJktSHmUbUqurLwEuBLyW5L3BpVQ0/ytWnG6z6NKlJkqTuzTSi1qz0PBm4mMnFWscmeU6zB9rasHvq0xE1SZLUj1mnPv8aeExVfRUgyb2AdwAP6qqw0dk99Vne7VOSJPVi1sUE61ZDGkBVfY0DtJjgx8fU1OeugUuRJElrwqwjamcleRPw1ub5M4GzuylppFbv9RkcUZMkSb2YNaj9NvAC4HeZDC19HPhfXRU1SjfYnmPgWiRJ0powU1Crqm3Aq5uPtalZ9TlPuZhAkiT14iaDWpIvwY3P81XV/Q54RWO1uj1HijKpSZKkHuxvRO0Xe6nix8HqNWremUCSJPXkJoNaVV0yyzdJ8umqesiBKWmsVld9upRAkiT1Y9btOfbn0AP0fcZrdTFBcDG
BJEnqxYEKagd/dNk99bnLa9QkSVIvDlRQO/hlaurTnCZJknpwoIJaDtD3GS83vJUkST07UEHt2Qfo+4zX9KpPbyElSZJ6sL991K5j39efBaiqug2TB+d1UNvITN3r07lPSZLUg/1tz3F4X4WM3tSqT2OaJEnqw6z3+gQgyR2Z2oqjqr55wCsaq9XFBOWdCSRJUj9mukYtyS8n+TrwDeBM4GLgwx3WNT67byG1y1WfkiSpF7MuJngl8GDga1V1HPBI4FOdVTVSxRxz4DVqkiSpF7MGte1V9R1gLslcVZ0BnNBhXaNUCaG8M4EkSerFrNeoXZtkA/AJ4G1JtgI7uitrrMJ83EVNkiT1Y9ag9nHgCOBFwLOA2wJ/1lVRq5JcDFwH7AR2VNWmrs+5n4oILiaQJEn9mDWoBfgIcDVwCvDOZiq0DydV1VU9nesmVeItpCRJUm9mukatqv60qu4DvAC4M3Bmko92WtkoTYKaiwkkSVIf2t5CaivwbeA7wB0PfDk/ooB/SXJ2ks09nO+mi8nq1OfQlUiSpLVgpqnPJL8NPA24A/Bu4Der6stdFtZ4aFVd3my0e1qSr1TVx6fq2gxsBlhcXGR5ebnbYgjbfvgDrquVzs+l2a2s2I8xsi/jY0/Gyb6M01j6Mus1ancFXlxV53ZZzN6q6vLm89Yk7wNOZLKwYfX1LcAWgE2bNtXS0lKn9ez4xBy3vtV6bs1hLC09rNNzaXbLy8t03Xu1Z1/Gx56Mk30Zp7H0ZdZr1F7Wd0hLcliSw1cfA48BBr35+2QxAbhBhyRJ6kOre332bBF4Xya3bloA3l5V/zxsSSHscsNbSZLUi9EGtaq6CLj/0HVMq8w123OY1CRJUvfarvpc8wKu+pQkSb0wqLWwe0Rt6EIkSdKaYFBrJcyxyw1vJUlSLwxqrYSAQU2SJPXCoNZCJSS7vEZNkiT1wqDWyhxz5S2kJElSPwxqLVRo7vVpUpMkSd0zqLUyRyg3vJUkSb0wqLUwuYVUuUGHJEnqhUGtlZA4oiZJkvphUGsl3kJKkiT1xqDWQmVyU3ZzmiRJ6oNBrZWQcsNbSZLUD4NaC5N7fe5yKYEkSeqFQa2VTLbncDWBJEnqgUGtlUlQM6ZJkqQ+GNRa2L2PmklNkiT1wKDWymTVp4sJJElSHwxqLUy258ARNUmS1AuDWiuOqEmSpP4Y1FqozLmYQJIk9cag1lK8hZQkSeqJQa2VOebwpuySJKkfow1qSR6b5KtJLkzysqHrgWYxQe1yRE2SJPVilEEtyTzwOuAXgOOBZyQ5ftiqYLKYAEfUJElSL0YZ1IATgQur6qKquh44BXjCwDU123Psmjx2VE2SJHVsrEHtGOBbU88vbY4NbLLqE9xLTZIkdW9h6AJuRPZx7EeiUZLNwGaAxcVFlpeXOy3qPjt3sm3bNgCWz1xmLvsqU31bWVnpvPdqz76Mjz0ZJ/syTmPpy1iD2qXAsVPP7wJcvvebqmoLsAVg06ZNtbS01GlRV//bOm6Vefge/NzDHs66+bEOSK4ty8vLdN17tWdfxseejJN9Gaex9GWsSePzwD2THJfkEODpwKkD1wRMVn0C3p1AkiR1bpQjalW1I8kLgY8A88Cbq+r8gctqFhN4jZokSerHKIMaQFV9CPjQ0HXckEFNkiT1Z6xTn6O0eq9PwDt+SpKkzhnUWkozlOamt5IkqWsGtVbm3PBWkiT1xqDWQk3tm+aImiRJ6ppBrZU923M4oiZJkrpmUGvhhvf6HLgYSZJ00DOotZLd97Zyw1tJktQ1g1orUyNqA1ciSZIOfga1FipzU9tzGNUkSVK3DGqthN1jaeY0SZLUMYNaC9OLCdyeQ5Ikdc2g1kqc+pQkSb0xqLUwGVFbvdenJElStwxqrQSaDW93OfcpSZI6ZlBrZc+ImiRJUtcMai1U5nYHNa9RkyRJXTOotbLnXp/OfEqSpK4Z1Fqo7NlHzZuyS5KkrhnUWpnenmPgUiRJ0kHPoNbKng1
v3aBDkiR1zaDWQiXgiJokSeqJQa2VPas+vURNkiR1zaDWQoU9G96a1CRJUsdGF9SS/EmSy5Kc23w8buia9pibuim7QU2SJHVrYegCbsRrquqvhi5ib5Nr1JrH5jRJktSx0Y2ojdueVZ8GNUmS1LWxjqi9MMmvA2cBL6mqa/b1piSbgc0Ai4uLLC8vd1rUXbbvoHbtBOCss8/iOxfOd3o+zWZlZaXz3qs9+zI+9mSc7Ms4jaUvgwS1JB8F7rSPl/4AeD3wSiaTjK8E/hr4jX19n6raAmwB2LRpUy0tLXVR7m6XXPRW5jJ5/IAHPogTjj2i0/NpNsvLy3Tde7VnX8bHnoyTfRmnsfRlkKBWVY+a5X1J3gh8oONy2ilvISVJkvoxumvUkhw99fRJwHlD1bK3yvSqz4GLkSRJB70xXqP2P5OcwGTq82Lg+cOWMy2OqEmSpN6MLqhV1bOHruHGpbkzQXmnT0mS1LnRTX2OWWWykiAUu5z7lCRJHTOotTL5dQUcUZMkSZ0zqLVQzdYcc+xipyNqkiSpYwa1Vuaa/y2u37lr4FokSdLBzqDWwvQ1atu2G9QkSVK3DGqtTAW1HTsHrkWSJB3sDGqtTILaHMW2HY6oSZKkbhnUWqisrvo0qEmSpO4Z1G6GOYpt2536lCRJ3TKotbBnRG2Xqz4lSVLnDGqtZPf/uupTkiR1zaDWyiSoHbqA16hJkqTOGdRaWN1H7dCFuD2HJEnqnEGtlUlQWz8/54iaJEnqnEGthT0jal6jJkmSumdQa2Xy6zp03qlPSZLUPYNaC6sjaocshOud+pQkSR0zqLXSXKO2EK9RkyRJnTOotdJcozaPU5+SJKlzBrUWVqc+HVGTJEl9MKi1MrU9h6s+JUlSxwxqLewZUXPqU5IkdW+woJbkqUnOT7Iryaa9Xnt5kguTfDXJzw9V44+a/LrWzzn1KUmSurcw4LnPA54MvGH6YJLjgacD9wHuDHw0yb2qajRDWOu916ckSerBYCNqVXVBVX11Hy89ATilqrZV1TeAC4ET+61u3yrNiNq8+6hJkqTujfEatWOAb009v7Q5NgLNhrfemUCSJPWg06nPJB8F7rSPl/6gqt5/Y1+2j2N1I99/M7AZYHFxkeXl5ZtT5sw2bNsGwFVbL2fb9jtzxhlnkOyrXPVpZWWl896rPfsyPvZknOzLOI2lL50Gtap61M34skuBY6ee3wW4/Ea+/xZgC8CmTZtqaWnpZpxuduf946cBOO7YY6hvFg/9uYdzyMIYByXXluXlZbruvdqzL+NjT8bJvozTWPoyxpRxKvD0JOuTHAfcE/jcwDUBe65RO2R+Morm9KckSerSkNtzPCnJpcBDgA8m+QhAVZ0PvAv4MvDPwAvGtOIT4JD5yWdXfkqSpC4NuerzfVV1l6paX1WLVfXzU6+9qqruXlU/WVUfHqrGaV+5+iu89oef5Kr5OQ5pfmsGNUmS1KUxTn2O0rad2/jXnRdzzvr1u6c+3aJDkiR1yaA2o+OPOp5DmOcLh66fmvoc1YysJEk6yBjUZrRufh13n78D50wHNW/MLkmSOjTkLaR+7Pzk/J049ZAr+NoV/8SDDofTPvUpzj5s/dBlrXlb/30rF1135tBlaC/2ZXzsyTjZl/H5qZ84EbjV0GUABrVWfnL93dl1/bn8xQ8+BXeBr20Dtg1dlZgHrh66CP0I+zI+9mSc7Mvo/PJV5/Lo4140dBmAQa2VY458OO9/4BI//OG1bL3uh676HImLLrqIu93tbkOXob3Yl/GxJ+NkX8Zn453uzRWXfHfoMgCDWitJuNvGJQCOH7YUTVnetszSQ5aGLkN7sS/jY0/Gyb6M0xWXLA9dAuBiAkmSpNEyqEmSJI2UQU2SJGmkDGqSJEkjZVCTJEkaKYOaJEnSSBnUJEmSRsqgJkmSNFIGNUmSpJEyqEmSJI1UqmroGg6IJP8OXNLxaW4PXNXxOdSefRkn+zI+9mSc7Ms4dd2Xu1bVHfb3poM
mqPUhyVlVtWnoOnRD9mWc7Mv42JNxsi/jNJa+OPUpSZI0UgY1SZKkkTKotbNl6AK0T/ZlnOzL+NiTcbIv4zSKvniNmiRJ0kg5oiZJkjRSBrUZJXlskq8muTDJy4auZy1J8uYkW5OcN3XsqCSnJfl68/nI5niS/G3Tpy8meeBwlR+8khyb5IwkFyQ5P8mLmuP2ZUBJDk3yuST/1vTlT5vjxyX5bNOXdyY5pDm+vnl+YfP6xiHrP5glmU/yhSQfaJ7bk4EluTjJl5Kcm+Ss5tjo/oYZ1GaQZB54HfALwPHAM5IcP2xVa8pbgMfudexlwOlVdU/g9OY5THp0z+ZjM/D6nmpca3YAL6mqewMPBl7Q/H/CvgxrG/CIqro/cALw2CQPBv4H8JqmL9cAz2ve/zzgmqq6B/Ca5n3qxouAC6ae25NxOKmqTpjahmN0f8MMarM5Ebiwqi6qquuBU4AnDFzTmlFVHweu3uvwE4CTm8cnA0+cOv73NfEZ4IgkR/dT6dpRVVdU1TnN4+uY/AfoGOzLoJrf70rzdF3zUcAjgHc3x/fuy2q/3g08Mkl6KnfNSHIX4PHA/2meB3syVqP7G2ZQm80xwLemnl/aHNNwFqvqCpiEBuCOzXF71bNmauYBwGexL4NrptjOBbYCpwH/D7i2qnY0b5n+3e/uS/P6d4Hb9VvxmvA3wH8FdjXPb4c9GYMC/iXJ2Uk2N8dG9zdsoY+THAT29a8Zl8uOk73qUZINwHuAF1fV927iH/72pSdVtRM4IckRwPuAe+/rbc1n+9KxJL8IbK2qs5MsrR7ex1vtSf8eWlWXJ7kjcFqSr9zEewfriyNqs7kUOHbq+V2AyweqRRNXrg47N5+3NsftVU+SrGMS0t5WVe9tDtuXkaiqa4FlJtcQHpFk9R/m07/73X1pXr8tP3qZgW6ZhwK/nORiJpfNPILJCJs9GVhVXd583srkHzUnMsK/YQa12XweuGezSucQ4OnAqQPXtNadCjynefwc4P1Tx3+9WaHzYOC7q8PYOnCaa2beBFxQVa+eesm+DCjJHZqRNJLcCngUk+sHzwCe0rxt776s9uspwMfKzTUPqKp6eVXdpao2Mvlvx8eq6pnYk0ElOSwq9B1nAAACo0lEQVTJ4auPgccA5zHCv2FueDujJI9j8q+geeDNVfWqgUtaM5K8A1gCbg9cCfwx8E/Au4CfAL4JPLWqrm4CxGuZrBL9AfDcqjpriLoPZkl+FvgE8CX2XHfzCibXqdmXgSS5H5MLoOeZ/EP8XVX1Z0nuxmQ05yjgC8CzqmpbkkOBtzK5xvBq4OlVddEw1R/8mqnPl1bVL9qTYTW///c1TxeAt1fVq5LcjpH9DTOoSZIkjZRTn5IkSSNlUJMkSRopg5okSdJIGdQkSZJGyqAmSZI0UgY1SboFkiwl+cDQdUg6OBnUJEmSRsqgJmlNSPKsJJ9Lcm6SNzQ3L19J8tdJzklyepI7NO89IclnknwxyfuSHNkcv0eSjyb5t+Zr7t58+w1J3p3kK0nelpu46akktWFQk3TQS3Jv4GlMbsJ8ArATeCZwGHBOVT0QOJPJXS8A/h74b1V1PyZ3X1g9/jbgdVV1f+BngNVbyDwAeDFwPHA3Jvd3lKRbbGH/b5GkH3uPBB4EfL4Z7LoVk5st7wLe2bznH4D3JrktcERVndkcPxn4x+a+gMdU1fsAquqHAM33+1xVXdo8PxfYCHyy+x9L0sHOoCZpLQhwclW9/AYHkz/a6303dU+9m5rO3Db1eCf+bZV0gDj1KWktOB14SpI7AiQ5KsldmfwNfErznl8DPllV3wWuSfJzzfFnA2dW1feAS5M8sfke65PcutefQtKa47/6JB30qurLSf4Q+Jckc8B24AXA94H7JDkb+C6T69gAngP87yaIXQQ8tzn+bOANSf6s+R5P7fHHkLQGpeqmRvol6eCVZKWqNgxdhyTdGKc+JUmSRsoRNUmSpJFyRE2SJGmkDGqSJEkjZVCTJEk
aKYOaJEnSSBnUJEmSRsqgJkmSNFL/H9uUln2g2n8xAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmwAAAFNCAYAAAC9jTMrAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3X+0XWV95/H3J0EQBQVR0mkAiTV2xCmCpoDV6q2iYlWwVke0tthxNdXKqrY6LbYdnOIwq9rpjzUtVTIjq9RR8Vd1MjbWonKtVtEERSxYIEZ+RFTU8MP4A0jud/44+8LJ9QJnx+x9Nrnv11rXe84+e5/z3PtdXj55nv08T6oKSZIkDdeyaTdAkiRJ98zAJkmSNHAGNkmSpIEzsEmSJA2cgU2SJGngDGySJEkDZ2CTJEkaOAObpL1Skr9N8t8mPPeaJCd20IZK8sg9/b6Slh4DmyRJ0sAZ2CRJkgbOwCZpapqhyP+c5LIk30vytiQrknw4yXeTfDTJwWPnn5zk8iQ3J5lN8uix145N8vnmuncD91/wWc9Jcmlz7aeTHD1B+05I8o0ky8eO/VKSy5rHxyX5TPOeX0/y10n2bfk7eHaSLyS5Ncn1Sf7rgtef1LT35ub1lzXH90/yZ0muTXJLkk8l2b/NZ0u67zCwSZq2XwaeDjwKeC7wYeAPgIcy+hv12wBJHgW8C3gN8DBgA/D/kuzbhKQPAm8HHgK8t3lfmmsfB5wH/CZwCHAusD7JfvfUsKq6GPge8NSxwy8B3tk83gn8TtPWJwBPA36r5c//PeDXgIOAZwOvTPK8pt1HNL+Pv2p+5mOAS5vr/gfweODnmp/594C5lp8t6T7CwCZp2v6qqr5ZVV8DPgl8tqq+UFW3AR8Ajm3OexHwD1V1YVXdwSiw7M8osJwA3A/4y6q6o6reB2wc+4zfAM6tqs9W1c6qOh+4rbnu3rwLeDFAkgOBX2yOUVWXVNXFVbWjqq5hFASf0uaHr6rZqvpSVc1V1WXNe8+/x68AH62qdzU/13eq6tIky4D/BLy6qr7W/Eyfbn5nkvZCBjZJ0/bNscc/WOT5Ac3jnwSunX+hquaA64GVzWtfq6oau/basccPB17bDCvenORm4PDmunvzTuD5TW/c84HPV9W1MOr1S/KhZtj0VuC/M+ptm1iS45NclORbSW4BXjH2HocDX1nksocyGvJd7DVJeyEDm6T7ihsYBS8AkoRRoPka8HVgZXNs3hFjj68Hzq6qg8a+HlBV77q3D62qKxiFv2ex63AowFuAfwNWV9WDGA3l5kfe5J69E1gPHF5VDwbeOvYe1wM/tcg13wZ+eDevSdoLGdgk3Ve8B3h2kqcluR/wWkbDmp8GPgPsAH47yT5Jng8cN3bt/wJe0fRmJckDm5v9D5zws9/J6F66JzO6P27egcCtwPYk/x545W78XAcC26rqh0mOYxQK570DODHJf2x+rkOSHNP0Lp4H/HmSn0yyPMkT7u2ePEn3XQY2SfcJVXUl8FJGN+B/m9EEhedW1e1VdTuj4cqXATcxut/t78eu3cToPra/bl7f3Jw7qXcBM8DHq+rbY8dfxyhgfZdRKHx3+5+M3wLOSvJd4ExGwXS+3dcxumfutcA2RhMOHjv22V9idK/eNuBN+Ddd2mtl11s+JEmSNDSd/2ssyUlJrkyyOckZi7z+iiRfatZH+lSSo5rjRyb5QXP80iRv7bqtkiRJQ9RpD1uz2ORVjNZY2sqo6/7FzU288+c8qKpubR6fDPxWVZ2U5EjgQ1X1HzproCT1IMnljE2YGPObVfWOvtsj6b5nn47f/zhgc1VtAUhyAXAKcGdgmw9rjQcCjtFK2qtU1WOm3QZJ921dD4muZDQtfd7W5tgukrwqyVeAN9Osat5Y1WzZ8o
kkP99tUyVJkoap6x62xdYj+pEetKo6BzgnyUuAPwJOY7Su0hFV9Z0kjwc+mOQxC3rkSLIWWAuw//77P/7www/f0z/Dj5ibm2PZMidjDYk1GSbrMkzWZXisyTB1XZerrrrq21X1sEnO7TqwbWW0sOW8wxgtfnl3LmC0ECXNFiu3NY8vaXrgHgVsGr+gqtYB6wDWrFlTmzbt8nInZmdnmZmZ6fxzNDlrMkzWZZisy/BYk2Hqui5Jrr33s0a6jvMbgdVJVjWbM5/KaEXvOyVZPfb02cDVzfGHNZMWSPIIYDWwpeP2SpIkDU6nPWxVtSPJ6cBHgOXAeVV1eZKzgE1VtR44PcmJwB2MFrQ8rbn8yYwWk9wB7AReUVXbumyvJEnSEHU9JEpVbQA2LDh25tjjV9/Nde8H3t9t6yRJkobPOxwlSZIGzsAmSZI0cAY2SZKkgTOwSZIkDZyBTZIkaeAMbJIkSQOXqr1nr/U+djp40+fexKarPsryZWHHzr3nd3dfd/sdt7Pv/faddjO0gHUZJusyPNZkeI58wOE855Bf63qng0uqas0k53a+Dttep+Y44HvXwl4UdPcat0+7AVqUdRkm6zI81mRQlt16Exwy7VbcxcDW0u+veR18+E94647n8MPH/QYvOf7h026SgEsu2cTjHz/RP1LUI+syTNZleKzJ8Oxzv3257Iqrp92MOxnY2mp61m6tB7Dvg36SQ1eumnKDBLD/1ddaiwGyLsNkXYbHmgzUgAKbkw5amx8KDcuSqbZEkiQtDQa2tpoetiIY1yRJUh8MbK3Vnf9rB5skSeqDga2t8R42E5skSeqBga2tmht9m3IzJEnS0mFga228h23KTZEkSUuCga2tXSYdmNgkSVL3DGytOelAkiT1y8DWVt21Dpt5TZIk9cHA1pr3sEmSpH4Z2NqqsSFR+9gkSVIPDGy7yR42SZLUFwNbW2OzRCVJkvpgYGttfJaooU2SJHXPwNaWm79LkqSeGdhac5aoJEnql4GtrbprF1HzmiRJ6oOBrbVRYJtjmfewSZKkXnQe2JKclOTKJJuTnLHI669I8qUklyb5VJKjxl57fXPdlUme2XVbJ1JuTSVJkvrVaWBLshw4B3gWcBTw4vFA1nhnVf1MVR0DvBn48+bao4BTgccAJwF/07zflI3fw2ZikyRJ3eu6h+04YHNVbamq24ELgFPGT6iqW8eePpD5RDQ674Kquq2qvgpsbt5vupwlKkmSerZPx++/Erh+7PlW4PiFJyV5FfC7wL7AU8euvXjBtSu7aWYbDolKkqR+dR3YFos09SMHqs4BzknyEuCPgNMmvTbJWmAtwIoVK5idnf1x2nuv9vvht3gCox62q6+6itkffLXTz9Nktm/f3nnt1Z51GSbrMjzWZJiGVJeuA9tW4PCx54cBN9zD+RcAb2lzbVWtA9YBrFmzpmZmZn6M5k7gpmvv7Pf76Z/+aWaOO6Lbz9NEZmdn6bz2as26DJN1GR5rMkxDqkvX97BtBFYnWZVkX0aTCNaPn5Bk9djTZwNXN4/XA6cm2S/JKmA18LmO2zsB72GTJEn96rSHrap2JDkd+AiwHDivqi5PchawqarWA6cnORG4A7iJ0XAozXnvAa4AdgCvqqqdXbZ3IvOTDsqdDiRJUj+6HhKlqjYAGxYcO3Ps8avv4dqzgbO7a93uGJt0YB+bJEnqgTsdtDW2rId5TZIk9cHAtpu8h02SJPXFwNbWLltTGdkkSVL3DGytOUtUkiT1y8DWVs2v3essUUmS1A8DW2tuTSVJkvplYGtrl83fTWySJKl7BrbWxgKbeU2SJPXAwNZW08M2Z++aJEnqiYGttfEeNkObJEnqnoGtrfF12KbbEkmStEQY2FpzWQ9JktQvA1tbYz1sy0xskiSpBwa21tzpQJIk9cvA1la5rIckSeqXga2tmht9A5x2IEmS+mBga80eNkmS1C8DW1s1/8172CRJUj8MbK2Nb/5uZJMkSd0zsLVVY+uwTb
UhkiRpqTCwteY9bJIkqV8GtrbGt6YysEmSpB4Y2FobXzjXxCZJkrpnYGtrbOFc85okSeqDga21sSHR6TZEkiQtEQa2tnbZmsrIJkmSumdga83N3yVJUr8MbG25+bskSepZ54EtyUlJrkyyOckZi7z+u0muSHJZko8lefjYazuTXNp8re+6rZNpAls5S1SSJPVjny7fPMly4Bzg6cBWYGOS9VV1xdhpXwDWVNX3k7wSeDPwoua1H1TVMV22sTXXYZMkST3ruoftOGBzVW2pqtuBC4BTxk+oqouq6vvN04uBwzpu04/Je9gkSVK/ug5sK4Hrx55vbY7dnZcDHx57fv8km5JcnOR5XTSwtXLzd0mS1K9Oh0RZfKmyWuQYSV4KrAGeMnb4iKq6IckjgI8n+VJVfWXBdWuBtQArVqxgdnZ2jzT87hy87Ys8llEP2xe/eCm3Xb+808/TZLZv39557dWedRkm6zI81mSYhlSXrgPbVuDwseeHATcsPCnJicAfAk+pqtvmj1fVDc33LUlmgWOBXQJbVa0D1gGsWbOmZmZm9uxPsNDmnXDZKLAde8wxHP+IQ7r9PE1kdnaWzmuv1qzLMFmX4bEmwzSkunQ9JLoRWJ1kVZJ9gVOBXWZ7JjkWOBc4uapuHDt+cJL9mscPBZ4IjE9WmI66q4PQIVFJktSHTnvYqmpHktOBjwDLgfOq6vIkZwGbqmo98KfAAcB7mwB0XVWdDDwaODfJHKNg+ScLZpdOieuwSZKkfnU9JEpVbQA2LDh25tjjE+/muk8DP9Nt63ZDOUtUkiT1y50OWnMdNkmS1C8DW1tjPWyLT4KVJEnaswxsrXkPmyRJ6peBra3xhXOn2xJJkrREGNhaG+9hM7JJkqTuGdjaunMdNmeJSpKkfhjYWnOWqCRJ6peBra2mh22OZfaxSZKkXhjYWrOHTZIk9cvA1tYu67BJkiR1z8DWmuuwSZKkfhnY2tplHTYTmyRJ6p6BrTV72CRJUr8MbG3duQ6bkw4kSVI/DGy7qQjLTGySJKkHBra2xmaJGtckSVIfDGxt1dzoGw6JSpKkfhjYWhtfh83EJkmSumdga6ucJSpJkvplYGttfB02SZKk7hnY2tqlh83IJkmSumdga21+HTZniUqSpH4Y2Nqa72ErZ4lKkqR+GNhaG1+HzcQmSZK6Z2Brq+lhm3OWqCRJ6omBrbXxddgkSZK6Z2Bry3XYJElSzwxsrY2tw2ZikyRJPeg8sCU5KcmVSTYnOWOR1383yRVJLkvysSQPH3vttCRXN1+ndd3WiZTLekiSpH51GtiSLAfOAZ4FHAW8OMlRC077ArCmqo4G3ge8ubn2IcAbgOOB44A3JDm4y/ZOxiFRSZLUr6572I4DNlfVlqq6HbgAOGX8hKq6qKq+3zy9GDisefxM4MKq2lZVNwEXAid13N57V+NbU5nYJElS97oObCuB68eeb22O3Z2XAx/ezWt7ZQ+bJEnqyz4dv/9ikaYWOUaSlwJrgKe0uTbJWmAtwIoVK5idnd2thk5q5darWN005DOf/jQH3d95G0Owffv2zmuv9qzLMFmX4bEmwzSkunQd2LYCh489Pwy4YeFJSU4E/hB4SlXdNnbtzIJrZxdeW1XrgHUAa9asqZmZmYWn7FmfvRI2j3rYnvjEJ/KwA/fr9vM0kdnZWTqvvVqzLsNkXYbHmgzTkOrSdffQRmB1klVJ9gVOBdaPn5DkWOBc4OSqunHspY8Az0hycDPZ4BnNselyHTZJktSzTnvYqmpHktMZBa3lwHlVdXmSs4BNVbUe+FPgAOC9zbpm11XVyVW1LckbGYU+gLOqaluX7Z1IzY2+sfiYrSRJ0p7W9ZAoVbUB2LDg2Jljj0+8h2vPA87rrnW7Y2wdNrvYJElSD7xjvq3xIdEpN0WSJC0NBrbWxremmm5LJEnS0mBga2uXHjYTmyRJ6p6BrbW7Apt5TZIk9WGiwJbkhCQHjj0/MMnx3TVrwMohUUmS1K9Je9
jeAmwfe/695tgSNApscyyzg02SJPVi0sCWqrpzW6iqmqOHJUEG6a5fg8t6SJKkXkwa2LYk+e0k92u+Xg1s6bJhw+WyHpIkqV+TBrZXAD8HfI3RHp/H02y4vuS4NZUkSerZRMOazR6fp3bclvuIsUkH9rFJkqQeTDpL9PwkB409PzjJwLaM6knNf7OHTZIk9WPSIdGjq+rm+SdVdRNwbDdNGrqxddgkSZJ6MGlgW5bk4PknSR7CEp8l6jpskiSpL5OGrj8DPp3kfc3zFwJnd9OkoXNrKkmS1K9JJx38XZJLgF9gtCHT86vqik5bNlR3rsPmPWySJKkfEw9rVtXlSb4F3B8gyRFVdV1nLRusuxbOXWZikyRJPZh0lujJSa4Gvgp8ArgG+HCH7RqummOuGQo1rkmSpD5MOungjcAJwFVVtQp4GvAvnbVqyHbZmmqK7ZAkSUvGpIHtjqr6DqPZosuq6iLgmA7bNWB155Ie7iUqSZL6MOk9bDcnOQD4Z+AdSW4EdnTXrAGr0R4HkiRJfZm0h+0U4PvA7wD/CHwFeG5XjRq2cg02SZLUq0mX9fhe83AOOH/h60k+U1VP2JMNG6yqZg02SZKkfkzaw3Zv7r+H3uc+oNn23S42SZLUkz0V2OreT9lLVFGxh02SJPVnTwW2JaQZEjWxSZKknuypwLZ04kvV/KDotFsiSZKWiD0V2H51D73PfUSWUkSVJElTdo+zRJN8l8XvTwtQVfUgRg/+tYO2DZOzRCVJUs/usYetqg6sqgct8nXgfFi7N0lOSnJlks1Jzljk9Scn+XySHUlesOC1nUkubb7Wt/vRujLKr97DJkmS+jLpTgcAJDmUsSU8quq6ezl/OXAO8HRgK7AxyfqqumLstOuAlwGvW+QtflBVw9oC684eNhObJEnqx0T3sCU5OcnVwFeBTwDXAB+e4NLjgM1VtaWqbgcuYLRrwp2q6pqquozRorz3Ac4SlSRJ/Zp00sEbgROAq6pqFfA04F8muG4lcP3Y863NsUndP8mmJBcneV6L67pTzZDolJshSZKWjkmHRO+oqu8kWZZkWVVdlORNE1y3WK5ps8juEVV1Q5JHAB9P8qWq+souH5CsBdYCrFixgtnZ2RZv394jv7aVgwg7d+7s/LM0ue3bt1uPAbIuw2RdhseaDNOQ6jJpYLs5yQHAJ4F3JLkR2DHBdVuBw8eeHwbcMGnjquqG5vuWJLPAsYw2nh8/Zx2wDmDNmjU1MzMz6dvvnu99iO/eEO63zz50/lma2OzsrPUYIOsyTNZleKzJMA2pLpMOif4zcBDwauAfGYWm505w3UZgdZJVSfYFTgUmmu2Z5OAk+zWPHwo8Ebjinq/qQ9NB6JioJEnqyaSBLcBHgFngAODdVfWde7uoqnYApzfXfhl4T1VdnuSsJCcDJPnZJFuBFwLnJrm8ufzRwKYkXwQuAv5kwezS6ag5irDMWQeSJKknEw2JVtUfA3+c5GjgRcAnkmytqhMnuHYDsGHBsTPHHm9kNFS68LpPAz8zSft6Vc4SlSRJ/Wq7NdWNwDeA7wCH7vnm3BeUO1NJkqReTboO2yubm/4/BjwU+I2qOrrLhg3WnT1sRjZJktSPSWeJPhx4TVVd2mVj7hvcS1SSJPVr0nvYfmQP0CWr3EtUkiT1q+09bKKYYxnexSZJkvpiYGtrfhk285okSeqJga0172GTJEn9MrC15TpskiSpZwa21ppJB/axSZKknhjY2rKHTZIk9czA1pr3sEmSpH4Z2NqqosCdDiRJUm8MbK0VrsEmSZL6ZGBry3vYJElSzwxsrc0PiU67HZIkaakwsLU138PmsKgkSeqJga01N3+XJEn9MrC1VXMUYZmJTZIk9cTA1la5DpskSeqXga210aQDE5skSeqLga2tGq3DZl6TJEl9MbDthjniTgeSJKk3Bra2vIdNkiT1zMDWmjsdSJKkfhnY2qpmHTb72CRJUk8MbK3ZwyZJkvplYGuruYdNkiSpLw
a21uY3fze0SZKkfhjY2nKWqCRJ6lnngS3JSUmuTLI5yRmLvP7kJJ9PsiPJCxa8dlqSq5uv07pu62SahXNNbJIkqSedBrYky4FzgGcBRwEvTnLUgtOuA14GvHPBtQ8B3gAcDxwHvCHJwV22dyI1PyQ67YZIkqSlousetuOAzVW1papuBy4AThk/oaquqarLgLkF1z4TuLCqtlXVTcCFwEkdt3cC80OiJjZJktSPfTp+/5XA9WPPtzLqMdvda1cuPCnJWmAtwIoVK5idnd2thk7q6G3bmAO2f/e7nX+WJrd9+3brMUDWZZisy/BYk2EaUl26DmyLdUPVnry2qtYB6wDWrFlTMzMzEzdut1x3EN+45fs86EEHMjPzpG4/SxObnZ2l89qrNesyTNZleKzJMA2pLl0PiW4FDh97fhhwQw/Xdmd+HTZvYpMkST3pOrBtBFYnWZVkX+BUYP2E134EeEaSg5vJBs9ojk1ZM0t02s2QJElLRqeBrap2AKczClpfBt5TVZcnOSvJyQBJfjbJVuCFwLlJLm+u3Qa8kVHo2wic1RybLmeJSpKknnV9DxtVtQHYsODYmWOPNzIa7lzs2vOA8zptYFvNkOgyE5skSeqJOx201vSwTbsZkiRpyTCwtVXFHMscEpUkSb0xsLVWTjuQJEm9MrC1Vc2AqHlNkiT1xMDW2vzWVJIkSf0wsLXlsh6SJKlnBrbW3PxdkiT1y8DWVrMOmz1skiSpLwa21hwSlSRJ/TKwtVUOiUqSpH4Z2FpzSFSSJPXLwNZWM0tUkiSpLwa21pp9DuxikyRJPTGwtVXFnHewSZKkHhnYWisoZ4lKkqT+GNjaKtyaSpIk9crA1tr8LFEjmyRJ6oeBra2aa6YdSJIk9cPA1lbZwyZJkvplYGutmSVqXpMkST0xsLXVLJxrXpMkSX0xsLXm1lSSJKlfBra23PxdkiT1zMDWWjMkal6TJEk9MbC1VQ6JSpKkfhnYWnNIVJIk9cvA1lYzS9S8JkmS+mJga62osn9NkiT1p/PAluSkJFcm2ZzkjEVe3y/Ju5vXP5vkyOb4kUl+kOTS5uutXbd1Iu50IEmSerZPl2+eZDlwDvB0YCuwMcn6qrpi7LSXAzdV1SOTnAq8CXhR89pXquqYLtvYngvnSpKkfnXdw3YcsLmqtlTV7cAFwCkLzjkFOL95/D7gaRly91XhLFFJktSrTnvYgJXA9WPPtwLH3905VbUjyS3AIc1rq5J8AbgV+KOq+uTCD0iyFlgLsGLFCmZnZ/foD7DQCT/8AXPAjd/8Zuefpclt377degyQdRkm6zI81mSYhlSXrgPbYv1QNeE5XweOqKrvJHk88MEkj6mqW3c5sWodsA5gzZo1NTMz8+O3+p58fj+4LfzET/wEMzMDG61dwmZnZ+m89mrNugyTdRkeazJMQ6pL10OiW4HDx54fBtxwd+ck2Qd4MLCtqm6rqu8AVNUlwFeAR3Xc3gnMr8MmSZLUj64D20ZgdZJVSfYFTgXWLzhnPXBa8/gFwMerqpI8rJm0QJJHAKuBLR239941s0RNbJIkqS+dDok296SdDnwEWA6cV1WXJzkL2FRV64G3AW9PshnYxijUATwZOCvJDmAn8Iqq2tZleyczP0vUxCZJkvrR9T1sVNUGYMOCY2eOPf4h8MJFrns/8P6u29dazTlLVJIk9cqdDtqqYs7+NUmS1CMDW2ujramW2cUmSZJ6YmBrq9n83bwmSZL6YmBrrRkSNbBJkqSeGNjaml/Ww7vYJElSTwxsrTkkKkmS+mVga6vc6UCSJPXLwNZaUWUPmyRJ6o+Bra2i6WEzsUmSpH4Y2FordzqQJEm9MrC1Nb8O27TbIUmSlgwDW2vzPWxGNkmS1A8DW1vNXqKSJEl9MbC15jpskiSpXwa2tmq0+buzRCVJUl8MbK05S1SSJPXLwNaWs0QlSVLPDGxt1Rxz9rBJkqQeGdhac1kPSZLULwNbW27+LkmSemZga62YA3vYJE
lSbwxsbc0v62FekyRJPTGwteaQqCRJ6peBbTe404EkSeqTga2NKgDm3OlAkiT1yMDWRhPY3OlAkiT1ycDWSt35v+Y1SZLUFwNbG2M9bHaxSZKkvnQe2JKclOTKJJuTnLHI6/sleXfz+meTHDn22uub41cmeWbXbb13Y0OiU26JJElaOjoNbEmWA+cAzwKOAl6c5KgFp70cuKmqHgn8BfCm5tqjgFOBxwAnAX/TvN/01NiQqIlNkiT1pOsetuOAzVW1papuBy4ATllwzinA+c3j9wFPy2gbgVOAC6rqtqr6KrC5eb+p2Tk31zxylqgkSerPPh2//0rg+rHnW4Hj7+6cqtqR5BbgkOb4xQuuXbnwA5KsBdYCrFixgtnZ2T3V9h+x447bOJHRkOiNX7uG2dmvdfZZamf79u2d1l67x7oMk3UZHmsyTEOqS9eBbbFuqJrwnEmuparWAesA1qxZUzMzMy2bOLm5nTvZfOg/sGrzN/jlXzqR5cvsZRuK2dlZuqy9do91GSbrMjzWZJiGVJeuh0S3AoePPT8MuOHuzkmyD/BgYNuE1/Zq2fLlPPKxT+LQQx5qWJMkSb3pOrBtBFYnWZVkX0aTCNYvOGc9cFrz+AXAx6uqmuOnNrNIVwGrgc913F5JkqTB6XRItLkn7XTgI8By4LyqujzJWcCmqloPvA14e5LNjHrWTm2uvTzJe4ArgB3Aq6pqZ5ftlSRJGqKu72GjqjYAGxYcO3Ps8Q+BF97NtWcDZ3faQEmSpIFzpwNJkqSBM7BJkiQNnIFNkiRp4AxskiRJA2dgkyRJGjgDmyRJ0sAZ2CRJkgYuo00F9g5JvgVc28NHPRT4dg+fo8lZk2GyLsNkXYbHmgxT13V5eFU9bJIT96rA1pckm6pqzbTbobtYk2GyLsNkXYbHmgzTkOrikKgkSdLAGdgkSZIGzsC2e9ZNuwH6EdZkmKzLMFmX4bEmwzSYungPmyRJ0sDZwyZJkjRwBrYWkpyU5Mokm5OcMe32LCVJzktyY5J/HTv2kCQXJrm6+X5wczxJ/mdTp8uSPG56Ld97JTk8yUVJvpzk8iSvbo5blylKcv8kn0vyxaYuf9wcX5Xks01d3p1k3+b4fs3zzc3rR06z/XuzJMuTfCHJh5rn1mTKklyT5EtJLk2yqTk2yL9hBrYJJVkOnAM8CzgKeHGSo6bbqiXlb4GTFhx1MQ5vAAAE/ElEQVQ7A/hYVa0GPtY8h1GNVjdfa4G39NTGpWYH8NqqejRwAvCq5v8T1mW6bgOeWlWPBY4BTkpyAvAm4C+autwEvLw5/+XATVX1SOAvmvPUjVcDXx57bk2G4Req6pix5TsG+TfMwDa544DNVbWlqm4HLgBOmXKbloyq+mdg24LDpwDnN4/PB543dvzvauRi4KAk/66fli4dVfX1qvp88/i7jP5DtBLrMlXN73d78/R+zVcBTwXe1xxfWJf5er0PeFqS9NTcJSPJYcCzgf/dPA/WZKgG+TfMwDa5lcD1Y8+3Nsc0PSuq6uswCg/Aoc1xa9WzZsjmWOCzWJepa4beLgVuBC4EvgLcXFU7mlPGf/d31qV5/RbgkH5bvCT8JfB7wFzz/BCsyRAU8E9JLkmytjk2yL9h+/T1QXuBxf514xTbYbJWPUpyAPB+4DVVdes9dARYl55U1U7gmCQHAR8AHr3Yac1369KxJM8BbqyqS5LMzB9e5FRr0r8nVtUNSQ4FLkzyb/dw7lTrYg/b5LYCh489Pwy4YUpt0cg357ujm+83NsetVU+S3I9RWHtHVf19c9i6DERV3QzMMrrH8KAk8/9IH//d31mX5vUH86O3H+jH80Tg5CTXMLqd5qmMetysyZRV1Q3N9xsZ/ePmOAb6N8zANrmNwOpmVs++wKnA+im3aalbD5zWPD4N+L9jx3+tmdFzAnDLfPe29pzmnpq3AV+uqj8fe8m6TFGShzU9ayTZHziR0f2FFwEvaE5bWJf5er0A+Hi5QOceVVWvr6rDqu
pIRv/t+HhV/QrWZKqSPDDJgfOPgWcA/8pA/4a5cG4LSX6R0b+KlgPnVdXZU27SkpHkXcAM8FDgm8AbgA8C7wGOAK4DXlhV25og8deMZpV+H/j1qto0jXbvzZI8Cfgk8CXuui/nDxjdx2ZdpiTJ0YxulF7O6B/l76mqs5I8glHvzkOALwAvrarbktwfeDujexC3AadW1ZbptH7v1wyJvq6qnmNNpqv5/X+geboP8M6qOjvJIQzwb5iBTZIkaeAcEpUkSRo4A5skSdLAGdgkSZIGzsAmSZI0cAY2SZKkgTOwSdIekGQmyYem3Q5JeycDmyRJ0sAZ2CQtKUlemuRzSS5Ncm6zUfr2JH+W5PNJPpbkYc25xyS5OMllST6Q5ODm+COTfDTJF5trfqp5+wOSvC/JvyV5R+5hY1VJasPAJmnJSPJo4EWMNnw+BtgJ/ArwQODzVfU44BOMdtIA+Dvg96vqaEY7OswffwdwTlU9Fvg5YH57mmOB1wBHAY9gtIekJP3Y9rn3UyRpr/E04PHAxqbza39GGzvPAe9uzvk/wN8neTBwUFV9ojl+PvDeZu/BlVX1AYCq+iFA836fq6qtzfNLgSOBT3X/Y0na2xnYJC0lAc6vqtfvcjD5LwvOu6c9++5pmPO2scc78W+spD3EIVFJS8nHgBckORQgyUOSPJzR38IXNOe8BPhUVd0C3JTk55vjvwp8oqpuBbYmeV7zHvsleUCvP4WkJcd//UlaMqrqiiR/BPxTkmXAHcCrgO8Bj0lyCXALo/vcAE4D3toEsi3ArzfHfxU4N8lZzXu8sMcfQ9ISlKp76vmXpL1fku1VdcC02yFJd8chUUmSpIGzh02SJGng7GGTJEkaOAObJEnSwBnYJEmSBs7AJkmSNHAGNkmSpIEzsEmSJA3c/wchCoIVwUx6uQAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmoAAAFNCAYAAACwk0NsAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xl8XHd97//X58yMRrtkrXa8J84eQhxEyMJiAgUaKGFffhDWW9MWfoWWlkvgcptbyu1G2doUCM0KIWkaEkhD9gRlI5vtOIvjLF7iTbJkbZZG0uzf+8c5kmXHjo+SzMyJ/X4+Hn5Yc+ZoznfmMxq99V3OMeccIiIiIhI9XqUbICIiIiL7p6AmIiIiElEKaiIiIiIRpaAmIiIiElEKaiIiIiIRpaAmIiIiElEKaiJy2DKzy8zs70Lu+7yZvf3lPo6IyGwoqImIiIhElIKaiIiISEQpqIlIpAVDjn9tZo+b2biZXWxmnWZ2s5mNmdkdZjZnxv7vNbN1ZjZiZt1mdvyM+5ab2Zrg+/4TqN7nWO8xs7XB9/7ezE5+iW3+YzPbYGZDZnaDmR0RbDcz+76Z9ZvZ7uA5nRTcd46ZPRW0bYeZ/dVLesFE5JCioCYirwYfBP4AOAb4I+Bm4BtAG/7n2J8DmNkxwFXAV4B24Cbgv82sysyqgF8DPwdagP8KHpfge08FLgG+ALQCPwVuMLPkbBpqZmcDfw98BJgHbAGuDu5+B/Dm4Hk0Ax8FBoP7Lga+4JxrAE4C7prNcUXk0KSgJiKvBv/qnOtzzu0A7gUecs496pzLANcDy4P9Pgr81jl3u3MuB3wXqAHOBE4HEsAPnHM559y1wCMzjvHHwE+dcw855wrOucuBTPB9s/EJ4BLn3JqgfecDZ5jZEiAHNADHAeacW++c6w2+LwecYGaNzrlh59yaWR5XRA5BCmoi8mrQN+Pryf3crg++PgK/BwsA51wR2AbMD+7b4ZxzM753y4yvFwNfDYY9R8xsBFgYfN9s7NuGFH6v2Xzn3F3AvwEXAn1mdpGZNQa7fhA4B9hiZneb2RmzPK6IHIIU1ETkUNKDH7gAf04YftjaAfQC84NtUxbN+Hob8B3nXPOMf7XOuateZhvq8IdSdwA4537knHsdcCL+EOhfB9sfcc6dC3TgD9FeM8vjisghSEFNRA4l1wDvNrO3mVkC+Cr+8OXvgQeAPPDnZhY3sw8Ap8343p8Bf2Jmbwgm/deZ2bvNrGGWbfgl8FkzOyWY3/Z/8Ydqnzez1wePnwDGgTRQCObQfcLMmoIh21Gg8DJeBxE5RCioicghwzn3DPBJ4F+BAfyFB3/knMs657LAB4DPAMP489mum/G9q/Dnqf1bcP+GYN/ZtuFO4FvAr/B78Y4CPhbc3YgfCIfxh0cH8efRAZwHPG9mo8CfBM9DRA5ztvd0DRERERGJCvWoiYiIiESUgpqIiIhIRCmoiYiIiESUgpqIiIhIRFUsqJnZQjP7nZmtD67L9+Vg+wXBde7WBv/OqVQbRURERCqpYqs+zWweMM85tyY4T9Fq4H3418dLOee++6IPMENbW5tbsmRJaRo6w/j4OHV1dSU/joSnmkST6hJNqkv0qCbRVOq6rF69esA51x5m33jJWnEQwfXteoOvx8xsPf5lXmZtyZIlrFq16pVs3n51d3ezYsWKkh9HwlNNokl1iSbVJXpUk2gqdV3MbMvB9/JFYo5acLHi5cBDwaYvmdnjZnaJmc2pWMNEREREKqjiJ7w1s3rgbvxr7F1nZp34ZxR3wLfxh0c/t5/vWwmsBOjs7Hzd1VdfXfK2plIp6uvrD76jlI1qEk2qSzSpLtGjmkRTqevy1re+dbVzrivMvhUNasH17m4EbnXOfW8/9y8BbnTOnfRij9PV1eU09H
l4Uk2iSXWJJtUlelSTaCrD0GfooFbJVZ8GXAysnxnSgkUGU94PPFnutomIiIhEQcUWEwBn4V+E+AkzWxts+wbwcTM7BX/o83ngC5VpnoiIiEhlVXLV532A7eeum8rdFhEREZEoisSqTxERERF5IQU1ERERkYhSUBMRERGJqEouJnj1yGdg7S+pS+1vSp2IiIhIaahHLYzcJNz4FeYMP17ploiIiMhhREEtDJt6mSp7FQcRERE5vCiohREENXPFCjdEREREDicKamHY1Nw09aiJiIhI+SiohaGhTxEREakABbUwpoc+FdRERESkfBTUwpjuUdMcNRERESkfBbVQ/Dlq6lETERGRclJQC0Nz1ERERKQCFNTCMPWoiYiISPkpqIVhhj/8qTlqIiIiUj4KamGZqUdNREREykpBLYR0rkARj/GcetRERESkfBTUQsjki+SK0DeuoCYiIiLlo6AWQn0yjsMoFBXUREREpHwU1EKIeUYRT0FNREREykpBLSwzCkUtJhAREZHyUVALyWHkFdRERESkjBTUwjKPooY+RUREpIwqFtTMbKGZ/c7M1pvZOjP7crC9xcxuN7Pngv/nVKqNMznzNPQpIiIiZVXJHrU88FXn3PHA6cAXzewE4OvAnc65o4E7g9sVZ1r1KSIiImVWsaDmnOt1zq0Jvh4D1gPzgXOBy4PdLgfeV5kW7kM9aiIiIlJmkZijZmZLgOXAQ0Cnc64X/DAHdFSuZTN4RtEVcbqMlIiIiJRJvNINMLN64FfAV5xzo2YW9vtWAisBOjs76e7uLlkbAU4tOMw5brurm2QsXBul9FKpVMlrL7OnukST6hI9qkk0RakuFQ1qZpbAD2lXOueuCzb3mdk851yvmc0D+vf3vc65i4CLALq6utyKFStK2taJB6qwrGP568+go7G6pMeS8Lq7uyl17WX2VJdoUl2iRzWJpijVpZKrPg24GFjvnPvejLtuAD4dfP1p4Dflbtv+mBkejtF0vtJNERERkcNEJXvUzgLOA54ws7XBtm8A/wBcY2afB7YCH65Q+/ZinofhGE3nKt0UEREROUxULKg55+4DDjTZ623lbEsY5nl45hhTj5qIiIiUSSRWfb4amMUwioypR01ERETKREEtJM/z56ipR01ERETKRUEtJM+LYThSCmoiIiJSJgpqIZl5eDiyBV1GSkRERMpDQS0szw9qOQU1ERERKRMFtZD8HrWigpqIiIiUjYJaaEbMHLmCrvUpIiIi5aGgFpZ5eEA2rx41ERERKQ8FtbDMI2ZF8kUFNRERESkPBbWwzIjhyOU19CkiIiLloaAWlhmeadWniIiIlI+CWljBedRyRfWoiYiISHkoqIVlnr/qU4sJREREpEwU1MIynfBWREREyktBLTR/MYEuISUiIiLloqAWlnl4ViSvE96KiIhImSiohRWc8FZDnyIiIlIuCmphaY6aiIiIlJmCWlhmeFYkq6FPERERKRMFtbCCKxPk1aMmIiIiZaKgFpZ5mIY+RUREpIwU1MIyL7iElIY+RUREpDwU1MIKFhPoPGoiIiJSLhUNamZ2iZn1m9mTM7ZdYGY7zGxt8O+cSrZxD8PTHDUREREpo0r3qF0GvGs/27/vnDsl+HdTmdu0f9On59DQp4iIiJRHRYOac+4eYKiSbQgtWEygoU8REREpl3ilG3AAXzKzTwGrgK8654b33cHMVgIrATo7O+nu7i5pg04aHMJcgVy+WPJjSXipVEr1iCDVJZpUl+hRTaIpSnWJYlD7MfBtwAX//wvwuX13cs5dBFwE0NXV5VasWFHaVu38GT2jvTjgTW9+CzHPSns8CaW7u5uS115mTXWJJtUlelSTaIpSXSo9R+0FnHN9zrmCc64I/Aw4rdJtAvwrE+DPT9O51ERERKQcIhfUzGzejJvvB5480L5lFcxRAzRPTURERMqiokOfZnYVsAJoM7PtwN8AK8zsFPyhz+eBL1SsgTPNCGq5vIKaiIiIlF5Fg5pz7u
P72Xxx2RsSRnB6DoB8UafoEBERkdKL3NBndBmG35OWVY+aiIiIlIGCWljmTb9YWkwgIiIi5aCgFpZ50z1qujqBiIiIlIOCWlgzFxOoR01ERETKQEEtrBmLCRTUREREpBwU1MIyMKehTxERESkfBbWwzGPqolHqURMREZFyUFALa6/FBApqIiIiUnoKamHttZhAQ58iIiJSegpqoZlWfYqIiEhZKaiFpdNziIiISJkpqIVlHuY09CkiIiLlo6AWlnmgxQQiIiJSRgpqYZnmqImIiEh5KaiFNWPoM5tXUBMREZHSU1ALywx0eg4REREpIwW1sGas+iwU1aMmIiIipaegFpZ509f61BQ1ERERKQcFtdD2DH2qR01ERETKQUEtrGAxQcwzCk5z1ERERKT0FNTCMg8Igpo61ERERKQMFNTCChYTxExDnyIiIlIeFQ1qZnaJmfWb2ZMztrWY2e1m9lzw/5xKtnGaGQBxT4sJREREpDwq3aN2GfCufbZ9HbjTOXc0cGdwu/LMf6niBkXNURMREZEyqGhQc87dAwzts/lc4PLg68uB95W1UQcS9KglPMhr6FNERETKoNI9avvT6ZzrBQj+76hwe3xBj5o/R63CbREREZHDQrzSDXipzGwlsBKgs7OT7u7ukh5v4dbnOQpw+Qw7enro7h4s6fEknFQqVfLay+ypLtGkukSPahJNUapLFINan5nNc871mtk8oH9/OznnLgIuAujq6nIrVqwobavuWwuboK66ivaOuaxY8drSHk9C6e7upuS1l1lTXaJJdYke1SSaolSXKA593gB8Ovj608BvKtiWPaaGPj2nxQQiIiJSFpU+PcdVwAPAsWa23cw+D/wD8Adm9hzwB8HtyguCWsIz8kUFNRERESm9ig59Ouc+foC73lbWhoQxvZigSFFBTURERMogikOf0RScnqPKjIKCmoiIiJSBglpYQY+a56GhTxERESkLBbWwpq5MoMUEIiIiUiYKamHtda1PBTUREREpPQW10IJLSOEU1ERERKQsFNTCmjH0qaAmIiIi5aCgFtb0CW+hoDlqIiIiUgYKamFN9aiZ5qiJiIhIeSiohTV9wlsNfYqIiEh5KKiFFaz6TKhHTURERMpEQS2smXPUFNRERESkDBTUwpqeo6YT3oqIiEh5KKiFNd2j5nQJKRERESkLBbVZigFFBTUREREpAwW1sKZPeKvzqImIiEh5KKiFNeP0HPmCgpqIiIiUnoJaWDNOeKvFBCIiIlIOCmph6VqfIiIiUmYKamEFJ7yNoaAmIiIi5aGgFlbQo+ZpMYGIiIiUiYJaWDNOeFvQYgIREREpAwW1sKZWfaIeNRERESkPBbXQ/DlqWkwgIiIi5RKvdAMOxMyeB8aAApB3znVVuEEAeKaLsouIiEh5RDaoBd7qnBuodCOAveeoaehTREREykBDn2HNuDKBc7rep4iIiJReqKBmZl82s0bzXWxma8zsHSVumwNuM7PVZrayxMc6uOnFBH5AU6+aiIiIlFrYoc/POed+aGbvBNqBzwKXAreVrGVwlnOux8w6gNvN7Gnn3D1TdwbhbSVAZ2cn3d3dJWwKNI08yXKgf2cP0Mjvuu+mKmYlPaYcXCqVKnntZfZUl2hSXaJHNYmmKNUlbFCbSiTnAJc65x4zs5KmFOdcT/B/v5ldD5wG3DPj/ouAiwC6urrcihUrStkc2FIFa2HBEfNgG7zxTW+itirqU/wOfd3d3ZS89jJrqks0qS7Ro5pEU5TqEnaO2mozuw0/qN1qZg1AsVSNMrO64BiYWR3wDuDJUh0vXKOm5qj5N/OaoyYiIiIlFrZL6PPAKcAm59yEmbXgD3+WSidwfdBpFwd+6Zy7pYTHO7ipS0iZH9C0mEBERERKLWxQOwNY65wbN7NPAqcCPyxVo5xzm4DXlurxX5rgouxBUFOPmoiIiJRa2KHPHwMTZvZa4GvAFuCKkrUqiqZ71Pyb6lETERGRUgsb1PLOOQecC/zQOf
dDoKF0zYqgYO1EXKfnEBERkTIJO/Q5ZmbnA+cBbzKzGJAoXbMiaJ85avmCgpqIiIiUVtgetY8CGfzzqe0E5gP/XLJWRVHQozZ1wtuietRERESkxEIFtSCcXQk0mdl7gLRz7rCcoxbzgqFPzVETERGREgt7CamPAA8DHwY+AjxkZh8qZcMiZ2roM7ipoCYiIiKlFnaO2jeB1zvn+gHMrB24A7i2VA2LnH3mqGkxgYiIiJRa2Dlq3lRICwzO4nsPDdNXJtDQp4iIiJRH2B61W8zsVuCq4PZHgZtK06So8hcTeCioiYiISHmECmrOub82sw8CZ+Enloucc9eXtGVRMz1HTUFNREREyiNsjxrOuV8BvyphW6LN9r6ElIKaiIiIlNqLBjUzGwP2l0gMcM65xpK0KmLGc+P8xYMX8JHaGhYpqImIiEiZvGhQc84dXpeJOgDnHA8MrOWseJyluoSUiIiIlMnhtXLzJYp7fp7N256LsqtHTUREREpNQS2EmBcDIG+GaTGBiIiIlImCWghxC3rUMF3rU0RERMpGQS0EMyNusWDoswhAvqCgJiIiIqWloBZSzGIUsOnzqKlHTUREREpNQS2kuBcnt9digsq2R0RERA59Cmohxb04edvTo5YvKqmJiIhIaSmohRS3OAXAww9oGvoUERGRUlNQCynmxfbuUdNiAhERESkxBbWQEl6cPDZ1yU/1qImIiEjJRTaomdm7zOwZM9tgZl+vdHv8OWpMn0dNiwlERESk1CIZ1MwsBlwI/CFwAvBxMzuhkm2Km7+YwFwegIIWE4iIiEiJRTKoAacBG5xzm5xzWeBq4NxKNijmxckDXjEH6BJSIiIiUnpRDWrzgW0zbm8PtlVMPJbwFxMUswBoLYGIiIiUWrzSDTgA28+2vaKRma0EVgJ0dnbS3d1d0gZNjE3QZB49WzcBr+fZ5zbQnd9S0mPKwaVSqZLXXmZPdYkm1SV6VJNoilJdohrUtgMLZ9xeAPTM3ME5dxFwEUBXV5dbsWJFSRt06c2Xkhv1WDS3AzbCkqVHsmLFUSU9phxcd3c3pa69zJ7qEk2qS/SoJtEUpbpEdejzEeBoM1tqZlXAx4AbKtmgqSsTWDD0qdNziIiISKlFskfNOZc3sy8BtwIx4BLn3LpKtinuxZk0D6+QASCn83OIiIhIiUUyqAE4524Cbqp0O6b4F2U3rJDFTKs+RUREpPSiOvQZOTGLkcegkCXheeS07FNERERKTEEtpKkrE5DPEI8ZeQ19ioiISIkpqIU0tZiAQoa4Z+Q19CkiIiIlpqAWUsJL+EOf+SyJmKfFBCIiIlJyCmohxSzmD30WpoY+1aMmIiIipaWgFlLci1MAf46a55HTRdlFRESkxBTUQtp3MYFOzyEiIiKlpqAWUsxiFHBQyPqLCTT0KSIiIiWmoBaSv5gAyGe0mEBERETKQkEtJH+OmtuzmEBDnyIiIlJiCmohxb04BQOXz/qLCdSjJiIiIiWmoBZSzGIA5AsZEjo9h4iIiJSBglpIcc+/fn0BR5U58jo9h4iIiJSYglpIU0EtD9R4OV2UXUREREpOQS2k6aBmRrXl1aMmIiIiJaegFlLcpoIaVHsFzVETERGRklNQC2nP0KdR7eV1eg4REREpOQW1kPYMfeIPfer0HCIiIlJiCmohxbzg9BwY1abFBCIiIlJ6CmohTZ+ewyCJFhOIiIhI6SmohZSwBOCv+kxaXosJREREpOQU1ELaazGB5XUJKRERESk5BbWQpuao5QyqyGnVp4iIiJRc5IKamV1gZjvMbG3w75xKtwlmXkLKSJiCmoiIiJRevNINOIDvO+e+W+lGzDTzhLdJdHoOERERKb3I9ahF1cxLSFWRo+igqF41ERERKaGoBrUvmdnjZnaJmc2pdGNg5tAnVJEHIKdTdIiIiEgJVWTo08zuAObu565vAj8Gvg244P9/AT63n8dYCawE6OzspLu7u1TNBWB7djsAOTNSA73ASfyu+x6q41
bS48qLS6VSJa+9zJ7qEk2qS/SoJtEUpbpUJKg5594eZj8z+xlw4wEe4yLgIoCuri63YsWKV6x9+7NheAPcAHlgXmsT9MIZZ76RptpESY8rL667u5tS115mT3WJJtUlelSTaIpSXSI39Glm82bcfD/wZKXaMtPU6TkKZiTIARr6FBERkdKK4qrPfzKzU/CHPp8HvlDZ5vhmLiaIO3+Omq5OICIiIqUUuaDmnDuv0m3Yn4QXXELKi5Nwfo+arvcpIiIipRS5oc+omu5R8+LEgqFP9aiJiIhIKSmohRQzf45aPhbfM/SpHjUREREpIQW1kGb2qMWDoc+cetRERESkhBTUQpqao5bzYsSchj5FRESk9BTUQqqKVQGQ9WLEnK5MICIiIqWnoBaSZx4xYmS9GPFiFlCPmoiIiJSWgtosJCxBxjO86aFP9aiJiIhI6SiozULc4v7QZ3HqygTqURMREZHSUVCbhbjFyZrhBUGtoDlqIiIiUkIKarOQsAQZ2zP0qdNziIiISCkpqM3CdI9aQYsJREREpPQU1GZh36FPXZlARERESklBbRb8oU+wooY+RUREpPQU1GYhbnGygE2fR009aiIiIlI6CmqzkLAEGRwWzFHT6TlERESklBTUZsHvUXPTQ5/qURMREZFSUlCbhQQJP6gFPWoF9aiJiIhICSmozULc4mQowtTQpxYTiIiISAkpqM1C3OLkXBEr5jGKGvoUERGRklJQm4WEJcg4P5xVkSenoCYiIiIlpKA2C3GLk3EFAOoTRdJ5BTUREREpHQW1WYhbnDxFikBTvMhktlDpJomIiMghTEFtFhKWACBrRn3CMZlTUBMREZHSqUhQM7MPm9k6MyuaWdc+951vZhvM7Bkze2cl2ncgU0EtYyioiYiISMnFK3TcJ4EPAD+dudHMTgA+BpwIHAHcYWbHOOcikYji5r9cWTMa4gUyCmoiIiJSQhXpUXPOrXfOPbOfu84FrnbOZZxzm4ENwGnlbd2BzQxq9TH1qImIiEhpVapH7UDmAw/OuL092PYCZrYSWAnQ2dlJd3d3yRtXyPjBLGNGfmKYvonhshxXDiyVSqkGEaS6RJPqEj2qSTRFqS4lC2pmdgcwdz93fdM595sDfdt+tu339P/OuYuAiwC6urrcihUrXkozZ+Wxmx6DlN+jNq+lnmcn61mx4k0lP64cWHd3N+WovcyO6hJNqkv0qCbRFKW6lCyoOefe/hK+bTuwcMbtBUDPK9Oil29q6DNjRl2sSFpDnyIiIlJCUTs9xw3Ax8wsaWZLgaOBhyvcpmkzT89R6xV0HjUREREpqUqdnuP9ZrYdOAP4rZndCuCcWwdcAzwF3AJ8MSorPgHi7FlMUBsraDGBiIiIlFRFFhM4564Hrj/Afd8BvlPeFoWz5zxqRtJTUBMREZHSitrQZ6TNPD1HjZcnmy9SKO53rYOIiIjIy6agNgt7BzW/N00LCkRERKRUFNRmYeYlpJJBUNPwp4iIiJSKgtoszFz1WW15AK38FBERkZJRUJuFmedRqzINfYqIiEhpKajNQsISxC3GmOeRJOhRU1ATERGRElFQmwXPPOZUtzBYVUN9ZicA6Vyxwq0SERGRQ5WC2iy11rQyWN1A3ehGYEaP2raHYbS3gi0TERGRQ42C2iy1VrcymKiiZuRZwPmLCZyDX3wQ7vtepZsnIiIihxAFtVlqrWll0BzxzAjt7PYXE4wPQGYUdm+vdPNERETkEKKgNkut1a0MFiZxwDJvhz/0ObLFv3NsZ0XbJiIiIocWBbVZaq1pJecKjHrGsbbNH/pUUBMREZESUFCbpdaaVgAG5izhPbEH/R614SCopfqgqFWgIiIi8spQUJul1mo/qA2d8G66vGeZ3PwIbmSrf6crwMRABVsnIiIih5J4pRvwajPVoza4sIvJWAOf2fI18tuzJKZ2GNsJ9R0Va5+IiIgcOtSjNktTPWqDhQmSK28nl2gkUUzjLHgpNU9NREREXiEKarPUnG
wm4SXoG+/D6zyeiY/8F1kX4/G5H/R3SCmoiYiIyCtDQW2WYl6MhQ0L2TLqLyA46pgTeE/jtfyk+nP+DmN9FWydiIiIHEoU1F6CxY2Lp4MawJL2Rp4dyEJtGwxvrmDLRERE5FCioPYSLGlcwtaxrRSK/nU+j2yvZ+vQBMVFZ8DG3/mXlBIRERF5mRTUXoLFjYvJFXP0jvsXYT+yvY5cwTF4xFtgrIc1995I/rdfo/DUjRVuqYiIiLyaKai9BIsbFwPsmafWXgfAg7YcgFPv+iTxR37K1mu/Qb7gnwA3lcmTyuQr0FoRERF5tapIUDOzD5vZOjMrmlnXjO1LzGzSzNYG/35SifYdzJKmJQCsG1wHwJFt9QBcuHqCK/Nv42Z7M3cXTmZ+YTtfuPR+Ht06zCXf/ybf/rtvcvF9msMmIiIi4VTqhLdPAh8Afrqf+zY6504pc3tmpbW6leUdy7lw7YUsa17G2YvOZsGcGp7eOcY3+Tzk4W3eat4SexzrWcMHfrybVVVXUh/L8MHuU/jcWUswM1h7FWRTcNofV/ophTMxBMlGiIV42+TS4MUgljj4vmE4B8X8K/d4IiIirwIV6VFzzq13zj1TiWO/EsyMn7z9JyxtXMqFay/EOce/f+JUABbMqQFgVfFYAL5/+gRnNo/SamMkyfL5zM/5zKWP8Hc3PgX3fhfuuADymenH/vmDWzj/uscB2Dwwzl1P+6f7+PWjOzjv4ocoFB19o2lufLynjM8YGO2FH5wMv/9huP0vfw/c8P+/csd/5D/ge8dDdvyF96VH4bGr977OaiYFj/8XFHKvXBumPHsbDG585R831LFvhX9eBg/8e6QXrWzJbGFt/9pKN+OlcQ5+/gF49BeVbsnLM7It0u+Rly0z9vK+P5+d3evzzM3w9E0v75ilVCzCwz+DcV3GcFZu/SY8emWlW/GiojhHbamZPWpmd5vZmyrdmAOpTdRy3gnn8ezws6zuW83JC5p54oJ38JNPvg6AYrIZN7+LhlUX8vPjHgBg8sh38f7Y/Yw89wA3PvgEDG6AbIqLLr90+nEvvX8zVz28jYFUhr+78SlWXrGaofEsVz28lXufG+ChzYP8481P86VfPsqWwf2Eln0UCgXY+uBeH0jF4n4+nAY3vvBDq+fRPcHonn+G7JgfFA5mcCNsfwSeuQmClbFTRtM5RiaycP8P4ZbzX/wo9KoDAAAaqElEQVRxnrqB8V9+ml+v2Q5P/QbGd8GGOwBYu22EL165hmzBwf0/gOu/AOuum3qCcN1KuO5/+B9cU3bvgP94O/Q/vfdxJodfeOzUrr2D35SxnXDVx+Daz4X7kL/vB3DPdw++30zP3ALX/4l//Oduh19/ETZ1wzWfgkd/7r8Ot56/dy0GN8Jl74ErPwJT156drf2F4JA2797MjtQOAJxzXDFwBX99z1/jpl6jNVfAXX/3wtfMOchN7v9BnYP07pfcJjKp/W+fep6FPNzyDf/nY6b+9bDxTj8Mz7S/X+yrLvX/SJjpyevgoYteerv7n4bbvvWCn53QMil45GL4wUnwmy/6z3FoM3T/A3OGgvBcLMLd/+QHj6nntO//If39Tev5X79+4uDf1/sYrLpkds9l38ecGIL7fwRPXAv/sNhfZX//j2b/Psml4Ycn+38oh1Es+D/3V38cVl+2/8+GAxnZ5v8cl9rW38NNfwV3ffvF99twJ6T6D3y/c/7nTf/6l9aOVZfCFefu1QExa8UibPn97H8GRnvgFx+C5+4It/+WB+CBf/N/H0VYyYY+zewOYO5+7vqmc+43B/i2XmCRc27QzF4H/NrMTnTOje7n8VcCKwE6Ozvp7u5+hVp+YKlUaq/jNBQbqPfqueB3F/AXc/+CmMXIFR2ewbzaIg8s+jNeO/y/qXv0CgBWHfEpTtj0AP878Qt+mn/39OPUb76FS37UT112F5sG3gXAr37+Lc4dvpE/jdfxyys2cfz2Z3mId/C9G1bx2C7/zfvvN/yeP1yaYC
Cd5cHBHt7d0kTrxCZGmk+iGEuyui9Pdt0NnO9dzqNH/zHNuRRPxk5g23NrqT7xvSxrq2bR1l/RMLaB1qE1PNn+R+w8/vPU5oY4+rmf0j7wEOlkBxuP+gzHr7+MYqwab/sqHrz1VxzRcxu7qhfRlNtFpnY+1yUm2DLYzol1x9C+6Vd8CiC9m9U3XsxY4zEAFJ3j2w+ksfwkt9nfEyukecidQrqmc/q1eLA3z7PDBT55XII3rPoGdRPbuOHJZfxR8gFiwNY7f8bla2p5ZGeBxwcKtKdzrNh5MVXA5G+/wdodRY5f/wOad69jPN5M4s7/y7o+R9Pu9SQzA8zveYSe67/Fs8d+0X/txzZw6pr/ycajPsOOBX8EQM1ED12rvsxA2+msP/4vwWy6fYu2XMORrgC9a3nmqm/Se8Q7AOjouxtncbxijkKsikRuFGcJjn7uJ5gr8kD6aJp2PwUYA+2n+++f0eeoTvexq+ON1I9toj61mZ1zz6Zr1deoH9/ChlQNSzdfSayYofD4NcSKWQD6Ot5Ew9hG3G/+ilVdcZwX5+hnf8y83gcoegnSP3svjy7/ewrxmul2e4UM83pvo7/jzeSqmojnUhzRcwvx/Bj9HW8hH6+ja9VX2Lro/Wxd/JG93vdWzDF3512MNh7LeP0S2vvvJx+vY7jFn6GQLWa5YMcF1MXqOH/e+fRld9Kf74c8/PzWKzhteAPHPfMjv+1PP8Su9jMZaD8DgKOf/Sntu+7n0eX/wHjNXGLjW+kc28BI84kcv/4HNIw9x7aF72fz0k9QlR2ho/8+ds49m3yi/kV/Vjv67uG4p3/A+uP/koG2NzB/x83EChNkku0c8+yP2XjUZ8gkWzlp3YXkVl3GE6/5FvWp50lmBkjkRjkCoH8d2/7j0xRiSQba3sDx679HurqDJ0/6Bm0DD1H0kpy47h9xZjzcGyNT3U4yPcBpD/8psWKWjc88wbaF7wc7+N/D1ZN9zBl+jL7OFRy//ru0DzzEk7vrpt8rAPHcGB3999Lf8ebp5+8V0nT0389A22l09N9Lw9hG5u68C6PIaNUc/mvzDbw7NUHT2l9Rk97Ja4FnJ3spxJIc/7T/y2nT0vPYMf8cTl3zNQba3kDr4COMNRzNYOvrmKhdyETtAloHV5Gubme8fsl0e3qyPUzm4NH7d3JB/DL6NzXy1PK/3ev51kz00jr4EON1i1i6+Zc0jj3HIztj5OO1NO1ez672M3FenOrJnRy56QoaR5/m0eX/RKa6jbZdD9K56WI2veZvqE7v4oSn/oV8vI6a9J6rv2Sv/hRVuVF6nryPZ4/9sz0vqHO07/o9dePPc0TPLSRyKYpenGeP+VP65p5Ne/99nDjWi7v/R6yZnD/9GTVTLD/Biev+kfG6RQy0ncHyqTv++8v0P/ifPHXC16Y/G3Iuxz1j91BwBd7R9I7px7Bintet/ip141t4+LR/Y7J2AQDJ9C5qJ7Yz3LJ8+vdKLD9B88gTDLWcivMSuPwEqV23cqQ1M9h+BsVYNVYs0DK0mtHGY2gY28jupuMoxP3FbMue+w8WAMU1v+DBqjdTN76NVP0SclXN0+1ZsO0Glm28mKE5p1D0Egy1vI6xhmXUpzYxd+ddVKf7MFekKrebfKyWtad8m1TDMgDqUltwFmOibkFQ2x14xRy5RAOJ3Jj/3nAFTn/wO1RndtFz8Sfo61zBZM18kpl+xhqP3ev19QoZGkefIVV/JHXjWxhtPI7mkSdYuO06UvVLWbTt1/TOPZtnj/kSzSOPEc9PUvTiDM85hWIsCa5A0+6nGW08BuclaNv1AEdtvISadD/ZLQ+zqusHZJMtM2pR4JhnL2SoZTnjdUvJVjVy0pN/TzPAwDM8ePN/7vW7aN/f95VkroJd42bWDfyVc27VS7l/SldXl1u16kV3eUV0d3ezYsWKvbbdvPlmvnbP11jesZzRzCjHtBzDc1s6WNxaTy75BE3OeOuaa7l9TgfDi9/AsvECXc/dRA/ttD
LIGreM13gbKQAbk3G2MofXpB1Xt+YYiHm8c3yCtHmcMz7O7xPH8LBrpXayjZZ4kZaqHEsWLOSaoQcYSvZz7tg4huPUnEem6SSGRp7myWpocjlurK/jsyOjLM/4f+Vs9JbAnHoeyT5PVz5Oo2uhI72JpxMnsjS3gbgVebrpLI5KrSVTHOX6hgZaGrt477Y7KXhJYsU9fy09U5XgwjnNJIvGn+0qsIBBrq5voc2lOa32JKrTA9SmtlDEI1+EFDW0mp+9BzvOYLT1tczpe5BEepDnUklOsM2k4400FvyermFXzxxLka5ux0uPsKpwNKd4GzEcnjmS5Nh1xNto7/HbZhS5o/4PuW/U+Lp3M7VBwJlS8JJsPfpTYEb7jjupH9tIIV7L1mWfBKB15/00jKxj0oxrmpbSXHssy+va6dx+O8nJPsYbl2HFLHWp58nEa7izsZ22iT5en85g7F+2qpmq7AgO2HHkRyjEa1mw8WpihTQ7F76Htt7fEc+PM9hxBq39D+z1fblkC3Vjmyh6VXjFLFuO+QzFWDVL1/+EoY7T2d36WhY9cxm725Yz3NZF+9M/ZnVtByfl8+Q6z2SXtZPcfj/H5dcz3rCU4fbXM3frTRQK46xPVrMwl6Uu0Up1up+il2DH0g/T1ns3w3WLuCuf4PTJ9SxL7yIXb2DgiBXM2/rfAIy0nMJw5xtYn9nKLeP+z99n8y30u0l+m5gk5hxvy9fwnqFtpBqWkantpKXvAYwiPUs+QC5Rz6LnrsCATLKFy9oXsTG7nb8cHKadBJjHWOMxNA+tZWDum6kd20zt+DbSXh232RuZv2ABCTPqknE8b88r7ya2M9p3B8uyaaoScxhvWMacgYf3fg/EakjXdJLIDOOcI5n3348OMMBZDHN7/pp3GIabfv/MfP87izFRv5hCrJq60Y2YK5BqOobGkacoxGuZrD2CXFUz9bufZTw+h+eaz6KlsZ7mkXUkJ3Yy2HkWc7fdRDIzQDbZSiIzhOGC2s9huP10cslG2rffTv3YJj8kt3dRM/o8+SI0T24hH68jnh/H4THUeSbjDUu4KzbBmpF7WeZq+PP+rWSSbYxSR0u+H2cxstVtZKuaaRxZx+6Wk5kzsPoF79t8vJ5MTQd1Y5soxGrYftTHKMaqSBXTXL77dig6zt81SLLoUW9pdi48h+RkP3Vjm8hUt1M93kMiv/cQ5e6Wk6me6CWZ3kW6ppNU07G09t2Psxi4ImPNxzHUeSbP9FzLLdUen842cEI6RTyXwlyBwc4zaNn1CJlkG7XjW3F4QJGBeWdTk3qeseYTiedHae+9G4DR5hOYqF9M4/A6kuldbD36PNp33EUyvQtwxHMphjrPIJ4dIV81h9GWk6CYp2PHHdSlngcg1biMurHNPHnaP9Hecydzt91Ez+L3kantpH7gUa6ihw2e/574fLaJBVUd1O3egOdyVGWGcRijLSeTrW6lYXgdVelBPJfn2flv5Ylskde0HsMRO35H/dgGsslW+ha8kwd338e9sUk+OjrGafkkfQveSd3YJuYM7PldV4hVk022MNz+etp23kM+Xk/NRA+5qkaqMsMUvSp2zXsrE41LqR7vYd7WG8hWNVGV3T393sU5jCLZZCtjzccBMFk7n46eO/EKaXYs/TBNg2tpHlpLwUsy2nIiNeM9JCd34ixOIV5LPD/OSOvrSGSGqB/bQLpmLtWTO/f62eld9F7SdfOpSW2hdux5YoVJ6sY2T9+frWoinhvHc/ngvee/p/0/gHZNP+dcopGhjjNoGlpL9WQfo80nMFm3kM4dt5Ku6aR//jtYsPEqnMUYmvtGqtK7SNfMZbI4yfXpJzg6V+Cc8QmcV0WsMEn//LfTscPvgRvsfBOjLSfS1rSQxuJRL/h9/0oys9XOua6D7xmxoGZm7cCQc65gZkcC9wKvcc4NvdjjVDKoOee4dN2l/PfG/6Yp2cQTu54gGwSDluoWJnITpAtp6uO1LGxczDPDz1B0++82jzuIO0
faMxoKRTKFBeQTPTQWi4zEvGAfR972jgOec5ySybCmuppYMU7B23MakDmFAsOxGCemM6yrTr7gmDX5BJPxg8/jqsslSMXzmO3//TJ/sobJqhRDsZjfzkI1MUuTieLg+iw0FwqMBM/pQOqKRdLmUThQSquAmHMULESDnMEBagr+e6sY4nFOm0yzK5Zgc5X/WrVNNLGQER6tDf+iVBUdi3J5NiRf/oIRc/5TO9zlxk4gUb/+RWv8Ui3M5Shi7EiUbk3avHSS3uqXMYRWJvWFIn+za4QfN3WyqWZ282LNOdwBfsbaJpoZqB15JZooQEvOYyxeIHeQz7TXpqv4H8f+S2SCWkVWfZrZ+4F/BdqB35rZWufcO4E3A39rZnmgAPzJwUJapZkZnzvpc3zuJP9an0PpIbKFLNlClraaNjKFDL3jvSxqWER9VT2Dk4PsGtuODW8hUzsfS9bTEDdixTy1iVay6Un6C4Nkx5LMn7uYqvgksUyeZyYGWFhTRcvEbp4f7yXvxdkxEac6v5tjW9robF5Kb1WStuoO7t70GMnsAO0dR3FcdT2rd49zXH0jDw324Xk55jZVs2vHZnLZIp1z38TOiW0UyLCwpYahoV3MaW5neDJHY02CgVSa4sQoyzpPZSA9yK7hzf6QmsVZ3FpHKpNnMJVlUf2RLKobY+P4AIlkDfNqF7K5ZzsjQ4+Tq52LS9QC0NlUTayQpXdkHDASqe0A5GvacfEaOqsLVDW2MpqapC6WJUOSuskdbE9XU0jOYU5NjDk2xni8hbqqGDfdt4rjjjt+uh5eZjfFZBON1UlOmruAtT1bmcj6H5yWT+Pi1cQn+rDcBIbDmUeuYTHxiT68/AQAzqsi17AAz/N4S1sbm3qfoTc1TK5hAc6resF7YEGsgQkzhvJ7j9BbIQvsmffj4tVYMUtidBvOPPK1nbh4ksToVgo1bRQTdSRS2ykkmykm6rH8OC7RsOcBXZFYZoRCdcv0YyZS27BCFhevJVd/BAC1iSRvbGnm3sFh8sMbSZBjbkszPbSTG+3Hy6XINSzEvDhH1C5iMN1HbnwHheQcvOwo8fQg2cZFxDKjnNnayvNeI9vGdvv3TQ6QbVwMFsPL7CY+OQBmHJHsZLL+CPoz/l++OzaM0HXScfRObMXtO0zp8lTt3gI4cnWduEQDVszSkR6jtnEZW/K7IbubYlXDXs+zGK+jUNNKbZXH3EIvu3aPU52IMZbOU5zxB2csXkNLx5n0pXtgvAfLZ8g1LNirCbH0ELHMCNnGJcypr2Y8kyebL06/h1yiFuftCYyWG2d+2xzSBWMglQnqmwEMF6vCcv58uBc816n3QTGPS9TSWFWkYbKHXamsX+eqehJj28lXt1JMNoEr4GVTFKsa8TIjFJNNJMa2Ya5AsaqBfE07OIeXG8OqG2mzUXa5Jpgcpphs3uu4VbFq5tctZjDdR2rseYpVTTy1fj0nHTmPYrIRzA/VXnaUWHqIXMNi/z0Xr50evrTcGHhVuFgSy02QmNg5XZP5SX+YaLKtFi9RR+/QKInRLeRrOyhWNWKFDFbIUKxqBFfE8mmIJUiMbiVf3UKxeg64PF5uwt8nkBjbjhWzVNceQUvLyfT13UfOIF/T8YLXNjY5SKGm1X8e6WH/9RrfCS5Prn7hXtMWpvaJp4fAjGzjIrB48BqM4WJJvFyKWNr/lZOrn4+L12D5SRKpHdO3p2qaGPPnguZrO2irW0RDrIb/E69l++71UMzu9V6wYo7E2DYKVU3T7cUVaRzvZ+C5HTQunUM+OVXfIomxbdQmW5nTdio9E1vJ5idJpLaBxad/zmdKpHqgmCPXsBDM8987VU1gRnyyHy8zFjznJWAelvPrbPlUUO/9/zE6VfNcbScuUYflJoDC9OeSFbNQyIJXheUn/derkNnzszvFFakafR5cMXgftxFLj+x5LV5w3LHpY8TSQ3s+82bWvXoOmEd8oh/LT5JrWPTCemdGcIk6/7MqO0b7EStIj/cy4C
ax/ATEqnFeAsuNg8WwYpb4RD9z6juYGNxv0yqioj1qr5RK9qhJZakm0aS6RJPqEj2qSTSVui6z6VF7lQ9MiYiIiBy6FNREREREIkpBTURERCSiFNREREREIkpBTURERCSiFNREREREIkpBTURERCSiFNREREREIkpBTURERCSiFNREREREIuqQuISUme0CtpThUG3AQBmOI+GpJtGkukST6hI9qkk0lboui51z7WF2PCSCWrmY2aqw1+aS8lBNokl1iSbVJXpUk2iKUl009CkiIiISUQpqIiIiIhGloDY7F1W6AfICqkk0qS7RpLpEj2oSTZGpi+aoiYiIiESUetREREREIkpBLQQze5eZPWNmG8zs65Vuz+HEzC4xs34ze3LGthYzu93Mngv+nxNsNzP7UVCnx83s1Mq1/NBlZgvN7Hdmtt7M1pnZl4PtqksFmVm1mT1sZo8Fdfk/wfalZvZQUJf/NLOqYHsyuL0huH9JJdt/KDOzmJk9amY3BrdVkwozs+fN7AkzW2tmq4JtkfwMU1A7CDOLARcCfwicAHzczE6obKsOK5cB79pn29eBO51zRwN3BrfBr9HRwb+VwI/L1MbDTR74qnPueOB04IvBz4TqUlkZ4Gzn3GuBU4B3mdnpwD8C3w/qMgx8Ptj/88Cwc24Z8P1gPymNLwPrZ9xWTaLhrc65U2achiOSn2EKagd3GrDBObfJOZcFrgbOrXCbDhvOuXuAoX02nwtcHnx9OfC+GduvcL4HgWYzm1eelh4+nHO9zrk1wddj+L+A5qO6VFTw+qaCm4ngnwPOBq4Ntu9bl6l6XQu8zcysTM09bJjZAuDdwH8Etw3VJKoi+RmmoHZw84FtM25vD7ZJ5XQ653rBDw1AR7BdtSqzYGhmOfAQqkvFBUNsa4F+4HZgIzDinMsHu8x87afrEty/G2gtb4sPCz8AvgYUg9utqCZR4IDbzGy1ma0MtkXyMyxergO9iu3vrxktlY0m1aqMzKwe+BXwFefc6Iv84a+6lIlzrgCcYmbNwPXA8fvbLfhfdSkxM3sP0O+cW21mK6Y272dX1aT8znLO9ZhZB3C7mT39IvtWtC7qUTu47cDCGbcXAD0Vaov4+qa6nYP/+4PtqlWZmFkCP6Rd6Zy7LtisukSEc24E6MafQ9hsZlN/lM987afrEtzfxAunGcjLcxbwXjN7Hn/azNn4PWyqSYU553qC//vx/6g5jYh+himoHdwjwNHBKp0q4GPADRVu0+HuBuDTwdefBn4zY/unghU6pwO7p7qx5ZUTzJm5GFjvnPvejLtUlwoys/agJw0zqwHejj9/8HfAh4Ld9q3LVL0+BNzldGLNV5Rz7nzn3ALn3BL83x13Oec+gWpSUWZWZ2YNU18D7wCeJKKfYTrhbQhmdg7+X0Ex4BLn3Hcq3KTDhpldBawA2oA+4G+AXwPXAIuArcCHnXNDQYD4N/xVohPAZ51zqyrR7kOZmb0RuBd4gj3zbr6BP09NdakQMzsZfwJ0DP+P8Gucc39rZkfi9+a0AI8Cn3TOZcysGvg5/hzDIeBjzrlNlWn9oS8Y+vwr59x7VJPKCl7/64ObceCXzrnvmFkrEfwMU1ATERERiSgNfYqIiIhElIKaiIiISEQpqImIiIhElIKaiIiISEQpqImIiIhElIKaiMjLYGYrzOzGSrdDRA5NCmoiIiIiEaWgJiKHBTP7pJk9bGZrzeynwQXMU2b2L2a2xszuNLP2YN9TzOxBM3vczK43sznB9mVmdoeZPRZ8z1HBw9eb2bVm9rSZXWkvcuFTEZHZUFATkUOemR0PfBT/QsynAAXgE0AdsMY5dypwN/6VLwCuAP6nc+5k/CswTG2/ErjQOfda4Exg6jIyy4GvACcAR+Jf41FE5GWLH3wXEZFXvbcBrwMeCTq7avAvuFwE/jPY5xfAdWbWBDQ75+4Otl8O/FdwbcD5zrnrAZxzaYDg8R52zm0Pbq8FlgD3lf5picihTkFNRA4HBl
zunDt/r41m39pnvxe7pt6LDWdmZnxdQJ+tIvIK0dCniBwO7gQ+ZGYdAGbWYmaL8T8DPxTs8/8B9znndgPDZvamYPt5wN3OuVFgu5m9L3iMpJnVlvVZiMhhR3/1icghzzn3lJn9L+A2M/OAHPBFYBw40cxWA7vx57EBfBr4SRDENgGfDbafB/zUzP42eIwPl/FpiMhhyJx7sZ5+EZFDl5mlnHP1lW6HiMiBaOhTREREJKLUoyYiIiISUepRExEREYkoBTURERGRiFJQExEREYkoBTURERGRiFJQExEREYkoBTURERGRiPp/tDaTO4VGZe4AAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmsAAAFNCAYAAABfUShSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3X2cnXV95//X55yZyT25hQkmQLhJIIAKNALetEZEivUG1/UGb3a1P1tqV3ft1nZ/9k77Y7f76N22dVv6q3TLb61rxZuKm+1iLaJj11oVUBQRkQgiIXKThCRMSDIz53x+f1zXzJwMQzJXyHXmkLyej0cec851rutc35zvZPKe721kJpIkSepNjdkugCRJkp6aYU2SJKmHGdYkSZJ6mGFNkiSphxnWJEmSephhTZIkqYcZ1iQJiIj/HhH/aYbn/jAiLq27TJIEhjVJkqSeZliTJEnqYYY1Sc8YZffjr0bEtyNiT0T8VUQMRsRnI+LxiPh8RCztOP/VEXFnROyMiKGIWN/x2vkR8Y3yuo8Dc6fc65URcXt57Vci4jkzLOMrIuKbEbE7Ih6IiN+e8vqLyvfbWb7+9vL4vIj4LxFxf0TsiogvR8S8p/FxSTpKGNYkPdP8S+BlwDrgVcBngV8HVlD8TPt3ABGxDvgY8EvA8cCNwP+KiIGIGAA+A3wEWAZ8snxfymsvAK4DfgFYDnwI2BQRc2ZQvj3AvwaWAK8AfjEiXlO+78llef+0LNN5wO3ldX8I/ATwgrJM/wFoV/pkJB2VDGuSnmn+NDMfzswHgf8DfC0zv5mZ+4EbgPPL894I/O/MvCkzRynC0DyKMHQx0A/8SWaOZuangFs67vHzwIcy82uZ2crMDwP7y+sOKjOHMvOOzGxn5rcpAuOLy5ffAnw+Mz9W3nd7Zt4eEQ3g/wLek5kPlvf8Svl3knSMM6xJeqZ5uOPx3mmeLywfPwu4f/yFzGwDDwCrytcezMzsuPb+jsenAO8tuyp3RsRO4KTyuoOKiIsi4osR8WhE7ALeSdHqR/keP5jmshUU3bDTvSbpGGdYk3S02koRugCIiKAISw8CPwZWlcfGndzx+AHgdzJzScef+Zn5sRnc92+ATcBJmbkY+Atg/D4PAKdPc802YN9TvCbpGGdYk3S0+gTwioh4aUT0A++l6Mr8CvDPwBjw7yKiLyJeC1zYce1fAu8sW8kiIhaUEwcWzeC+i4AdmbkvIi4E3tzx2keBSyPiDeV9l0fEeWWr33XAH0XEsyKiGRHPn+EYOUlHOcOapKNSZt4NvJViMP82iskIr8rMkcwcAV4LvB14jGJ826c7rr2VYtzan5Wvby7PnYl/A1wdEY8D76cIjePv+yPgZyiC4w6KyQXPLV/+FeAOirFzO4Dfw5/RkoA4cMiGJEmSeom/tUmSJPUww5okSVIPM6xJkiT1MMOaJElSDzOsSZIk9bC+2S7AkbJixYpcs2ZN7ffZs2cPCxYsqP0+mjnrpDdZL73Jeuk91klvqrtebrvttm2ZefxMzj1qwtqaNWu49dZba7/P0NAQGzdurP0+mjnrpDdZL73Jeuk91klvqrteIuL+Q59VsBtUkiSphxnWJEmSephhTZIkqYcZ1iRJknqYYU2SJKmHGdYkSZJ6mGFNkiSphxnWJEmSephhTZIkqYcdNTsYzKY7t9/JXdvvOuDY9h/fz+6t32f5wgEaETz6+H7OO/USXnbxG9m69VY+/82P8PDufeydewIj/UtYPXYfCwf6eGj3XvqPG2TpyjW0N3+PNbGUuxoPsmRBHz/c00dr0Xn85OOP8I2Re9jXN5edC9eyfPedRHuMVnMOOxafQ5As23UnjdYIANnoY9eS83np3hZ377uT4dz/1H+ZCHYedxbrFwzzxM6Hmbv6OT
wyMsATD21m3r5HnnT6CYvmMNZOduwp7rV84QBBsG14+nvsnTfISN8iFg//ADIP8xM/0O7du7nlxx87Iu81ncHFc9k30mbX3pFK1z0x70RG++az+PEf1FSyp9ZPkw19a7iz9SC7c1/X7w/11kuzEaxeOo+tO/cy2joy30czcajv72eCuv+9qJr5McCJ2+fwwYc+yUiOzXZxVFo4dxnrl/70bBdjgmHtCHjv0Ht5cPjB6V/cMfnwC9/5J1528Rv5w5v/PTe1dxYHp/v//9HiTzOTd+7cxZ8vXQLD5WvDn2Phzl1ct2Rxce0Tnz/w2r1fnL4cD/0jJ+zcxX9bsvjQf6FH/4lNj5aPdw4d/NztU57vmPasSdXyzszMrel9xz166FOmVWeZZiAe+buZ1Xdd6q6XH9f43k/lUN/fzwR114sqe8fYLv5q/yz+W9WTnDSchrWjye6R3Tw4/CA//+yf541nvnHi+L1/fBmPji3gL+e/g3kDfczND3P70kfYvvMhvje6k+fsH+CSkYt45e7reWJgGXfvW8qfDFzFmfu+xa/3/Q2fXLCCDy2fwz+edCns+hYvfvgVvKFxLe9aeQKbFi1iYXMeN9y3mfb842ns38XuS36XxTf9Mjsv/3P6d9zNgq9/kO2vv4Hsm8Oyj7+aV646gU0Liw1p//gn/jMnzjtx2r/Pks/+Irt3bmdZFmHyzjiD39r/Vj498NsMX/Tv2Xf6yyfO/YN/+B5jY8kjw/t4zrOWMHegwZe+v41GBC88YxlvuWjNAe897+7PsOC2P6c1/3haS05l98b/dETq4I47vsWzn/3cI/JeU/3Xz3+fH+7Yw449o7zlwpN52TkrZ3TdvDuvZ8G3rqM1bzljx6/n8Re9v5byTS95+z//G24YXAb7d/D751/NyQtO6uL9C3XVy/6xFr/wkdtYvqCf7XtG+eCV57F43sARv89UH/v6/RPf3y84fRlvvXhN7fesQ53/XlTNtv3bePctv8onl66mkXv46xf+BX3RP9vFEtDfN8DW+3rntzPD2tN0z2P3AHD+CeczuGBw4vi+0X080n4Wmx9fxYI5fZw150wyHuVL3/g4DzRhxRMnsHDNSxi846Ow91E+P/Y8vrf3WZxzylIGH/4I547uBo5nez5Ee2wxX9r1HH5/3n4ik0ebDTYsP5uV990Hjz8EJz6XladdDK0Wg/P6YfgBWHAig2e9uCjMsrWcPrKNu+YMMNAYYOPZL6ev8RRV/6wLWLXtegC+3DqHDY17Obe1g8FWi8GzXw6rN0ycunRZH1+/bwcPPb6Py1aezknL5vE/vnUHAO8849mcc/rJB7534wn4+p8WZX7um+D0DRwJjz4wzDlH6L2mOv2ehXzuvqIb88KzL+Kc01fM7MLWdvjGX8LwI7Dh547Y33Wm1t29jm8/+m36oo+XnvMKBpr1h5mp6qyXucft4Xs79rJswQDPP+f5REQt9+n07B0nTHx//8J039/PEHXWi6ppZ5sFt/82u0cf57TFp/HcM54/20VSh633Dc12ESY4weBpunvH3QCsW7pu4tie/WPMYx9jzXnsHW2xbXg/J55Q/CP8x/tvKM7ZdwrLT5v87fbuLH7wn//c89mbAyxvtQDYumcLTRbxOPMZGVjJyWPFmIZ1y86E488qLh48F+YeVzzetxsevhMGz5ks5OA5rBsp+j1OX3L6Uwe18lyAvTnA37cv5LjYy0ub3yQJOGH9AaeuWDjAgzv30monKxYOcNbK4yZeO2vlomne+9zpH/ews048btrHhzTl8++28e/HNYvXzEpQq9v499pZKxd1Jah13hPgrBOn+f6WKmpEg7VL1gIH/h8iTWVYe5q+/9j3WTJnCSfMP2Hi2Lbh/cxjP/MWTP5wX3/a85nfbvOl2AbA1r1nc8ZJq2BJEdIeX1IEr/WrlvFA/xqWt1sT1y4eWAbAvmVns25kFIAzl50JK59dnDB4LswtxzvseRS2fR9WHhiMziyvO+QPhDJY3J
0n8f04FYDXNL9MLDsNBhYccOqKhXMmHy+aw7rBRTQCImDd4DT/mc1fBouedcB9et3Z5X/Kg8fNYdmCCqHnuFUwd0nxeGX3g+mZS88svi47s+v37ob1ZXBeXyVAP02d399nTvf9LR2G8X+jR+u/VR0ZhrWn4YOfeRP/cO//Zt3SdcRX/pSbrv8gr7nmn/jgTd9nAftYdNzkgNHBxQtZPdrHWAQLW212Nc7g5GXzi6DV6GPeiUWr1ZkrF7Fz0bqJljWA4+ctB2Bg1bmsLVvI1i1dNxl4Bs+B/vkQTdhyC7THnrJl7dBhrQgWd7VPZtWZFxT3ZWzacHVAWFs4h3kDTdasWMApy+azYM5TtN4NngPNAVh+xsHL0SPWLF/AQF/jgFaVGYkoPsuBRbC4+91l4/V8tP62vr5suZ22BbcmM/r+lio62v+t6sjwJ87T8Mkd32JRO3nzWW+GT/8Sqx6fw+1PnMFdDzzCH81NFi9eOnHuioUDXLricuY++nlWtE9h3UvW0WgEXHgVrH4eb1q1llNXLmfhnD6W/OTP841bl7Gw/1aGR4e5YPXJnDf/VBZftI5X5na2r1hZ/BZ29lLYdg+cdFERDuYeBw99p7jhstMmC3rKCzjv7Dfw+sULuWzNZQf/Sy0ahBf9Mu1d5/LWDedyy/63s7Z9H0s2/OyTTl2x6MCwBvDul5xBq32QpRQufiec+pPQfGYMou1rNvjVy85k7eDC6hc//12w835odP93onNXnMsb1r2By9dc3vV7d8OL1q7gyuedxEvXDx765CPokN/fUkWXnHwJN3/nZjYMOo5QT82wdphG26PsajZ5y+6dvPTkS2DPI5zaatOgzQKKda1WLOsIa4vm8Iuv/T1+ceobnf4SOP0lXARcdFrRgrbugo1wwUZW3PAqhkeHOfP4Z/H6F54NwEmvuobfHL928Sp41Z9MvtfcxfDYD4vHCya7Zemfx5xX/xkzno946Qd4y/jjn/vgU552fEfL2vFlcHvtBasP/t5nXFr8eQb5+Z867dAnTeesnzmyBalgoDnAbz3/t2bt/nVbNLef3/2Xz+n6fQ/5/S1VtGLeCt60/E3M758/20VRD7Mb9DDt2FtM6V3easP+x2FsH/NihJccP8z8KBbMPH7ZMsbHPnd2Gc7UsrnFWLXlc5fP7II5HV11C46vfL+qViwqxnANNBscN9fcL0lSHQxrh2n7vmI12OWtVjGov/SKwR3ML1vW+uctZOn8gcMOM8vLsWrjXw9pfJLBvKXQV/8MwGXzB4gouni7NSNPkqRjjWHtMG1/oghoy1stRrdNbid0Xv8WFjfL5cEHFrJi4cBhh5nxFrUZt6yNh7XOLtAa9TUbLJs/cMDYNUmSdGQZ1g7Ttj0PAUVY+/ANNwIwkk1W7LmH9SuaxUn98zlp6XxWLz28sQirFq5ioDHAinkzXIh1PKwt7E5YA1i9dB6rl87r2v0kSTrWONDoMG1/otjUfHmrzbL990ATbm2fyYU7v8d7XvwW+F/AwAL+82vPYuwwZ4+94cw38IJVL2Bu39yZXTA+Zq0L49XG/dmbL2BOv5lfkqS6+L/sYdr+xKPMa7eZn8n6uJ82Db7SPoe+3Q+wvF3ubj6wgMHj5rJqyeG1PM3vn19t7Z3xXQwWdm85g5OWzeeERTMMk5IkqTLD2mHavm87K8qFa9c3HmB34zjuHzi9ePGBW4qvU1b8r91EN2j3WtYkSVK9DGuHace+HcWyHaUfjy3i0QVlK9iWrxdfu71uzkQ3aPfGrEmSpHoZ1g7Ttn2PHbAl1LZcDItOLPaD3HFvcXDWWtYMa5IkHS0Ma4dp+8gulrda7M557Mk53Bbn8vzTj5/YW5PmnO5vqXTic+D4s2Bl91d2lyRJ9XA26GEYbY+yc3SY5a02V42+lxdf9lp+aWM5Xu2z58L9X4aBWdg6ZOkaeNfXun9fSZJUG1vWDsNj+x4DijXW9mc//c2OBW8Hzym+DhzGxt+SJElTGNYOw/
a9xdIcK1otRuhnTl/HxzjeDeqmvJIk6QgwrB2GbXu3AUXL2j76GegMa8efBdHo/uQCSZJ0VDKsHYbJTdzb7Kef/mbHxzgwH5adbliTJElHhBMMDsN4N+jyVouRnNKyBvCKP4TmwCyUTJIkHW0Ma4dh+77tzIs+5mc+uWUN4LSNs1EsSZJ0FKq1GzQiLo+IuyNic0S8b5rX3xkRd0TE7RHx5Yg4u+O1XyuvuzsifrrOcla1fe92ljeL/T73Tx2zJkmSdATVljIioglcA7wcOBt4U2cYK/1NZj47M88Dfh/4o/Las4ErgXOAy4E/L9+vJ2zft53lzTkAxWzQqS1rkiRJR0idKeNCYHNm3puZI8D1wBWdJ2Tm7o6nC4AsH18BXJ+Z+zPzPmBz+X49Yfve7SyPAdrRR5sG/basSZKkmtQ5Zm0V8EDH8y3ARVNPioh3Ab8MDACXdFz71SnXrprm2quAqwAGBwcZGho6EuU+qMcff5wHdz/ImaPzGYvi47vjW99kzw97puHvmDM8PNyVulc11ktvsl56j3XSm3qpXuoMazHNsXzSgcxrgGsi4s3AbwJvq3DttcC1ABs2bMiNGzc+nfLOyA2fv4G9O/Zy3nGnQN8PAXj+hc9j/YnH1X5vTW9oaIhu1L2qsV56k/XSe6yT3tRL9VJn/90W4KSO56uBrQc5/3rgNYd5bdc8OPIgAOtiDq1GsTzHk2aDSpIkHSF1poxbgLURcWpEDFBMGNjUeUJErO14+grgnvLxJuDKiJgTEacCa4Gv11jWGds6WmTGtdk3EdbmOGZNkiTVpLZu0Mwci4h3A58DmsB1mXlnRFwN3JqZm4B3R8SlwCjwGEUXKOV5nwC+C4wB78rMVl1lreLBkQdZtXAVi8bG2NkoZoTasiZJkupS66K4mXkjcOOUY+/vePyeg1z7O8Dv1Fe6w7N1dCtnrTwLfvwwY2XLmuusSZKkupgyKtrd2s2JC06EsX2MRT9gWJMkSfUxZVSUJM1owth+xmJ8gsF0k1clSZKePsNaRZlJREBrPyNlWBtwzJokSaqJKaOiNm0aNGBsP6PRT38zivAmSZJUA8NaRUnSiAaM7WOUflvVJElSrUwaFWWOh7X9jNDv5AJJklQrk0ZFbdpFWGuNMkqfa6xJkqRamTQqSsoJBu0xRrNpy5okSaqVSaOiiaU72mOMZsOwJkmSamXSqKCdbYBy6Y5RRrLpBANJklQrk0YF42GtQQPaY4zZsiZJkmpm0qggMwFoRgPao+zPhhMMJElSrUwaFbSyBcD4Erh2g0qSpLqZNCqY6AYtGtgYsRtUkiTVzKRRQVKktEb5daTdtBtUkiTVyqRRweQEgyKs7W83mGPLmiRJqpFJo4IndYO2G/Q33cRdkiTVx7BWwcQ6a+XXEWeDSpKkmpk0KhgPa83y+f5s0mfLmiRJqpFhrYLJlrWiH3S03aDZMKxJkqT6GNYqmByzVnzdnw36Gn6EkiSpPiaNCsaX7hjvBh1tN+izZU2SJNXIsFbBxA4GEy1rTZqOWZMkSTUyrFUwdemO/basSZKkmhnWKhjfyL3RsXRH0zFrkiSpRiaNCsa7QcfD2hhN+m1ZkyRJNTKsVTDZslYu3eGYNUmSVDPDWgWTY9aKsNai6Zg1SZJUK8NaBVO7QUdpOmZNkiTVyqRRwfg6a+NLd9iyJkmS6mZYq2Bib9COljX3BpUkSXUyrFUwMWatPTkb1JY1SZJUJ8NaBZMbuU+GNcesSZKkOpk0Kpi6kftY2rImSZLqZVir4ElhjT6ahjVJklQjw1oFTx6z1qDfCQaSJKlGtYa1iLg8Iu6OiM0R8b5pXv/liPhuRHw7Im6OiFM6XmtFxO3ln011lnOmxpfuaJTrrTlmTZIk1a2vrjeOiCZwDfAyYAtwS0Rsyszvdpz2TWBDZj4REb8I/D7wxvK1vZl5Xl3lOxxTu0FH6XPMmiRJqlWdzUIXApsz897MHAGuB67oPCEzv5iZT5RPvwqsrr
E8T9vEDgbt8UVxG45ZkyRJtaqtZQ1YBTzQ8XwLcNFBzn8H8NmO53Mj4lZgDPjdzPzM1Asi4irgKoDBwUGGhoaebpkP6q69dwHw8NYtQNEN+p07vk17a7PW++rghoeHa697VWe99CbrpfdYJ72pl+qlzrA2XZNTTntixFuBDcCLOw6fnJlbI+I04AsRcUdm/uCAN8u8FrgWYMOGDblx48YjUvCn0tzShJth1cpB8kdNINhwwflceOqyWu+rgxsaGqLuuld11ktvsl56j3XSm3qpXursBt0CnNTxfDWwdepJEXEp8BvAqzNz//jxzNxafr0XGALOr7GsMzIxZq01RrtR5Fy7QSVJUp3qDGu3AGsj4tSIGACuBA6Y1RkR5wMfoghqj3QcXxoRc8rHK4AXAp0TE2ZF5w4GGUVYc4KBJEmqU23doJk5FhHvBj4HNIHrMvPOiLgauDUzNwF/ACwEPhkRAD/KzFcD64EPRUSbIlD+7pRZpLOizfhG7q2JsGbLmiRJqlOdY9bIzBuBG6cce3/H40uf4rqvAM+us2yHY6JlrdWa6Abtb7rOmiRJqo9Jo4LJddZatG1ZkyRJXWBYqyCz3MGg3aIdxXIdjlmTJEl1MqxVMLko7pgta5IkqSsMaxVMbuTe0bLmRu6SJKlGhrUKxjdyj44xa31u5C5Jkmpk0qig1S66QZvtFi3XWZMkSV1gWKtgvGWt0Rqb6AZt2g0qSZJqZFirYHIHgzFb1iRJUlcY1ioYD2vNdpsWZcuaYU2SJNXIsFbB5A4GoxPdoP1OMJAkSTUyaVQwuXTHGC36iICGLWuSJKlGhrUKJiYYtFu0oul4NUmSVDvDWgXjS3c0WmOM0XS8miRJqp1hrYLJlrVRxuhzQVxJklQ700YFExMM2mO0aLjVlCRJqp1hrYLxjdybrVbZsmZYkyRJ9TKsVZA53g3qmDVJktQdhrUKJrtBRxmj6Zg1SZJUO9NGBRM7GJSzQR2zJkmS6mZYq6DN5AQDu0ElSVI3GNYqaGebIKA9ylg2nGAgSZJqZ1iroJ1tGjSgNcoofTQdsyZJkmpm2qhgomWNtGVNkiR1hWGtgswkoghoI04wkCRJXWBYq6CVraIbFBhLN3KXJEn1M6xVUHSDFkaz4WxQSZJUO8NaBUnSKOPaqIviSpKkLjBtVNBqt4jyIxtpu86aJEmqn2GtgqJlrTBKg34nGEiSpJoZ1ipoZ3tyNmjasiZJkupnWKtgcp21YoKBY9YkSVLdTBsVFDsYFGFtf9vZoJIkqX6GtQqKsFYYcZ01SZLUBYa1CpKc7Aa1ZU2SJHWBYa2CYgeDwoiL4kqSpC6oNaxFxOURcXdEbI6I903z+i9HxHcj4tsRcXNEnNLx2tsi4p7yz9vqLOdMdU4wGMkmDcOaJEmqWW1hLSKawDXAy4GzgTdFxNlTTvsmsCEznwN8Cvj98tplwAeAi4ALgQ9ExNK6yjpTmZPrrI3RxKwmSZLqVmfL2oXA5sy8NzNHgOuBKzpPyMwvZuYT5dOvAqvLxz8N3JSZOzLzMeAm4PIayzojRTdox2zQMK1JkqR61RnWVgEPdDzfUh57Ku8APnuY13ZFZtLI4vEoDbtBJUlS7fpmclJE/AvgC5m5q3y+BNiYmZ852GXTHMuneP+3AhuAF1e5NiKuAq4CGBwcZGho6CDFefoeefQRyKIY+0Zh65YtDA09Wus9dWjDw8O1172qs156k/XSe6yT3tRL9TKjsAZ8IDNvGH+SmTsj4gPAwcLaFuCkjuerga1TT4qIS4HfAF6cmfs7rt045dqhqddm5rXAtQAbNmzIjRs3Tj3liPrbm/+W3Q/+CIBWY4BTTjmZjRvX13pPHdrQ0BB1172qs156k/XSe6yT3tRL9TLTbtDpzjtU0LsFWBsRp0bEAHAlsKnzhIg4H/gQ8OrMfKTjpc8Bl0XE0nJiwWXlsVnVZnIHg5Fs4JA1SZJUt5m2rN0aEX9EMbszgX8L3HawCzJzLCLeTR
GymsB1mXlnRFwN3JqZm4A/ABYCnyw3SP9RZr46M3dExH+kCHwAV2fmjqp/uSOtWLqjMOIEA0mS1AUzDWv/Fvgt4OPl838AfvNQF2XmjcCNU469v+PxpQe59jrguhmWryumbjfVMKxJkqSazSisZeYe4EmL2h5r2tmemA06ks4GlSRJ9ZvRmLWIuKmcATr+fGlEzPoYsm4rFsUt0toYTbtBJUlS7WY6wWBFZu4cf1IuVHtCPUXqXa1sTYxZG6PPHQwkSVLtZhrW2hFx8viTiFjDU6yZdjRrZ5tm+XjMRXElSVIXzHSCwW8AX46IL5XPf4pyMdpjSZJTWtYMa5IkqV4znWDw9xGxgSKg3Q78T2BvnQXrRa1sTUwwGKNBs87NuiRJkpj5dlM/B7yHYieB24GLgX8GLqmvaL0nM2mSZDRIGrasSZKk2s20beg9wPOA+zPzJcD5wDG3KWY720QCjX4Aw5okSardTMPavszcBxARczLze8CZ9RWrNxWL4iY0imkGTScYSJKkms10gsGWcp21zwA3RcRjTLMp+9FufDZoTrSszW55JEnS0W+mEwz+RfnwtyPii8Bi4O9rK1WPatMmMqFRfGwu3SFJkuo205a1CZn5pUOfdXQa38Egx8OaY9YkSVLNXHyigla2aCYTYc3tpiRJUt0MaxVM7A0aRVgzq0mSpLoZ1ipoZ5tGTnaDOhtUkiTVzbBWQbGRe07MBjWsSZKkuhnWKshMmplkuc5a2A8qSZJqZliroE37gNmgTjCQJEl1M6xV0G4XY9ba4aK4kiSpOwxrFbRpF92gUXSDuiiuJEmqm2GtgvG9QV0UV5IkdYthrYJi6Y427YmlO2a5QJIk6ahn3KhgYp21sGVNkiR1h2GtgvEdDNqGNUmS1CWGtQpuuOIGfm64TbucYOCiuJIkqW6GtQoGFwyypDU20bJmw5okSaqbYa2i6JxgYFqTJEk1M6xVFDnmOmuSJKlrDGsVRbZoOcFAkiR1iWGtosjWxNIdTjCQJEl1M6xVFFls5w7uDSpJkupnWKsosk07xsOaaU2SJNXLsFZRZIs25QQDw5okSaqZYa2yyW5Qx6xJkqS6GdYqOrAbdJYLI0mSjnqGtSrabYKc2G7KddYkSVLdag1rEXF5RNwdEZsj4n3TvP5TEfGNiBiLiNdNea0VEbeXfzbVWc4ZyxbAZDeoY9YkSVK5HwfAAAAPwklEQVTN+up644hoAtcALwO2ALdExKbM/G7HaT8C3g78yjRvsTczz6urfIelXYS1lhMMJElSl9QW1oALgc2ZeS9ARFwPXAFMhLXM/GH5WrvGchw54y1r42PW7ESWJEk1qzNurAIe6Hi+pTw2U3Mj4taI+GpEvObIFu0wtceKL+k6a5IkqTvqbFmbLslkhetPzsytEXEa8IWIuCMzf3DADSKuAq4CGBwcZGho6LALOxN9o4/zIuChR7cD8LWv/jNL59q8NtuGh4drr3tVZ730Juul91gnvamX6qXOsLYFOKnj+Wpg60wvzsyt5dd7I2IIOB/4wZRzrgWuBdiwYUNu3Ljx6ZX4UPZsg3+CZSecCA/CC1/4Ak5YNLfee+qQhoaGqL3uVZn10pusl95jnfSmXqqXOpuFbgHWRsSpETEAXAnMaFZnRCyNiDnl4xXAC+kY6zZrxrtBy0ZDZ4NKkqS61RbWMnMMeDfwOeAu4BOZeWdEXB0RrwaIiOdFxBbg9cCHIuLO8vL1wK0R8S3gi8DvTplFOjvaU5bucJ01SZJUszq7QcnMG4Ebpxx7f8fjWyi6R6de9xXg2XWW7bCUs0HHyqU7wpY1SZJUM0fHV1F2g6Yta5IkqUsMa1W0i+XgWrg3qCRJ6g7DWhUT3aCusyZJkrrDsFbFxGxQu0ElSVJ3GNaqGN8bNN0bVJIkdYdhrYoc38i9CGk2rEmSpLoZ1qoYb1mLJhEu3SFJkupnWKtiPKzRcPcCSZLUFYa1KsZng2bD8WqSJKkrDGtVdLSsNfzkJE
lSFxg5qiiX7mil3aCSJKk7DGtVdCyKazeoJEnqBsNaFeV2U22aNFy3Q5IkdYFhrYqyG3QswzXWJElSVxjWqsiOpTtMa5IkqQsMa1W0J8esuSCuJEnqBsNaFePdoO1wNqgkSeoKw1oVWUwwGKNpN6gkSeoKw1oV7ckdDGxYkyRJ3WBYq2J8UVzCljVJktQVhrUq3BtUkiR1mWGtinbnDgazXBZJknRMMKxV0bZlTZIkdZdhrYqyG3S07Zg1SZLUHYa1KtqTOxjYsiZJkrrBsFbFeMtaNmj4yUmSpC7om+0CPKNMLN3RoGnDmiRJ6gLDWhXtYgeD0Xa4KK4kSeoKO/OqKLtBW9lwgoEkSeoKw1oV7TGSYCzDddYkSVJXGNaqaLfIaNDKdDaoJEnqCsNaFdkio0lm2g0qSZK6wrBWRbsFNGi1bVmTJEndYVirouwGbSc0bFmTJEldYFirouwGbWc6wUCSJHWFYa2K9ljZspY07QaVJEldUGtYi4jLI+LuiNgcEe+b5vWfiohvRMRYRLxuymtvi4h7yj9vq7OcMzY+G7QNYViTJEldUFtYi4gmcA3wcuBs4E0RcfaU034EvB34mynXLgM+AFwEXAh8ICKW1lXWGctigkG7nTRtk5QkSV1QZ+S4ENicmfdm5ghwPXBF5wmZ+cPM/DbQnnLtTwM3ZeaOzHwMuAm4vMayzkx7csyaS3dIkqRuqDOsrQIe6Hi+pTxW97X16VgU125QSZLUDXVu5D5dmskjeW1EXAVcBTA4OMjQ0NCMC3c41j/8YxYk7NnzBNsf3Vf7/TQzw8PD1kUPsl56k/XSe6yT3tRL9VJnWNsCnNTxfDWwtcK1G6dcOzT1pMy8FrgWYMOGDblx48appxxZj/x/7Bm+jznNeawcXMLGjefXez/NyNDQELXXvSqzXnqT9dJ7rJPe1Ev1Umc36C3A2og4NSIGgCuBTTO89nPAZRGxtJxYcFl5bHa1xybXWXPMmiRJ6oLawlpmjgHvpghZdwGfyMw7I+LqiHg1QEQ8LyK2AK8HPhQRd5bX7gD+I0XguwW4ujw2u7LN+GxQt5uSJEndUGc3KJl5I3DjlGPv73h8C0UX53TXXgdcV2f5KuuYYOCiuJIkqRtcLayKcgeDVhsafnKSJKkLjBxVZNGyNtpqM+CquJIkqQtMHFWUi+KOjLWZ09+c7dJIkqRjgGGtinaLpMH+sRZz+vzoJElS/UwcVWSLdjRoJ3aDSpKkrjBxVNFu0abo/pzT70cnSZLqZ+Kooj1Gu/zI5vQ5Zk2SJNXPsFZF2Q0KOGZNkiR1hYmjinabVhYf2YBhTZIkdYGJo4r2GC27QSVJUhcZ1qrIVseYNT86SZJUv1r3Bj3aPLzrCbaUW4LaDSpJkrrBsFbB2NgYu21ZkyRJXWTiqKAvWoy0y7DmdlOSJKkLDGsV9JHsb9uyJkmSusfEUUGTNmO4dIckSeoeE0cFzWh1LN3hRydJkupn4qigSdvtpiRJUlc5G7SC61f+Kp++r/jI3MhdkiR1g4mjgntWXMp38jQABpp+dJIkqX4mjgrmD0x2fTpmTZIkdYOJo4IFZVgb6GsQEbNcGkmSdCwwrFUwb6Acr2armiRJ6hJTRwUL5hQta4Y1SZLULaaOCuZPtKy5bIckSeoOw1oF4xMMbFmTJEndYuqoYH7HBANJkqRuMHVUMN8JBpIkqctMHRVMdoM6Zk2SJHWHYa2CBXPcakqSJHWXqaMCJxhIkqRuM3VU4AQDSZLUbaaOClxnTZIkdZthrYJmI+hv2A0qSZK6x9RR0fJ5wcrFc2e7GJIk6RjRN9sFeKZ5/8XzuHTjGbNdDEmSdIyotWUtIi6PiLsjYnNEvG+a1+dExMfL178WEWvK42siYm9E3F7++Ys6y1nF/P5wgoEkSeqa2lrWIqIJXAO8DNgC3BIRmzLzux2nvQN4LDPPiIgrgd8D3li+9oPMPK+u8kmSJD0T1NlEdCGwOTPvzcwR4HrgiinnXA
F8uHz8KeClERE1lkmSJOkZpc6wtgp4oOP5lvLYtOdk5hiwC1hevnZqRHwzIr4UET9ZYzklSZJ6Vp0TDKZrIcsZnvNj4OTM3B4RPwF8JiLOyczdB1wccRVwFcDg4CBDQ0NPv9SHMDw83JX7aOask95kvfQm66X3WCe9qZfqpc6wtgU4qeP5amDrU5yzJSL6gMXAjsxMYD9AZt4WET8A1gG3dl6cmdcC1wJs2LAhN27cWMNf40BDQ0N04z6aOeukN1kvvcl66T3WSW/qpXqpsxv0FmBtRJwaEQPAlcCmKedsAt5WPn4d8IXMzIg4vpygQEScBqwF7q2xrJIkST2ptpa1zByLiHcDnwOawHWZeWdEXA3cmpmbgL8CPhIRm4EdFIEO4KeAqyNiDGgB78zMHXWVVZIkqVfVuihuZt4I3Djl2Ps7Hu8DXj/NdX8L/G2dZZMkSXomcHVXSZKkHmZYkyRJ6mFRTLx85ouIR4H7u3CrFcC2LtxHM2ed9CbrpTdZL73HOulNddfLKZl5/ExOPGrCWrdExK2ZuWG2y6FJ1klvsl56k/XSe6yT3tRL9WI3qCRJUg8zrEmSJPUww1p11852AfQk1klvsl56k/XSe6yT3tQz9eKYNUmSpB5my5okSVIPM6zNUERcHhF3R8TmiHjfbJfnWBIR10XEIxHxnY5jyyLipoi4p/y6tDweEfFfy3r6dkRcMHslP3pFxEkR8cWIuCsi7oyI95THrZdZFBFzI+LrEfGtsl7+n/L4qRHxtbJePl7u10xEzCmfby5fXzOb5T/aRUQzIr4ZEX9XPrdeZlFE/DAi7oiI2yPi1vJYT/4MM6zNQLmp/DXAy4GzgTdFxNmzW6pjyn8HLp9y7H3AzZm5Fri5fA5FHa0t/1wF/L9dKuOxZgx4b2auBy4G3lX+m7BeZtd+4JLMfC5wHnB5RFwM/B7wx2W9PAa8ozz/HcBjmXkG8MflearPe4C7Op5bL7PvJZl5XscSHT35M8ywNjMXApsz897MHAGuB66Y5TIdMzLzH4EdUw5fAXy4fPxh4DUdx/86C18FlkTEid0p6bEjM3+cmd8oHz9O8R/QKqyXWVV+vsPl0/7yTwKXAJ8qj0+tl/H6+hTw0oiILhX3mBIRq4FXAP+tfB5YL72oJ3+GGdZmZhXwQMfzLeUxzZ7BzPwxFMEBOKE8bl11WdlFcz7wNayXWVd2td0OPALcBPwA2JmZY+UpnZ/9RL2Ur+8Clne3xMeMPwH+A9Auny/HepltCfxDRNwWEVeVx3ryZ1hft270DDfdbzROo+1N1lUXRcRC4G+BX8rM3Qf55d966ZLMbAHnRcQS4AZg/XSnlV+tly6IiFcCj2TmbRGxcfzwNKdaL931wszcGhEnADdFxPcOcu6s1oktazOzBTip4/lqYOsslUWFh8eboMuvj5THrasuiYh+iqD20cz8dHnYeukRmbkTGKIYU7gkIsZ/Oe/87CfqpXx9MU8ecqCn74XAqyPihxTDaC6haGmzXmZRZm4tvz5C8YvNhfTozzDD2szcAqwtZ+4MAFcCm2a5TMe6TcDbysdvA/5nx/F/Xc7cuRjYNd6krSOnHD/zV8BdmflHHS9ZL7MoIo4vW9SIiHnApRTjCb8IvK48bWq9jNfX64AvpItvHnGZ+WuZuToz11D8//GFzHwL1susiYgFEbFo/DFwGfAdevRnmIvizlBE/AzFb0JN4LrM/J1ZLtIxIyI+BmwEVgAPAx8APgN8AjgZ+BHw+szcUYaIP6OYPfoE8LOZeetslPtoFhEvAv4PcAeTY3B+nWLcmvUySyLiORSDopsUv4x/IjOvjojTKFp0lgHfBN6amfsjYi7wEYoxhzuAKzPz3tkp/bGh7Ab9lcx8pfUye8rP/obyaR/wN5n5OxGxnB78GWZYkyRJ6mF2g0qSJPUww5okSVIPM6xJkiT1MMOaJElSDzOsSZIk9TDDmiQ9TRGxMSL+brbLIenoZFiTJEnqYYY1SceMiH
hrRHw9Im6PiA+Vm54PR8R/iYhvRMTNEXF8ee55EfHViPh2RNwQEUvL42dExOcj4lvlNaeXb78wIj4VEd+LiI/GQTZKlaQqDGuSjgkRsR54I8XmzecBLeAtwALgG5l5AfAlih0yAP4a+L8z8zkUOzWMH/8ocE1mPhd4ATC+5cz5wC8BZwOnUewHKUlPW9+hT5Gko8JLgZ8AbikbveZRbNLcBj5envM/gE9HxGJgSWZ+qTz+YeCT5V6CqzLzBoDM3AdQvt/XM3NL+fx2YA3w5fr/WpKOdoY1SceKAD6cmb92wMGI35py3sH24DtY1+b+jsct/Pkq6QixG1TSseJm4HURcQJARCyLiFMofg6+rjznzcCXM3MX8FhE/GR5/F8BX8rM3cCWiHhN+R5zImJ+V/8Wko45/uYn6ZiQmd+NiN8E/iEiGsAo8C5gD3BORNwG7KIY1wbwNuAvyjB2L/Cz5fF/BXwoIq4u3+P1XfxrSDoGRebBWvwl6egWEcOZuXC2yyFJT8VuUEmSpB5my5okSVIPs2VNkiSphxnWJEmSephhTZIkqYcZ1iRJknqYYU2SJKmHGdYkSZJ62P8PRX1dF51hMOoAAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "for key in hist_list[0].keys() :\n", " plt.figure(figsize=(10,5))\n", " plt.grid(True)\n", " plt.title('model ' + key)\n", " plt.ylabel(key)\n", " plt.xlabel('epoch')\n", " for hist in hist_list :\n", " plt.plot(hist[key])\n", " plt.show()" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 1s 4ms/step - loss: -14.9827 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1985 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2638 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 63us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2697 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 11/500\n", 
"160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3333 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2761 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1194 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3218 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2830 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2419 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3333 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2934 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3296 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2962 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2020 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2955 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 100us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 69us/step - loss: -15.3242 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2479 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2491 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3247 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - 
val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2608 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3408 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 31us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2134 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3005 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2867 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2427 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3296 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2437 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1466 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3377 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 1s 4ms/step - loss: 31.6261 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 44us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 44us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 4/500\n", 
"160/160 [==============================] - 0s 38us/step - loss: 31.4248 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 44us/step - loss: 31.5505 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.3103 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 44us/step - loss: 30.9989 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.2015 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 44us/step - loss: 30.6483 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.4972 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 44us/step - loss: 30.4114 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 38us/step - loss: 29.4291 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.3293 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 44us/step - loss: 30.0473 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 38us/step - loss: 29.8004 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 38us/step - loss: 29.8594 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 17/500\n", "160/160 
[==============================] - 0s 44us/step - loss: 26.3126 - acc: 0.0187 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 38us/step - loss: 26.4925 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 38us/step - loss: 22.7845 - acc: 0.0375 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 44us/step - loss: 25.7894 - acc: 0.0500 - val_loss: 26.6211 - val_acc: 0.0000e+00\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 44us/step - loss: 15.9974 - acc: 0.0875 - val_loss: -9.3357 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 44us/step - loss: 1.6443 - acc: 0.1812 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 38us/step - loss: -8.0601 - acc: 0.2625 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.0489 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 38us/step - loss: -12.2786 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2157 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -11.2507 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.6044 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.6257 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-14.9341 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.8125 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 38us/step - loss: -12.4539 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 44us/step - loss: -12.6345 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 38us/step - loss: -12.3553 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.3668 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.9397 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.3329 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.1371 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 38us/step - loss: -12.9373 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.7382 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 38us/step - loss: -12.3514 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.1401 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.1371 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 44us/step - loss: -12.8134 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.3306 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8420 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 44us/step - loss: -12.0512 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.9015 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.4273 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.8702 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.3223 - acc: 0.3125 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.1126 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.7757 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6329 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 44us/step - loss: -13.3234 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 38us/step - loss: -12.6473 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -13.7032 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6095 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.4257 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7634 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.1240 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.5170 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1312 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0500 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7494 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2368 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.2648 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.9686 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2293 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2929 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3393 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1912 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.9102 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1452 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8887 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1968 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0226 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1154 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1417 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1599 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3054 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2691 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1959 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3049 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2784 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3210 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1817 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2965 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2289 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2445 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3345 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3226 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2668 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1828 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3171 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1907 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3276 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0106 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2234 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 69us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1881 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3221 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3078 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3272 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1332 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2531 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1548 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2686 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2934 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3000 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2884 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1234 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3377 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2980 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1861 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3171 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 31us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1213 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9762 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 1s 5ms/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 44us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 31us/step - loss: 31.5586 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 31us/step - loss: 31.5684 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 31us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 31us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 44us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 
0.0000e+00\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.0101 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 31us/step - loss: 31.6318 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 38us/step - loss: 31.3751 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.8953 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 31us/step - loss: 30.9704 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.3977 - acc: 0.0125 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 31us/step - loss: 27.6743 - acc: 0.0125 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 38us/step - loss: 20.7181 - acc: 0.0375 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 38us/step - loss: 24.1505 - acc: 0.0500 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 31us/step - loss: 21.2019 - acc: 0.0438 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 31us/step - loss: 24.9784 - acc: 0.0625 - val_loss: 27.7589 - val_acc: 
0.0000e+00\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 38us/step - loss: 21.7482 - acc: 0.0750 - val_loss: 27.4453 - val_acc: 0.0000e+00\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 38us/step - loss: 23.2691 - acc: 0.0250 - val_loss: 26.8088 - val_acc: 0.0000e+00\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 38us/step - loss: 21.4593 - acc: 0.0625 - val_loss: -7.2199 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 31us/step - loss: 6.9909 - acc: 0.1938 - val_loss: -9.4785 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -7.3845 - acc: 0.2812 - val_loss: -11.4529 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 38us/step - loss: -10.4757 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 31us/step - loss: -12.2303 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.2235 - acc: 0.3062 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.3421 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.4557 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.1593 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8309 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -14.6151 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4015 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6809 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6512 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.7927 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6310 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8126 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2164 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1187 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0853 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9427 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4932 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.5685 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 31us/step - loss: 
-14.7846 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3244 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1431 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3100 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.4565 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6546 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8221 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9810 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.3069 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.5582 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.1567 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.5971 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2132 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1134 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9902 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7790 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9567 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2497 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.2767 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -14.7423 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.5419 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2580 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3345 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3063 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3029 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 31us/step - loss: 
-15.0830 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2944 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2326 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1527 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6636 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3261 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.8101 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0124 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3116 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3084 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.9098 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.0549 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2496 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8393 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2224 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3150 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1896 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2163 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.9427 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8797 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3268 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0726 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3345 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2105 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0948 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.1645 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.7423 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3194 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1688 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3088 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2751 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2571 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2820 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1458 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1476 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3027 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0005 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2958 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1481 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2625 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3443 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2110 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.2516 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3333 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2982 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3005 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3001 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3272 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3244 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2916 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2673 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 100us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 44us/step 
- loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1075 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 63us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: 
-11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3285 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0602 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", 
"160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 
44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3285 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - 
val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2792 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3058 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 
356/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0980 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2940 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0266 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.9768 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.2256 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3099 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3315 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2447 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1242 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.1302 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3222 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 1s 5ms/step - loss: 31.3758 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 2/500\n", 
"160/160 [==============================] - 0s 44us/step - loss: 31.2901 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.9851 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.7651 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 44us/step - loss: 29.8423 - acc: 0.0125 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 44us/step - loss: 31.1227 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.5425 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 8/500\n", "160/160 [==============================] - 0s 44us/step - loss: 30.4504 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 38us/step - loss: 29.7347 - acc: 0.0125 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.1368 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 38us/step - loss: 30.3709 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 44us/step - loss: 31.0256 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 50us/step - loss: 30.1247 - acc: 0.0187 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 44us/step - loss: 29.8432 - acc: 0.0187 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 15/500\n", "160/160 
[==============================] - 0s 38us/step - loss: 30.3207 - acc: 0.0000e+00 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 44us/step - loss: 28.6694 - acc: 0.0062 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 50us/step - loss: 26.9093 - acc: 0.0500 - val_loss: 27.7589 - val_acc: 0.0000e+00\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 38us/step - loss: 19.9009 - acc: 0.0687 - val_loss: 27.6591 - val_acc: 0.0000e+00\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 44us/step - loss: 10.9593 - acc: 0.1313 - val_loss: -6.5729 - val_acc: 0.3333\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 38us/step - loss: -4.8527 - acc: 0.2750 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.3717 - acc: 0.3188 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2622 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 38us/step - loss: -13.7486 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 31us/step - loss: -14.2833 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1029 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.6998 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3071 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 50us/step - 
loss: -15.1595 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3088 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1038 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3441 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8387 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.9648 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9257 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1123 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2563 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2563 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2177 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2712 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2934 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2459 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0294 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9858 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2819 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1420 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2935 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1591 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.7168 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0252 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2112 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2459 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2988 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3261 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2765 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.3321 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1971 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2667 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2256 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3221 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3205 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3315 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8395 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3333 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.8387 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3252 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8020 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3345 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1682 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2577 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3442 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3160 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3072 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1849 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3308 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3062 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1452 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1989 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2925 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2188 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2700 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0424 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2432 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1454 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0969 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3033 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 63us/step - loss: -14.8431 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2884 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 56us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.1455 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2834 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3051 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 50us/step - loss: -14.7423 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2433 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 44us/step - loss: -14.8499 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0449 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.1457 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2605 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.0070 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2190 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3125 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.2438 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1369 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2435 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 50us/step - 
loss: -15.3411 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3071 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1379 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1300 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1475 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3005 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2437 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2635 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1558 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2437 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2653 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 50us/step - 
loss: -15.2806 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2842 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.0166 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3137 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3326 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1465 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3126 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3268 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2042 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3327 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.2440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1467 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3377 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2034 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2441 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3304 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1071 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2442 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1351 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.1473 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1473 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3413 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3402 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1475 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1091 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1477 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1478 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1222 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3415 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3415 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1480 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2448 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.2418 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1482 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2449 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1483 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3416 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1484 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2450 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2451 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1486 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2452 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0461 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.0497 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9505 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 106us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2341 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3417 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 69us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 44us/step 
- loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1494 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1466 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3417 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: 
-11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.2461 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3418 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", 
"160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3390 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1498 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 
44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2458 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1540 - acc: 0.3312 - 
val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3430 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2517 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3443 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 
495/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1816 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Train on 160 samples, validate on 18 samples\n", "Epoch 1/500\n", "160/160 [==============================] - 1s 5ms/step - loss: -14.6709 - acc: 0.3250 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 2/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2582 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 3/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3359 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 4/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 5/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1663 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 6/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1411 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 7/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2210 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 8/500\n", 
"160/160 [==============================] - 0s 44us/step - loss: -15.3403 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 9/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 10/500\n", "160/160 [==============================] - 0s 38us/step - loss: -14.9306 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 11/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2408 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 12/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 13/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 14/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3035 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 15/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3403 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 16/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3403 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 17/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 18/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 19/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 20/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 21/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.1154 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 22/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 23/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3403 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 24/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 25/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2998 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 26/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 27/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 28/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 29/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 30/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 31/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3403 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 32/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2409 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 33/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 34/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 35/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 36/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.1414 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 37/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 38/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2288 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 39/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 40/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 41/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 42/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 43/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 44/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 45/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.1453 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 46/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 47/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2628 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 48/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 49/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 50/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 51/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 52/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 53/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 54/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 55/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 56/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 57/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 58/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 59/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 60/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 61/500\n", "160/160 [==============================] - 0s 44us/step - loss: 
-15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 62/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3440 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 63/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 64/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 65/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3403 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 66/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 67/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 68/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 69/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2084 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 70/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 71/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 72/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 73/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2154 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 74/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 
0.3333\n", "Epoch 75/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 76/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 77/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 78/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 79/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 80/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 81/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 82/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 83/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 84/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 85/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 86/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 87/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 88/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3166 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 89/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 90/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 91/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3182 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 92/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 93/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 94/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 95/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 96/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2756 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 97/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 98/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3258 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 99/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 100/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 101/500\n", "160/160 [==============================] - 0s 38us/step - loss: 
-15.2668 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 102/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 103/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 104/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 105/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 106/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 107/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 108/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 109/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2209 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 110/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 111/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 112/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 113/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 114/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - 
val_acc: 0.3333\n", "Epoch 115/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 116/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 117/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 118/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 119/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 120/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 121/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 122/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 123/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 124/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 125/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 126/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 127/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 128/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 129/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 130/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 131/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2420 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 132/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 133/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 134/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 135/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 136/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 137/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 138/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 139/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 140/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 141/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 142/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 143/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 144/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 145/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 146/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 147/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 148/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 149/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 150/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 151/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 152/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 153/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 154/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 155/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 156/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 157/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 158/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 159/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 160/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 161/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 162/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 163/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 164/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 165/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 166/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 167/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 168/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 169/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 170/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 171/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 172/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 173/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 174/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 175/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 176/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 177/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 178/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 179/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 180/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 181/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 182/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 183/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 184/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 185/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 186/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 187/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 188/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 189/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 190/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 191/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 192/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 193/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 194/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 195/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 196/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 197/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 198/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 199/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 200/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2021 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 201/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 202/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 203/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 204/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 205/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 206/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 207/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 208/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 209/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 210/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 211/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 212/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 213/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 214/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 215/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 216/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 217/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 218/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 219/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 220/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 221/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 222/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 223/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 224/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 225/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 226/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 227/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 228/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 229/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 230/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 231/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 232/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 233/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 234/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1958 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 235/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 236/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 237/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 238/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 239/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 240/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 241/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 242/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 243/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 244/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 245/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 246/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 247/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 248/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 249/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2672 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 250/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 251/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 252/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.0200 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 253/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 254/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 255/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 256/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 257/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 258/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 259/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 260/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 261/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 262/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 263/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 264/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 265/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 266/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 267/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 268/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 269/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 270/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 271/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 272/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 273/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 274/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 275/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 276/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 277/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 278/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 279/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 280/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 281/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 282/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 283/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 284/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 285/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 286/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 287/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 288/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 289/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 290/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 291/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 292/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 293/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 294/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 295/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 296/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 297/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 298/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 299/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 300/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 301/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.2831 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 302/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 303/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 304/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3064 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 305/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 306/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 307/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 308/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 309/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 310/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 311/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 312/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.2661 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 313/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 314/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 315/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 316/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 317/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3377 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 318/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 319/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 320/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 321/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 322/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 323/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 324/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 325/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 326/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 327/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 328/500\n", "160/160 
[==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 329/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 330/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 331/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 332/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 333/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 334/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 335/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 336/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 337/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 338/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 339/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 340/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 341/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 342/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 343/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 344/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 345/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 346/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 347/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 348/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 349/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 350/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 351/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 352/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 353/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 354/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 355/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 356/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 357/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 358/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 359/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 360/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2434 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 361/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 362/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 363/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 364/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 365/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 366/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 367/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 368/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 369/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 370/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 371/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 372/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 373/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 374/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 375/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 376/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3199 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 377/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 378/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 379/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 380/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 381/500\n", "160/160 [==============================] - 0s 44us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 382/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 383/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 384/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 385/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 386/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 387/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 388/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 389/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 390/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 391/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 392/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 393/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 394/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 395/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 396/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 397/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 398/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 399/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 400/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 401/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 402/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 403/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 404/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.2438 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 405/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 406/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 407/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 408/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 409/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 410/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 411/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 412/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 413/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 414/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 415/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 416/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 417/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 418/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 419/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.1468 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 420/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 421/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 422/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 423/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 424/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 425/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 426/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 427/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 428/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 429/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 430/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 431/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 432/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 433/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 434/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 435/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 436/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 437/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 438/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 439/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 440/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 441/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 442/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 443/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 444/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 445/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 446/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 447/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 448/500\n", "160/160 
[==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 449/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 450/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 451/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 452/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 453/500\n", "160/160 [==============================] - 0s 56us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 454/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 455/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 456/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 457/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 458/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 459/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 460/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 461/500\n", "160/160 [==============================] - 0s 38us/step - 
loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 462/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 463/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 464/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 465/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 466/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 467/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 468/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 469/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 470/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 471/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 472/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 473/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 474/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 
- val_acc: 0.3333\n", "Epoch 475/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 476/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 477/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 478/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 479/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 480/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 481/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 482/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 483/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 484/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 485/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 486/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 487/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 488/500\n", "160/160 
[==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 489/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 490/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 491/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 492/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 493/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 494/500\n", "160/160 [==============================] - 0s 50us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 495/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 496/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 497/500\n", "160/160 [==============================] - 0s 44us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 498/500\n", "160/160 [==============================] - 0s 31us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 499/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n", "Epoch 500/500\n", "160/160 [==============================] - 0s 38us/step - loss: -15.3445 - acc: 0.3312 - val_loss: -11.5139 - val_acc: 0.3333\n" ] } ], "source": [ "hist_list = []\n", "\n", "for noise_koef in 
np.linspace(0,1,num=5) :\n", " noise = np.random.normal(loc=0.5,scale=0.16,size=x_train.shape)\n", " x_part_noise = x_train + noise_koef * noise\n", " model = Sequential()\n", " hist = neural_net (model)\n", " hist_list.append(hist.history)" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmoAAAFNCAYAAACwk0NsAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XuYZHV95/H3t6aZHmRgZhBohosMRHQlCaDp4BjN2kYTLzFisrrRNYTEZEfz6EZ31Q3RZNfEsI+7Gy/ZjTFOok+IQdGoRGI0imiDrgoCIoLIRa4zXEYYYKYHGGamv/tHnequrq7qKeiqc47d79fzzNNV51TV+VV/cfzM73YiM5EkSVL9NKpugCRJkrozqEmSJNWUQU2SJKmmDGqSJEk1ZVCTJEmqKYOaJElSTRnUJEmSasqgJmlgIuLvIuLP+nztrRHxgkVe7wsRceYg2rPIdvx8RFw/7Ov0q5/vHRETEbGlrDZJenxGqm6AJD1emfni1uOI+C3gdzPzORW042vAU8u+rqSlzx41SZKkmjKoSctMMeT4toi4OiJ2RcSHI2KsGEbcGRFfjoh1ba9/WURcGxEPRMRkRDyt7dzTI+LK4n2fAFZ1XOulEXFV8d5vRMTJfbTv+OL1jeL530bEtrbz/xARby4eT0bE7xZt+mvgWRExFREPtH3kuoj4l6KNl0bET/S47oaIyIg4MyJuj4h7I+IdbedHI+L9EXFn8ef9ETFanJszjBgRfxARW4trXh8Rzy+ONyLirIj4YUTcFxGfjIhDe7Tnuoh4advzkaJNzyie/2NE3B0RD0bEJRHxk/v73S4kIp5W/D4fKOr9srZzL4mI7xffZ2tEvLU4flhEfK54z/aI+FqrbpIGw/9BScvTvwN+EXgK8CvAF4C3A4fR/Hvh9wEi4inAx4E3A4cDnwf+OSJWRsRK4J+AjwKHAv9YfC7Fe58BfAR4HfBE4EPABa1w00tm3gLsAJ5eHPp5YKotIP5b4OKO91wHvB74Zmauzsy1badfDfwJsA64CTh7P7+b59Acxnw+8N/arvsOYCNwKnAKcBrwR51vjoinAm8EfjYzDwZeCNxanP594OXAc4GjgPuBD/Rox8eLtre8ELg3M68snn8BOBE4ArgSOHc/36uniDgA+GfgS8Xn/Sfg3OK7AHwYeF3xfX4K+Epx/C3AFpr/bYzR/G/IG0hLA2RQk5an/5uZ92TmVuBrwKWZ+Z3M3A2cz2xI+nXgXzLzwszcA/w5cCDwczRDywHA+zNzT2Z+Cvh22zX+I/ChzLw0M/dl5jnA7uJ9+3Mx8NyIOLJ4/qni+fHAIcB3H8N3/UxmXpaZe2mGmVP38/o/ycyHM/O7xXVOKY6/BvjTzNyWmT+iGf7O6PL+fcAocFJEHJCZt2bmD4tzrwPekZlbit/1O4FXRES3+cIfA14WEU8onv+H4hgAmfmRzNzZ9jmnRMSa/Xy3XjYCq4F3Z+ajmfkV4HPMBsU9xfc5JDPvbwuLe4D1wHHFfwNfy0yDmjRABjVpebqn7fHDXZ6vLh4fBdzWOpGZ08AdwNHFua0d/8d8W9vj44C3FMNiDxTDkccW79ufi4EJmr1nlwCTNHuhngt8rWhHv+5ue/wQs9/tsb5+zu+i
eDzvu2TmTTR7IN8JbIuI8yKi9brjgPPbfh/X0Qx2Yz0+5zrgV4qw9jKKoBYRKyLi3cUQ6g5me+wO28936+Uo4I6O3+ttNOsMzZ7SlwC3RcTFEfGs4vj/ptlL+aWIuDkiznqc15fUg0FN0kLupBkuAIiIoBm2tgJ3AUcXx1qe1Pb4DuDszFzb9ucJmfnxPq57Mc0hz4ni8deBZ9MMahf3eM+we3Lm/C5oftc7uzYk82PF6tPjinb9z+LUHcCLO34nq4qezW5aw5+nA98vwhs0e9dOB14ArAE2FMej8wP6dCdwbMf8sifRrDOZ+e3MPJ3msOg/AZ8sju/MzLdk5gk0h9D/S2s+nqTBMKhJWsgngV+OiOcX85jeQnP48hvAN4G9wO8XE91/jea8rZa/AV4fEc+MpoMi4pcj4uD9XTQzb6TZs/cbwCWZuYNmr9+/o3dQuwc4ppg7NwwfB/4oIg6PiMOA/wb8Q+eLIuKpEfELxVy8R2h+j33F6b8Gzo6I44rXHh4Rpy9wzfOAXwJ+j7ZhT+BgmnW4D3gC8D8W9c3gUmAX8F8j4oCImKAZvM4r5iO+JiLWFMPfO1rfJ5qLRZ5chPXW8X3dLyHp8TCoSeopM6+nGZb+L3Avzf/z/pViHtOjwK8Bv0VzUvyvA59pe+/lNOep/WVx/qbitf26GLgvM29vex7Ad3q8/ivAtcDdEXHvY7hOv/4MuBy4GvgezQn83TaVHQXeTfP3dTfNXqi3F+f+AriA5lDhTuBbwDN7XTAz76IZiH8O+ETbqb+nOTS5Ffh+8TmPW1HLlwEvLtr9V8BvZuYPipecAdxaDLO+nuZ/E9BczPBlYKpo519l5uRi2iJprnDepyRJUj3ZoyZJklRTBjVJWqIi4u3R3AC4888Xqm6bpP449ClJklRT9qhJkiTVVLfdsH8sHXbYYblhw4ahXmPXrl0cdNBBQ72GHjvrUk/WpX6sST1Zl3oadl2uuOKKezPz8P29bskEtQ0bNnD55ZcP9RqTk5NMTEwM9Rp67KxLPVmX+rEm9WRd6mnYdYmI2/b/Koc+JUmSasugJkmSVFMGNUmSpJoyqEmSJNWUQU2SJKmmDGqSJEk1ZVCTJEmqKYOaJElSTRnUJEmSamrJ3Jlg2O787pfZfsH/5qLJDzzuz8gGTD3tcPauOxDGfhpGVxMEzzn6OYwdNDbA1kqSpKXAoNanO779RZ72+dsX/TkPr4Q/+/UV3HjMl2aOPfPIZ/K3L/zbRX+2JElaWgxqfTr5lW/j4iccyyknn/y4P2N6xxS89m285wcNnnD8U+H0v+Sfb/5n/uLKv+Cyuy7jtPWnDbDFkiTpx51BrU8HrjmCVUedzPqfmljU5+zgbaw+8Ikcfv8dcNAYZ5x0Bh/9/kf51A2fMqhJkqQ5XExQtkaDXLUG7r8FpqcZXTHKxvUbuezuy8jMqlsnSZJqxKBWtgg4cB3sfQR23gnAaUeexn2P3MctD95SceMkSVKdGNTKFgGr1jYfb78ZaAY1gMvuvqyqVkmSpBoyqJUtAkbXNB/f90MAjjn4GNaOruWG+2+osGGSJKluDGolC4CVq6ExAg80t/uICA5oHMB0TlfaNkmSVC8GtbI1il95YwSm984cjgiDmiRJmsOgVraIYnVnALOrPIMgcdWnJEmaZVArW0Qzn0VA23YcMRPgJEmSmgxqZYuA6WmIxtygZo+aJEnqYFArWcz0pM0f+pQkSWpnUCtbFAGty9CniwkkSVK7SoNaRBwbEV+NiOsi4tqIeFNx/J0RsTUirir+vKTKdg6UiwkkSVKfqr4p+17gLZl5ZUQcDFwRERcW596XmX9eYduGo7WYoIGLCSRJ0oIqDWqZeRdwV/F4Z0RcBxxdZZuGrrWYYIU9apIkaWG1maMWERuApwOXFofeGBFXR8RHImJdZQ0bsJnFBB1z1BrRwJwmSZLaRR2G2yJiNXAxcHZmfiYixoB7aUaXdwHrM/O1Xd63CdgEMDY29jPnnXfe
UNs5NTXF6tWrF/UZh7/1bTzyM8/gp5/0eX50+LO58SmvB+BdW9/F0SuP5rWHz/ua2o9B1EWDZ13qx5rUk3Wpp2HX5XnPe94VmTm+v9dVPUeNiDgA+DRwbmZ+BiAz72k7/zfA57q9NzM3A5sBxsfHc2JiYqhtnZycZLHXuGHlSg496ihWHrCSo49az9HF5733n97L4WsPX/TnL0eDqIsGz7rUjzWpJ+tST3WpS9WrPgP4MHBdZr637fj6tpf9KnBN2W0bmgiY7rI9h3PUJElSh6p71J4NnAF8LyKuKo69HXh1RJxKc+jzVuB11TRvCNzwVpIk9anqVZ9fh64J5fNlt6UsvRYTuOGtJEnqVJtVn8tG684EnT1q7qMmSZI6GNTK1gpk3pRdkiTth0GtbC4mkCRJfTKola3XYoLWraUkSZIKBrWS9VxMQDCNiwkkSdIsg1rZFuhRczGBJElqZ1ArW2vVZ+AcNUmStCCDWtkiyOnuG94a1CRJUjuDWtkarTlqc7fnaETDxQSSJGkOg1rJgvbFBNPtJ7wzgSRJmsOgVrYF7vXp0KckSWpnUCtbBJnTbngrSZL2y6BWtpmNbd3wVpIkLcygVraGG95KkqT+GNRKFm54K0mS+mRQK1379hzTbUedoyZJkuYyqJWt12ICe9QkSVIHg1rZ5iwmaDvc8VySJMmgVrZGo/tiggg3vJUkSXMY1Mo2czN2N7yVJEkLM6iVbPYWUrjhrSRJWpBBrWwziwkauOGtJElaSKVBLSKOjYivRsR1EXFtRLypOH5oRFwYETcWP9dV2c6Bat9HrX17jrBHTZIkzVV1j9pe4C2Z+TRgI/CGiDgJOAu4KDNPBC4qni8NjUaz56zbnQlcTCBJktpUGtQy867MvLJ4vBO4DjgaOB04p3jZOcDLq2nhELiYQJIk9anqHrUZEbEBeDpwKTCWmXdBM8wBR1TXssGaXUzghreSJGlhI1U3ACAiVgOfBt6cmTsi+tv8NSI2AZsAxsbGmJycHFobAaamphZ9jXU7d5K7d7N2x072PrSPq4vP237fdnbu2zn077AUDaIuGjzrUj/WpJ6sSz3VpS6VB7WIOIBmSDs3Mz9THL4nItZn5l0RsR7Y1u29mbkZ2AwwPj6eExMTQ23r5OQki73GrR/aTKwa5ZA1O2Hl6pnP+/RXPs2eqT2L/vzlaBB10eBZl/qxJvVkXeqpLnWpetVnAB8GrsvM97adugA4s3h8JvDZsts2NDOLCTq253COmiRJ6lB1j9qzgTOA70XEVcWxtwPvBj4ZEb8D3A68sqL2DV77YoL27TkMapIkqUOlQS0zv07n3clnPb/MtpQlCJiedjGBJEnar9qs+lw22je8bdMISyFJkuYyHZSt0WgOcXb0qAFueCtJkuYwqJVt5p6ebngrSZIWZlArW2sxgXPUJEnSfhjUShbRvphg7qpPSZKkdga10kX3e32GQ5+SJGkug1rZeiwmCMLFBJIkaQ6DWtl6LSZwjpokSepgUCtbr8UErvqUJEkdDGplay0m6LI9hyRJUjuDWsmitZggGvO253COmiRJamdQK9ucxQTelF2SJPVmUCubiwkkSVKfDGplm7PhrYsJJElSbwa1srVWfXb0qDWigTlNkiS1M6iVLFo9aTNDoLOmcTGBJEmaZVArWzRoJjTnqEmSpIUZ1MrWCmTOUZMkSfthUCtbBEx3355DkiSpnUGtbD0WEzj0KUmSOhnUShatOxJ0Gfp0MYEkSWpnUCvbTECzR02SJC3MoFa2iLZbSLmYQJIk9VZpUIuIj0TEtoi4pu3YOyNia0RcVfx5SZVtHLjWYoIuPWrmNEmS1K7qHrW/A17U5fj7MvPU4s/nS27TcLUWE0Rj3qpP56hJkqR2lQa1zLwE2F5lG8rWczGBc9QkSVKHqnvUenljRFxdDI2uq7oxA9VrMYFz1CRJUoeRqhvQxQeBd9FMMe8C3gO8ttsLI2ITsAlgbGyMycnJoTZsampq0dc4
5J57WPnII9y9bRtrHn6YS4vP23L/Fvbt2zf077AUDaIuGjzrUj/WpJ6sSz3VpS61C2qZeU/rcUT8DfC5BV67GdgMMD4+nhMTE0Nt2+TkJIu9xp1fupBdt9zCkWNHwu6bZz7vysuv5Bs/+MaiP385GkRdNHjWpX6sST1Zl3qqS11qN/QZEevbnv4qcE2v1/5YmllM0LHKM2A6XUwgSZJmVdqjFhEfByaAwyJiC/DfgYmIOJVmjLkVeF1lDRyCaDRmV306R02SJC2g0qCWma/ucvjDpTekVG2LCTq25zCoSZKkdrUb+lzyWttwzNycvXXYDW8lSdJcBrWyuT2HJEnqk0GtbHMWE8ztUXMxgSRJamdQK9nMYgJ71CRJ0n4Y1EoXPXvUJEmS2hnUyjazmGD+9hyA9/uUJEkzDGplix7bcxQ9as5TkyRJLQa1skWPoc9Wj5rz1CRJUsGgVrJo9N6eAwxqkiRplkGtdPtZTGBOkyRJBYNa2VqLCTp61BrRLIU9apIkqcWgVrY5c9Tmn3YxgSRJajGola01Ry0a827KDvaoSZKkWQa1kkWve32G+6hJkqS5DGpli4Dp6Z7bc0iSJLUY1EoXbf1oLiaQJEm9GdTK1mPD2xYXE0iSpBaDWtnc8FaSJPXJoFa2XreQcjGBJEnqYFArWcwsJui+PYckSVKLQa100fZzfo+ac9QkSVJLX0EtIl4ZEQcXj/8oIj4TEc8YbtOWqNYQJ3TdnsM5apIkqaXfHrU/zsydEfEc4IXAOcAHB9GAiPhIRGyLiGvajh0aERdGxI3Fz3WDuFYtNFo3X3fDW0mStLB+g9q+4ucvAx/MzM8CKwfUhr8DXtRx7Czgosw8EbioeL40FIGsOfJpj5okSeqt36C2NSI+BPx74PMRMfoY3rugzLwE2N5x+HSavXYUP18+iGvVQavnrLNHrbXhrSRJUku/6eDfA18EXpSZDwCHAm8bWqtgLDPvAih+HjHEa5Ur2hcTMG/TWxcTSJKklpE+X7ce+JfM3B0RE8DJwN8PrVV9iohNwCaAsbExJicnh3q9qampRV/jCbfcwsHALbfdzgnA5ORXIRrcuPNGAL7xjW+wdmTtotu6nAyiLho861I/1qSerEs91aUu/Qa1TwPjEfFk4MPABcDHgJcMqV33RMT6zLwrItYD27q9KDM3A5sBxsfHc2JiYkjNaZqcnGSx17j3hhv5EbDhuONgC0w897nQWMG9N9wL34SNz9rIkQcdOZD2LheDqIsGz7rUjzWpJ+tST3WpS79Dn9OZuRf4NeD9mfmfafayDcsFwJnF4zOBzw7xWuWKjgfF0Kcb3kqSpE79BrU9EfFq4DeBzxXHDhhEAyLi48A3gadGxJaI+B3g3cAvRsSNwC8Wz5eE2cUErSPNB63FBG7PIUmSWvod+vxt4PXA2Zl5S0QcD/zDIBqQma/ucer5g/j82omOnrPOxQS4mECSJDX11aOWmd8H3gp8LyJ+CtiSmUuml6tUrZ6zmQPF0Kcb3kqSpA599agVKz3PAW6lObnq2Ig4s9gDTY/FTI9akZE75qi54a0kSWrpd+jzPcAvZeb1ABHxFODjwM8Mq2FLVueagWLftPlz1yRJ0nLX72KCA1ohDSAzb2BAiwmWm7l3JoCZoc8iwTlHTZIktfTbo3Z5RHwY+Gjx/DXAFcNp0hLXYzGBc9QkSVKnfoPa7wFvAH6f5uDdJcBfDatRS9rMYoLuPWrOUZMkSS19BbXM3A28t/ijxejVo2ZQkyRJHRYMahHxPRaY3p6ZJw+8RUvd3I40Oje8NadJkqSW/fWovbSUViwn87bnKBYPFIen08UEkiSpacGglpm39fMhEfHNzHzWYJq0tIVDn5IkqU/9bs+xP6sG9DlL37zFBMVhg5okSeowqKBmuuiX23NIkqQ+DSqoqV+9FhNYCkmS1GFQ6aDzxkjqZaZHrXWHgpzz1MUEkiSpZVBB7YwBfc6SN28xgRveSpKkHva3j9pOus8/
CyAz8xCaD64ZQtuWpujIxq2bshvUJElSh/1tz3FwWQ1ZNlqLBjqGPmdv1l5FoyRJUh31e69PACLiCNq24sjM2wfeoqVuXiCbe2cC56hJkqSWvuaoRcTLIuJG4BbgYuBW4AtDbNfSNW+K2twuNIc+JUlSS7+LCd4FbARuyMzjgecD/29orVrCotG6ddTcfTqcoyZJkjr1G9T2ZOZ9QCMiGpn5VeDUIbZr6XLDW0mS1Kd+56g9EBGrga8B50bENmDv8Jq1lHUsJnDDW0mS1EO/6eASYC3wJuBfgR8CvzKsRi1pnYsJWosH3PBWkiR16LdHLYAvAtuB84BPFEOhQxURtwI7gX3A3swcH/Y1h67jxgQzQ5/OUZMkSR366lHLzD/JzJ8E3gAcBVwcEV8eastmPS8zT10SIY0FFhM4R02SJHV4rBOjtgF3A/cBRwy+OctAr8UE9qhJkqQO/e6j9nsRMQlcBBwG/MfMPHmYDSsk8KWIuCIiNpVwvRK0es7mHm103lpKkiQte/3OUTsOeHNmXjXMxnTx7My8s7gjwoUR8YPMvKR1sghvmwDGxsaYnJwcamOmpqYWfY3R71/LWuDmm2/mJODSS7/Fw0+4gxsfuRGAK79zJbsO3LXoti4ng6iLBs+61I81qSfrUk91qUtfQS0zzxp2Q3pc987i57aIOB84jeYK1Nb5zcBmgPHx8ZyYmBhqeyYnJ1nsNXY8+ihbgRNO+Am4Ap75sz8Lhz+F1Xevhi/CKaeewsb1GwfS3uViEHXR4FmX+rEm9WRd6qkudanteFtEHBQRB7ceA78EXFNtqxZvdjFB64iLCSRJUneP6absJRsDzi8CzAjwscz812qbNAAuJpAkSX2qbVDLzJuBU6pux8BF52KC4s4ExWICe9QkSVJLbYc+l67uPWozT+1RkyRJBYNa2aLzgHPUJElSdwa1ks0sJmj96p2jJkmSejColW3mpuxFICtuwu6Gt5IkqZPpoGw9FhO0etSmi+AmSZJkUCtd0aPW2bM289ShT0mS1GRQK9tMQGsdcI6aJEnqzqBWsmi44a0kSeqPQa1sPXrUZhYTmNMkSVLBoFa21mICOgNb0zQuJpAkSU0GtdJ1Dn02g5kb3kqSpE4GtbI1XEwgSZL6Y1ArWavnbCapuZhAkiT1YFAr234WEzj0KUmSWgxqZZtZTFBww1tJktSDQa1sbngrSZL6ZFAr28wctVZgK1Z9GtQkSVIHg1rJZhcTFNI5apIkqTuDWtlaQW1mX1uHPiVJUncGtbK5mECSJPXJoFa2zqFPe9QkSVIPBrWyRcevvLXhrbeQkiRJHWob1CLiRRFxfUTcFBFnVd2egem4McHMhrdFKexRkyRJLbUMahGxAvgA8GLgJODVEXFSta0ajOjcR82bskuSpB5qGdSA04CbMvPmzHwUOA84veI2Dca8xQRzT9ujJkmSWuoa1I4G7mh7vqU49uPPOxNIkqQ+jVTdgB46l0bCvL4niIhNwCaAsbExJicnh9qoqampRV9jZOtWngjcfMvNnApcffV32b71AB7c+yAA119/PZN3Lu4ay80g6qLBsy71Y03qybrUU13qUtegtgU4tu35McCdnS/KzM3AZoDx8fGcmJgYaqMmJydZ7DUeueEGbgFO2HAC3Agn//RPw1MmuPfhe+GTcOKJJzLxbxZ3jeVmEHXR4FmX+rEm9WRd6qkudanr0Oe3gRMj4viIWAm8Crig4jYNxOwtpIoOwo7FAw59SpKkllr2qGXm3oh4I/BFYAXwkcy8tuJmDUbnTdmdoyZJknqoZVADyMzPA5+vuh0D12jdfL147vYckiSph7oOfS5hHesk0g1vJUlSdwa1srVy2vRMl1rzsD1qkiSpg0GtZNF5U3YXE0iSpB4MamXrDGr2qEmSpB4MamWbt5jAVZ+SJKk7g1rZ5g19Nld9NqIV4AxqkiSpyaBWOu/1KUmS+mNQK9n8KWouJpAkSd0Z1MoWnT1qrcMuJpAkSXMZ
1MrWWkzQet7a8Dbc8FaSJM1lUCtbq0etc8Nb7FGTJElzGdRK133DWxcTSJKkTga1ss1bTDA957g9apIkqcWgVrKYt5jAHjVJktSdQa1sLiaQJEl9MqiVzcUEkiSpTwa1ss27hVTHTdntUZMkSQWDWlVybo/a7GGDmiRJajKolWxmMcHsMs/Zc4Q9apIkaYZBrWyNjl95a3sOmgsK7FGTJEktBrWyteaiTc8f+rRHTZIktTOola3HYoLmOeeoSZKkWQa10nXOTbNHTZIkdVe7oBYR74yIrRFxVfHnJVW3aZCi0btHrRENg5okSZoxUnUDenhfZv551Y0Yinlz1NpOEQ59SpKkGbXrUVvyFpijFmFQkyRJs+rao/bGiPhN4HLgLZl5f7cXRcQmYBPA2NgYk5OTQ23U1NTU4q+xZw9jwK233cZhB8GNN17P1oebn7lv3z5uv+N2Jnct8hrLzEDqooGzLvVjTerJutRTXepSSVCLiC8DR3Y59Q7gg8C7aM6yfxfwHuC13T4nMzcDmwHGx8dzYmJiGM2dMTk5yWKvMf3oo1wPbHjSk+A+OPHJT+bEjc3PHDl3hGOOPYaJn13cNZabQdRFg2dd6sea1JN1qae61KWSoJaZL+jndRHxN8DnhtycUsUCQ59ueCtJktrVbo5aRKxve/qrwDVVtWUo3PBWkiT1qY5z1P5XRJxKM8HcCryu2uYMmBveSpKkPtUuqGXmGVW3YahaQW0mj9mjJkmSuqvd0OeSN69HbbrtlNtzSJKkWQa1kkVnj1r7YgK8M4EkSZplUKtCBDnTk+aGt5IkqTuDWhUiuvaoAfaoSZKkGQa1KqxYAdNdetRcTCBJktoY1CrQWLmS3LOn+cQNbyVJUg8GtQrE6CjTu3cXz+xRkyRJ3RnUKhCjo+TuR5tP3PBWkiT1YFCrQGN0lHx0flAbiRH2Tu+tqFWSJKluDGoViNFRph+ZP/R58MqD2fnozmoaJUmSasegVoFY1b1Hbe3oWh589MGKWiVJkurGoFaBxspR8pFHimezQe2Q0UN4YPcD1TRKkiTVjkGtAjE6ynSXHrU1o2t4cLc9apIkqcmgVoFYNUru3g3RoL1Hbe3oWnbs3uHKT0mSBBjUKjE79Bkwc89PWLNyDXtzL7v27KqucZIkqTYMahWYGfqMmDf0CbigQJIkAQa1SswMfRK0D33OBDXnqUmSJAxqlZgZ+uzRo+bKT0mSBAa1Ssyu+uzoUVvZDGo7du+opmGSJKlWDGoViFWjsHcvmXN71NauWgs49ClJkpoMahVojI4CkNONOas+D1l5COBiAkmS1GRQq0CMrgJgenrFnB61lStWcuDIgc5RkyRJQIVBLSJeGRHXRsR0RIx3nPvDiLgpIq6PiBdW1cZhidGVAGTjIOiYj7Z2dK1Dn5IkCYCRCq99DfBrwIfaD0bEScCrgJ8EjgK+HBFPycx95TdxOGaGPlcIvwT2AAAJa0lEQVSugYfvn3Nu7eha7n/k/m5vkyRJy0xlPWqZeV1mXt/l1OnAeZm5OzNvAW4CTiu3dcM1M/Q5sgYe2j7n3KGrDjWoSZIkoJ5z1I4G7mh7vqU4tmTMDH2uWA0Pzw1q61at4/7dBjVJkjTkoc+I+DJwZJdT78jMz/Z6W5djXe9SHhGbgE0AY2NjTE5OPp5m9m1qamog11j5gx+wDvjRA7s5ctXdfKPtM3dt38WPdv1o6N9lKRlUXTRY1qV+rEk9WZd6qktdhhrUMvMFj+NtW4Bj254fA9zZ4/M3A5sBxsfHc2Ji4nFcrn+Tk5MM4hoPHbKG24BDj3gSK++/lInnPrd5lwLgpu/dxFev/Cobn7ORVSOrFn2t5WBQddFgWZf6sSb1ZF3qqS51qePQ5wXAqyJiNCKOB04ELqu4TQM1M/QZB8L0Xti9c+bc2tHmprfOU5MkSVVuz/GrEbEFeBbwLxHxRYDMvBb4JPB94F+BNyylFZ/QtuqzcWDzQNs8tXWr1gGwfff2ee+T
JEnLS2Xbc2Tm+cD5Pc6dDZxdbovKE6uKVZ/Z7Fnjoe2wbgPQXPUJ9qhJkqR6Dn0uebGyNfTZ7Fmb06M22uxRM6hJkiSDWgVmhj7zgOaBh2ZD2czQ5yMOfUqStNwZ1CowM/Q5vaJ5oK1H7ZCVhzASI97vU5IkEZldtyj7sTM+Pp6XX3750D7/z9/9LXbdPcXIyACm9WWyYev17DrwEA5duZ1dcRAPxUEzp3eseJAGDVZNj9J9Wzm1m85pGuG/OerGutSPNakn61I/uW6Ek39xbKjbc0TEFZk5vr/XVXmvz+Urgn2NEQ56eAe7Hx5hhN0cwu6Z04cAsA/YU1EDJUlavh7acx8wVnUzAINa39561saBbn63996fYs899zRvyj71o3nnH9izk/sf3dnlnep08y23cMLxx1fdDHWwLvVjTerJutTPE49/Ltdt21t1MwCDWmVGDjuMkcMO63n+QGB9ec35sbZtcpKTarB7tOayLvVjTerJutTTddsmq24C4GICSZKk2jKoSZIk1ZRBTZIkqaYMapIkSTVlUJMkSaopg5okSVJNGdQkSZJqyqAmSZJUUwY1SZKkmjKoSZIk1VRkZtVtGIiI+BFw25Avcxhw75CvocfOutSTdakfa1JP1qWehl2X4zLz8P29aMkEtTJExOWZOV51OzSXdakn61I/1qSerEs91aUuDn1KkiTVlEFNkiSppgxqj83mqhugrqxLPVmX+rEm9WRd6qkWdXGOmiRJUk3ZoyZJklRTBrU+RcSLIuL6iLgpIs6quj3LSUR8JCK2RcQ1bccOjYgLI+LG4ue64nhExP8p6nR1RDyjupYvXRFxbER8NSKui4hrI+JNxXHrUqGIWBURl0XEd4u6/Elx/PiIuLSoyyciYmVxfLR4flNxfkOV7V/KImJFRHwnIj5XPLcmFYuIWyPiexFxVURcXhyr3d9hBrU+RMQK4APAi4GTgFdHxEnVtmpZ+TvgRR3HzgIuyswTgYuK59Cs0YnFn03AB0tq43KzF3hLZj4N2Ai8ofjfhHWp1m7gFzLzFOBU4EURsRH4n8D7irrcD/xO8frfAe7PzCcD7ytep+F4E3Bd23NrUg/Py8xT27bhqN3fYQa1/pwG3JSZN2fmo8B5wOkVt2nZyMxLgO0dh08HzikenwO8vO3432fTt4C1EbG+nJYuH5l5V2ZeWTzeSfP/gI7GulSq+P1OFU8PKP4k8AvAp4rjnXVp1etTwPMjIkpq7rIREccAvwz8bfE8sCZ1Vbu/wwxq/TkauKPt+ZbimKozlpl3QTM0AEcUx61VyYqhmacDl2JdKlcMsV0FbAMuBH4IPJCZe4uXtP/uZ+pSnH8QeGK5LV4W3g/8V2C6eP5ErEkdJPCliLgiIjYVx2r3d9hIGRdZArr9a8blsvVkrUoUEauBTwNvzswdC/zD37qUJDP3AadGxFrgfOBp3V5W/LQuQxYRLwW2ZeYVETHROtzlpdakfM/OzDsj4gjgwoj4wQKvrawu9qj1ZwtwbNvzY4A7K2qLmu5pdTsXP7cVx61VSSLiAJoh7dzM/Exx2LrURGY+AEzSnEO4NiJa/zBv/93P1KU4v4b50wy0OM8GXhYRt9KcNvMLNHvYrEnFMvPO4uc2mv+oOY0a/h1mUOvPt4ETi1U6K4FXARdU3Kbl7gLgzOLxmcBn247/ZrFCZyPwYKsbW4NTzJn5MHBdZr637ZR1qVBEHF70pBERBwIvoDl/8KvAK4qXddalVa9XAF9JN9ccqMz8w8w8JjM30Pz/jq9k5muwJpWKiIMi4uDWY+CXgGuo4d9hbnjbp4h4Cc1/Ba0APpKZZ1fcpGUjIj4OTACHAfcA/x34J+CTwJOA24FXZub2IkD8Jc1Vog8Bv52Zl1fR7qUsIp4DfA34HrPzbt5Oc56adalIRJxMcwL0Cpr/EP9kZv5pRJxAszfnUOA7wG9k5u6IWAV8lOYcw+3AqzLz5mpav/QVQ59vzcyXWpNqFb//84unI8DH
MvPsiHgiNfs7zKAmSZJUUw59SpIk1ZRBTZIkqaYMapIkSTVlUJMkSaopg5okSVJNGdQkaREiYiIiPld1OyQtTQY1SZKkmjKoSVoWIuI3IuKyiLgqIj5U3Lx8KiLeExFXRsRFEXF48dpTI+JbEXF1RJwfEeuK40+OiC9HxHeL9/xE8fGrI+JTEfGDiDg3FrjpqSQ9FgY1SUteRDwN+HWaN2E+FdgHvAY4CLgyM58BXEzzrhcAfw/8QWaeTPPuC63j5wIfyMxTgJ8DWreQeTrwZuAk4ASa93eUpEUb2f9LJOnH3vOBnwG+XXR2HUjzZsvTwCeK1/wD8JmIWAOszcyLi+PnAP9Y3Bfw6Mw8HyAzHwEoPu+yzNxSPL8K2AB8ffhfS9JSZ1CTtBwEcE5m/uGcgxF/3PG6he6pt9Bw5u62x/vw71ZJA+LQp6Tl4CLgFRFxBEBEHBoRx9H8O/AVxWv+A/D1zHwQuD8ifr44fgZwcWbuALZExMuLzxiNiCeU+i0kLTv+q0/SkpeZ34+IPwK+FBENYA/wBmAX8JMRcQXwIM15bABnAn9dBLGbgd8ujp8BfCgi/rT4jFeW+DUkLUORuVBPvyQtXRExlZmrq26HJPXi0KckSVJN2aMmSZJUU/aoSZIk1ZRBTZIkqaYMapIkSTVlUJMkSaopg5okSVJNGdQkSZJq6v8DnzYLgUtqiR4AAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmwAAAFNCAYAAAC9jTMrAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3X+03XV95/vnK4EACvJDMFPDr1ijFauCTcHRtkalitcKro4usdWLLmcYe2Wpt+3M0OrVSocZtdOpt1Na4Y7MqFXxV/XmOlhrlYNaRUBFNFAkBISAiPI7gIFw3veP/T0n37PP3ic75Ozz3STPx1pZZ39/7f3Z56Phlc/PVBWSJEmaXMu6LoAkSZIWZmCTJEmacAY2SZKkCWdgkyRJmnAGNkmSpAlnYJMkSZpwBjZJkqQJZ2CTNBZJ/meS/zjivTckOXEXP+8LSU5bjPLsYjl+Pck14/6cUS3V95Y0Xnt1XQBJWgxV9dKZ10leD/zrqvq1DsrxNeCpS/25knZvtrBJkiRNOAObtAdruiL/XZIrk9yX5INJVjbdi/cm+cckB7fuPznJhiR3JZlK8rTWteOSfKd57hPAvn2f9VtJrmie/UaSZ45QvtXN/cua4/+e5LbW9b9N8rbm9VSSf92U6QPAv0yyJcldrbc8OMn/asr4rSS/OORzj05SSU5LcmOSnyV5e+v6Pknen+SW5s/7k+zTXFuXZHPr3v+Q5ObmM69J8qLm/LIkZya5LsntST6Z5JAh5bk6yW+1jvdqyvTs5vhTSW5NcneSryZ5+o5+t33vf3CSzyf5aZI7m9eHt64fkuR/NN/1ziSfa107panXe5rvctLOfLak0RjYJP0r4DeBpwAvB74A/DFwKL2/I94CkOQpwMeBtwGHARcC/1+SFUlWAJ8DPgIcAnyqeV+aZ58NnA/8W+DxwLnA+pmQM0xVXQ/cAxzXnPp1YEsrKP4GcHHfM1cDbwK+WVX7V9VBrcuvAd4NHAxsBM7ewe/m1+h1b74IeGfrc98OPAc4FngWcDzwjv6HkzwVOAP41ao6AHgJcENz+S3AK4DnA08E7gTOGVKOjzdln/ES4GdV9Z3m+AvAGuAJwHeAj+7ge/VbBvwP4CjgSOAB4K9a1z8CPAZ4evMZf9F8v+OBDwP/DjiIXn3cgKRFZ2CT9N+q6idVdTPwNeBbVfXdqtoKfJbtYenVwP+qqi9V1UPAfwH2A55LL7zsDby/qh6qqk8Dl7U+498A51bVt6rq4ar6ELC1eW5HLgaen+RfNMefbo5XA48DvrcT3/XvqurSqtpGL9Qcu4P7311VD1TV95rPeVZz/neBs6rqtqr6Kb0Q+LoBzz8M7AMck2Tvqrqhqq5rrv1b4O1Vtbn5Xf8J8Mokg8YWfww4OcljmuPfac4BUFXnV9W9rfd5VpIDd/DdZlXV7VX1maq6v6rupRdknw+Q5BeAlwJvqqo7m/qdCclvBM5v/jcxXVU3V9U/j/q5kkZnYJP0k9brBwYc79+8fiLwo5kLVTUN3ASsaq7dXFXVevZHrddHAX/QdG/e1XRTHtE8tyMXA+votd58FZiiFyaeD3ytKceobm29vp/t321n75/zu2hez/suVbWRXovknwC3Jbkgycx9RwGfbf0+rqYX8FYOeZ+rgZc3oe1kmsCWZHmS9zTdkfewvYXr0B18t1lJHpPk3CQ/at7jq8BBSZbTq6c7qurOAY8eAVw34LykRWZgkzSqW+iFDACShN5/sG8Gfgysas7NOLL1+ibg7Ko6qPXnMVX18RE+92J6XaHrmtdfB55HL7BdPOSZGnJ+scz5XdD7rrcMLEjVx5rZqkc15Xpvc+km4KV9v5N9m5bOQWa6RU8BrmpCHPRa204BTgQOBI5uzqf/DRbwB/S6fk+oqs
fRC8cz73ETcEiSgwY8dxMwcBygpMVlYJM0qk8CL0vyoiR70/uP/FbgG8A3gW3AW5oB8b9Nb1zXjP8HeFOSE9Lz2CQvS3LAjj60qq6l19L3WuCrVXUPvVbAf8XwwPYT4PBmbN04fBx4R5LDkhwKvBP42/6bkjw1yQubsXo/p/c9Hm4ufwA4O8lRzb2HJTllgc+8AHgx8Hu0ukOBA+jVw+30xpn9p0fwfQ5oynZXM/HhXTMXqurH9MbI/XUzOWHvJDOB7oPAG5r/TSxLsirJLz2Cz5e0AwY2SSOpqmvohab/BvyM3gSFl1fVg1X1IPDbwOvpDZ5/NfB3rWcvpzeO7a+a6xube0d1MXB7Vd3YOg7w3SH3fwXYANya5Gc78Tmj+o/A5cCVwPfpDfQftDjtPsB76P2+bqU3YP+Pm2v/N7Ae+Ick9wKXACcM+8AmOH2T3pjBT7QufZhel+zNwFXN++ys99Mbj/iz5vm/77v+OuAh4J+B2+h181JVlwJvoDcJ4W569XIUkhZd5g45kSRJ0qQZewtbkpOatYc2JjlzwPU3Jfl+s47P15Mc05w/OskDzfkrknxg3GWVJEmaRGNtYWtmGP2Q3hpPm+lN839NVV3VuudxzZgUkpwM/B9VdVKSo4HPV9Uvj62AkrSHSPLHbO+Obftae1svSZNp3C1sxwMbq2pTM8blAnqzmWbNhLXGYxn/7C5J2uNU1X9qFhLu/2NYkx4Fxh3YVtGb9j1jc3NujiRvTnId8D6aVdUbq5N8N8nFSX59vEWVJEmaTINW1F5Mg9YBmteCVlXnAOck+R1627ucRm9dpyOr6vYkvwJ8LsnT+1rkSHI6cDrAfvvt9ytHHHHEYn+Heaanp1m2zAm2k8Q6mUzWy2SyXiaPdTKZxl0vP/zhD39WVYeNcu+4A9tmegtrzjicIYtLNi4A/gag2WJla/P6200L3FPoTaWfVVXnAecBrF27ti6/fM7lsZiammLdunVj/xyNzjqZTNbLZLJeJo91MpnGXS9JfrTju3rGHecvA9YkWd0sYHkqvXWHZiVZ0zp8GXBtc/6wZtICSZ5Eb2PjTWMuryRJ0sQZawtbVW1LcgbwRWA5vU2CNyQ5C7i8qtYDZyQ5kd6ijHfS6w6F3tYoZyXZRm9l8DdV1R3jLK8kSdIkGneXKFV1IXBh37l3tl6/dchznwE+M97SSZIkTT5HOEqSJE04A5skSdKEM7BJkiRNOAObJEnShDOwSZIkTTgDmyRJ0oRL1e6z1/pS7HTwX95zCffduoW99tq1FVH2fuhB9t16P8vZxoraOnt+mmJbtu1qMfc4VUUyaCc0dcl6mUzWy+SxTibPsv3u42mnPH3cOx18u6rWjnLv2Ndh02AH330bj/n5lq6LIUmSBpi+61bg6V0XY5aBbSf94ZnPWZS9xX70+jcw/fB9HP7CB+HeW+G3zwXgP1/513z/jmv4y+f8ya4Xdg/yvSuv5FnPfGbXxVAf62UyWS+TxzqZPHvv96v84KZ7uy7GLANbV6rIihXsfcA0LN8XnvwsAO675SDu4zH8wi+v67Z8jzLX/Ax/ZxPIeplM1svksU4m1E1TXZdglpMOujI7XqEg26uhKJbFapEkSduZDLpSBUnvJ62BpgXBgaeSJGk7A1tXZgIbMz+b0+w+s3YlSdLiMLB1pBjcwlY4tVuSJM1lYOvK9JAWtiq7RCVJ0hwGtq5U9RrWapp5LWwGNkmS1GJg68rMLNEa0MJml6gkSWoxsHVlztg1Jx1IkqThDGwdmTPpwBY2SZK0AANbV4rtkw4cwyZJkhZgYOtKDWlhM7BJkqQ+BrauzMwS7R+zVtglKkmS5jCwdcUWNkmSNCIDW1dmF8idP4ZNkiSpzcDWEWeJSpKkURnYuuIsUUmSNCIDW1ccwyZJkkZkYOtK1cAWNmeJSpKkfmMPbElOSnJNko1Jzhxw/U1Jvp/kiiRfT3JM69ofNc9dk+Ql4y7rkqoiy2xhkyRJOzbWwJZkOX
AO8FLgGOA17UDW+FhVPaOqjgXeB/zX5tljgFOBpwMnAX/dvN/uYXYv0YIsa5120oEkSZpr3C1sxwMbq2pTVT0IXACc0r6hqu5pHT6W7SvJngJcUFVbq+p6YGPzfruJ1hg2Jx1IkqQF7DXm918F3NQ63gyc0H9TkjcDvw+sAF7YevaSvmdXjaeYS6/aY9j6u0RtYZMkSS3jDmyDkse8lWGr6hzgnCS/A7wDOG3UZ5OcDpwOsHLlSqampnalvCPZsmXLLn/O47fcxz0//SkH3nsvWx9cwQ+a97vzzjvZVtuW5HvsThajTrT4rJfJZL1MHutkMk1SvYw7sG0GjmgdHw7cssD9FwB/szPPVtV5wHkAa9eurXXr1u1CcUczNTXFrn7Odfu9j31WPoED9v8RBxx42Oz7feSLH2Hb9LZdfv89zWLUiRaf9TKZrJfJY51Mpkmql3GPYbsMWJNkdZIV9CYRrG/fkGRN6/BlwLXN6/XAqUn2SbIaWANcOubyLp2ZyQV9Y9ima9ouUUmSNMdYW9iqaluSM4AvAsuB86tqQ5KzgMuraj1wRpITgYeAO+l1h9Lc90ngKmAb8Oaqenic5V1SM0HNZT0kSdIOjLtLlKq6ELiw79w7W6/fusCzZwNnj6903ZndS7RvWF5VsWyZ6xlLkqTtTAZdmdlLtK+FDbCFTZIkzWFg68qQransEpUkSf0MbF2pajY66BvDNnNekiSpYWDryuwWVLawSZKkhRnYuuIsUUmSNCIDW0fmzhJN+4LrsEmSpDkMbF0ZMkvUFjZJktTPwNaVYbNEnXQgSZL6GNi6MmyWqC1skiSpj4GtK1Vk2TKcJSpJknbEwNaVYbNEZ5f7kCRJ6jGwdWToLFHcmkqSJM1lYOvK7CxRIMtap21hkyRJcxnYujI93TSsDegStYVNkiS1GNi6MjN2rZx0IEmSFmZg60p7L9E5Gx3YJSpJkuYysHWlPUsUu0QlSdJwBraOFDRdotMu6yFJkhZkYOvKsK2pelFOkiRploGtK+1JB25NJUmSFmBg68rsJu8DxrDZJSpJkloMbF2ZCWZ9LWzgTgeSJGkuA1tXZmeHug6bJElamIGtI9tnic7f6cC8JkmS2gxsXVlglqgtbJIkqc3A1pVhs0SddCBJkvoY2LoybJaoLWySJKmPga0rVWTZsiavOUtUkiQNN/bAluSkJNck2ZjkzAHXfz/JVUmuTPLlJEe1rj2c5Irmz/pxl3VJDZslapeoJEnqs9c43zzJcuAc4DeBzcBlSdZX1VWt274LrK2q+5P8HvA+4NXNtQeq6thxlrErQ2eJujWVJEnqM+4WtuOBjVW1qaoeBC4ATmnfUFUXVdX9zeElwOFjLtNkcJaoJEka0bgD2yrgptbx5ubcMG8EvtA63jfJ5UkuSfKKcRSwM9PTTY/o/Fmiy+LQQkmStN1Yu0QZvATswD6/JK8F1gLPb50+sqpuSfIk4CtJvl9V1/U9dzpwOsDKlSuZmppalIIvZMuWLbv8OU+o4kc33sjjD9rGzZs3s6l5v61bt3LrrbcuyffYnSxGnWjxWS+TyXqZPNbJZJqkehl3YNsMHNE6Phy4pf+mJCcCbweeX1VbZ85X1S3Nz01JpoDjgDmBrarOA84DWLt2ba1bt25xv8EAU1NT7OrnXF3F0UcfzfJ7lnHkEUdyZPN+Kz65gif+whNZ99xde/89zWLUiRaf9TKZrJfJY51Mpkmql3H3vV0GrEmyOskK4FRgzmzPJMcB5wInV9VtrfMHJ9mneX0o8DygPVlhN9CMYXPSgSRJWsBYW9iqaluSM4AvAsuB86tqQ5KzgMuraj3wZ8D+wKea5SxurKqTgacB5yaZphcs39M3u/RRq6oJZTOzRPsnHbishyRJahl3lyhVdSFwYd+5d7ZenzjkuW8Azxhv6ToyJ7BNz2lhm65pZ4lKkqQ5nI7YhdnABv3LevROG9gkSdJ2BrYu9HeJuvm7JElagIGtC01gy5CFcyVJkt
oMbF1YqIXNnQ4kSVIfA1sHtrehufm7JEnaMQNbF9otbO2f2MImSZLmM7B1Yc4s0TkvmnV0DWySJGk7A1sXqm9igS1skiRpAQa2LsyZJQrOEpUkSQsxsHVhoTFsTjqQJEl9DGwdmO0RHTCGzS5RSZLUz8DWif4xbP2HBjZJkrSdga0L09O9n4Na2OwSlSRJfQxsXZiZdIDrsEmSpB0zsHWhfx22LGtdsoVNkiTNZWDrQPWvw+akA0mStAADW5dmW9j6ApstbJIkqcXA1oV563r0bU1lC5skSWoxsHVh3hg2dzqQJEnDGdi6MNvCNm8FXaZr2i5RSZI0h4GtCy7rIUmSdoKBrQPV3yXaF9BsYZMkSW0Gti7MW9WjF9Cqv+VNkiQJA1tHZlrY5s4SnZlwYGCTJEltBrYu9C+c29fCZl6TJEltBrYuDBnDZgubJEkaxMDWhZmxapk7S9TAJkmSBjGwdWFY1+fs0DYDmyRJ2m7sgS3JSUmuSbIxyZkDrv9+kquSXJnky0mOal07Lcm1zZ/Txl3WpTJ/73db2CRJ0nBjDWxJlgPnAC8FjgFek+SYvtu+C6ytqmcCnwbe1zx7CPAu4ATgeOBdSQ4eZ3mXzg7GsNnCJkmSWsbdwnY8sLGqNlXVg8AFwCntG6rqoqq6vzm8BDi8ef0S4EtVdUdV3Ql8CThpzOVdGtPTzYu+MWzzmt4kSZLGH9hWATe1jjc354Z5I/CFR/jso8fspIOZE3aJSpKk4fYa8/sPSh4Dm5GSvBZYCzx/Z55NcjpwOsDKlSuZmpp6RAXdGVu2bNmlz1n+k9s4FLh247WsBa754bX8eMsUW6e3AnD9puuZuv2Rv/+eaFfrRONhvUwm62XyWCeTaZLqZdyBbTNwROv4cOCW/puSnAi8HXh+VW1tPbuu79mp/mer6jzgPIC1a9fWunXr+m9ZdFNTU+zK52y9/no2AWvWrIGr4am/9Es89dnruP+h++Fj8ORffDLrfvmRv/+eaFfrRONhvUwm62XyWCeTaZLqZdxdopcBa5KsTrICOBVY374hyXHAucDJVXVb69IXgRcnObiZbPDi5tyj3/x2wua0kw4kSdJ8Y21hq6ptSc6gF7SWA+dX1YYkZwGXV9V64M+A/YFPNUHlxqo6uaruSPKn9EIfwFlVdcc4y7t0+maJOulAkiQtYNxdolTVhcCFfefe2Xp94gLPng+cP77SdWT+Qmy90046kCRJA7jTQRf6Z4n2L5xrl6gkSWoxsHVhWAtb2cImSZLmM7B1YN5Ytb4xbLawSZKkNgNbF2by2pCFcyVJktpGCmxJnpPkgNbxAUlOGF+xdndDWticdCBJkgYYtYXtb4AtreP7mnN6JGrI5u92iUqSpAFGDWyp1sCrqppmCZYE2W0NG8NmC5skSRpg1MC2Kclbkuzd/HkrsGmcBdut7aAlzcAmSZLaRg1sbwKeC9xMb4/PE2g2XNfOc5aoJEnaGSN1azZ7fJ465rLsOZwlKkmSdsKos0Q/lOSg1vHBSXa/LaOWii1skiRpJ4zaJfrMqrpr5qCq7gSOG0+R9gA13fs5pIXNMWySJKlt1MC2LMnBMwdJDsFZoo/c7BZUjb4WNQObJElqGzV0/TnwjSSfbo5fBZw9niLtAVyHTZIk7YRRJx18OMm3gRfQSxe/XVVXjbVku7Ghs0TtEpUkSQOM3K1ZVRuS/BTYFyDJkVV149hKtjvrnwyaZc1pW9gkSdJ8o84SPTnJtcD1wMXADcAXxliu3dy8xNY7W7awSZKk+UaddPCnwHOAH1bVauBFwD+NrVS7u/4xbP1dorawSZKkllED20NVdTu92aLLquoi4Ngxlmv31j9LdObVbI4zsEmSpO1GHcN2V5L9ga8CH01yG7BtfMXazc22sM38dKcDSZI03KgtbKcA9wP/J/D3wHXAy8dVqN3dvFmiTYvadLOgrl2ikiSpbdRlPe5rXk4DH+q/nuSbVfUvF7Ngu7V5ec1lPSRJ0nCjtr
DtyL6L9D57hmEL5xrYJEnSAIsV2Bx8tVP6F86de9ouUUmS1LZYgU07Y8gsUVvYJEnSIIsV2EwYO2PYOmzzukolSZIWL7C9bpHeZ48wP5jZwiZJkoZbcJZoknsZPD4tQFXV4+i9+MEYyrb7cpaoJEnaCQu2sFXVAVX1uAF/DpgJazuS5KQk1yTZmOTMAdd/I8l3kmxL8sq+aw8nuaL5s37nvtoEG7IO2+xeok46kCRJLaPudABAkifQWsKjqm7cwf3LgXOA3wQ2A5clWV9VV7VuuxF4PfCHA97igara/bbAmlkgd+a4L6DZwiZJktpGGsOW5OQk1wLXAxcDNwBfGOHR44GNVbWpqh4ELqC3a8Ksqrqhqq6ktyjvnsExbJIkaSeMOungT4HnAD+sqtXAi4B/GuG5VcBNrePNzblR7Zvk8iSXJHnFTjw30bZvTdW3l6izRCVJ0gCjdok+VFW3J1mWZFlVXZTkvSM8Nyh67Mwiu0dW1S1JngR8Jcn3q+q6OR+QnA6cDrBy5UqmpqZ24u0fmS1btuzS56y48vscDPzw2mt5FvDdK77H3Tds46atvWy74QcbWL5p+aKUdU+xq3Wi8bBeJpP1Mnmsk8k0SfUyamC7K8n+wNeAjya5Ddg2wnObgSNax4cDt4xauKq6pfm5KckUcBy9jefb95wHnAewdu3aWrdu3ahv/4hNTU2xK59z73SxGXjKU9bAJXDcccfBUc9lw+0b4PPwjF9+BuuOfOTvvyfa1TrReFgvk8l6mTzWyWSapHoZtUv0q8BBwFuBv6cXml4+wnOXAWuSrE6yAjgVGGm2Z5KDk+zTvD4UeB5w1cJPPVoMniXq1lSSJGmQUQNbgC8CU8D+wCeq6vYdPVRV24AzmmevBj5ZVRuSnJXkZIAkv5pkM/Aq4NwkG5rHnwZcnuR7wEXAe/pmlz569S/rkV41zEw6WBZ3DJMkSduN1CVaVe8G3p3kmcCrgYuTbK6qE0d49kLgwr5z72y9voxeV2n/c98AnjFK+R51ZtdbGzLpQJIkqWVnm3JuA24FbgeesPjF2TNsD2aZ89NlPSRJ0iCjrsP2e82g/y8DhwL/pqqeOc6C7daGbf6OOx1IkqT5Rp0lehTwtqq6YpyF2WPMLsM2t6VtdmsqW9gkSVLLqGPY5u0Bql0wb+HcmSMDmyRJms/piJ0YsjWVOx1IkqQBDGxdmO36bPSPYTOxSZKkFgNbF4Zt/l5OOpAkSfMZ2Dowb701W9gkSdICDGxdmJ0c2r8e28yRgU2SJG1nYOvCsBY2u0QlSdIABrZO9E066NvpQJIkqc3A1oXp6eZF316ijmGTJEkDGNg6MH+9NbtEJUnScAa2LvT3fNrCJkmSFmBg60L/1lTb96bqHdnCJkmSWgxsXXAdNkmStBMMbJ3oH6vmLFFJkjScga0L/V2iWdac7h0vi9UiSZK2Mxl0wK2pJEnSzjCwdWHepIO5nHQgSZLaDGxd6N9CtH9rKlvYJElSi4GtCzMtbLMNbH2TDsxrkiSpxcDWiZmk1mxR1bSwTVfv2BY2SZLUZmDrwuwWVDMnnHQgSZKGM7B1YNgsUXc6kCRJgxjYujBs83db2CRJ0gAGti5U34u404EkSRrOwNaF/i5R+pb1sEtUkiS1GNi6MLve2uAWNrtEJUlS29gDW5KTklyTZGOSMwdc/40k30myLckr+66dluTa5s9p4y7rkqmZ5TxmThjYJEnScGMNbEmWA+cALwWOAV6T5Ji+224EXg98rO/ZQ4B3AScAxwPvSnLwOMu7VGZnidbcFjZniUqSpEHG3cJ2PLCxqjZV1YPABcAp7Ruq6oaqupLZVWRnvQT4UlXdUVV3Al8CThpzeZdG/9ZUtrBJkqQF7DXm918F3NQ63kyvxeyRPruq/6YkpwOnA6xcuZKpqalHVNCdsWXLll36nMds3MgBwMbrNvJU4J+++U0eWnEQG+7bAMCll13KjXvfuChl3VPsap1oPKyXyWS9TB7rZD
JNUr2MO7ANaioade2KkZ6tqvOA8wDWrl1b69atG7lwj9TU1BS78jm3X7eJ24An/+KT4UZ43nOfB/sfxn2b7oOvwQnHn8DqA1cvWnn3BLtaJxoP62UyWS+TxzqZTJNUL+PuEt0MHNE6Phy4ZQmenXDOEpUkSaMbd2C7DFiTZHWSFcCpwPoRn/0i8OIkBzeTDV7cnHv0G7bTgeuwSZKkAcYa2KpqG3AGvaB1NfDJqtqQ5KwkJwMk+dUkm4FXAecm2dA8ewfwp/RC32XAWc25R72hs0QbtrBJkqS2cY9ho6ouBC7sO/fO1uvL6HV3Dnr2fOD8sRawC/2zRPu7RG1hkyRJLe500IXZranmJrfZLlFb2CRJUouBrQv9e4nawiZJkhZgYOtE3yxRW9gkSdICDGxd6O8SddKBJElagIGtA7OzRDO3hW262RTeLlFJktRmYOvC7LIezXHfGDZJkqQ2A1sXZoNa3xg2dzqQJEkDGNi6MDu5oBF3OpAkScMZ2LowZB02Zo8MbJIkaTsDWweqpnutan1bU9nCJkmSBjGwdaGqCWmDx7BJkiS1Gdi6UDQtbM1x/04HdolKkqQWA1sXhrWw2SUqSZIGMLB1YSaw9Y9hs4VNkiQNYGDrRM2OWutxlqgkSRrOwNaFYS1sdolKkqQBDGwdqP4xbG5NJUmSFmBg68LsLNG5Ac0WNkmSNIiBrQvtFrZsr4KZFrZlVoskSWoxGXRhzhi2+a1ptrBJkqQ2A1sXqjVLtBXOZrtEnSUqSZJaDGydGNzC5qQDSZI0iIGtA1UFy5Yxr4UNJx1IkqT5DGxdmJ0lOk27hW26pgG7RCVJ0lwGti60Jx0MaE2zhU2SJLUZ2LrQnnSAkw4kSdLCDGxdGNLC5ubvkiRpEANbB6qmW0FtfgubeU2SJLWNPbAlOSnJNUk2JjlzwPV9knyiuf6tJEc3549O8kCSK5o/Hxh3WZeMLWySJGkn7DXON0+yHDgH+E1gM3BZkvVVdVXrtjcCd1bVk5OcCrwXeHVz7bqqOnacZezEzCzR/jFsBjZJkjTAuFvYjgc2VtWmqnoQuAA4pe+eU4APNa8/Dbwou/s0yWGzRGd6RHfzry9JknbOWFvYgFXATa3jzcAJw+6pqm1J7gYe31xbneS7wD3AO6rqa/0fkOR04HSAlStXMjU1tahfYJAtW7bs0ucccMst7PPgg2yAuAEjAAALe0lEQVTefBP/YtvDfL15r+vvuh6Ai6cuNrTtpF2tE42H9TKZrJfJY51Mpkmql3EHtkGpo3//pWH3/Bg4sqpuT/IrwOeSPL2q7plzY9V5wHkAa9eurXXr1u16qXdgamqKXfmcH3/5y2y55hoOX7UKbt9r9r02XLEBvgcveMELFqege5BdrRONh/UymayXyWOdTKZJqpdxd4luBo5oHR8O3DLsniR7AQcCd1TV1qq6HaCqvg1cBzxlzOVdEjXbFTp4HTZJkqS2cQe2y4A1SVYnWQGcCqzvu2c9cFrz+pXAV6qqkhzWTFogyZOANcCmMZd3aSwwS9QJB5Ikqd9Yu0SbMWlnAF8ElgPnV9WGJGcBl1fVeuCDwEeSbATuoBfqAH4DOCvJNuBh4E1Vdcc4y7tkhs0SrXLsmiRJmmfcY9ioqguBC/vOvbP1+ufAqwY89xngM+MuXycW2EvUFjZJktTPnQ66UNU0rM1fh83AJkmS+hnYulBNMKuCLGudtktUkiTNZ2DrRGuWqJMOJEnSDhjYOlBVsGxZr4Wtv0vUFjZJktTHwNaF9izRvq2pbGGTJEn9DGxdmJl0YAubJEkagYGtC3MmHWwPaNM13WGhJEnSpDKwdWHY1lROOpAkSQMY2DpQNT14ayqX9ZAkSQMY2LowZGsqcNKBJEmaz8DWhTlbU7VO2yUqSZIGMLB1YdjWVH0BTpIkCQxs3ZgZq9Y/hs0WNkmSNICBrRMzLWvzW9icdCBJkvoZ2DpQc8aw2cImSZIWZmDrgrNEJUnSTjCwdWFYC5
tdopIkaQADWxcW2OlAkiSpn4GtC+3N3x3DJkmSdsDA1oWZzd+dJSpJkkZgYOtA0R7DNrcKllklkiSpj+mgC1WwbBkwv0vUHlFJktTPwNaFmWU9akCXqIlNkiT1MbB1ob1naP+kA8ewSZKkPga2Lsy0pNnCJkmSRmBg68LswrnTc8asuayHJEkaxMDWgarpwQvnuqyHJEkawMDWhfakg/5ZopIkSX3GHtiSnJTkmiQbk5w54Po+ST7RXP9WkqNb1/6oOX9NkpeMu6xLZoGtqewSlSRJ/cYa2JIsB84BXgocA7wmyTF9t70RuLOqngz8BfDe5tljgFOBpwMnAX/dvN+j37CtqewSlSRJA+w15vc/HthYVZsAklwAnAJc1brnFOBPmtefBv4qvdRyCnBBVW0Frk+ysXm/b465zENt2/oAl37w33PXbbfxjas+84jfZ/+br6X234eLHnigt4DujRcB8JP7f2ILmyRJmmfcgW0VcFPreDNwwrB7qmpbkruBxzfnL+l7dlX/ByQ5HTgdYOXKlUxNTS1W2ed5+IG7eOJf/iMHL8J7XbYm/NmKpsHworfMnj9yxZFj/Q67qy1btvh7m0DWy2SyXiaPdTKZJqlexh3YBjUX9Y+sH3bPKM9SVecB5wGsXbu21q1bt5NFHN3DDz3INX9+M5uuv54nrV69S++19omH8on99oHHrYJ9Hjt7ftX+qzhwnwN3tah7nKmpKcZZ93pkrJfJZL1MHutkMk1SvYw7sG0GjmgdHw7cMuSezUn2Ag4E7hjx2SW1fO8VHPOyN3Pb1BTHTEgFSpKk3d+4Z4leBqxJsjrJCnqTCNb33bMeOK15/UrgK1VVzflTm1mkq4E1wKVjLq8kSdLEGWsLWzMm7Qzgi8By4Pyq2pDkLODyqloPfBD4SDOp4A56oY7mvk/Sm6CwDXhzVT08zvJKkiRNonF3iVJVFwIX9p17Z+v1z4FXDXn2bODssRZQkiRpwrnTgSRJ0oQzsEmSJE04A5skSdKEM7BJkiRNOAObJEnShDOwSZIkTTgDmyRJ0oRLb1OB3UOSnwI/WoKPOhT42RJ8jkZnnUwm62UyWS+TxzqZTOOul6Oq6rBRbtytAttSSXJ5Va3tuhzazjqZTNbLZLJeJo91MpkmqV7sEpUkSZpwBjZJkqQJZ2B7ZM7rugCaxzqZTNbLZLJeJo91Mpkmpl4cwyZJkjThbGGTJEmacAa2nZDkpCTXJNmY5Myuy7MnSXJ+ktuS/KB17pAkX0pybfPz4OZ8kvxlU09XJnl2dyXffSU5IslFSa5OsiHJW5vz1kuHkuyb5NIk32vq5d3N+dVJvtXUyyeSrGjO79Mcb2yuH91l+XdnSZYn+W6SzzfH1knHktyQ5PtJrkhyeXNuIv8OM7CNKMly4BzgpcAxwGuSHNNtqfYo/xM4qe/cmcCXq2oN8OXmGHp1tKb5czrwN0tUxj3NNuAPquppwHOANzf/n7BeurUVeGFVPQs4FjgpyXOA9wJ/0dTLncAbm/vfCNxZVU8G/qK5T+PxVuDq1rF1MhleUFXHtpbvmMi/wwxsozse2FhVm6rqQeAC4JSOy7THqKqvAnf0nT4F+FDz+kPAK1rnP1w9lwAHJfmFpSnpnqOqflxV32le30vvP0SrsF461fx+tzSHezd/Cngh8OnmfH+9zNTXp4EXJckSFXePkeRw4GXAf2+Og3UyqSby7zAD2+hWATe1jjc359SdlVX1Y+iFB+AJzXnraok1XTbHAd/Ceulc0/V2BXAb8CXgOuCuqtrW3NL+3c/WS3P9buDxS1viPcL7gX8PTDfHj8c6mQQF/EOSbyc5vTk3kX+H7bVUH7QbGPSvG6fYTibragkl2R/4DPC2qrpngYYA62WJVNXDwLFJDgI+Czxt0G3NT+tlzJL8FnBbVX07ybqZ0wNutU6W3vOq6pYkTwC+lOSfF7i303qxhW10m4EjWseHA7d0VBb1/GSmObr5eVtz3rpaIkn2phfWPl
pVf9ectl4mRFXdBUzRG2N4UJKZf6S3f/ez9dJcP5D5ww+0a54HnJzkBnrDaV5Ir8XNOulYVd3S/LyN3j9ujmdC/w4zsI3uMmBNM6tnBXAqsL7jMu3p1gOnNa9PA/7f1vn/vZnR8xzg7pnmbS2eZkzNB4Grq+q/ti5ZLx1KcljTskaS/YAT6Y0vvAh4ZXNbf73M1Ncrga+UC3Quqqr6o6o6vKqOpvffjq9U1e9inXQqyWOTHDDzGngx8AMm9O8wF87dCUn+N3r/KloOnF9VZ3dcpD1Gko8D64BDgZ8A7wI+B3wSOBK4EXhVVd3RBIm/ojer9H7gDVV1eRfl3p0l+TXga8D32T4u54/pjWOzXjqS5Jn0Bkovp/eP8k9W1VlJnkSvdecQ4LvAa6tqa5J9gY/QG4N4B3BqVW3qpvS7v6ZL9A+r6resk241v//PNod7AR+rqrOTPJ4J/DvMwCZJkjTh7BKVJEmacAY2SZKkCWdgkyRJmnAGNkmSpAlnYJMkSZpwBjZJWgRJ1iX5fNflkLR7MrBJkiRNOAObpD1KktcmuTTJFUnObTZK35Lkz5N8J8mXkxzW3HtskkuSXJnks0kObs4/Ock/Jvle88wvNm+/f5JPJ/nnJB/NAhurStLOMLBJ2mMkeRrwanobPh8LPAz8LvBY4DtV9WzgYno7aQB8GPgPVfVMejs6zJz/KHBOVT0LeC4wsz3NccDbgGOAJ9HbQ1KSdtleO75FknYbLwJ+Bbisafzaj97GztPAJ5p7/hb4uyQHAgdV1cXN+Q8Bn2r2HlxVVZ8FqKqfAzTvd2lVbW6OrwCOBr4+/q8laXdnYJO0Jwnwoar6ozknk/+r776F9uxbqJtza+v1w/h3rKRFYpeopD3Jl4FXJnkCQJJDkhxF7+/CVzb3/A7w9aq6G7gzya83518HXFxV9wCbk7yieY99kjxmSb+FpD2O//qTtMeoqquSvAP4hyTLgIeANwP3AU9P8m3gbnrj3ABOAz7QBLJNwBua868Dzk1yVvMer1rCryFpD5SqhVr+JWn3l2RLVe3fdTkkaRi7RCVJkiacLWySJEkTzhY2SZKkCWdgkyRJmnAGNkmSpAlnYJMkSZpwBjZJkqQJZ2CTJEmacP8/jxRXR5B3K38AAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmkAAAFNCAYAAABbpPhvAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzs3XmcXHWd7//Xp05V9d7pNZ2dBAzIIiA3KoqjUdQRF8Bdf46X+f1Uxjt6ld/wmxmXmVFnRq/eO64zjsqIiogLIrsoe7NKAoRAAh3Ivnan0+mteqvtfH9/nFPd1VvSmO6qSvr9fDx4dNU5p+p8u77dzTvf1ZxziIiIiEhpiRS7ACIiIiIymUKaiIiISAlSSBMREREpQQppIiIiIiVIIU1ERESkBCmkiYiIiJQghTQRKQoz+6mZ/esMr91lZm86xvv93swum43yHGM5/szMnp/l91xrZvtm8z1FpPiixS6AiEghOOcuyj02s78EPuace20RyvEQcFqh7ysixx+1pImIiIiUIIU0EZlW2M34t2b2jJkNmtnVZtYSdh0mzOweM6vPu/5iM3vWzHrNrNXMTs8793Iz2xC+7tdA+YR7vcPMNoavfdTMzp5B+VaF10fC5z8ys8688z83syvCx61m9rGwTD8AXm1mA2bWm/eW9Wb2u7CM68zslGnuu9LMnJldZmZ7zKzLzL6Qd77MzL5tZgfC/75tZmXhuXFdk2b292a2P7zn82Z2YXg8YmafNbPtZnbYzK43s4ajfSbha08Pv9/esD4uzjv3NjN7LrzffjP7/8LjTWZ2e/iabjN7KPe5ikhx6BdQRI7mPcCbgVOBdwK/Bz4PNBH8Dfk0gJmdCvwSuAJoBu4AbjOzuJnFgZuBa4EG4Dfh+xK+9jzgx8BfAY3AD4Fbc8FmOs65nUA/8PLw0J8BA3nh8HXAAxNe0wZ8Avijc67aOVeXd/pDwJeBemAb8JWjfDavJei6vBD4p7z7fgE4HzgXOAd4JfAPE19sZqcBnwJe4ZyrAf4c2BWe/jRwKfB6YAnQA3zvKOXBzGLAbcBdwELgfwLXhfcCuBr4q/B+ZwH3hcevBPYR1F0LQR1r30CRIlJIE5Gj+Xfn3EHn3H7gIWCdc+4p51wSuImxgPQB4HfOubudc2ng34AK4DUEgSUGfNs5l3bO3QA8nnePjwM/dM6tc85lnXPXAMnwdUfzAPB6M1sUPr8hfL4KqAWefhHf643OufXOuQxwHUHIOpIvO+eGnXNPh/c5Jzz+YeCfnXOdzrlDBMHvI1O8PguUAWeYWcw5t8s5tz0891fAF5xz+8LP+kvAe83saGOJzweqga8551LOufuA2wkCKEA6vF+tc67HObch7/hi4KSwjh5y2txZpKgU0kTkaA7mPR6e4nl1+HgJsDt3wjnnA3uBpeG5/RP+p7877/FJwJVhV1tv2AW5PHzd0TwArCVoNXsQaCVofXo98FBYjpnqyHs8xNj39mKvH/dZhI8nfS/OuW0ELY9fAjrN7FdmlrvuJOCmvM+jjSDUtRylTEuAvRO+790E9QBBC+bbgN1m9oCZvTo8/n8IWg/vMrMdZvbZo9xHROaYQpqIzJYDBMECADMzgqC1H2gHlobHclbkPd4LfMU5V5f3X6Vz7pczuO8DBN2ca8PHDwMXEIS0B6Z5zVy3EI37LAi+1wNTFsS5X4SzTE8Ky/X18NRe4KIJn0l52KJ5tHsvnzCebAVBPeCce9w5dwlBV+jNwPXh8YRz7krn3MkE3dp/kxsfJyLFoZAmIrPleuDtZnZhOC7qSoIuy0eBPwIZ4NNmFjWzdxOM08r5L+ATZvYqC1SZ2dvNrOZoN3XObSVo0fsL4EHnXD9Ba997mD6kHQSWhWPl5sIvgX8ws2YzawL+Cfj5xI
vM7DQze2M49m6E4PvIhqd/AHzFzE4Kr202s0tmcO91wCDwd2YWM7O1BKHrV+H4wA+b2YKwS7o/d79w4sZLwiCdO56d+hYiUggKaSIyK5xzzxMEpX8HugiCwTvDcVEp4N3AXxIMgP8AcGPea58gGJf2H+H5beG1M/UAcNg5tyfvuQFPTXP9fcCzQIeZdb2I+8zUvwJPAM8Am4AN4bGJyoCvEXxeHQStW58Pz30HuJWg+zEBPAa86mg3Dj/ri4GLwvf9T+C/O+e2hJd8BNhlZv0EEyj+Ijy+GrgHGCAI1f/pnGud8XcsIrPONC5UREREpPSoJU1ERESkBCmkiYiIiJQghTQRERGREqSQJiIiIlKCFNJEREREStDRthc5LjQ1NbmVK1fO+X0GBwepqqqa8/vIzKlOSpPqpTSpXkqP6qQ0zXW9PPnkk13OueajXXdChLSVK1fyxBNPzPl9WltbWbt27ZzfR2ZOdVKaVC+lSfVSelQnpWmu68XMdh/9KnV3ioiIiJQkhTQRERGREqSQJiIiIlKCFNJERERESpBCmoiIiEgJUkgTERERKUEKaSIiIiIlSCFNREREpAQppImIiIiUoBNix4G51tuzk3s2fJ+O9gP03Hcnr3r5x4iXL2B9+3qGMkPjro1YhDcufyNbe7dySt0pNJQ3FKnUIiIicjxTSJuBjs5N/Mdzd3C41mDv07D39iNev/7k9fxh5x84veF0fnbRz4h5sQKVVERERE4UCmkzsCy9gu//p8/QS1ex4OPn8+iWH2JeGeeNDNP0/uug4RSIlgHwxUe/yO92/A6AzYc3c/Xmq/nEOZ8oZvFFRETkOKQxaTNQvvQkmj71SSq27aXs9g4+8rHH+YvLHqbxsVrqfvYZWv79lbS8cDctVS28+aQ3UzvoWJrMcEG0gd88/xsyfqbY34KIiIgcZxTSZiBaX0/zJz/JwDvfycB999F396MkO/roaTO61/dCZhie+AkAf9bwCr77wywffzTF+1MROoc7ufjmi7lx641F/i5ERETkeKKQ9iIMXfhGql7zatq/+CX6fxd0aSa6WnCv/gzsfwK6d1K+YQuVSXj5Lp/Xde3jZU0vo2u4i2ufu7bIpRcREZHjSdFCmpmVm9l6M3vazJ41sy+Hx1eZ2Toz22pmvzazeLHKOInnsfSb3wTfp+e6XwCQ7RugZ1cTDg9u/DgDd94GQLo7jjfQyS8u/D5XnHcF23q3sa1nWzFLLyIiIseRYrakJYE3OufOAc4F3mpm5wNfB77lnFsN9AAfLWIZJ/Hq6ig75RT8oSG85ibKzzyTg9/6Pr11n8Df38bAvXdiUcNPRUglPBK3/ZoL4y/DMO7ec3exiy8iIiLHiaKFNBcYCJ/Gwv8c8EbghvD4NcClRSjeEZW/7GXB19WrWfmb64mfdBKJtsMc6LqU7EiEhWf3AtC/p4J9//gthr/wVRZVLWJfYl8xiy0iIiLHkaKOSTMzz8w2Ap3A3cB2oNc5l5sOuQ9YWqzyTafi7CCkxVeuwiIRqi64gMGHHiZx9300v2Ex9S8ZxKuArmdrAHCpFAvKFtCX7CtmsUVEROQ4UtR10pxzWeBcM6sDbgJOn+qyqV5rZpcDlwO0tLTQ2to6V8UcNTAwQGtrK9FUikZgdzbDltZWymprqfN9sgsWkDjvJTTt30DN66rpvTNoKHTtG4l1L2XPYLIg5ZxPcnUipUX1UppUL6VHdVKaSqVeSmIxW+dcr5m1AucDdWYWDVvTlgEHpnnNVcBVAGvWrHFr166d83K2traydu1anHP0VVVz6lvejFdTQ3bNGrb/8pcs+n+voP7cWvjV9Sx+3TlUXfRe+q76Xwxvb2dRzGNbORSinPNJrk6ktKheSpPqpfSoTkpTqdRLMWd3NoctaJhZBfAmoA24H3hveNllwC3FKeH0zIy697wbrybozvSqq1n9yMPUf/CDsOTc4KKaRdS+9a1UvOVDZJMeDSmn7k4RERGZsWK2pC0GrjEzjy
AsXu+cu93MngN+ZWb/CjwFXF3EMs6YRcOPsmYxrP0cnPY2AKKLFgHQ0pumPzKEcw4zK1YxRURE5DhRtJDmnHsGePkUx3cAryx8iWaJGaz97OjT2KLFADT2pMjUZxjKDFEVqypW6UREROQ4oR0H5lhscdCStqA3BaAuTxEREZkRhbQ5luvurO5NAwppIiIiMjMKaXMsUlaGVx2noi8Iab3J3iKXSERERI4HCmkFEKuvIp7wAehLqSVNREREjk4hrQCijQuIDAQzOvuT/UUujYiIiBwPFNIKINZcjxvyAI1JExERkZlRSCuAaHMTfjpCfaZMIU1ERERmRCGtAGKLWgBYNhhjID1Q5NKIiIjI8UAhrQBiS5YA0NzvSGVTRS6NiIiIHA8U0gogunQ5AE39jpSvkCYiIiJHp5BWALGlKwBHQ5+vljQRERGZEYW0ArCqBry4T+2gWtJERERkZhTSCqGsBvMgnkqRTg0WuzQiIiJyHFBIK4RoGRZxxJJJUt3bi10aEREROQ4opBWIxePEspD008UuioiIiBwHFNIKxFpOJeaipJ1f7KKIiIjIcUAhrUCsLE40a6ScDxuuhV2PFLtIIiIiUsKixS7AfBGJ5UJaFm79VHDwS9oiSkRERKamlrQCsXicqG+kcMUuioiIiBwHFNIKxOJxvKyRRmPSRERE5OgU0grE4nGiWchfynZrz1baB9qLViYREREpXQppBWLxOJEsJCOR0Q7Pv3/o7/n2hm8XtVwiIiJSmhTSCiTo7gweZ8JjPSM9dI90F61MIiIiUroU0grEyuJEMkEbWsoMvDIG04MMpAaKXDIREREpRQppBRKJx4lkx0KaH69kODNMIp0ocslERESkFGmdtAKxeBzL+ECEjy5eyGuzMQASKYU0ERERmUwhrUAsFieS8cEZW+NxyrMGOHV3ioiIyJTU3VkgFo8DEA0nD7Rb2PXpp0hmk8UqloiIiJQohbQCyYW0WBjSuvI+eXV5ioiIyEQKaQUysSUt39OdT7M3sbfAJRIREZFSppBWIBYPJgrEMpPPXdF6BV9f//UCl0hERERKmUJagUzs7pxo/8D+ApZGRERESp1CWoFEjtDdCdA51FnA0oiIiEipU0grkFxLWnyK7k6A/lQ/w5nhApZIRERESplCWoGMThyYJqSBWtNERERkjEJagYyNSXOjx6piVeOuUUgTERGRHIW0Aplq4kB1rHrcNQeHDhaySCIiIlLCFNIKxGKTJw5URSvHXXNwUCFNREREAgppBTLakpaBKgvWTKuKVlBfVs95C8+jOlat7k4REREZpQ3WC2R0MdssNEUrSaV6qYxW8OAHH8Q5x6W3XKqQJiIiIqPUklYgkbIyAMqyEaq9MmqzPlXRcgDMjKpYlZbgEBERkVFqSSuQXHdnmfOo9sq5YHiYUxvOGD3vmUfWTbPSrYiIiMw7CmkFkgtp5X4Q0r7S1Q0nXzx6PmIRhTQREREZpZBWILmQVuFHyUYrgoPZsZVto5EoGf8IK92KiIjIvFK0MWlmttzM7jezNjN71sw+Ex5vMLO7zWxr+LW+WGWcTbmQ9palb+Cji18XHPTTo+fVkiYiIiL5ijlxIANc6Zw7HTgf+KSZnQF8FrjXObcauDd8ftyzSASiUZbGW1hZtTg4mB0LaZ55+M4vUulERESk1BQtpDnn2p1zG8LHCaANWApcAlwTXnYNcGlxSjj7LB7HpVIQCZbjGBfSIp66O0VERGRUSSzBYWYrgZcD64AW51w7BEEOWFi8ks2uSCwWhDQvDGkTujvVkiYiIiI5RZ84YGbVwG+BK5xz/WY209ddDlwO0NLSQmtr65yVMWdgYOCY7tME7N+9m45NCzgX2Pjk4/TuGAGg53AP/en+gnwfJ5JjrROZG6qX0qR6KT2qk9JUKvVS1JBmZjGCgHadc+7G8PBBM1vsnGs3s8XAlMvwO+euAq4CWLNmjVu7du2cl7e1tZVjuc/WqkoamptZct4r4Gk49+wz4Z
Tg/W5rvY3+3v5jev/56FjrROaG6qU0qV5Kj+qkNJVKvRRzdqcBVwNtzrlv5p26FbgsfHwZcEuhyzZXLBbDZTJj3Z15S3B4EU0cEBERkTHFbEm7APgIsMnMNobHPg98DbjezD4K7AHeV6TyzTqLxnDpNETCj90fP7tTEwdEREQkp2ghzTn3MDDdALQLC1mWQpnckqaJAyIiIjK1kpjdOV9YNIpL5y3B4Y/fcSDrazFbERERCSikFZDFwu5OL2zAzGrHAREREZmaQloBWTQK6UxeS9r4MWkKaSIiIpKjkFZAYy1pU+w4oJAmIiIieRTSCmh04sAUY9K0BIeIiIjkU0grpFh02jFpnnmaOCAiIiKjFNIKaHJLmro7RUREZGoKaQU0upjtFDsOaHaniIiI5FNIK6CxlrQpdhwIx6Q554pUOhERESklCmkFFCxmmwazIKhNGJMGqDVNREREAIW0ghpdggOCcWn+5JCmGZ4iIiICCmkFFSxmGwYzLzZuTJoXUUuaiIiIjFFIKyCL57ekRadsSdMyHCIiIgIKaQWVmzjgnAtb0jQmTURERKamkFZI0Sg4B9lsMCZtwgbroJAmIiIiAYW0ArJYsD6ay2SCXQc0cUBERESmoZBWQBYNQ1o6HY5JG2s1y00cyOTt5ykiIiLzl0JaAY1rSYtEIa9rUy1pIiIikk8hrYAsGuw04FJpMG/KljSNSRMRERFQSCuoXEsamTRExoe00YkDWoJDREREUEgrKIuFLWnpXEgbG38WteCcujtFREQEFNIKanRMWm7igJvckpZxmjggIiIiCmmFlRuTlsmEY9LytoXSxAERERHJo5BWQONb0jzwxwLZ6MQBjUkTERERFNIKavwSHONb0rTjgIiIiORTSCug0cVsc0tw5AWy3MQBhTQREREBhbSCmrSYbf4SHBEtwSEiIiJjFNIKaGwJjtSk7k5NHBAREZF8CmkFNHlbqLyJA2FI0xIcIiIiAgppBZXbFop0GiwyviUtopY0ERERGaOQVkBHGpOWa0nTmDQREREBhbSCGt1gfYptobQEh4iIiORTSCuk/MVsJyzBMdqSppAmIiIiKKQV1NiOA7nuzskTBxTSREREBBTSCmr8tlDTTBzwNXFAREREFNIKavISHOruFBERkakppBXQ2MSBVDAmbYqWNIU0ERERAYW0gjLPg0gkbwmOKcakaQkOERERQSGt4CwaDRaz1RIcIiIicgQKaQVmsVg4u1NLcIiIiMj0FNIKzKLRsXXStMG6iIiITEMhrdDisbwdB/Ja0iIakyYiIiJjFNIKzLwoLpsNJg7gRicPqLtTRERE8hU1pJnZj82s08w25x1rMLO7zWxr+LW+mGWcbRaJQDYbdHfC6Lg0LcEhIiIi+YrdkvZT4K0Tjn0WuNc5txq4N3x+4ojmWtLCkBZ2b47O7lR3p4iIiFDkkOacexDonnD4EuCa8PE1wKUFLdQcs0gkCGajIS2YPKCJAyIiIpKv2C1pU2lxzrUDhF8XFrk8s8vzcFk/HJPGaHdnxCIYRsZljvBiERERmS+ixS7An8rMLgcuB2hpaaG1tXXO7zkwMHDM92kYHqb/4EGGt3usBh5+6AEysVoADGPnrp209h3bPeaT2agTmX2ql9Kkeik9qpPSVCr1Uooh7aCZLXbOtZvZYqBzqoucc1cBVwGsWbPGrV27ds4L1trayrHeZ8e3v0OsoYHlp74UtsFrX/1qqG4GIPbzGMuWL2PtmmO7x3wyG3Uis0/1UppUL6VHdVKaSqVeSrG781bgsvDxZcAtRSzLrBud3TlhTBoEXZ6a3SkiIiIww5BmZp8xs1oLXG1mG8zsLcd6czP7JfBH4DQz22dmHwW+BrzZzLYCbw6fnzg8D+f7k5bgAIhaVBMHREREBJh5d+f/45z7jpn9OdAM/N/AT4C7juXmzrkPTXPqwmN531I21pIWfvR5S25EIhEyviYOiIiIyMy7Oy38+jbgJ865p/OOyYsxaZ208f
t3qiVNREREYOYh7Ukzu4sgpN1pZjWA0sSfYFJLWl4o88zTmDQREREBZt7d+VHgXGCHc27IzBoIujzlxfI8XCoF4Q4DmjggIiIiU5lpS9qrgeedc71m9hfAPwB9c1esE9eRxqRFI1FtCyUiIiLAzEPa94EhMzsH+DtgN/CzOSvViSw3u1NLcIiIiMgRzDSkZZxzjmBfze84574D1MxdsU5ck8ekjYUyTRwQERGRnJmOSUuY2eeAjwB/ZmYeEJu7Yp3AJq6T5o8PaWpJExEREZh5S9oHgCTBemkdwFLg/8xZqU5g5uVa0nITB/JCWsTTmDQREREBZhjSwmB2HbDAzN4BjDjnNCbtT+Hl1kmburtTLWkiIiICM98W6v3AeuB9wPuBdWb23rks2IlqdEyaaeKAiIiITG+mY9K+ALzCOdcJYGbNwD3ADXNVsBPW6OzOyUtwqLtTREREcmY6Ji2SC2ihwy/itZJnbHbn1BMHNLtTREREYOYtaX8wszuBX4bPPwDcMTdFOsFNXCdNY9JERERkCjMKac65vzWz9wAXEGysfpVz7qY5LdkJanR25xRj0jzzSPmpIpVMRERESslMW9Jwzv0W+O0clmV+iBxlTFpWLWkiIiJylJBmZgnATXUKcM652jkp1QlsbJ20aWZ3auKAiIiIcJSQ5pzT1k+zLbdOWq67M2+igCYOiIiISI5maBbY0WZ3ZlxmmleKiIjIfKKQVmgTZ3fmTxyIePi+WtJEREREIa3gxsakaVsoERERmZ5CWqHlZndqWygRERE5AoW0ApvUkpbXvRmNRDVxQERERACFtMILx6KNrmsyoSUt42vigIiIiCikFZx54Ufucl+1d6eIiIhMppBWaF7Qzen8sC1twrZQGpMmIiIioJBWcKMtaVjwJW+dNE0cEBERkRyFtELLjUlzYUhz4ycOaFsoERERAYW0ghttSZuiu1MtaSIiIpKjkFZooy1pLlgrLX9bqIgmDoiIiEhAIa3ARlvScvt3Tpw4oO5OERERQSGt8HItab4fLGirbaFERERkCgppBTauJW1id6d5OJy6PEVEREQhreBy66SNdneOX4IDUGuaiIiIKKQV2hHHpIVdoRqXJiIiIgpphZY/Js28SWPSAHV3ioiIiEJaoY1vSYtOGpMG6u4UERERhbTCGze7c/I6aaDuThEREVFIK7hJY9Km6O5US5qIiIgopBVariUtG45Jm7AtFCikiYiIiEJawY3t3ZkbkzZ+xwHQxAERERFRSCu8/HXSvBhkJy/BkckLbiIiIjI/KaQV2OTZnenRc2pJExERkRyFtELLH5PmxSA7OaRlnFrSRERE5juFtAIbNybNi48LaZFIcM731ZImIiIy35VsSDOzt5rZ82a2zcw+W+zyzJr8lrQJ3Z1RC8araXaniIiIlGRIMzMP+B5wEXAG8CEzO6O4pZod41vSxnd3agkOERERySnJkAa8EtjmnNvhnEsBvwIuKXKZZse4lrSYJg6IiIjIlEo1pC0F9uY93xceO+6Nb0mLagkOERERmVK02AWYhk1xzI27wOxy4HKAlpYWWltb57xQAwMDx3yf6P79NAKbn36GU8p7qB7oY334nluGtwDw5IYn6S3vPbbCzhOzUScy+1QvpUn1UnpUJ6WpVOqlVEPaPmB53vNlwIH8C5xzVwFXAaxZs8atXbt2zgvV2trKsd4nuW0bO4AzT38ptSNbYM/e0fesbK+Eu+Dsc8/mFYtecczlnQ9mo05k9qleSpPqpfSoTkpTqdRLqXZ3Pg6sNrNVZhYHPgjcWuQyzY5JY9K0d6eIiIhMVpItac65jJl9CrgT8IAfO+eeLXKxZsXkMWl5S3BEgurQOmkiIiJSkiENwDl3B3BHscsx67y8lrRYHLKp0VO5ljTtOCAiIiKl2t15wrJIXkvahO7O3OxOLcEhIiIiCmmFlt+SNqG7M7dOWtbXmDQREZH5TiGtwCwMaWMtadpxQERERCZTSCu0XEtaJtwWyv
kQThTQ3p0iIiKSo5BWYOPHpIXzNsLWNLWkiYiISI5CWqGNG5MWC46F49I0cUBERERyFNIKbNLsThhtSdPEAREREclRSCu0I7Wk5UKaujtFRETmPYW0AhvXkjZNd6da0kREREQhrdC8CXt3giYOiIiIyCQKaQVmkQiYTWhJC3YdUHeniIiI5CikFYPnBeukTViCIxfSNLtTREREFNKKwCKRI49JU0uaiIjIvKeQVgyeN+WYNC3BISIiIjkKaUUw1pIWdndqTJqIiIhMoJBWDNO0pGl2p4iIiOQopBXB5DFpqeC4GRGLqLtTREREFNKKIteS5sWD52F3JwRdnmpJExEREYW0IjDPC/fuHL8EBwQhTUtwiIiIiEJaMeTWSZuwBAcEy3Bk/Mw0LxQREZH5QiGtCMzzcNlM3sSBsVAWsYha0kREREQhrRgsFoNMJm8JjrGWtKhFNSZNREREFNKKwaJRXDo9aQkOCFrSFNJEREREIa0ILBbDpTPTjklTd6eIiIgopBVBENLSU45J80wTB0REREQhrShGQ9qExWxBEwdEREQkoJBWBBafGNLyJg5EotpxQERERBTSimLSxIHxS3Bo4oCIiIgopBWBxeJhSPOCA1ntOCAiIiLjKaQVgcViuEwGzILWtAnbQmWcJg6IiIjMdwppRTA6cQCCcWlagkNEREQmUEgrgtHFbCFsSRu/BIcmDoiIiIhCWhGMb0mLjmtJ08QBERERAYW0ohgX0iaMSYtGoqTznouIiMj8pJBWBBaLwWhLWnxcS1p1rJqh9FCRSiYiIiKlQiGtCI7U3VkTr6E/1V+kkomIiEipUEgrAosFEwecc5O6O2viNSRSiSKWTkREREqBQloRWCzcaSCTCbo7M2N7d9bEaxhID2gZDhERkXlOIa0IciHNpdMQK4dscvRcbbwW3/kalyYiIjLPKaQVwWhIy2QgWg6ZsZBWE68BUJeniIjIPKeQVgz5LWnRMkgPj57KhTRNHhAREZnfFNKKwKJRIBfSpm5JG0gPFKVsIiIiUhoU0orAYnEgP6SNjJ5Td6eIiIiAQlpRjJs4MKElrTZWCyikiYiIzHdFCWlm9j4ze9bMfDNbM+Hc58xsm5k9b2Z/XozyzbWxkJYJxqRlNCZNRERExosW6b6bgXcDP8w/aGZnAB8EzgSWAPeY2anOnVg7jh+pJa06Xg2oJU1ERGS61Ik8AAAgAElEQVS+K0pLmnOuzTn3/BSnLgF+5ZxLOud2AtuAVxa2dHPPYrmJA6lgnbS8MWnRSJTKaKVCmoiIyDxXrJa06SwFHst7vi88NomZXQ5cDtDS0kJra+ucF25gYGBW7hPbsoUG4KnHH2dJvJ1VfoYH7rsXF/EAiLs4L+x+gdbBY7/XiW626kRml+qlNKleSo/qpDSVSr3MWUgzs3uARVOc+oJz7pbpXjbFMTfVhc65q4CrANasWePWrl37pxTzRWltbWU27jNUU8Nu4Jwzz6SaYdgFr7/gVVAWdHU23dJEdW31rNzrRDdbdSKzS/VSmlQvpUd1UppKpV7mLKQ55970J7xsH7A87/ky4MDslKh0jNu7s7w8fJwcDWm18VpNHBAREZnnSm0JjluBD5pZmZmtAlYD64tcplk3aeIAjBuX1ljRSNdwVzGKJiIiIiWiWEtwvMvM9gGvBn5nZncCOOeeBa4HngP+AHzyRJvZCVPsOADjQlpLZQsHhw4Wo2giIiJSIooyccA5dxNw0zTnvgJ8pbAlKiybuHcnjAtpi6oWMZgeJJFKjK6bJiIiIvNLqXV3zgtH7O589D9o2f80AAcH1ZomIiIyXymkFUP+jgOxvIkDAJt/S8uuYBUSdXmKiIjMX6W2Ttq8cMSWtNQgLdkhKDOFNBERkXlMLWlFMOWYtPRYSGtOdGGYujtFRETmMYW0IjhyS1qCWGaYxvIGOoY6jv1mffvg4W+Dm3JNYBERESlRCmlFMOUSHAc2QNc2SA0C0FLeQMfghJDm3IsPW5tvhH
u+GIQ1EREROW5oTFoRmOeB540PaY/+O7Q/A34GgIWxavYOdQbnDmyERDtsuBYyw/CRKVYveeInsO4H8NePgeXtrjXcE3xNtEPd8smvExERkZKkkFYkFovhMnlj0gB6do4+bI6Us3F4V/Dklk/Cwc1HfsPdj8ChLTDSCxX1Y8dzIa3/CLtrZVLwzK/h3P8Lwk3eRUREpLjU3VkkFo2Ob0kD6G8ffdhElJ5kD+lsGoZ7x784PTz5Dbt3hO9xADrb4KFvBF2j+S1p09n0G7j1U7D9/j/xuxEREZHZppBWJBaLTQ5pfnr0YZMfjD07PNgRBKzX/S1c8r3g5EA469M5uOsf4dp3QXfYCte3D278ONz7z7D/yaBlDY4c0rbeFXw9uGk2vjURERGZBeruLJLRkBaZOic3Z4OxaYc6N7PIZaHhZKhqDk4mDkL9Slj/X/Dod8e/8KFvQscmwGDjL/K6O6cJadkM7Ahb0DqO0qUqIiIiBaOWtCKxWAzS6WnPNyeDHQgOdbUxYEZ39UKobglO5lrSdkzRPbn3MaheBC97L2y+AQa7guPdO2D7fZOv3/8kjPRBrCoY9+YcbL0HfP9Yvj0RERE5RgppRRKpribbn5jmZJSmw8EYs67eHXy5qYGPbvrO5JB2aAucfjFUNIx//eJzYNXrgvDVvz84tv+JoFt09x/HX9v5bPD1jEugaytsvxeue0/wVURERIpGIa1IogsXkjl0iJ5fX0//3vLxJ5e/iob2TZiDrsQ+1ldUsK1/F3v9YbAIJDqCHQp6dsHC0+Gk1wSvazot+Lr4bKg7aeobr/t+8HWoO1j249ALwbi41W8Gl4VNvw3OH3p+1r9nERERmTmNSSuS6MJmklu20PHFLwIN1HzgwNjyZu/7KbH7v0r9obt5eugA3V6QpR9pf4wPVi2EjddB725wPjSdCiv/DJpWB4Gr6/mgJa1+ipDWdBq03Q69e4PJBXv+CGULoOEUWHpecE3brcHXw9vm/DMQERGR6aklrUiiCxeSOXx49PnQofjYybIaWHgGTZkMjxIst1HmlfHI/kegqimYqbnpN8G1zafBqj+DN30JahcHxxadDbXLwMI1z970JXjTl+HD1wMObr8iCGgAyT5oPCVoeatshNRAcLz9abjlU0GrnYiIiBScQlqRxBYuHDc4v7/s3cED84Lux8aTWZ7JjJ5/5ynvZF3HOlLhorbDZnR4HjS+ZOxNT70IznwX1K0ALwoLlgbHF54Jr70imBH60rfDtnuC1rPaZcH5xpcEuxQsXTP2Xgc2MLjx5/ztvZ9ia8/WufgIRERE5AgU0mbIOYebxU3KowsXjj726usZ3LQ9eBKvDgJT40v47OEeLhwc4oOLLuD1y17PcGaYp17xEQYrFnDZS/8bl6xYzt7hrtH3yZ7yBnjfT8e2hao7iU3xOJ964WfcvuP2oPxrPwenvQ0+chMvrHoV71y6mD/Gw17vpf8t+BpORHiyvIw/9D3Pu299N4nu7exL7MN3M5v16Zzjrl13sTex95g+JxERkflKIW0GDu1J8PN//CPDh49+7Uzlh7QFF7+T9L79HHq2hr49VeHB5SzC49udXXzh/H/glYteSTQSpbV5OVee9zZeSB6GaDnvu/19vO+29/Hp+z7Nhb+5kGcPPzt2k7qT+I/6BTxw+Gk+99DnuHrz1dByJnzol1B/EvdUlLErHuMzbb9k+ze+ir8wGJfmXvp2AF6Ij3XBfuO+v+HtN72dWx78EiQnz0rd1beL4czYTgg/2vQjrnzgSr706Jdm9Hn4IyP4IyNTnruu7Tr+8ZF/JOtnSWVTM3q/41ly+3aGHn98Vt5rODPMbdtvm3G4FhGR0qGQNgO1zRUkupP07T1yS9rOZ7oYTswsREQXhstpxGLUXnQRAF2bauhcZzjfD/bQbDgZymqhdimD37+af/t5lEfuu5admx7m67yb//26/83rlr6OnpEeHtj3AACX33U5/al+ALZV1fFktJwrFr2fi1
ZdxHc2fIcdfTtGy/BMJEtVpIw3bXSk/uta+p8f5Ln3X80FA0/wX3V1tFXXsTSd4dRkit8O7qBqMEvT569n8Ed/N+57uefZ67jklku47PeX0T3STddwF9/bGOyOsK1nK3s++jEO//P/hD3rRl+z/2+uZMvLz+OWT13M19d/nX1//dfs/cT/GHvTzTfC41eTyqb44dM/5OZtN/O5hz7Hu255F1k/CzBl8Nib2MtPN/902lbPjZ0beduNb+PAwPR7mR4cPMilN1/KM4eemXRuKD3EB27/APfuCZYouWPHHbzrlndx09axTe+/9eS3uH/P1FtsHfj8F+j8xjemvTdA+z99kb1//UlcavzPUs9vfsOej18e/HwcwYP7HuRdt7yL3pFern/uV3z+4c/z2IHHjviaOeP7wdp7cySdHVtrcDZbuhOpBO+/7f3ct+e+WX3fubKxcyOX3nwph4YOvajX9Y708u5b38269nVHv1iKIutn2ds/uUcim0gw8NDDx8XPZ0nr3QuZZLFLMS2FtBnYcKCPbZEMHTvHujy79iXY+sRBBnuDym3f1ssd//kMd/7o2Rn90uxrdyRqVhBbvJjyM8/EKioAyAw5Evfcw1NPvUBH4/kM2ivou/VWuq+5hiW7B/iXG6L8y70nU/fdhzn9xo18/bX/i5suuYkbL76RH7z5B/Sn+vnmE9/kG49+i1/e1sZPvpXlNVf+ms+4Czl7F/xu++2MjAyw7sFfs7HrGd5+ysX8eVvQYtb9k5+QuPyrnLVpgO/W13JXHE5LpXjT0BAAb9ngaDpsHLzpfsimyWYyfLf1c3ztnq/y1Z9naXnkBT7/0Of5/c7fk3VZPnz6h2nc3s3gI49w+Ld34W76JPg+yZ076b/jDohHWXn/Vtbd/wsGH/0jQ489xieueRdfuPNy/N9+DH7/d9z37C/oSfZQnnQ8/vQd7EnsYf26b/P9jd/nkpsvYSg7FOyq8OxN0P4M33z8G/znI//GHw88Om7M3+auzXzq93/Fr6/7B/o79nDj1hunrZvrX7ie7X3bufnef2f7+99Pz+/vGD139+67ee7wc/xk80/Y2rOVv3/o79nVv4vvPvVdktkkbYfb+PHmH3Pdrf9KsvMgvTfcwOCjjwKQ2ruXvhtvpPtn15JNBK2RN7f+gBvu/s7o+2cOHWJ4wwb8RILBdetHj/uDgxz65rcYfOghOr74Rdr+8kP89Mmr8P0spIZGrzn8cCv/8siX2da7jYd/9jXO+ci/cc52n3v23HPUn0nn+ww8+CB+WN/7EvtGA3HO4Lr1ZHt7GXjoIXpvupls7/h9ZX0/b1hANgM/fRvcHITv3f272ZfYN3pt+uBB9m95kp19O49cMN+H5MCkw9c+dy1v+M0b2JfYR+9vf8vWN7yRx58YazXMZiaE2UwS2m7HTw4xuH79pBCc79fP/5q27jauffIX/PLL69j84P4prztaYM6X2ref1O7dweucm/61A53Q2UZy61a6f37dtOU8OHiQX7T9grSf5hdP/ZjaJ7dyS9sNJO67j4GHHppR2W7bfiu26QV+/tSPpy5zNsUXHv7CtP/oGGfXI+FuJ5MNPf44ifunf49MTw8jbW1k+/oYfjboDUh3dND/hzuD72Okf9z1I+ksj+/qDj7HVIr2R+/jqqd/yGB68OjlnAHnHEMbNkz6+R554YXRCV/JbdtI7dkz6XVfu+Vv+MqNn37R4an/jjvY+4n/gT88fm/mqzZdxTtufgdth9tGjw0/8wzb3vRm9n784/TfcQd+1ufA1h58f+b3HNqwgf477yI78CI/s50PQvvkf8D+KbKJBCPPPXdM7+F8n+FNm3HZ7KRzvvO5dfut/GrLr+hL9k1+cd9+st94Je72K4+pDHPJToQUvmbNGvfEE0/M2fs75/jq956gbnOCnpUV+FlH496wa86g7mUNDHeNMHJgCANqX1ZPxbJK0r0pvPIoXnkwyzL3Saf6U/Q80omXHWbl4B/ped9fUH7P/d
T2PEdsdycYDEXLeXLR6bymfTPd9WeQ9cqpPrORqvvvZOsp7yFZtoCmw5sZKaulebiN6MgAXsR4bolPvL8Hv+oi/NgKYn3PUudnae7YgJcZoW1VGS39RsPhEZ5YbSxsPJMVj21mqAwqc/+YMOip9Xhqpc/JlmTZcAOPxgd59fMOLw0R5+g6YwWH/NdRldjM4s6NQA27Vr4Vl9lBio30NMf5b9EW2nfs4ZSO4DvfvTpLtmkJde0pGnb3su71C3lV60F6q4yKdDURYOvSEba3lFGXjVDh99LnRYhmjZfuXkI8U0VX1R4STR49kQzpWDP12SWcln6asq5B/KRxKBplRafj2dMjNEdW0uO9ijpvE8+Wb+fkHSOc3OHwDR4/PQrVUVYM1kBDPen+Hhpf6OLQS5p4trYfS6V4TZujbgB8gx1n1TNc2cChSCfDWZ/G5BoW9bTTVbGNVQviPBBNsiLWQtplGOw/zBufdmTjHrFkFhcx+s6sJpbIUrUrCECJkxoZWBCnoa2dWAYOn9KI1VYTHUhS90IHzoyhZXX0vaQRZ1FqdnZRu7ML34NI+LfomVVGtKWc5SN9DCw4jbq2w1R09nOgAdpeUsarn0lROeJIe7BjqcfikXJeWO5RV9tMRaRs0s951d5u6l7oYLC5ihHP0dYwTLqujtVli3Ddg1SkMjS1HSBTGSc6FASHwdpy2s6uoypWTqVbQG/PmVi0n/LYZpqGBygb2c+hSqOyu57DDLN9aZSzylZS197Pguc7MAeddUZkYTPJpY0QsUnlqkjsJJbqp7vmJIaiMaqJEsmmeTJzgIrhLKt7K1ixZ5hI1rGzxdi5upqF6bPwh86kJvY45dUDDFqKg8mDNKQHoSvO6bsdQ4sW0HvaYlw0wkAmye7kIZqjtVRHq2kfqGYouo+K9AoaUueBpalvWo9ZdvRvwsBIHys2dpCojdN/+hLKrZzDg0miZtRVxTEA31Gz+zA4R/X+bvBh15rFNG89TFnKcfi8k6jo6MMbSTOwopFYsoehdDuDEWP1c2VEhzMkG6roPmsZfszL+5sEW5I76XcjLPcaaXn+MCs6HQOVRvVQ8PvWt7qFgRWNAPh+nJHhWlIjO6ioriRijXjRIfr2b+GsF5LsWWj4Z51KBMN3HqnhJURivXRHdrMvfYgYUc4pP5myVIK0F2OPG6Dc4iz06jGDSHqYxue34jzj8Gmn4yJjqztFkhla/rgN8322nrWcyqZqDBv9W4NzLHxiJ7GBJOnKGLHBFFvOreek7YNUJFIkTq3AKnoZqDuVbDQYDnIwkaR/OE1LTZzle7up3tvNxlXG4LIGVsYWkslUMTK8jPKK/ZSnuqne3cXAkgUcqvapilQQt6lWnwo+t0jap66tA6/fyFY5es9qpmZ3L37Uo2bXITIVcbpftoyFj+/EfEffyc2Y73AR49CicpY9vgdzsOcVy6msqAEHVfu68VIZ+lc1T/oZHxwcpKq8gspNWYbLWqio2s3Qipqg3pzjqZGtZMjS5NVxSjyYwd/0xC4imSzOi+Aixt6Xvp7hwZNo7N9Ifc9TbDoJahoWURupDD/ksS8A5YcTND4dtM6NNFbTdc5yBl2SuEWn+WzCusyMUNP7HC7i0V93FuBRs6cLBwysaIJI8LPZke0m63wWRxvxbPLvdO7jbnpqF2W9Qxw6byWpuspp7zuVaGKE6n3dOC9C9d5u+lc1kzi5edw1BzM97EoHKxS0ZKs550CM4aYa+uviRM1jweH91G3qJ13rc+jlYz+3DauWUrZqNWvXrn1RZXoxzOxJ59yao16nkDYzw0NpvvNPD1AzEDQ+PhfL8GRZhrNSUc5NeRjG41VZ4mnH2eHzI2n3fJpTKaJe+RGvA7BsCnMZ/GjwQ+xlhjgQL2Nh1ohkh3DR6smv8TNkU4eJlAfBIXaEX7w/lZfqIBtbGCywC1SMdDJS1ogz7yiv/BO5NFhsfBkyw2SjFdO+pGLoIMOVLbNelHiyl1S8dvR7l0D58C
HS8Vqy3uQQWGjViT0M1Kz4k16b/3PVcnA9XY1nk40e/Xe1lJmfGReejuRov1fHg3iyl1RZXbGLMXPOBz8B3oI/6eX1PVvoqX/pLBdqfqoYeZST//K1JRHStJjtDFVUxjj/HVFOfsl5VEYi9MdgKBX8qzrZnyIzkuWjZzbTM5ziYPsAqaEMZQvKSCczZEZ8wBHBRide1i6swM84hnuTRMyoqI3TUBmjrydJ10CSJVUeNpCgu6yGyhqPocFhhoY9SCUpr3A0LGumuSxOJG4c2JPAixhdfUM0kMJbUMehTIaWhVVU+8b+ZIpExxDOdwx07cavqaG2rAE6DgQbvLcsxjnHSM9eVp9yKh19STK+Y3Df89QkU/grTgv2Ga2oBDN6u3aQHjhAxdI49VUvJZ3yMIOaWDN+fw+pRIbMSAzzMwxiRBsWEHNlpLv2EEn1krQ0w9VxorFy6rPleANDxFdUUIWHf2CAaMMiulJRhoYGcS5NvKKRaMsCKvx9DAwv4FDfbhoyWWpHkmzb34a37HRYcirV0SpcdwfV1TUcPNSOlWUpz3YzQobqPqhcuJpYeRbnZ0l07CdevYoerx26DmHxGha2LKOvfQcVWY94vJrek06l1vn48SoGd66nIjvASMZRE60jW91Pd0WSpv5qfBYQSQ8yNNTBUHaEpngdfksTiaEeklFHNgIky4gMDuCqDZzDwrFazRXNWHk5HakuLJXBGxjCLy/DxTy87kEgAn4av7aGdGML1V491WnHUHU1NXt30zVyiKxXjjfchasso6KhmTqvGr/jIIeiI3h1dTTF6jmY6iICNPU4Do90kfEndw0Q8/Cb6qkecmRryjmNMvYe6qAnNUTN4gaGEsMkqyuJDI3gV5RTX7aAivQg0d40vckhhrIjLK+rZCidYMvOfTSuXkU23kT9iEFFmhgR6OmnJ92HX1dNqnEpTWUtlA110ploxzs8RXcE4OJV+NFqyrMjVPuOfj9JNlpOTSZDZXUdB6vBIhGayxrhUA8ulWIkMkRTfSO7Uj34fQNEiVJX3kKfS1HvReiuHCGVGsLrTkA2SzQS4aX1LewZ6CGd8VlSV05yJMFQOkVtzWK6hncxMDS+XPXxWsoWLYGRJP2H2xnKjrCgIhb87iTHls/JNtSOvSgSYUF/GquvpzebwOvqw19QhYvF8HoTZCvrqIg1UZaN0O/2ka0qg6xP9FAPTOjKqixbQFnVChK9W/AqK6hrWk5nXzvDjUuIDh8i2tUNyWDMnpmjsjbNwY4s9bU1WHSQTDKOeXEal66mv+sAqcF+Iljw96g6hZ8sw2WiVEcrGcyOkMgMki2rx/NHaLQ4qWyaRHbsQ0muOBVLJokf3D2pDrONC6itLsPr6qN/OK/7Nmwk8Gur8cvLsAzE61fQvG8v2epKBsp8Brt6cV4l3nDX6PXlsQhV8Sh9I2nSZXHKG5qp78vSM9RF0s8QifjEK5OkhjrwLUpmYQPRw/00UkVfJkHGz4zNgp/IjExzPV5VBH8AvK5usg1VWDKNXx7HMlm83gSZpjqIethICsywrE+kN0HjklUYEboObB99S39BFS4anfJn/FBnJ80LF2ILy4k0luG/0IHLjtV1hVdGTbSaw6kesmFXvqsqJ7ugGpwjeqgXjxSxBT41VZ142TgLExG6Bw+TdhO6ynONMhGPTHMdRAyvfwgbGKImWkXST5LyMxxJtrIRsj5esid43lALzuH1jE0oq41W4VmUnvTUv9Ojn0ttFX5lOdHO7kk/30cV88g01eH1D5JdUE30cD8kx3+/0YhHY6wOA/qzw/Q3VuAlBqhKGik/TcZg5OTTiXYfItp9cPR1zSvPZcKve9GoJe1FaG1tndNkLS+e6qQ0qV5Kk+ql9KhOStNc18tMW9LUVyMiIiJSghTSREREREqQQpqIiIhICVJIExERESlBCmkiIiIiJUghTURERKQEKaSJiIiIlCCFNBEREZESpJAmIiIiUoIU0kRERERK0AmxLZSZHQ
Imbxg3+5qArgLcR2ZOdVKaVC+lSfVSelQnpWmu6+Uk51zz0S46IUJaoZjZEzPZa0sKR3VSmlQvpUn1UnpUJ6WpVOpF3Z0iIiIiJUghTURERKQEKaS9OFcVuwAyieqkNKleSpPqpfSoTkpTSdSLxqSJiIiIlCC1pImIiIiUIIW0GTCzt5rZ82a2zcw+W+zyzCdm9mMz6zSzzXnHGszsbjPbGn6tD4+bmX03rKdnzOy84pX8xGVmy83sfjNrM7Nnzewz4XHVSxGZWbmZrTezp8N6+XJ4fJWZrQvr5ddmFg+Pl4XPt4XnVxaz/CcyM/PM7Ckzuz18rjopMjPbZWabzGyjmT0RHiu5v2EKaUdhZh7wPeAi4AzgQ2Z2RnFLNa/8FHjrhGOfBe51zq0G7g2fQ1BHq8P/Lge+X6AyzjcZ4Ern3OnA+cAnw98J1UtxJYE3OufOAc4F3mpm5wNfB74V1ksP8NHw+o8CPc65lwDfCq+TufEZoC3vueqkNLzBOXdu3lIbJfc3TCHt6F4JbHPO7XDOpYBfAZcUuUzzhnPuQaB7wuFLgGvCx9cAl+Yd/5kLPAbUmdniwpR0/nDOtTvnNoSPEwT/81mK6qWows93IHwaC/9zwBuBG8LjE+slV183ABeamRWouPOGmS0D3g78KHxuqE5KVcn9DVNIO7qlwN685/vCY1I8Lc65dggCA7AwPK66KrCwO+blwDpUL0UXdqttBDqBu4HtQK9zLhNekv/Zj9ZLeL4PaCxsieeFbwN/B/jh80ZUJ6XAAXeZ2ZNmdnl4rOT+hkULcZPj3FT/itGU2NKkuiogM6sGfgtc4ZzrP8I/+FUvBeKcywLnmlkdcBNw+lSXhV9VL3PMzN4BdDrnnjSztbnDU1yqOim8C5xzB8xsIXC3mW05wrVFqxe1pB3dPmB53vNlwIEilUUCB3NNzeHXzvC46qpAzCxGENCuc87dGB5WvZQI51wv0EowZrDOzHL/IM//7EfrJTy/gMlDC+TYXABcbGa7CIbKvJGgZU11UmTOuQPh106Cf9C8khL8G6aQdnSPA6vD2Thx4IPArUUu03x3K3BZ+Pgy4Ja84/89nIlzPtCXa7qW2ROOkbkaaHPOfTPvlOqliMysOWxBw8wqgDcRjBe8H3hveNnEesnV13uB+5wWzpxVzrnPOeeWOedWEvy/4z7n3IdRnRSVmVWZWU3uMfAWYDMl+DdMi9nOgJm9jeBfPx7wY+fcV4pcpHnD/v/27h1kiisM4/j/SQQTIyRqtBESMTZJwGjSiYJga5FCiXghWNukEIIhIggp7RS0NPGCF/JZWEksPrQQ7xeQVBZiY2NQVJRgXos54gX5Ipi44+7/V+0czg5ndmB45pyzvMl+YCnwMXAT2AIcAQ4CnwDXgZVVdauFh+10/wa9D6yvqrODGPcwS7IYOAFc4ek+m5/o9qV5XwYkyXy6zc7v0r2AH6yqrUnm0s3iTAcuAGur6mGS94Df6PYU3gJWVdW1wYx++LXlzo1Vtdx7Mljt9x9rh5OAfVX1S5IZ9OwZZkiTJEnqIZc7JUmSesiQJkmS1EOGNEmSpB4ypEmSJPWQIU2SJKmHDGmS9BqSLE1ydNDjkDR8DGmSJEk9ZEiTNBKSrE1yOsnFJLtaMfK7SbYlOZ/keJKZre+CJKeSXE4ylmRaa5+X5I8kl9p3Pmunn5rkcJI/k+zNBIVMJelVGdIkDb0knwPf0RVVXgA8AtYAHwDnq+prYJyuogXAr8CPVTWfrrLCk/a9wI6q+gpYBDwpDbMQ+AH4AphLV7NRkl7LpH/vIklvvWXAN8CZNsn1Pl3x5H+AA63PHuD3JB8CH1XVeGvfDRxqtf5mV9UYQFU9AGjnO11VN9rxRWAOcPL/vyxJw8yQJmkUBNhdVZuea0w2v9Bvojp5Ey1hPnzm8yN8tkr6D7jcKWkUHAdWJJkFkGR6kk/pnoErWp/VwMmqug38lWRJa18HjFfVHeBGkm/bOSYnmfJGr0LSSP
FtT9LQq6qrSX4GjiV5B/gb2ADcA75Mcg64TbdvDeB7YGcLYdeA9a19HbArydZ2jpVv8DIkjZhUTTS7L0nDK8ndqpo66HFI0su43ClJktRDzqRJkiT1kDNpkiRJPWRIkyRJ6iFDmiRJUhGn6vcAAAAaSURBVA8Z0iRJknrIkCZJktRDhjRJkqQeegwF6KYWfl4P/gAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmsAAAFNCAYAAABfUShSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzs3XmcnXV9//3X55wzc2bNLFkmZE8ghCSQAIZdYEBEEARssWAVwVtN25/+tLX2V1x+olTu262irbSFVqq1KiJWjYoiFQa1iiyy70mA7Hsyk9nOnOVz/3Fd58w5Z2aSCZmzJHk/H4885lzbub7nfGHyzne7zN0RERERkeoUqXQBRERERGRsCmsiIiIiVUxhTURERKSKKayJiIiIVDGFNREREZEqprAmIiIiUsUU1kSk4szs62b2mXGe+4qZXXCQ9/uZmV07EeU5yHKcbWYvlPo+InJoi1W6ACIi5ebuF2dfm9l1wHvd/fUVKMevgUXlvq+IHFrUsiYiIiJSxRTWRGRcwu7HvzGzJ82sz8y+ZmYdYZfiXjP7bzNryzv/MjN7xsz2mFmXmS3OO3aSmf0hvO67QF3RvS41s8fDa39rZsvGUb754fmRcPvfzGxb3vH/NLO/DF93mdl7wzL9C3CGmfWa2Z68t2wzs5+GZfy9mR09xn3nmZmb2bVmts7MdpjZx/OOx83sy2a2KfzzZTOLh8c6zWxD3rl/a2Ybw3u+YGZvCPdHzOx6M1tjZjvN7E4zax+jPG1m9hMz225mu8PXs/KOt5vZv4dl2W1mP8w7dnn4vfeE97pof9+7iJSewpqIHIg/Bt4IHAu8BfgZ8DFgCsHvkw8CmNmxwHeAvwSmAncDPzazWjOrBX4IfBNoB74Xvi/htScDtwN/BkwGbgVWZQPOWNz9ZaAHOCncdTbQmxcSzwEeKLrmOeDPgd+5e5O7t+YdfjvwaaANWA3ctJ/v5vUEXZpvAD6Zd9+PA6cDJwLLgVOBTxRfbGaLgA8Ap7h7M/Am4JXw8AeBK4BzgRnAbuCWMcoRAf4dmAvMAQaAr+Yd/ybQACwFpgE3h/c/FfgP4G+AVoLv6xVEpOIU1kTkQPyju291943Ar4Hfu/tj7p4AfsBwULoK+Km73+vuSeCLQD1wJkFwqQG+7O5Jd78LeDjvHu8DbnX337t72t2/ASTC6/bnAeBcM5sebt8Vbs8HJgFPHMBn/S93f8jdU8C3CMLWvnza3Qfc/YnwPsvD/e8AbnT3be6+nSAAXjPK9WkgDiwxsxp3f8Xd14TH/gz4uLtvCL/rTwFXmtmIccfuvtPdv+/u/e6+lyBkngtgZkcBFwN/7u67w+8/G2DfA9we1lnG3Te6+/Pj+J5EpMQU1kTkQGzNez0wynZT+HoG8Gr2gLtngPXAzPDYRnf3vGtfzXs9F/jrsEtzT9g1OTu8bn8eADoJWoV+BXQRBJVzgV+H5RivLXmv+xn+bAd6fsF3Eb4e8VncfTVBS+SngG1mdoeZZc+bC/wg7/t4jiDcdRS/j5k1mNmtZvaqmfUQfA+tZhYl+B53ufvuUco/G1gzyn4RqTCFNREphU0EAQMAMzOCMLAR2AzMDPdlzcl7vR64yd1b8/40uPt3xnHfBwi6PzvD178BziIIaw+McY2PsX+iFHwXBJ9106gFcf92OCt1bliuz4WH1gMXF30ndWELZ7G/JuiOPc3dJxEEVwAL36fdzFpHuW49MOq4PBGpLIU1ESmFO4FLzOwNZlZDECASwG+B3wEp4INmFjOzPyIYx5X1r8Cfm9lpFmg0s0vMrHl/N3X3lwha+N4J/Mrdewha//6YscPaVmBWOJauFL4DfMLMpprZFOCTwH8Wn2Rmi8zs/HBs3iDB50iHh/8FuMnM5obnTjWzy8e4X3N47Z
5wEsIN2QPuvplgnOE/hRMRaswsG+a+Brw7rLOImc00s+MO9sOLyMFTWBORCefuLxAEpn8EdhBMRniLuw+5+xDwR8B1BAPlrwL+K+/aRwjGrX01PL46PHe8HgB2uvu6vG0DHhvj/PuAZ4AtZrbjAO4zXp8BHgGeBJ4C/hDuKxYHPkvwfW0hGPz/sfDYV4BVwC/MbC/wIHDaGPf7MsH4wB3heT8vOn4NkASeB7YRdL3i7g8B7yaYcNBN8L3NRUQqzgqHjYiIiIhINVHLmoiIiEgVU1gTERERqWIKayIiIiJVTGFNREREpIoprImIiIhUsRGPKjlUTZkyxefNm1fy+/T19dHY2Fjy+8j4qU6qk+qlOqleqo/qpDqVul4effTRHe4+dTznHjZhbd68eTzyyCMlv09XVxednZ0lv4+Mn+qkOqleqpPqpfqoTqpTqevFzF7d/1kBdYOKiIiIVDGFNREREZEqprAmIiIiUsUU1kRERESqmMKaiIiISBVTWBMRERGpYgprIiIiIlVMYU1ERESkiimsiYiIiFQxc/dKl2FCrFixwkv9BINP//gZnv/DeqbFJv7xE/FEPx07NuS2++ub6W9qYsHW5+mJtOCkSdpOBuM1bG1rYc7W7bSku0nEanhx2rEs2JpiKLKL9R1TOHozeGYn09IptkVjxGghabtIRzKs75jC9J27iQ+lqfV2UtZLhElsnTqfZE0ts3Y8SM1gA2mroyWzG2MPNZkYO6NN1HuGGnpI2BAAu9vqiaYzTOpJAFCfiQOwpQ0avYHJu5PsidTRE01R40naMkliHqUvMjCu7yTmUeozcfZGBsDG/u80nc4QjUZIR43tUxuZtn2A1qF6eiMDZCw9fKIbzZl6BiND1GdqGYwkiWdqGIwMkbTUgVYZAN2tdTSn6mnsGQAcMAYigwDUeS3RvM87RJyk1dDovft936HaKLvb6mjfnmCARpozPRjD30E6GmHX1CYWbHH6vJfMPr4fN4P2maT6drK7JcqcXTHSQ737/MyN6XrSkQzRTIR0JM1gWOcHIlsv+Wo8RjxTS29kgMH6KHuba5myfYCheITu1jqmbuvHxvidlCFKb6SJ5sxejMzw/oixbVojHoEp2weoSaazH5wmb2BjRw3xnl7qBtM0ZepJ7KO+GzP1pCyd+298PIJ6jtAX1ntfYy2DdVEm79z3f+eJeIzu1njBZ07WRNk1uZ6p2/qIZErzu3m0epGxDVg9jtHg/SW7h+qk/OozcTAYsMSox2P1MY697MJSP8HgUXdfMa5zFdbG79M/eprWn28l5lbS+4iIiEjlxAc3c8x1M6smrB02zwYth/918lzu/Nk2zv3TRcw7YcqEvve6664j0t7G9I99jIEnnmDrjX+H1Ro19UlmfOU2vvD81xlct45rvr0F6uugxmlasYXe+9oYqIX6sCFgoBZqUvDJd0ZZkkrwVDxOXbSORGqQz36/HlIpGBjki1fWMn3+Up7a8STX/zTOjKOXsf30WTTdfBc/e3Mjf/H2m9n7rT/mvUdNow5jEKcuk2EwEuGao6/gpHvWkXz4SRhMEL/yzfx0UT/3b/4d5z+WovPxNOkotJw0l+uXrGPFjLOYteZXfLuxhrponBPaFvH/LPyTfX4fr/Zu5LNP/TN10Tgttc186sS/HPPcZ597jsULj2Xv/74BamIwmOD/fVcD3dEEnzvlo0wKW0I//ocv0p8aYDCdoC4aL/j5V0vfw7GT5h9QnQ3e9TMG7vsNpDM8sDzKL06KUBuJ8vppK5hc186dr/yUumic5W1LuK5uFjzyNYjWw9Rj4Yz3j/m+6Y1b6Pv0V6AujtkAzW8YhFgNXPwFMMMHE+z94Kdydf3pd9TwhTM/QW20dvTv8m8+Qd1Amvohcv+t3HJJhCvPex8LJ80bcf6/vXgHz+x5qeD7+ZN5l3LeUacf0Pfz7HPPsWTx4tz2U7te4J9e+CZ10Tin7G7j0u9vhLo4DIb/sq2LY5OaaP7MR0a+WW
oAfvyh4PvLDMFb/hGiMdydvR/6NLXnnUFk1lEM/ut3aPrs3xJpb+WGx77MNd/ZRnO/Uz8E/3XldJ5o7WEwneBDS97NcS1HF9ziay/dydO7X2AwneBPF1zO2R2n7Pcz3rfpt3zv1bupi8Y5uX0pV22fy+C/3wV1cWKLj6HhL9456nWpVzfSf9NXg8/cOonmGz9MZuduej/6+eA7iUaY9KX/Czbx/zAsrhfZhxd+Cs+ugkgc5p0Jy68uyW1UJ+V11yt385utDzOUSXPJrE4umX3+iHNi8QWs7R691a0SFNYOwNaXewCYs6Sdprb4hL1vurcXe/EJJn/g/bQunEXztGb2fKIbhqDtqF5apkf5zUtPcPaZZ1F/53oy3d00Hz+NWTNSvFDTTbwXnpsXo2Vvmhk7ndXTYf3UGHvSGfZGE0APMxpn0L50Ln2/+jXRlha2Lq5lbfIJ9rbvZfW0GDMef5C9TdNpG+rm7gV9fGRygq2tu9nUXjeivKedfQVTBh9jyy//G4C5b34zy6bs4hsP/JQn5mR48++DLqo1xzezur2X6yanWfjyWr7YfhQAV77u3SxYct4+v5NZ6SG2b/4SyUwPly44jQWnj33+ukHj6Nd38vKCf2HwmWfY0VHHc1P7mNk0kxNff2nuvBmDP+EXr/5ixPURi3D2eVfRWHNg3ds9exJs/MmPAXhyToSN7UE3xp+ddh6zmmfx5Z7vAHD1KWexYMoKeOb/Cy48+RrYx+fxVIoXPvMFvKeb5nkpZtVug2MvgjOGf6Gsmf1V4mvWsGY6xBct5biz3jTm+z03r47pD28CID4EqQg8dkwNXznvKhpqGkacf2zTq/z40d8W7Dv19ZeyYOoJ4/hWhq0bNBac3pnbnjRwAp/YfgsA5530RuJ3PAdDTssf/RE9P/kJ3tPNpHNOZeZY383jN8COF2HGSXDWG3O7X138H2TWrCEORJpjHH3xFZgZM5P3sKbjJTqfcojFaDjvZNau/R4AZ3W+jZZ4S8HbL2pez6pHfhN+3rewYPL+//Ls2dbGV/beAcA7Tjmb+bHXsfbWr8EQTD3zHUwZ47NkTh7ixc98PvjMnacx8/TzcHdeuunvSe/aRuNZZzHnjJF/gUyE4nqRfZiSgZf/NXh9+sdh+b5/b71WqpPyOn7aAN/89c8BOPGMC1kw6+xRz1vb1VXGUu2bOskPwNa13UTj0Dx5ZIA5GINPPQXu1C8/EYBoSwu1c2YCUD85ycY9q9k1uItlHSdSt2xZsL/DYNIMGua0AvD8UWk2zAn+4n1pZvCv8b15YyCWTV1G/fLlANSduJyOxunsTe4F4OmOITLd3bT+bi3rp8KeOLy09TG2RqMjytpY08iClgW5shKNUrd0KcumBuVaPWO4JeBbza8G937hPhYOJamL1ObKsj+10VoWh39Zjud8IPf5BhbNDq6bUnjdWO9zTOsxBxzU8u8H8FLe5142dRmL2xdTE6kZvm/70VAX1BWz9t3qbbEY9ccfH9zjmJmjXpO990szbL/fT80JSwu2X5kG86YeO2pQy5U3T22kluPaj9vnPcZjSv0UZjYFn2fpnBXEFy4EoGHFCuqWLAEKv9MRZp1S+DNUv3w5g88/T99DD1G/fDkWtkYtm7IsVy91ixdz/MyTAZjfMn9EUIPhz10XrWNh28Jxfabj2o8jFonlrq+dP59Ic/N+P0uktnbEZzaz3Ot9fg9SPjNfN/x61v5bWuXQkP877oQpB/aP0EpRWDsAW17uoWEKub8MJkLvAw+w5TM3AVDf0gv/cQXc/X+on9cW7Js8xJM7ngYKA1d92wBMmknD8UGgeXGmsWtBO1AYHLKWtS+mvufe4NqFs+lomJY7lg13rbuS9HQErWL/96Xv8JOm4QBzysAgUeD4KccTjUSJH3M0kYYG6hYtIlJfz/TG6UxrmMakabOomTuHnuYoT8Y2M8mNuTtfoaamgSVTllITqWFx+/ia+7Nha9xh7cTgu2k88eRRr1s+NT
h+xlFnAHDmjDMP6P2LxaZPJzZtGjvaojROm8G8SfNor2tnVtOsIGy2LyYejXNs27EQiYSByyAMDeP5LPUnheeOElAgqLv9lf+oU88FYPvxM8d1zeLJi4lZjJOmnUQ8Gmfx5MXURGv2W+bxyNXplLz/lk9cXvB6TNnAWvxdnLgcUilSmzcXhJzlU5fzYvjfdv3y5SyfsrygDMUWT15MLBJj6ZSluQC2P3WxOo5rO476WD3HtB6DRSLUL1sGkQh1YeAeS66Os//wgfF9D1I+9a0wZRHUt0H7gkqXRibIzKaZtNe1M2/SPFqz/4iucuoGHadMOsOkKfUMxcc3k3G8dv3nt0ht3UrbNdcQfeVuWHs/rL2f1vlnYJsHqGlKs6Ev6MKa3zIfLnsLqS1bqGv4Lkw6i0lvv4r7n3mIZ+bA3MZW4r3H0n7yZk7ueZI/1NVxfCLB7Kkn8MbGuTT0dTFpbgsti2rpiAStg9Mj9exs6+X545xMn9E0r5+37h3i50R4samRiEV415J3ceqWNbyuLs7iY98CgEWjTPlff0GsoyP3Wd53wvuIRqJMfi+8uv5BTurYzjmRSUTSL8GCTq5ZcAov7XlpzLFVxa445gr6kn0sals0rvObzjmHSW9+M0e99ToufrGfC+ZeUHB86eSlXHb0Zbxrybv49vPf5qpFVzGzaSZXHHPFuN6/mJkx+c9W8srOJ3jvCSuoidTQl+zLhflrllzD+r3rcy1snLoSpi+DupGtOsVaLr+c1PYd1F9yLfy2F2afVnC8+Q3ns/t3v6btvAjnzjp3n++16IyL+cWZ/8rCP/tLmn/8G2JL9/LWY9465vn1sXret+x9LGpfxJo9a5gzac5+yzteb1v0Ntrr2+lo7GDgyj+GSITaefNo+aO3kt67l7rj9tGCd9yl8PKv4ZjCem049TSazj2XTCJB84XD3aOL2hdx8hlXwPYeWt56BXXNs7hq0VVcsuCSUd8+Ho2zctlKFraOr1Ut611L38XG3o25gNf2zndQd8LxRJv23Vrb8ta3kt7bS91xw/99T7rkzSRWr6ZhxbjGHEs5nPUhGNxTkvGDUhlmxsplK6mP1Ve6KOOm2aAHqKura8Jmh7g7L55+BpMufCNH/d3fwbfeBi/ljamafy68/ABfWH4R3+t/mYfe8VCwPzkAN02H8/8vnPMRPnL/X3HPuv/musaF/PWV/wU//WtuemUVdzTVcdlgmpumng3Lr4Jvhn9Bv/7DfKM2zRdf/gEnWwPWv4tH64Pw9oMNmzkmmeTDc47m3miSaQ3T+OXbfjkhn7dUJrJOZOKoXqqT6qX6qE6qU6nr5UBmg6obtIKGXnmFTHf3cNdNzyY4+g2QbXmadzbUNLBnaG/hGJueoKWNSUG3VkfTjODn3u3B/g0P09EUDObvaDwKNjwM/bsKru8I13DqGEqwLBFMJW1yWGBBaFu2ZxsA0xumT+hnFhERkQOjsFZBA088AeQNJu7ZCG3zgq4yCMbo1LfRneqjNZ7Xr54La2FIawi6Ijt2r4PujbDlaTrajg32tR0Nu9bArrXBNe0LoGcj04eCKckdAz0sTwSvjx8cIBKOp8oGuGl5Y9tERESk/BTWKmjw6WeINDRQu2ABDPXDwO4ggM0+FSwSDESvb6c7PUBLKgVfOAaG+qB7ffAGYctadobdjME+uHkJeJqZRwWzmGZMDweorw67MqctgZ5NHDUQrKI/YyjBsjCsLR8cCgZvW5TFQ0PUEOGosIVOREREKkMTDCoos7eHaFsbFo3Cns3Bzkkz4XXXwaI3BwPRG9rYk9nAsclB6NsOu1+FTY9DTWPQCgecO/tc/uHcv2fJjo2QSkBtAycteztf6VjEmS0Lgb+FLU9CbXNwzepf0tG3k3/Zso2TBhM0uHPr0e/g+GgTLLsaZp9G/e5X+LdpC5gz/cQxSi8iIiLloLBWQZnEEBYPF9ft2Rj8nDQDGqfA/HCRvvo2evpeoTUVPstw76ZgDNrMkyEaVF
8sEuO8eRfCvOH3NuD8OedDJg0WhWQ/tM4J3j81ANue5ayB4FmGRGs586y/HZ7tdOyFAOx/gQkREREpNXWDVpAnEnlhrXDSQO6cuja6zWlJhkuG7FwbtJLtZ2HVnEgUmsNJAvVtuXFu7Fk3fE59u6ali4iIVCmFtQryRIJIbTjzM9eyVjhGrLd+EmkzWgaCpw3w4s8gkzqw1bSzAa2+vTAMZlewr297DaUXERGRclBYq6DMUF7LWt8OqG2C2sKFNPfUBY+uaenbEex4+VfBz5kHsGhmLqy1BY89yi4EOPes4GdD+2spvoiIiJSBxqxVkCeGiLSGS3KkBiE28pmj3e1zYS20ptLBjkwKWuZAc8eIc8fUHIa1hnZonAx/sxrSQ7C2C1bfq5Y1ERGRKqawVkHBmLWwGzQ1BLH4iHO6m4J1zlqzEwU8Pf7xaln53aAA8abgZ7ZFTWFNRESkaqkbtIKCMWthQEsnhp9ckGdPuh+ASZkMdCwNdh7IeDUo7AbNl91WN6iIiEjVUstaBWWG8pbuSCUKWtYe3/Y4X338q2zt2wpAazoThLQDmQmalZ1UUBzKsi1t9QprIiIi1UphrYIKukHTQwUta7/a8Cse2vwQJ3eczLKmObS218Cp7wvGrM046cBudNRyOPEdwYPh802aASveA8dedJCfREREREqlpGHNzC4CvgJEgX9z988WHf9z4P1AGugFVrr7s+GxjwLvCY990N3vKWVZK8ETCSJjtKz1Jntprm3m6xd9vfCiy/7hwG9U2wBX/NPI/ZEoXPqlA38/ERERKZuSjVkzsyhwC3AxsAR4u5ktKTrt2+5+grufCHwe+FJ47RLgamApcBHwT+H7HVYyQ0NYbszaEESHw1pfso+mmqYKlUxERESqRSknGJwKrHb3te4+BNwBXJ5/grv35G02Ah6+vhy4w90T7v4ysDp8v8OGp9OQTBaNWRvuBu0d6qWxaM01EREROfKUsht0JrA+b3sDcFrxSWb2fuDDQC1wft61DxZdO5PDiA8NAeSNWUuoZU1ERERGKGVYG+1hkz5ih/stwC1m9qfAJ4Brx3utma0EVgJ0dHTQ1dV1MOUdl97e3gm5j/X1MQ1Ys249T3d1cUrPbvrSzTwbvvfmXZtpijSV5TMd6iaqTmRiqV6qk+ql+qhOqlM11Uspw9oGYHbe9ixg0z7OvwP45wO51t1vA24DWLFihXd2dh5Eccenq6uLibhPcus2VgPHLl1KW2cnPBGjcfospoXv/fc/+HvmtM+h89yDv9fhbqLqRCaW6qU6qV6qj+qkOlVTvZRyzNrDwEIzm29mtQQTBlbln2BmC/M2LwFeCl+vAq42s7iZzQcWAg+VsKxl50MJgLwxa0OFY9aSvTTWaMyaiIjIka5kLWvunjKzDwD3ECzdcbu7P2NmNwKPuPsq4ANmdgGQBHYTdIESnncn8CyQAt7v7ulSlbUSPBGEtUjBmLXhsKYxayIiIgIlXmfN3e8G7i7a98m81x/ax7U3ATeVrnSVlUmM0rIWTjBIZ9IMpAbUsiYiIiJ6NmileCKcDZr/bNCwG7Qv1QegsCYiIiIKa5UyPGatFtwLFsXtGwrCWlOtukFFRESOdAprFTI8Zi0eBDXItaz1JnsBtayJiIiIwlrFZHKL4saDpxfAcMtaMmxZ0wQDERGRI57CWoUUjFnLtawFYU0tayIiIpKlsFYhBUt35FrWCrtB1bImIiIiCmsVUrAobjoMazFNMBAREZFCCmsVUrDOWirsBo1qgoGIiIgUUlirkNyYtVFa1vqT/QA0xBoqUjYRERGpHgprFZIds2Y1NXkta0FYS2aSxCxGNBKtVPFERESkSiisVYgPJbB4HDPLa1kLukFTnlJQExEREUBhrWIyiaG854IWrrOWzqSJmsKaiIiIKKxVjCcSwaOmANLJ4GfYspb2tFrWREREBFBYqxgfGgrGq8FwN2jYspbKpIhZrEIlExERkWqisFYhnklj2dazVOETDN
SyJiIiIlkKa5WScYiGX3+68AkGGrMmIiIiWQprlZLJYBZ+/anCddbSniYWUTeoiIiIKKxVjGcyEMm2rBU+wSCVSallTURERACFtcrJZLDo2C1rGrMmIiIioLBWMZ5JgxW3rGmdNRERESmksFYpGYdodjZoIghu0WCcWspTGrMmIiIigMJa5aTTwaOmIJgNGraqgVrWREREZJjCWoW4Z/Ja1oZyTy8AjVkTERGRYQprlZJxiIQta5kURGpyh9KZtJ5gICIiIoDCWuVk0sPrrGWSkDdGLeUptayJiIgIoLBWMZ4/wSCThmhhy5rGrImIiAgorFVO/gSDTAryWtI0Zk1ERESyFNYqpGCCQbqoGzST0pg1ERERAUoc1szsIjN7wcxWm9n1oxz/sJk9a2ZPmtkvzWxu3rG0mT0e/llVynJWRDoz9gQDVzeoiIiIBErWfGNmUeAW4I3ABuBhM1vl7s/mnfYYsMLd+83sL4DPA1eFxwbc/cRSla/iMhkskjdmLa9lLZ1RN6iIiIgEStmydiqw2t3XuvsQcAdwef4J7n6/u/eHmw8Cs0pYnqri7sMPcs8kR4xZUzeoiIiIQGnD2kxgfd72hnDfWN4D/Cxvu87MHjGzB83silIUsKLS6aJu0MIxa2pZExEREShhNyhgo+zzUU80eyewAjg3b/ccd99kZguA+8zsKXdfU3TdSmAlQEdHB11dXRNS8H3p7e2dkPu09/SQiUZY29XF8p07ME/xePi+fQN9bN+6vSyf53AwUXUiE0v1Up1UL9VHdVKdqqleShnWNgCz87ZnAZuKTzKzC4CPA+e6eyK73903hT/XmlkXcBJQENbc/TbgNoAVK1Z4Z2fnxH6CUXR1dTER91l785epmTqNEzs74eVmsEjufWvurGHWjFl0nnnw9zkSTFSdyMRSvVQn1Uv1UZ1Up2qql1J2gz4MLDSz+WZWC1wNFMzqNLOTgFuBy9x9W97+NjOLh6+nAGcB+RMTDn2ZDJYds5YuHLOW8pRmg4qIiAhQwpY1d0+Z2QeAe4AocLu7P2NmNwKPuPsq4AtAE/C9cIHYde5+GbAYuNXMMgSB8rNFs0gPeZ7J5E0wGGXpDo1ZExEREUrbDYq73w3cXbTvk3mvLxjjut8CJ5SybBWXKV5nrWjpDrWsiYiICHqCQeUUrLM28nFTsYiW7hARERGFtYoZ0Q2a9yCM2yhEAAAgAElEQVT3VEZj1kRERCSgsFYp+RMMirtBNWZNREREQgprFVLQspYeDmsZz5DxjJ5gICIiIoDCWuVkMhAd2bKW9jSAWtZEREQEUFirGM+kMRslrGXCsKYxayIiIoLCWuVkih/kXtiyptmgIiIiAgprlZPJYLlu0HQurKUyKUAtayIiIhJQWKsQz2Qgvxs0qjFrIiIiMpLCWqWMNcFAY9ZEREQkj8JapaTzJhikNWZNRERERqewViHuDtFo0MKG5x7krjFrIiIikk9hrVIyGSxiQRco5J4NqjFrIiIikk9hrUJyEwwyyWBH0Zg1PcFAREREQGGtcrITDLIta+GD3FMedoOqZU1ERERQWKucdDp4kHvYkqbZoCIiIjIahbUKcPfgRSQazATNvkazQUVERKSQwlolpLOtafkTDPQEAxERERlJYa0SMhkALBLNC2vBmDXNBhUREZF8CmsVMNwNGhnRsqYxayIiIpJPYa0Swm7Q0dZZy84G1Zg1ERERAYW1ivBM3gSDoqU71LImIiIi+RTWKiEz9gQDjVkTERGRfAprlZA/wSA9+pg1PcFAREREQGGtIjwMa6NNMMg9wUDdoCIiIoLCWmVkW9ai+5gNqm5QERERQWGtInIta6M9yN3VDSoiIiLDFNYqIdcNaiMf5J7Rg9xFRERkWEnDmpldZGYvmNlqM7t+lOMfNrNnzexJM/ulmc3NO3atmb0U/rm2lOUsu1w3aDRvZmjhs0E1Zk1ERESghGHNzKLALcDFwBLg7Wa2pOi0x4AV7r4MuAv4fHhtO3
ADcBpwKnCDmbWVqqzlVtANmi7qBtWYNREREclTypa1U4HV7r7W3YeAO4DL809w9/vdvT/cfBCYFb5+E3Cvu+9y993AvcBFJSxree1jgkHuCQYasyYiIiKUNqzNBNbnbW8I943lPcDPXuO1hxRPZ7s+IyMf5K6WNREREclTyuYbG2Wfj3qi2TuBFcC5B3Ktma0EVgJ0dHTQ1dX1mgp6IHp7ew/6PtEtW5gCPPf887T09bIY+P3DjzLQsIkXu18E4Le/+S3xSPygy3skmIg6kYmneqlOqpfqozqpTtVUL6UMaxuA2Xnbs4BNxSeZ2QXAx4Fz3T2Rd21n0bVdxde6+23AbQArVqzwzs7O4lMmXFdXFwd7n8Tq1awFlhx/PJOm74Tn4bQzzoS2uax+ajX8Ac479zxqo7UTUubD3UTUiUw81Ut1Ur1UH9VJdaqmeillN+jDwEIzm29mtcDVwKr8E8zsJOBW4DJ335Z36B7gQjNrCycWXBjuOyx4On+dtTGW7tBsUBEREaGELWvunjKzDxCErChwu7s/Y2Y3Ao+4+yrgC0AT8D0zA1jn7pe5+y4z+zuCwAdwo7vvKlVZy87DsDbaEwzCpTsipiXwREREpLTdoLj73cDdRfs+mff6gn1ceztwe+lKVznZCQYWiYz6IPeoRQnDq4iIiBzh1HxTCZlwrsQoD3JPe1pdoCIiIpKjsFYJmbyWtaKwlvGMukBFREQkR6mgAnJPMIiM/iB3hTURERHJUiqoBM/vBs0ukBsLD7m6QUVERCRHYa0S0kXdoBYJghtBy5omF4iIiEiWwloFeG6CQTR4kHtkeFJuxjNqWRMREZEchbVKyE0wsKBlrSisacyaiIiIZCkVVEBugkE0GgS38CHuoLAmIiIihZQKKiEb1izbsjbc7anZoCIiIpJPqaASwrBm2aU78sKaxqyJiIhIPoW1ChheZ230blDNBhUREZEshbVKyIU1C8Pa8AQDPW5KRERE8imsVUC2Zc2i0XDM2nA1uLvGrImIiEiOUkElpLMTDCIjlu7QBAMRERHJp1RQCZ5tWYuAp7XOmoiIiIxpXKnAzN5qZi15261mdkXpinV4K3yQ+8iwpjFrIiIikjXeJpwb3L07u+Hue4AbSlOkI0DxOmt5LWlqWRMREZF8400Fo50XG2WfjEfBBIORs0EV1kRERCRrvKngETP7kpkdbWYLzOxm4NFSFuxw5un8btDCCQaaDSoiIiL5xpsK/jcwBHwXuBMYAN5fqkId9jz/CQZ63JSIiIiMbVxdme7eB1xf4rIcMTydDl5ogoGIiIjsx3hng95rZq15221mdk/pinWYy3jwM5JduqPw2aB63JSIiIhkjbe/bUo4AxQAd98NTCtNkY4AI7pB1bImIiIioxtvWMuY2ZzshpnNA7wUBToSjJhgYIUtaxqzJiIiIlnjXX7j48BvzOyBcPscYGVpinQEyOS3rGX0BAMREREZ03gnGPzczFYQBLTHgR8RzAiV18Az+RMMNBtURERExjausGZm7wU+BMwiCGunA78Dzi9d0Q5juQkG0RFhTWPWREREJN94m3A+BJwCvOru5wEnAdtLVqrDXdiyZhEbdYKBZoOKiIhI1njD2qC7DwKYWdzdnwcWla5YhzfPtqxFo+HSHYWPm1LLmoiIiGSNN6xtCNdZ+yFwr5n9CNi0v4vM7CIze8HMVpvZiEV1zewcM/uDmaXM7MqiY2kzezz8s2qc5Tw0ZCcYmIWL4g6HMz1uSkRERPKNd4LBW8OXnzKz+4EW4Of7usbMosAtwBuBDcDDZrbK3Z/NO20dcB3wkVHeYsDdTxxP+Q41IyYYmCYYiIiIyOjGu3RHjrs/sP+zADgVWO3uawHM7A7gciAX1tz9lfBY5kDLcUjL7wYd5XFTCmsiIiKSVcpUMBNYn7e9Idw3XnVm9oiZPWhmV0xs0SosO8HARp9goDFrIiIiknXALWsHYLQpjQfy1IM57r7JzBYA95nZU+6+puAGZi
sJF+ft6Oigq6vrNRd2vHp7ew/6Po0vv0xjJEJXVxevTybYvGkTa8L37B/oZ9vWbWX5LIeLiagTmXiql+qkeqk+qpPqVE31UsqwtgGYnbc9i3FMSshy903hz7Vm1kWwXMiaonNuA24DWLFihXd2dh5cicehq6uLg73Ptkf/wM5oNHif38DsOfOYHb5nzfdqmHHUDDrPOrh7HEkmok5k4qleqpPqpfqoTqpTNdVLKbtBHwYWmtl8M6sFrgbGNavTzNrMLB6+ngKcRd5Yt0Ne/lpqRUt3uLu6QUVERCSnZGHN3VPAB4B7gOeAO939GTO70cwuAzCzU8xsA/A24FYzeya8fDHwiJk9AdwPfLZoFukhzTMeTC6AEWPW0p7WorgiIiKSU8puUNz9buDuon2fzHv9MEH3aPF1vwVOKGXZKiodBjJ38EzB0h2aYCAiIiL5tEZEBbhnhpftgMLZoGjpDhERERmmVFAJ6czwgrhQ+CD3jMKaiIiIDFMqqATPYAVhrXDMmsKaiIiIZCkVVIDvo2XN0WxQERERGaawVgmZDEQsmFwAhS1rGc0GFRERkWEKaxXgnsEi0dHHrGk2qIiIiORRWKuE4m7Q/KU7NBtURERE8igVVEImO8GgcOkOdyfjCmsiIiIyTKmgAjxTPMEgCGuZcAybwpqIiIhkKRVUgKeSWE3NiJa1DGFYU7WIiIhISKmgAjyZxGKxvJa1oBqyLWvRiCYYiIiISEBhrRKSqaBlzQtb1tJhS5u6QUVERCRLqaACRrashRMM8GBT1SIiIiIhpYIK8FTYsla0dEfa1bImIiIihZQKKsCTSax25AQD96BlTWPWREREJEthrQI8mYRYLC+sFbasGXrclIiIiAQU1iog6AatHXOdNT1uSkRERLIU1ipg5ASDIJzlFsWNqFpEREQkoFRQAblFcYuW7siFNVWLiIiIhJQKKsCTxU8w0GxQERERGZ1SQSUkU4XdoFbYDarZoCIiIpKlsFYBwy1ro08w0GxQERERyVJYq4AgrMVGrLOW7QbVbFARERHJUlirgOEnGIy+KK5mg4qIiEiWUkEFeDIJBd2gQTXkJhioWkRERCSkVFBmnk5DJhNMMBhj6Q51g4qIiEiWwlqZeSpoTdvXEwy0dIeIiIhkKRWUmSeTAPtcukNhTURERLJKmgrM7CIze8HMVpvZ9aMcP8fM/mBmKTO7sujYtWb2Uvjn2lKWs5xyYW2UCQZaFFdERESKlSwVmFkUuAW4GFgCvN3MlhSdtg64Dvh20bXtwA3AacCpwA1m1laqspbTcFiLjXiCQXY2qMasiYiISFYpm3BOBVa7+1p3HwLuAC7PP8HdX3H3J4FM0bVvAu51913uvhu4F7iohGUtn9yYtZGL4mZb1sy0KK6IiIgEShnWZgLr87Y3hPtKfW1VK+wGzYa1osdNqWVNREREQrESvvdozUM+kdea2UpgJUBHRwddXV3jLtxr1dvbe1D3iW7ezBTg2RdfpGPqWhYAD/z6f/BIjBcGXgDgySeepP+F/gkp75HgYOtESkP1Up1UL9VHdVKdqqleShnWNgCz87ZnAZsO4NrOomu7ik9y99uA2wBWrFjhnZ2dxadMuK6uLg7mPoPPP8/LwNLly5lUk4KX4dzO8yESoXZTLdwLJ590Mid3nDxhZT7cHWydSGmoXqqT6qX6qE6qUzXVSym7QR8GFprZfDOrBa4GVo3z2nuAC82sLZxYcGG475A3cukOyz3BQEt3iIiISLGSpQJ3TwEfIAhZzwF3uvszZnajmV0GYGanmNkG4G3ArWb2THjtLuDvCALfw8CN4b5DnieLFsWNDDduasyaiIiIFCtlNyjufjdwd9G+T+a9fpigi3O0a28Hbi9l+SqhYIJBf3rUsKaWNREREclSKigzTxWts5YX1rQoroiIiBRTKiizEUt3RIarILsorsKaiIiIZCkVlFnBBANXy5qIiIjsm1JBuRU/wUATDERERGQfFNbKrKAbNJ2ESE
3uWDas6XFTIiIikqWwVma5pTtiMUgOQE1d7pha1kRERKSYwlqZZVvWqKkJw1p97pjGrImIiEgxpYIy8/wxa6kBiA2HNc0GFRERkWJKBWVWMGYtOVjQDaqWNRERESmmVFBmhWGtv6BlTWPWREREpJjCWpnlnmAQi0FqsGDMmmaDioiISDGFtTLzZBIiESwaDbtBR04wUMuaiIiIZJX0Qe4yilQqaFWDcIJBHQOpAfqSfZpgICIiIiMoFZSZDyWD8WoQLt3RwLt//m7Ou/M8TTAQERGREZQKysxTqaKwVsczO58BNMFARERERlJYKzNPJqEmFjxqytOjzgZVy5qIiIhkKRWUmSeTw8t2QME6a8lMMFNUYU1ERESylArKzFMpLBYuiAskY7W5Y4l0AlBYExERkWFKBWWWGegnEo8HM0GBbZ7KHRtMBQFOY9ZEREQkS2GtzDJ7uom2tgaTC4BtPpQ7NpgOwpoWxRUREZEshbUyS3fvKQhrW8OABkHLmlrVREREJJ/CWpml9oRhLezy3Jrqzx1LpBMaryYiIiIFlAzKyN3DbtCW3GzQLane3PHB1KDCmoiIiBRQMigj7+/Hk8mwGzRoWduV6ssd7032UhupHetyEREROQIprJVRes8eAKItLblu0O68MWt7h/ZSF6sb9VoRERE5MimslVG6uxugYIJBd7KPeDQOQM9QT+61iIiICCislVVBy1ourPUypX4KoJY1ERERGUlhrYxyYa21Nbco7p6hvUxrmAbAQGpALWsiIiJSoKRhzcwuMrMXzGy1mV0/yvG4mX03PP57M5sX7p9nZgNm9nj4519KWc5yKewGHSQN7E3uzbWsAWpZExERkQKxUr2xmUWBW4A3AhuAh81slbs/m3fae4Dd7n6MmV0NfA64Kjy2xt1PLFX5KqGwG7SfnvC5oNmWNYC6qMKaiIiIDCtly9qpwGp3X+vuQ8AdwOVF51wOfCN8fRfwBjuMn7WU3rOHSEMDVlsLqUH21DYAFLSsqRtURERE8pUyrM0E1udtbwj3jXqOu6eAbmByeGy+mT1mZg+Y2dklLGfZpPd0E2ltAeDpga3c0RyEtYKWNXWDioiISJ6SdYMCo7WQ+TjP2QzMcfedZvY64IdmttTdewouNlsJrATo6Oigq6vr4Eu9H729va/5Pi2vvELUInR1dfG93c/yq4bg69/04qbcObu37y7L5zicHEydSOmoXqqT6qX6qE6qUzXVSynD2gZgdt72LGDTGOdsMLMY0ALscncHEgDu/qiZrQGOBR7Jv9jdbwNuA1ixYoV3dnaW4GMU6urq4rXeZ91/fou0O8s6O/nXl1O5/RecfgG3rLoFgHmz5tF52mt7/yPVwdSJlI7qpTqpXqqP6qQ6VVO9lLIb9GFgoZnNN7Na4GpgVdE5q4Brw9dXAve5u5vZ1HCCAma2AFgIrC1hWcvCEwkitcGkgq0+lNs/tWFq7rUmGIiIiEi+krWsuXvKzD4A3ANEgdvd/RkzuxF4xN1XAV8Dvmlmq4FdBIEO4BzgRjNLAWngz919V6nKWi4+NESkqYl0aogdeTG5ubaZWCRGKpPSmDUREREpUMpuUNz9buDuon2fzHs9CLxtlOu+D3y/lGWrhMzQENF4nJ07XySdN+k1YhFqI7WkMinNBhUREZECJQ1rUsgTCSxey9YdzwHwwdkXseDoiwCojdbSn+pXy5qIiIgUUFgro2DMWpytu1cDcPbMszlu7hsAqI0EY9k0Zk1ERETy6dmgZZQZSmDxOFv3rgOgY+rS3LGaaA0A8Zi6QUVERGSYwloZeWIoCGt9W6h1p7V1fu5YbVQtayIiIjKSwloZeSJBJF7LtsQepmYMiwx//bluUI1ZExERkTwKa2Xi7sEEg9o4vekEk6zwq8+2rGk2qIiIiORTWCsTTyYBsHicXk/SSLTgeE0kGLOmblARERHJp7BWJp5IAGDxWvo8RVMYzrJyLWuaYCAiIiJ5FNbKJBvWIvE4fZ6mMR
yjlpUNa/XR+rKXTURERKqXwlqZ5FrWauP04TRFClvQshMM1LImIiIi+RTWyiSTCB7cbvE4veY0FoWy3DprmmAgIiIieRTWysSHgpa1dE2EITOaYg0Fx7Mta/UxdYOKiIjIMIW1Msl2gw5ZGoDG4rAWrcWw3KxQEREREVBYK5tMGNYGLPjZVNNYcLwuVkd9rB4zK3vZREREpHrpQe5l4uGYtUHvB6Cxtrng+NWLruZ1015X9nKJiIhIdVNYK5PsmLWBzAAATfFJBcfnTJrDnElzyl4uERERqW7qBi2T7Ji1fu8DoKl20r5OFxEREQHUslZSyW3bSDz3HPFFi3JLd/QNbAGgoa61kkUTERGRQ4TCWglt/ujH6Puf/6FhxQomveUtAPQ9/S2Y20ZTvK3CpRMREZFDgbpBS2ho/XogaGHLdoP2hitzNDa0V6pYIiIicghRWCsRdye1JejyTHd35yYY9NREiLhTr5Y1ERERGQeFtRJJ79mDDw0RaW4m09NDpj+YBbo3FqEx41i8qcIlFBERkUOBwtpB6P7Rj+h/9NHcds89v6D3N/8DkGtViy86FtxJ7diOR5xvtzXT4BmobRz1PUVERETyKawdhK2f/Rw7brklt739S19i+803A5DcuhWAukXHAZDaspmhGNS4c17/ANQ0jHxDERERkSKaDfoaZRIJ0rt3M/DkU3gmA2Ykt27FN24kMzhIaksQ1uLHLQIguWULySic0z/Ax3fuhli8ksUXERGRQ4Ra1l6jVNhyluntZWjtWjLd3fjgIKRSDD77LMmtWyAaJX70McH527eTiBkdqeBB7ugZoCIiIjIOCmuvUTIckwYw8MQTuW5PgIHHnyC1ZSuxKVOITQ6W6Ejv7iYRg450quxlFRERkUOXukFfo2zLGmZs/uQN1B2/NDziDNzxGdLxGcSmz+DWrvdzQXgkGYNp2ZY1ERERkXFQy9prlG1Zm/G5z1I7bx6DTzwJQP3MOAO76kis20584UJ+mHqZTHjN5naj4/wb4JofVqjUIiIicqgpaVgzs4vM7AUzW21m149yPG5m3w2P/97M5uUd+2i4/wUze1Mpy/lapLZuI9LcTMtll9Fy6SXBTnOa33A+qd4M6YSRnB5hWyyS+5JfmmlMX/BGOPq8ipVbREREDi0lC2tmFgVuAS4GlgBvN7MlRae9B9jt7scANwOfC69dAlwNLAUuAv4pfL+qkdq6hZrpHQDUL18OQKwuQ8M5F+fOWZN6sOCaF2cY0ybNKl8hRURE5JBXyjFrpwKr3X0tgJndAVwOPJt3zuXAp8LXdwFfNTML99/h7gngZTNbHb7f70pY3v168LYPs2fTRn777Pdpfuph0lOauP93X8QGhpgG9DdHeHC601ETw0ny09hq4pnh9dR2T00Tj2rJDhERERm/Uoa1mcD6vO0NwGljnePuKTPrBiaH+x8sunZm8Q3MbCWwEqCjo4Ourq6JKvuomm/5GYsSw9u/mNXD11/8BgA3HQUbpkb45//5CJ+YmSZRE+EXzY2cnIyxY0qMKTsSzCRd8jIeiXp7e/W9ViHVS3VSvVQf1Ul1qqZ6KWVYG20hMR/nOeO5Fne/DbgNYMWKFd7Z2XmARTwwz3/+r1j90ossmD8fgIvndHBxTfgVnjDIMZPn0tnUAucNQLKf7yZ3MnvGKdif1LFux/N8dep8JjXPKGkZj0RdXV2Uuu7lwKleqpPqpfqoTqpTNdVLKcPaBmB23vYsYNMY52wwsxjQAuwa57Vld9ybVrIl3sWS/VXe5JG7lkw6qyRlEhERkcNbKWeDPgwsNLP5ZlZLMGFgVdE5q4Brw9dXAve5u4f7rw5ni84HFgIPlbCsIiIiIlWpZC1r4Ri0DwD3AFHgdnd/xsxuBB5x91XA14BvhhMIdhEEOsLz7iSYjJAC3u/uWk1WREREjjglfYKBu98N3F2075N5rweBt41x7U3ATaUsn4iIiEi10xMMRERERKqYwpqIiIhIFVNYExEREa
liCmsiIiIiVUxhTURERKSKKayJiIiIVDGFNREREZEqZsEDAw59ZrYdeLUMt5oC7CjDfWT8VCfVSfVSnVQv1Ud1Up1KXS9z3X3qeE48bMJauZjZI+6+otLlkGGqk+qkeqlOqpfqozqpTtVUL+oGFREREaliCmsiIiIiVUxh7cDdVukCyAiqk+qkeqlOqpfqozqpTlVTLxqzJiIiIlLF1LImIiIiUsUU1sbJzC4ysxfMbLWZXV/p8hxJzOx2M9tmZk/n7Ws3s3vN7KXwZ1u438zsH8J6etLMTq5cyQ9fZjbbzO43s+fM7Bkz+1C4X/VSQWZWZ2YPmdkTYb18Otw/38x+H9bLd82sNtwfD7dXh8fnVbL8hzszi5rZY2b2k3Bb9VJBZvaKmT1lZo+b2SPhvqr8HaawNg5mFgVuAS4GlgBvN7MllS3VEeXrwEVF+64HfunuC4FfhtsQ1NHC8M9K4J/LVMYjTQr4a3dfDJwOvD/8f0L1UlkJ4Hx3Xw6cCFxkZqcDnwNuDutlN/Ce8Pz3ALvd/Rjg5vA8KZ0PAc/lbateKu88dz8xb4mOqvwdprA2PqcCq919rbsPAXcAl1e4TEcMd/8VsKto9+XAN8LX3wCuyNv/Hx54EGg1s6PKU9Ijh7tvdvc/hK/3EvwFNBPVS0WF329vuFkT/nHgfOCucH9xvWTr6y7gDWZmZSruEcXMZgGXAP8Wbhuql2pUlb/DFNbGZyawPm97Q7hPKqfD3TdDEByAaeF+1VWZhV00JwG/R/VScWFX2+PANuBeYA2wx91T4Sn5332uXsLj3cDk8pb4iPFl4P8AmXB7MqqXSnPgF2b2qJmtDPdV5e+wWLludIgb7V80mkZbnVRXZWRmTcD3gb909559/ONf9VIm7p4GTjSzVuAHwOLRTgt/ql7KwMwuBba5+6Nm1pndPcqpqpfyOsvdN5nZNOBeM3t+H+dWtE7UsjY+G4DZeduzgE0VKosEtmaboMOf28L9qqsyMbMagqD2LXf/r3C36qVKuPseoItgTGGrmWX/cZ7/3efqJTzewsghB3LwzgIuM7NXCIbRnE/Q0qZ6qSB33xT+3EbwD5tTqdLfYQpr4/MwsDCcuVMLXA2sqnCZjnSrgGvD19cCP8rb/65w5s7pQHe2SVsmTjh+5mvAc+7+pbxDqpcKMrOpYYsaZlYPXEAwnvB+4MrwtOJ6ydbXlcB9rsU3J5y7f9TdZ7n7PIK/P+5z93egeqkYM2s0s+bsa+BC4Gmq9HeYFsUdJzN7M8G/hKLA7e5+U4WLdMQws+8AncAUYCtwA/BD4E5gDrAOeJu77wpDxFcJZo/2A+9290cqUe7DmZm9Hvg18BTDY3A+RjBuTfVSIWa2jGBQdJTgH+N3uvuNZraAoEWnHXgMeKe7J8ysDvgmwZjDXcDV7r62MqU/MoTdoB9x90v///bu58XGKI7j+PujKflRfsXGgrBB+blTSvkHLEYKk6xt7KTYKEtLxZJQIrOxkllMzUIjwkJWVvYajSKNr8U9UyhXGszTve/X6j7fzj095956+txzbn39XhZP++zH2+UIcKeqLidZRwefYYY1SZKkDvMYVJIkqcMMa5IkSR1mWJMkSeoww5okSVKHGdYkSZI6zLAmSQuU5FCSh4t9H5IGk2FNkiSpwwxrkoZGkpNJppO8SHK9NT2fTXIlyfMkE0nWt7F7kjxJ8irJeJI1rb4tyeMkL9t7trbpVya5n+RNktvp0yhVkv6EYU3SUEiyHThGr3nzHmAOOAGsAJ5X1T5gkl6HDICbwLmq2kWvU8N8/TZwtap2AweA+ZYze4GzwA5gC71+kJK0YCO/HyJJA+EwsB942ja9ltFr0vwVuNvG3AIeJFkFrK6qyVa/AdxrvQQ3VtU4QFV9AmjzTVfVu3b9AtgMTP37ZUkadIY1ScMiwI2qOv9DMbn407h+Pfj6HW1+/u71HD5fJf0lHoNKGhYTwGiSDQBJ1ibZRO85ONrGHAemqmoGeJ/kYK
uPAZNV9QF4l+RIm2NpkuX/dRWSho6//CQNhap6neQC8CjJEuALcAb4COxM8gyYofe/NoBTwLUWxt4Cp1t9DLie5FKb4+h/XIakIZSqfjv+kjTYksxW1crFvg9J+hWPQSVJkjrMnTVJkm0wbjIAAAAvSURBVKQOc2dNkiSpwwxrkiRJHWZYkyRJ6jDDmiRJUocZ1iRJkjrMsCZJktRh3wBbFA6hyVrsfQAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "for key in hist_list[0].keys() :\n", " plt.figure(figsize=(10,5))\n", " plt.grid(True) \n", " plt.title('model with noise ' + key)\n", " plt.ylabel(key)\n", " plt.xlabel('epoch')\n", " for hist in hist_list :\n", " plt.plot(hist[key])\n", " plt.show()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.5" } }, "nbformat": 4, "nbformat_minor": 2 }