{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Equity premium prediction with python and Keras\n", "\n", "* An exploratory data analysis and a demo of classification using a feedforward neural network written in python using the [Keras](https://keras.io/) machine learning framework\n", "* Using [dataset](http://www.hec.unil.ch/agoyal/docs/PredictorData2016.xlsx) from [Prof. Amit Goyal](http://www.hec.unil.ch/agoyal/), we attempt to predict quarterly equity outperformance based on fundamental data like interest rates, valuation.\n", "* Train binary classifier to predict whether next quarter's equity premium will be above or below long term average\n" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using TensorFlow backend.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "6882\n" ] } ], "source": [ "import numpy as np\n", "import pandas as pd\n", "\n", "import sklearn\n", "from sklearn import preprocessing\n", "from sklearn.decomposition import PCA\n", "from sklearn.model_selection import train_test_split\n", "from sklearn.model_selection import StratifiedKFold\n", "from sklearn.model_selection import cross_val_score\n", "from sklearn.model_selection import GridSearchCV\n", "from sklearn.utils import class_weight\n", "\n", "from pprint import pprint\n", "import time\n", "import copy\n", "\n", "\n", "from keras.models import Sequential\n", "from keras.layers import Dense, Dropout\n", "from keras import regularizers\n", "from keras import optimizers\n", "from keras.optimizers import Adam\n", "from keras.wrappers.scikit_learn import KerasClassifier\n", "from keras.models import model_from_json\n", "\n", "import matplotlib.pyplot as plt\n", "\n", "#use a fixed seed for reproducibility\n", "#seed = np.random.randint(10000, size=1)[0]\n", "seed = 6882\n", "print(seed)\n", "np.random.seed(seed)\n" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Loading data...\n" ] }, { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", 
" \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
Unnamed: 0D12E12b.mtblAAABAAltyntisinfl...ltr.diffcorpr.diffsvar.difftbl.lagdiffAAA.lagdiffBAA.lagdifflty.lagdiffltr.lagdiffcorpr.lagdiffEqPremResponse
030.7701.1100.3746890.03170.0503130.0589130.0373130.076475-0.005713...0.012577-0.0044090.000268-0.0035-0.011438-0.012138-0.0128380.0301670.0052591
140.7901.1770.3632550.03270.0444300.0530300.0315300.0543640.000170...-0.020049-0.005564-0.0001610.00130.0167410.0159410.0150410.007711-0.0001990
250.8101.2450.3703000.03920.0455300.0553300.0324300.0502990.000170...-0.011019-0.0224530.004100-0.0013-0.029141-0.028641-0.028441-0.038657-0.0122301
360.8301.3120.3281660.04570.0401500.0499500.0286500.0279800.005950...-0.0142760.019109-0.003478-0.00390.0111610.0102610.0111610.0110380.0149181
470.8501.3800.2596670.04260.0517440.0616440.0396440.063069-0.005644...0.0348180.0027960.0027250.0049-0.006513-0.006713-0.0071130.012577-0.0044091
580.8801.4380.2723000.04600.0644430.0753430.0551430.079805-0.017443...-0.055116-0.0145600.0017030.0010-0.005882-0.005882-0.005782-0.020049-0.0055641
690.9101.4950.2535810.04800.0887440.1004440.0777440.116197-0.041044...0.0605790.0032300.0004220.00650.0011000.0023000.000900-0.011019-0.0224531
7100.9401.5520.2448680.04580.1133840.1265840.1028840.121390-0.065384...-0.0226830.005639-0.0003480.0065-0.005380-0.005380-0.003780-0.0142760.0191090
8110.9701.6100.3384580.03030.1256980.1384980.1129980.163522-0.078998...0.0539530.0173750.092115-0.00310.0115940.0116940.0109940.0348180.0027961
9120.9731.4500.3191190.02950.1513320.1624320.1386320.145496-0.105132...-0.0377580.002332-0.0955090.00340.0127000.0137000.015500-0.055116-0.0145600
10130.9751.2900.4033750.01890.1455500.1576500.1326500.131001-0.099850...0.001955-0.0018200.0164130.00200.0243010.0251010.0226010.0605790.0032300
11140.9781.1300.4455830.01770.1405960.1528960.1287960.127200-0.096396...-0.0053010.004981-0.008366-0.00220.0246400.0261400.025140-0.0226830.0056390
12150.9800.9700.5547450.01480.1516360.1735360.1394360.113886-0.106436...-0.011479-0.0351290.015494-0.01550.0123140.0119140.0101140.0539530.0173751
13160.9400.8800.5291250.01380.1468570.1673570.1361570.073969-0.102957...0.0059980.041738-0.015925-0.00080.0256340.0239340.025634-0.0377580.0023320
14170.9000.7900.6072710.00550.1546400.1846400.1429400.063746-0.111040...0.016975-0.0113990.024675-0.0106-0.005782-0.004782-0.0059820.001955-0.0018200
15180.8600.7000.9440020.00450.1522610.1874610.1420610.030352-0.106761...-0.054655-0.020497-0.001913-0.0012-0.004954-0.004754-0.003854-0.0053010.0049810
16190.8200.6101.1707320.02410.1570330.2080330.144533-0.012944-0.103833...-0.020700-0.0865520.040092-0.00290.0110400.0206400.010640-0.011479-0.0351290
17200.7400.5601.1858620.02250.1171250.1556250.104325-0.008416-0.067325...0.0946800.087251-0.017631-0.0010-0.004780-0.006180-0.0032800.0059980.0417380
18210.6600.5102.0284780.00340.0673490.1232490.047949-0.040689-0.013249...0.004268-0.0136780.019837-0.00830.0077830.0172830.0067830.016975-0.0113991
19220.5800.4601.2143660.00030.0378610.0669610.0227610.0007050.009139...0.0071630.0876170.026091-0.0010-0.0023790.002821-0.000879-0.054655-0.0204970
20230.5000.4101.4420840.0004-0.0089820.029318-0.023382-0.0050320.054882...-0.039776-0.050776-0.0342440.01960.0047720.0205720.002472-0.020700-0.0865520
21240.4850.4181.4765340.0134-0.0076650.034635-0.022365-0.0218830.054465...-0.016406-0.024622-0.010147-0.0016-0.039908-0.052408-0.0402080.0946800.0872511
22250.4700.4250.8335030.00070.0145800.0406800.0005800.0007620.030020...0.0339300.0644300.001897-0.0191-0.049776-0.032376-0.0563760.004268-0.0136780
23260.4550.4330.8679970.00040.0282860.0573860.015486-0.0004580.015314...-0.027139-0.044554-0.001937-0.0031-0.029488-0.056288-0.0251880.0071630.0876171
24270.4400.4400.8290260.00290.0148740.0473740.0034740.0061960.030126...-0.039893-0.019850-0.0161490.0001-0.046844-0.037644-0.046144-0.039776-0.0507761
25280.4430.4530.8025120.00240.0186920.0399920.0080920.0096060.022608...0.0892730.055872-0.0242690.01300.0013180.0053180.001018-0.016406-0.0246220
26290.4450.4650.8407310.00150.0316180.0529180.0212180.0059830.007682...-0.021640-0.024534-0.002422-0.01270.0222450.0060450.0229450.0339300.0644300
27300.4480.4780.8703640.00210.0099130.0360130.0013130.0194410.029687...-0.055071-0.0323370.006707-0.00030.0137060.0167060.014906-0.027139-0.0445541
28310.4500.4900.7737410.00230.0380470.0622470.0292470.0208150.000053...0.0557460.030302-0.0138710.0025-0.013412-0.010012-0.012012-0.039893-0.0198500
29320.4500.7300.8007540.00150.0293480.0546480.0200480.0120480.007352...-0.0016330.0063840.000474-0.00050.003819-0.0073810.0046190.0892730.0558721
..................................................................
32232528.71251.3700.2744910.01860.0687930.0826930.057993-0.048432-0.011993...-0.057695-0.013557-0.006426-0.00330.0057090.0051090.003609-0.028789-0.0186010
32332628.85445.9500.2871240.01130.0294830.0460830.017283-0.0503480.027017...0.056794-0.0604870.023719-0.00720.0460180.0454180.0433180.0707880.0444020
32432728.38714.8800.3549840.00030.0274960.0612960.007296-0.0247930.023004...0.1738070.3189110.082747-0.00890.0023240.0054240.0009240.009819-0.0059960
32532827.2556.8600.4461410.00210.0444890.0736890.024989-0.0377900.010511...-0.267683-0.357943-0.072139-0.01740.0109320.0131320.008932-0.025959-0.0308971
32632925.5947.5100.4018760.00180.0446940.0635940.031494-0.0225460.011406...-0.0197280.206711-0.0248920.00600.0002740.0003740.001374-0.057695-0.0135571
32733023.90112.5400.3495210.00120.0364140.0482140.025414-0.0000700.014886...0.1236060.028493-0.009543-0.0073-0.039310-0.036610-0.0407100.056794-0.0604871
32833122.40550.9700.3255310.00050.0259390.0370390.0191390.0105410.026661...-0.098336-0.132506-0.001029-0.0110-0.0019870.015213-0.0099870.1738070.3189111
32933221.90460.9300.3818780.00150.0174830.0274830.0105830.0138610.035217...0.0664470.039697-0.0018340.00180.0169940.0123940.017694-0.267683-0.3579430
33033322.03767.1000.4241770.00120.0105870.024087-0.0006130.0187130.038213...0.1121360.0659430.010495-0.00030.000205-0.0100950.006505-0.0197280.2067111
33133422.35371.8600.3843060.00150.0158390.0271390.0046390.0032270.029461...-0.066957-0.034212-0.007103-0.0006-0.008280-0.015380-0.0060800.1236060.0284931
33233522.72977.3500.3581000.00140.0238190.0346190.0150190.0125550.026381...-0.138471-0.079181-0.004741-0.0007-0.010475-0.011175-0.006275-0.098336-0.1325061
33333623.43181.3100.3542700.00100.0346350.0436350.0262350.0124450.016665...0.0729800.0178320.0006260.0010-0.008456-0.009556-0.0085560.0664470.0396970
33433724.34183.8700.3515700.00040.0299990.0375990.0204990.0117180.019901...0.0463190.039739-0.000411-0.0003-0.006895-0.003395-0.0111950.1121360.0659430
33533825.18186.9800.3999230.00010.0234390.0352390.0090390.0132010.017461...0.1745310.1059400.0241030.00030.0052520.0030520.005252-0.066957-0.0342121
33633926.42586.9500.3572330.00010.0245080.0377080.010008-0.0065050.014792...-0.191170-0.110793-0.010124-0.00010.0079800.0074800.010380-0.138471-0.0791811
33734027.35388.5400.3461820.00080.0223430.0347430.011443-0.0137920.017557...-0.069581-0.029160-0.015417-0.00040.0108160.0090160.0112160.0729800.0178320
33834128.32087.9200.3551040.00090.0244860.0382860.010586-0.0198320.011914...0.1417810.0730030.004134-0.0006-0.004636-0.006036-0.0057360.0463190.0397391
33934229.59086.5000.3403830.00110.0198800.0333800.007580-0.0182560.015020...-0.089890-0.029066-0.003236-0.0003-0.006559-0.002359-0.0114590.1745310.1059400
34034331.24786.5100.3490320.00070.0213750.0311750.008975-0.0122660.015125...-0.010356-0.0331430.0005390.00000.0010690.0024690.000969-0.191170-0.1107931
34134432.11287.7000.3461300.00090.0186400.0278400.007740-0.0095480.020660...-0.020759-0.028992-0.0012300.0007-0.002165-0.0029650.001435-0.069581-0.0291601
34234533.26690.9500.3384440.00050.0261110.0353110.0163110.0001410.016589...-0.026954-0.0328570.0023120.00010.0021440.003544-0.0008560.1417810.0730031
34334634.40494.3700.3335210.00020.0386370.0469370.0264370.0101420.007763...0.0360780.053978-0.0028710.0002-0.004607-0.004907-0.003007-0.089890-0.0290661
34434734.992100.2000.3044080.00070.0467910.0543910.0372910.012184-0.000591...-0.0124560.0154500.000754-0.00040.001495-0.0022050.001395-0.010356-0.0331430
34534836.228100.8500.3506160.00050.0423980.0491980.0316980.0164640.001402...0.1008720.0444480.0005980.0002-0.002735-0.003335-0.001235-0.020759-0.0289921
34634937.381103.1200.3429280.00040.0426930.0481930.0308930.013931-0.000193...-0.025411-0.019798-0.001226-0.00040.0074710.0074710.008571-0.026954-0.0328570
34735038.495105.9600.3385760.00020.0337370.0406370.0221370.0080250.007363...-0.018806-0.0272180.000016-0.00030.0125260.0116260.0101260.0360780.0539781
34835139.443102.3100.3237560.00030.0293090.0388090.0154090.0056420.008591...0.0651000.0492710.0028710.00050.0081540.0074540.010854-0.0124560.0154500
34935240.80799.2500.3074540.00030.0263580.0353580.012258-0.0069000.010042...-0.061386-0.027291-0.000327-0.0002-0.004393-0.005193-0.0055930.1008720.0444480
35035341.74294.9100.3101870.00020.0272530.0366530.011453-0.0081020.014647...-0.097891-0.104827-0.002247-0.00010.000295-0.001005-0.000805-0.025411-0.0197980
35135442.51090.6600.3356120.00020.0200690.0327690.003369-0.0129100.020631...0.1212290.1034600.008547-0.0002-0.008956-0.007556-0.008756-0.018806-0.0272181
\n", "

352 rows × 32 columns

\n", "
" ], "text/plain": [ " Unnamed: 0 D12 E12 b.m tbl AAA BAA \\\n", "0 3 0.770 1.110 0.374689 0.0317 0.050313 0.058913 \n", "1 4 0.790 1.177 0.363255 0.0327 0.044430 0.053030 \n", "2 5 0.810 1.245 0.370300 0.0392 0.045530 0.055330 \n", "3 6 0.830 1.312 0.328166 0.0457 0.040150 0.049950 \n", "4 7 0.850 1.380 0.259667 0.0426 0.051744 0.061644 \n", "5 8 0.880 1.438 0.272300 0.0460 0.064443 0.075343 \n", "6 9 0.910 1.495 0.253581 0.0480 0.088744 0.100444 \n", "7 10 0.940 1.552 0.244868 0.0458 0.113384 0.126584 \n", "8 11 0.970 1.610 0.338458 0.0303 0.125698 0.138498 \n", "9 12 0.973 1.450 0.319119 0.0295 0.151332 0.162432 \n", "10 13 0.975 1.290 0.403375 0.0189 0.145550 0.157650 \n", "11 14 0.978 1.130 0.445583 0.0177 0.140596 0.152896 \n", "12 15 0.980 0.970 0.554745 0.0148 0.151636 0.173536 \n", "13 16 0.940 0.880 0.529125 0.0138 0.146857 0.167357 \n", "14 17 0.900 0.790 0.607271 0.0055 0.154640 0.184640 \n", "15 18 0.860 0.700 0.944002 0.0045 0.152261 0.187461 \n", "16 19 0.820 0.610 1.170732 0.0241 0.157033 0.208033 \n", "17 20 0.740 0.560 1.185862 0.0225 0.117125 0.155625 \n", "18 21 0.660 0.510 2.028478 0.0034 0.067349 0.123249 \n", "19 22 0.580 0.460 1.214366 0.0003 0.037861 0.066961 \n", "20 23 0.500 0.410 1.442084 0.0004 -0.008982 0.029318 \n", "21 24 0.485 0.418 1.476534 0.0134 -0.007665 0.034635 \n", "22 25 0.470 0.425 0.833503 0.0007 0.014580 0.040680 \n", "23 26 0.455 0.433 0.867997 0.0004 0.028286 0.057386 \n", "24 27 0.440 0.440 0.829026 0.0029 0.014874 0.047374 \n", "25 28 0.443 0.453 0.802512 0.0024 0.018692 0.039992 \n", "26 29 0.445 0.465 0.840731 0.0015 0.031618 0.052918 \n", "27 30 0.448 0.478 0.870364 0.0021 0.009913 0.036013 \n", "28 31 0.450 0.490 0.773741 0.0023 0.038047 0.062247 \n", "29 32 0.450 0.730 0.800754 0.0015 0.029348 0.054648 \n", ".. ... ... ... ... ... ... ... 
\n", "322 325 28.712 51.370 0.274491 0.0186 0.068793 0.082693 \n", "323 326 28.854 45.950 0.287124 0.0113 0.029483 0.046083 \n", "324 327 28.387 14.880 0.354984 0.0003 0.027496 0.061296 \n", "325 328 27.255 6.860 0.446141 0.0021 0.044489 0.073689 \n", "326 329 25.594 7.510 0.401876 0.0018 0.044694 0.063594 \n", "327 330 23.901 12.540 0.349521 0.0012 0.036414 0.048214 \n", "328 331 22.405 50.970 0.325531 0.0005 0.025939 0.037039 \n", "329 332 21.904 60.930 0.381878 0.0015 0.017483 0.027483 \n", "330 333 22.037 67.100 0.424177 0.0012 0.010587 0.024087 \n", "331 334 22.353 71.860 0.384306 0.0015 0.015839 0.027139 \n", "332 335 22.729 77.350 0.358100 0.0014 0.023819 0.034619 \n", "333 336 23.431 81.310 0.354270 0.0010 0.034635 0.043635 \n", "334 337 24.341 83.870 0.351570 0.0004 0.029999 0.037599 \n", "335 338 25.181 86.980 0.399923 0.0001 0.023439 0.035239 \n", "336 339 26.425 86.950 0.357233 0.0001 0.024508 0.037708 \n", "337 340 27.353 88.540 0.346182 0.0008 0.022343 0.034743 \n", "338 341 28.320 87.920 0.355104 0.0009 0.024486 0.038286 \n", "339 342 29.590 86.500 0.340383 0.0011 0.019880 0.033380 \n", "340 343 31.247 86.510 0.349032 0.0007 0.021375 0.031175 \n", "341 344 32.112 87.700 0.346130 0.0009 0.018640 0.027840 \n", "342 345 33.266 90.950 0.338444 0.0005 0.026111 0.035311 \n", "343 346 34.404 94.370 0.333521 0.0002 0.038637 0.046937 \n", "344 347 34.992 100.200 0.304408 0.0007 0.046791 0.054391 \n", "345 348 36.228 100.850 0.350616 0.0005 0.042398 0.049198 \n", "346 349 37.381 103.120 0.342928 0.0004 0.042693 0.048193 \n", "347 350 38.495 105.960 0.338576 0.0002 0.033737 0.040637 \n", "348 351 39.443 102.310 0.323756 0.0003 0.029309 0.038809 \n", "349 352 40.807 99.250 0.307454 0.0003 0.026358 0.035358 \n", "350 353 41.742 94.910 0.310187 0.0002 0.027253 0.036653 \n", "351 354 42.510 90.660 0.335612 0.0002 0.020069 0.032769 \n", "\n", " lty ntis infl ... ltr.diff corpr.diff \\\n", "0 0.037313 0.076475 -0.005713 ... 0.012577 -0.004409 \n", "1 0.031530 0.054364 0.000170 ... -0.020049 -0.005564 \n", "2 0.032430 0.050299 0.000170 ... -0.011019 -0.022453 \n", "3 0.028650 0.027980 0.005950 ... -0.014276 0.019109 \n", "4 0.039644 0.063069 -0.005644 ... 0.034818 0.002796 \n", "5 0.055143 0.079805 -0.017443 ... -0.055116 -0.014560 \n", "6 0.077744 0.116197 -0.041044 ... 0.060579 0.003230 \n", "7 0.102884 0.121390 -0.065384 ... -0.022683 0.005639 \n", "8 0.112998 0.163522 -0.078998 ... 0.053953 0.017375 \n", "9 0.138632 0.145496 -0.105132 ... -0.037758 0.002332 \n", "10 0.132650 0.131001 -0.099850 ... 0.001955 -0.001820 \n", "11 0.128796 0.127200 -0.096396 ... -0.005301 0.004981 \n", "12 0.139436 0.113886 -0.106436 ... -0.011479 -0.035129 \n", "13 0.136157 0.073969 -0.102957 ... 0.005998 0.041738 \n", "14 0.142940 0.063746 -0.111040 ... 0.016975 -0.011399 \n", "15 0.142061 0.030352 -0.106761 ... -0.054655 -0.020497 \n", "16 0.144533 -0.012944 -0.103833 ... -0.020700 -0.086552 \n", "17 0.104325 -0.008416 -0.067325 ... 0.094680 0.087251 \n", "18 0.047949 -0.040689 -0.013249 ... 0.004268 -0.013678 \n", "19 0.022761 0.000705 0.009139 ... 0.007163 0.087617 \n", "20 -0.023382 -0.005032 0.054882 ... -0.039776 -0.050776 \n", "21 -0.022365 -0.021883 0.054465 ... -0.016406 -0.024622 \n", "22 0.000580 0.000762 0.030020 ... 0.033930 0.064430 \n", "23 0.015486 -0.000458 0.015314 ... -0.027139 -0.044554 \n", "24 0.003474 0.006196 0.030126 ... -0.039893 -0.019850 \n", "25 0.008092 0.009606 0.022608 ... 0.089273 0.055872 \n", "26 0.021218 0.005983 0.007682 ... 
-0.021640 -0.024534 \n", "27 0.001313 0.019441 0.029687 ... -0.055071 -0.032337 \n", "28 0.029247 0.020815 0.000053 ... 0.055746 0.030302 \n", "29 0.020048 0.012048 0.007352 ... -0.001633 0.006384 \n", ".. ... ... ... ... ... ... \n", "322 0.057993 -0.048432 -0.011993 ... -0.057695 -0.013557 \n", "323 0.017283 -0.050348 0.027017 ... 0.056794 -0.060487 \n", "324 0.007296 -0.024793 0.023004 ... 0.173807 0.318911 \n", "325 0.024989 -0.037790 0.010511 ... -0.267683 -0.357943 \n", "326 0.031494 -0.022546 0.011406 ... -0.019728 0.206711 \n", "327 0.025414 -0.000070 0.014886 ... 0.123606 0.028493 \n", "328 0.019139 0.010541 0.026661 ... -0.098336 -0.132506 \n", "329 0.010583 0.013861 0.035217 ... 0.066447 0.039697 \n", "330 -0.000613 0.018713 0.038213 ... 0.112136 0.065943 \n", "331 0.004639 0.003227 0.029461 ... -0.066957 -0.034212 \n", "332 0.015019 0.012555 0.026381 ... -0.138471 -0.079181 \n", "333 0.026235 0.012445 0.016665 ... 0.072980 0.017832 \n", "334 0.020499 0.011718 0.019901 ... 0.046319 0.039739 \n", "335 0.009039 0.013201 0.017461 ... 0.174531 0.105940 \n", "336 0.010008 -0.006505 0.014792 ... -0.191170 -0.110793 \n", "337 0.011443 -0.013792 0.017557 ... -0.069581 -0.029160 \n", "338 0.010586 -0.019832 0.011914 ... 0.141781 0.073003 \n", "339 0.007580 -0.018256 0.015020 ... -0.089890 -0.029066 \n", "340 0.008975 -0.012266 0.015125 ... -0.010356 -0.033143 \n", "341 0.007740 -0.009548 0.020660 ... -0.020759 -0.028992 \n", "342 0.016311 0.000141 0.016589 ... -0.026954 -0.032857 \n", "343 0.026437 0.010142 0.007763 ... 0.036078 0.053978 \n", "344 0.037291 0.012184 -0.000591 ... -0.012456 0.015450 \n", "345 0.031698 0.016464 0.001402 ... 0.100872 0.044448 \n", "346 0.030893 0.013931 -0.000193 ... -0.025411 -0.019798 \n", "347 0.022137 0.008025 0.007363 ... -0.018806 -0.027218 \n", "348 0.015409 0.005642 0.008591 ... 0.065100 0.049271 \n", "349 0.012258 -0.006900 0.010042 ... -0.061386 -0.027291 \n", "350 0.011453 -0.008102 0.014647 ... -0.097891 -0.104827 \n", "351 0.003369 -0.012910 0.020631 ... 
0.121229 0.103460 \n", "\n", " svar.diff tbl.lagdiff AAA.lagdiff BAA.lagdiff lty.lagdiff \\\n", "0 0.000268 -0.0035 -0.011438 -0.012138 -0.012838 \n", "1 -0.000161 0.0013 0.016741 0.015941 0.015041 \n", "2 0.004100 -0.0013 -0.029141 -0.028641 -0.028441 \n", "3 -0.003478 -0.0039 0.011161 0.010261 0.011161 \n", "4 0.002725 0.0049 -0.006513 -0.006713 -0.007113 \n", "5 0.001703 0.0010 -0.005882 -0.005882 -0.005782 \n", "6 0.000422 0.0065 0.001100 0.002300 0.000900 \n", "7 -0.000348 0.0065 -0.005380 -0.005380 -0.003780 \n", "8 0.092115 -0.0031 0.011594 0.011694 0.010994 \n", "9 -0.095509 0.0034 0.012700 0.013700 0.015500 \n", "10 0.016413 0.0020 0.024301 0.025101 0.022601 \n", "11 -0.008366 -0.0022 0.024640 0.026140 0.025140 \n", "12 0.015494 -0.0155 0.012314 0.011914 0.010114 \n", "13 -0.015925 -0.0008 0.025634 0.023934 0.025634 \n", "14 0.024675 -0.0106 -0.005782 -0.004782 -0.005982 \n", "15 -0.001913 -0.0012 -0.004954 -0.004754 -0.003854 \n", "16 0.040092 -0.0029 0.011040 0.020640 0.010640 \n", "17 -0.017631 -0.0010 -0.004780 -0.006180 -0.003280 \n", "18 0.019837 -0.0083 0.007783 0.017283 0.006783 \n", "19 0.026091 -0.0010 -0.002379 0.002821 -0.000879 \n", "20 -0.034244 0.0196 0.004772 0.020572 0.002472 \n", "21 -0.010147 -0.0016 -0.039908 -0.052408 -0.040208 \n", "22 0.001897 -0.0191 -0.049776 -0.032376 -0.056376 \n", "23 -0.001937 -0.0031 -0.029488 -0.056288 -0.025188 \n", "24 -0.016149 0.0001 -0.046844 -0.037644 -0.046144 \n", "25 -0.024269 0.0130 0.001318 0.005318 0.001018 \n", "26 -0.002422 -0.0127 0.022245 0.006045 0.022945 \n", "27 0.006707 -0.0003 0.013706 0.016706 0.014906 \n", "28 -0.013871 0.0025 -0.013412 -0.010012 -0.012012 \n", "29 0.000474 -0.0005 0.003819 -0.007381 0.004619 \n", ".. ... ... ... ... ... \n", "322 -0.006426 -0.0033 0.005709 0.005109 0.003609 \n", "323 0.023719 -0.0072 0.046018 0.045418 0.043318 \n", "324 0.082747 -0.0089 0.002324 0.005424 0.000924 \n", "325 -0.072139 -0.0174 0.010932 0.013132 0.008932 \n", "326 -0.024892 0.0060 0.000274 0.000374 0.001374 \n", "327 -0.009543 -0.0073 -0.039310 -0.036610 -0.040710 \n", "328 -0.001029 -0.0110 -0.001987 0.015213 -0.009987 \n", "329 -0.001834 0.0018 0.016994 0.012394 0.017694 \n", "330 0.010495 -0.0003 0.000205 -0.010095 0.006505 \n", "331 -0.007103 -0.0006 -0.008280 -0.015380 -0.006080 \n", "332 -0.004741 -0.0007 -0.010475 -0.011175 -0.006275 \n", "333 0.000626 0.0010 -0.008456 -0.009556 -0.008556 \n", "334 -0.000411 -0.0003 -0.006895 -0.003395 -0.011195 \n", "335 0.024103 0.0003 0.005252 0.003052 0.005252 \n", "336 -0.010124 -0.0001 0.007980 0.007480 0.010380 \n", "337 -0.015417 -0.0004 0.010816 0.009016 0.011216 \n", "338 0.004134 -0.0006 -0.004636 -0.006036 -0.005736 \n", "339 -0.003236 -0.0003 -0.006559 -0.002359 -0.011459 \n", "340 0.000539 0.0000 0.001069 0.002469 0.000969 \n", "341 -0.001230 0.0007 -0.002165 -0.002965 0.001435 \n", "342 0.002312 0.0001 0.002144 0.003544 -0.000856 \n", "343 -0.002871 0.0002 -0.004607 -0.004907 -0.003007 \n", "344 0.000754 -0.0004 0.001495 -0.002205 0.001395 \n", "345 0.000598 0.0002 -0.002735 -0.003335 -0.001235 \n", "346 -0.001226 -0.0004 0.007471 0.007471 0.008571 \n", "347 0.000016 -0.0003 0.012526 0.011626 0.010126 \n", "348 0.002871 0.0005 0.008154 0.007454 0.010854 \n", "349 -0.000327 -0.0002 -0.004393 -0.005193 -0.005593 \n", "350 -0.002247 -0.0001 0.000295 -0.001005 -0.000805 \n", "351 0.008547 -0.0002 -0.008956 -0.007556 -0.008756 \n", "\n", " ltr.lagdiff corpr.lagdiff EqPremResponse \n", "0 0.030167 0.005259 1 \n", "1 0.007711 -0.000199 0 \n", "2 -0.038657 -0.012230 
1 \n", "3 0.011038 0.014918 1 \n", "4 0.012577 -0.004409 1 \n", "5 -0.020049 -0.005564 1 \n", "6 -0.011019 -0.022453 1 \n", "7 -0.014276 0.019109 0 \n", "8 0.034818 0.002796 1 \n", "9 -0.055116 -0.014560 0 \n", "10 0.060579 0.003230 0 \n", "11 -0.022683 0.005639 0 \n", "12 0.053953 0.017375 1 \n", "13 -0.037758 0.002332 0 \n", "14 0.001955 -0.001820 0 \n", "15 -0.005301 0.004981 0 \n", "16 -0.011479 -0.035129 0 \n", "17 0.005998 0.041738 0 \n", "18 0.016975 -0.011399 1 \n", "19 -0.054655 -0.020497 0 \n", "20 -0.020700 -0.086552 0 \n", "21 0.094680 0.087251 1 \n", "22 0.004268 -0.013678 0 \n", "23 0.007163 0.087617 1 \n", "24 -0.039776 -0.050776 1 \n", "25 -0.016406 -0.024622 0 \n", "26 0.033930 0.064430 0 \n", "27 -0.027139 -0.044554 1 \n", "28 -0.039893 -0.019850 0 \n", "29 0.089273 0.055872 1 \n", ".. ... ... ... \n", "322 -0.028789 -0.018601 0 \n", "323 0.070788 0.044402 0 \n", "324 0.009819 -0.005996 0 \n", "325 -0.025959 -0.030897 1 \n", "326 -0.057695 -0.013557 1 \n", "327 0.056794 -0.060487 1 \n", "328 0.173807 0.318911 1 \n", "329 -0.267683 -0.357943 0 \n", "330 -0.019728 0.206711 1 \n", "331 0.123606 0.028493 1 \n", "332 -0.098336 -0.132506 1 \n", "333 0.066447 0.039697 0 \n", "334 0.112136 0.065943 0 \n", "335 -0.066957 -0.034212 1 \n", "336 -0.138471 -0.079181 1 \n", "337 0.072980 0.017832 0 \n", "338 0.046319 0.039739 1 \n", "339 0.174531 0.105940 0 \n", "340 -0.191170 -0.110793 1 \n", "341 -0.069581 -0.029160 1 \n", "342 0.141781 0.073003 1 \n", "343 -0.089890 -0.029066 1 \n", "344 -0.010356 -0.033143 0 \n", "345 -0.020759 -0.028992 1 \n", "346 -0.026954 -0.032857 0 \n", "347 0.036078 0.053978 1 \n", "348 -0.012456 0.015450 0 \n", "349 0.100872 0.044448 0 \n", "350 -0.025411 -0.019798 0 \n", "351 -0.018806 -0.027218 1 \n", "\n", "[352 rows x 32 columns]" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "############################################################\n", "# 1. load data\n", "############################################################\n", "\n", "# load dataset\n", "print(\"Loading data...\")\n", "dataframe = pd.read_csv(\"EqPremClass.csv\")\n", "dataframe" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# pandas dataframe -> numpy ndarray\n", "dataset = dataframe.values\n", "del dataframe" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Observations: 352\n", "Features: 31\n", "Histogram: check all 0s and 1s, no -1s etc.\n", "(array([176, 0, 0, 0, 0, 0, 0, 0, 0, 176]),\n", " array([ 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ]))\n" ] }, { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", 
" \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
0123456789...20212223242526272829
0-0.767621-0.689589-0.774685-0.0997090.5193190.4282910.3794382.381861-0.9103260.266007...0.4216620.185423-0.0712730.018805-0.398291-0.782983-0.821576-0.8705200.4519250.085675
1-0.765478-0.686961-0.817516-0.0676740.3807850.2974350.2402221.486580-0.762275-0.164262...0.434384-0.299704-0.089852-0.0141610.1634391.1632711.0967381.0356510.114856-0.002960
2-0.763335-0.684293-0.7911250.1405540.4066910.3485990.2618901.321980-0.762275-0.400750...-0.006831-0.165443-0.3616330.313814-0.140831-2.005689-1.949039-1.937349-0.581140-0.198327
3-0.761192-0.681664-0.9489620.3487820.2799790.2289110.1708750.418281-0.616790-0.707137...0.426733-0.2138730.307189-0.269523-0.4451010.7778540.7086680.7703420.1647900.242523
4-0.759049-0.678996-1.2055630.2494730.5530260.4890480.4355641.839043-0.9085950.040092...-0.8764490.5161280.0446730.2079740.584736-0.442798-0.450921-0.4790580.187893-0.071330
5-0.755834-0.676721-1.1582360.3583930.8521100.7938000.8087292.516687-1.205575-1.142770...-0.891871-0.821140-0.2346200.1292860.128331-0.399259-0.394191-0.388096-0.301823-0.090078
6-0.752620-0.674484-1.2283620.4224631.4244131.3521771.3528623.990253-1.7995750.157329...-1.7770350.8991850.0516570.0307030.7719790.0829910.1648100.068793-0.166292-0.364338
7-0.749405-0.672248-1.2609990.3519862.0047101.9336771.9581354.200511-2.412188-0.329478...-1.832500-0.3388770.090419-0.0286190.771979-0.364587-0.359895-0.251214-0.2151800.310585
8-0.746191-0.669973-0.910407-0.1445582.2947172.1987102.2016405.906464-2.7548370.828413...-1.0279730.8006560.2792777.089744-0.3514800.8077740.8065830.7589390.5217260.045674
9-0.745869-0.676250-0.982851-0.1701872.8984242.7311372.8188065.176581-3.4126020.018082...-1.967060-0.5630310.037204-7.3546590.4091950.8841350.9436011.066998-0.828192-0.236167
10-0.745655-0.682527-0.667222-0.5097592.7622472.6247532.6747774.589664-3.2796540.060034...0.3893720.027475-0.0296101.2617900.2453581.6853921.7225051.5525220.9084070.052722
11-0.745334-0.688805-0.509109-0.5482012.6455802.5190022.5819934.435772-3.192725-0.053735...0.252228-0.0804170.079835-0.645896-0.2461561.7088381.7935191.726152-0.3413660.091837
12-0.745119-0.695082-0.100182-0.6411032.9055822.9781492.8381613.896661-3.445420-0.300079...-0.759896-0.172271-0.5656111.191043-1.8026140.8575120.8216220.6987800.8089450.282419
13-0.749405-0.698613-0.196157-0.6731382.7930122.8406752.7591952.280408-3.357836-0.171356...0.2541810.0875950.671341-1.227832-0.0823181.7774901.6428091.759927-0.5676400.038137
14-0.753691-0.7021440.096583-0.9390292.9763173.2251522.9225111.866470-3.5612850.192956...-0.6131360.250823-0.1837561.897846-1.229182-0.392345-0.319034-0.4017630.028451-0.029287
15-0.757977-0.7056751.357994-0.9710652.9202993.2879152.9013580.514341-3.453597-0.979999...0.314093-0.814276-0.330166-0.149046-0.129129-0.335127-0.317092-0.256237-0.0804610.081157
16-0.762263-0.7092062.207338-0.3431763.0326853.7455512.960875-1.238768-3.379904-1.424245...0.212785-0.309388-1.3931253.084688-0.3280740.7695171.4177600.734741-0.173185-0.570177
17-0.770835-0.7111672.264018-0.3944322.0928172.5797101.992831-1.055398-2.4610430.607710...2.7315021.4062531.403731-1.359133-0.105723-0.323112-0.414516-0.2169940.0891390.678059
18-0.779407-0.7131295.420504-1.0063030.9205481.8594910.635526-2.362157-1.1000150.699302...4.0492190.061868-0.2204211.525357-0.9600200.5445921.1884430.4710540.253912-0.184839
19-0.787979-0.7150912.370793-1.1056120.2260780.6073360.029099-0.686104-0.5365350.853027...1.6724200.1049171.4096252.006815-0.105723-0.1572660.200431-0.052815-0.821262-0.332584
20-0.796551-0.7170523.223840-1.102409-0.877133-0.230066-1.081854-0.9183810.614780-0.000621...3.424244-0.593043-0.817408-2.6381092.3050320.3366091.4131190.176278-0.311599-1.405241
21-0.798158-0.7167383.352892-0.685952-0.846100-0.111771-1.057352-1.6006950.604266-0.352722...-0.038162-0.245546-0.396539-0.782964-0.175939-2.749304-3.572722-2.7418591.4202721.417131
22-0.799766-0.7164640.944061-1.092798-0.3222170.022696-0.504936-0.683813-0.0109780.375464...-1.8403430.5029341.0365070.144224-2.223911-3.430859-2.204179-3.8473070.063170-0.221838
23-0.801373-0.7161501.073275-1.1024090.0005690.394327-0.146063-0.733209-0.381107-0.206980...-1.109868-0.405139-0.717279-0.150958-0.351480-2.029634-3.837804-1.7149120.1066251.423078
24-0.802980-0.7158750.927289-1.022321-0.3153030.171598-0.435271-0.463776-0.008299-1.063143...1.104188-0.594786-0.319750-1.2450520.023006-3.228336-2.564060-3.147701-0.597937-0.824272
25-0.802659-0.7153650.827967-1.038338-0.2253670.007400-0.324069-0.325683-0.1975380.852764...-0.5707901.3258490.898785-1.8701901.5326540.0980280.3709740.076841-0.247152-0.399563
26-0.802444-0.7148950.971137-1.0671700.0790370.294931-0.008063-0.472403-0.5731920.388347...-1.126331-0.323363-0.395126-0.188237-1.4749391.5433930.4206411.5760450.5084071.046556
27-0.802123-0.7143841.082145-1.047949-0.432139-0.081133-0.4872980.072535-0.019349-0.793544...1.643699-0.820468-0.5206880.514511-0.0238040.9536421.1489901.026410-0.408255-0.723228
28-0.801908-0.7139140.720188-1.0415420.2304530.5024650.1852500.128152-0.7652120.402834...-2.2296070.8273220.487300-1.0696880.303871-0.919330-0.676340-0.814053-0.599696-0.322074
29-0.801908-0.7044980.821381-1.0671700.0255770.333416-0.036231-0.226810-0.5814980.367790...0.540662-0.0258720.1024140.034693-0.0472100.270771-0.4965890.3230571.3391060.907578
..................................................................
3222.2263421.282266-1.150029-0.5193690.9545470.9572950.877336-2.675697-1.068394-0.823140...0.100116-0.859490-0.218477-0.496480-0.3748850.4012930.3566870.253986-0.433020-0.301789
3232.2415571.069623-1.102708-0.7532260.0287600.142886-0.102798-2.753261-0.0865570.395719...2.9191780.842898-0.9736821.824228-0.8312903.1853363.1105352.9690101.0616400.721314
3242.191518-0.149349-0.848498-1.105612-0.0180450.481298-0.343254-1.718515-0.1875504.125837...-0.3078022.5828295.1316486.368563-1.0302360.1675470.3782530.0704510.146491-0.097099
3252.070226-0.463999-0.507021-1.0479490.3821750.7570050.082742-2.244807-0.502005-1.618973...-0.943951-3.981896-5.760390-5.555482-2.0249650.7620350.9048070.617938-0.390538-0.501455
3261.892251-0.438497-0.672838-1.0575590.3870010.5324360.239355-1.627570-0.479477-2.042352...0.060305-0.2949313.326101-1.9181370.7134650.0259530.0332410.101213-0.866904-0.219877
3271.710847-0.241155-0.868962-1.0767800.1919930.1902940.092966-0.717481-0.3918830.610392...0.2542131.8363680.458202-0.736489-0.842993-2.708011-2.493444-2.7761910.851588-0.981971
3281.5505521.266573-0.958833-1.099205-0.054703-0.058300-0.058112-0.287832-0.095519-1.500013...0.876374-1.463790-2.132624-0.081000-1.275993-0.1302461.046972-0.6756062.6079775.179055
3291.4968701.657335-0.747752-1.067170-0.253856-0.270884-0.264112-0.1534250.119832-0.073969...0.6349450.9864460.638493-0.1430250.2219521.1807320.8544011.217030-4.018846-5.812346
3301.5111211.899403-0.589300-1.076780-0.416249-0.346417-0.5336520.0430600.1952232.332615...0.2178451.6658161.0608500.806166-0.0238040.021172-0.6819950.452017-0.2970043.357037
3311.5449802.086153-0.738658-1.067170-0.292571-0.278534-0.407217-0.583980-0.0250420.895628...-0.663253-0.997211-0.550861-0.548614-0.058912-0.564876-1.043069-0.4084661.8544550.462975
3321.5852682.301542-0.836825-1.070373-0.104639-0.112142-0.157313-0.206271-0.102558-2.076139...-0.237837-2.060587-1.274514-0.366757-0.070615-0.716462-0.755776-0.421782-1.476920-2.151486
3331.6604872.456905-0.851174-1.0831870.1500890.0884250.112725-0.210724-0.347100-0.509907...-0.7356001.0835760.2866390.0463910.128331-0.577031-0.645185-0.5777550.9964930.644911
3341.7579932.557342-0.861288-1.1024090.040908-0.045848-0.025374-0.240175-0.2656540.484146...0.2358870.6871400.639166-0.033410-0.023804-0.469228-0.224289-0.7582001.6822891.071121
3351.8479982.679357-0.680156-1.112019-0.113572-0.098335-0.301270-0.180138-0.3270814.229796...-0.1898902.5935911.7044921.8538090.0464120.3697250.2161530.366318-1.005928-0.555292
3361.9812922.678180-0.840076-1.112019-0.088400-0.043414-0.277945-0.978027-0.3942520.127043...-0.207010-2.844193-1.783220-0.781223-0.0003990.5581600.5186850.716952-2.079362-1.285547
3372.0807262.740561-0.881472-1.089594-0.139400-0.109383-0.243408-1.273100-0.324648-1.366243...0.200599-1.036218-0.469565-1.188721-0.0355070.7540520.6236380.7741291.0945420.289848
3382.1843392.716236-0.848050-1.086391-0.088912-0.030550-0.264022-1.517643-0.4666951.676559...-0.4301502.1066211.1744580.316439-0.058912-0.313174-0.404685-0.3849240.6943560.645590
3392.3204192.660525-0.903196-1.079984-0.197401-0.139699-0.336408-1.453840-0.388506-0.252597...0.226182-1.338211-0.468046-0.250944-0.023804-0.446022-0.153512-0.7762512.6188401.720635
3402.4979652.660918-0.870794-1.092798-0.162182-0.188740-0.302811-1.211290-0.385875-0.474839...0.001011-0.155572-0.5336600.0397110.0113040.0808390.1763440.073499-2.870381-1.798894
3412.5906492.707605-0.881667-1.086391-0.226600-0.262935-0.332552-1.101262-0.246559-0.920344...0.408351-0.310261-0.466859-0.0964830.093223-0.142546-0.1949170.105339-1.045304-0.473255
3422.7142992.835113-0.910458-1.099205-0.050644-0.096732-0.126189-0.708929-0.349029-1.498801...-0.312205-0.402376-0.5290480.1761830.0230060.1550820.249782-0.0512852.1272641.185766
3432.8362352.969290-0.928899-1.1088160.2443450.1618840.117594-0.303996-0.571158-0.724516...-0.6688070.5348740.868312-0.2228420.034709-0.311144-0.327527-0.198309-1.350154-0.471722
3442.8992393.198019-1.037959-1.0927980.4363710.3276950.378906-0.221332-0.781410-0.991838...-0.633410-0.1868060.2483060.056213-0.0355070.110304-0.1429310.102668-0.156328-0.537935
3453.0316753.223520-0.864862-1.0992050.3329200.2121810.244257-0.048003-0.7312561.173004...0.1426331.4983240.7149450.0442270.034709-0.181899-0.220180-0.077201-0.312480-0.470525
3463.1552183.312579-0.893660-1.1024090.3398660.1898230.224874-0.150557-0.7713990.627648...-0.126462-0.379442-0.318908-0.096212-0.0355070.5230380.5181030.593299-0.405466-0.533281
3473.2745823.424001-0.909964-1.1088160.1289510.0217430.014073-0.389726-0.5812320.224044...0.559895-0.281229-0.438308-0.000533-0.0238040.8721230.8019150.6995690.5406490.876827
3483.3761603.280801-0.965482-1.1056120.024667-0.018923-0.147911-0.486183-0.5503231.621165...0.0852810.9664070.7925650.2192060.0698170.5701670.5168980.749350-0.1878570.251165
3493.5223113.160747-1.026549-1.105612-0.044831-0.095691-0.223774-0.994028-0.5138040.303738...0.102002-0.914373-0.439485-0.026939-0.012102-0.296372-0.347074-0.3751271.5132130.722060
3503.6224962.990476-1.016312-1.108816-0.023756-0.066886-0.243158-1.042688-0.397899-1.797134...0.338581-1.457185-1.687210-0.174794-0.0003990.027389-0.060984-0.047786-0.382316-0.321223
3513.7047862.823735-0.921068-1.108816-0.192953-0.153295-0.437796-1.237392-0.2472800.804591...0.4420331.8010201.6645710.656181-0.012102-0.611525-0.508509-0.591390-0.283173-0.441713
\n", "

352 rows × 30 columns

\n", "
" ], "text/plain": [ " 0 1 2 3 4 5 6 \\\n", "0 -0.767621 -0.689589 -0.774685 -0.099709 0.519319 0.428291 0.379438 \n", "1 -0.765478 -0.686961 -0.817516 -0.067674 0.380785 0.297435 0.240222 \n", "2 -0.763335 -0.684293 -0.791125 0.140554 0.406691 0.348599 0.261890 \n", "3 -0.761192 -0.681664 -0.948962 0.348782 0.279979 0.228911 0.170875 \n", "4 -0.759049 -0.678996 -1.205563 0.249473 0.553026 0.489048 0.435564 \n", "5 -0.755834 -0.676721 -1.158236 0.358393 0.852110 0.793800 0.808729 \n", "6 -0.752620 -0.674484 -1.228362 0.422463 1.424413 1.352177 1.352862 \n", "7 -0.749405 -0.672248 -1.260999 0.351986 2.004710 1.933677 1.958135 \n", "8 -0.746191 -0.669973 -0.910407 -0.144558 2.294717 2.198710 2.201640 \n", "9 -0.745869 -0.676250 -0.982851 -0.170187 2.898424 2.731137 2.818806 \n", "10 -0.745655 -0.682527 -0.667222 -0.509759 2.762247 2.624753 2.674777 \n", "11 -0.745334 -0.688805 -0.509109 -0.548201 2.645580 2.519002 2.581993 \n", "12 -0.745119 -0.695082 -0.100182 -0.641103 2.905582 2.978149 2.838161 \n", "13 -0.749405 -0.698613 -0.196157 -0.673138 2.793012 2.840675 2.759195 \n", "14 -0.753691 -0.702144 0.096583 -0.939029 2.976317 3.225152 2.922511 \n", "15 -0.757977 -0.705675 1.357994 -0.971065 2.920299 3.287915 2.901358 \n", "16 -0.762263 -0.709206 2.207338 -0.343176 3.032685 3.745551 2.960875 \n", "17 -0.770835 -0.711167 2.264018 -0.394432 2.092817 2.579710 1.992831 \n", "18 -0.779407 -0.713129 5.420504 -1.006303 0.920548 1.859491 0.635526 \n", "19 -0.787979 -0.715091 2.370793 -1.105612 0.226078 0.607336 0.029099 \n", "20 -0.796551 -0.717052 3.223840 -1.102409 -0.877133 -0.230066 -1.081854 \n", "21 -0.798158 -0.716738 3.352892 -0.685952 -0.846100 -0.111771 -1.057352 \n", "22 -0.799766 -0.716464 0.944061 -1.092798 -0.322217 0.022696 -0.504936 \n", "23 -0.801373 -0.716150 1.073275 -1.102409 0.000569 0.394327 -0.146063 \n", "24 -0.802980 -0.715875 0.927289 -1.022321 -0.315303 0.171598 -0.435271 \n", "25 -0.802659 -0.715365 0.827967 -1.038338 -0.225367 0.007400 -0.324069 \n", "26 -0.802444 -0.714895 0.971137 -1.067170 0.079037 0.294931 -0.008063 \n", "27 -0.802123 -0.714384 1.082145 -1.047949 -0.432139 -0.081133 -0.487298 \n", "28 -0.801908 -0.713914 0.720188 -1.041542 0.230453 0.502465 0.185250 \n", "29 -0.801908 -0.704498 0.821381 -1.067170 0.025577 0.333416 -0.036231 \n", ".. ... ... ... ... ... ... ... 
\n", "322 2.226342 1.282266 -1.150029 -0.519369 0.954547 0.957295 0.877336 \n", "323 2.241557 1.069623 -1.102708 -0.753226 0.028760 0.142886 -0.102798 \n", "324 2.191518 -0.149349 -0.848498 -1.105612 -0.018045 0.481298 -0.343254 \n", "325 2.070226 -0.463999 -0.507021 -1.047949 0.382175 0.757005 0.082742 \n", "326 1.892251 -0.438497 -0.672838 -1.057559 0.387001 0.532436 0.239355 \n", "327 1.710847 -0.241155 -0.868962 -1.076780 0.191993 0.190294 0.092966 \n", "328 1.550552 1.266573 -0.958833 -1.099205 -0.054703 -0.058300 -0.058112 \n", "329 1.496870 1.657335 -0.747752 -1.067170 -0.253856 -0.270884 -0.264112 \n", "330 1.511121 1.899403 -0.589300 -1.076780 -0.416249 -0.346417 -0.533652 \n", "331 1.544980 2.086153 -0.738658 -1.067170 -0.292571 -0.278534 -0.407217 \n", "332 1.585268 2.301542 -0.836825 -1.070373 -0.104639 -0.112142 -0.157313 \n", "333 1.660487 2.456905 -0.851174 -1.083187 0.150089 0.088425 0.112725 \n", "334 1.757993 2.557342 -0.861288 -1.102409 0.040908 -0.045848 -0.025374 \n", "335 1.847998 2.679357 -0.680156 -1.112019 -0.113572 -0.098335 -0.301270 \n", "336 1.981292 2.678180 -0.840076 -1.112019 -0.088400 -0.043414 -0.277945 \n", "337 2.080726 2.740561 -0.881472 -1.089594 -0.139400 -0.109383 -0.243408 \n", "338 2.184339 2.716236 -0.848050 -1.086391 -0.088912 -0.030550 -0.264022 \n", "339 2.320419 2.660525 -0.903196 -1.079984 -0.197401 -0.139699 -0.336408 \n", "340 2.497965 2.660918 -0.870794 -1.092798 -0.162182 -0.188740 -0.302811 \n", "341 2.590649 2.707605 -0.881667 -1.086391 -0.226600 -0.262935 -0.332552 \n", "342 2.714299 2.835113 -0.910458 -1.099205 -0.050644 -0.096732 -0.126189 \n", "343 2.836235 2.969290 -0.928899 -1.108816 0.244345 0.161884 0.117594 \n", "344 2.899239 3.198019 -1.037959 -1.092798 0.436371 0.327695 0.378906 \n", "345 3.031675 3.223520 -0.864862 -1.099205 0.332920 0.212181 0.244257 \n", "346 3.155218 3.312579 -0.893660 -1.102409 0.339866 0.189823 0.224874 \n", "347 3.274582 3.424001 -0.909964 -1.108816 0.128951 0.021743 0.014073 \n", "348 3.376160 3.280801 -0.965482 -1.105612 0.024667 -0.018923 -0.147911 \n", "349 3.522311 3.160747 -1.026549 -1.105612 -0.044831 -0.095691 -0.223774 \n", "350 3.622496 2.990476 -1.016312 -1.108816 -0.023756 -0.066886 -0.243158 \n", "351 3.704786 2.823735 -0.921068 -1.108816 -0.192953 -0.153295 -0.437796 \n", "\n", " 7 8 9 ... 20 21 22 \\\n", "0 2.381861 -0.910326 0.266007 ... 0.421662 0.185423 -0.071273 \n", "1 1.486580 -0.762275 -0.164262 ... 0.434384 -0.299704 -0.089852 \n", "2 1.321980 -0.762275 -0.400750 ... -0.006831 -0.165443 -0.361633 \n", "3 0.418281 -0.616790 -0.707137 ... 0.426733 -0.213873 0.307189 \n", "4 1.839043 -0.908595 0.040092 ... -0.876449 0.516128 0.044673 \n", "5 2.516687 -1.205575 -1.142770 ... -0.891871 -0.821140 -0.234620 \n", "6 3.990253 -1.799575 0.157329 ... -1.777035 0.899185 0.051657 \n", "7 4.200511 -2.412188 -0.329478 ... -1.832500 -0.338877 0.090419 \n", "8 5.906464 -2.754837 0.828413 ... -1.027973 0.800656 0.279277 \n", "9 5.176581 -3.412602 0.018082 ... -1.967060 -0.563031 0.037204 \n", "10 4.589664 -3.279654 0.060034 ... 0.389372 0.027475 -0.029610 \n", "11 4.435772 -3.192725 -0.053735 ... 0.252228 -0.080417 0.079835 \n", "12 3.896661 -3.445420 -0.300079 ... -0.759896 -0.172271 -0.565611 \n", "13 2.280408 -3.357836 -0.171356 ... 0.254181 0.087595 0.671341 \n", "14 1.866470 -3.561285 0.192956 ... -0.613136 0.250823 -0.183756 \n", "15 0.514341 -3.453597 -0.979999 ... 0.314093 -0.814276 -0.330166 \n", "16 -1.238768 -3.379904 -1.424245 ... 
0.212785 -0.309388 -1.393125 \n", "17 -1.055398 -2.461043 0.607710 ... 2.731502 1.406253 1.403731 \n", "18 -2.362157 -1.100015 0.699302 ... 4.049219 0.061868 -0.220421 \n", "19 -0.686104 -0.536535 0.853027 ... 1.672420 0.104917 1.409625 \n", "20 -0.918381 0.614780 -0.000621 ... 3.424244 -0.593043 -0.817408 \n", "21 -1.600695 0.604266 -0.352722 ... -0.038162 -0.245546 -0.396539 \n", "22 -0.683813 -0.010978 0.375464 ... -1.840343 0.502934 1.036507 \n", "23 -0.733209 -0.381107 -0.206980 ... -1.109868 -0.405139 -0.717279 \n", "24 -0.463776 -0.008299 -1.063143 ... 1.104188 -0.594786 -0.319750 \n", "25 -0.325683 -0.197538 0.852764 ... -0.570790 1.325849 0.898785 \n", "26 -0.472403 -0.573192 0.388347 ... -1.126331 -0.323363 -0.395126 \n", "27 0.072535 -0.019349 -0.793544 ... 1.643699 -0.820468 -0.520688 \n", "28 0.128152 -0.765212 0.402834 ... -2.229607 0.827322 0.487300 \n", "29 -0.226810 -0.581498 0.367790 ... 0.540662 -0.025872 0.102414 \n", ".. ... ... ... ... ... ... ... \n", "322 -2.675697 -1.068394 -0.823140 ... 0.100116 -0.859490 -0.218477 \n", "323 -2.753261 -0.086557 0.395719 ... 2.919178 0.842898 -0.973682 \n", "324 -1.718515 -0.187550 4.125837 ... -0.307802 2.582829 5.131648 \n", "325 -2.244807 -0.502005 -1.618973 ... -0.943951 -3.981896 -5.760390 \n", "326 -1.627570 -0.479477 -2.042352 ... 0.060305 -0.294931 3.326101 \n", "327 -0.717481 -0.391883 0.610392 ... 0.254213 1.836368 0.458202 \n", "328 -0.287832 -0.095519 -1.500013 ... 0.876374 -1.463790 -2.132624 \n", "329 -0.153425 0.119832 -0.073969 ... 0.634945 0.986446 0.638493 \n", "330 0.043060 0.195223 2.332615 ... 0.217845 1.665816 1.060850 \n", "331 -0.583980 -0.025042 0.895628 ... -0.663253 -0.997211 -0.550861 \n", "332 -0.206271 -0.102558 -2.076139 ... -0.237837 -2.060587 -1.274514 \n", "333 -0.210724 -0.347100 -0.509907 ... -0.735600 1.083576 0.286639 \n", "334 -0.240175 -0.265654 0.484146 ... 0.235887 0.687140 0.639166 \n", "335 -0.180138 -0.327081 4.229796 ... -0.189890 2.593591 1.704492 \n", "336 -0.978027 -0.394252 0.127043 ... -0.207010 -2.844193 -1.783220 \n", "337 -1.273100 -0.324648 -1.366243 ... 0.200599 -1.036218 -0.469565 \n", "338 -1.517643 -0.466695 1.676559 ... -0.430150 2.106621 1.174458 \n", "339 -1.453840 -0.388506 -0.252597 ... 0.226182 -1.338211 -0.468046 \n", "340 -1.211290 -0.385875 -0.474839 ... 0.001011 -0.155572 -0.533660 \n", "341 -1.101262 -0.246559 -0.920344 ... 0.408351 -0.310261 -0.466859 \n", "342 -0.708929 -0.349029 -1.498801 ... -0.312205 -0.402376 -0.529048 \n", "343 -0.303996 -0.571158 -0.724516 ... -0.668807 0.534874 0.868312 \n", "344 -0.221332 -0.781410 -0.991838 ... -0.633410 -0.186806 0.248306 \n", "345 -0.048003 -0.731256 1.173004 ... 0.142633 1.498324 0.714945 \n", "346 -0.150557 -0.771399 0.627648 ... -0.126462 -0.379442 -0.318908 \n", "347 -0.389726 -0.581232 0.224044 ... 0.559895 -0.281229 -0.438308 \n", "348 -0.486183 -0.550323 1.621165 ... 0.085281 0.966407 0.792565 \n", "349 -0.994028 -0.513804 0.303738 ... 0.102002 -0.914373 -0.439485 \n", "350 -1.042688 -0.397899 -1.797134 ... 0.338581 -1.457185 -1.687210 \n", "351 -1.237392 -0.247280 0.804591 ... 
0.442033 1.801020 1.664571 \n", "\n", " 23 24 25 26 27 28 29 \n", "0 0.018805 -0.398291 -0.782983 -0.821576 -0.870520 0.451925 0.085675 \n", "1 -0.014161 0.163439 1.163271 1.096738 1.035651 0.114856 -0.002960 \n", "2 0.313814 -0.140831 -2.005689 -1.949039 -1.937349 -0.581140 -0.198327 \n", "3 -0.269523 -0.445101 0.777854 0.708668 0.770342 0.164790 0.242523 \n", "4 0.207974 0.584736 -0.442798 -0.450921 -0.479058 0.187893 -0.071330 \n", "5 0.129286 0.128331 -0.399259 -0.394191 -0.388096 -0.301823 -0.090078 \n", "6 0.030703 0.771979 0.082991 0.164810 0.068793 -0.166292 -0.364338 \n", "7 -0.028619 0.771979 -0.364587 -0.359895 -0.251214 -0.215180 0.310585 \n", "8 7.089744 -0.351480 0.807774 0.806583 0.758939 0.521726 0.045674 \n", "9 -7.354659 0.409195 0.884135 0.943601 1.066998 -0.828192 -0.236167 \n", "10 1.261790 0.245358 1.685392 1.722505 1.552522 0.908407 0.052722 \n", "11 -0.645896 -0.246156 1.708838 1.793519 1.726152 -0.341366 0.091837 \n", "12 1.191043 -1.802614 0.857512 0.821622 0.698780 0.808945 0.282419 \n", "13 -1.227832 -0.082318 1.777490 1.642809 1.759927 -0.567640 0.038137 \n", "14 1.897846 -1.229182 -0.392345 -0.319034 -0.401763 0.028451 -0.029287 \n", "15 -0.149046 -0.129129 -0.335127 -0.317092 -0.256237 -0.080461 0.081157 \n", "16 3.084688 -0.328074 0.769517 1.417760 0.734741 -0.173185 -0.570177 \n", "17 -1.359133 -0.105723 -0.323112 -0.414516 -0.216994 0.089139 0.678059 \n", "18 1.525357 -0.960020 0.544592 1.188443 0.471054 0.253912 -0.184839 \n", "19 2.006815 -0.105723 -0.157266 0.200431 -0.052815 -0.821262 -0.332584 \n", "20 -2.638109 2.305032 0.336609 1.413119 0.176278 -0.311599 -1.405241 \n", "21 -0.782964 -0.175939 -2.749304 -3.572722 -2.741859 1.420272 1.417131 \n", "22 0.144224 -2.223911 -3.430859 -2.204179 -3.847307 0.063170 -0.221838 \n", "23 -0.150958 -0.351480 -2.029634 -3.837804 -1.714912 0.106625 1.423078 \n", "24 -1.245052 0.023006 -3.228336 -2.564060 -3.147701 -0.597937 -0.824272 \n", "25 -1.870190 1.532654 0.098028 0.370974 0.076841 -0.247152 -0.399563 \n", "26 -0.188237 -1.474939 1.543393 0.420641 1.576045 0.508407 1.046556 \n", "27 0.514511 -0.023804 0.953642 1.148990 1.026410 -0.408255 -0.723228 \n", "28 -1.069688 0.303871 -0.919330 -0.676340 -0.814053 -0.599696 -0.322074 \n", "29 0.034693 -0.047210 0.270771 -0.496589 0.323057 1.339106 0.907578 \n", ".. ... ... ... ... ... ... ... 
\n", "322 -0.496480 -0.374885 0.401293 0.356687 0.253986 -0.433020 -0.301789 \n", "323 1.824228 -0.831290 3.185336 3.110535 2.969010 1.061640 0.721314 \n", "324 6.368563 -1.030236 0.167547 0.378253 0.070451 0.146491 -0.097099 \n", "325 -5.555482 -2.024965 0.762035 0.904807 0.617938 -0.390538 -0.501455 \n", "326 -1.918137 0.713465 0.025953 0.033241 0.101213 -0.866904 -0.219877 \n", "327 -0.736489 -0.842993 -2.708011 -2.493444 -2.776191 0.851588 -0.981971 \n", "328 -0.081000 -1.275993 -0.130246 1.046972 -0.675606 2.607977 5.179055 \n", "329 -0.143025 0.221952 1.180732 0.854401 1.217030 -4.018846 -5.812346 \n", "330 0.806166 -0.023804 0.021172 -0.681995 0.452017 -0.297004 3.357037 \n", "331 -0.548614 -0.058912 -0.564876 -1.043069 -0.408466 1.854455 0.462975 \n", "332 -0.366757 -0.070615 -0.716462 -0.755776 -0.421782 -1.476920 -2.151486 \n", "333 0.046391 0.128331 -0.577031 -0.645185 -0.577755 0.996493 0.644911 \n", "334 -0.033410 -0.023804 -0.469228 -0.224289 -0.758200 1.682289 1.071121 \n", "335 1.853809 0.046412 0.369725 0.216153 0.366318 -1.005928 -0.555292 \n", "336 -0.781223 -0.000399 0.558160 0.518685 0.716952 -2.079362 -1.285547 \n", "337 -1.188721 -0.035507 0.754052 0.623638 0.774129 1.094542 0.289848 \n", "338 0.316439 -0.058912 -0.313174 -0.404685 -0.384924 0.694356 0.645590 \n", "339 -0.250944 -0.023804 -0.446022 -0.153512 -0.776251 2.618840 1.720635 \n", "340 0.039711 0.011304 0.080839 0.176344 0.073499 -2.870381 -1.798894 \n", "341 -0.096483 0.093223 -0.142546 -0.194917 0.105339 -1.045304 -0.473255 \n", "342 0.176183 0.023006 0.155082 0.249782 -0.051285 2.127264 1.185766 \n", "343 -0.222842 0.034709 -0.311144 -0.327527 -0.198309 -1.350154 -0.471722 \n", "344 0.056213 -0.035507 0.110304 -0.142931 0.102668 -0.156328 -0.537935 \n", "345 0.044227 0.034709 -0.181899 -0.220180 -0.077201 -0.312480 -0.470525 \n", "346 -0.096212 -0.035507 0.523038 0.518103 0.593299 -0.405466 -0.533281 \n", "347 -0.000533 -0.023804 0.872123 0.801915 0.699569 0.540649 0.876827 \n", "348 0.219206 0.069817 0.570167 0.516898 0.749350 -0.187857 0.251165 \n", "349 -0.026939 -0.012102 -0.296372 -0.347074 -0.375127 1.513213 0.722060 \n", "350 -0.174794 -0.000399 0.027389 -0.060984 -0.047786 -0.382316 -0.321223 \n", "351 0.656181 -0.012102 -0.611525 -0.508509 -0.591390 -0.283173 -0.441713 \n", "\n", "[352 rows x 30 columns]" ] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ "num_obs, num_features = dataset.shape\n", "num_features -=1\n", "num_labels=1\n", "print(\"Observations: %d\\nFeatures: %d\" % (num_obs, num_features))\n", "\n", "# last column is target \n", "y=dataset[:,num_features].astype(float)\n", "print(\"Histogram: check all 0s and 1s, no -1s etc.\")\n", "pprint(np.histogram(y))\n", "\n", "# omit 1st id column\n", "X_raw = dataset[:,1:num_features].astype(float)\n", "num_features -=1\n", "del dataset\n", "\n", "# normalize\n", "# not necessary for NN but may speed convergence, lets pca work\n", "# min_max_scaler = preprocessing.MinMaxScaler()\n", "# X = min_max_scaler.fit_transform(X_raw)\n", "X = preprocessing.scale(X_raw)\n", "\n", "pd.DataFrame(X)" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Split into training, xval, test\n", "Split into train, xval\n", "Training set\n", "(210, 30)\n", "(array([103, 0, 0, 0, 0, 0, 0, 0, 0, 107]),\n", " array([ 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. 
]))\n", "Xval set\n", "(71, 30)\n", "(array([36, 0, 0, 0, 0, 0, 0, 0, 0, 35]),\n", " array([ 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ]))\n", "Test set\n", "(71, 30)\n", "(array([37, 0, 0, 0, 0, 0, 0, 0, 0, 34]),\n", " array([ 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ]))\n" ] } ], "source": [ "print(\"Split into training, xval, test\")\n", "# split into training, xval, test, 60/20/20\n", "X_bigtrain, X_test, y_bigtrain, y_test = train_test_split(X, y, test_size=0.2)\n", "print(\"Split into train, xval\")\n", "X_train, X_xval, y_train, y_xval = train_test_split(X_bigtrain, y_bigtrain, test_size=0.25)\n", "\n", "print \"Training set\"\n", "print X_train.shape\n", "pprint(np.histogram(y_train))\n", "\n", "print \"Xval set\"\n", "print X_xval.shape\n", "pprint(np.histogram(y_xval))\n", "\n", "print \"Test set\"\n", "print X_test.shape\n", "pprint(np.histogram(y_test))" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[ 17.07 32.24 44. 54.6 63.46 69.6 74.57 79.08 82.88 85.83\n", " 88.46 90.63 92.59 94.25 95.78 96.88 97.91 98.63 99.12 99.48\n", " 99.62 99.73 99.81 99.87 99.91 99.93 99.95 99.96 99.97 99.97]\n" ] }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHPlJREFUeJzt3Xt0VPW99/H3NzcCSbgEQrjfJKJCRSTi3Vqxaj0eLz2t\n1daWVqv28rT0strarqfH0571nNOn7WnPaW3t4am2aL0hWMHWZevBqLWKmnATBCQgkBtMgEBIIJfJ\nfJ8/ZqCRcksmsGf2fF5rZc2e3+xNvtvNfPj5m9/8trk7IiISXllBFyAiIieXgl5EJOQU9CIiIaeg\nFxEJOQW9iEjIKehFREJOQS8iEnIKehGRkFPQi4iEXE7QBQAMGzbMJ0yYEHQZIiJppaqqaqe7lxxv\nv5QI+gkTJlBZWRl0GSIiacXMtp7Ifhq6EREJOQW9iEjIKehFREJOQS8iEnIKehGRkDtu0JvZg2YW\nMbM13dqKzex5M9uYeBzS7bVvm1m1mW0ws6tPVuEiInJiTqRH/1vgmsPa7gGWunsZsDTxHDM7C7gF\nmJo45pdmlt1n1YqISI8ddx69u79sZhMOa74BuDyxPR94EfhWov1xd28H3jWzamAW8FrflCsiqcTd\nicacaJcTjcWIdjmdsRhdh9qcrsN/3OmKxeiKQTQWIxaDLndi7sRiTswh5o670xWLbx/6iYEnfu+h\nRz/YBk7354ntQ/se1s57j4W/tXU/P7ody2H7Hv4avbg16+kjirju7FE9Pq4nevuFqVJ3b0hsbwdK\nE9ujgWXd9qtNtP0dM7sLuAtg3LhxvSxDRA5yd5rbouxu7Tj009oepa2zi/ZojPZoF+2dsUPbbZ2J\ntmiM9s4YHV0xOrtidET/tt0ZdToOa4t2efwxEdzyXmY92/+6s0elbNAf4u5uZj2+2u4+D5gHUF5e\nrr8tIkfR2h6lYW8bDXsP0LC3jR1729jV2sGu1g52t7azqyUe6k37O+jsOv5bKcsgPzebfjlZ9MvJ\nJj83i7yc+E9udhZ52VkU9ss5tJ2bE3/MyzFys+P75GQZOdlGTtbB7Sxys43sg9tZB7eNLIvvl32w\nLcvIOvhodqg9yyDL4m1ZWX/bzs4CO9huYBhmJH4MI7F9sB0g8WfFXztsnyyO3N4toA9ud28/eMzB\n7ffs19N0P8V6G/Q7zGykuzeY2UggkmivA8Z2229Mok1EjsDd2dXawcYdLdQ07adhTxvbm+OB3rAn\nHu7NbdG/O64oP4ehBXkUF+QxZsgApo8ZTHFh3qG24oI8hhb0oyg/h3658UCPB3sWOdmabJdpehv0\nS4A5wA8Sj4u7tT9qZj8BRgFlwBvJFimS7tydHc3tbIzsozrSwsZIC9U7WtgY2UfT/s737DusMI8R\ng/IZN3QA508qZuSg/owanM+IgfmMGtyf4QP70S9HcxzkxB036M3sMeIfvA4zs1rgXuIBv8DM7gC2\nAjcDuPtaM1sAvA1EgS+6e9dJql0kJbk7mxpbqNzSxIpte3gnso/qHS3sa/9bz3xQ/1zKhhdyzbQR\nTB5eRNnwQsYPHUDpwHzycxXi0rfMe/EpcV8rLy93rV4p6aojGuOtur1UbtnNm1uaqNq6+1AvfciA\nXKaMKKJseBFlpYVMHl5I2fAihhXmpfy4rqQ+M6ty9/Lj7ZcSyxSLpJPmtk6qtjYdCvZVNXtoj8YA\nmDisgCvPLOW8CcWUTxjCxGEFCnQJnIJe5Dia2zqp3LKbZZt3s2zzLtbU7SXmkJNlTB09iNsuGM95\nE4Ywc3wxJUX9gi5X5O8o6EUOc7Rgz8vOYsa4wXzpijLOn1jMOeMGMyBPbyFJffpbKhnP3Vm+bQ9/\nWrv9qMF+waShzBg3WB+USlpS0EtGcnfWb9/HklX1PLOqntqmAwp2CS0FvWSULTtbWbKqniWr6qmO\ntJCdZVwyeRhfvfJ0rppaSlF+btAlivQ5Bb2E3va9bfxhdTzcV9fuBWDWhGL+9cZpXDttBEML9QGq\nhJuCXkJpf0eU59ZsZ2FVLa9t3oU7TBs9kO9cewbXnT2KUYP7B12iyCmjoJfQcHcqtzaxsLKWP77V\nQEt7lHHFA/jyFWVcf84oTispDLpEkUAo6CXt1e85wO9X1LGwqpZ3d7YyIC+ba983ko/OHMN5E4rJ\nytIXliSzKeglLbV1dvHnt3fwZGUNr1TvxB3On1jMFy4/jWvfN5KCfvqrLXKQ3g2SVur3HGD+q1t4\n7I1tNLdFGT24P1+6oox/Onc044cWBF2
eSEpS0EtaWFWzh1+/8i7PvhW/sdk100bw8VnjuHDSUA3N\niByHgl5SVlfMef7tHTzwymbe3NJEUb8c7rhkInMumsBozZoROWEKekk5re1Rnqys4cG/bmHb7v2M\nGdKf7153Fh87byyFGnsX6TG9ayRlNOw9wG9f3cKjr29jX1uUc8cN5p4PncFVZ5Xq9nciSVDQS+Bq\nm/Zz/4ubeLKylmgsxoemjeT2SyYyc/yQoEsTCQUFvQSmZvd+flFRzcKqWszgo+Vj+fz7T2Ns8YCg\nSxMJFQW9nHJbdrbyi4pqnlpRR7YZHz9/HJ97/2lalkDkJFHQyymzubGF+yqqWbyynpws41MXjufu\ny05jxKD8oEsTCTUFvZx01ZF9/PyFap5ZVU9eTha3XzyBOy+bxPAiBbzIqaCgl5OmqbWDH/15A4+9\nsY3+udncedkk7rx0EsO0LLDIKaWglz7XFXOeeLOGH/5pPfvaonz6ogl86Yoyigvygi5NJCMp6KVP\nrazZwz8vXsPq2r3MmljM92+YyhkjBgZdlkhGU9BLn9jd2sEPn1vPE5U1lBT2479uOYfrp4/CTOvQ\niARNQS9J6Yo5j76xjR//aQOt7VE+e8lEvjy7TPdeFUkhCnrptaqtTdy7ZA1r6pq5cNJQvn/DVMpK\ni4IuS0QOo6CXHmtu6+Tfn13HY2/UMGJgPj+/dQbXnT1SwzQiKUpBLz3y8juNfGvRanY0t3HXZZOY\nO7tMd3MSSXF6h8oJ2dfWyb8levGnlRTw1Bcu5pyxg4MuS0ROgIJejuuVjTv51qLVNOw9wN2XTeKr\nHzyd/NzsoMsSkROkoJejammP8u/PruOR17cxaVgBT37uIi0dLJKGFPRyRK9u2sk3F66mbs8B7rx0\nIl+/aop68SJpSkEv79HaHuX/Preeh17byoShA3jy7gspn1AcdFkikgQFvRzydn0zn/tdFTVN+7n9\n4ol84+op9M9TL14k3SnoBYCKDRH+1yPLKcrP5fE7L+D8SUODLklE+oiCXvjdsq3cu2QtZ4wo4oE5\n5+lGICIho6DPYLGY84Pn1jPv5c1cccZwfn7rDH35SSSEspI52My+amZrzWyNmT1mZvlmVmxmz5vZ\nxsSj5uOloLbOLr746HLmvbyZT104nnmfnKmQFwmpXge9mY0GvgyUu/s0IBu4BbgHWOruZcDSxHNJ\nITtb2rll3jKeW7ud//0PZ/K966eSk53Uv/kiksKSfXfnAP3NLAcYANQDNwDzE6/PB25M8ndIH6qO\n7OOmX/6V9dubuf8TM/nspZO0GJlIyPX6/9Xdvc7MfgxsAw4Af3b3P5tZqbs3JHbbDpT2QZ3SB17b\ntIu7H64kLyeLx++6UGvViGSIZIZuhhDvvU8ERgEFZnZb933c3QE/yvF3mVmlmVU2Njb2tgw5QYuq\navnUg68zfGA+v9eCZCIZJZmhmyuBd9290d07gaeAi4AdZjYSIPEYOdLB7j7P3cvdvbykpCSJMuR4\nflFRzdefXMV5E4pZ9PmLGFs8IOiSROQUSibotwEXmNkAiw/yzgbWAUuAOYl95gCLkytRkvGLimp+\n9KcN3DRjNL/9zCwG9dct/kQyTTJj9K+b2UJgORAFVgDzgEJggZndAWwFbu6LQqXn5r286VDI//ij\n08nO0oeuIpkoqYnT7n4vcO9hze3Ee/cSoN/89V3+7dn1XHf2SH70kbMV8iIZTJOnQ+jhZVv53jNv\nc83UEfz0Y+dojrxIhlMChMwTb27ju0+v4cozh/OzW2eQq5AXyXhKgRBZVFXLPU+9xftPL+EXnziX\nvBxdXhFR0IfG4pV1fGPhKi46bSj//cmZ9MvROvIiEqegD4Fn32rgawvi8+R//anzdMs/EXkPBX2a\n+/Pa7Xz5sRXMGDuYBz99nu4IJSJ/R0GfxirWR/jio8uZNnoQv/nMeVpmWESOSEGfppZt3sXdv6ti\nyogi5t8+i6J8feNVRI5MQZ+GNmzfx50PVTKueAAP336+ljUQkWNS0KeZ7Xvb+PRv3qB/bja//cx5\nDCnIC7okEUlxGtRNI81tnXz6N2+wry3KE3dfwJghWoVSRI5PPfo00RGN8bmHq6iOtHD/becyddSg\noEsSkTShHn0aiMWcby5cxaubdvGTm6dzaZnW7xeRE6cefRr44Z828PTKer5x9RQ+fO6YoMsRkTSj\noE9xD722hV+9tIlPnD+OL1x+WtDliEgaUtCnsOfWbOfeJWu58sxSvn/DNOI38hIR6RkFfYqq2rqb\nuY+vYPqYwfz81hm6cYiI9JqCPgVtamzhjvmVjByUzwNzyrV+jYgkRUGfYiL72pjz4BvkZBnzb5/F\n0MJ+QZckImlO0ytTSLQrPld+V0sHj991AeOHFgRdkoiEgII+hfxs6UaWb9vDz26dwfSxg4MuR0RC\nQkM3KeKNd3dzX0U1H5k5huunjwq6HBEJEQV9Cti7v5OvPL6CscUD+JfrpwZdjoiEjIZuAubufOfp\nt4jsa2fh5y+iUDcPEZE+ph59wBZW1fLH1Q187arTOUfj8iJyEijoA/TuzlbuXbKWCyYVc/dlWt5A\nRE4OBX1AOqIx5j6+gtzsLH76sXP0zVcROWk0IByQn/7PO6yu3cuvbjuXkYP6B12OiISYevQBeHXT\nTn710iZunTWWa6aNDLocEQk5Bf0p1tTawdeeWMXEYQV897qzgi5HRDKAgv4Ucne+/dRb7Gpt52e3\nzGBAnkbOROTkU9CfQo+/WcNza7fzjaunMG207vkqIqeGgv4UqY608L1n1nLJ5GF89pJJQZcjIhlE\nQX8KHJxK2T83m/+4eTpZmkopIqeQBolPgSfe3Mba+mZ+ddtMSgfmB12OiGQY9ehPsvZoF798cRPl\n44dw9dTSoMsRkQykoD/JFlTW0rC3ja9cebpu7i0igVDQn0Tt0S7ur6hm5vghXDx5aNDliEiGUtCf\nRAuraqnf28bc2WXqzYtIYJIKejMbbGYLzWy9ma0zswvNrNjMnjezjYnHIX1VbDrpiMb4ZcUmZowb\nzKVlw4IuR0QyWLI9+v8CnnP3M4DpwDrgHmCpu5cBSxPPM86i5bXU7Tmg3ryIBK7XQW9mg4DLgAcA\n3L3D3fcANwDzE7vNB25Mtsh00xGNcd8L1UwfO5j3n14SdDkikuGS6dFPBBqB35jZCjP7tZkVAKXu\n3pDYZzuQcXMKn0r05r9ypXrzIhK8ZII+BzgXuN/dZwCtHDZM4+4O+JEONrO7zKzSzCobGxuTKCO1\ndHbFuK+imuljBnG5evMikgKSCfpaoNbdX088X0g8+HeY2UiAxGPkSAe7+zx3L3f38pKS8ATi75fX\nUdt0gLnqzYtIiuh10Lv7dqDGzKYkmmYDbwNLgDmJtjnA4qQqTCMHe/NnjxnEB6YMD7ocEREg+bVu\nvgQ8YmZ5wGbgM8T/8VhgZncAW4Gbk/wdaePpFXVs272ff76uXL15EUkZSQW9u68Eyo/w0uxk/tx0
\nFE305qeNHsjsM9WbF5HUoW/G9pGnV9azddd+5s7WmjYikloU9H0g2hXjvhc2MnXUQK5Ub15EUoyC\nvg8sWVXPll37+bK+BSsiKUhBn6SumHPfC9WcOXIgV52Vcd8NE5E0oKBP0jOr6tm8s5W5syerNy8i\nKUlBn4SumPOzFzZyxogirjprRNDliIgckYI+CX9YXc/mxlbmzi7TDb9FJGUp6HvJ3fnVS5s5vbSQ\nq6eqNy8iqUtB30tVW5tY19DMZy6eqN68iKQ0BX0vPbxsK0X5OdxwzqigSxEROSYFfS/sbGnn2bca\n+MjMMQzIS3a5IBGRk0tB3wtPvFlDZ5dz2wXjgy5FROS4FPQ91BVzHlm2lUsmD+O0ksKgyxEROS4F\nfQ+9sD5C/d429eZFJG0o6Hvoode2MGJgvhYvE5G0oaDvgXd3tvKXjTv5+PnjyMnWfzoRSQ9Kqx54\nZNlWcrKMW2aNDboUEZETpqA/QQc6ulhQWcM100YwvCg/6HJERE6Ygv4EPbOqnua2KJ/Uh7AikmYU\n9CfA3Xlo2RamlBYxa2Jx0OWIiPSIgv4ErKzZw5q6Zm67cLzWnBeRtKOgPwEPL9tKYb8cbpoxOuhS\nRER6TEF/HLtbO/jD6gY+fO5oCvtpXRsRST8K+uNYUFlDRzSmb8KKSNpS0B9DV8x55PWtnD+xmNNL\ni4IuR0SkVxT0x/DyO43U7D7Apy6cEHQpIiK9pqA/hode20JJUT+umloadCkiIr2moD+Kbbv28+I7\njdw6axy5WtdGRNKYEuwoHnljK1lmfHzWuKBLERFJioL+CNo6u1jwZg1XnVXKiEFa10ZE0puC/gj+\nuLqBpv2dWtdGREJBQX8EDy/bymklBVx42tCgSxERSZqC/jBr6vaysmYPt12gdW1EJBwU9IdZWFVL\nXk4WH54xJuhSRET6hIK+m45ojMUr6/jgWaUMGpAbdDkiIn1CQd/NC+sjNO3v5CMz1ZsXkfBQ0Hez\nsKqW4UX9uHTysKBLERHpMwr6hMZ97VRsiHDTuaPJ0TdhRSRElGgJi1fW0RVzPnKuhm1EJFySDnoz\nyzazFWb2h8TzYjN73sw2Jh6HJF/mybdoeR3TxwyiTMsRi0jI9EWPfi6wrtvze4Cl7l4GLE08T2lr\n6/eyrqFZH8KKSCglFfRmNgb4B+DX3ZpvAOYntucDNybzO06FhVW15GVn8Y/TRwVdiohIn0u2R/+f\nwDeBWLe2UndvSGxvB464mLuZ3WVmlWZW2djYmGQZvRefO1/PB88qZfCAvMDqEBE5WXod9GZ2HRBx\n96qj7ePuDvhRXpvn7uXuXl5SUtLbMpJWsSHC7tYODduISGjlJHHsxcD1ZnYtkA8MNLPfATvMbKS7\nN5jZSCDSF4WeLAuraikp6selZZo7LyLh1Osevbt/293HuPsE4BbgBXe/DVgCzEnsNgdYnHSVJ8mu\nlnYq1ke4aYbmzotIeJ2MdPsB8EEz2whcmXiekhavrCcac/5Jc+dFJMSSGbo5xN1fBF5MbO8CZvfF\nn3uyLayq5ewxg5gyQnPnRSS8Mna8Ym39Xt7W3HkRyQAZG/SLqurIzTb+8WzNnReRcMvIoO/siq87\nf+WZpQwp0Nx5EQm3jAz6Fzc0sktz50UkQ2Rk0C+sqmFYYT8uOz24L2qJiJwqGRf0u1raWbouwk0z\nRpGrufMikgEyLumWrErMndewjYhkiIwL+oVVtUwbPZAzRgwMuhQRkVMio4J+XUMza+ubdRcpEcko\nGRX0i6pqyc02rj9ndNCliIicMhkT9J1dMZ5eWcfsM0op1tx5EckgGRP0L21oZGeL5s6LSObJmKD/\n/Yo6hhbk8f4pmjsvIpklI4K+IxrjpXcauWrqCM2dF5GMkxGpV7llNy3tUT6g3ryIZKCMCPqKDRHy\nsrO4eLJuFygimSdDgr6R8ycVU9CvT+6zIiKSVkIf9DW791MdaeHyKcODLkVEJBChD/qKDREAjc+L\nSMYKf9CvjzBh6AAmlRQGXYqISCBCHfRtnV28ummXhm1EJKOFOuhf27SL9miMD5yhoBeRzBXqoK/Y\nEKF/bjbnTywOuhQRkcCENujdnRfWR7h48lDyc7ODLkdEJDChDfpNja3UNh3Q+LyIZLzQBn3F+sS0\nSo3Pi0iGC2/Qb4gwpbSI0YP7B12KiEigQhn0+9o6eXPLbi4/Q1+SEhEJZdD/tXoXnV3OBzQ+LyIS\nzqCvWB+hKD+HmeOHBF2KiEjgQhf07k7FhgiXlZXoJiMiIoQw6N9uaCayr53LtYiZiAgQwqA/OK1S\n94YVEYkLX9BvaOR9owcxvCg/6FJERFJCqIK+qbWDFdua9CUpEZFuQhX0L29sJOa6yYiISHehCvqK\n9RGKC/I4e8zgoEsREUkZoQn6rpjz0juNXH56CdlZFnQ5IiIpo9dBb2ZjzazCzN42s7VmNjfRXmxm\nz5vZxsTjKfnW0qraPTTt7+Ryjc+LiLxHMj36KPB1dz8LuAD4opmdBdwDLHX3MmBp4vlJ9+L6CFkG\nl5UNOxW/TkQkbfQ66N29wd2XJ7b3AeuA0cANwPzEbvOBG5Mt8kS8sCHCueOGMHhA3qn4dSIiaaNP\nxujNbAIwA3gdKHX3hsRL24HSvvgdxxJpbmNNXbOmVYqIHEHSQW9mhcAi4Cvu3tz9NXd3wI9y3F1m\nVmlmlY2NjUnV8OI78eO1WqWIyN9LKujNLJd4yD/i7k8lmneY2cjE6yOByJGOdfd57l7u7uUlJcnN\ne69YH6F0YD/OHFmU1J8jIhJGycy6MeABYJ27/6TbS0uAOYntOcDi3pd3fJ1dMf6ycScfmDKceEki\nItJdThLHXgx8EnjLzFYm2r4D/ABYYGZ3AFuBm5Mr8dgqtzTR0h7V+LyIyFH0Oujd/RXgaF3o2b39\nc3vqxQ0RcrONiydrWqWIyJGk/TdjX1gfYdbEYgr7JfM/JyIi4ZXWQV+zez8bIy2abSMicgxpHfTt\n0S6unlrKFRqfFxE5qrQe75g8vIj//mR50GWIiKS0tO7Ri4jI8SnoRURCTkEvIhJyCnoRkZBT0IuI\nhJyCXkQk5BT0IiIhp6AXEQk5i98bJOAizBqJr3TZW8OAnX1UTirQ+aS+sJ1T2M4HwndORzqf8e5+\n3Bt6pETQJ8vMKt09NF+R1fmkvrCdU9jOB8J3Tsmcj4ZuRERCTkEvIhJyYQn6eUEX0Md0PqkvbOcU\ntvOB8J1Tr88nFGP0IiJydGHp0YuIyFGkddCb2TVmtsHMqs3snqDr6QtmtsXM3jKzlWZWGXQ9PWVm\nD5pZxMzWdGsrNrPnzWxj4nFIkDX21FHO6V/MrC5xnVaa2bVB1tgTZjbWzCrM7G0zW2tmcxPtaXmd\njnE+6XyN8s3sDTNblTin7yXae3WN0nboxsyygXeADwK1wJvAre7+dqCFJcnMtgDl7p6W83/N7DKg\nBXjI3acl2n4I7Hb3HyT+QR7i7t8Kss6eOMo5/QvQ4u4
/DrK23jCzkcBId19uZkVAFXAj8GnS8Dod\n43xuJn2vkQEF7t5iZrnAK8Bc4MP04hqlc49+FlDt7pvdvQN4HLgh4Joynru/DOw+rPkGYH5iez7x\nN2HaOMo5pS13b3D35YntfcA6YDRpep2OcT5py+NaEk9zEz9OL69ROgf9aKCm2/Na0vziJjjwP2ZW\nZWZ3BV1MHyl194bE9nagNMhi+tCXzGx1YmgnLYY5DmdmE4AZwOuE4Doddj6QxtfIzLLNbCUQAZ53\n915fo3QO+rC6xN3PAT4EfDExbBAaHh8rTM/xwve6H5gEnAM0AP8RbDk9Z2aFwCLgK+7e3P21dLxO\nRziftL5G7t6VyIIxwCwzm3bY6yd8jdI56OuAsd2ej0m0pTV3r0s8RoDfEx+iSnc7EuOoB8dTIwHX\nkzR335F4I8aA/0eaXafEuO8i4BF3fyrRnLbX6Ujnk+7X6CB33wNUANfQy2uUzkH/JlBmZhPNLA+4\nBVgScE1JMbOCxIdJmFkBcBWw5thHpYUlwJzE9hxgcYC19ImDb7aEm0ij65T4oO8BYJ27/6TbS2l5\nnY52Pml+jUrMbHBiuz/xSSfr6eU1SttZNwCJ6VL/CWQDD7r7/wm4pKSY2STivXiAHODRdDsnM3sM\nuJz4Sns7gHuBp4EFwDjiq5Te7O5p8+HmUc7pcuJDAg5sAe7uNnaa0szsEuAvwFtALNH8HeLj2ml3\nnY5xPreSvtfobOIftmYT75AvcPfvm9lQenGN0jroRUTk+NJ56EZERE6Agl5EJOQU9CIiIaegFxEJ\nOQW9iEjIKehFREJOQS8iEnIKehGRkPv/srLhLlDFnK8AAAAASUVORK5CYII=\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "# principal component analysis\n", "# scree chart to see how much variation is explained by how many predictors\n", "\n", "# we can predict using PCA components for dimensionality reduction when we have too many/collinear columns\n", "# can speed things up or sometimes get a better result\n", "# but won't do that here\n", "# merely exploratory to understand the data, see that it's scaled\n", "\n", "pca = PCA(n_components=num_features)\n", "pca.fit(X_train)\n", "\n", "#The amount of variance that each PC explains\n", "var= pca.explained_variance_ratio_\n", "\n", "#Cumulative Variance explains\n", "var1=np.cumsum(np.round(pca.explained_variance_ratio_, decimals=4)*100)\n", "\n", "#print(var1)\n", "%matplotlib inline\n", "plt.plot(var1)\n", "print(var1)\n", "# looks like ~10 orthogonal PCA components explain > 80% of the variation\n", "\n", "num_pca_components=15\n", "pca = PCA(n_components=num_pca_components)\n", "pca.fit(X_train)\n", "\n", "pca_train=pca.transform(preprocessing.normalize(X_train))\n", "pca_bigtrain=pca.transform(preprocessing.normalize(X_bigtrain))\n", "pca_xval=pca.transform(preprocessing.normalize(X_xval))\n", "pca_test=pca.transform(preprocessing.normalize(X_test))" ] }, { "cell_type": "code", "execution_count": 7, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# function to instantiate Keras feed-forward neural network model\n", "\n", "def declare_model(num_components=num_features, \n", " hidden_layer_size=30, \n", " dropout=(1.0/3.0), \n", " reg_penalty=0.0001, \n", " activation='relu'):\n", " # create model\n", " model = Sequential()\n", " # 1 hidden layer of specified size hidden_layer_size, specified L1 regularization, specified activation\n", " model.add(Dense(hidden_layer_size, \n", " input_dim=num_components, \n", " kernel_initializer='TruncatedNormal', \n", " kernel_regularizer=regularizers.l1(reg_penalty),\n", " activation=activation\n", " ))\n", " # 1 dropout layer\n", " model.add(Dropout(dropout))\n", " # send outputs to sigmoid layer for binary classification\n", " model.add(Dense(1, \n", " activation='sigmoid',\n", " kernel_initializer='TruncatedNormal', \n", " kernel_regularizer=regularizers.l1(reg_penalty)\n", " ))\n", " return model\n", "\n", "def create_model(num_components=num_features, \n", " hidden_layer_size=30, \n", " dropout=(1.0/3.0), \n", " reg_penalty=0.0001, \n", " activation='relu'):\n", " \n", " model = declare_model(num_components=num_components, \n", " hidden_layer_size=hidden_layer_size, \n", " dropout=dropout, \n", " reg_penalty=reg_penalty, \n", " 
{ "cell_type": "code", "execution_count": 7, "metadata": { "collapsed": true }, "outputs": [], "source": [
"# functions to instantiate a Keras feed-forward neural network model\n",
"\n",
"def declare_model(num_components=num_features,\n",
"                  hidden_layer_size=30,\n",
"                  dropout=(1.0/3.0),\n",
"                  reg_penalty=0.0001,\n",
"                  activation='relu'):\n",
"    # create model\n",
"    model = Sequential()\n",
"    # 1 hidden layer of size hidden_layer_size, with L1 regularization and the specified activation\n",
"    model.add(Dense(hidden_layer_size,\n",
"                    input_dim=num_components,\n",
"                    kernel_initializer='TruncatedNormal',\n",
"                    kernel_regularizer=regularizers.l1(reg_penalty),\n",
"                    activation=activation\n",
"                    ))\n",
"    # 1 dropout layer\n",
"    model.add(Dropout(dropout))\n",
"    # send outputs to a single sigmoid unit for binary classification\n",
"    model.add(Dense(1,\n",
"                    activation='sigmoid',\n",
"                    kernel_initializer='TruncatedNormal',\n",
"                    kernel_regularizer=regularizers.l1(reg_penalty)\n",
"                    ))\n",
"    return model\n",
"\n",
"def create_model(num_components=num_features,\n",
"                 hidden_layer_size=30,\n",
"                 dropout=(1.0/3.0),\n",
"                 reg_penalty=0.0001,\n",
"                 activation='relu'):\n",
"    # declare the architecture, then compile with binary cross-entropy loss and the Adam optimizer\n",
"    model = declare_model(num_components=num_components,\n",
"                          hidden_layer_size=hidden_layer_size,\n",
"                          dropout=dropout,\n",
"                          reg_penalty=reg_penalty,\n",
"                          activation=activation)\n",
"\n",
"    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
"    return model\n" ] },
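{ "cell_type": "markdown", "metadata": {}, "source": [ "Aside: because `create_model` returns a compiled model, it can be wrapped with `KerasClassifier` and scored with scikit-learn's cross-validation utilities. A minimal sketch, not run here; the epoch count, batch size, and 5-fold setup are illustrative assumptions:\n" ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
"# sketch: stratified 5-fold cross-validated accuracy for the default architecture\n",
"clf = KerasClassifier(build_fn=create_model, epochs=500, batch_size=32, verbose=0)\n",
"kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=seed)\n",
"scores = cross_val_score(clf, X, y, cv=kfold)\n",
"print(\"CV accuracy: %.3f +/- %.3f\" % (scores.mean(), scores.std()))\n" ] },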
"print(sklearn.metrics.confusion_matrix(y_xval_pred, y_xval))\n", "\n", "# same in test\n", "# note we use threshold selected using xval\n", "y_test_prob = model.predict(X_test)\n", "y_test_pred = y_test_prob >= thresh\n", "print(\"%s Test Accuracy %.3f, Test F1 %.3f\" % \n", " (time.strftime(\"%H:%M:%S\"),\n", " sklearn.metrics.accuracy_score(y_test_pred, y_test), \n", " sklearn.metrics.f1_score(y_test_pred, y_test)))\n", "\n", "print \"%s Confusion matrix (test):\" % time.strftime(\"%H:%M:%S\")\n", "print(sklearn.metrics.confusion_matrix(y_test_pred, y_test))\n" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAEKCAYAAAA4t9PUAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xl8FeXZ//HPRVjDEiCsshhQQBBZA6jVqnUpbo/VWhUX\nQBGkrVZrtdra9vH5Pa1P1Wrt4oaALFVcqlZsLe47CkkwYFiEsK8hIewBsl2/P85gj5iEQHIyJ8n3\n/XqdF3PumTm5zuRwvpmZe+4xd0dERORoNQi7ABERqd0UJCIiUiUKEhERqRIFiYiIVImCREREqkRB\nIiIiVaIgERGRKlGQiIhIlShIRESkShqGXUBNaNeunaekpIRdhohIrZKRkZHn7u0Pt1y9CJKUlBTS\n09PDLkNEpFYxs7WVWU6HtkREpEoUJCIiUiUKEhERqRIFiYiIVImCREREqkRBIiIiVaIgERGRKlGQ\niIjUQQWFxdw7ezE79xXF/GcpSERE6pjd+4sYPWU+Mz5dQ8ba/Jj/vHpxZbuISH2xs6CI0U/PZ/HG\nnfxl1BC+c0LHmP9MBYmISB2Rv7eQayfPI3vrHh6/dijn9ot9iICCRESkTti+t5BrJs9jVe4eJo0e\nypl9OtTYz1aQiIjUcgdDZGXuHiaPTuXbvQ87YG+10sl2EZFabEdBIddOmUd27h6eCiFEQEEiIlJr\n7SiI7Ims2LqHSdcN5YwQQgQUJCIitdLOgiKunTKPFTl7ePK6mj0ncigFiYhILbNzXyRElm+JhMhZ\nIYYIKEhERGqVvQeKuf7p+SzbsosnrhvCWSeEGyKgXlsiIrXG/qISxs9IZ+GGnTx6dc1cbFgZ2iMR\nEakFikpKufnZBcxduY0HLx/AyP6dwi7pKwoSEZE4V1Lq/OyFhby9dCv/e8mJXDaka9glfY2CREQk\njrk7v/rHF8xeuIm7Rp7AdaekhF3SNyhIRETilLvzu38tZdb89dx81vH88Mzjwi6pTAoSEZE49Zd3\ns5n88WrGnprCz87rHXY55VKQiIjEoWfnrePht5Zz2eAu/OaifphZ2CWVS0EiIhJn5mRt4Vf/+IIz\n+7Tn/ssH0KBB/IYIKEhEROLKvFXb+MlznzOga2seu2YIjRLi/2s6phWa2Ugz+9LMss3s7jLm32lm\nmcEjy8xKzKxtMG+qmW01s6xD1rnXzDZGrXdBLN+DiEhNWbZlFzfOSKdbm2Y8PXYYiY1rxzXjMQsS\nM0sAHgXOB/oBo8ysX/Qy7v6guw9y90HAL4AP3P3gDYanASPLefk/HlzP3V+PzTsQEak56/MLGD1l\nPs0bN2TGuBG0ad447JIqLZZ7JMOBbHdf5e6FwHPAJRUsPwqYdfCJu38IxP6u9SIiIcvfW8iYqfPZ\nX1TC9BuG06V1s7BLOiKxDJIuwPqo5xuCtm8ws0Qiex8vVfK1bzGzRcHhrzZVK1NEJDz7i0oYNz2N\njTv2MWXsMPp0ahl2SUcsXs7iXAx8EnVYqyKPAz2BQcBm4KGyFjKzCWaWbmbpubm51VepiEg1KSl1\nbnsuk8z1O/jTVYMZltI27JKOSiyDZCPQLep516CtLFcRdVirIu6e4+4l7l4KPEXkEFpZy01y91R3\nT23fPpy7homIVOT/Xl/KnMVb+NWF/eJqEMYjFcsgSQN6mVkPM2tMJCxmH7qQmSUBZwCvVuZFzaxz\n1NNLgazylhURiVfT56756qr1caf1CLucKolZkLh7MXAz8AawFHjB3Reb2UQzmxi16KXAm+6+N3p9\nM5sFfAr0MbMNZjYumPWAmX1hZouAs4Cfxuo9iIjEwltLcvif1xZzbr+O/PqifodfIc6Zu4ddQ8yl\npqZ6enp62GWIiLBoww6ufPIzendswawJJ8f1tSJmluHuqYdbLl5OtouI1Hnr8wu4YVo6yS0aM3lM\n7bng8HDqxrsQEYlzO/cVcf20NAqLS3huwgjat2wSdknVRkEiIhJjhcWlTJyZwdpte5lxwwiO71D7\nrhWpiIJERCSG3J17XvmCT1dt449XDuSU45LDLqna6RyJiEgMTf5oNS9mbOAnZ/fi0sHxda/16qIg\nERGJkbeX5HDfv5dy4Umdue3sXmGXEzMKEhGRGFi6eRe3Pvc5J3VJ4g8/GBj3N6eqCgWJiEg1y919\ngBunp9OiaUOeGp1Ks8YJYZcUUzrZLiJSjfYXlXDTzHS27T3AizedSsdWTcMuKeYUJCIi1cTd+cXL\nX7Bg3Q4eu2YIJ3VNCrukGqFDWyIi1eSx91fyyucb+dm5vbngpM6HX6GOUJCIiFSDOVmbefCNL7lk\n0DHc/J3jwy6nRilIRESqKGvjTn76/EIGdWvN/d8fgFnd7aFVFgWJiEgV5Ozaz43T02mT2IhJo4fS\ntFHd7qFVFp1sFxE5SvsKSxg/I51d+4v4+8RT6dCy7vfQKouCRETkKLg7d/59IV9s3Mmk61Lpd0yr\nsEsKjQ5tiYgchb+8m80/F23m5989gXP7dQy7nFApSEREjtCcrM08/NZyLhvchYln9Ay7nNApSERE\njsCSTbu+6qF132Un1bseWmVRkIiIVFLengOMn5FOUrNGTLqufvbQKotOtouIVEJhcSk//FsGeXsO\n8OLEU+hQD8bQqiwFiYjIYbg7v/5HFmlrtvOXUYMZ0LV12CXFFR3aEhE5jKc/WcPz6eu5+azjuXjg\nMWGXE3cUJCIiFfhweS6//dcSzuvXkdvP7R12OXFJQSIiUo5VuXu4+dkF9O7Ykj9eOahO3+WwKmIa\nJGY20sy+NLNsM7u7jPl3mllm8MgysxIzaxvMm2pmW80s65B12prZW2a2Ivi3TSzfg4jUTzv
3FXHj\njHQaJjTgqdGpNG+iU8rliVmQmFkC8ChwPtAPGGVm/aKXcfcH3X2Quw8CfgF84O75wexpwMgyXvpu\n4B137wW8EzwXEak2JaXOT2Z9zrptBTx+zRC6tU0Mu6S4Fss9kuFAtruvcvdC4DngkgqWHwXMOvjE\n3T8E8stY7hJgejA9Hfhe9ZQrIhLxf68v5YPlufzv9/ozomdy2OXEvVgGSRdgfdTzDUHbN5hZIpG9\nj5cq8bod3X1zML0FKHOQGzObYGbpZpaem5tb+apFpF57MX09kz9ezdhTUxg1vHvY5dQK8XKy/WLg\nk6jDWpXi7g54OfMmuXuqu6e2b9++OmoUkTouY20+97ySxbeOT+ZXF/YNu5xaI5ZBshHoFvW8a9BW\nlquIOqx1GDlm1hkg+HfrUVcoIhLYuGMfN83M4JjWTXn06iE0TIiXv7PjXyy3VBrQy8x6mFljImEx\n+9CFzCwJOAN4tZKvOxsYE0yPOYL1RETKVFBYzPjp6RwoKmXymFRaJzYOu6RaJWZB4u7FwM3AG8BS\n4AV3X2xmE81sYtSilwJvuvve6PXNbBbwKdDHzDaY2bhg1u+Bc81sBXBO8FxE5KiUljp3vLiQpVt2\n8edRgzm+Q8uwS6p1LHKaoW5LTU319PT0sMsQkTj0yNvLeeTtFfzyghOY8O3jwi4nrphZhrunHm45\nHQQUkXrr319s5pG3V3DZkC6MP103qDpaChIRqZcWb9rJ7S8sZHD31tx3qW5QVRUKEhGpd3J3H2D8\n9HRaJzbiSd2gqso0eIyI1CsHikuY+LcM8gsK+fvEU+nQUjeoqioFiYjUG+7OL1/OImPtdv569WD6\nd0kKu6Q6QYe2RKTeeOKDVby0YAO3ndOLiwboBlXVRUEiIvXCnKzN3D9nGRcPPIZbz+4Vdjl1ioJE\nROq8rI07+enzCxnUrTUPXj5APbSqmYJEROq0LTv3M256Gm2bN2bSaPXQigUFiYjUWQWFxdw4I409\n+4uZPCZVPbRiRL22RKROKi11bn9+IYs37WLy6FT6dm4Vdkl1lvZIRKRO+sObXzJn8RbuuaAvZ/ct\n8/53Uk0UJCJS57yUsYHH3l/JqOHdGHdaj7DLqfMUJCJSp8xfnc/dLy/i1OOS+X+X9FcPrRqgIBGR\nOmPdtgJumplOtzaJPH7NUBrpLoc14rBb2cxuNrNWwfSTZjbfzM6OfWkiIpW3a38RN0xPo9Rhythh\nJCU2CrukeqMycT3B3XeZ2XlAR2A88EBsyxIRqbziklJ+/MwC1uTt5fFrh9CjXfOwS6pXKhMkB2+h\neAEw090XVnI9EZGYc3d+/WoWH63I47ff68+px7ULu6R6pzKBsNDMXgcuAv5tZi34T7iIiITqiQ9W\nMWv+en505nFcNbx72OXUS5W5IPF6YCiQ7e4FZpYMjIttWSIihzd74aavBmK847w+YZdTb1Vmj2QY\nkOXu+WY2CrgLyIttWSIiFUtbk88dLyxkeEpb/vCDATRooG6+YalMkEwC9pnZACIhshGYGdOqREQq\nsCp3D+NnpNO1TTOevG4oTRpqIMYwVSZIit3dgUuAv7r7nwANWiMiodi25wDXT0ujgRlPXz+MNs0b\nh11SvVeZcyR7zexO4DrgDDNrAKiDtojUuP1FJYyfkc6WnfuZNeFkjk1WN994UJk9kisBA25y981A\nV+Dhyry4mY00sy/NLNvM7i5j/p1mlhk8ssysxMzaVrSumd1rZhuj1rugUu9URGq10lLnp89n8vn6\nHTxy5SCGdG8TdkkSOGyQuPsmYCrQxMxGAgXu/vTh1jOzBOBR4HygHzDKzPod8toPuvsgdx8E/AL4\nIDipf7h1/3hwPXd/vXJvVURqs9/PWca/syKj+Z5/Uuewy5EolRki5fvAAiKHtkYD6WZ2aSVeeziR\nLsOr3L0QeI7IeZbyjAJmHeW6IlKHzfx0DZM+XMXoU47VaL5xqDKHtn4DDHP3a9z9amAEcG8l1usC\nrI96viFo+wYzSwRGAi9Vct1bzGyRmU01M+3fitRhby/J4b9nL+bsEzrwm4v6aTTfOFSZIGng7jlR\nz7dWcr0jcTHwibvnV2LZx4GewCBgM/BQWQuZ2QQzSzez9Nzc3OqrVERqTMba7dw8awH9uyTx51GD\naajRfONSZX4rb5rZv8zsWjO7FngNeKMS620EukU97xq0leUq/nNYq8J13T3H3UvcvRR4ishhsG9w\n90nunuruqe3bt69EuSIST7K37mHc9DQ6tWrK1LHDaN5EdwaPV5UJkjuAGUS+sIcD04E7K7FeGtDL\nzHqYWWMiYTH70IXMLAk4A3i1MuuaWfRZtkuBrErUIiK1SM6u/YyZOp+GDYzpNwynXYsmYZckFThs\nxAcXIz4fPCrN3YvN7GYiey8JwFR3X2xmE4P5TwSLXgq86e57D7duMPsBMxtEZODINcBNR1KXiMS3\nXfuLGPt0GjsKCnluwim6VqQWsEhOlDHDbDtlj/JrRPKlbSwLq06pqamenp4edhkichgHiku4/uk0\n5q/OZ+rYYXy7tw5Lh8nMMtw99XDLVbRHokH9RaTGlJY6d7y4iLkrt/HwFQMVIrVIuUHi7iU1WYiI\n1G/3vb6U1xZu4q6RJ3DZkK5hlyNHQH3pRCR0kz9axeSPVzP21BQmntEz7HLkCClIRCRUr2Zu5Lf/\nWsoFJ3Xi17rgsFZSkIhIaD7JzuOOFxcyokdbHr5iEAm6OVWtVO45krrUa0tE4s/iTTu5aWYGPdu1\nYNLoVJo20s2paiv12hKRGrduWwFjn06jZdOGTLthGEnNdIuj2qzSvbaC+4Q0jWraFKuiRKTu2rp7\nP9dOmUdRSSnP3ngKnZOahV2SVFFlhpG/0MyWExmBd17w77uxLkxE6p6d+4oYPWU+eXsO8PTYYfTq\n2DLskqQaVOZk+++AbwFfuns34LvARzGtSkTqnH2FJdw4PY2VuXt48rqhDNYdDuuMygRJsbvnAg3M\nzNz9LcoZcVdEpCxFJaX8+NkFpK/dziNXDub0XrpqvS6pzLjMO82sBfAxMMPMtgL7YluWiNQVpaXO\nz/++iHeXbeV3l/bnwgG6TW5dU5k9ku8RCY7bgPeJ3BfkohjWJCJ1hLvz//65hFc+38id3+3DNSOO\nDbskiYHKBMkvghtJFbn7FHd/GLg91oWJSO3313ezmTZ3DeNO68GPzjwu7HIkRioTJCPLaLuwugsR\nkbpl5mdreeit5Vw2pAv3XNBXQ5/UYRVd2X4TMBHobWYLoma1BDJiXZiI1F6vLdzEb17N4py+Hbj/\n+wNooKFP6rSKTra/ALwD/B9wd1T7bnffGtOqRKTW+mB5Lre/kMmwY9vy16uH0ChBQ/rVdRVd2b4d\n2A78wMxOBE4PZn0EKEhE5BsWrNvOxJkZHN+hJZPHavys+qIyV7b/GHgR6B48XjCzH8W6MBGpXZbn\n7Ob6p9Po2KoJM24YTqumGj+rvqjMdSQ3AcPdfQ
+Amd0HzAUei2VhIlJ7rM8v4Lop82jSsAEzx42g\nfcsmYZckNagyBy8NKIx6XhS0iYiQu/sA102Zx77CEmaOG0G3tolhlyQ1rKJeWw3dvRiYCcwzs5eC\nWZcC02uiOBGJbzv3FTFm6nxydh3gbzeOoE8nDcJYH1V0aGs+MMTdHzCz94HTgvaJ7p4W88pEJK4V\nFBYzbloaK7buZvKYYQw9VoMw1lcVBclXh6/cfT6RYBER4UBxCTfNzGDBuu08evUQzuitQRjrs4qC\npL2ZlTsUSjBUiojUM8Ulpdw6K5OPVuTxwOUDOP8kDcJY31V0sj0BaEHkSvayHodlZiPN7Eszyzaz\nu8uYf6eZZQaPLDMrCe7EWO66ZtbWzN4ysxXBv9qfFqkhpaXOXS99wZzFW/jNRf24IrVb2CVJHDB3\nL3uG2QJ3H3LUL2yWACwHziVyV8U0YJS7Lyln+YuBn7r7dypa18weAPLd/fdBwLRx97sqqiU1NdXT\n09OP9q2ICJGRfP/ntSVMm7uGn57Tm1vP6RV2SRJjZpbh7qmHW66iPZKqdvEdDmS7+yp3LwSeAy6p\nYPlRwKxKrHsJ/+k1Np3IMPciEmN/fGs50+au4cbTevCTs48PuxyJIxUFydlVfO0uwPqo5xuCtm8w\ns0Qiowwf7GJc0bod3X1zML0F6FjFOkXkMJ76cBV/fjebK1O7cc+FGslXvq7cIHH3/Bqs42LgkyP9\nmR45LlfmsTkzm2Bm6WaWnpubWx01itRLs+av43evL+XCAZ2577KTFCLyDbEclnMjEH0mrmvQVpar\n+M9hrcOtm2NmnQGCf8scQNLdJ7l7qruntm+vrokiR+O1hZv45StfcGaf9vzxikEkaDh4KUMsgyQN\n6GVmPcysMZGwmH3oQmaWBJwBvFrJdWcDY4LpMYesJyLV5N1lOfz0+UyGpbTl8WuG0rihhoOXslVm\n0Maj4u7FZnYz8AaRrsRT3X2xmU0M5j8RLHop8Ka77z3cusHs3xMZgXgcsBa4IlbvQaS++mzVNn74\ntwX07dyKKWNSadZYw8FL+crt/luXqPuvSOUt2rCDq5+aR+ekpjx/0ym0bd447JIkJNXR/VdE6pnl\nObsZPXU+bZo3Yua4EQoRqRQFiYgAsG5bAddOnkfjhAY8M+5kOiU1DbskqSUUJCLClp37uWbKZxSV\nlPK3G0fQPVn3FJHKi9nJdhGpHfL3FnLtlHls31vEs+NH0Luj7ikiR0Z7JCL12O79kRtTrc8vYPKY\nVAZ0bR12SVILKUhE6qnIjanSWbp5F09cO5STeyaHXZLUUgoSkXpof1EJE2ZkkL42n0euGsRZJ3QI\nuySpxXSORKSeKSop5eZnF/Bxdh5/+MFALhpwTNglSS2nPRKReqSk1Lnt+UzeXrqV336vP5cP7Rp2\nSVIHKEhE6onSUufnf1/EvxZt5lcX9uXak48NuySpIxQkIvWAu/PrV7N4acEGbj+3Nzee3jPskqQO\nUZCI1HHuzn2vL+WZeeuYeMZx3PId3d1QqpeCRKSO++PbK3jqo9WMPTWFu0b20Y2ppNopSETqsMff\nX8mf31nBland+M1F/RQiEhMKEpE6atonq7l/zjIuGXQM9112Eg10d0OJEQWJSB30Qtp67n1tCd89\nsSN/+MFA3SJXYkpBIlLHvJq5kbteXsQZvdvz51GDaZSg/+YSW/qEidQhc7K2cPsLCxnRoy1PXjeU\nJg11i1yJPQWJSB3x9pIcbpm1gAFdk5g8ZhhNGylEpGYoSETqgPeWbeVHzyygX+dWTL9hOC2aaBg9\nqTkKEpFa7oPludz0twz6dGrJjHEjaNW0UdglST2jIBGpxT5ekcf4Gekc374FM8cNJ6mZQkRqnoJE\npJaam53HuOlp9GzXnGduHEHrxMZhlyT1lIJEpBb6bNU2bpieRkpyJETaNFeISHgUJCK1zPzV+dww\nLY1ubRJ5ZvwIkls0CbskqediGiRmNtLMvjSzbDO7u5xlzjSzTDNbbGYfRLXfamZZQfttUe33mtnG\nYJ1MM7sglu9BJJ5krM3n+qfn0ympKc+MH0E7hYjEgZj1ETSzBOBR4FxgA5BmZrPdfUnUMq2Bx4CR\n7r7OzDoE7f2B8cBwoBCYY2b/dPfsYNU/uvsfYlW7SDxasG47Y6am0aFVU2aNP5kOLZuGXZIIENs9\nkuFAtruvcvdC4DngkkOWuRp42d3XAbj71qC9LzDP3QvcvRj4ALgshrWKxLWMtdsZM2U+yS0aM2v8\nyXRspRCR+BHLIOkCrI96viFoi9YbaGNm75tZhpmNDtqzgNPNLNnMEoELgG5R691iZovMbKqZtSnr\nh5vZBDNLN7P03Nzc6nlHIiGYt2obo6fMo13LJswafzKdkhQiEl/CPtneEBgKXAh8F/i1mfV296XA\n/cCbwBwgEygJ1nkc6AkMAjYDD5X1wu4+yd1T3T21ffv2sX0XIjEyNzuPsU+n0SmpKc9POJljWjcL\nuySRb4hlkGzk63sRXYO2aBuAN9x9r7vnAR8CAwHcfYq7D3X3bwPbgeVBe467l7h7KfAUkUNoInXO\nh8tzuX5aGt3bJvLchFPooMNZEqdiGSRpQC8z62FmjYGrgNmHLPMqcJqZNQwOYY0AlgJEnXjvTuT8\nyLPB885R619K5DCYSJ3y7rIcbpyeznHtWzBrwsm0b6neWRK/YtZry92Lzexm4A0gAZjq7ovNbGIw\n/wl3X2pmc4BFQCkw2d0PBsNLZpYMFAE/dvcdQfsDZjYIcGANcFOs3oNIGN5YvIWbn11A386tmHHD\ncF2xLnHP3D3sGmIuNTXV09PTwy5D5LD+tWgztz73OSd1TWL6DcM1AKOEyswy3D31cMuFfbJdRAL/\n+Hwjt8xawODurZmpUXylFlGQiMSBGZ+u4bbnMxnRI1n3E5FaR59WkRC5O399N5uH3lrOuf068pdR\ng3VnQ6l1FCQV2HOgGAOa669DiQF353f/Wsrkj1dz2ZAuPPD9ATRM0EECqX30qa3AQ29+yXceep9X\nMzdSUlr3OyVIzSkuKeXnf1/E5I9XM/bUFP5w+UCFiNRa+lO7AhcN6EzamnxufS6TX/0ji4FdW3Ns\nciIpyc3pnpzIscmJHNu2Oc0a61CEVN6B4hJunZXJnMVbuPXsXtx2Ti/MLOyyRI6auv8eRkmpMydr\nC5+szCNr407Wbitg576iry3ToWUTerRrTs/2zenRrjkpyZHpbm0TadJQISP/sWt/ERNnZjB35TZ+\nc1E/bjitR9gliZSrst1/tUdyGAkNjAsHdObCAf+5oH5HQSFrtxWwNr+AtXl7WbOtgDXb9vLG4hzy\n9xZ+tVwDg65tEklp15ye7ZrTp1NLTujUkj6dWpLYWJu+vtm8cx/XP51G9tY9PHzFQC4b0jXskkSq\nhb7NjkLrxMa0TmzMwG6tvzFvZ0ERq7ftZXXeHlbnFbA6LzKdsSafvYWRcSfN4Ni2iZzQqRUndG7J\nSV2SG
NStte50V4d9uWU3Y5+ez+79xTx9/TBO76WBRKXuUJBUs6TERgxKbM2gQ0KmtNTZuGMfSzfv\nYtmW3Szbsotlm3fzxpItHDy62K1tMwZ1a8OgbpH1+3dppUNjdcDclXncNDODZo0SeP6mkznxmKSw\nSxKpVgqSGtKggdGtbSLd2iZy3omdvmovKCzmiw07yVy/g8z1O0hfk89rCzcB0LRRA4altOXU49px\n6nHJ9O+SREIDnZStTV7N3MgdLy4kJbk5024YThcNAy91kIIkZImNGzKiZzIjeiZ/1Zazaz+fr9vB\nZ6u28enKbdw/ZxkALZs25OSeyXy7d3vO6duBzkn6UopX7s4TH6zi/jnLGN6jLU9dl0pSooY8kbpJ\nvbZqgdzdB/h01TY+XZnHx9l5rM/fB0D/Lq04+4SOnNuvIyce00pdSOPEgeIS7nkli79nbOCiAZ15\n6IqBOkQptVJle20pSGoZd2dl7l7eXprD20tyyFi3HXdISU7k4oHHcPHAY+jdsWXYZdZbeXsOMHFm\nBulrt3Pr2b249exeNNDhSKmlFCRR6lKQHGrbngO8tSSHfy7azNyVeZQ6nNCpJZcM6sL3h3TRXfVq\n0LItuxg3LZ28PQd46IqBXDTgmLBLEqkSBUmUuhwk0XJ3H+D1LzYze+EmMtZuJ6GBcVafDlw1rBtn\n9mmvIThi6I3FW7j9+UyaN2nIU6NTy+waLlLbKEii1JcgibYqdw8vpG/gpQUbyN19gA4tmzBqeHeu\nObk7HVpqL6W6FJeU8tBby3n8/ZUM7JrEk9el0ilJ21fqBgVJlPoYJAcVlZTy3rKtzJq/jve+zKVx\nQgMuGtiZG77Vg/5ddD1DVWzbc4CfPPc5n2RvY9Tw7vz3xf00BLzUKRoiRQBolNCA807sxHkndmJV\n7h6mz13DixkbeHnBRoantOX6b6Vwbr+OOux1hDLX7+BHf8sgb28hD1w+gCtSu4VdkkhotEdSD+3a\nX8QLaeuZNncNG7bv49jkRCaecRyXDemibqqHUVrqTP1kNffPWUaHlk158rqh2rOTOkuHtqIoSMpW\nUuq8tWQLj72/kkUbdtKxVRPGn96Tq0d016CSZcjdfYCfvbiQD5fncm6/jjzw/QG0ad447LJEYkZB\nEkVBUjF35+PsPB59L5vPVuXTJrERN3yrB6NPTSGpma7GBvhgeS4/eyGT3fuL+dVF/bh2RHddACp1\nnoIkioKk8jLW5vPYeyt5Z9lWWjZpyOhTj2XcaT1pW0//8i4oLOaBOV8ybe4a+nRsyZ9HDaZPJ13w\nKfWDgiThqyHiAAALnUlEQVSKguTILd60k0ffy+bfWVto1iiBa08+lhtP71Gvug7PXZnHXS8tYn3+\nPsaemsLd55+gXllSr8RFkJjZSOBPQAIw2d1/X8YyZwKPAI2APHc/I2i/FRgPGPCUuz8StLcFngdS\ngDXAFe6+vaI6FCRHb0XObh59L5vZCzfRKKEBo4Z356YzetbpASN37y/i9/9exjPz1pGSnMj93x/w\ntUE1ReqL0IPEzBKA5cC5wAYgDRjl7kuilmkNzAVGuvs6M+vg7lvNrD/wHDAcKATmABPdPdvMHgDy\n3f33ZnY30Mbd76qoFgVJ1a3O28tj72XzyucbaWDG5ald+eEZx9GtbWLYpVUbd+ffWVv47T+XsHnX\nfsZ9qwc/O68PzRprL0Tqp8oGSSwvHhgOZLv7KncvJBIMlxyyzNXAy+6+DsDdtwbtfYF57l7g7sXA\nB8BlwbxLgOnB9HTgezF8DxLo0a45D/5gIO/dcSaXp3bl7+kbOOsP73PniwtZnbc37PKqbOnmXYx6\n6jN+9MwCWjVrxEs/PJVfXdRPISJSCbHs49kFWB/1fAMw4pBlegONzOx9oCXwJ3efAWQBvzOzZGAf\ncAFwcJeio7tvDqa3AB1jU76UpVvbRO679CRu+c7xPPnBKmbNX8dLCzZw8cBjuPms4+lVy0Ye3r63\nkIffWs4z89bSqlkj/vd7/Rk1rJsu0BQ5AmFfLNAQGAqcDTQDPjWzz9x9qZndD7wJ7AUygZJDV3Z3\nN7Myj82Z2QRgAkD37t1jVH791TmpGff+14n86KzjmPzRav722VpmL9zE+f07Mf70ngzu3ibsEiu0\nc18RUz5ezdSPV7OvqITRp6Rw2zm9aJ1YP3uniVRFLINkIxA9bkTXoC3aBmCbu+8F9prZh8BAYLm7\nTwGmAJjZfcGyADlm1tndN5tZZ2ArZXD3ScAkiJwjqab3JIfo0LIpv7ygLxPPOI6pH69m+tw1vP7F\nFgZ2TWL0KSlcOKBzXPV02rbnADM/W8vUj1eza38x5/fvxE/P7a17uIhUQSxPtjckcrL9bCIBkgZc\n7e6Lo5bpC/wV+C7QGJgPXOXuWVEn3rsT2TM52d13mNmDRMLn4Mn2tu7+84pq0cn2mrPnQDEvL9jA\n9LlrWJm7l+TmjbliWDcuH9qV49q3CK2uFTm7mfrJGl5esIEDxaWc07cjt53TS8ObiFQg9F5bQREX\nEOnamwBMdfffmdlEAHd/IljmTuB6oJRIF+GD3Xw/ApKBIuB2d38naE8GXgC6A2uJdP/Nr6gOBUnN\nc3c+yd7GtLlreHdZDqUOg7q15tLBXTjvxI410n14Z0ERry3axIsZG1i4fgdNGjbgsiFdGXdaCsd3\n0B6IyOHERZDECwVJuLbu2s+rmZt4acEGlm3ZDcDArkmc07cjpxyXzICurWncsOont92dtdsKeO/L\nrby9NId5q/IpLnX6dGzJD1K7cungLiS3aFLlnyNSXyhIoihI4seKnN28uSSHt5bkkLl+BwBNGzVg\nSPc29O3cij6dWtK7Y0uOSWpKcosmJJRzv/P9RSWsyy9gdd5eVuXuJXP9djLW7iBvzwEAju/QgnP6\nduTCkzrTv0srjYslchQUJFEUJPEpf28h81dv47NV+WSs3c7ynN0cKC79an4Dg7bNm9CkYQMaJhgJ\nZuw5UMyu/UXsLyr92msdm5zIkO5tGNK9Naf1ak+Pds1r+u2I1Dm6sZXEvbbNGzOyf2dG9u8MRIa1\nX5dfwIqc3eTsPkDurv3k7imksLiU4tJSSkqdFk0a0qpZI5KaNaJrm2akJDcnpV1zjVIsEiIFicSN\nhAZGj3bNtTchUsvo8l0REakSBYmIiFSJgkRERKpEQSIiIlWiIBERkSpRkIiISJUoSEREpEoUJCIi\nUiX1YogUM8slMlLw0WgH5FVjOdUlXuuC+K1NdR2ZeK0L4re2ulbXse7e/nAL1YsgqQozS6/MWDM1\nLV7rgvitTXUdmXitC+K3tvpalw5tiYhIlShIRESkShQkhzcp7ALKEa91QfzWprqOTLzWBfFbW72s\nS+dIRESkSrRHIiIiVaIgqYCZjTSzL80s28zuDrGObmb2npktMbPFZnZr0H6vmW00s8zgcUEIta0x\nsy+Cn58etLU1s7fMbEXwb5sarqlP1DbJNLNdZnZbWNvLzKaa2V
Yzy4pqK3cbmdkvgs/cl2b23Rqu\n60EzW2Zmi8zsFTNrHbSnmNm+qG33RA3XVe7vLuTt9XxUTWvMLDNor8ntVd73Q819xtxdjzIeQAKw\nEugJNAYWAv1CqqUzMCSYbgksB/oB9wJ3hLyd1gDtDml7ALg7mL4buD/k3+MW4NiwthfwbWAIkHW4\nbRT8XhcCTYAewWcwoQbrOg9oGEzfH1VXSvRyIWyvMn93YW+vQ+Y/BPwmhO1V3vdDjX3GtEdSvuFA\ntruvcvdC4DngkjAKcffN7r4gmN4NLAW6hFFLJV0CTA+mpwPfC7GWs4GV7n60F6RWmbt/COQf0lze\nNroEeM7dD7j7aiCbyGexRupy9zfdvTh4+hnQNRY/+0jrqkCo2+sgMzPgCmBWLH52RSr4fqixz5iC\npHxdgPVRzzcQB1/eZpYCDAbmBU23BIchptb0IaSAA2+bWYaZTQjaOrr75mB6C9AxhLoOuoqv/+cO\ne3sdVN42iqfP3Q3Av6Oe9wgO03xgZqeHUE9Zv7t42V6nAznuviKqrca31yHfDzX2GVOQ1CJm1gJ4\nCbjN3XcBjxM59DYI2Exk17qmnebug4DzgR+b2bejZ3pkXzqUroFm1hj4L+DFoCkettc3hLmNymNm\n9wDFwDNB02age/C7vh141sxa1WBJcfm7izKKr//BUuPbq4zvh6/E+jOmICnfRqBb1POuQVsozKwR\nkQ/JM+7+MoC757h7ibuXAk8Ro136irj7xuDfrcArQQ05ZtY5qLszsLWm6wqcDyxw95ygxtC3V5Ty\ntlHonzszGwtcBFwTfAERHAbZFkxnEDmu3rumaqrgdxcP26shcBnw/MG2mt5eZX0/UIOfMQVJ+dKA\nXmbWI/jL9ipgdhiFBMdfpwBL3f3hqPbOUYtdCmQdum6M62puZi0PThM5UZtFZDuNCRYbA7xak3VF\n+dpfiWFvr0OUt41mA1eZWRMz6wH0AubXVFFmNhL4OfBf7l4Q1d7ezBKC6Z5BXatqsK7yfnehbq/A\nOcAyd99wsKEmt1d53w/U5GesJnoV1NYHcAGRHhArgXtCrOM0Iruli4DM4HEBMBP4ImifDXSu4bp6\nEun9sRBYfHAbAcnAO8AK4G2gbQjbrDmwDUiKagtlexEJs81AEZHj0eMq2kbAPcFn7kvg/BquK5vI\n8fODn7MngmW/H/yOM4EFwMU1XFe5v7swt1fQPg2YeMiyNbm9yvt+qLHPmK5sFxGRKtGhLRERqRIF\niYiIVImCREREqkRBIiIiVaIgERGRKlGQiFQDMyuxr484XG2jRQcjyYZ5zYtIhRqGXYBIHbHPI8Nh\niNQ72iMRiaHgHhUPWOSeLfPN7PigPcXM3g0GIXzHzLoH7R0tch+QhcHj1OClEszsqeB+E2+aWbPQ\n3pTIIRQkItWj2SGHtq6MmrfT3U8C/go8ErT9BZju7gOIDIz456D9z8AH7j6QyL0vFgftvYBH3f1E\nYAeRK6dF4oKubBepBma2x91blNG+BviOu68KBtbb4u7JZpZHZJiPoqB9s7u3M7NcoKu7H4h6jRTg\nLXfvFTy/C2jk7r+N/TsTOTztkYjEnpczfSQORE2XoPObEkcUJCKxd2XUv58G03OJjCgNcA3wUTD9\nDvBDADNLMLOkmipS5GjprxqR6tHMzDKjns9x94NdgNuY2SIiexWjgrZbgKfN7E4gF7g+aL8VmGRm\n44jsefyQyIizInFL50hEYig4R5Lq7nlh1yISKzq0JSIiVaI9EhERqRLtkYiISJUoSEREpEoUJCIi\nUiUKEhERqRIFiYiIVImCREREquT/A/k8L/J2Y0qDAAAAAElFTkSuQmCC\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYwAAAEKCAYAAAAB0GKPAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xl8VOXZ//HPlR1IWBP2LUCCbIIQ9kUpLrjhVi2IoGhF\n6lKtba3W9qm/tlqr9SnaqogsCii4YaWtSt0AUbYE2SJbCBiCLGFfAoSE6/fHOfhMY5YTyMyZJNf7\n9ZrXZO45c+abk2EuznLft6gqxhhjTHki/A5gjDGmarCCYYwxxhMrGMYYYzyxgmGMMcYTKxjGGGM8\nsYJhjDHGEysYxhhjPLGCYYwxxhMrGMYYYzyJ8jtAZUpMTNS2bdv6HcMYY6qMjIyMvaqa5GXZalUw\n2rZtS3p6ut8xjDGmyhCRb7wua4ekjDHGeGIFwxhjjCdWMIwxxnhiBcMYY4wnVjCMMcZ4YgXDGGOM\nJ1YwjDHGeGIFA3juk83Mz9zFiVNFfkcxxpiwVa067p2N/IJCXv1yG/uOFRAVISTGx5KYEENSfCxJ\nCbEkuvdJCbHftTWuG0d8bI3fdMaYGqbGf+vVjoli6a+HsTR7H0u27CPvyEnyjp5kz5GTfL3zMHuP\nFlB0Wkt4XSSNE2JpnBD3XUFpXNd53Dghlub1a9GyQS3ioiN9+K2MMaby1fiCARAdGcHglCQGp3x/\nOJXTp5UD+QXkHT3pFJMjTjHZc9gtLIdPsH7nYRZuOsnRk4Xfe33jhFhaNqhFq4a1nfsGtWndsDYd\nmsSTFB+LiITiVzTGmHNmBaMcERFCo/hYGsXHcl7TspfNLygk78hJdh8+ybcHj7N9fz7bD+STe+A4\nK3MO8K81O/9rb6VB7WhSmiSQ2iSe1CYJdGySQNcW9ahjh7uMMWHIvpkqUe2YKNo0iqJNozolPl9Y\ndJpdh0+wbW8+m/ccYdPuI2zafZT3Vn3LkRPO3kmEQGqTBHq0qk/3VvVJa9OADo3jbU/EGOM7Kxgh\nFBUZQcsGtWnZoDaDUhK/a1dVdh8+yfqdh1m1/SCrth/kw8xdzFmxHXAOaw3skOjeGtGsXi2/fgVj\nTA0W1IIhIsOBZ4FIYIqqPlnCMhcBE4FoYK+qXui23w/cCQjwsqpODGZWP4kITevF0bReHEPPaww4\nRWTbvnyWb93H4qx9LNqUx7tf7QCgS/O6XN61KcO7NqVD4wQ/oxtjahBR/f4VQJWyYpFIYBNwCZAL\nrABGqerXAcvUB74Ehqtqjog0VtU9ItIVmAP0AQqAD4EJqppV1numpaVpdZ0P4/RpZePuIyzalMf8\nzF2szDkIQIfG8VzRrRk39mpJq4a1fU5pjKlqRCRDVdO8LBvMPYw+QJaqZruh5gDXAF8HLHMzMFdV\ncwBUdY/b3glYpqr57msXAtcDTwUxb1iLiBA6NatLp2Z1uevC9uw6dIL5mbv4YN1O/vbpZv726WYG\ndUhkZO/WXNK5CTFR1ifTGFO5glkwWgDbAx7nAn2LLZMKRIvIAiABeFZVZwDrgMdFpBFwHLgCKHHX\nQUTGA+MBWrduXZn5w1rTenHcOqAttw5oy46Dx3krfTtvrtjOPa+vpFGdGEb2acWtA9rSOCHO76jG\nmGrC75PeUUAvYBhQC1giIktVdb2I/Bn4D3AMWAWUOG6Hqk4GJoNzSCokqcNMi/q1eODiVO77QQqf\nb87j9WU5vLBgCy9/vpXrL2jBXRe2Jzmx5Cu3jDHGq2AWjB1Aq4DHLd22QLnAPlU9BhwTkUVAd2CT\nqk4FpgKIyBPusqYMkRHCRR0bc1HHxmzde4wpn2fzVkYub2XkckPPFtz3gxQ7z2GMOWvBPNC9AkgR\nkWQRiQFGAvOKLfMeMEhEokSkNs4hq/UAItLYvW+Nc/7i9SBmrXaSE+vw+HXdWPyroYzt34Z/rPqW\nHzyzgMfmZXIwv8DveMaYKihoexiqWigi9wLzcS6rnaaqmSIywX1+knvo6UNgDXAa59Lbde4q3nHP\nYZwC7lHVg8HKWp01Tojjd1d3YfyQdjz3SRYzlmzjH6t28LOLUxndtzVRkXZy3BjjTdAuq/VDdb6s\ntrJs2HWYP/zra77I2sd5TRP40/XduKB1A79jGWN8UpHLau2/lzXMeU3rMuuOvky6pRcH809x/Ytf\n8ti8zBIHTjTGmEBWMGogEWF416Z89OAQxvRrw6tLtnHZXxexfOt+v6MZY8KYFYwaLCEumt9f05W3\nJ/QnKlL40eQlPPnBBgoKT/sdzRgThqxgGHq1acj7Px3MyN6tmLRwC9e/+AXf7DvmdyxjTJixgmEA\nqBMbxZ+uP5+Xx6axff9xrvrbYuZn7vI7ljEmjFjBMP/lks5N+Nd9g0hOrMNdMzP40/vrS5yi1hhT\n81jBMN/TqmFt3prQn1v6tealRdmMn5FuV1EZY6xgmJLFRkXyx2u78YdrurBgUx4/fPFLcg/k+x3L\nGOMjKximTGP6t+WVcb3ZcfA41z7/BRnfHPA7kjHGJ1YwTLkGpyTx7t0DqRMbxaiXl/LvNTv9jmSM\n8YEVDONJh8bx/OPugZzfoh73zl7J7OU5fkcyxoSYFQzjWYM6Mcy8oy8XpibxyNy1TFq4xe9IxpgQ\nsoJhKqRWTCSTx6Rx1fnNePKDDTz5wQaq0wCWxpjS+T3jnqmCYqIieHbkBdStFc2khVs4dPwUj1/b\nlYgI8TuaMSaIyi0YIiJAV6A5zvzamaq6L9jBTHiLjBAev7Yr9WpF8+KCLagqT1zXzYqGMdVYqQVD\nRNoCDwHDga1AHhCHM4veQWASMEvteESNJSI8dFlHIkX4+2dZRLhFxPk/hjGmuilrD+Mp4EXgXlX9\nr+FLRaQZMBq4FXglaOlM2BMRfn5pKkWqvLhgC5Ei/P6aLlY0jKmGSi0YqnpTGc/tBP4SlESmyjmz\np1F0Wpm8KJvICOF3V3e2omFMNePlHMb1wEeqekREHgZ6Ak+o6qqgpzNVhojwyOXnUXRambp4KzFR\nETxy+XlWNIypRrxcVvuYWywGAFcAr+GcvyiXiAwXkY0ikuUWm5KWuUhEVolIpogsDGj/mdu2TkRm\ni0icl/c0/hERfnNlJ27t34bJi7KZtDDb70jGmErkpWAUufdXAS+p6ntAbHkvEpFI4HngcqAzMEpE\nOhdbpj7wAjBCVbsAN7rtLYCfAmmq2hWIBEZ6+o2Mr0SE313dhWt6NOfPH26wHuHGVCNe+mHsFJHn\nca6WShORGLwVmj5AlqpmA4jIHOAa4OuAZW4G5qpqDoCq7imWrZaInAJqA996eE8TBiIihKd/2J2D\n+ad49N21NKgdzfCuzfyOZYw5R16++G8CFgJXquoBIBEo8fBSMS2A7QGPc922QKlAAxFZICIZIjIW\nQFV34JxUzwF2AodU9T8e3tOEiZioCF68pSc9WtXnp7NX8WXWXr8jGWPOUbkFQ1WPquqbwB4Rae42\nr66k948CegFXApcBvxWRVBFpgLM3kozTYbCOiNxS0gpE
ZLyIpItIel5eXiXFMpWhdkwU027rTdvE\n2tw5I511Ow75HckYcw7KLRgicqWIbMLZQ1iGs9fwqYd17wBaBTxu6bYFygXmq+oxVd0LLAK6AxcD\nW1U1T1VPAXOBASW9iapOVtU0VU1LSkryEMuEUv3aMcy4vS/1akUz7pUV7Dh43O9Ixpiz5OWQ1OPA\nQGCjqrbCOZfxuYfXrcDpFZ7snvcYCcwrtsx7wCARiRKR2kBfYD3Ooah+IlLbHZpkmNtuqqCm9eKY\nPq4PJwqKGDd9OYeOn/I7kjHmLHgpGIWqmgdEiIio6kc4J7TLpKqFwL3AfJwv+zdVNVNEJojIBHeZ\n9cCHwBpgOTBFVdep6jLgbWAlsNbNObniv54JFx2bJjBpTC+y847xk1kZFBSeLv9FxpiwIuUNBSUi\nn+CcT/gzUBfYAwxU1X7Bj1cxaWlpmp6e7ncMU4a3M3L5xVuruaFnS/5y4/nWsc8Yn4lIhqqmeVnW\ny2W11wIngAeAsUA94Oqzj2dqsh/2aknugXwmfryZVg1r8cDFqX5HMsZ4VG7BUNUjAQ+nBjGLqSHu\nH5bC9v3HmfjxZtonxXN19+blv8gY47tSz2GcGaZDRA6IyP7i96GLaKobEeGJ67uS1qYBv3hrNWty\nD/odyRjjQVknvYe694lAUgn3xpy12KhIJo3pRWJ8LHfOSGf34RN+RzLGlKPUgnFmDgxVLQK6AHcB\n44HObpsx5yQxPpYpt6Zx5EQh42ekc+KUfayMCWdeOu49CszGGdajJTBbRB4JdjBTM3RqVpeJP+rB\nmh2HeOjtNdgEjsaELy/9MMYCvVX1UVV9FKcPxm1BTWVqlEu7NOUXl3Zk3upveWHBFr/jGGNK4Wm0\n2mLLRbltxlSauy9qz6bdR3h6/kbaJ8UzvGtTvyMZY4optWCIyF8BBfYDmSIy3318Kc6wH8ZUGhHh\nzzecz7Z9+Tz45iqSEwfSsWmC37GMMQHKOiS1DsgE/g08BiwBlgK/Bz4IejJT48RFRzJ5TC/qxEYx\nfmY6h/JtzCljwkmpexiqap30TMg1qRvHi6N7Murlpdz/xldMvbU3kRE2fIgx4cDLSW9jQiqtbUN+\nd3UXFmzMY+LHm/yOY4xxWcEwYWl039b8KK0Vf/s0iw/X7fI7jjGGcgqGiESKyJOhCmPMGSLC/7um\nC91b1efnb64ia8+R8l9kjAmqMguG26N7aFnLGBMscdGRTLqlJ7ViIhk/I4PDJ+wkuDF+8nJIKkNE\n5orIKBEZceYW9GTGAM3q1eL5m3uSsz+fB99YxenT1hPcGL94KRgJwDHgCuBG9/bDYIYyJlDfdo34\n7VWd+Xj9Hp77dLPfcYypsbzMhzEmFEGMKcvY/m1Yk3uIiR9vpkvzelzSuYnfkYypcbwMPthcRN4S\nkZ3u7Q0RsRlvTEiJCI9f15VuLerx4BuryM476nckY2ocL4ekpgP/Adq6t4/ctnKJyHAR2SgiWSLy\ncCnLXCQiq0QkM2DSpo5u25nbYRF5wMt7muorLtqZQyM6KoK7ZmZw7GSh35GMqVG8FIwmqvqyqp50\nb1OAco8HiEgk8DxwOdAZGCUinYstUx94ARihql1wzo+gqhtVtYeq9gB6AfnAuxX5xUz11KJ+Lf42\n6gK25B3loXdsOHRjQslLwdgvIiPl//wIZ0DC8vQBslQ1W1ULgDnANcWWuRmYq6o5AKq6p4T1DAO2\nqOo3Ht7T1AADOyTy0PDz+PeanUz5fKvfcYypMbwUjNtx5sTYC+QBY9y28rQAtgc8znXbAqUCDURk\ngYhkiMjYEtYzEmcCJ2O+c9eQdlzetSl/+mA9X2bt9TuOMTVCqQVDRJ5wf+ypqleoaiNVTVTVq1R1\nWyW9fxTOIacrgcuA34pIakCGGGAE8FYZOceLSLqIpOfl5VVSLBPuRISnb+xOu6R47p39Fd8ePO53\nJGOqvbL2MK52739zluveAbQKeNzSbQuUC8xX1WOquhdYBHQPeP5yYKWq7i7tTVR1sqqmqWpaUlLS\nWUY1VVF8bBQvjelFQeFpfjIrw+YENybIyioYH4nIAaCbiOwPuB0QES/nMFYAKSKS7O4pjATmFVvm\nPWCQiESJSG2gL7A+4PlR2OEoU4b2SfE8c1N3Vuce4rF5mX7HMaZaK7VgqOqDQEOcS2qTAm6J7n2Z\nVLUQuBeYj1ME3lTVTBGZICIT3GXWAx8Ca4DlwBRVXQcgInWAS4C5Z/3bmRrhsi5NuXdoB+as2M7s\n5Tl+xzGm2pLqdFliWlqapqen+x3D+KDotDLulRUs3bKPN+7qxwWtG/gdyZgqQUQyVDXNy7I2H4ap\nFiIjhOdG9qBJvVh+MmsleUdO+h3JmGrHCoapNurXjmHSLb04kF/Ava+vpLDotN+RjKlWvIwldbmI\n2KTKpkro0rweT97QjWVb9/PkBxv8jmNMteJlD+NWYLOIPCEiKcEOZMy5uu6Cltzavw1TFm9l3upv\n/Y5jTLVRbsFQ1ZFAGk4fitdF5HMRud29ismYsPTolZ1Ja9OAX729hg27Dvsdx5hqwdM5DFU9CLwO\nvAq0xukfsVpE7g5iNmPOWkxUBC+M7kl8XBR3zczg0HGb3tWYc+XlHMYVIvIWsBhn9r1+qnoJTo/s\nXwU5nzFnrXHdOF4c3ZMdB47zM5ve1Zhz5mUPYzTwoqp2UdU/qepOAFU9BtwZ1HTGnKO0tg35n6s7\n8+kGm97VmHPlpWD8XlU/DWwQkcEAqvqfoKQyphKN6deG63u2YOLHm/l0Q6nDkhljyuGlYMwVkQcB\nRCRWRP4K/CW4sYypPCLCE9d1o3OzujwwZxXb9h7zO5IxVZKXgtEXSBWRxTgDCu4HBgQ1lTGVLC46\nkpfG9CIiQpgwK4P8Apve1ZiK8lIwTgAHgHpAHWC9qto40qbKadWwNs+NvICNu4/w8DtrbXpXYyrI\nS8FYASjOREdDgHEiMieoqYwJkiGpSfzi0o7MW/0t077Y5nccY6qUKA/LTFDVZe7PO4ArRWRcEDMZ\nE1R3X9Se1dsP8sT76+nSvC792jXyO5IxVYKXnt7LRKTLmXksRKSzqk4PRThjgkFEeOam7rRpWJt7\nX1/JzkM2vasxXnjpuHcvzpzard3bW9bD21R1CXHRvDSmF8cLivjJrJWcLLTTcsaUx8s5jPFAH1X9\ntar+GueqqQnBjWVM8KU0SeAvN3Zn1faDPDbva7/jGBP2vBQMAQoCHp9y24yp8i7v1owJF7Zn9vIc\nZi7Z5nccY8Kal5PeM4FlIvKO+/g6nEEIjakWfnlZRzbvPsJj//ya5MR4BqUk+h3JmLDk5aT3U8Bd\nQL57m6Cqnnp6i8hwEdkoIlki8nApy1wkIqtEJFNEFga01xeRt0Vkg4isF5H+3n4lYyomMkKYOLIH\n7ZPqcPdrGWTnHfU7kjFhqcyCISKRIpKpqstV9X/d2wovKxaRSOB54HKgMzBKRDoXW6Y+8AIwQlW7\nADcGPP0s8KG
...[remaining base64 image data omitted; this cell produced three figures: total (xval) loss vs. epoch, training loss vs. epoch, and their difference vs. epoch]\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ],
"source": [ "# plot path of loss function over first 200 epochs\n",
"# xval loss, proxy for total error\n",
"total_loss = np.array(fit.history['val_loss'][:200])\n",
"# training loss, proxy for bias error\n",
"bias = np.array(fit.history['loss'][:200])\n",
"# difference, proxy for variance error\n",
"variance = total_loss - bias\n",
"\n",
"plt.plot(total_loss)\n",
"plt.ylabel('Total loss')\n",
"plt.xlabel('Epoch')\n",
"plt.show()\n",
"\n",
"plt.plot(bias)\n",
"plt.ylabel('Training loss (proxy for bias)')\n",
"plt.xlabel('Epoch')\n",
"plt.show()\n",
"\n",
"plt.plot(variance)\n",
"plt.ylabel('Difference (proxy for variance)')\n",
"plt.xlabel('Epoch')\n",
"plt.show()\n",
"\n",
"# note that training loss (bias) declines continuously\n",
"# total loss declines, reaches a minimum, then climbs as overfitting and variance increase\n",
"# http://scott.fortmann-roe.com/docs/BiasVariance.html\n",
"\n" ] },
"iVBORw0KGgoAAAANSUhEUgAAAYwAAAEKCAYAAAAB0GKPAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd8VfX9x/HXh0DYm8heIqLINgzRLqt1VMWtOMDxE7Ha\nat3Wals7HHXb/rSAoLgY4q+ldWKr1qqMgAQZgkzZhJVAApmf3x/nYNOUhJOQe2/G+/l43EfuPSN5\nc3K5n5zz/Z7v19wdERGRQ6mT6AAiIlI9qGCIiEgkKhgiIhKJCoaIiESigiEiIpGoYIiISCQxLRhm\ndrqZLTezlWZ290HWH2Nmn5lZrpndXp59RUQkvixW92GYWRKwAjgV2ADMA0a6+9Ji2xwBdAXOBXa5\n+6NR9xURkfiK5RnGEGClu6929zxgCjCi+Abuvs3d5wH55d1XRETiq24Mv3dHYH2x1xuAobHct02b\nNt6tW7eo+UREar358+dvd/eUKNvGsmDEhZmNAcYAdOnShbS0tAQnEhGpPsxsXdRtY3lJaiPQudjr\nTuGySt3X3ce5e6q7p6akRCqSIiJSAbEsGPOAnmbW3cySgUuBmXHYV0REYiBml6TcvcDMbgLeBZKA\nie6+xMzGhuufM7N2QBrQDCgys1uA3u6edbB9Y5VVREQOLWbdahMhNTXV1YYhIhKdmc1399Qo2+pO\nbxERiUQFQ0REIlHBEBGRSFQwRESqsbS1O3nuo1Vx+VnV/sY9EZHaaH9+IY++u5znP1lD55aNGHVC\nVxolx/YjXQVDRKSaSV+/m1unLWRVRjaXD+3Cz848NubFAlQwRESqjbyCIp75x1f874erOKJpfV6+\ndign9WwTt5+vgiEiUg18uSWLW6ems3RzFhcM6sT9Z/emecN6cc2ggiEiUoUVFjl/+ucqnpi1guYN\n6zHuyuP5wXHtEpJFBUNEpIpanbGX26an8/nXuzmzbzt+c25fWjVOTlgeFQwRkSqmqMiZ/NlaHnrn\nS5KT6vDUpQM4p38HzCyhuVQwRESqkA27crjz9UV8umoH3+2VwsMX9KNtswaJjgWoYIiIVAnuzvT5\nG3jgr0txdx48vy+XDu6c8LOK4lQwREQSbMfeXO5+4wtmLd3K0O6tePSi/nRu1SjRsf6LCoaISAJ9\nsHwbd0xfRNa+fO4981iuPak7depUnbOK4lQwREQSYF9eIQ++vYzJn62jV9umvHTtEI5t3yzRscqk\ngiEiEmeLN2Zyy9SFrNy2l2tP6s4dp/WiQb2kRMc6JBUMEZE4KSxyxv1zNY/PWk6rxslxH9rjcKlg\niIjEwYZdOdw6LZ25a3ZyZt92/O68vrRolLib8CpCBUNEJIbcnb8s3MR9f16MA49d1J/zB3WsUt1l\no1LBEBGJkcycfH7+l8X8NX0TqV1b8sQlA6pkd9moVDBERGLg01XbuX1aOtv25HL7D45m7Hd6UDep\nek9yqoIhIlKJcgsKefy9FYz7eDXdWzdmxg3D6d+5RaJjVQoVDBGRSrJi6x5unrKQZZuzuHxoF+79\nYXxmwouXmvMvERFJkKIi58XP1vLg21/StH5dJoxK5ZTebRMdq9KpYIiIHIatWfu5fXo6H3+1nZOP\nOYKHL+hHStP6iY4VEyoYIiIV9M7izdzzxhfsyy/kN+f24fKhXapld9moVDBERMppb24BD/x1CdPS\nNtC3Y3OevHQAPVKaJDpWzB2yYJhZKvAtoAOwD1gMzHL3XTHOJiJS5cxft4ufTl3Ihl053PS9o7j5\nlJ7Uq+bdZaMqtWCY2dXAj4E1wHxgOdAAOAm4y8wWA/e5+9fxCCoikkj5hUU884+V/OEfX9GhRUOm\nXn8Cg7u1SnSsuCrrDKMRcKK77zvYSjMbAPQEVDBEpEZbsz2bW6YuJH39bs4f1JFfnnMczRrUS3Ss\nuCu1YLj7H8va0d0XVn4cEZGqw92ZMm89D/x1Kcl16/CHywZyVr8OiY6VMFHaMI4GngXaunsfM+sH\nnOPuv4l5OhGRBNmxN5e7ZnzB+8u2ctJRbXj0ov60a94g0bESKkpLzXjgHiAfwN0XAZdG+eZmdrqZ\nLTezlWZ290HWm5k9Ha5fZGaDiq37qZktMbPFZvaamdXu35SIxM0HX27jtCc/5p9fZXDfWb2ZfM2Q\nWl8sIFrBaOTuc0ssKzjUTmaWBPwROAPoDYw0s94lNjuDoB2kJzCG4EwGM+sI/ARIdfc+QBIRi5SI\nSEXtyyvk53/+gqtfmEebJsnMvOnEKj3HdrxFuQ9ju5n1ABzAzC4ENkfYbwiw0t1Xh/tNAUYAS4tt\nMwKY7O4OzDazFmbWvli2hmaWT9AAvynKP0hEpCK+2JDJzVM/Z3VGNtd9qzu3/aB6TJsaT1EKxo3A\nOOAYM9tI0M32igj7dQTWF3u9ARgaYZuO7p5mZo8S9MDaB7zn7u8d7IeY2RiCsxO6dOkSIZaIyL8V\nFjnPfbSKJ2atoE2T+rzyP0M58ajqM21qPB2yYIRnCKeYWWOgjrvviXUoM2tJcPbRHdgNTDezK9z9\n5YPkG0dQ0EhNTfVYZxORmmP9zhx+OnUhaet28cN+7fntuX2q3bSp8XTINgwz+52ZtXD3bHffY2Yt\nzSxKD6mNQOdirzuFy6Jscwqwxt0z3D0feAMYHuFniogckrszY/4GznjqY5Zv2cMTl/TnDyMHqlgc\nQpRG7zPcffeBF+GQIGdG2G8e0NPMuptZMkGj9cwS28wERoW9pYYBme6+meBS1DAza2TBSF7fB5ZF\n+JkiImXanZPHTa9+zm3T0+ndvhlv3fwtzhvYqUYPGlhZorRhJJlZfXfPBTCzhsAhx+519wIzuwl4\nl6CX00R3X2JmY8P1zwFvERSflUAOcHW4bo6ZvQ4sIOiR9TnhZScRkYr611fbuW36QnbszePO03tx\n/bd7kKQeUJFZ0EGpjA3M7gLOBiaFi64GZrr7IzHOVm6pqamelpaW6BgiUsXszy/kkXeWM/GTNfRI\nacxTlw6kT8fmiY5VJZjZfHdPjbJtlEbvh81sEcFlIYBfu/u7hxNQRCRelm3O4pYpC1m+dQ+jTujK\nPWccS8NkdZetiEjzYbj728DbMc4iIlJpCoucCR+v5rH3VtCsYT0mXT2Y7/U6ItGxqrUoY0mdDzwM\nHAFY+HB3bxbjbCIiFbJuRza3T09n3tpdnHZcW353Xl9aN6mZ06bGU5QzjEeAs91dvZREpEpzd16d\n+zW/fXMZSWY8fnF/zhvYUT2gKkmUgrFVxUJEqrqtWfu58/VFfLQig5OOasMjF/ajQ4uGiY5Vo0Qp\nGGlmNhX4M5B7YKG7vxGzVCIi5TAzfRP3/XkxuQWFPDDiOK4Y2lUDBsZAlILRjOAeiR8UW+YEd1+L\niCTMruw8fv6Xxby5aDMDOrfg8Yv7c2RKk0THqrGidKu9Oh5BRETK44Mvt3HnjEXsys7j9h8czdjv\n9KBuUpTBK6SiovSSagBcCxwHfDODiLtfE8NcIiIHtTe3
gN++uZTX5q6nV9umTLpqsG7Ci5Mol6Re\nAr4ETgMeAC5H4zqJSALMXbOT26YvZMOufVz/nSO59dSjqV9XN+HFS5SCcZS7X2RmI9z9RTN7Ffg4\n1sFERA7Yn1/I47NWMP7j1XRu2Yhp15/A4G6tEh2r1olSMPLDr7vNrA+wheAmPhGRmFu8MZNbpy1k\nxda9XDa0C/eeeSyN60capEIqWZSjPi6c0Og+guHImwD3xzSViNR6BYVFPPvhKp76+1e0apysoT2q\ngCi9pCaETz8CjoxtHBERWJWxl1unpZO+fjfn9O/AAyOO0+RGVUCpBePAlKhmduvB1rv747GLJSK1\nUVGR8+Jna3no7S9pmJzEMyMHcnb/DomOJaGyzjAah1+bxiOIiNRuG3fv447p6Xy6agff65XCwxf0\n44hmDQ69o8RNqQXD3f9kZklAlrs/EcdMIlKLuDszFmzkVzOXUOTOg+f35dLBnTVgYBVUZhuGuxea\n2UhABUNEKt32vbn87I0veG/pVoZ0a8WjF/WnS+tGiY4lpYjSS+oTM/sDMBXIPrDQ3RfELJWI1Hjv\nLN7Cvf/3BXv2F/CzM4/h2pOO1PzaVVyUgjEg/PpAsWUOnFz5cUSkpsvcl8+v/rqENxZspE/HZrx2\n8QCObqum0uogSrfa78UjiIjUfP/6ajt3vJ7Otj25/OT7PfnxyUdRTwMGVhuRbpc0sx/y34MPPlD6\nHiIi/7Yvr5CH3l7Gi5+t48iUxrxxw3D6d26R6FhSTlFGq30OaAR8D5gAXAjMjXEuEakhFny9i9um\npbNmezZXn9iNu04/hgb1NGBgdRTlDGO4u/czs0Xu/iszewx4O9bBRKR6yyso4qm/r+DZD1fRvnlD\nXr1uKMN7tEl0LDkMUQrGvvBrjpl1AHYA7WMXSUSqu2Wbs7h1WjrLNmdx0fGduP/s3jRtUC/RseQw\nRSkYfzOzFsDvgQUEPaTGxzSViFRLhUXOuH+u5olZK2jWsC7jR6Vyau+2iY4llSRKL6lfh09nmNnf\ngAbunhnbWCJS3azdns3t09NJW7eLM/q04zfn9qF1k/qJjiWVKEqj9yJgCjDV3VcBuTFPJSLVhrvz\n2tz1/PpvS6mXZDx5yQBGDOigoT1qoCiXpM4GLgGmmVkRwR3f09z965gmE5Eqb8feXO6a8QXvL9vK\nSUe14fcX9aN984aJjiUxEuWS1DrgEeARM+tJMJHSw4D6xYnUYh8s38Yd0xeRtT+f+87qzdXDu1FH\nQ3vUaFFv3OtKcJZxCVAI3BnLUCJSde3PL+TBt4Kb8Hq1bcrL/zOEY9o1S3QsiYMobRhzgHrANOAi\nd18d81QiUiUt2ZTJzVMWsnLbXq45sTt3nt5LN+HVIlHOMEa5+/KKfHMzOx14iuDy1QR3f6jEegvX\nnwnkAFcdGAU37Mo7AehD0JX3Gnf/rCI5ROTwFBU54z9ezaPvLadlo2ReunYI3+qZkuhYEmdR2jAq\nWiySgD8CpwIbgHlmNtPdlxbb7AygZ/gYCjwbfoWgkLzj7heaWTLB8CQiEmebM/dx69R0Plu9g9OP\na8eD5/elZWPNr10bRWrDqKAhwMoDl7DMbAowAiheMEYAk93dgdlm1sLM2hOcbXwbuArA3fOAvBhm\nFZGDmLV0K3e8nk5eQRGPXNCPi1I7qbtsLVZmwTCzOsAwd/+0At+7I7C+2OsN/PvsoaxtOgIFQAYw\nycz6A/OBm909GxGJudyCQh5860te+HQtfTo245mRg+jepnGiY0mClTkQvbsXEVxWire6wCDgWXcf\nSDDT390H29DMxphZmpmlZWRkxDOjSI20OmMv5//vp7zw6VquObE7M24YrmIhwCEKRujvZnaBlf88\ndCPQudjrTuGyKNtsADa4+5xw+esEBeS/uPs4d09199SUFDXCiRyOGfM3cNYz/2LT7n1MGJXK/Wf3\npn5d9YKSQJQ2jOuBW4FCM9sHGODufqiO1/OAnmbWnaAIXApcVmKbmcBNYfvGUCDT3TcDmNl6M+sV\nNrp/n/9s+xCRSpSdW8B9f17MG59vZEj3Vjx16QDdsS3/JUovqQpNtuvuBWZ2E/AuQbfaie6+xMzG\nhuufA94i6FK7kqCh++pi3+LHwCthD6nVJdaJSCVZuimLm15dwNod2dxySk9+fHJPknTHthyEBR2U\nDrGR2TkEvZYAPnT3v8U0VQWlpqZ6WlpaomOIVBvT09bz8z8vpnnDejw9ciDDjmyd6EgSZ2Y2391T\no2wb5U7vh4DBwCvhopvN7ER3v+cwMopIAu3PL+SXM5cwZd56hvdozVOXDiSlqYYil7JFacM4ExgQ\n9pjCzF4EPgdUMESqoa935HDDK/NZsimLG7/Xg1tP7aVLUBJJ1Bv3WgA7w+fNY5RFRGLs/aVbuXXa\nQgCeH53K94/VbHgSXZSC8SDwuZl9QNBD6tuUck+EiFRNBYVFPDZrBc9+uIo+HZvx7OXH07mVRtuR\n8im1YITtFJ8AbwAfErRjANzl7lvikE1EKsHO7DxuenUBn67awcghXfjF2b01wqxUSFlnGE8DxwOf\nufsggnsmRKQaWbopizEvpbFtTy6PXtSfC4/vlOhIUo2VVTDyzWwc0MnMni650t1/ErtYInK43ly0\nmdunp9O8YT2mX38C/Tu3SHQkqebKKhhnAacApxEM/ici1UBRkfPE+yt45h8rGdSlBc9deTxHNG2Q\n6FhSA5RaMNx9OzDFzJa5e3ocM4lIBe3Zn89Ppy7k/WXbuCS1Mw+ce5zGgpJKE2VoEBULkWpgzfZs\nrpucxprt2fzqnOMYdUJXzV0hlSqWEyiJSJx8/FUGN76ygKQ6xsvXDuWEHhriQypflKFBkty9MB5h\nRKT8Xpmzjvv/soSeRzRh/KhU3V8hMRPlDOMrM5sBTCoxH7eIJFBRkfPg28sY//EavtsrhT9cNogm\n9XXRQGInygRK/YEVwAQzmx3OcHeouTBEJIZy8goY+/J8xn+8htEndGXCqFQVC4m5QxYMd9/j7uPd\nfThwF/ALYLOZvWhmR8U8oYj8h21Z+7nkT7OZtWwrvzi7N78a0Ye6SVH+9hM5PJHaMIAfEkxg1A14\njGCo828RTIB0dAzziUgxX27J4ppJ89i9L5/xV6ZySm8NHijxE6kNA/gA+L27f1ps+etm9u1S9hGR\nSvbZqh2MmZxGo/pJTLv+BPp01MDREl9RCsYAd88qvsDMkt09T8ODiMTHm4s289OpC+nauhEvXjOE\nDi0037bEX5QLnzPNrNuBF2Y2BJgXq0Ai8p9e+GQNN722gP6dmzN97AkqFpIwUefDeCccgLAjcAZB\ne4aIxJC788i7y3n2w1X8oHdbnh45UMOSS0JFGRrkXTMbC8wCtgMDNR+GSGzlFxZx14xFvLFgI5cN\n7cKvR/TRNKqScFF6Sd0HXEww014/4EMzu83d34x1OJHaaF9eIT96ZT4fLM/g1lOP5scnH6UxoaRK\niHJJqjUwxN3
3AZ+Z2TvABEAFQ6SS7dmfz7UvpDFv3U5+d15fLhvaJdGRRL4R5ZLULQBm1iR8vQ44\nNca5RGqdndl5jJ44l2Wbs3jq0oGc079DoiOJ/IdD9pIysz5m9jmwBFhqZvPN7LjYRxOpPbZk7ueS\nP33Giq17GDfqeBULqZKiXJIaB9zq7h8AmNl3gfHA8BjmEqk1vt6Rw+XPz2bn3jxeuHqIhiaXKitK\nwWh8oFgAuPuHZtY4hplEao0VW/dwxYQ55BUW8cp1wxigebelCotSMFaHPaVeCl9fAayOXSSR2mHp\npiwunzCbukl1mDrmBHq1a5roSCJlinKn9zVACvAGMANoEy4TkQpavDGTyybMpkG9JKZfr2Ih1UOZ\nZxjhSLX3aswokcqzaMNurpgwh6YN6vHadcPo0loz5En1UOYZRjg160lxyiJS433+9S4unzCHZg3r\nMWWMioVUL1HaMD43s5nAdCD7wEJ3fyNmqURqoPnrdjJ64jxaN0nm1euG0VGDCEo1E6VgNAB2ACcX\nW+YEbRoiEsG8tTu5auJcjmjWgFevG0r75ioWUv1EudO7wiPTmtnpwFNAEjDB3R8qsd7C9WcCOcBV\n7r6g2PokIA3Y6O5nVTSHSCKlrd3J6Ilzade8Aa9dN4y2zRokOpJIhUS50/tIM/urmWWY2TYz+4uZ\ndY+wXxLwR4Lh0HsDI82sd4nNzgB6ho8xwLMl1t8MLIvw7xCpkhau381Vk+bRtlkDpqhYSDUXpVvt\nq8A0oD3QgaAtY0qE/YYAK919tbvnhfuMKLHNCGCyB2YDLcysPYCZdSKYS3xCpH+JSBWzeGMmo56f\nQ6vGybx63VCOULGQai5KwWjk7i+5e0H4eJmgXeNQOgLri73eEC6Lus2TwJ1AUVk/xMzGmFmamaVl\nZGREiCUSe8u37OHK54Ous2qzkJoiSsF428zuNrNuZtbVzO4E3jKzVmbWKhahzOwsYJu7zz/Utu4+\nzt1T3T01JSUlFnFEymXltr1cPmE2yXXr8Op1Q+nUUl1npWaI0kvq4vDr9SWWX0rQW+rIUvbbCHQu\n9rpTuCzKNhcA55jZmQRnM83M7GV3vyJCXpGEWbs9m8vGzwaMV68bRtfWGnZNao4ovaQO2cBdinlA\nz7CBfCNBgbmsxDYzgZvMbAowFMh0983APeHjwOi4t6tYSFW3fmcOl42fTUGRM2XMMHqkNEl0JJFK\nVeolKTMr8w5vM2tmZn1KW+/uBcBNwLsEPZ2mufsSMxsbzhEO8BbBQIYrCYZM/1E584tUCZsz93HZ\nhNlk5xXy0rVDOLqtxoaSmsfc/eArzJ4g+Kv/HWA+kEFweego4HtAV+A2d58Xn6iHlpqa6mlpaYmO\nIbXMzuw8LnruU7Zl5fLKdUPp10lDlEv1YWbz3T01yralXpJy95+GjdoXABcRdKvdR3C28Cd3/1dl\nhBWpzrJzC7j6hXls2LWPl65VsZCarcw2DHffSXCpaHx84ohUH3kFRdzwygIWb8zkuSuOZ0j3mHQa\nFKkyonSrFZESioqc26en888VGTx4Xl9O7d020ZFEYk4FQ6Sc3J0H/raUmembuOv0Y7h4cOdD7yRS\nA6hgiJTT/364ihc+Xcu1J3Vn7HdKuw1JpOaJMvhgIzO7z8zGh697hndii9Q6U+Z+ze/fXc55Azty\n75nHEgy4LFI7RDnDmATkAieErzcCv4lZIpEq6p3FW/jZ/33Bd3ul8MiF/ahTR8VCapcoBaOHuz8C\n5AO4ew6g/ylSq8xevYOfTPmc/p1b8L+XD6Jekq7mSu0T5V2fZ2YNCcaNwsx6EJxxiNQKSzZlct2L\naXRp1YiJowfTKDnKEGwiNU+Ud/4vCO727mxmrwAnAlfFMpRIVfH1jhxGT5xHkwZ1mXzNEFo2Tk50\nJJGEiTL44CwzWwAMI7gUdbO7b495MpEEy9iTy5UT51BQVMSUMSfQoYXmtJDaLUovqfOAAnd/093/\nBhSY2bmxjyaSOFn78xk9cS7bsnKZdNVgjjpCgwmKRGnD+IW7Zx544e67CS5TidRI+/MLGTM5jRVb\n9/DsFYMY2KVloiOJVAlR2jAOVlTU6ic1UmGRc8uUhcxevZMnLxnAd3sdkehIIlVGlDOMNDN73Mx6\nhI/HCYY7F6lR3J2f/3kx7yzZwv1n9ebcgSWnoBep3aIUjB8DecDU8JEL3BjLUCKJ8MSsFbw292t+\n9N0eXHNSRSeaFKm5ovSSygbujkMWkYR54ZM1PP2PlVyS2pk7TuuV6DgiVdIhC4aZHQ3cDnQrvr27\nnxy7WCLxMzN9E7/621J+0Lstvz2vj8aHEilFlMbr6cBzwASgMLZxROLr468yuG3aQgZ3bcXTIwdS\nV0N+iJQqSsEocPdnY55EJM7S1+/m+pfm0yOlCeNHp9KgXlKiI4lUaVH+nPqrmf3IzNqbWasDj5gn\nE4mhVRl7uWrSXFo3SWbyNUNo3rBeoiOJVHlRzjBGh1/vKLbMAc0cI9XSlsz9jHp+Lkl1jMnXDOWI\nZg0SHUmkWojSS0r9C6XG2J2Tx6iJc8jcl8+UMcPo3qZxoiOJVBtRZ9z7uZmNC19rxj2plvblFXLt\ni2ms3Z7DuCuPp0/H5omOJFKtRJ1xLw8YHr7WjHtS7eQXFnHjqwtY8PUunrx0AMOPapPoSCLVjmbc\nkxqvqMi5a8Yi/vHlNn49og9n9m2f6Egi1ZJm3JMa76F3vuSNBRv56SlHc8WwromOI1JtacY9qdH+\n9NEqxv1zNaNO6MpPvn9UouOIVGtlFgwLxkj4Ejgfzbgn1czUeV/z4Ntf8sN+7fnF2cdpyA+Rw1Rm\nwXB3N7O33L0v8GacMokctre/2Mw9b3zBd45O4YmLB5BUR8VC5HBFacNYYGaDY55EpJJ8/FUGN09Z\nyMAuLXn2ikEk19X4UCKVIUobxlDgcjNbB2QTXJZyd+8X02QiFbDg611c/9J8jkxpzMTRg2mUrMkh\nRSpLlP9Np1X0m5vZ6cBTQBIwwd0fKrHewvVnAjnAVe6+wMw6A5OBtgS9s8a5+1MVzSG1w/Ite7h6\n0jxSmtYPxodqpPGhRCrTIc/V3X0d0Bk4OXyeE2U/M0sC/gicAfQGRppZ7xKbnQH0DB9jgAOj4hYA\nt7l7b4LG9hsPsq/IN9bvzOHK5+dQv24dXr5W40OJxEKUD/5fAHcB94SL6gEvR/jeQ4CV7r7a3fOA\nKcCIEtuMACZ7YDbQwszau/tmd18A4O57gGWAJliWg9qWtZ/LJ8whr7CIl/9nKJ1bNUp0JJEaKUpr\n4HnAOQTtF7j7JqBphP06AuuLvd7Af3/oH3IbM+sGDATmRPiZUstk5uQzauJctu/NZdJVgzm6bZS3\npohURKQ7vd3d+fed3nEb3tPMmgAzgFvcPauUbcaYWZqZpWVkZMQrmlQBWfvzuXLiHFZnZDPuylQG\ndmmZ6EgiNVqUgjHNzP5EcLnoOuB9YHyE/TYStH0c0ClcFmkbM6tHUCxe
cfc3Svsh7j7O3VPdPTUl\nJSVCLKkJ9uYWMHriXJZtzuLZKwZxUk8NJigSa6UWDDOrD+DujwKvE3x49wLud/dnInzveUBPM+tu\nZsnApcDMEtvMBEZZYBiQ6e6bw95TzwPL3P3xcv+rpEbLzi3g6klzWbQhk2dGDuL7x7ZNdCSRWqGs\nbrWfAYPM7CV3vxKYVZ5v7O4FZnYT8C5Bt9qJ7r7EzMaG658D3iLoUruSoPfV1eHuJwJXAl+Y2cJw\n2c/c/a3yZJCaJ5jTYh7z1+3i6ZEDOb1Pu0RHEqk1yioYyWZ2GTDczM4vubKsy0TFtnmLoCgUX/Zc\nsecO3HiQ/f6FhlCXEvbnF3Ld5DTmrNnJExcP4Kx+HRIdSaRWKatgjAUuB1oAZ5dY58AhC4ZIZckt\nKGTsy/P5ZNV2fn9hf84dqF7WIvFWVsFo7+43mNnn7j4ubolEStifHxSLD5dn8OD5fbnw+E6JjiRS\nK5XVS+rAjXpj4xFE5GBy8gq45oV5fLQiKBYjh3RJdCSRWqusM4wdZvYe0N3MSvZuwt3PiV0sEdiz\nP59rXggauB+7qD/nD9KZhUgilVUwfggMAl4CHotPHJFAZk4+oybNZcnGoOvsD/tpHm6RRCu1YITj\nP802s+FZr2MuAAAMhElEQVTurluoJW4y9uQyeuJcVm7by7NXHM+pvXWfhUhVUGrBMLMn3f0WYKKZ\necn1uiQlsbBuRzajJs5lW1Yu40en8p2jdfe+SFVR1iWpl8Kvj8YjiMjijZlcNWkuhUXOq9cN1dhQ\nIlVMWZek5odfPzKzlPC5Lk1JTHy6cjtjXppP84b1mHztEHqkNEl0JBEpoczBB83sl2a2HVgOrDCz\nDDO7Pz7RpLaYmb6JqybNo2OLhsy4YbiKhUgVVdbgg7cSjOk02N1buXtLgvm9TzSzn8YroNRc7s7j\ns1bwk9c+Z0CXFkwbewLtmmumPJGqqqw2jCuBU919+4EF7r7azK4A3gOeiHU4qbn25xdy2/R03ly0\nmYuO78RvzutD/bpJiY4lImUoq2DUK14sDnD3jHCuCpEK2Zq1nzGT01i0MZN7zjiGMd8+kmBEexGp\nysoqGHkVXCdSqgVf7+JHLy8ga38+465M1T0WItVIWQWjv5kdbFpUA3ShWcrF3Zn0yVp+99Yy2jVv\nwOtjh9O7Q7NExxKRciirW60uKEulyNqfz12vL+LtxVs45di2PHZRf5o30lVNkeqmrDMMkcO2ZFMm\nN76ygPW79qm9QqSaU8GQmCgscsb9czWPz1pOy0bJvHbdMIZ0b5XoWCJyGFQwpNKt3Z7NbdPTmb9u\nF6cf147fnteH1k3qJzqWiBwmFQypNO7Oy3O+5ndvLqNukvHEJf05d0BHXYISqSFUMKRSfLkli/v+\nvJh5a3fxrZ5teOTCfrRv3jDRsUSkEqlgyGHZsz+fJ9//ihc+XUuzBnV5+IK+XJzaWWcVIjWQCoZU\nSFGR89dFm/jtm8vI2JvLpYO7cOdpvWjZODnR0UQkRlQwpFzcnQ+Wb+P3765g2eYs+nZszrhRqQzo\n3CLR0UQkxlQwJLI5q3fw+3eXk7ZuF11aNeLJSwZwdv8OJNXR5SeR2kAFQ8pUVOT848ttjP94NXPW\n7OSIpvX5zbl9uGRwZ+ollTmdiojUMCoYclD78gqZsWADE/+1htXbs+nQvAE//+GxXDGsKw3qadQY\nkdpIBUO+4e4s3pjF6/PX85f0TezOyadfp+Y8PXIgZ/RppzMKkVpOBUPYkrmfmekbmTF/I8u37iG5\nbh1O7d2W0Sd0Y3C3luoiKyKACkat5O4s37qHWUu28v6yraRvyARgYJcW/Pa8PpzVt4NGkxWR/6KC\nUUts2r2POWt2MGf1Tj5ZtZ31O/cBMKBzC+44rRen92lHj5QmCU4pIlWZCkYNtDe3gGWbs1i8MZMv\nNmaStnYXX+/MAaBZg7oM6d6aH333KL5/zBEc0UxzYYlINCoY1ZS7szsnn9Xbs1mdsZc127NZnZHN\nim17WLM9G/dguzZN6jOoSwtGD+/GsCNbcUy7ZrpvQkQqJKYFw8xOB54CkoAJ7v5QifUWrj8TyAGu\ncvcFUfatqfblFbIrJ4/dOfnszslj9758dmbnsSVzP5sz97M5cx9bMvezKXMf+/OLvtmvbh2jS+tG\nHJXShHMHdKRPx2b06dBcZxAiUmliVjDMLAn4I3AqsAGYZ2Yz3X1psc3OAHqGj6HAs8DQiPvGXEFh\nEfmFTl5BEXmFwSP/wPOCIvK/+erkFRaSV+DfbJNf+O/t9uUVkpNfGHzNKyAnL3ienVcQLiska38+\nu3PyyS0oOmiWpDpG26b1ade8Acd2aMbJxxxBu+YN6Na6MUemNKZzq0bq9ioiMRXLM4whwEp3Xw1g\nZlOAEUDxD/0RwGR3d2C2mbUws/ZAtwj7Vgp356xn/sWu7Dz25hZQ5JBfGHzgF3nl/Zy6dYyGyUk0\nTq5Lo+QkGiYn0Sg5ieaNkmnfPImmDerSsnEyLRrVo2WjZFo0rEeLRsm0bBy8bt04mboqCCKSQLEs\nGB2B9cVebyA4izjUNh0j7guAmY0BxgB06dKl3CHNjKPbNiWpjtGkfl3qmFEvyUiuW4d6SXX+42ty\nkh1k2b9f10sy6pfYr15SHRrWSyK5rj7sRaR6q/aN3u4+DhgHkJqaWqFzgicuGVCpmUREaqJYFoyN\nQOdirzuFy6JsUy/CviIiEkexvE4yD+hpZt3NLBm4FJhZYpuZwCgLDAMy3X1zxH1FRCSOYnaG4e4F\nZnYT8C5B19iJ7r7EzMaG658D3iLoUruSoFvt1WXtG6usIiJyaOZeiV2BEiw1NdXT0tISHUNEpNow\ns/nunhplW3XdERGRSFQwREQkEhUMERGJRAVDREQiqVGN3maWAayr4O5tgO2VGKeyKFf5VdVsylU+\nylV+FcnW1d1TomxYowrG4TCztKg9BeJJucqvqmZTrvJRrvKLdTZdkhIRkUhUMEREJBIVjH8bl+gA\npVCu8quq2ZSrfJSr/GKaTW0YIiISic4wREQkklpfMMzsdDNbbmYrzezuBObobGYfmNlSM1tiZjeH\ny39pZhvNbGH4ODNB+daa2RdhhrRwWSszm2VmX4VfW8Y5U69ix2WhmWWZ2S2JOGZmNtHMtpnZ4mLL\nSj0+ZnZP+J5bbmanJSDb783sSzNbZGb/Z2YtwuXdzGxfsWP3XJxzlfq7i9cxKyXX1GKZ1prZwnB5\nPI9XaZ8R8XufuXutfRCMhLsKOBJIBtKB3gnK0h4YFD5vCqwAegO/BG6vAsdqLdCmxLJHgLvD53cD\nDyf4d7kF6JqIYwZ8GxgELD7U8Ql/r+lAfaB7+B5MinO2HwB1w+cPF8vWrfh2CThmB/3dxfOYHSxX\nifWPAfcn4HiV9hkRt/dZbT/D+GbecXfPAw7MHR537r7Z3ReEz/cAywimqq3KRgAvhs9fBM5NYJbv\nA6vcvaI3bh4Wd/8nsLPE4tK
OzwhgirvnuvsaguH9h8Qzm7u/5+4F4cvZBJOUxVUpx6w0cTtmZeUy\nMwMuBl6Lxc8uSxmfEXF7n9X2glHanOIJZWbdgIHAnHDRj8NLBxPjfdmnGAfeN7P5FsyjDtDWgwmv\nIPjrvm1iogHBJFvF/xNXhWNW2vGpau+7a4C3i73uHl5e+cjMvpWAPAf73VWVY/YtYKu7f1VsWdyP\nV4nPiLi9z2p7wahyzKwJMAO4xd2zgGcJLpkNADYTnA4nwknuPgA4A7jRzL5dfKUH58AJ6XJnwayM\n5wDTw0VV5Zh9I5HHpyxmdi9QALwSLtoMdAl/17cCr5pZszhGqnK/uxJG8p9/mMT9eB3kM+IbsX6f\n1faCEWXe8bgxs3oEb4RX3P0NAHff6u6F7l4EjCeGly7K4u4bw6/bgP8Lc2w1s/Zh9vbAtkRkIyhi\nC9x9a5ixShwzSj8+VeJ9Z2ZXAWcBl4cfNISXL3aEz+cTXPc+Ol6ZyvjdJfyYmVld4Hxg6oFl8T5e\nB/uMII7vs9peMKrM3OHhtdHngWXu/nix5e2LbXYesLjkvnHI1tjMmh54TtBgupjgWI0ONxsN/CXe\n2UL/8VdfVThmodKOz0zgUjOrb2bdgZ7A3HgGM7PTgTuBc9w9p9jyFDNLCp8fGWZbHcdcpf3uEn7M\ngFOAL919w4EF8TxepX1GEM/3WTxa96vyg2BO8RUEfxncm8AcJxGcSi4CFoaPM4GXgC/C5TOB9gnI\ndiRBb4t0YMmB4wS0Bv4OfAW8D7RKQLbGwA6gebFlcT9mBAVrM5BPcK342rKOD3Bv+J5bDpyRgGwr\nCa5vH3ivPRdue0H4O14ILADOjnOuUn938TpmB8sVLn8BGFti23ger9I+I+L2PtOd3iIiEkltvyQl\nIiIRqWCIiEgkKhgiIhKJCoaIiESigiEiIpGoYIiUg5kV2n+OkFtpIxyHI58m6p4RkUOqm+gAItXM\nPg+GgRCpdXSGIVIJwjkSHrFgzpC5ZnZUuLybmf0jHEzv72bWJVze1oJ5KNLDx/DwWyWZ2fhwvoP3\nzKxhwv5RIiWoYIiUT8MSl6QuKbYu0937An8AngyXPQO86O79CAb4ezpc/jTwkbv3J5h7YUm4vCfw\nR3c/DthNcCexSJWgO71FysHM9rp7k4MsXwuc7O6rwwHitrh7azPbTjC8RX64fLO7tzGzDKCTu+cW\n+x7dgFnu3jN8fRdQz91/E/t/mcih6QxDpPJ4Kc/LI7fY80LUzihViAqGSOW5pNjXz8LnnxKMggxw\nOfBx+PzvwA0AZpZkZs3jFVKkovTXi0j5NDSzhcVev+PuB7rWtjSzRQRnCSPDZT8GJpnZHUAGcHW4\n/GZgnJldS3AmcQPBCKkiVZbaMEQqQdiGkeru2xOdRSRWdElKREQi0RmGiIhEojMMERGJRAVDREQi\nUcEQEZFIVDBERCQSFQwREYlEBUNERCL5f5cWFZHq8gKTAAAAAElFTkSuQmCC\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "# plot path of loss function over first 200 epochs\n", "# xval loss, proxy for total error \n", "total_loss = np.array(fit.history['val_loss'][:200])\n", "# training loss, proxy for bias error\n", "bias = np.array(fit.history['loss'][:200])\n", "# difference, proxy for variance error\n", "variance = total_loss - bias\n", "\n", "plt.plot(total_loss)\n", "plt.ylabel('Total loss')\n", "plt.xlabel('Epoch')\n", "plt.show()\n", "\n", "plt.plot(bias)\n", "plt.ylabel('Training loss (proxy for bias)')\n", "plt.xlabel('Epoch')\n", "plt.show()\n", "\n", "plt.plot(variance)\n", "plt.ylabel('Difference (proxy for variance)')\n", "plt.xlabel('Epoch')\n", "plt.show()\n", "\n", "# note that training loss (bias) declines continuously\n", "# total loss declines, reaches a minimum, then climbs as overfitting and variance increases\n", "# http://scott.fortmann-roe.com/docs/BiasVariance.html\n", "\n" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYoAAAEWCAYAAAB42tAoAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4VNXWwOHfSiEJEFqCSA9NCCA1AhYkCkgRxI4NxUpQ\nEbGAHzasXCyg0qIXvei9Cl4VFQFFsVBU6qUKgqGIwUgvAUJJsr4/ziEOIUwmIZNJWe/zzMOcvs5h\nMmvO3vvsLaqKMcYYczpBgQ7AGGNM0WaJwhhjjFeWKIwxxnhlicIYY4xXliiMMcZ4ZYnCGGOMV5Yo\nTJ6JyM0i8nWg4wg0EakjIgdFJLgQjxkjIioiIYV1TH8SkV9EJD4f29lnsBCJPUdRvInIFqAakAEc\nBL4C7lfVg4GMqyRyr/VdqjongDHEAJuBUFVND1QcbiwKNFLVJD8fJ4Yics6lld1RlAy9VbU80Apo\nDfxfgOPJl0D+Si4pv9Dzwq638ZUlihJEVf8CZuMkDABEJExEXhGRrSKyXUQSRSTCY3kfEVkhIgdE\nZKOIdHfnVxSRt0UkRUS2icjzJ4pYRKS/iCxw308UkVc84xCRz0XkIfd9DRH5RER2ishmEXnAY70R\nIvKxiPxHRA4A/bOfkxvHe+72v4vIEyIS5BHHjyIyTkT2i8ivItI527bezuFHERkjIruBESLSQES+\nE5HdIrJLRN4XkUru+v8G6gBfuMVNQ7MXA4nIDyLynLvfVBH5WkSiPeK51T2H3SLypIhsEZEuOf1f\nikiEiLzqrr9fRBZ4/r8BN7v/p7tE5HGP7dqJyM8iss8973EiUsZjuYrIfSLyG/CbO+91EfnD/Qws\nE5GOHusHi8hw97OR6i6vLSLz3FVWutejr7t+L/fztE9EfhKRFh772iIiw0RkFXBIREI8r4Eb+1I3\nju0iMtrd9MSx9rnHOt/zM+hu20xEvhGRPe62w3O6riafVNVexfgFbAG6uO9rAauB1z2WjwGmA1WA\nSOALYKS7rB2wH+iK86OhJtDEXfYp8CZQDjgLWAwMcJf1Bxa47y8G/uDvYszKQBpQw93nMuApoAxQ\nH9gEdHPXHQEcB650143I4fzeAz53Y48BNgB3esSRDgwBQoG+7vlU8fEc0oFBQAgQATR0r0UYUBXn\nC+q1nK61Ox0DKBDiTv8AbATOcff3A/APd1lTnKLBi9xr8Yp77l1O8/863t2+JhAMXODGdeKY/3SP\n0RI4CsS627UFOrjnFAOsAx702K8C3+B8HiLcebcAUe42DwN/AeHuskdxPlONAXGPF+Wxr4Ye+24N\n7ADauzHf5l6zMI/rtwKo7XHsrGsK/Az0c9+XBzrkdJ1z+AxGAilu7OHudPtA/22WpFfAA7DXGf4H\nOn9oB4FU94/pW6CSu0yAQ0ADj/XPBza7798ExuSwz2rul0+Ex7wbge/d955/pAJsBS52p+8GvnPf\ntwe2Ztv3/wH/ct+PAOZ5Obdg4BjQ1GPeAOAHjzj+xE1S7rzFQD8fz2Hr6Y7trnMlsDzbtc4tUTzh\nsfxe4Cv3/VPAFI9lZd1zOyVR4CTNNKBlDstOHLNWtnO+4TTn8CDwqce0Apfmct57TxwbWA/0Oc16\n2RPFROC5bOusBzp5XL87cvj8nkgU84BngOjTnPPpEsWNnv9P9ir4l5UTlgxXquocEekEfABEA/tw\nfhWXBZaJyIl1BecLGJxfdrNy2F9dnF/oKR7bBeHcOZxEVVVEpuL8sc4DbgL+47GfGiKyz2OTYGC+\nx/Qp+/QQ7cbxu8e833F+ZZ+wTd1vC4/lNXw8h5OOLSLVgNeBjji/SoNwvjTz4i+P94dxfhnjxpR1\nPFU97BZ55SQa55fxxrweR0TOAUYDcTj/9yE4d3Wesp/3I8CdbowKVHBjAOcz4i0OT3WB20RkkMe8\nMu5+czx2NncCzwK/ishm4BlVneHDcfMSo8kHq6MoQVR1LjAZp1gDYBfOL9NmqlrJfVVUp+IbnD/a\nBjns6g+cX+PRHttVUNVmpzn0FOBaEamLcxfxicd+Nnvso5KqRqpqT8+wvZzSLpzimboe8+oA2zym\na4pHJnCX/+njOWQ/9ovuvHNVtQJOkYx4WT8vUnCKBgGnDgKnuCcnu4Aj5Px/k5uJwK84rZEqAMM5\n+RzA4zzc+oihwPVAZVWthFN8d2Kb031GcvIH8EK2/++yqjolp2Nnp6q/qeqNOMWEo4CPRaSct208\njlvfxxhNPliiKHleA7qKSEtVzcQpyx4jImcBiEhNEenmrvs2cLuIdBaRIHdZE1VNAb4GXhWRCu6y\nBu4dyylUdTnOl9skYLaqnriDWAykuhWYEW7FaHMROc+XE1HVDOC/wAsiEukmoof4+44FnC+VB0Qk\nVESuA2KBWXk9B1ckTjHefhGpiVM+72k7+f9C+hjoLSIXuJXLIzj1CxwA9//tHWC0OI0Bgt0K3DAf\njhMJHAAOikgTYKAP66cDO4EQEXkK547ihEnAcyLSSBwtROREgst+Pf4JJIhIe3fdciJyuYhE+hA3\nInKLiFR1z//EZyjTjS2T01/7GUB1EXlQnMYbkSLS3pdjGt9YoihhVHUnTgXwU+6sYUASsFCclkVz\ncComUdXFwO04Fd77gbn8/ev9Vpxig7U4xS8fA9W9HPoDoIv774lYMoBeOK2wNvN3MqmYh1MahFPP\nsglY4O7/HY/li4BG7r5fAK5V1RNFOnk9h2eANjjXYiYwLdvykcATboueR/JwDqjqL+65TMW5uziI\nU/F79DSbPIJTibwE2IPzC9uXv9dHcIr/UnG+uD/MZf3ZOM/ebMAptjvCycVDo3GS9dc4CehtnEp0\ncJLdu+71uF5Vl+LUUY3Dud5J5NCSzYvuwC8ichCnCPAGVU1T1cM4/7c/usfq4LmRqqbiNELojVMk\n9xtwSR6Oa3JhD9yZYktE+uM8AHdRoGPJKxEpj/OruZGqbg50PMZ4Y3cUxhQSEektImXdcvdXcO4Y\ntgQ2KmNyZ4nCmMLTB6ei/U+c4rIb1G7pTTFgRU/GGGO8sjsKY4wxXhW7B+6io6M1JiYm0GEYY0yx\nsmzZsl2qWjU/2xa7RBETE8PSpUsDHYYxxhQrIvJ77mvlzIqejDHGeGWJwhhjjFeWKIwxxnhlicIY\nY4xXliiMMcZ4ZYnCGGOMV35LFCLyjojsEJE1p1kuIvKGiCSJyCoRaeOvWIwxxuSfP+8oJuN0G3w6\nPXD6u2kE3IMz4IoxxpgCduxYxhlt77dEoarzcPrRP50+wHvqWAhUEhFvYwUYY4zJo9dfX0jbuoNy\nX9GLQNZR1OTkAVKSOXks5Cwico+ILBWRpTt37iyU4IwxpiRo2fJs1m7PV88dWYpFZbaqvqWqcaoa\nV7XqmZ2wMcaUZH/8sZ+JE5dkTcfHx5D02BtntM9A9vW0DajtMV3LnWeMMSaP0tMzeeONRTz11Pcc\nOnSc5s3PomNHZ2TjelH7ctnau0AmiunA/SIyFWgP7FfV
lADGY4wxxdKiRckMGDCDlSu3A3DNNbHU\nr1+5wPbvt0QhIlOAeCBaRJKBp4FQAFVNBGYBPXEGYD8M3O6vWIwxpiTauzeN4cO/5c03l6EKMTGV\nGDeuB5dffk6BHsdviUJVb8xluQL3+ev4xhhTZE27HDbPOuPdPPN5dxLndyAkKINHLvmJJ7vMo+yv\nD8KvBRCjh2I3HoUxxhR7Z5Ak0jOCCAnOBOCJLvPYvKcSL3T/jubVdxRUdKewRGGMMYHysPq86pEj\n6YwatYDPPlvPokV3UaZMMNHA50/5uINHJF8hgiUKY4wp8r79dhMDB87kt9+cZ5hnz06id+/GhXZ8\nSxTGGFNEbd9+kIcf/pr3318NQGxsNBMnXk6nTjGFGoclCmOMKYL+859VDBr0Jfv2HSE8PISnnrqY\nhx++gDJlggs9FksUxhhTQK2QClJmprJv3xG6d2/I+PE9C/S5iLyyRGGMMYFIEvV6njR58OAxfv75\nD7p2bQBAv34tqFEjks6d6yGS/4rogmCJwhhjTshDK6SC9NlnvzJo0Jfs3HmINWvupWHDKogIXbrU\nD0g82VmiMMaYAPn993088MBXTJ++HoC4uBocPZoe4KhOZYnCGGMK2fHjGbz22kJGjJjL4cPHiYws\nw4svdmbgwDiCg4tep96WKIwxppA98MCXJCYuA+D665sxZkw3atSIDHBUp2eJwhhjCtmDD3Zg7tzf\nGT26G927Nwx0OLkqevc4xhhTgqgq//73Sm688ROcvlChceNo1qy5t1gkCbA7CmOM8Zv163cxcOBM\nvv9+C+A0ee3ZsxEAQUGBbfKaF5YojDGmgKWlHWfkyAWMGvUjx45lEBUVwauvXkaPHsXjDiI7SxTG\nGFOA5szZRELCDDZu3AvAnXe2ZtSoLkRFlQ1wZPlnicIYU7IVcvccP/30Bxs37qVZs6okJvbioovq\nFNqx/cUShTGmZPM1SWTrUsNXGRmZJCXtoXHjaACGDbuQ6Oiy3HVXm4B04OcPliiMMaWDH7rnWL48\nhYSEmWzatJf16++nSpUIwsJCuPfe8wr8WIFkzWONMSaPUlOPMmTIV8TF/ZPFi7cRFhbMxo17Ah2W\n39gdhTHG+EhVmTZtHYMHf8W2bakEBQlDhnTgmWfiiYwMC3R4fmOJwhhjfPTgg1/xxhuLATjvvBq8\n+WYvWreuHuCo/M8ShTGm+CrkFk1XXRXLu++u5MUXOzNgQNsi2YGfP1iiMMYUX35u0bRgwVa+/34z\nTz7ZCYD4+Bi2bh1ChQolt5gpJ5YojDHFXwG3aNq9+zDDhs3h7beXA9C5c30uuKA2QKlLEmCJwhhj\nsqgq7723kkce+YZduw4TGhrEY49dROvWZwc6tICyRGGMMcC6dTsZOHAmc+f+DsAll8QwYcLlNGkS\nHdjAigBLFMYYA4we/TNz5/5O1aplGT26GzfffC4ixaeHV3+yRGGMKVyF3FLJm/37j1CxYjgAI0d2\noVy5Mjz1VCeqVIkIcGRFS+lo22WMKToKOknko0XTn3+m0rfvx3To8DbHjmUAEB1dltde625JIgd2\nR2GMCQw/9L2Um4yMTCZMWMLjj39HauoxypYN5X//S6FDh1qFHktxYonCGFMqLFv2JwMGzGDZshQA\nrriiMWPH9qBOnYoBjqzo82vRk4h0F5H1IpIkIo/lsLyiiHwhIitF5BcRud2f8RhjSqcRI36gXbtJ\nLFuWQu3aFfjss758/vkNliR85Lc7ChEJBsYDXYFkYImITFfVtR6r3QesVdXeIlIVWC8i76vqMX/F\nZYwpferXr4wIPPzw+YwYEU/58mUCHVKx4s+ip3ZAkqpuAhCRqUAfwDNRKBApThu08sAeIN2PMRlj\nSoFNm/ayZMk2+vZtDkC/fi1o375m1uBCJm/8mShqAn94TCcD7bOtMw6YDvwJRAJ9VTUz+45E5B7g\nHoA6dYr/sILGGP84diyDV175ieeem4eq0rZtDRo2rIKIWJI4A4FuHtsNWAHUAFoB40SkQvaVVPUt\nVY1T1biqVasWdozGmGJg3rzfadUqkccf/44jR9K59tqmpbJfJn/w5x3FNqC2x3Qtd56n24F/qKoC\nSSKyGWgCLPZjXMaYEmTXrsM8+ug3TJ68AoBGjaowceLldO5cP8CRlRz+TBRLgEYiUg8nQdwA3JRt\nna1AZ2C+iFQDGgOb/BiTMaaESUiYwSefrCMsLJjhwzsydOiFhIdby/+C5LerqarpInI/MBsIBt5R\n1V9EJMFdngg8B0wWkdWAAMNUdZe/YjLGlAyZmUpQkNMP0wsvXEpaWjqvvdaNRo2iAhxZySROqU/x\nERcXp0uXLg10GMaY/HrV7WgvH09mHz58nOeem8uKFduZNesm67QvD0RkmarG5Wdbuz8zxhQLM2du\n4P77v2TLln2IwOLF22jf3rreKAyWKIwxRVpy8gEGD/6KadPWAdCyZTUSE3tZkihEliiMMUXWhAlL\nGDZsDgcPHqNcuVCee+4SBg1qT0hIoFv2ly6WKIwxRdauXYc5ePAYV13VhNdf707t2tY3UyBYojDG\nFBn79h3h1193ZXX7PWzYhbRrV5Pu3RsGOLLSze7fjDEBp6pMnbqG2NjxXHHFFPbsSQMgLCzEkkQR\nYInCGBNQSUl76N79fW688RP++usgjRpFsX//kUCHZTz4VPQkImWAOqqa5Od4jDGlxNGj6bz00o+8\n8MJ8jh7NoHLlcF56qSt33NE662E6UzTkmihE5HJgNFAGqCcirYCnVfUqfwdnjCm5+vb9mM8/Xw/A\nrbe25OWXu3LWWeUCHJXJiS93FM/idA/+PYCqrhARKzQ0xpyRBx/swPr1u5kwoSeXXFIv0OEYL3xJ\nFMdVdV+2R+WLV78fxpiAysxU3nlnOevW7eTVGs68+PgY1qwZSHCwVZUWdb4kinUicj0Q5PYE+wCw\n0L9hGWOKlGmXw+ZZ+dp0dcpZJHzSi5+2OIOO3fpQNVrW2A5gSaKY8CVR3A88BWQC03B6gx3uz6CM\nMUVMPpLEoaOhPPNNPKPnnU9GZhBnR6byWp+vaFF9O9Tr6Ycgjb/4kii6qeowYNiJGSJyNU7SMMaU\nJj72+PrFF+u5//4v2bp1PyJw333n8cILl1Kx4it+DtD4gy/3fU/kMO/xgg7EGFNyfPbZr2zdup/W\nrc9m0aK7GDeuJxUrhgc6LJNPp72jEJFuQHegpoiM9lhUAacYyhhjAEhPz2TbtgPUrVsJgFGjutK6\ndXUSEuKsA78SwFvR0w5gDXAE+MVjfirwmD+DMsYUHwsXJpOQMIOjRzNYuTKBMmWCiY4uy/33twt0\naKaAnDZRqOpyYLmIvK+q9jy9MSVVPls07d2bxvDh3/Lmm8tQhZiYSmzZso9zzrHhSEsaXyqza4rI\nC0BTIKuQUVXP8VtUxpjC42uScFsqqSpTpqxhyJDZ7NhxiJCQIB599AKeeOJiypYN9WOgJlB8SRST\ngeeBV4AewO3
YA3fGlDw+tmi6+eZpTJmyBoCOHeswceLlNGt2lj8jMwHmSy1TWVWdDaCqG1X1CZyE\nYYwphbp3b0hUVATvvHMFP/zQ35JEKeDLHcVREQkCNopIArANiPRvWMaYomLOnE1s3LiHAQPiAOjX\nrwW9ep1DlSoRAY7MFBZfEsUQoBxO1x0vABWBO/wZlDEm8LZvP8hDD33NBx+sJiwsmC5d6tOgQRVE\nxJJEKZNrolDVRe7bVKAfgIjU9GdQxpgCko8WTZmZyltvLeOxx+awf/9RwsNDeOqpi2286lLMa6IQ\nkfOAmsACVd0lIs1wuvK4FKhVCPEZY85EHls0rVz5FwMGzGDRom0A9OjRkHHjelK/fmV/RWiKAW9P\nZo8ErgFWAk+IyAzgXmAUkFA44RljCoSPLZqGDp3DokXbqFEjktdf784118SSbYgBUwp5u6PoA7RU\n1TQRqQL8AZyrqpsKJzRjjL+pKocPH6dcuTIAvPFGdxITl/LMM5dQoUJYgKMzRYW35rFHVDUNQFX3\nABssSRhTcvz++z769JnKFVdMRdW542jcOJoxY7pbkjAn8XZHUV9ETnQlLjjjZWd1La6qV/s1MmOM\nXxw/nsGYMQt55pm5HD58nMjIMvz22x7resOclrdEcU226XH+DMQYk0f5aNH0449bSUiYyZo1OwDo\n27cZo0d3o0YNezTKnJ63TgG/LcxAjDF5lMcWTYMGzWLcuCUA1K9fmfHje9K9e0N/RWdKEF8euDPG\nFGU+tmiqWrUcoaFBDBt2IcOHdyQiwjrwM77x64giItJdRNaLSJKI5DiGhYjEi8gKEflFROb6Mx5j\nSpNff93F119vzJoeNuxCVq0ayHPPXWpJwuSJz4lCRPLUDEJEgoHxOB0INgVuFJGm2dapBEwArlDV\nZsB1eTmGMeZUaWnHefLJ72jRYiK33DKNPXvSAAgLC6FJk+gAR2eKo1wThYi0E5HVwG/udEsRGevD\nvtsBSaq6SVWPAVNxns3wdBMwTVW3AqjqjjxFb4w5yddfb+Tccyfy/PPzOX48kyuuaIw9L2fOlC91\nFG8AvYDPAFR1pYhc4sN2NXEe0jshGWifbZ1zgFAR+QGnR9rXVfU9H/ZtjPGQkpLKkCGz+fBDZ9Ti\nZs2qkpjYi4suqhPgyExJ4EuiCFLV37M9xp9RgMdvC3QGIoCfRWShqm7wXElE7gHuAahTxz74xmR3\n9dX/ZeHCZCIiQhgxIp4hQzoQGhoc6LBMCeFLHcUfItIOUBEJFpEHgQ25bYQzbkVtj+la7jxPycBs\nVT2kqruAeUDL7DtS1bdUNU5V46pWrerDoY0p+dSjsdM//tGZXr3OYe3a+xg69EJLEqZA+ZIoBgIP\nAXWA7UAHd15ulgCNRKSeiJQBbgCmZ1vnc+AiEQkRkbI4RVPrfA3emNIoNfUoQ4Z8xYCPe2fN69Qp\nhi++uJGYmEoBjMyUVL4UPaWr6g153bGqpovI/cBsIBh4R1V/cUfJQ1UTVXWdiHwFrAIygUmquiav\nxzKmNFBVpk1bx+DBX7FtWyohQa0Y3nk+MYEOzJR4viSKJSKyHvgQp4VSqq87V9VZwKxs8xKzTb8M\nvOzrPo3xWT66uCiqNu+uxP2f9mTWr+cA0K52MonXziCmyr4AR2ZKA19GuGsgIhfgFB09IyIrgKmq\nOtXv0RlzJkpAklCFl76/kGe+iSfteCgVw48wsucc7umwjOAgzeqewxh/8qkLD1X9CfhJREYArwHv\n4zwXYUzR52MXF0WRABvWfU7a8RXceGNzRo/uxtlnjwx0WKaUyTVRiEh5nAflbgBicSqgL/BzXMaU\nWrt2Heavvw7SvPlZAIwa1ZUbbmhO164NAhyZKa18uaNYA3wBvKSq8/0cjzGllqry7rsreeSRr6la\ntRwrVyZQpkww0dFlLUmYgPIlUdRX1Uy/R2JMKbZu3U4SEmYyb97vALRseTZ796ZRrVr5AEdmjJdE\nISKvqurDwCcickohr41wZ8yZO3z4OC+8MI+XX/6J48czqVq1LKNHd+Pmm89FrJMmU0R4u6P40P3X\nRrYzxg9UlUsvfZdFi5wOCwYMaMvIkZ2pXDkiwJEZczJvI9wtdt/GqupJycJ9kM5GwDPmDIgI9957\nHocPH+fNN3tx/vm1c9/ImADwpQuPO3KYd2dBB2JMSZeRkcnYsYsYPfrnrHn9+rVg2bJ7LEmYIs1b\nHUVfnCax9URkmseiSMAeBzUmD5Yu/ZOEhBksW5ZCWFgwN9zQnBo1IhER68DPFHne6igWA7txen0d\n7zE/FVjuz6CMKSn27z/CE098x/jxS1CF2rUrMHZsD2rUiAx0aMb4zFsdxWZgMzCn8MIxxgfFoA8n\nVeWjj9by4INfkZJykOBgYciQDjz9dDzly5cJdHjG5Im3oqe5qtpJRPYCns1jBVBVreL36IzJSV6S\nRAD7QnrzzWWkpBykQ4daJCZeTsuWZwcsFmPOhLeipxPDndpo7KZoKmJ9OB09ms6+fUeoVq08IsKE\nCT354Yct3H13W4KC7JkIU3ydttWTx9PYtYFgVc0AzgcGAOUKITZjio25c7fQqtWb3HTTNNQdeq5x\n42gGDIizJGGKPV+ax36GMwxqA+BfQCPgA79GZUwxsXPnIfr3/4z4+Hf59ddd/PHHfrZvPxTosIwp\nUL709ZSpqsdF5GpgrKq+ISLW6smUapmZyr/+tZyhQ+ewZ08aYWHBDB/ekaFDLyQ83Kfe+40pNnwa\nClVErgP6AVe680L9F5Ip1YpJi6Zu3f7DnDmbAOjSpT4TJvSkUaOoAEdmjH/4+mT2JTjdjG8SkXrA\nFP+GZUotX5NEAFsziQgdO9ahWrVyfPDB1Xz99S2WJEyJJicq3ryuJBICNHQnk1Q13a9ReREXF6dL\nly4N1OGNv73qVvwWsRZNM2du4PjxTK68sgngtHBKS0unUqXwAEdmjG9EZJmqxuVnW19GuOsI/BvY\nhvMMxdki0k9Vf8zPAY0pTpKTDzB48FdMm7aO6OiyXHxxXapUiSAsLISwMKuLMKWDL5/0MUBPVV0L\nICKxOIkjX5nJmOIgPd3pwO+pp37g4MFjlCsXyvDhF1GhQligQzOm0PmSKMqcSBIAqrpORKwPAlNi\nLV68jQEDZrBixV8AXHVVE15/vTu1a1cMcGTGBIYvieJ/IpII/MedvhnrFNCUUJmZyu23f87atTup\nU6ci48b1oHfvxoEOy5iA8iVRJAAPAEPd6fnAWL9FZEwhU1WOHs0gPDyEoCBh/PiefPnlbzz1VCfK\nlbObZ2O8JgoRORdoAHyqqi8VTkjGFJ6kpD3ce+9MateuwNtv9wEgPj6G+PiYwAZmTBFy2ucoRGQ4\nTvcdNwPfiEhOI90ZUywdPZrOs8/OpXnzCXzzzSY++2w9u3cfDnRYxhRJ3u4obgZaqOohEakKzALe\nKZywjPGf777bzMCBM9mwYTcAt93Wkpdf7kpUVNkAR2ZM0eQtURxV1UMA
qrpTRHx5ituYIisjI5Pb\nb/+cf/97FQCNG0eRmNjLipmMyYW3RFHfY6xsARp4jp2tqlf7NTJT8gS4H6fg4CBCQoIIDw/hiSc6\n8sgjF9hDc8b4wNtfyTXZpsf5MxBTCgSgH6fVq7dz5Eg6551XE4CXX+7K4493pEEDG6DRGF95GzP7\n28IMxJQihdCP06FDxxgx4gfGjFlIo0ZRrFyZQJkywURFlbW6CGPyyO67TYkzffp6Bg36kq1b9yMC\nXbrU4/jxDMqUCQ50aMYUS36toBaR7iKyXkSSROQxL+udJyLpInKtP+MxJdvWrfu58sqp9Okzla1b\n99OmTXUWL76bsWN72oNzxpwBn+8oRCRMVY/mYf1gYDzQFUgGlojIdM9+ozzWGwV87eu+jckuIyOT\n+PjJbN68j8jIMjz//KXce+95hIRYYz1jzpQv3Yy3A94GKgJ1RKQlcJeqDspl03Y4Y1dscvczFegD\nrM223iDgE+C8PMZuCkMRH3FOVRERgoODGDEini++2MBrr3WjZs0KgQ7NmBLDl59bbwC9gN0AqroS\nZ8S73NQE/vCYTnbnZRGRmsBVwERvOxKRe0RkqYgs3blzpw+HNgWmoJNEAbVo2rs3jYSEGbz44vys\nef36teDTD5wHAAAdBElEQVSjj66zJGFMAfOl6ClIVX8XEc95GQV0/NeAYaqamW3/J1HVt4C3wBnh\nroCObfKiiIw4p6p88MFqHnroa3bsOERkZBnuv78dFSuG4+0zZIzJP18SxR9u8ZO69QmDgA0+bLcN\nqO0xXcud5ykOmOr+gUcDPUUkXVU/82H/ppTZsGE39947k2+/3QxAx451mDjxcipWtOFIjfEnXxLF\nQJzipzrAdmCOOy83S4BGIlIPJ0HcANzkuYKq1jvxXkQmAzMsSZjs0tMzef75eYwcuYBjxzKIiorg\n5Ze70r9/K7uLMKYQ5JooVHUHzpd8nqhquojcD8wGgoF3VPUXEUlwlyfmdZ+mdAoOFubP38qxYxnc\ncUcrRo3qSnS0PTRnTGHxpdXTP4FTCqhV9Z7ctlXVWTi9znrOyzFBqGr/3PZnSo/t2w9y5Eg6detW\nQkRITLyclJSDXHxx3UCHZkyp40urpznAt+7rR+AswOfnKYzJi8xMJTFxKY0bj+POO6ej6vxGadQo\nypKEMQHiS9HTh57TIvJvYIHfIjKl1ooVf5GQMINFi5w2D2XKBHPw4DEiI8MCHJkxpVt++nqqB1Qr\n6EBM6ZWaepSnn/6B119fRGamUqNGJK+/3p1rrom1ympjigBf6ij28ncdRRCwBzhtv03G5MWxYxm0\nafMWSUl7CAoSBg9uz7PPXkKFCnYXYUxR4TVRiPNzriV/P/+QqScKjY0pAGXKBNOvXwu++GIDiYmX\n07ZtjUCHZIzJxmtltpsUZqlqhvuyJGHOyPHjGbz00o9Mnboma95jj13EwoV3WpIwpojypY5ihYi0\nVtXlfo/GlGg//riVhISZrFmzg6pVy9Kr1zmUL1/Gxokwpog7baIQkRBVTQda43QRvhE4hDN+tqpq\nm0KK0RRze/akMWzYN0ya5PzWqF+/MhMm9KR8eRsjwpjiwNsdxWKgDXBFIcViShhV5d//XsXDD3/N\nrl2HCQ0NYtiwCxk+vCMREaGBDs8Y4yNviUIAVHVjIcViSpjjxzMZOXIBu3YdplOnukyceDmxsVUD\nHZYxJo+8JYqqIvLQ6Raq6mg/xGOKubS04xw7lkHFiuGUKRPMW2/1YtOmvdx6a0t7JsKYYspboggG\nyuPeWZgSqIBHr5s9O4l7751FfHxd3n67DwAdO9alY0fresOY4sxbokhR1WcLLRJT+HxNErmMSpeS\nksqQIbP58MNfAChXLpTDh49TtqzVQxhTEuRaR2FKgXyOXpeRkcnEiUt5/PHvOHDgKBERIYwYEc+Q\nIR0IDbUmr8aUFN4SRedCi8IUO0eOpHPxxf9iyZI/AejV6xzGju1BTEylAEdmjClop00UqrqnMAMx\nxUt4eAjNm59FSspB3nijO1de2cQqq40pofLTe6wphVSVadPWUa1aeS66qA4Ao0d3IzhYrBtwY0o4\nSxTFTQG3VPLF5s17uf/+L5k16zeaNIlmxYoBhIWFUKlSeKHGYYwJDEsUxU1BJwkvLZqOHcvg1Vd/\n4rnn5pGWlk7FimEMHtyekBBfBkY0xpQUliiKq3y2VPLV/Pm/k5Awk7VrdwJw003n8uqrl3H22eX9\nelxjTNFjicKcIi3tONde+xE7dhyiYcMqTJjQk65dGwQ6LGNMgFiiMIBTWZ2RoYSEBBEREcro0Zex\nYcNu/u//OhIebh8TY0oz+wYwrF27k4SEGXTtWp8nn+wEwM03twhwVMaYosJqJUuxw4ePM3z4t7Rs\nmcj8+VuZNGk5R4+mBzosY0wRY3cUpdSXX/7GfffNYvPmfQAMGNCWkSM7ExZmHwljzMnsW6GUOXTo\nGP37f87HH68FoEWLaiQmXs7559cOcGTGmKLKEkUpU7ZsKHv2pFGuXCjPPBPP4MEd7LkIY4xXlihK\ngaVL/6RSpXAaNqyCiDBpUm+Cg4OoU6dioEMzxhQD9lOyBNu//wiDBs2iXbt/kpAwA1XnIb169Spb\nkjDG+MzuKEogVeW///2FBx+czV9/HSQ4WGjTpjrp6Zk2ToQxJs8sUZQwGzfu4b77ZjF79kYAzj+/\nFomJvWjRolqAIzPGFFeWKEqQ1NSjxMX9k337jlCpUjijRnXhrrvaEBRk40QYY/LPr4lCRLoDrwPB\nwCRV/Ue25TcDw3CGXU0FBqrqSn/GVJJFRoYxZEgHkpL28Morl3HWWeUCHZIxpgTwW6IQkWBgPNAV\nSAaWiMh0VV3rsdpmoJOq7hWRHsBbQHt/xVTS7Nx5iEcf/YbOnevRr19LAJ588mIbac4YU6D8eUfR\nDkhS1U0AIjIV6ANkJQpV/clj/YVALT/GE1gFOOBQZqbwzqT/MXToN+zde4TvvtvMDTc0JzQ02JKE\nMabA+TNR1AT+8JhOxvvdwp3AlzktEJF7gHsA6tSpU1DxFa4CShJrUs4iYWY/fvz1CwC6dKnPhAk9\nrTWTMcZvikRltohcgpMoLsppuaq+hVMsRVxcnH9H7PG3fA44lJZ2nBEjfmD06wtJT8+kWrVyjBnT\njRtuaG53EcYYv/JnotgGeHYgVMuddxIRaQFMAnqo6m4/xlOsBQUJ06dvICMjk3vvjeOFFzrbmNXG\nmELhz0SxBGgkIvVwEsQNwE2eK4hIHWAa0E9VN/gxlmIpOfkAZcuGUqVKBGFhIUye3AeA9u1LblWO\nMabo8VsXHqqaDtwPzAbWAf9V1V9EJEFEEtzVngKigAkiskJElvornuIkPT2TMWN+JjZ2PI8++nXW\n/Pbta1mSMMYUOr/WUajqLGBWtnmJHu/vAu7yZwzFzaJFyQwYMIOVK7cDsH//UdLTM62HV2NMwBSJ\nymwD+/YdYfjwb0lMXIoq1K1
bkXHjetKr1zmBDs0YU8pZoigC9u5No2nTCfz110FCQoJ4+OHzefLJ\niylXrkygQzPGGEsURUHlyhH06NGQDRt2M3Hi5Zx7rnXgZ4wpOixRBMDRo+mMGvUjnTrVpVOnGADG\njetJeHiIdeBnjClyLFEUsu++28zAgTPZsGE3sbHRrF49kODgIMqWDQ10aMYYk6PSmygKsO8lX+xI\nLcfDX1zGfx55D4AmTaKZMOFygoOtNZMxpmgrvYmikJJEZqYwaXEbhs3swr60CMLDQ3jiiY48+uiF\nlClj/TMZY4q+0psoTshn30u+2r83jcdfGse+tMN069aA8eN70qBBFb8e0xhjCpIlCj84dOgYISFB\nhIWFULlyBImJl5ORoVx3XVPrwM8YU+xYAXkBmz59PU2bTuCll37MmnfNNU25/vpmliSMMcWSJYoC\nsnXrfq68cip9+kxl69b9zJ69kczM4t0jujHGgCWKM3b8eAavvPITsbHj+fzz9URGluH117szd25/\neybCGFMiWB3FGdi16zCdO7/HqlVOB37XXdeUMWO6UbNmhQBHZowxBccSxRmIioogOros9epVYty4\nnvTs2SjQIZkAOX78OMnJyRw5ciTQoZhSLjw8nFq1ahEaWnAP8VqiyANV5f33V9OuXU3OOScKEeE/\n/7mKihXD7cnqUi45OZnIyEhiYmKs0YIJGFVl9+7dJCcnU69evQLbr9VR+Gj9+l106fJv+vX7lHvv\nnYmqU1FdvXqkJQnDkSNHiIqKsiRhAkpEiIqKKvA7W7ujyMWRI+mMHDmff/zjR44dyyAqKoJbbmkR\n6LBMEWRJwhQF/vgcWqLwYs6cTQwcOJOkpD0A3HFHK156qStRUWUDHJkxxhQeK3o6je3bD9Kr1wck\nJe2hadOqzJvXn7ff7mNJwhRZwcHBtGrViubNm9O7d2/27duXteyXX37h0ksvpXHjxjRq1Ijnnnsu\nq/gU4MsvvyQuLo6mTZvSunVrHn744UCcglfLly/nzjvvPGnelVdeSYcOHU6a179/fz7++OOT5pUv\nXz7r/YYNG+jZsyeNGjWiTZs2XH/99Wzfvv2MYtuzZw9du3alUaNGdO3alb179+a4XkxMDOeeey6t\nWrUiLi4ua/6IESOoWbMmrVq1olWrVsya5fRFt2XLFiIiIrLmJyQkZG3TpUuX0x6nwKlqsXq1bdtW\nC8QrOC8PGRmZmpmZmTU9atQCHTlyvh49ml4wxzQl1tq1awMdgpYrVy7r/a233qrPP/+8qqoePnxY\n69evr7Nnz1ZV1UOHDmn37t113Lhxqqq6evVqrV+/vq5bt05VVdPT03XChAkFGtvx48fPeB/XXnut\nrlixImt67969WqtWLW3SpIlu3Lgxa/5tt92mH3300Unbnrg2aWlp2rBhQ50+fXrWsu+//15Xr159\nRrE9+uijOnLkSFVVHTlypA4dOjTH9erWras7d+48Zf7TTz+tL7/88inzN2/erM2aNctxX5MnT876\nP84up88jsFTz+b1rRU+uFSv+IiFhBvfddx79+rUEYOjQCwMclSmWXvVTXUUeOrA8//zzWbVqFQAf\nfPABF154IZdddhkAZcuWZdy4ccTHx3Pffffx0ksv8fjjj9OkSRPAuTMZOHDgKfs8ePAggwYNYunS\npYgITz/9NNdccw3ly5fn4MGDAHz88cfMmDGDyZMn079/f8LDw1m+fDkXXngh06ZNY8WKFVSqVAmA\nRo0asWDBAoKCgkhISGDr1q0AvPbaa1x44cl/e6mpqaxatYqWLVtmzZs2bRq9e/emWrVqTJ06leHD\nh+d6XT744APOP/98evfunTUvPj7e18t6Wp9//jk//PADALfddhvx8fGMGjXqjPfrzRVXXEHHjh15\n/PHH/XocsKInUlOP8tBDs2nb9i0WLdrG6NELT7olN6a4ycjI4Ntvv+WKK64AnGKntm3bnrROgwYN\nOHjwIAcOHGDNmjWnLM/Jc889R8WKFVm9ejWrVq3i0ksvzXWb5ORkfvrpJ0aPHk2fPn349NNPAVi0\naBF169alWrVqDB48mCFDhrBkyRI++eQT7rrrrlP2s3TpUpo3b37SvClTpnDjjTdy4403MmXKlFxj\nAXw+19TU1KzinuyvtWvXnrL+9u3bqV69OgBnn332aYuyRIQuXbrQtm1b3nrrrZOWjR07lhYtWnDH\nHXecVKS0efNmWrVqRadOnZg/f37W/MqVK3P06FF2797t07mfiVJ7R6EKn61pwgNNJ5CcfICgIGHw\n4PY8++wl1nrFnBk/d11/OmlpabRq1Ypt27YRGxtL165dC3T/c+bMYerUqVnTlStXznWb6667juBg\nZ9yVvn378uyzz3L77bczdepU+vbtm7Vfzy/fAwcOcPDgwZPqFVJSUqhatWrW9Pbt2/ntt9+46KKL\nEBFCQ0NZs2YNzZs3z/HvN69/05GRkaxYsSJP23ge63THW7BgATVr1mTHjh107dqVJk2acPHFFzNw\n4ECefPJJRIQnn3yShx9+mHfeeYfq1auzdetWoqKiWLZsGVdeeSW//PILFSo4vT+cddZZ/Pnnn0RF\nReUrVl+VzESRy+h1uw6V5fapNzJjXWPgAHFxNXjzzV60aVO98GI0poBFRESwYsUKDh8+TLdu3Rg/\nfjwPPPAATZs2Zd68eSetu2nTJsqXL0+FChVo1qwZy5YtO6lYJy88vxSzt98vV65c1vvzzz+fpKQk\ndu7cyWeffcYTTzwBQGZmJgsXLiQ8PNzruXnu+7///S979+7NeqjswIEDTJkyhRdeeIGoqKiTfpHv\n2bOH6OhoAJo1a8bcuXNzPafU1FQ6duyY47IPPviApk2bnjSvWrVqpKSkUL16dVJSUjjrrLNy3LZm\nzZqA8wV/1VVXsXjxYi6++GKqVauWtc7dd99Nr169AAgLCyMsLAyAtm3b0qBBAzZs2JBVEX7kyBEi\nIiJyPZ8zVTKLnnIZvS4y7ChJu6tQoWw648b1YOHCOy1JmBKjbNmyvPHGG7z66qukp6dz8803s2DB\nAubMmQM4dx4PPPAAQ4cOBeDRRx/lxRdfZMOGDYDzxZ2YmHjKfrt27cr48eOzpk98GVerVo1169aR\nmZmZVbSUExHhqquu4qGHHiI2NjbrV/Bll13G2LFjs9bL6Zd8bGwsSUlJWdNTpkzhq6++YsuWLWzZ\nsoVly5Zl3e3Ex8fz4YcfcuzYMQAmT57MJZdcAsBNN93ETz/9xMyZM7P2NW/ePNasWXPS8U7cUeT0\nyp4kwKkvePfddwF499136dOnzynrHDp0iNTU1Kz3X3/9dVZxWkpKStZ6n376adb8nTt3kpGRATjJ\n/bfffqN+/fqA0xDpr7/+IiYm5pRjFbj81oIH6uVTq6ccWjQtWPC77tp1KGt6xYoU/fPPA7nvyxgf\nFLVWT6qqvXr10vfee09VVVetWqWdOnXSc845Rxs0aKAjRow4qYXfF198oW3atNEmTZpo
bGysPvro\no6fsPzU1VW+99VZt1qyZtmjRQj/55BNVVf3oo4+0fv362r59e73vvvv0tttuU9WcWx8tWbJEAZ08\neXLWvJ07d+r111+v5557rsbGxuqAAQNyPL/mzZvrgQMHdPPmzVqjRo2T4ldVbd26tS5cuFBVVUeM\nGKHNmzfXli1b6tVXX607duzIWm/dunXarVs3bdiwocbGxmrfvn31r7/+8nptc7Nr1y699NJLtWHD\nhtq5c2fdvXu3qqpu27ZNe/TooaqqGzdu1BYtWmiLFi20adOmJ7VYuuWWW7R58+Z67rnnau/evfXP\nP/9UVdWPP/5YmzZtqi1bttTWrVuf1FpryZIlevXVV+cYT0G3ehItZhW3cXFxunTpUu8rnWh18rCy\ne/dhHntsDpMmLefOO1szadIV/g/SlDrr1q0jNjY20GGUaGPGjCEyMjLHyu7SaPDgwVxxxRV07tz5\nlGU5fR5FZJmqxp2ysg9KZtETTmX1u++uoEmT8UyatJzQ0CBq1Ii0Fk3GFFMDBw7MKq830Lx58xyT\nhD+UyMrsX3dEk/BxL+Zu+hyA+PgYJk68nCZNogMcmTEmv8LDw+nXr1+gwygy7r777kI7VolLFMnJ\nB2j5agLHMkKIji7Lq69eRr9+LazJq/E7VbXPmQk4f5SalLhEUatWBfq1XUWQKP+Y9SFVqvi/6Zgx\n4eHh7N6927oaNwGl6oxH4a2pcX4U+0SRkpLKkCGzSUiIIz4+BoC3rv2CoCAFSxKmkNSqVYvk5GR2\n7twZ6FBMKXdihLuCVGwTRUZGJhMnLuXxx7/jwIGjJCXtYcmSuxERJ0kYU4hCQ0MLdEQxY4oSv7Z6\nEpHuIrJeRJJE5LEclouIvOEuXyUibXzZ7//+l0KHDm8zaNCXHDhwlN69z+GTT663W35jjPEDv91R\niEgwMB7oCiQDS0Rkuqp69qjVA2jkvtoDE91/T+uPP/Zz3nn/JDNTqVWrAmPH9qBPn8aWJIwxxk/8\nWfTUDkhS1U0AIjIV6AN4Joo+wHvuU4MLRaSSiFRX1ZRTd+fYsysVIYOHLl7IM91+oPzGh2C0H8/C\nGGNKOX8miprAHx7TyZx6t5DTOjWBkxKFiNwD3ONOHoVn14yeB6NP7ufsVI+U+LuMaGBXoIMoIuxa\n/M2uxd/sWvytcX43LBaV2ar6FvAWgIgsze9j6CWNXYu/2bX4m12Lv9m1+JuI5NL30en5szJ7G1Db\nY7qWOy+v6xhjjAkgfyaKJUAjEaknImWAG4Dp2daZDtzqtn7qAOz3Vj9hjDGm8Pmt6ElV00XkfmA2\nEAy8o6q/iEiCuzwRmAX0BJKAw8DtPuz6rdxXKTXsWvzNrsXf7Fr8za7F3/J9LYpdN+PGGGMKV4nt\nZtwYY0zBsERhjDHGqyKbKPzV/Udx5MO1uNm9BqtF5CcRaRmIOAtDbtfCY73zRCRdRK4tzPgKky/X\nQkTiRWSFiPwiInMLO8bC4sPfSEUR+UJEVrrXwpf60GJHRN4RkR0isuY0y/P3vZnfMVT9+cKp/N4I\n1AfKACuBptnW6Ql8CQjQAVgU6LgDeC0uACq773uU5mvhsd53OI0lrg103AH8XFTC6Qmhjjt9VqDj\nDuC1GA6Mct9XBfYAZQIdux+uxcVAG2DNaZbn63uzqN5RZHX/oarHgBPdf3jK6v5DVRcClUSkemEH\nWghyvRaq+pOq7nUnF+I8j1IS+fK5ABgEfALsKMzgCpkv1+ImYJqqbgVQ1ZJ6PXy5FgpEitMpXHmc\nRJFeuGH6n6rOwzm308nX92ZRTRSn69ojr+uUBHk9zztxfjGURLleCxGpCVyF08FkSebL5+IcoLKI\n/CAiy0Tk1kKLrnD5ci3GAbHAn8BqYLCqZhZOeEVKvr43i0UXHsY3InIJTqK4KNCxBNBrwDBVzbQe\nhQkB2gKdgQjgZxFZqKobAhtWQHQDVgCXAg2Ab0RkvqoeCGxYxUNRTRTW/cfffDpPEWkBTAJ6qOru\nQoqtsPlyLeKAqW6SiAZ6iki6qn5WOCEWGl+uRTKwW1UPAYdEZB7QEihpicKXa3E78A91CuqTRGQz\n0ARYXDghFhn5+t4sqkVP1v3H33K9FiJSB5gG9CvhvxZzvRaqWk9VY1Q1BvgYuLcEJgnw7W/kc+Ai\nEQkRkbI4vTevK+Q4C4Mv12Irzp0VIlINpyfVTYUaZdGQr+/NInlHof7r/qPY8fFaPAVEARPcX9Lp\nWgJ7zPTxWpQKvlwLVV0nIl8Bq4BMYJKq5thssjjz8XPxHDBZRFbjtPgZpqolrvtxEZkCxAPRIpIM\nPA2Ewpl9b1oXHsYYY7wqqkVPxhhjighLFMYYY7yyRGGMMcYrSxTGGGO8skRhjDHGK0sUpsgRkQy3\nx9MTrxgv68acrqfMPB7zB7f30ZUi8qOINM7HPhJOdJMhIv1FpIbHskki0rSA41wiIq182OZB9zkK\nY/LFEoUpitJUtZXHa0shHfdmVW0JvAu8nNeN3WcX3nMn+wM1PJbdpaprCyTKv+OcgG9xPghYojD5\nZonCFAvuncN8Efmf+7ogh3Waichi9y5klYg0cuff4jH/TREJzuVw84CG7radRWS5OGN9vCMiYe78\nf4jIWvc4r7jzRojII+KMgREHvO8eM8K9E4hz7zqyvtzdO49x+YzzZzw6dBORiSKyVJzxFp5x5z2A\nk7C+F5Hv3XmXicjP7nX8SETK53IcU8pZojBFUYRHsdOn7rwdQFdVbQP0Bd7IYbsE4HVVbYXzRZ0s\nIrHu+he68zOAm3M5fm9gtYiEA5OBvqp6Lk5PBgNFJAqnh9pmqtoCeN5zY1X9GFiK88u/laqmeSz+\nxN32hL44fVPlJ87ugGf3JI+7T+S3ADqJSAtVfQOnx9RLVPUSEYkGngC6uNdyKfBQLscxpVyR7MLD\nlHpp7pelp1BgnFsmn4HThXZ2PwOPi0gtnHEYfhORzjg9qC5xuzeJ4PTjVLwvImnAFpwxLRoDmz36\nz3oXuA+ny+ojwNsiMgOY4euJqepOEdnk9rPzG07HdD+6+81LnGVwxlXwvE7Xi8g9OH/X1YGmON13\neOrgzv/RPU4ZnOtmzGlZojDFxRBgO07vp0E4X9QnUdUPRGQRcDkwS0QG4PTr866q/p8Px7hZVZee\nmBCRKjmt5PYt1A6nk7lrgftxuq/21VTgeuBX4FNVVXG+tX2OE1iGUz8xFrhaROoBjwDnqepeEZkM\nhOewrQDfqOqNeYjXlHJW9GSKi4pAijvYTD+czt9OIiL1gU1uccvnOEUw3wLXishZ7jpVRKSuj8dc\nD8SISEN3uh8w1y3Tr6iqs3ASWE5jlKcCkafZ76c4I43diJM0yGucbnfZTwIdRKQJUAE4BOwXp3fU\nHqeJZSFw4YlzEpFyIpLT3ZkxWSxRmOJiAnCbiKzEKa45lMM61wNrRGQF0BxnyMe1OGXyX4vIKuAb\nnGKZXKnqEZzeNT9yex3NBBJxvnRnuPtbQM5
l/JOBxBOV2dn2uxenu++6qrrYnZfnON26j1eBR1V1\nJbAc5y7lA5zirBPeAr4Ske9VdSdOi6wp7nF+xrmexpyW9R5rjDHGK7ujMMYY45UlCmOMMV5ZojDG\nGOOVJQpjjDFeWaIwxhjjlSUKY4wxXlmiMMYY49X/A01cX4JpG82vAAAAAElFTkSuQmCC\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "# plot ROC curve\n", "\n", "# at bottom left we classify everything as negative, no true positives, no false positives\n", "# as we lower the threshold at which we classify positive, we get some true positives, some false positives\n", "# https://en.wikipedia.org/wiki/Receiver_operating_characteristic\n", "\n", "# AUC of a coinflip is 0.5 so area under curve (AUC) ~0.5 is not good\n", "# but highest, lowest prob predictions have predictive value\n", "\n", "(fpr, tpr, thresholds) = sklearn.metrics.roc_curve(y_xval, y_xval_prob)\n", "roc_auc = sklearn.metrics.auc(fpr, tpr)\n", "\n", "plt.figure()\n", "lw = 2\n", "plt.plot(fpr, tpr, color='darkorange',\n", " lw=lw, label='ROC curve (AUC = %0.3f)' % roc_auc)\n", "plt.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')\n", "plt.xlim([0.0, 1.0])\n", "plt.ylim([0.0, 1.05])\n", "plt.xlabel('False Positive Rate')\n", "plt.ylabel('True Positive Rate')\n", "plt.title('Receiver operating characteristic')\n", "plt.legend(loc=\"lower right\")\n", "plt.show()\n", "\n" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "21:07:47 Starting\n", "21:07:47 Starting grid search\n", "22:53:22 Finishing\n", "Best Xval: 0.526690 using {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.0003}\n", "0.491103 (0.063526) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0}\n", "0.516014 (0.070171) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.0001}\n", "0.526690 (0.072711) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.0003}\n", "0.508897 (0.047046) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.001}\n", "0.476868 (0.042427) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.003}\n", "0.523132 (0.094458) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0}\n", "0.526690 (0.088422) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.0001}\n", "0.508897 (0.041013) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.0003}\n", "0.498221 (0.077644) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.001}\n", "0.476868 (0.054782) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.003}\n", "0.523132 (0.070264) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0}\n", "0.526690 (0.106173) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.0001}\n", "0.523132 (0.110483) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.0003}\n", "0.512456 (0.064586) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.001}\n", "0.483986 (0.030503) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.003}\n", "0.508897 (0.077358) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0}\n", "0.498221 (0.045010) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 
0.333, 'reg_penalty': 0.0001}\n", "0.487544 (0.059406) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0.0003}\n", "0.491103 (0.040090) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0.001}\n", "0.487544 (0.032225) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0.003}\n", "0.512456 (0.083894) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0}\n", "0.508897 (0.086396) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.0001}\n", "0.512456 (0.068044) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.0003}\n", "0.516014 (0.060904) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.001}\n", "0.491103 (0.041700) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.003}\n", "0.505338 (0.098331) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0}\n", "0.519573 (0.082294) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.0001}\n", "0.498221 (0.076849) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.0003}\n", "0.501779 (0.072658) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.001}\n", "0.494662 (0.032186) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.003}\n", "0.508897 (0.051400) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0}\n", "0.494662 (0.049406) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0.0001}\n", "0.508897 (0.085364) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0.0003}\n", "0.494662 (0.063142) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0.001}\n", "0.480427 (0.049650) with: {'activation': 'relu', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0.003}\n", "0.508897 (0.077749) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0}\n", "0.505338 (0.064741) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.0001}\n", "0.505338 (0.080497) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.0003}\n", "0.498221 (0.048460) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.001}\n", "0.469751 (0.058713) with: {'activation': 'relu', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.003}\n", "0.498221 (0.087977) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0}\n", "0.501779 (0.059671) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.0001}\n", "0.494662 (0.080382) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.0003}\n", "0.498221 (0.051613) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.001}\n", "0.498221 (0.027390) with: {'activation': 'relu', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.003}\n", "0.501779 (0.041107) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0}\n", "0.491103 (0.041725) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.0001}\n", 
"0.480427 (0.058293) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.0003}\n", "0.501779 (0.033401) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.001}\n", "0.501779 (0.006650) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0, 'reg_penalty': 0.003}\n", "0.483986 (0.043349) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0}\n", "0.483986 (0.055002) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.0001}\n", "0.483986 (0.046823) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.0003}\n", "0.491103 (0.034190) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.001}\n", "0.505338 (0.004347) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0, 'reg_penalty': 0.003}\n", "0.469751 (0.049342) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0}\n", "0.476868 (0.047310) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.0001}\n", "0.476868 (0.054820) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.0003}\n", "0.469751 (0.041530) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.001}\n", "0.501779 (0.006650) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0, 'reg_penalty': 0.003}\n", "0.487544 (0.047225) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0}\n", "0.480427 (0.035208) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0.0001}\n", "0.494662 (0.036667) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0.0003}\n", "0.491103 (0.018516) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0.001}\n", "0.505338 (0.004347) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.333, 'reg_penalty': 0.003}\n", "0.501779 (0.056158) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0}\n", "0.487544 (0.050521) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.0001}\n", "0.498221 (0.055529) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.0003}\n", "0.491103 (0.016911) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.001}\n", "0.501779 (0.006650) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.333, 'reg_penalty': 0.003}\n", "0.480427 (0.041042) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0}\n", "0.476868 (0.048612) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.0001}\n", "0.473310 (0.060954) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.0003}\n", "0.483986 (0.028263) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.001}\n", "0.501779 (0.006650) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.333, 'reg_penalty': 0.003}\n", "0.498221 (0.054230) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0}\n", "0.498221 (0.050833) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.5, 
'reg_penalty': 0.0001}\n",
"0.480427 (0.061030) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0.0003}\n",
"0.501779 (0.006650) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0.001}\n",
"0.505338 (0.004347) with: {'activation': 'sigmoid', 'hidden_layer_size': 8, 'dropout': 0.5, 'reg_penalty': 0.003}\n",
"0.498221 (0.048082) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0}\n",
"0.483986 (0.052513) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.0001}\n",
"0.473310 (0.056395) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.0003}\n",
"0.501779 (0.010381) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.001}\n",
"0.505338 (0.004347) with: {'activation': 'sigmoid', 'hidden_layer_size': 16, 'dropout': 0.5, 'reg_penalty': 0.003}\n",
"0.480427 (0.050802) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0}\n",
"0.491103 (0.058213) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.0001}\n",
"0.501779 (0.041821) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.0003}\n",
"0.498221 (0.010381) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.001}\n",
"0.505338 (0.004347) with: {'activation': 'sigmoid', 'hidden_layer_size': 32, 'dropout': 0.5, 'reg_penalty': 0.003}\n",
"Evaluate performance in test set\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "[[18 16]\n", " [19 18]]\n", "Test Accuracy 0.507\n", "Test F1 0.507\n" ] } ],
"source": [ "# 2nd, run k-fold cross-validation and grid search with scikit-learn's GridSearchCV\n",
"# via the KerasClassifier wrapper; this takes a while - note the timestamps (AWS p2.xlarge)\n",
"\n",
"print('%s Starting' % time.strftime(\"%H:%M:%S\"))\n",
"\n",
"estimator = KerasClassifier(build_fn=create_model, epochs=100, batch_size=10, verbose=0)\n",
"kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=seed)\n",
"\n",
"# hyperparameter options to try\n",
"hidden_layer_hp = [8, 16, 32]\n",
"dropout_hp = [0, 0.333, 0.5]\n",
"reg_penalty_hp = [0, 0.0001, 0.0003, 0.001, 0.003]\n",
"activation_hp = ['relu', 'sigmoid']\n",
"\n",
"param_grid = dict(hidden_layer_size=hidden_layer_hp,\n",
"                  dropout=dropout_hp,\n",
"                  reg_penalty=reg_penalty_hp,\n",
"                  activation=activation_hp,\n",
"                  )\n",
"\n",
"grid = GridSearchCV(estimator=estimator, param_grid=param_grid, cv=kfold)\n",
"print('%s Starting grid search' % time.strftime(\"%H:%M:%S\"))\n",
"classifier = grid.fit(X_bigtrain, y_bigtrain)\n",
"\n",
"print('%s Finishing' % time.strftime(\"%H:%M:%S\"))\n",
"\n",
"# summarize cross-validation results\n",
"print(\"Best Xval: %f using %s\" % (classifier.best_score_, classifier.best_params_))\n",
"means = classifier.cv_results_['mean_test_score']\n",
"stds = classifier.cv_results_['std_test_score']\n",
"params = classifier.cv_results_['params']\n",
"for mean, stdev, param in zip(means, stds, params):\n",
"    print(\"%f (%f) with: %r\" % (mean, stdev, param))\n",
"\n",
"# evaluate on the held-out test set\n",
"print(\"Evaluate performance in test set\")\n",
"y_test_pred = classifier.predict(X_test)\n",
"# note: arguments are (pred, actual), so rows are predicted classes\n",
"conf_matrix = sklearn.metrics.confusion_matrix(y_test_pred, y_test)\n",
"print(conf_matrix)\n",
"\n",
"print(\"Test Accuracy %.3f\" % sklearn.metrics.accuracy_score(y_test_pred, y_test))\n",
"print(\"Test F1 %.3f\" % sklearn.metrics.f1_score(y_test_pred, y_test))" ] },
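{ "cell_type": "markdown", "metadata": {}, "source": [ "Two short asides before rolling our own loop; both are hedged sketches rather than part of the original run. First, the winning configuration from the grid search can be persisted: this assumes `GridSearchCV` refit the best configuration (`refit=True` is the default) and that the fitted `KerasClassifier` exposes the underlying Keras model as `.model`; the `grid_best_model.*` filenames are made up for illustration.\n" ] },
{ "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# sketch: persist the best model found by the grid search above\n",
"# assumes classifier = grid.fit(...) from the previous cell, with refit=True (the default)\n",
"from keras.models import model_from_json\n",
"\n",
"best_model = classifier.best_estimator_.model  # underlying Keras model\n",
"with open('grid_best_model.json', 'w') as f:\n",
"    f.write(best_model.to_json())  # architecture\n",
"best_model.save_weights('grid_best_model.h5')  # weights\n",
"\n",
"# reload later with:\n",
"# with open('grid_best_model.json') as f:\n",
"#     best_model = model_from_json(f.read())\n",
"# best_model.load_weights('grid_best_model.h5')" ] },
{ "cell_type": "markdown", "metadata": {}, "source": [ "Second, since the loss curves above showed xval loss bottoming out and then climbing, one option is to stop training near that minimum instead of running a fixed number of epochs. A minimal sketch with Keras's `EarlyStopping` callback; it assumes the `create_model()` helper and the `X_train`/`y_train`/`X_xval`/`y_xval` arrays defined earlier, and the `model_es`/`fit_es` names are hypothetical.\n" ] },
{ "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# sketch: stop training once xval loss stops improving\n",
"# assumes create_model() and the train/xval split from earlier cells\n",
"from keras.callbacks import EarlyStopping\n",
"\n",
"model_es = create_model()\n",
"early_stop = EarlyStopping(monitor='val_loss',  # watch xval loss\n",
"                           patience=25,         # epochs to wait past the minimum\n",
"                           verbose=1)\n",
"fit_es = model_es.fit(X_train, y_train,\n",
"                      validation_data=(X_xval, y_xval),\n",
"                      epochs=1000, batch_size=10, verbose=0,\n",
"                      callbacks=[early_stop])" ] },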
{ "cell_type": "code", "execution_count": 11, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Finally, roll our own grid search\n", "# for more fine-grained control, such as a custom metric and decision threshold\n", "\n", "# define some custom metrics\n", "import keras.backend as K\n", "\n", "def recall(y_true, y_pred):\n", "    # return keras tensor for recall\n", "    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))\n", "    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))\n", "    recall = true_positives / (possible_positives + K.epsilon())\n", "    return recall\n", "\n", "def precision(y_true, y_pred):\n", "    # return keras tensor for precision\n", "    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))\n", "    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))\n", "    precision = true_positives / (predicted_positives + K.epsilon())\n", "    return precision\n", "\n", "def fbeta_score(y_true, y_pred, beta=1):\n", "    if beta < 0:\n", "        raise ValueError('The lowest choosable beta is zero (only precision).')\n", "\n", "    # If there are no true positives, fix the F score at 0 like sklearn.\n", "    # (with a symbolic backend this Python comparison never fires at graph time)\n", "    if K.sum(K.round(K.clip(y_true, 0, 1))) == 0:\n", "        return 0\n", "\n", "    p = precision(y_true, y_pred)\n", "    r = recall(y_true, y_pred)\n", "    bb = beta ** 2\n", "    fbeta_score = (1 + bb) * (p * r) / (bb * p + r + K.epsilon())\n", "    return fbeta_score\n", "\n", "def f_score(y_true, y_pred):\n", "    beta = 1  # can adjust to penalize false positives/negatives\n", "    return fbeta_score(y_true, y_pred, beta=beta)\n", "\n", "def selectThresholdF1(logits, labels, beta=1):\n", "    # return the (threshold, F-score) pair that maximizes the F-score\n", "    # predict True where score >= threshold\n", "\n", "    precision, recall, thresholds = sklearn.metrics.precision_recall_curve(labels, logits)\n", "    bb = beta**2\n", "    f1_scores = (1 + bb) * precision * recall / (bb * precision + recall)\n", "    f1_scores = np.nan_to_num(f1_scores)\n", "\n", "    # f1_scores has one more entry than thresholds; the extra (precision=1, recall=0)\n", "    # endpoint scores 0, so argmax never selects an out-of-range index\n", "    best_index = np.argmax(f1_scores)\n", "    best_threshold = thresholds[best_index]\n", "    best_score = f1_scores[best_index]\n", "    return (best_threshold, best_score)\n", "\n", "def selectThresholdAcc(logits, labels, beta=1):\n", "    # return the threshold that yields the best accuracy (beta is unused; kept for a uniform signature)\n", "    # predict True where score >= threshold\n", "\n", "    precision, recall, thresholds = sklearn.metrics.precision_recall_curve(labels, logits)\n", "    accuracies = [sklearn.metrics.accuracy_score(logits >= thresh, labels) for thresh in thresholds]\n", "\n", "    best_index = np.argmax(accuracies)\n", "    best_threshold = thresholds[best_index]\n", "    best_score = accuracies[best_index]\n", "    return (best_threshold, best_score)\n", "\n", "def selectThresholdTest(logits, labels, beta=1):\n", "    # print F1, accuracy, and the confusion matrix at every threshold, then return the best by F1\n", "\n", "    precision, recall, thresholds = sklearn.metrics.precision_recall_curve(labels, logits)\n", "    bb = beta**2\n", "    f1_scores = (1 + bb) * precision * recall / (bb * precision + recall)\n", "    f1_scores = np.nan_to_num(f1_scores)\n", "\n", "    for thresh in thresholds:\n", "        labels_pred = logits >= thresh\n", "        f_test = sklearn.metrics.f1_score(labels_pred, labels)\n", "        acc_test = sklearn.metrics.accuracy_score(labels_pred, labels)\n", "\n", "        print(\"Threshold %f, f1 %f, accuracy %f\" % (thresh, f_test, acc_test))\n", "        print(sklearn.metrics.confusion_matrix(labels_pred, labels))\n", "\n", "    best_index = np.argmax(f1_scores)\n", "    best_threshold = thresholds[best_index]\n", "    best_score = f1_scores[best_index]\n", "    return (best_threshold, best_score)" ] },
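{ "cell_type": "markdown", "metadata": {}, "source": [ "The `fbeta_score` metric above implements (up to the epsilon added for numerical stability)\n", "\n", "$$F_\\beta = (1 + \\beta^2)\\,\\frac{P \\cdot R}{\\beta^2 P + R}$$\n", "\n", "which reduces to the ordinary F1 at $\\beta = 1$, while $\\beta > 1$ weights recall more heavily. The `selectThreshold*` helpers then pick a decision threshold for the predicted probabilities. A toy sketch of their use (hypothetical scores, not the notebook's data):\n", "\n", "```python\n", "probs = np.array([0.2, 0.4, 0.6, 0.8, 0.9])  # predicted probabilities\n", "labels = np.array([0, 0, 1, 0, 1])           # true classes\n", "thresh, f1 = selectThresholdF1(probs, labels)\n", "print(\"best threshold %.2f, F1 %.3f\" % (thresh, f1))\n", "```\n" ] },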
{ "cell_type": "code", "execution_count": 12, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# same builder as above, but compile with the custom f_score metric\n", "def create_model(num_components=num_features, \n", "                 hidden_layer_size=30, \n", "                 dropout=(1.0/3.0), \n", "                 reg_penalty=0.0001, \n", "                 activation='relu'):\n", "\n", "    model = declare_model(num_components=num_components, \n", "                          hidden_layer_size=hidden_layer_size, \n", "                          dropout=dropout, \n", "                          reg_penalty=reg_penalty, \n", "                          activation=activation)\n", "\n", "    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy', f_score])\n", "    return model\n" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "rm: cannot remove 'model*.json': No such file or directory\n", "rm: cannot remove 'model*.h5': No such file or directory\n" ] } ], "source": [ "# delete old saved model files (rm complains harmlessly if none exist yet)\n", "!rm model*.json\n", "!rm model*.h5" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "14:08:30 Starting\n", "14:08:31 epoch 0 of 1000 Train loss: 0.6930 Train f_score 0.4953 Xval loss: 0.6931 Xval f_score 0.6000\n", "14:08:31 epoch 50 of 1000 Train loss: 0.6857 Train f_score 0.6844 Xval loss: 0.6925 Xval f_score 0.6465\n", "14:08:31 epoch 100 of 1000 Train loss: 0.6668 Train f_score 0.6481 Xval loss: 0.6900 Xval f_score 0.5278\n", "14:08:31 epoch 150 of 1000 Train loss: 0.6474 Train f_score 0.6408 Xval loss: 0.6910 Xval f_score 0.5217\n", "14:08:32 epoch 200 of 1000 Train loss: 0.6317 Train f_score 0.6540 Xval loss: 0.6918 Xval f_score 0.5507\n", "14:08:32 epoch 250 of 1000 Train loss: 0.6182 Train f_score 0.6667 Xval loss: 0.6986 Xval f_score 0.5373\n", "14:08:32 epoch 300 of 1000 Train loss: 0.6050 Train f_score 0.6857 Xval loss: 0.7088 Xval f_score 0.5455\n", "14:08:32 epoch 350 of 1000 Train loss: 0.5922 Train f_score 0.7019 Xval loss: 0.7212 Xval f_score 0.5455\n", "14:08:32 epoch 400 of 1000 Train loss: 0.5789 Train f_score 0.7087 Xval loss: 0.7280 Xval f_score 0.5294\n", "14:08:32 epoch 450 of 1000 Train loss: 0.5606 Train f_score 0.7547 Xval loss: 0.7520 Xval f_score 0.5143\n", "Best Xval loss epoch 119, value 0.689570\n", "NN units 4\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.752, Train F1 0.748\n", "[[81 30]\n", " [22 77]]\n", "Final Xval Accuracy 0.577, Xval F1 0.516\n", "[[25 19]\n", " [11 16]]\n", "14:08:32 Starting\n", "14:08:33 epoch 0 of 1000 Train loss: 0.6932 Train f_score 0.6466 Xval loss: 0.6932 Xval f_score 0.5823\n", "14:08:33 epoch 50 of 1000 Train loss: 0.6834 Train f_score 0.6791 Xval loss: 0.6911 Xval f_score 0.6154\n", "14:08:33 epoch 100 of 1000 Train loss: 0.6636 Train f_score 0.6484 Xval loss: 0.6915 Xval f_score 0.5634\n", "14:08:34 epoch 150 of 1000 Train loss: 0.6456 Train f_score 0.6471 Xval loss: 0.7007 Xval f_score 0.4687\n", "14:08:34 epoch 200 of 1000 Train loss: 0.6270 Train f_score 0.6603 Xval loss: 0.7091 Xval f_score 0.5312\n", "14:08:34 epoch 250 of 1000 Train loss: 0.6093 Train f_score 0.6699 Xval loss: 0.7243 Xval f_score 0.5000\n", "14:08:34 epoch 300 of 1000 Train loss: 0.5941 Train f_score 0.6731 Xval loss: 0.7403 Xval f_score 0.5373\n", "14:08:34 epoch 350 of 1000 Train loss: 0.5798 Train f_score 0.6860 Xval loss: 0.7562 Xval f_score 0.5152\n", "Best Xval loss epoch 79, value 0.690104\n", "NN units 4\n", "Reg_penalty 
0.00010000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.705, Train F1 0.670\n", "[[85 44]\n", " [18 63]]\n", "Final Xval Accuracy 0.563, Xval F1 0.627\n", "[[14 9]\n", " [22 26]]\n", "14:08:34 Starting\n", "14:08:35 epoch 0 of 1000 Train loss: 0.6945 Train f_score 0.6551 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:08:35 epoch 50 of 1000 Train loss: 0.6880 Train f_score 0.6751 Xval loss: 0.6925 Xval f_score 0.6604\n", "14:08:35 epoch 100 of 1000 Train loss: 0.6748 Train f_score 0.6154 Xval loss: 0.6911 Xval f_score 0.6133\n", "14:08:35 epoch 150 of 1000 Train loss: 0.6629 Train f_score 0.6385 Xval loss: 0.6922 Xval f_score 0.5507\n", "14:08:35 epoch 200 of 1000 Train loss: 0.6519 Train f_score 0.6479 Xval loss: 0.6937 Xval f_score 0.5294\n", "14:08:36 epoch 250 of 1000 Train loss: 0.6428 Train f_score 0.6442 Xval loss: 0.6993 Xval f_score 0.5588\n", "14:08:36 epoch 300 of 1000 Train loss: 0.6334 Train f_score 0.6351 Xval loss: 0.7002 Xval f_score 0.5672\n", "14:08:36 epoch 350 of 1000 Train loss: 0.6229 Train f_score 0.6667 Xval loss: 0.7044 Xval f_score 0.6197\n", "14:08:36 epoch 400 of 1000 Train loss: 0.6128 Train f_score 0.6822 Xval loss: 0.7180 Xval f_score 0.5915\n", "14:08:36 epoch 450 of 1000 Train loss: 0.6042 Train f_score 0.7042 Xval loss: 0.7270 Xval f_score 0.5714\n", "14:08:36 epoch 500 of 1000 Train loss: 0.5954 Train f_score 0.7042 Xval loss: 0.7294 Xval f_score 0.5915\n", "14:08:36 epoch 550 of 1000 Train loss: 0.5854 Train f_score 0.7075 Xval loss: 0.7332 Xval f_score 0.5915\n", "14:08:36 epoch 600 of 1000 Train loss: 0.5747 Train f_score 0.7136 Xval loss: 0.7314 Xval f_score 0.5714\n", "14:08:36 epoch 650 of 1000 Train loss: 0.5631 Train f_score 0.7204 Xval loss: 0.7399 Xval f_score 0.5714\n", "14:08:37 epoch 700 of 1000 Train loss: 0.5511 Train f_score 0.7368 Xval loss: 0.7532 Xval f_score 0.5507\n", "Best Xval loss epoch 92, value 0.691045\n", "NN units 4\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.762, Train F1 0.764\n", "[[79 26]\n", " [24 81]]\n", "Final Xval Accuracy 0.592, Xval F1 0.508\n", "[[27 20]\n", " [ 9 15]]\n", "14:08:37 Starting\n", "14:08:38 epoch 0 of 1000 Train loss: 0.6965 Train f_score 0.4870 Xval loss: 0.6978 Xval f_score 0.5143\n", "14:08:38 epoch 50 of 1000 Train loss: 0.6881 Train f_score 0.6766 Xval loss: 0.6976 Xval f_score 0.5122\n", "14:08:38 epoch 100 of 1000 Train loss: 0.6742 Train f_score 0.6581 Xval loss: 0.7050 Xval f_score 0.5278\n", "14:08:38 epoch 150 of 1000 Train loss: 0.6581 Train f_score 0.6809 Xval loss: 0.7189 Xval f_score 0.5278\n", "14:08:38 epoch 200 of 1000 Train loss: 0.6437 Train f_score 0.6724 Xval loss: 0.7334 Xval f_score 0.5143\n", "14:08:38 epoch 250 of 1000 Train loss: 0.6293 Train f_score 0.6923 Xval loss: 0.7453 Xval f_score 0.5278\n", "14:08:38 epoch 300 of 1000 Train loss: 0.6158 Train f_score 0.7074 Xval loss: 0.7649 Xval f_score 0.5278\n", "Best Xval loss epoch 29, value 0.697055\n", "NN units 4\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.700, Train F1 0.687\n", "[[78 38]\n", " [25 69]]\n", "Final Xval Accuracy 0.577, Xval F1 0.559\n", "[[22 16]\n", " [14 19]]\n", "14:08:39 Starting\n", "14:08:39 epoch 0 of 1000 Train loss: 0.7066 Train f_score 0.5948 Xval loss: 0.7063 Xval f_score 0.6337\n", "14:08:40 epoch 50 of 1000 Train loss: 0.6950 Train f_score 0.6751 Xval loss: 0.6956 Xval f_score 0.6604\n", "14:08:40 epoch 100 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 
0.6937 Xval f_score 0.6604\n", "14:08:40 epoch 150 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:40 epoch 200 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:40 epoch 250 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:40 epoch 300 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:40 epoch 350 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:40 epoch 400 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:40 epoch 450 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:40 epoch 500 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 700 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 750 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 850 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:08:41 epoch 950 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "Best Xval loss epoch 578, value 0.693634\n", "NN units 4\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.510, Train F1 0.675\n", "[[ 0 0]\n", " [103 107]]\n", "Final Xval Accuracy 0.535, Xval F1 0.400\n", "[[27 24]\n", " [ 9 11]]\n", "14:08:42 Starting\n", "14:08:43 epoch 0 of 1000 Train loss: 0.6932 Train f_score 0.5976 Xval loss: 0.6940 Xval f_score 0.5500\n", "14:08:43 epoch 50 of 1000 Train loss: 0.6793 Train f_score 0.7080 Xval loss: 0.6924 Xval f_score 0.6067\n", "14:08:43 epoch 100 of 1000 Train loss: 0.6428 Train f_score 0.7000 Xval loss: 0.6895 Xval f_score 0.5714\n", "14:08:43 epoch 150 of 1000 Train loss: 0.6051 Train f_score 0.6854 Xval loss: 0.7019 Xval f_score 0.5294\n", "14:08:43 epoch 200 of 1000 Train loss: 0.5734 Train f_score 0.7014 Xval loss: 0.7237 Xval f_score 0.5000\n", "14:08:43 epoch 250 of 1000 Train loss: 0.5445 Train f_score 0.7196 Xval loss: 0.7555 Xval f_score 0.5070\n", "Best Xval loss epoch 88, value 0.688773\n", "NN units 8\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation relu\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Final Train Accuracy 0.752, Train F1 0.737\n", "[[85 34]\n", " [18 73]]\n", "Final Xval Accuracy 0.606, Xval F1 0.562\n", "[[25 17]\n", " [11 18]]\n", "14:08:44 Starting\n", "14:08:44 epoch 0 of 1000 Train loss: 0.6944 Train f_score 0.2105 Xval loss: 0.6942 Xval f_score 0.2326\n", "14:08:45 epoch 50 of 1000 Train loss: 0.6791 Train f_score 0.6987 Xval loss: 0.6956 Xval f_score 0.4615\n", "14:08:45 epoch 100 of 1000 Train loss: 0.6473 Train f_score 0.7149 Xval loss: 0.7005 Xval f_score 
0.4706\n", "14:08:45 epoch 150 of 1000 Train loss: 0.6166 Train f_score 0.7273 Xval loss: 0.7165 Xval f_score 0.4412\n", "14:08:45 epoch 200 of 1000 Train loss: 0.5899 Train f_score 0.7240 Xval loss: 0.7420 Xval f_score 0.4478\n", "14:08:45 epoch 250 of 1000 Train loss: 0.5667 Train f_score 0.7373 Xval loss: 0.7635 Xval f_score 0.5070\n", "Best Xval loss epoch 5, value 0.694174\n", "NN units 8\n", "Reg_penalty 0.00010000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.743, Train F1 0.748\n", "[[76 27]\n", " [27 80]]\n", "Final Xval Accuracy 0.563, Xval F1 0.475\n", "[[26 21]\n", " [10 14]]\n", "14:08:45 Starting\n", "14:08:46 epoch 0 of 1000 Train loss: 0.6963 Train f_score 0.0840 Xval loss: 0.6963 Xval f_score 0.0976\n", "14:08:46 epoch 50 of 1000 Train loss: 0.6862 Train f_score 0.6515 Xval loss: 0.6947 Xval f_score 0.5895\n", "14:08:47 epoch 100 of 1000 Train loss: 0.6624 Train f_score 0.6514 Xval loss: 0.7016 Xval f_score 0.5507\n", "14:08:47 epoch 150 of 1000 Train loss: 0.6358 Train f_score 0.6789 Xval loss: 0.7091 Xval f_score 0.5507\n", "14:08:47 epoch 200 of 1000 Train loss: 0.6058 Train f_score 0.6759 Xval loss: 0.7221 Xval f_score 0.5294\n", "14:08:47 epoch 250 of 1000 Train loss: 0.5805 Train f_score 0.7123 Xval loss: 0.7550 Xval f_score 0.4615\n", "Best Xval loss epoch 51, value 0.694705\n", "NN units 8\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.738, Train F1 0.734\n", "[[79 31]\n", " [24 76]]\n", "Final Xval Accuracy 0.549, Xval F1 0.673\n", "[[ 6 2]\n", " [30 33]]\n", "14:08:47 Starting\n", "14:08:48 epoch 0 of 1000 Train loss: 0.7022 Train f_score 0.5781 Xval loss: 0.7034 Xval f_score 0.5476\n", "14:08:48 epoch 50 of 1000 Train loss: 0.6887 Train f_score 0.6950 Xval loss: 0.6997 Xval f_score 0.5870\n", "14:08:48 epoch 100 of 1000 Train loss: 0.6724 Train f_score 0.6608 Xval loss: 0.7060 Xval f_score 0.5455\n", "14:08:48 epoch 150 of 1000 Train loss: 0.6521 Train f_score 0.6761 Xval loss: 0.7177 Xval f_score 0.5217\n", "14:08:49 epoch 200 of 1000 Train loss: 0.6330 Train f_score 0.6884 Xval loss: 0.7333 Xval f_score 0.5143\n", "14:08:49 epoch 250 of 1000 Train loss: 0.6167 Train f_score 0.6948 Xval loss: 0.7464 Xval f_score 0.5278\n", "14:08:49 epoch 300 of 1000 Train loss: 0.6016 Train f_score 0.6986 Xval loss: 0.7613 Xval f_score 0.5205\n", "Best Xval loss epoch 50, value 0.699721\n", "NN units 8\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.724, Train F1 0.678\n", "[[91 46]\n", " [12 61]]\n", "Final Xval Accuracy 0.577, Xval F1 0.516\n", "[[25 19]\n", " [11 16]]\n", "14:08:49 Starting\n", "14:08:50 epoch 0 of 1000 Train loss: 0.7212 Train f_score 0.5590 Xval loss: 0.7218 Xval f_score 0.5333\n", "14:08:51 epoch 50 of 1000 Train loss: 0.6977 Train f_score 0.6711 Xval loss: 0.6996 Xval f_score 0.6465\n", "14:08:51 epoch 100 of 1000 Train loss: 0.6883 Train f_score 0.6642 Xval loss: 0.6959 Xval f_score 0.5641\n", "14:08:51 epoch 150 of 1000 Train loss: 0.6788 Train f_score 0.6667 Xval loss: 0.6966 Xval f_score 0.5352\n", "14:08:51 epoch 200 of 1000 Train loss: 0.6676 Train f_score 0.6638 Xval loss: 0.6978 Xval f_score 0.5634\n", "14:08:51 epoch 250 of 1000 Train loss: 0.6573 Train f_score 0.6696 Xval loss: 0.7032 Xval f_score 0.5714\n", "14:08:51 epoch 300 of 1000 Train loss: 0.6492 Train f_score 0.6696 Xval loss: 0.7142 Xval f_score 0.5714\n", "14:08:51 epoch 350 of 1000 Train loss: 0.6440 Train f_score 0.6547 Xval loss: 0.7217 Xval f_score 0.5507\n", 
"14:08:51 epoch 400 of 1000 Train loss: 0.6393 Train f_score 0.6484 Xval loss: 0.7293 Xval f_score 0.5507\n", "14:08:51 epoch 450 of 1000 Train loss: 0.6356 Train f_score 0.6484 Xval loss: 0.7390 Xval f_score 0.5507\n", "14:08:52 epoch 500 of 1000 Train loss: 0.6321 Train f_score 0.6457 Xval loss: 0.7441 Xval f_score 0.5634\n", "14:08:52 epoch 550 of 1000 Train loss: 0.6288 Train f_score 0.6400 Xval loss: 0.7511 Xval f_score 0.5429\n", "14:08:52 epoch 600 of 1000 Train loss: 0.6254 Train f_score 0.6368 Xval loss: 0.7595 Xval f_score 0.5231\n", "Best Xval loss epoch 91, value 0.695768\n", "NN units 8\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.681, Train F1 0.630\n", "[[86 50]\n", " [17 57]]\n", "Final Xval Accuracy 0.592, Xval F1 0.431\n", "[[31 24]\n", " [ 5 11]]\n", "14:08:52 Starting\n", "14:08:53 epoch 0 of 1000 Train loss: 0.6943 Train f_score 0.5956 Xval loss: 0.6951 Xval f_score 0.5652\n", "14:08:53 epoch 50 of 1000 Train loss: 0.6647 Train f_score 0.6750 Xval loss: 0.6947 Xval f_score 0.5600\n", "14:08:53 epoch 100 of 1000 Train loss: 0.6145 Train f_score 0.6758 Xval loss: 0.7026 Xval f_score 0.5217\n", "14:08:53 epoch 150 of 1000 Train loss: 0.5689 Train f_score 0.7306 Xval loss: 0.7219 Xval f_score 0.5217\n", "14:08:53 epoch 200 of 1000 Train loss: 0.5270 Train f_score 0.7558 Xval loss: 0.7554 Xval f_score 0.5143\n", "Best Xval loss epoch 12, value 0.694608\n", "NN units 16\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.776, Train F1 0.751\n", "[[92 36]\n", " [11 71]]\n", "Final Xval Accuracy 0.563, Xval F1 0.523\n", "[[23 18]\n", " [13 17]]\n", "14:08:54 Starting\n", "14:08:55 epoch 0 of 1000 Train loss: 0.6951 Train f_score 0.5776 Xval loss: 0.6938 Xval f_score 0.5570\n", "14:08:55 epoch 50 of 1000 Train loss: 0.6640 Train f_score 0.6725 Xval loss: 0.6993 Xval f_score 0.5128\n", "14:08:55 epoch 100 of 1000 Train loss: 0.6223 Train f_score 0.7256 Xval loss: 0.7221 Xval f_score 0.4722\n", "14:08:55 epoch 150 of 1000 Train loss: 0.5776 Train f_score 0.7465 Xval loss: 0.7493 Xval f_score 0.5143\n", "Best Xval loss epoch 16, value 0.692968\n", "NN units 16\n", "Reg_penalty 0.00010000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.762, Train F1 0.775\n", "[[74 21]\n", " [29 86]]\n", "Final Xval Accuracy 0.549, Xval F1 0.686\n", "[[ 4 0]\n", " [32 35]]\n", "14:08:56 Starting\n", "14:08:57 epoch 0 of 1000 Train loss: 0.6995 Train f_score 0.0000 Xval loss: 0.6980 Xval f_score 0.1081\n", "14:08:57 epoch 50 of 1000 Train loss: 0.6800 Train f_score 0.6847 Xval loss: 0.6939 Xval f_score 0.4675\n", "14:08:57 epoch 100 of 1000 Train loss: 0.6353 Train f_score 0.7265 Xval loss: 0.7004 Xval f_score 0.5143\n", "14:08:57 epoch 150 of 1000 Train loss: 0.5901 Train f_score 0.7455 Xval loss: 0.7120 Xval f_score 0.5143\n", "14:08:57 epoch 200 of 1000 Train loss: 0.5517 Train f_score 0.7685 Xval loss: 0.7341 Xval f_score 0.5000\n", "14:08:57 epoch 250 of 1000 Train loss: 0.5179 Train f_score 0.7850 Xval loss: 0.7574 Xval f_score 0.5294\n", "Best Xval loss epoch 50, value 0.693860\n", "NN units 16\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.810, Train F1 0.821\n", "[[78 15]\n", " [25 92]]\n", "Final Xval Accuracy 0.577, Xval F1 0.500\n", "[[26 20]\n", " [10 15]]\n", "14:08:58 Starting\n", "14:08:59 epoch 0 of 1000 Train loss: 0.7113 Train f_score 0.4375 Xval loss: 0.7103 Xval f_score 0.5312\n", "14:08:59 epoch 50 of 1000 Train 
loss: 0.6901 Train f_score 0.6884 Xval loss: 0.7011 Xval f_score 0.6237\n", "14:08:59 epoch 100 of 1000 Train loss: 0.6644 Train f_score 0.6870 Xval loss: 0.7072 Xval f_score 0.5429\n", "14:08:59 epoch 150 of 1000 Train loss: 0.6340 Train f_score 0.6698 Xval loss: 0.7225 Xval f_score 0.5588\n", "14:08:59 epoch 200 of 1000 Train loss: 0.6065 Train f_score 0.7042 Xval loss: 0.7417 Xval f_score 0.5882\n", "14:08:59 epoch 250 of 1000 Train loss: 0.5813 Train f_score 0.7230 Xval loss: 0.7694 Xval f_score 0.5915\n", "Best Xval loss epoch 55, value 0.700980\n", "NN units 16\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.733, Train F1 0.689\n", "[[92 45]\n", " [11 62]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:09:00 Starting\n", "14:09:01 epoch 0 of 1000 Train loss: 0.7477 Train f_score 0.5258 Xval loss: 0.7469 Xval f_score 0.5070\n", "14:09:01 epoch 50 of 1000 Train loss: 0.7016 Train f_score 0.6787 Xval loss: 0.7072 Xval f_score 0.5814\n", "14:09:01 epoch 100 of 1000 Train loss: 0.6858 Train f_score 0.6805 Xval loss: 0.7027 Xval f_score 0.5000\n", "14:09:01 epoch 150 of 1000 Train loss: 0.6734 Train f_score 0.6900 Xval loss: 0.7099 Xval f_score 0.5152\n", "14:09:01 epoch 200 of 1000 Train loss: 0.6616 Train f_score 0.6847 Xval loss: 0.7158 Xval f_score 0.5588\n", "14:09:01 epoch 250 of 1000 Train loss: 0.6507 Train f_score 0.7032 Xval loss: 0.7175 Xval f_score 0.5312\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:09:02 epoch 300 of 1000 Train loss: 0.6409 Train f_score 0.7032 Xval loss: 0.7182 Xval f_score 0.5455\n", "14:09:02 epoch 350 of 1000 Train loss: 0.6323 Train f_score 0.6972 Xval loss: 0.7264 Xval f_score 0.5538\n", "14:09:02 epoch 400 of 1000 Train loss: 0.6256 Train f_score 0.6941 Xval loss: 0.7363 Xval f_score 0.5455\n", "14:09:02 epoch 450 of 1000 Train loss: 0.6199 Train f_score 0.7005 Xval loss: 0.7457 Xval f_score 0.5846\n", "14:09:02 epoch 500 of 1000 Train loss: 0.6148 Train f_score 0.7005 Xval loss: 0.7567 Xval f_score 0.5846\n", "14:09:02 epoch 550 of 1000 Train loss: 0.6092 Train f_score 0.6792 Xval loss: 0.7710 Xval f_score 0.5672\n", "Best Xval loss epoch 83, value 0.701817\n", "NN units 16\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.710, Train F1 0.674\n", "[[86 44]\n", " [17 63]]\n", "Final Xval Accuracy 0.620, Xval F1 0.571\n", "[[26 17]\n", " [10 18]]\n", "14:09:03 Starting\n", "14:09:04 epoch 0 of 1000 Train loss: 0.6936 Train f_score 0.6731 Xval loss: 0.6916 Xval f_score 0.6476\n", "14:09:04 epoch 50 of 1000 Train loss: 0.6436 Train f_score 0.6698 Xval loss: 0.6962 Xval f_score 0.4687\n", "14:09:04 epoch 100 of 1000 Train loss: 0.5804 Train f_score 0.6981 Xval loss: 0.7200 Xval f_score 0.5000\n", "Best Xval loss epoch 22, value 0.689963\n", "NN units 32\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.757, Train F1 0.787\n", "[[65 13]\n", " [38 94]]\n", "Final Xval Accuracy 0.563, Xval F1 0.608\n", "[[16 11]\n", " [20 24]]\n", "14:09:05 Starting\n", "14:09:06 epoch 0 of 1000 Train loss: 0.6963 Train f_score 0.4787 Xval loss: 0.6986 Xval f_score 0.4638\n", "14:09:06 epoch 50 of 1000 Train loss: 0.6475 Train f_score 0.6957 Xval loss: 0.7026 Xval f_score 0.5600\n", "14:09:06 epoch 100 of 1000 Train loss: 0.5821 Train f_score 0.7163 Xval loss: 0.7195 Xval f_score 0.5373\n", "Best Xval loss epoch 13, value 0.697802\n", "NN units 32\n", "Reg_penalty 0.00010000\n", 
"Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.767, Train F1 0.778\n", "[[75 21]\n", " [28 86]]\n", "Final Xval Accuracy 0.577, Xval F1 0.545\n", "[[23 17]\n", " [13 18]]\n", "14:09:07 Starting\n", "14:09:08 epoch 0 of 1000 Train loss: 0.7013 Train f_score 0.6644 Xval loss: 0.7003 Xval f_score 0.6400\n", "14:09:08 epoch 50 of 1000 Train loss: 0.6569 Train f_score 0.6726 Xval loss: 0.7002 Xval f_score 0.5556\n", "14:09:08 epoch 100 of 1000 Train loss: 0.6021 Train f_score 0.7290 Xval loss: 0.7078 Xval f_score 0.5352\n", "14:09:08 epoch 150 of 1000 Train loss: 0.5457 Train f_score 0.7512 Xval loss: 0.7474 Xval f_score 0.5429\n", "Best Xval loss epoch 20, value 0.697823\n", "NN units 32\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.781, Train F1 0.796\n", "[[74 17]\n", " [29 90]]\n", "Final Xval Accuracy 0.606, Xval F1 0.548\n", "[[26 18]\n", " [10 17]]\n", "14:09:09 Starting\n", "14:09:10 epoch 0 of 1000 Train loss: 0.7274 Train f_score 0.6245 Xval loss: 0.7253 Xval f_score 0.6522\n", "14:09:10 epoch 50 of 1000 Train loss: 0.6809 Train f_score 0.7064 Xval loss: 0.7107 Xval f_score 0.5333\n", "14:09:10 epoch 100 of 1000 Train loss: 0.6466 Train f_score 0.6933 Xval loss: 0.7161 Xval f_score 0.5231\n", "14:09:10 epoch 150 of 1000 Train loss: 0.6141 Train f_score 0.7130 Xval loss: 0.7241 Xval f_score 0.5797\n", "14:09:11 epoch 200 of 1000 Train loss: 0.5841 Train f_score 0.7383 Xval loss: 0.7452 Xval f_score 0.5373\n", "14:09:11 epoch 250 of 1000 Train loss: 0.5556 Train f_score 0.7642 Xval loss: 0.7705 Xval f_score 0.5000\n", "Best Xval loss epoch 48, value 0.710627\n", "NN units 32\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.790, Train F1 0.809\n", "[[73 14]\n", " [30 93]]\n", "Final Xval Accuracy 0.592, Xval F1 0.508\n", "[[27 20]\n", " [ 9 15]]\n", "14:09:11 Starting\n", "14:09:13 epoch 0 of 1000 Train loss: 0.8015 Train f_score 0.5022 Xval loss: 0.7977 Xval f_score 0.5476\n", "14:09:13 epoch 50 of 1000 Train loss: 0.7100 Train f_score 0.6757 Xval loss: 0.7159 Xval f_score 0.6122\n", "14:09:13 epoch 100 of 1000 Train loss: 0.6870 Train f_score 0.6667 Xval loss: 0.7030 Xval f_score 0.5600\n", "14:09:13 epoch 150 of 1000 Train loss: 0.6742 Train f_score 0.6610 Xval loss: 0.7099 Xval f_score 0.5833\n", "14:09:14 epoch 200 of 1000 Train loss: 0.6624 Train f_score 0.6696 Xval loss: 0.7177 Xval f_score 0.5714\n", "14:09:14 epoch 250 of 1000 Train loss: 0.6524 Train f_score 0.6787 Xval loss: 0.7219 Xval f_score 0.5455\n", "14:09:14 epoch 300 of 1000 Train loss: 0.6441 Train f_score 0.6544 Xval loss: 0.7290 Xval f_score 0.5455\n", "14:09:14 epoch 350 of 1000 Train loss: 0.6367 Train f_score 0.6728 Xval loss: 0.7346 Xval f_score 0.5373\n", "14:09:14 epoch 400 of 1000 Train loss: 0.6259 Train f_score 0.6820 Xval loss: 0.7332 Xval f_score 0.5373\n", "14:09:14 epoch 450 of 1000 Train loss: 0.6157 Train f_score 0.6981 Xval loss: 0.7447 Xval f_score 0.5152\n", "14:09:14 epoch 500 of 1000 Train loss: 0.6070 Train f_score 0.7150 Xval loss: 0.7558 Xval f_score 0.5152\n", "14:09:14 epoch 550 of 1000 Train loss: 0.5979 Train f_score 0.7308 Xval loss: 0.7664 Xval f_score 0.5455\n", "Best Xval loss epoch 96, value 0.702993\n", "NN units 32\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation relu\n", "Final Train Accuracy 0.767, Train F1 0.778\n", "[[75 21]\n", " [28 86]]\n", "Final Xval Accuracy 0.592, Xval F1 0.525\n", "[[26 19]\n", " [10 16]]\n", "14:09:15 Starting\n", "14:09:16 
epoch 0 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:09:16 epoch 50 of 1000 Train loss: 0.6906 Train f_score 0.6710 Xval loss: 0.6937 Xval f_score 0.6600\n", "14:09:16 epoch 100 of 1000 Train loss: 0.6833 Train f_score 0.6286 Xval loss: 0.6926 Xval f_score 0.5952\n", "14:09:17 epoch 150 of 1000 Train loss: 0.6742 Train f_score 0.6114 Xval loss: 0.6924 Xval f_score 0.4928\n", "14:09:17 epoch 200 of 1000 Train loss: 0.6647 Train f_score 0.6514 Xval loss: 0.6928 Xval f_score 0.5075\n", "14:09:17 epoch 250 of 1000 Train loss: 0.6551 Train f_score 0.6787 Xval loss: 0.6946 Xval f_score 0.4923\n", "14:09:17 epoch 300 of 1000 Train loss: 0.6463 Train f_score 0.6789 Xval loss: 0.6971 Xval f_score 0.4194\n", "14:09:17 epoch 350 of 1000 Train loss: 0.6389 Train f_score 0.6789 Xval loss: 0.6991 Xval f_score 0.4516\n", "14:09:17 epoch 400 of 1000 Train loss: 0.6328 Train f_score 0.6852 Xval loss: 0.7010 Xval f_score 0.4516\n", "14:09:17 epoch 450 of 1000 Train loss: 0.6276 Train f_score 0.6852 Xval loss: 0.7030 Xval f_score 0.4516\n", "14:09:17 epoch 500 of 1000 Train loss: 0.6231 Train f_score 0.6852 Xval loss: 0.7051 Xval f_score 0.4516\n", "14:09:17 epoch 550 of 1000 Train loss: 0.6189 Train f_score 0.6852 Xval loss: 0.7073 Xval f_score 0.4516\n", "14:09:18 epoch 600 of 1000 Train loss: 0.6149 Train f_score 0.6912 Xval loss: 0.7102 Xval f_score 0.4516\n", "14:09:18 epoch 650 of 1000 Train loss: 0.6103 Train f_score 0.6944 Xval loss: 0.7145 Xval f_score 0.4516\n", "14:09:18 epoch 700 of 1000 Train loss: 0.6052 Train f_score 0.6944 Xval loss: 0.7180 Xval f_score 0.4516\n", "14:09:18 epoch 750 of 1000 Train loss: 0.5979 Train f_score 0.7000 Xval loss: 0.7199 Xval f_score 0.5079\n", "14:09:18 epoch 800 of 1000 Train loss: 0.5896 Train f_score 0.7027 Xval loss: 0.7214 Xval f_score 0.5079\n", "14:09:18 epoch 850 of 1000 Train loss: 0.5820 Train f_score 0.7027 Xval loss: 0.7216 Xval f_score 0.5000\n", "14:09:18 epoch 900 of 1000 Train loss: 0.5746 Train f_score 0.7085 Xval loss: 0.7212 Xval f_score 0.4615\n", "14:09:18 epoch 950 of 1000 Train loss: 0.5670 Train f_score 0.7149 Xval loss: 0.7223 Xval f_score 0.4615\n", "Best Xval loss epoch 139, value 0.692360\n", "NN units 4\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.752, Train F1 0.717\n", "[[92 41]\n", " [11 66]]\n", "Final Xval Accuracy 0.606, Xval F1 0.659\n", "[[16 8]\n", " [20 27]]\n", "14:09:19 Starting\n", "14:09:20 epoch 0 of 1000 Train loss: 0.6941 Train f_score 0.0000 Xval loss: 0.6942 Xval f_score 0.0000\n", "14:09:20 epoch 50 of 1000 Train loss: 0.6904 Train f_score 0.6775 Xval loss: 0.6941 Xval f_score 0.6327\n", "14:09:20 epoch 100 of 1000 Train loss: 0.6844 Train f_score 0.6694 Xval loss: 0.6938 Xval f_score 0.5679\n", "14:09:21 epoch 150 of 1000 Train loss: 0.6763 Train f_score 0.6278 Xval loss: 0.6941 Xval f_score 0.5135\n", "14:09:21 epoch 200 of 1000 Train loss: 0.6676 Train f_score 0.6727 Xval loss: 0.6949 Xval f_score 0.4928\n", "14:09:21 epoch 250 of 1000 Train loss: 0.6592 Train f_score 0.6697 Xval loss: 0.6963 Xval f_score 0.5152\n", "14:09:21 epoch 300 of 1000 Train loss: 0.6514 Train f_score 0.6789 Xval loss: 0.6987 Xval f_score 0.5000\n", "14:09:21 epoch 350 of 1000 Train loss: 0.6447 Train f_score 0.6789 Xval loss: 0.7021 Xval f_score 0.4762\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:09:21 epoch 400 of 1000 Train loss: 0.6385 Train f_score 0.6667 Xval loss: 0.7053 Xval f_score 0.4444\n", "14:09:21 
epoch 450 of 1000 Train loss: 0.6323 Train f_score 0.6789 Xval loss: 0.7069 Xval f_score 0.4444\n", "14:09:21 epoch 500 of 1000 Train loss: 0.6257 Train f_score 0.6758 Xval loss: 0.7064 Xval f_score 0.4687\n", "14:09:21 epoch 550 of 1000 Train loss: 0.6185 Train f_score 0.6697 Xval loss: 0.7048 Xval f_score 0.4923\n", "14:09:22 epoch 600 of 1000 Train loss: 0.6110 Train f_score 0.6759 Xval loss: 0.7045 Xval f_score 0.4848\n", "14:09:22 epoch 650 of 1000 Train loss: 0.6034 Train f_score 0.6667 Xval loss: 0.7069 Xval f_score 0.4923\n", "14:09:22 epoch 700 of 1000 Train loss: 0.5962 Train f_score 0.6542 Xval loss: 0.7095 Xval f_score 0.4923\n", "14:09:22 epoch 750 of 1000 Train loss: 0.5896 Train f_score 0.6509 Xval loss: 0.7111 Xval f_score 0.4762\n", "14:09:22 epoch 800 of 1000 Train loss: 0.5834 Train f_score 0.6509 Xval loss: 0.7126 Xval f_score 0.4516\n", "14:09:22 epoch 850 of 1000 Train loss: 0.5777 Train f_score 0.6573 Xval loss: 0.7139 Xval f_score 0.4516\n", "14:09:22 epoch 900 of 1000 Train loss: 0.5722 Train f_score 0.6635 Xval loss: 0.7146 Xval f_score 0.4516\n", "14:09:22 epoch 950 of 1000 Train loss: 0.5666 Train f_score 0.6604 Xval loss: 0.7154 Xval f_score 0.4516\n", "Best Xval loss epoch 99, value 0.693756\n", "NN units 4\n", "Reg_penalty 0.00010000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.705, Train F1 0.622\n", "[[97 56]\n", " [ 6 51]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:09:23 Starting\n", "14:09:24 epoch 0 of 1000 Train loss: 0.6951 Train f_score 0.0000 Xval loss: 0.6944 Xval f_score 0.0000\n", "14:09:24 epoch 50 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:09:25 epoch 100 of 1000 Train loss: 0.6916 Train f_score 0.6795 Xval loss: 0.6936 Xval f_score 0.6275\n", "14:09:25 epoch 150 of 1000 Train loss: 0.6879 Train f_score 0.6592 Xval loss: 0.6932 Xval f_score 0.5647\n", "14:09:25 epoch 200 of 1000 Train loss: 0.6824 Train f_score 0.6831 Xval loss: 0.6935 Xval f_score 0.5432\n", "14:09:25 epoch 250 of 1000 Train loss: 0.6764 Train f_score 0.6608 Xval loss: 0.6947 Xval f_score 0.5600\n", "14:09:25 epoch 300 of 1000 Train loss: 0.6705 Train f_score 0.6636 Xval loss: 0.6962 Xval f_score 0.5352\n", "14:09:25 epoch 350 of 1000 Train loss: 0.6651 Train f_score 0.6698 Xval loss: 0.6972 Xval f_score 0.5429\n", "14:09:25 epoch 400 of 1000 Train loss: 0.6602 Train f_score 0.6698 Xval loss: 0.6979 Xval f_score 0.5000\n", "14:09:25 epoch 450 of 1000 Train loss: 0.6560 Train f_score 0.6698 Xval loss: 0.6989 Xval f_score 0.5000\n", "14:09:25 epoch 500 of 1000 Train loss: 0.6523 Train f_score 0.6794 Xval loss: 0.6997 Xval f_score 0.5075\n", "14:09:26 epoch 550 of 1000 Train loss: 0.6493 Train f_score 0.6794 Xval loss: 0.7002 Xval f_score 0.5294\n", "14:09:26 epoch 600 of 1000 Train loss: 0.6464 Train f_score 0.6857 Xval loss: 0.6998 Xval f_score 0.5294\n", "14:09:26 epoch 650 of 1000 Train loss: 0.6436 Train f_score 0.6857 Xval loss: 0.6989 Xval f_score 0.5455\n", "14:09:26 epoch 700 of 1000 Train loss: 0.6410 Train f_score 0.6825 Xval loss: 0.6996 Xval f_score 0.5455\n", "14:09:26 epoch 750 of 1000 Train loss: 0.6384 Train f_score 0.6854 Xval loss: 0.7005 Xval f_score 0.5231\n", "14:09:26 epoch 800 of 1000 Train loss: 0.6352 Train f_score 0.6884 Xval loss: 0.7007 Xval f_score 0.5000\n", "14:09:26 epoch 850 of 1000 Train loss: 0.6320 Train f_score 0.6791 Xval loss: 0.7018 Xval f_score 0.5000\n", "14:09:26 epoch 900 of 1000 Train loss: 0.6293 Train f_score 0.6852 
Xval loss: 0.7032 Xval f_score 0.5000\n", "14:09:26 epoch 950 of 1000 Train loss: 0.6268 Train f_score 0.6820 Xval loss: 0.7052 Xval f_score 0.5000\n", "Best Xval loss epoch 157, value 0.693145\n", "NN units 4\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.690, Train F1 0.649\n", "[[85 47]\n", " [18 60]]\n", "Final Xval Accuracy 0.606, Xval F1 0.533\n", "[[27 19]\n", " [ 9 16]]\n", "14:09:27 Starting\n", "14:09:29 epoch 0 of 1000 Train loss: 0.6973 Train f_score 0.6751 Xval loss: 0.6982 Xval f_score 0.6604\n", "14:09:29 epoch 50 of 1000 Train loss: 0.6934 Train f_score 0.6794 Xval loss: 0.6950 Xval f_score 0.6604\n", "14:09:29 epoch 100 of 1000 Train loss: 0.6916 Train f_score 0.6818 Xval loss: 0.6945 Xval f_score 0.6471\n", "14:09:29 epoch 150 of 1000 Train loss: 0.6897 Train f_score 0.6812 Xval loss: 0.6942 Xval f_score 0.5806\n", "14:09:29 epoch 200 of 1000 Train loss: 0.6873 Train f_score 0.6641 Xval loss: 0.6940 Xval f_score 0.5882\n", "14:09:29 epoch 250 of 1000 Train loss: 0.6845 Train f_score 0.6640 Xval loss: 0.6936 Xval f_score 0.6076\n", "14:09:29 epoch 300 of 1000 Train loss: 0.6816 Train f_score 0.6724 Xval loss: 0.6930 Xval f_score 0.5946\n", "14:09:29 epoch 350 of 1000 Train loss: 0.6786 Train f_score 0.6754 Xval loss: 0.6917 Xval f_score 0.5833\n", "14:09:29 epoch 400 of 1000 Train loss: 0.6754 Train f_score 0.6875 Xval loss: 0.6898 Xval f_score 0.5634\n", "14:09:30 epoch 450 of 1000 Train loss: 0.6723 Train f_score 0.6906 Xval loss: 0.6885 Xval f_score 0.5714\n", "14:09:30 epoch 500 of 1000 Train loss: 0.6693 Train f_score 0.6849 Xval loss: 0.6876 Xval f_score 0.5231\n", "14:09:30 epoch 550 of 1000 Train loss: 0.6664 Train f_score 0.6912 Xval loss: 0.6870 Xval f_score 0.5000\n", "14:09:30 epoch 600 of 1000 Train loss: 0.6637 Train f_score 0.6884 Xval loss: 0.6865 Xval f_score 0.5000\n", "14:09:30 epoch 650 of 1000 Train loss: 0.6611 Train f_score 0.6854 Xval loss: 0.6862 Xval f_score 0.5000\n", "14:09:30 epoch 700 of 1000 Train loss: 0.6586 Train f_score 0.6887 Xval loss: 0.6859 Xval f_score 0.5231\n", "14:09:30 epoch 750 of 1000 Train loss: 0.6563 Train f_score 0.6887 Xval loss: 0.6858 Xval f_score 0.5231\n", "14:09:30 epoch 800 of 1000 Train loss: 0.6542 Train f_score 0.6887 Xval loss: 0.6859 Xval f_score 0.5231\n", "14:09:30 epoch 850 of 1000 Train loss: 0.6522 Train f_score 0.6952 Xval loss: 0.6860 Xval f_score 0.5231\n", "14:09:31 epoch 900 of 1000 Train loss: 0.6503 Train f_score 0.6952 Xval loss: 0.6862 Xval f_score 0.5231\n", "14:09:31 epoch 950 of 1000 Train loss: 0.6486 Train f_score 0.6952 Xval loss: 0.6864 Xval f_score 0.5231\n", "Best Xval loss epoch 755, value 0.685822\n", "NN units 4\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.710, Train F1 0.702\n", "[[77 35]\n", " [26 72]]\n", "Final Xval Accuracy 0.620, Xval F1 0.491\n", "[[31 22]\n", " [ 5 13]]\n", "14:09:32 Starting\n", "14:09:33 epoch 0 of 1000 Train loss: 0.7058 Train f_score 0.6751 Xval loss: 0.7057 Xval f_score 0.6604\n", "14:09:34 epoch 50 of 1000 Train loss: 0.6951 Train f_score 0.6751 Xval loss: 0.6956 Xval f_score 0.6604\n", "14:09:34 epoch 100 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:34 epoch 150 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:34 epoch 200 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:34 epoch 250 of 1000 Train loss: 
0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:34 epoch 300 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:34 epoch 350 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:34 epoch 400 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:34 epoch 450 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 500 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 700 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 750 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 850 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:35 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:09:36 epoch 950 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Best Xval loss epoch 971, value 0.693628\n", "NN units 4\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.510, Train F1 0.675\n", "[[ 0 0]\n", " [103 107]]\n", "Final Xval Accuracy 0.521, Xval F1 0.056\n", "[[36 34]\n", " [ 0 1]]\n", "14:09:36 Starting\n", "14:09:38 epoch 0 of 1000 Train loss: 0.6936 Train f_score 0.0000 Xval loss: 0.6931 Xval f_score 0.0000\n", "14:09:38 epoch 50 of 1000 Train loss: 0.6888 Train f_score 0.6689 Xval loss: 0.6929 Xval f_score 0.6465\n", "14:09:38 epoch 100 of 1000 Train loss: 0.6770 Train f_score 0.6531 Xval loss: 0.6915 Xval f_score 0.5610\n", "14:09:38 epoch 150 of 1000 Train loss: 0.6628 Train f_score 0.6518 Xval loss: 0.6925 Xval f_score 0.4706\n", "14:09:38 epoch 200 of 1000 Train loss: 0.6504 Train f_score 0.6816 Xval loss: 0.6961 Xval f_score 0.4848\n", "14:09:38 epoch 250 of 1000 Train loss: 0.6407 Train f_score 0.6758 Xval loss: 0.7006 Xval f_score 0.4687\n", "14:09:38 epoch 300 of 1000 Train loss: 0.6329 Train f_score 0.6667 Xval loss: 0.7042 Xval f_score 0.4516\n", "14:09:38 epoch 350 of 1000 Train loss: 0.6255 Train f_score 0.6697 Xval loss: 0.7056 Xval f_score 0.4762\n", "14:09:38 epoch 400 of 1000 Train loss: 0.6179 Train f_score 0.6818 Xval loss: 0.7065 Xval f_score 0.4516\n", "14:09:39 epoch 450 of 1000 Train loss: 0.6098 Train f_score 0.6818 Xval loss: 0.7079 Xval f_score 0.4516\n", "14:09:39 epoch 500 of 1000 Train loss: 0.5995 Train f_score 0.7000 Xval loss: 0.7080 Xval f_score 0.4516\n", "14:09:39 epoch 550 of 1000 Train loss: 0.5882 Train f_score 0.7037 Xval loss: 0.7094 Xval f_score 0.4687\n", "14:09:39 epoch 600 of 1000 Train loss: 0.5755 Train f_score 0.7070 Xval loss: 0.7099 Xval f_score 0.4615\n", "14:09:39 epoch 650 of 1000 Train loss: 0.5616 Train f_score 0.7163 Xval loss: 0.7127 Xval f_score 0.4375\n", "14:09:39 epoch 700 of 1000 Train loss: 
0.5471 Train f_score 0.7170 Xval loss: 0.7181 Xval f_score 0.4545\n", "14:09:39 epoch 750 of 1000 Train loss: 0.5326 Train f_score 0.7109 Xval loss: 0.7243 Xval f_score 0.4545\n", "14:09:39 epoch 800 of 1000 Train loss: 0.5193 Train f_score 0.7143 Xval loss: 0.7319 Xval f_score 0.4776\n", "14:09:39 epoch 850 of 1000 Train loss: 0.5066 Train f_score 0.7299 Xval loss: 0.7406 Xval f_score 0.4776\n", "14:09:40 epoch 900 of 1000 Train loss: 0.4942 Train f_score 0.7393 Xval loss: 0.7492 Xval f_score 0.4776\n", "14:09:40 epoch 950 of 1000 Train loss: 0.4820 Train f_score 0.7678 Xval loss: 0.7570 Xval f_score 0.4545\n", "Best Xval loss epoch 108, value 0.691422\n", "NN units 8\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.795, Train F1 0.807\n", "[[77 17]\n", " [26 90]]\n", "Final Xval Accuracy 0.620, Xval F1 0.675\n", "[[16 7]\n", " [20 28]]\n", "14:09:41 Starting\n", "14:09:42 epoch 0 of 1000 Train loss: 0.6956 Train f_score 0.0000 Xval loss: 0.6939 Xval f_score 0.0000\n", "14:09:42 epoch 50 of 1000 Train loss: 0.6898 Train f_score 0.6712 Xval loss: 0.6930 Xval f_score 0.6263\n", "14:09:42 epoch 100 of 1000 Train loss: 0.6816 Train f_score 0.6234 Xval loss: 0.6920 Xval f_score 0.5333\n", "14:09:42 epoch 150 of 1000 Train loss: 0.6703 Train f_score 0.6359 Xval loss: 0.6929 Xval f_score 0.5075\n", "14:09:42 epoch 200 of 1000 Train loss: 0.6586 Train f_score 0.6575 Xval loss: 0.6969 Xval f_score 0.5294\n", "14:09:42 epoch 250 of 1000 Train loss: 0.6474 Train f_score 0.6547 Xval loss: 0.7022 Xval f_score 0.5231\n", "14:09:42 epoch 300 of 1000 Train loss: 0.6372 Train f_score 0.6636 Xval loss: 0.7066 Xval f_score 0.5000\n", "14:09:43 epoch 350 of 1000 Train loss: 0.6274 Train f_score 0.6847 Xval loss: 0.7094 Xval f_score 0.5231\n", "14:09:43 epoch 400 of 1000 Train loss: 0.6174 Train f_score 0.6818 Xval loss: 0.7125 Xval f_score 0.5000\n", "14:09:43 epoch 450 of 1000 Train loss: 0.6063 Train f_score 0.6820 Xval loss: 0.7187 Xval f_score 0.4923\n", "14:09:43 epoch 500 of 1000 Train loss: 0.5950 Train f_score 0.7005 Xval loss: 0.7247 Xval f_score 0.5000\n", "14:09:43 epoch 550 of 1000 Train loss: 0.5833 Train f_score 0.6977 Xval loss: 0.7323 Xval f_score 0.5000\n", "14:09:43 epoch 600 of 1000 Train loss: 0.5716 Train f_score 0.7005 Xval loss: 0.7389 Xval f_score 0.4776\n", "14:09:43 epoch 650 of 1000 Train loss: 0.5596 Train f_score 0.7103 Xval loss: 0.7446 Xval f_score 0.4776\n", "14:09:43 epoch 700 of 1000 Train loss: 0.5481 Train f_score 0.7196 Xval loss: 0.7495 Xval f_score 0.4776\n", "14:09:44 epoch 750 of 1000 Train loss: 0.5371 Train f_score 0.7196 Xval loss: 0.7528 Xval f_score 0.5000\n", "14:09:44 epoch 800 of 1000 Train loss: 0.5263 Train f_score 0.7407 Xval loss: 0.7548 Xval f_score 0.5429\n", "14:09:44 epoch 850 of 1000 Train loss: 0.5158 Train f_score 0.7477 Xval loss: 0.7561 Xval f_score 0.5429\n", "14:09:44 epoch 900 of 1000 Train loss: 0.5042 Train f_score 0.7742 Xval loss: 0.7551 Xval f_score 0.5429\n", "14:09:44 epoch 950 of 1000 Train loss: 0.4908 Train f_score 0.8000 Xval loss: 0.7529 Xval f_score 0.5507\n", "Best Xval loss epoch 109, value 0.691996\n", "NN units 8\n", "Reg_penalty 0.00010000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.819, Train F1 0.821\n", "[[85 20]\n", " [18 87]]\n", "Final Xval Accuracy 0.606, Xval F1 0.667\n", "[[15 7]\n", " [21 28]]\n", "14:09:45 Starting\n", "14:09:46 epoch 0 of 1000 Train loss: 0.6954 Train f_score 0.6751 Xval loss: 0.6961 Xval f_score 0.6604\n", "14:09:46 
epoch 50 of 1000 Train loss: 0.6898 Train f_score 0.6667 Xval loss: 0.6938 Xval f_score 0.5652\n", "14:09:47 epoch 100 of 1000 Train loss: 0.6807 Train f_score 0.6612 Xval loss: 0.6937 Xval f_score 0.5570\n", "14:09:47 epoch 150 of 1000 Train loss: 0.6700 Train f_score 0.6547 Xval loss: 0.6965 Xval f_score 0.5867\n", "14:09:47 epoch 200 of 1000 Train loss: 0.6605 Train f_score 0.6514 Xval loss: 0.7001 Xval f_score 0.5507\n", "14:09:47 epoch 250 of 1000 Train loss: 0.6526 Train f_score 0.6607 Xval loss: 0.7053 Xval f_score 0.5672\n", "14:09:47 epoch 300 of 1000 Train loss: 0.6455 Train f_score 0.6667 Xval loss: 0.7129 Xval f_score 0.4923\n", "14:09:47 epoch 350 of 1000 Train loss: 0.6382 Train f_score 0.6878 Xval loss: 0.7195 Xval f_score 0.4923\n", "14:09:47 epoch 400 of 1000 Train loss: 0.6299 Train f_score 0.6818 Xval loss: 0.7256 Xval f_score 0.4687\n", "14:09:47 epoch 450 of 1000 Train loss: 0.6199 Train f_score 0.6728 Xval loss: 0.7339 Xval f_score 0.4848\n", "14:09:47 epoch 500 of 1000 Train loss: 0.6092 Train f_score 0.6884 Xval loss: 0.7435 Xval f_score 0.4848\n", "14:09:48 epoch 550 of 1000 Train loss: 0.5969 Train f_score 0.6852 Xval loss: 0.7518 Xval f_score 0.4776\n", "14:09:48 epoch 600 of 1000 Train loss: 0.5844 Train f_score 0.7075 Xval loss: 0.7594 Xval f_score 0.4545\n", "Best Xval loss epoch 76, value 0.693216\n", "NN units 8\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.719, Train F1 0.753\n", "[[61 17]\n", " [42 90]]\n", "Final Xval Accuracy 0.577, Xval F1 0.605\n", "[[18 12]\n", " [18 23]]\n", "14:09:49 Starting\n", "14:09:50 epoch 0 of 1000 Train loss: 0.7021 Train f_score 0.1053 Xval loss: 0.7019 Xval f_score 0.2400\n", "14:09:50 epoch 50 of 1000 Train loss: 0.6945 Train f_score 0.6751 Xval loss: 0.6952 Xval f_score 0.6604\n", "14:09:50 epoch 100 of 1000 Train loss: 0.6929 Train f_score 0.6772 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:09:50 epoch 150 of 1000 Train loss: 0.6917 Train f_score 0.6667 Xval loss: 0.6947 Xval f_score 0.6263\n", "14:09:50 epoch 200 of 1000 Train loss: 0.6893 Train f_score 0.6541 Xval loss: 0.6951 Xval f_score 0.5455\n", "14:09:51 epoch 250 of 1000 Train loss: 0.6862 Train f_score 0.6803 Xval loss: 0.6957 Xval f_score 0.5750\n", "14:09:51 epoch 300 of 1000 Train loss: 0.6827 Train f_score 0.6667 Xval loss: 0.6967 Xval f_score 0.5641\n", "14:09:51 epoch 350 of 1000 Train loss: 0.6792 Train f_score 0.6726 Xval loss: 0.6977 Xval f_score 0.5600\n", "14:09:51 epoch 400 of 1000 Train loss: 0.6759 Train f_score 0.6759 Xval loss: 0.6987 Xval f_score 0.5556\n", "14:09:51 epoch 450 of 1000 Train loss: 0.6728 Train f_score 0.6759 Xval loss: 0.6997 Xval f_score 0.5556\n", "14:09:51 epoch 500 of 1000 Train loss: 0.6700 Train f_score 0.6761 Xval loss: 0.7006 Xval f_score 0.5556\n", "14:09:51 epoch 550 of 1000 Train loss: 0.6674 Train f_score 0.6792 Xval loss: 0.7015 Xval f_score 0.5556\n", "14:09:51 epoch 600 of 1000 Train loss: 0.6650 Train f_score 0.6730 Xval loss: 0.7024 Xval f_score 0.5556\n", "14:09:52 epoch 650 of 1000 Train loss: 0.6629 Train f_score 0.6762 Xval loss: 0.7034 Xval f_score 0.5556\n", "14:09:52 epoch 700 of 1000 Train loss: 0.6609 Train f_score 0.6827 Xval loss: 0.7044 Xval f_score 0.5556\n", "14:09:52 epoch 750 of 1000 Train loss: 0.6591 Train f_score 0.6794 Xval loss: 0.7054 Xval f_score 0.5634\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:09:52 epoch 800 of 1000 Train loss: 0.6575 Train f_score 0.6890 Xval loss: 0.7065 Xval f_score 0.5634\n", "14:09:52 
epoch 850 of 1000 Train loss: 0.6560 Train f_score 0.6890 Xval loss: 0.7076 Xval f_score 0.5634\n", "14:09:52 epoch 900 of 1000 Train loss: 0.6546 Train f_score 0.6890 Xval loss: 0.7086 Xval f_score 0.5634\n", "14:09:52 epoch 950 of 1000 Train loss: 0.6535 Train f_score 0.6890 Xval loss: 0.7098 Xval f_score 0.5634\n", "Best Xval loss epoch 86, value 0.694210\n", "NN units 8\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.690, Train F1 0.689\n", "[[73 35]\n", " [30 72]]\n", "Final Xval Accuracy 0.620, Xval F1 0.542\n", "[[28 19]\n", " [ 8 16]]\n", "14:09:53 Starting\n", "14:09:55 epoch 0 of 1000 Train loss: 0.7189 Train f_score 0.6751 Xval loss: 0.7190 Xval f_score 0.6604\n", "14:09:55 epoch 50 of 1000 Train loss: 0.6972 Train f_score 0.6751 Xval loss: 0.6975 Xval f_score 0.6604\n", "14:09:55 epoch 100 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:09:55 epoch 150 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:55 epoch 200 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:55 epoch 250 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:55 epoch 300 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:55 epoch 350 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 400 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 450 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 500 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 600 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 700 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:56 epoch 750 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:57 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:57 epoch 850 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:57 epoch 900 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:09:57 epoch 950 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "Best Xval loss epoch 337, value 0.693665\n", "NN units 8\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.510, Train F1 0.675\n", "[[ 0 0]\n", " [103 107]]\n", "Final Xval Accuracy 0.493, Xval F1 0.660\n", "[[ 0 0]\n", " [36 35]]\n", "14:09:59 Starting\n", "14:10:00 epoch 0 of 1000 Train loss: 0.6946 Train f_score 0.0000 Xval loss: 0.6934 Xval f_score 0.0000\n", "14:10:00 epoch 50 of 1000 Train loss: 0.6852 Train f_score 0.6441 Xval loss: 0.6917 Xval f_score 0.5455\n", "14:10:00 epoch 100 of 1000 Train loss: 0.6685 Train f_score 0.6075 Xval loss: 0.6910 Xval f_score 0.4776\n", "14:10:00 epoch 150 of 1000 Train loss: 0.6517 Train f_score 0.6575 Xval loss: 0.6968 Xval 
f_score 0.4687\n", "14:10:00 epoch 200 of 1000 Train loss: 0.6384 Train f_score 0.6697 Xval loss: 0.7041 Xval f_score 0.4762\n", "14:10:01 epoch 250 of 1000 Train loss: 0.6270 Train f_score 0.6787 Xval loss: 0.7103 Xval f_score 0.4687\n", "14:10:01 epoch 300 of 1000 Train loss: 0.6153 Train f_score 0.6758 Xval loss: 0.7153 Xval f_score 0.4762\n", "14:10:01 epoch 350 of 1000 Train loss: 0.6029 Train f_score 0.6728 Xval loss: 0.7209 Xval f_score 0.5000\n", "14:10:01 epoch 400 of 1000 Train loss: 0.5891 Train f_score 0.6605 Xval loss: 0.7274 Xval f_score 0.4687\n", "14:10:01 epoch 450 of 1000 Train loss: 0.5727 Train f_score 0.6698 Xval loss: 0.7335 Xval f_score 0.5075\n", "14:10:01 epoch 500 of 1000 Train loss: 0.5542 Train f_score 0.6919 Xval loss: 0.7412 Xval f_score 0.5373\n", "14:10:01 epoch 550 of 1000 Train loss: 0.5341 Train f_score 0.7177 Xval loss: 0.7501 Xval f_score 0.5152\n", "Best Xval loss epoch 83, value 0.690506\n", "NN units 16\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.757, Train F1 0.771\n", "[[73 21]\n", " [30 86]]\n", "Final Xval Accuracy 0.577, Xval F1 0.625\n", "[[16 10]\n", " [20 25]]\n", "14:10:03 Starting\n", "14:10:04 epoch 0 of 1000 Train loss: 0.6952 Train f_score 0.6751 Xval loss: 0.6947 Xval f_score 0.6667\n", "14:10:04 epoch 50 of 1000 Train loss: 0.6855 Train f_score 0.6537 Xval loss: 0.6925 Xval f_score 0.5778\n", "14:10:04 epoch 100 of 1000 Train loss: 0.6696 Train f_score 0.6316 Xval loss: 0.6940 Xval f_score 0.4789\n", "14:10:04 epoch 150 of 1000 Train loss: 0.6551 Train f_score 0.6545 Xval loss: 0.7002 Xval f_score 0.5000\n", "14:10:04 epoch 200 of 1000 Train loss: 0.6449 Train f_score 0.6667 Xval loss: 0.7066 Xval f_score 0.5000\n", "14:10:04 epoch 250 of 1000 Train loss: 0.6353 Train f_score 0.6758 Xval loss: 0.7129 Xval f_score 0.4923\n", "14:10:05 epoch 300 of 1000 Train loss: 0.6227 Train f_score 0.6697 Xval loss: 0.7186 Xval f_score 0.5152\n", "14:10:05 epoch 350 of 1000 Train loss: 0.6072 Train f_score 0.6605 Xval loss: 0.7228 Xval f_score 0.4923\n", "14:10:05 epoch 400 of 1000 Train loss: 0.5891 Train f_score 0.6573 Xval loss: 0.7266 Xval f_score 0.5152\n", "14:10:05 epoch 450 of 1000 Train loss: 0.5691 Train f_score 0.6698 Xval loss: 0.7323 Xval f_score 0.4923\n", "14:10:05 epoch 500 of 1000 Train loss: 0.5485 Train f_score 0.6948 Xval loss: 0.7411 Xval f_score 0.4687\n", "14:10:05 epoch 550 of 1000 Train loss: 0.5288 Train f_score 0.7642 Xval loss: 0.7530 Xval f_score 0.4615\n", "Best Xval loss epoch 63, value 0.692240\n", "NN units 16\n", "Reg_penalty 0.00010000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.781, Train F1 0.783\n", "[[81 24]\n", " [22 83]]\n", "Final Xval Accuracy 0.606, Xval F1 0.481\n", "[[30 22]\n", " [ 6 13]]\n", "14:10:06 Starting\n", "14:10:08 epoch 0 of 1000 Train loss: 0.7002 Train f_score 0.6751 Xval loss: 0.7019 Xval f_score 0.6604\n", "14:10:08 epoch 50 of 1000 Train loss: 0.6920 Train f_score 0.6777 Xval loss: 0.6947 Xval f_score 0.6535\n", "14:10:08 epoch 100 of 1000 Train loss: 0.6850 Train f_score 0.6720 Xval loss: 0.6933 Xval f_score 0.5542\n", "14:10:08 epoch 150 of 1000 Train loss: 0.6746 Train f_score 0.6522 Xval loss: 0.6951 Xval f_score 0.5455\n", "14:10:08 epoch 200 of 1000 Train loss: 0.6637 Train f_score 0.6516 Xval loss: 0.6986 Xval f_score 0.5714\n", "14:10:08 epoch 250 of 1000 Train loss: 0.6547 Train f_score 0.6606 Xval loss: 0.7035 Xval f_score 0.5455\n", "14:10:08 epoch 300 of 1000 Train loss: 0.6476 Train f_score 
0.6606 Xval loss: 0.7086 Xval f_score 0.4918\n", "14:10:09 epoch 350 of 1000 Train loss: 0.6416 Train f_score 0.6636 Xval loss: 0.7123 Xval f_score 0.4762\n", "14:10:09 epoch 400 of 1000 Train loss: 0.6347 Train f_score 0.6728 Xval loss: 0.7152 Xval f_score 0.4923\n", "14:10:09 epoch 450 of 1000 Train loss: 0.6260 Train f_score 0.6759 Xval loss: 0.7210 Xval f_score 0.5152\n", "14:10:09 epoch 500 of 1000 Train loss: 0.6149 Train f_score 0.6791 Xval loss: 0.7288 Xval f_score 0.5152\n", "14:10:09 epoch 550 of 1000 Train loss: 0.6012 Train f_score 0.6822 Xval loss: 0.7376 Xval f_score 0.4923\n", "14:10:09 epoch 600 of 1000 Train loss: 0.5856 Train f_score 0.6981 Xval loss: 0.7505 Xval f_score 0.4923\n", "Best Xval loss epoch 95, value 0.693248\n", "NN units 16\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.733, Train F1 0.769\n", "[[61 14]\n", " [42 93]]\n", "Final Xval Accuracy 0.592, Xval F1 0.508\n", "[[27 20]\n", " [ 9 15]]\n", "14:10:10 Starting\n", "14:10:12 epoch 0 of 1000 Train loss: 0.7126 Train f_score 0.6751 Xval loss: 0.7141 Xval f_score 0.6604\n", "14:10:12 epoch 50 of 1000 Train loss: 0.6963 Train f_score 0.6751 Xval loss: 0.6969 Xval f_score 0.6604\n", "14:10:12 epoch 100 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:10:12 epoch 150 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:10:12 epoch 200 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:12 epoch 250 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:10:12 epoch 300 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:13 epoch 350 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:13 epoch 400 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:13 epoch 450 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:10:13 epoch 500 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:13 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:10:13 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:13 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:10:13 epoch 700 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:14 epoch 750 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:14 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:10:14 epoch 850 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:10:14 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:10:14 epoch 950 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "Best Xval loss epoch 269, value 0.693593\n", "NN units 16\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.510, Train F1 0.675\n", "[[ 0 0]\n", " [103 107]]\n", "Final Xval Accuracy 0.549, Xval F1 0.238\n", "[[34 30]\n", " [ 2 
5]]\n", "14:10:15 Starting\n", "14:10:17 epoch 0 of 1000 Train loss: 0.7462 Train f_score 0.0180 Xval loss: 0.7444 Xval f_score 0.2727\n", "14:10:17 epoch 50 of 1000 Train loss: 0.7020 Train f_score 0.6751 Xval loss: 0.7022 Xval f_score 0.6604\n", "14:10:17 epoch 100 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:17 epoch 150 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:17 epoch 200 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:17 epoch 250 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:17 epoch 300 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:17 epoch 350 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 400 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 450 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 500 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 550 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 600 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 650 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 700 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:18 epoch 750 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:19 epoch 800 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:19 epoch 850 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:19 epoch 900 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:19 epoch 950 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "Best Xval loss epoch 534, value 0.693753\n", "NN units 16\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.543, Train F1 0.520\n", "[[62 55]\n", " [41 52]]\n", "Final Xval Accuracy 0.493, Xval F1 0.660\n", "[[ 0 0]\n", " [36 35]]\n", "14:10:20 Starting\n", "14:10:22 epoch 0 of 1000 Train loss: 0.6938 Train f_score 0.6751 Xval loss: 0.6951 Xval f_score 0.6604\n", "14:10:22 epoch 50 of 1000 Train loss: 0.6770 Train f_score 0.6174 Xval loss: 0.6916 Xval f_score 0.5195\n", "14:10:22 epoch 100 of 1000 Train loss: 0.6543 Train f_score 0.6452 Xval loss: 0.6969 Xval f_score 0.4848\n", "14:10:22 epoch 150 of 1000 Train loss: 0.6393 Train f_score 0.6575 Xval loss: 0.7044 Xval f_score 0.5079\n", "14:10:22 epoch 200 of 1000 Train loss: 0.6275 Train f_score 0.6574 Xval loss: 0.7118 Xval f_score 0.5231\n", "14:10:22 epoch 250 of 1000 Train loss: 0.6127 Train f_score 0.6574 Xval loss: 0.7180 Xval f_score 0.5294\n", "14:10:22 epoch 300 of 1000 Train loss: 0.5923 Train f_score 0.6574 Xval loss: 0.7246 Xval f_score 0.5429\n", "14:10:22 epoch 350 of 1000 Train loss: 0.5649 Train f_score 0.6635 Xval loss: 0.7350 Xval f_score 0.5075\n", "14:10:22 epoch 400 of 1000 Train loss: 0.5318 Train f_score 0.7042 Xval loss: 0.7532 Xval f_score 0.5075\n", "Best Xval loss epoch 44, 
value 0.691533\n", "NN units 32\n", "Reg_penalty 0.00000000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.748, Train F1 0.725\n", "[[87 37]\n", " [16 70]]\n", "Final Xval Accuracy 0.563, Xval F1 0.608\n", "[[16 11]\n", " [20 24]]\n", "14:10:24 Starting\n", "14:10:26 epoch 0 of 1000 Train loss: 0.6975 Train f_score 0.0185 Xval loss: 0.6966 Xval f_score 0.2326\n", "14:10:26 epoch 50 of 1000 Train loss: 0.6823 Train f_score 0.6213 Xval loss: 0.6936 Xval f_score 0.4737\n", "14:10:26 epoch 100 of 1000 Train loss: 0.6632 Train f_score 0.6457 Xval loss: 0.6977 Xval f_score 0.4848\n", "14:10:26 epoch 150 of 1000 Train loss: 0.6488 Train f_score 0.6636 Xval loss: 0.7036 Xval f_score 0.5231\n", "14:10:27 epoch 200 of 1000 Train loss: 0.6384 Train f_score 0.6544 Xval loss: 0.7108 Xval f_score 0.5152\n", "14:10:27 epoch 250 of 1000 Train loss: 0.6262 Train f_score 0.6544 Xval loss: 0.7194 Xval f_score 0.5152\n", "14:10:27 epoch 300 of 1000 Train loss: 0.6085 Train f_score 0.6667 Xval loss: 0.7283 Xval f_score 0.5294\n", "14:10:27 epoch 350 of 1000 Train loss: 0.5847 Train f_score 0.6729 Xval loss: 0.7420 Xval f_score 0.4848\n", "Best Xval loss epoch 51, value 0.693628\n", "NN units 32\n", "Reg_penalty 0.00010000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.719, Train F1 0.763\n", "[[56 12]\n", " [47 95]]\n", "Final Xval Accuracy 0.592, Xval F1 0.453\n", "[[30 23]\n", " [ 6 12]]\n", "14:10:28 Starting\n", "14:10:30 epoch 0 of 1000 Train loss: 0.7049 Train f_score 0.6751 Xval loss: 0.7085 Xval f_score 0.6604\n", "14:10:30 epoch 50 of 1000 Train loss: 0.6891 Train f_score 0.6614 Xval loss: 0.6971 Xval f_score 0.5679\n", "14:10:30 epoch 100 of 1000 Train loss: 0.6772 Train f_score 0.6609 Xval loss: 0.6984 Xval f_score 0.5714\n", "14:10:30 epoch 150 of 1000 Train loss: 0.6659 Train f_score 0.6545 Xval loss: 0.7014 Xval f_score 0.5429\n", "14:10:30 epoch 200 of 1000 Train loss: 0.6569 Train f_score 0.6606 Xval loss: 0.7070 Xval f_score 0.5672\n", "14:10:30 epoch 250 of 1000 Train loss: 0.6506 Train f_score 0.6545 Xval loss: 0.7148 Xval f_score 0.5373\n", "14:10:30 epoch 300 of 1000 Train loss: 0.6452 Train f_score 0.6514 Xval loss: 0.7206 Xval f_score 0.5152\n", "14:10:30 epoch 350 of 1000 Train loss: 0.6387 Train f_score 0.6545 Xval loss: 0.7241 Xval f_score 0.5294\n", "14:10:31 epoch 400 of 1000 Train loss: 0.6298 Train f_score 0.6698 Xval loss: 0.7301 Xval f_score 0.5294\n", "14:10:31 epoch 450 of 1000 Train loss: 0.6166 Train f_score 0.6729 Xval loss: 0.7400 Xval f_score 0.5294\n", "14:10:31 epoch 500 of 1000 Train loss: 0.5996 Train f_score 0.6854 Xval loss: 0.7519 Xval f_score 0.4923\n", "14:10:31 epoch 550 of 1000 Train loss: 0.5806 Train f_score 0.7163 Xval loss: 0.7664 Xval f_score 0.4848\n", "Best Xval loss epoch 59, value 0.696955\n", "NN units 32\n", "Reg_penalty 0.00030000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.733, Train F1 0.772\n", "[[59 12]\n", " [44 95]]\n", "Final Xval Accuracy 0.577, Xval F1 0.643\n", "[[14 8]\n", " [22 27]]\n", "14:10:32 Starting\n", "14:10:34 epoch 0 of 1000 Train loss: 0.7351 Train f_score 0.0000 Xval loss: 0.7312 Xval f_score 0.0000\n", "14:10:34 epoch 50 of 1000 Train loss: 0.6998 Train f_score 0.6772 Xval loss: 0.7004 Xval f_score 0.6604\n", "14:10:34 epoch 100 of 1000 Train loss: 0.6940 Train f_score 0.6751 Xval loss: 0.6950 Xval f_score 0.6604\n", "14:10:34 epoch 150 of 1000 Train loss: 0.6935 Train f_score 0.6772 Xval loss: 0.6948 Xval f_score 0.6604\n" ] }, { "name": "stdout", 
"output_type": "stream", "text": [ "14:10:34 epoch 200 of 1000 Train loss: 0.6930 Train f_score 0.6837 Xval loss: 0.6951 Xval f_score 0.6476\n", "14:10:34 epoch 250 of 1000 Train loss: 0.6917 Train f_score 0.6621 Xval loss: 0.6956 Xval f_score 0.5806\n", "14:10:34 epoch 300 of 1000 Train loss: 0.6892 Train f_score 0.6589 Xval loss: 0.6959 Xval f_score 0.5185\n", "14:10:34 epoch 350 of 1000 Train loss: 0.6852 Train f_score 0.6639 Xval loss: 0.6959 Xval f_score 0.5479\n", "14:10:35 epoch 400 of 1000 Train loss: 0.6801 Train f_score 0.6812 Xval loss: 0.6954 Xval f_score 0.5556\n", "14:10:35 epoch 450 of 1000 Train loss: 0.6744 Train f_score 0.6697 Xval loss: 0.6952 Xval f_score 0.5429\n", "14:10:35 epoch 500 of 1000 Train loss: 0.6688 Train f_score 0.6574 Xval loss: 0.6954 Xval f_score 0.5429\n", "14:10:35 epoch 550 of 1000 Train loss: 0.6639 Train f_score 0.6574 Xval loss: 0.6951 Xval f_score 0.5429\n", "14:10:35 epoch 600 of 1000 Train loss: 0.6595 Train f_score 0.6605 Xval loss: 0.6958 Xval f_score 0.5507\n", "14:10:35 epoch 650 of 1000 Train loss: 0.6555 Train f_score 0.6636 Xval loss: 0.6981 Xval f_score 0.5373\n", "14:10:35 epoch 700 of 1000 Train loss: 0.6519 Train f_score 0.6667 Xval loss: 0.7018 Xval f_score 0.5294\n", "14:10:35 epoch 750 of 1000 Train loss: 0.6482 Train f_score 0.6697 Xval loss: 0.7061 Xval f_score 0.5075\n", "14:10:36 epoch 800 of 1000 Train loss: 0.6450 Train f_score 0.6696 Xval loss: 0.7089 Xval f_score 0.5075\n", "14:10:36 epoch 850 of 1000 Train loss: 0.6423 Train f_score 0.6607 Xval loss: 0.7107 Xval f_score 0.5075\n", "14:10:36 epoch 900 of 1000 Train loss: 0.6398 Train f_score 0.6607 Xval loss: 0.7126 Xval f_score 0.5075\n", "14:10:36 epoch 950 of 1000 Train loss: 0.6374 Train f_score 0.6607 Xval loss: 0.7145 Xval f_score 0.5075\n", "Best Xval loss epoch 127, value 0.694819\n", "NN units 32\n", "Reg_penalty 0.00100000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.700, Train F1 0.674\n", "[[82 42]\n", " [21 65]]\n", "Final Xval Accuracy 0.592, Xval F1 0.613\n", "[[19 12]\n", " [17 23]]\n", "14:10:38 Starting\n", "14:10:39 epoch 0 of 1000 Train loss: 0.8115 Train f_score 0.6751 Xval loss: 0.8125 Xval f_score 0.6604\n", "14:10:39 epoch 50 of 1000 Train loss: 0.7125 Train f_score 0.6751 Xval loss: 0.7124 Xval f_score 0.6604\n", "14:10:39 epoch 100 of 1000 Train loss: 0.6954 Train f_score 0.6751 Xval loss: 0.6960 Xval f_score 0.6604\n", "14:10:39 epoch 150 of 1000 Train loss: 0.6941 Train f_score 0.6751 Xval loss: 0.6946 Xval f_score 0.6604\n", "14:10:39 epoch 200 of 1000 Train loss: 0.6936 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:10:40 epoch 250 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:40 epoch 300 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:40 epoch 350 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:10:40 epoch 400 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:40 epoch 450 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:40 epoch 500 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:40 epoch 550 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:40 epoch 600 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:41 epoch 650 of 
1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:41 epoch 700 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:41 epoch 750 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:41 epoch 800 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:41 epoch 850 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:41 epoch 900 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:10:41 epoch 950 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "Best Xval loss epoch 294, value 0.693774\n", "NN units 32\n", "Reg_penalty 0.00300000\n", "Dropout 0.0000\n", "Activation sigmoid\n", "Final Train Accuracy 0.510, Train F1 0.675\n", "[[ 0 0]\n", " [103 107]]\n", "Final Xval Accuracy 0.549, Xval F1 0.467\n", "[[25 21]\n", " [11 14]]\n", "14:10:43 Starting\n", "14:10:44 epoch 0 of 1000 Train loss: 0.6932 Train f_score 0.0000 Xval loss: 0.6932 Xval f_score 0.1053\n", "14:10:45 epoch 50 of 1000 Train loss: 0.6885 Train f_score 0.6914 Xval loss: 0.6926 Xval f_score 0.5412\n", "14:10:45 epoch 100 of 1000 Train loss: 0.6773 Train f_score 0.6751 Xval loss: 0.6935 Xval f_score 0.4478\n", "14:10:45 epoch 150 of 1000 Train loss: 0.6591 Train f_score 0.6637 Xval loss: 0.7000 Xval f_score 0.5152\n", "14:10:45 epoch 200 of 1000 Train loss: 0.6394 Train f_score 0.6757 Xval loss: 0.7096 Xval f_score 0.5075\n", "14:10:45 epoch 250 of 1000 Train loss: 0.6434 Train f_score 0.7193 Xval loss: 0.7208 Xval f_score 0.5075\n", "14:10:45 epoch 300 of 1000 Train loss: 0.6209 Train f_score 0.7105 Xval loss: 0.7282 Xval f_score 0.4928\n", "14:10:45 epoch 350 of 1000 Train loss: 0.6345 Train f_score 0.7074 Xval loss: 0.7386 Xval f_score 0.4928\n", "14:10:45 epoch 400 of 1000 Train loss: 0.6323 Train f_score 0.7236 Xval loss: 0.7467 Xval f_score 0.4928\n", "14:10:46 epoch 450 of 1000 Train loss: 0.6471 Train f_score 0.6835 Xval loss: 0.7518 Xval f_score 0.4928\n", "14:10:46 epoch 500 of 1000 Train loss: 0.6296 Train f_score 0.7265 Xval loss: 0.7543 Xval f_score 0.4928\n", "14:10:46 epoch 550 of 1000 Train loss: 0.6199 Train f_score 0.7220 Xval loss: 0.7578 Xval f_score 0.5000\n", "Best Xval loss epoch 69, value 0.692291\n", "NN units 4\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.729, Train F1 0.730\n", "[[76 30]\n", " [27 77]]\n", "Final Xval Accuracy 0.563, Xval F1 0.340\n", "[[32 27]\n", " [ 4 8]]\n", "14:10:47 Starting\n", "14:10:49 epoch 0 of 1000 Train loss: 0.6948 Train f_score 0.4848 Xval loss: 0.6954 Xval f_score 0.3824\n", "14:10:49 epoch 50 of 1000 Train loss: 0.6897 Train f_score 0.6689 Xval loss: 0.6930 Xval f_score 0.6465\n", "14:10:49 epoch 100 of 1000 Train loss: 0.6761 Train f_score 0.6581 Xval loss: 0.6889 Xval f_score 0.5333\n", "14:10:49 epoch 150 of 1000 Train loss: 0.6571 Train f_score 0.6695 Xval loss: 0.6890 Xval f_score 0.5882\n", "14:10:49 epoch 200 of 1000 Train loss: 0.6508 Train f_score 0.6583 Xval loss: 0.6933 Xval f_score 0.5588\n", "14:10:50 epoch 250 of 1000 Train loss: 0.6348 Train f_score 0.6348 Xval loss: 0.6931 Xval f_score 0.5507\n", "14:10:50 epoch 300 of 1000 Train loss: 0.6094 Train f_score 0.6971 Xval loss: 0.6929 Xval f_score 0.5588\n", "14:10:50 epoch 350 of 1000 Train loss: 0.6211 Train f_score 0.7029 Xval loss: 0.6924 Xval f_score 
0.5455\n", "14:10:50 epoch 400 of 1000 Train loss: 0.6176 Train f_score 0.6753 Xval loss: 0.6940 Xval f_score 0.5231\n", "14:10:50 epoch 450 of 1000 Train loss: 0.6356 Train f_score 0.6891 Xval loss: 0.6965 Xval f_score 0.5455\n", "14:10:50 epoch 500 of 1000 Train loss: 0.5943 Train f_score 0.7203 Xval loss: 0.7006 Xval f_score 0.5152\n", "14:10:50 epoch 550 of 1000 Train loss: 0.6096 Train f_score 0.7190 Xval loss: 0.7017 Xval f_score 0.5455\n", "14:10:50 epoch 600 of 1000 Train loss: 0.5906 Train f_score 0.6807 Xval loss: 0.7035 Xval f_score 0.5455\n", "14:10:51 epoch 650 of 1000 Train loss: 0.5961 Train f_score 0.7102 Xval loss: 0.7034 Xval f_score 0.5231\n", "14:10:51 epoch 700 of 1000 Train loss: 0.5933 Train f_score 0.7197 Xval loss: 0.7040 Xval f_score 0.5231\n", "14:10:51 epoch 750 of 1000 Train loss: 0.5771 Train f_score 0.7054 Xval loss: 0.7022 Xval f_score 0.5231\n", "14:10:51 epoch 800 of 1000 Train loss: 0.5859 Train f_score 0.6857 Xval loss: 0.7038 Xval f_score 0.5429\n", "14:10:51 epoch 850 of 1000 Train loss: 0.5898 Train f_score 0.7083 Xval loss: 0.7047 Xval f_score 0.5217\n", "14:10:51 epoch 900 of 1000 Train loss: 0.5589 Train f_score 0.7287 Xval loss: 0.7112 Xval f_score 0.5143\n", "14:10:51 epoch 950 of 1000 Train loss: 0.5820 Train f_score 0.7303 Xval loss: 0.7143 Xval f_score 0.5000\n", "Best Xval loss epoch 110, value 0.688671\n", "NN units 4\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.743, Train F1 0.757\n", "[[72 23]\n", " [31 84]]\n", "Final Xval Accuracy 0.577, Xval F1 0.516\n", "[[25 19]\n", " [11 16]]\n", "14:10:53 Starting\n", "14:10:54 epoch 0 of 1000 Train loss: 0.6952 Train f_score 0.3778 Xval loss: 0.6943 Xval f_score 0.4242\n", "14:10:54 epoch 50 of 1000 Train loss: 0.6888 Train f_score 0.6667 Xval loss: 0.6935 Xval f_score 0.6600\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:10:55 epoch 100 of 1000 Train loss: 0.6774 Train f_score 0.6803 Xval loss: 0.6935 Xval f_score 0.5570\n", "14:10:55 epoch 150 of 1000 Train loss: 0.6644 Train f_score 0.6835 Xval loss: 0.6945 Xval f_score 0.5352\n", "14:10:55 epoch 200 of 1000 Train loss: 0.6443 Train f_score 0.6638 Xval loss: 0.6978 Xval f_score 0.5753\n", "14:10:55 epoch 250 of 1000 Train loss: 0.6541 Train f_score 0.6862 Xval loss: 0.6996 Xval f_score 0.5556\n", "14:10:55 epoch 300 of 1000 Train loss: 0.6353 Train f_score 0.7107 Xval loss: 0.7043 Xval f_score 0.5753\n", "14:10:55 epoch 350 of 1000 Train loss: 0.6432 Train f_score 0.6803 Xval loss: 0.7058 Xval f_score 0.5556\n", "14:10:55 epoch 400 of 1000 Train loss: 0.6357 Train f_score 0.6695 Xval loss: 0.7090 Xval f_score 0.5352\n", "14:10:56 epoch 450 of 1000 Train loss: 0.6191 Train f_score 0.7020 Xval loss: 0.7106 Xval f_score 0.5143\n", "14:10:56 epoch 500 of 1000 Train loss: 0.6296 Train f_score 0.7059 Xval loss: 0.7150 Xval f_score 0.4928\n", "14:10:56 epoch 550 of 1000 Train loss: 0.5974 Train f_score 0.7342 Xval loss: 0.7199 Xval f_score 0.5152\n", "14:10:56 epoch 600 of 1000 Train loss: 0.6159 Train f_score 0.6883 Xval loss: 0.7185 Xval f_score 0.5152\n", "14:10:56 epoch 650 of 1000 Train loss: 0.6307 Train f_score 0.6809 Xval loss: 0.7223 Xval f_score 0.5373\n", "14:10:56 epoch 700 of 1000 Train loss: 0.6272 Train f_score 0.6870 Xval loss: 0.7182 Xval f_score 0.4923\n", "14:10:56 epoch 750 of 1000 Train loss: 0.6215 Train f_score 0.6844 Xval loss: 0.7187 Xval f_score 0.4923\n", "14:10:56 epoch 800 of 1000 Train loss: 0.6080 Train f_score 0.7064 Xval loss: 0.7190 Xval f_score 
0.5152\n", "14:10:57 epoch 850 of 1000 Train loss: 0.5824 Train f_score 0.7009 Xval loss: 0.7197 Xval f_score 0.4848\n", "14:10:57 epoch 900 of 1000 Train loss: 0.6024 Train f_score 0.7179 Xval loss: 0.7183 Xval f_score 0.5294\n", "14:10:57 epoch 950 of 1000 Train loss: 0.5778 Train f_score 0.7089 Xval loss: 0.7174 Xval f_score 0.5507\n", "Best Xval loss epoch 73, value 0.693368\n", "NN units 4\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.771, Train F1 0.788\n", "[[73 18]\n", " [30 89]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:10:58 Starting\n", "14:11:01 epoch 0 of 1000 Train loss: 0.6982 Train f_score 0.5622 Xval loss: 0.6980 Xval f_score 0.5957\n", "14:11:01 epoch 50 of 1000 Train loss: 0.6915 Train f_score 0.6599 Xval loss: 0.6955 Xval f_score 0.6337\n", "14:11:01 epoch 100 of 1000 Train loss: 0.6810 Train f_score 0.6741 Xval loss: 0.6946 Xval f_score 0.5789\n", "14:11:02 epoch 150 of 1000 Train loss: 0.6711 Train f_score 0.6131 Xval loss: 0.6964 Xval f_score 0.5797\n", "14:11:02 epoch 200 of 1000 Train loss: 0.6556 Train f_score 0.6091 Xval loss: 0.7003 Xval f_score 0.5672\n", "14:11:02 epoch 250 of 1000 Train loss: 0.6632 Train f_score 0.5894 Xval loss: 0.6990 Xval f_score 0.5455\n", "14:11:02 epoch 300 of 1000 Train loss: 0.6472 Train f_score 0.6408 Xval loss: 0.6992 Xval f_score 0.5625\n", "14:11:02 epoch 350 of 1000 Train loss: 0.6571 Train f_score 0.5876 Xval loss: 0.6974 Xval f_score 0.5231\n", "14:11:02 epoch 400 of 1000 Train loss: 0.6496 Train f_score 0.5895 Xval loss: 0.6993 Xval f_score 0.5079\n", "14:11:02 epoch 450 of 1000 Train loss: 0.6805 Train f_score 0.5510 Xval loss: 0.6992 Xval f_score 0.5079\n", "14:11:03 epoch 500 of 1000 Train loss: 0.6467 Train f_score 0.6070 Xval loss: 0.7000 Xval f_score 0.5079\n", "14:11:03 epoch 550 of 1000 Train loss: 0.6429 Train f_score 0.6341 Xval loss: 0.7010 Xval f_score 0.5000\n", "14:11:03 epoch 600 of 1000 Train loss: 0.6427 Train f_score 0.6108 Xval loss: 0.7021 Xval f_score 0.5000\n", "14:11:03 epoch 650 of 1000 Train loss: 0.6371 Train f_score 0.5773 Xval loss: 0.7039 Xval f_score 0.5000\n", "14:11:03 epoch 700 of 1000 Train loss: 0.6110 Train f_score 0.6020 Xval loss: 0.7042 Xval f_score 0.5000\n", "14:11:03 epoch 750 of 1000 Train loss: 0.6461 Train f_score 0.5803 Xval loss: 0.7053 Xval f_score 0.5000\n", "14:11:03 epoch 800 of 1000 Train loss: 0.6176 Train f_score 0.5895 Xval loss: 0.7069 Xval f_score 0.5000\n", "14:11:03 epoch 850 of 1000 Train loss: 0.6378 Train f_score 0.6332 Xval loss: 0.7086 Xval f_score 0.4762\n", "14:11:04 epoch 900 of 1000 Train loss: 0.6230 Train f_score 0.6417 Xval loss: 0.7107 Xval f_score 0.4762\n", "14:11:04 epoch 950 of 1000 Train loss: 0.6232 Train f_score 0.5792 Xval loss: 0.7131 Xval f_score 0.4516\n", "Best Xval loss epoch 89, value 0.694353\n", "NN units 4\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.710, Train F1 0.677\n", "[[85 43]\n", " [18 64]]\n", "Final Xval Accuracy 0.592, Xval F1 0.659\n", "[[14 7]\n", " [22 28]]\n", "14:11:05 Starting\n", "14:11:07 epoch 0 of 1000 Train loss: 0.7062 Train f_score 0.6255 Xval loss: 0.7055 Xval f_score 0.6604\n", "14:11:07 epoch 50 of 1000 Train loss: 0.6951 Train f_score 0.6751 Xval loss: 0.6960 Xval f_score 0.6604\n", "14:11:07 epoch 100 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:11:07 epoch 150 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 
0.6938 Xval f_score 0.6604\n", "14:11:07 epoch 200 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:11:08 epoch 250 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:08 epoch 300 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:08 epoch 350 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:08 epoch 400 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:08 epoch 450 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:08 epoch 500 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:11:08 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 700 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 750 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 850 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:11:09 epoch 950 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "Best Xval loss epoch 212, value 0.693632\n", "NN units 4\n", "Reg_penalty 0.00300000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.519, Train F1 0.252\n", "[[92 90]\n", " [11 17]]\n", "Final Xval Accuracy 0.563, Xval F1 0.340\n", "[[32 27]\n", " [ 4 8]]\n", "14:11:11 Starting\n", "14:11:13 epoch 0 of 1000 Train loss: 0.6954 Train f_score 0.5200 Xval loss: 0.6923 Xval f_score 0.5393\n", "14:11:13 epoch 50 of 1000 Train loss: 0.6863 Train f_score 0.6522 Xval loss: 0.6929 Xval f_score 0.6304\n", "14:11:13 epoch 100 of 1000 Train loss: 0.6621 Train f_score 0.7043 Xval loss: 0.6929 Xval f_score 0.6027\n", "14:11:13 epoch 150 of 1000 Train loss: 0.6396 Train f_score 0.6786 Xval loss: 0.6972 Xval f_score 0.4615\n", "14:11:13 epoch 200 of 1000 Train loss: 0.6225 Train f_score 0.7240 Xval loss: 0.6987 Xval f_score 0.4923\n", "14:11:13 epoch 250 of 1000 Train loss: 0.6060 Train f_score 0.6698 Xval loss: 0.6995 Xval f_score 0.4923\n", "14:11:14 epoch 300 of 1000 Train loss: 0.6185 Train f_score 0.7042 Xval loss: 0.7013 Xval f_score 0.5455\n", "14:11:14 epoch 350 of 1000 Train loss: 0.5928 Train f_score 0.6791 Xval loss: 0.6985 Xval f_score 0.5455\n", "14:11:14 epoch 400 of 1000 Train loss: 0.5997 Train f_score 0.6537 Xval loss: 0.6977 Xval f_score 0.5672\n", "14:11:14 epoch 450 of 1000 Train loss: 0.5578 Train f_score 0.7136 Xval loss: 0.6957 Xval f_score 0.5672\n", "14:11:14 epoch 500 of 1000 Train loss: 0.5334 Train f_score 0.7570 Xval loss: 0.6949 Xval f_score 0.5882\n", "14:11:14 epoch 550 of 1000 Train loss: 0.5479 Train f_score 0.7500 Xval loss: 0.6976 Xval f_score 0.6197\n", "14:11:14 epoch 600 of 1000 Train loss: 0.5408 Train f_score 0.7299 Xval loss: 0.7036 Xval f_score 0.5714\n", "14:11:14 epoch 650 of 1000 
Train loss: 0.5635 Train f_score 0.6961 Xval loss: 0.7122 Xval f_score 0.5714\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:11:15 epoch 700 of 1000 Train loss: 0.5773 Train f_score 0.6791 Xval loss: 0.7137 Xval f_score 0.5507\n", "14:11:15 epoch 750 of 1000 Train loss: 0.5202 Train f_score 0.7700 Xval loss: 0.7175 Xval f_score 0.5507\n", "14:11:15 epoch 800 of 1000 Train loss: 0.5093 Train f_score 0.7373 Xval loss: 0.7244 Xval f_score 0.5714\n", "14:11:15 epoch 850 of 1000 Train loss: 0.5547 Train f_score 0.6768 Xval loss: 0.7294 Xval f_score 0.5634\n", "14:11:15 epoch 900 of 1000 Train loss: 0.5030 Train f_score 0.7524 Xval loss: 0.7353 Xval f_score 0.5429\n", "14:11:15 epoch 950 of 1000 Train loss: 0.5176 Train f_score 0.7081 Xval loss: 0.7435 Xval f_score 0.5634\n", "Best Xval loss epoch 80, value 0.692245\n", "NN units 8\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.838, Train F1 0.850\n", "[[80 11]\n", " [23 96]]\n", "Final Xval Accuracy 0.620, Xval F1 0.675\n", "[[16 7]\n", " [20 28]]\n", "14:11:17 Starting\n", "14:11:19 epoch 0 of 1000 Train loss: 0.6938 Train f_score 0.4891 Xval loss: 0.6936 Xval f_score 0.4333\n", "14:11:19 epoch 50 of 1000 Train loss: 0.6809 Train f_score 0.6719 Xval loss: 0.6933 Xval f_score 0.4878\n", "14:11:19 epoch 100 of 1000 Train loss: 0.6600 Train f_score 0.6637 Xval loss: 0.6941 Xval f_score 0.4848\n", "14:11:19 epoch 150 of 1000 Train loss: 0.6363 Train f_score 0.6937 Xval loss: 0.7024 Xval f_score 0.4706\n", "14:11:19 epoch 200 of 1000 Train loss: 0.6309 Train f_score 0.6696 Xval loss: 0.7101 Xval f_score 0.5000\n", "14:11:20 epoch 250 of 1000 Train loss: 0.6076 Train f_score 0.6697 Xval loss: 0.7164 Xval f_score 0.5000\n", "14:11:20 epoch 300 of 1000 Train loss: 0.5951 Train f_score 0.6875 Xval loss: 0.7210 Xval f_score 0.5152\n", "14:11:20 epoch 350 of 1000 Train loss: 0.5614 Train f_score 0.7064 Xval loss: 0.7263 Xval f_score 0.5152\n", "14:11:20 epoch 400 of 1000 Train loss: 0.5980 Train f_score 0.7005 Xval loss: 0.7262 Xval f_score 0.4923\n", "14:11:20 epoch 450 of 1000 Train loss: 0.5669 Train f_score 0.7064 Xval loss: 0.7269 Xval f_score 0.4923\n", "14:11:20 epoch 500 of 1000 Train loss: 0.5438 Train f_score 0.7232 Xval loss: 0.7243 Xval f_score 0.5075\n", "14:11:20 epoch 550 of 1000 Train loss: 0.5594 Train f_score 0.7368 Xval loss: 0.7208 Xval f_score 0.4615\n", "14:11:20 epoch 600 of 1000 Train loss: 0.5502 Train f_score 0.7615 Xval loss: 0.7183 Xval f_score 0.5000\n", "14:11:21 epoch 650 of 1000 Train loss: 0.5530 Train f_score 0.7143 Xval loss: 0.7249 Xval f_score 0.4615\n", "14:11:21 epoch 700 of 1000 Train loss: 0.5505 Train f_score 0.7189 Xval loss: 0.7314 Xval f_score 0.4615\n", "14:11:21 epoch 750 of 1000 Train loss: 0.5233 Train f_score 0.7414 Xval loss: 0.7278 Xval f_score 0.5075\n", "14:11:21 epoch 800 of 1000 Train loss: 0.5130 Train f_score 0.7368 Xval loss: 0.7314 Xval f_score 0.5075\n", "14:11:21 epoch 850 of 1000 Train loss: 0.5086 Train f_score 0.7477 Xval loss: 0.7321 Xval f_score 0.5075\n", "14:11:21 epoch 900 of 1000 Train loss: 0.5484 Train f_score 0.7306 Xval loss: 0.7311 Xval f_score 0.5075\n", "14:11:21 epoch 950 of 1000 Train loss: 0.5487 Train f_score 0.7354 Xval loss: 0.7315 Xval f_score 0.5075\n", "Best Xval loss epoch 82, value 0.692622\n", "NN units 8\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.810, Train F1 0.831\n", "[[72 9]\n", " [31 98]]\n", "Final Xval Accuracy 0.592, Xval F1 0.540\n", 
"[[25 18]\n", " [11 17]]\n", "14:11:23 Starting\n", "14:11:25 epoch 0 of 1000 Train loss: 0.6961 Train f_score 0.5788 Xval loss: 0.6957 Xval f_score 0.6263\n", "14:11:25 epoch 50 of 1000 Train loss: 0.6863 Train f_score 0.6757 Xval loss: 0.6951 Xval f_score 0.6465\n", "14:11:25 epoch 100 of 1000 Train loss: 0.6688 Train f_score 0.6608 Xval loss: 0.6934 Xval f_score 0.5753\n", "14:11:25 epoch 150 of 1000 Train loss: 0.6576 Train f_score 0.6359 Xval loss: 0.6915 Xval f_score 0.5714\n", "14:11:26 epoch 200 of 1000 Train loss: 0.6384 Train f_score 0.6351 Xval loss: 0.6930 Xval f_score 0.5970\n", "14:11:26 epoch 250 of 1000 Train loss: 0.6168 Train f_score 0.6937 Xval loss: 0.6914 Xval f_score 0.6377\n", "14:11:26 epoch 300 of 1000 Train loss: 0.6239 Train f_score 0.6184 Xval loss: 0.6937 Xval f_score 0.5970\n", "14:11:26 epoch 350 of 1000 Train loss: 0.6180 Train f_score 0.6408 Xval loss: 0.6952 Xval f_score 0.5758\n", "14:11:26 epoch 400 of 1000 Train loss: 0.5825 Train f_score 0.6919 Xval loss: 0.6969 Xval f_score 0.5538\n", "14:11:26 epoch 450 of 1000 Train loss: 0.6095 Train f_score 0.6573 Xval loss: 0.6977 Xval f_score 0.5538\n", "14:11:26 epoch 500 of 1000 Train loss: 0.5985 Train f_score 0.6184 Xval loss: 0.6992 Xval f_score 0.5538\n", "14:11:26 epoch 550 of 1000 Train loss: 0.6056 Train f_score 0.6502 Xval loss: 0.7036 Xval f_score 0.5538\n", "14:11:27 epoch 600 of 1000 Train loss: 0.5987 Train f_score 0.6479 Xval loss: 0.7052 Xval f_score 0.5538\n", "14:11:27 epoch 650 of 1000 Train loss: 0.5564 Train f_score 0.7150 Xval loss: 0.7079 Xval f_score 0.5538\n", "14:11:27 epoch 700 of 1000 Train loss: 0.5908 Train f_score 0.6832 Xval loss: 0.7176 Xval f_score 0.5455\n", "14:11:27 epoch 750 of 1000 Train loss: 0.5724 Train f_score 0.6927 Xval loss: 0.7285 Xval f_score 0.5455\n", "14:11:27 epoch 800 of 1000 Train loss: 0.5649 Train f_score 0.6834 Xval loss: 0.7374 Xval f_score 0.5231\n", "14:11:27 epoch 850 of 1000 Train loss: 0.5441 Train f_score 0.7464 Xval loss: 0.7450 Xval f_score 0.5588\n", "14:11:27 epoch 900 of 1000 Train loss: 0.5474 Train f_score 0.7512 Xval loss: 0.7479 Xval f_score 0.5373\n", "14:11:28 epoch 950 of 1000 Train loss: 0.5590 Train f_score 0.6897 Xval loss: 0.7593 Xval f_score 0.5588\n", "Best Xval loss epoch 176, value 0.691069\n", "NN units 8\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.790, Train F1 0.776\n", "[[90 31]\n", " [13 76]]\n", "Final Xval Accuracy 0.592, Xval F1 0.567\n", "[[23 16]\n", " [13 19]]\n", "14:11:29 Starting\n", "14:11:31 epoch 0 of 1000 Train loss: 0.7018 Train f_score 0.4379 Xval loss: 0.7020 Xval f_score 0.4138\n", "14:11:31 epoch 50 of 1000 Train loss: 0.6932 Train f_score 0.6644 Xval loss: 0.6973 Xval f_score 0.5833\n", "14:11:31 epoch 100 of 1000 Train loss: 0.6818 Train f_score 0.6587 Xval loss: 0.6985 Xval f_score 0.5333\n", "14:11:32 epoch 150 of 1000 Train loss: 0.6623 Train f_score 0.6552 Xval loss: 0.7048 Xval f_score 0.5429\n", "14:11:32 epoch 200 of 1000 Train loss: 0.6649 Train f_score 0.6609 Xval loss: 0.7140 Xval f_score 0.5672\n", "14:11:32 epoch 250 of 1000 Train loss: 0.6374 Train f_score 0.6667 Xval loss: 0.7204 Xval f_score 0.5588\n", "14:11:32 epoch 300 of 1000 Train loss: 0.6383 Train f_score 0.7186 Xval loss: 0.7293 Xval f_score 0.5507\n", "14:11:32 epoch 350 of 1000 Train loss: 0.6199 Train f_score 0.7359 Xval loss: 0.7361 Xval f_score 0.5714\n", "14:11:32 epoch 400 of 1000 Train loss: 0.6367 Train f_score 0.7013 Xval loss: 0.7438 Xval f_score 0.5429\n", "14:11:32 
epoch 450 of 1000 Train loss: 0.6218 Train f_score 0.7197 Xval loss: 0.7520 Xval f_score 0.5429\n", "14:11:32 epoch 500 of 1000 Train loss: 0.5791 Train f_score 0.7401 Xval loss: 0.7589 Xval f_score 0.5143\n", "14:11:33 epoch 550 of 1000 Train loss: 0.6147 Train f_score 0.7162 Xval loss: 0.7636 Xval f_score 0.5143\n", "Best Xval loss epoch 72, value 0.697106\n", "NN units 8\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.762, Train F1 0.752\n", "[[84 31]\n", " [19 76]]\n", "Final Xval Accuracy 0.549, Xval F1 0.529\n", "[[21 17]\n", " [15 18]]\n", "14:11:34 Starting\n", "14:11:36 epoch 0 of 1000 Train loss: 0.7193 Train f_score 0.5820 Xval loss: 0.7189 Xval f_score 0.5714\n", "14:11:36 epoch 50 of 1000 Train loss: 0.6974 Train f_score 0.6795 Xval loss: 0.6991 Xval f_score 0.6604\n", "14:11:36 epoch 100 of 1000 Train loss: 0.6934 Train f_score 0.6505 Xval loss: 0.6961 Xval f_score 0.6327\n", "14:11:37 epoch 150 of 1000 Train loss: 0.6882 Train f_score 0.6693 Xval loss: 0.6979 Xval f_score 0.5476\n", "14:11:37 epoch 200 of 1000 Train loss: 0.6840 Train f_score 0.6824 Xval loss: 0.6983 Xval f_score 0.5789\n", "14:11:37 epoch 250 of 1000 Train loss: 0.6738 Train f_score 0.6802 Xval loss: 0.6982 Xval f_score 0.5797\n", "14:11:37 epoch 300 of 1000 Train loss: 0.6710 Train f_score 0.6750 Xval loss: 0.6984 Xval f_score 0.5588\n", "14:11:37 epoch 350 of 1000 Train loss: 0.6664 Train f_score 0.6774 Xval loss: 0.6984 Xval f_score 0.5758\n", "14:11:37 epoch 400 of 1000 Train loss: 0.6786 Train f_score 0.6561 Xval loss: 0.6995 Xval f_score 0.5672\n", "14:11:37 epoch 450 of 1000 Train loss: 0.6455 Train f_score 0.6964 Xval loss: 0.7006 Xval f_score 0.5373\n", "14:11:37 epoch 500 of 1000 Train loss: 0.6578 Train f_score 0.6805 Xval loss: 0.7002 Xval f_score 0.5294\n", "14:11:38 epoch 550 of 1000 Train loss: 0.6569 Train f_score 0.6827 Xval loss: 0.6975 Xval f_score 0.5294\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:11:38 epoch 600 of 1000 Train loss: 0.6602 Train f_score 0.6722 Xval loss: 0.6988 Xval f_score 0.5373\n", "14:11:38 epoch 650 of 1000 Train loss: 0.6582 Train f_score 0.6333 Xval loss: 0.6986 Xval f_score 0.5294\n", "14:11:38 epoch 700 of 1000 Train loss: 0.6655 Train f_score 0.6667 Xval loss: 0.7014 Xval f_score 0.5294\n", "14:11:38 epoch 750 of 1000 Train loss: 0.6683 Train f_score 0.6584 Xval loss: 0.7034 Xval f_score 0.5373\n", "14:11:38 epoch 800 of 1000 Train loss: 0.6676 Train f_score 0.6773 Xval loss: 0.7010 Xval f_score 0.5152\n", "14:11:38 epoch 850 of 1000 Train loss: 0.6770 Train f_score 0.6390 Xval loss: 0.7000 Xval f_score 0.5152\n", "14:11:39 epoch 900 of 1000 Train loss: 0.6764 Train f_score 0.6452 Xval loss: 0.7024 Xval f_score 0.5152\n", "14:11:39 epoch 950 of 1000 Train loss: 0.6447 Train f_score 0.6888 Xval loss: 0.7028 Xval f_score 0.5152\n", "Best Xval loss epoch 94, value 0.695912\n", "NN units 8\n", "Reg_penalty 0.00300000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.676, Train F1 0.685\n", "[[68 33]\n", " [35 74]]\n", "Final Xval Accuracy 0.592, Xval F1 0.408\n", "[[32 25]\n", " [ 4 10]]\n", "14:11:41 Starting\n", "14:11:42 epoch 0 of 1000 Train loss: 0.6950 Train f_score 0.4600 Xval loss: 0.6947 Xval f_score 0.4000\n", "14:11:42 epoch 50 of 1000 Train loss: 0.6710 Train f_score 0.6805 Xval loss: 0.6894 Xval f_score 0.5263\n", "14:11:43 epoch 100 of 1000 Train loss: 0.6438 Train f_score 0.6909 Xval loss: 0.6972 Xval f_score 0.4848\n", "14:11:43 epoch 150 of 1000 Train loss: 
0.6020 Train f_score 0.7175 Xval loss: 0.7138 Xval f_score 0.4776\n", "14:11:43 epoch 200 of 1000 Train loss: 0.5911 Train f_score 0.7000 Xval loss: 0.7275 Xval f_score 0.5000\n", "14:11:43 epoch 250 of 1000 Train loss: 0.5658 Train f_score 0.7059 Xval loss: 0.7416 Xval f_score 0.4848\n", "14:11:43 epoch 300 of 1000 Train loss: 0.5334 Train f_score 0.7511 Xval loss: 0.7526 Xval f_score 0.4687\n", "Best Xval loss epoch 50, value 0.689387\n", "NN units 16\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.767, Train F1 0.782\n", "[[73 19]\n", " [30 88]]\n", "Final Xval Accuracy 0.549, Xval F1 0.680\n", "[[ 5 1]\n", " [31 34]]\n", "14:11:45 Starting\n", "14:11:47 epoch 0 of 1000 Train loss: 0.6982 Train f_score 0.3432 Xval loss: 0.6959 Xval f_score 0.2308\n", "14:11:47 epoch 50 of 1000 Train loss: 0.6838 Train f_score 0.6484 Xval loss: 0.6926 Xval f_score 0.5897\n", "14:11:47 epoch 100 of 1000 Train loss: 0.6470 Train f_score 0.7048 Xval loss: 0.6902 Xval f_score 0.5217\n", "14:11:47 epoch 150 of 1000 Train loss: 0.6030 Train f_score 0.7297 Xval loss: 0.6959 Xval f_score 0.5429\n", "14:11:47 epoch 200 of 1000 Train loss: 0.5905 Train f_score 0.6762 Xval loss: 0.7027 Xval f_score 0.5294\n", "14:11:47 epoch 250 of 1000 Train loss: 0.5654 Train f_score 0.7136 Xval loss: 0.7139 Xval f_score 0.5714\n", "14:11:48 epoch 300 of 1000 Train loss: 0.5333 Train f_score 0.7383 Xval loss: 0.7281 Xval f_score 0.5294\n", "14:11:48 epoch 350 of 1000 Train loss: 0.5631 Train f_score 0.7000 Xval loss: 0.7383 Xval f_score 0.5294\n", "14:11:48 epoch 400 of 1000 Train loss: 0.5388 Train f_score 0.7256 Xval loss: 0.7542 Xval f_score 0.5217\n", "Best Xval loss epoch 85, value 0.689177\n", "NN units 16\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.800, Train F1 0.811\n", "[[78 17]\n", " [25 90]]\n", "Final Xval Accuracy 0.577, Xval F1 0.595\n", "[[19 13]\n", " [17 22]]\n", "14:11:51 Starting\n", "14:11:53 epoch 0 of 1000 Train loss: 0.6998 Train f_score 0.4466 Xval loss: 0.6981 Xval f_score 0.4545\n", "14:11:53 epoch 50 of 1000 Train loss: 0.6812 Train f_score 0.6667 Xval loss: 0.6977 Xval f_score 0.5747\n", "14:11:53 epoch 100 of 1000 Train loss: 0.6501 Train f_score 0.6784 Xval loss: 0.7079 Xval f_score 0.5152\n", "14:11:53 epoch 150 of 1000 Train loss: 0.6069 Train f_score 0.7022 Xval loss: 0.7166 Xval f_score 0.5075\n", "14:11:53 epoch 200 of 1000 Train loss: 0.5989 Train f_score 0.7085 Xval loss: 0.7254 Xval f_score 0.4848\n", "14:11:53 epoch 250 of 1000 Train loss: 0.5835 Train f_score 0.6952 Xval loss: 0.7323 Xval f_score 0.4848\n", "14:11:54 epoch 300 of 1000 Train loss: 0.5680 Train f_score 0.6884 Xval loss: 0.7423 Xval f_score 0.4923\n", "14:11:54 epoch 350 of 1000 Train loss: 0.5625 Train f_score 0.7182 Xval loss: 0.7542 Xval f_score 0.4923\n", "14:11:54 epoch 400 of 1000 Train loss: 0.5494 Train f_score 0.7500 Xval loss: 0.7653 Xval f_score 0.5152\n", "Best Xval loss epoch 34, value 0.696237\n", "NN units 16\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.805, Train F1 0.804\n", "[[85 23]\n", " [18 84]]\n", "Final Xval Accuracy 0.577, Xval F1 0.700\n", "[[ 6 0]\n", " [30 35]]\n", "14:11:56 Starting\n", "14:11:58 epoch 0 of 1000 Train loss: 0.7116 Train f_score 0.5185 Xval loss: 0.7103 Xval f_score 0.5538\n", "14:11:58 epoch 50 of 1000 Train loss: 0.6944 Train f_score 0.6693 Xval loss: 0.7032 Xval f_score 0.5647\n", "14:11:58 epoch 100 of 1000 Train loss: 0.6763 
Train f_score 0.6696 Xval loss: 0.7095 Xval f_score 0.5075\n", "14:11:58 epoch 150 of 1000 Train loss: 0.6585 Train f_score 0.6787 Xval loss: 0.7214 Xval f_score 0.5455\n", "14:11:58 epoch 200 of 1000 Train loss: 0.6348 Train f_score 0.6818 Xval loss: 0.7300 Xval f_score 0.5075\n", "14:11:58 epoch 250 of 1000 Train loss: 0.6102 Train f_score 0.7005 Xval loss: 0.7349 Xval f_score 0.4923\n", "14:11:59 epoch 300 of 1000 Train loss: 0.6184 Train f_score 0.6881 Xval loss: 0.7402 Xval f_score 0.4923\n", "14:11:59 epoch 350 of 1000 Train loss: 0.6075 Train f_score 0.6906 Xval loss: 0.7468 Xval f_score 0.4848\n", "14:11:59 epoch 400 of 1000 Train loss: 0.5954 Train f_score 0.6941 Xval loss: 0.7526 Xval f_score 0.4923\n", "14:11:59 epoch 450 of 1000 Train loss: 0.6106 Train f_score 0.6941 Xval loss: 0.7613 Xval f_score 0.5075\n", "14:11:59 epoch 500 of 1000 Train loss: 0.5679 Train f_score 0.7123 Xval loss: 0.7693 Xval f_score 0.5075\n", "Best Xval loss epoch 48, value 0.703211\n", "NN units 16\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.757, Train F1 0.761\n", "[[78 26]\n", " [25 81]]\n", "Final Xval Accuracy 0.592, Xval F1 0.525\n", "[[26 19]\n", " [10 16]]\n", "14:12:01 Starting\n", "14:12:03 epoch 0 of 1000 Train loss: 0.7474 Train f_score 0.5760 Xval loss: 0.7448 Xval f_score 0.5783\n", "14:12:03 epoch 50 of 1000 Train loss: 0.7022 Train f_score 0.6751 Xval loss: 0.7027 Xval f_score 0.6604\n", "14:12:03 epoch 100 of 1000 Train loss: 0.6935 Train f_score 0.6801 Xval loss: 0.6955 Xval f_score 0.6346\n", "14:12:03 epoch 150 of 1000 Train loss: 0.6882 Train f_score 0.6766 Xval loss: 0.6956 Xval f_score 0.5897\n", "14:12:04 epoch 200 of 1000 Train loss: 0.6870 Train f_score 0.6640 Xval loss: 0.6977 Xval f_score 0.5915\n", "14:12:04 epoch 250 of 1000 Train loss: 0.6732 Train f_score 0.6776 Xval loss: 0.7003 Xval f_score 0.5634\n", "14:12:04 epoch 300 of 1000 Train loss: 0.6632 Train f_score 0.6831 Xval loss: 0.7041 Xval f_score 0.5429\n", "14:12:04 epoch 350 of 1000 Train loss: 0.6609 Train f_score 0.6807 Xval loss: 0.7072 Xval f_score 0.5000\n", "14:12:04 epoch 400 of 1000 Train loss: 0.6662 Train f_score 0.6555 Xval loss: 0.7098 Xval f_score 0.5075\n", "14:12:04 epoch 450 of 1000 Train loss: 0.6517 Train f_score 0.6864 Xval loss: 0.7133 Xval f_score 0.5075\n", "14:12:04 epoch 500 of 1000 Train loss: 0.6567 Train f_score 0.7029 Xval loss: 0.7168 Xval f_score 0.5373\n", "14:12:05 epoch 550 of 1000 Train loss: 0.6615 Train f_score 0.6833 Xval loss: 0.7180 Xval f_score 0.5455\n", "14:12:05 epoch 600 of 1000 Train loss: 0.6680 Train f_score 0.6694 Xval loss: 0.7213 Xval f_score 0.5455\n", "14:12:05 epoch 650 of 1000 Train loss: 0.6616 Train f_score 0.6639 Xval loss: 0.7199 Xval f_score 0.5455\n", "14:12:05 epoch 700 of 1000 Train loss: 0.6859 Train f_score 0.6480 Xval loss: 0.7217 Xval f_score 0.5294\n", "14:12:05 epoch 750 of 1000 Train loss: 0.6607 Train f_score 0.6885 Xval loss: 0.7214 Xval f_score 0.5373\n", "14:12:05 epoch 800 of 1000 Train loss: 0.6731 Train f_score 0.6556 Xval loss: 0.7242 Xval f_score 0.5373\n", "14:12:05 epoch 850 of 1000 Train loss: 0.6702 Train f_score 0.6772 Xval loss: 0.7238 Xval f_score 0.5294\n", "14:12:05 epoch 900 of 1000 Train loss: 0.6495 Train f_score 0.6833 Xval loss: 0.7254 Xval f_score 0.5294\n", "14:12:06 epoch 950 of 1000 Train loss: 0.6602 Train f_score 0.6776 Xval loss: 0.7283 Xval f_score 0.5152\n", "Best Xval loss epoch 107, value 0.695346\n", "NN units 16\n", "Reg_penalty 0.00300000\n", "Dropout 
0.3330\n", "Activation relu\n", "Final Train Accuracy 0.686, Train F1 0.700\n", "[[67 30]\n", " [36 77]]\n", "Final Xval Accuracy 0.577, Xval F1 0.571\n", "[[21 15]\n", " [15 20]]\n", "14:12:08 Starting\n", "14:12:10 epoch 0 of 1000 Train loss: 0.6959 Train f_score 0.5783 Xval loss: 0.6892 Xval f_score 0.6067\n", "14:12:10 epoch 50 of 1000 Train loss: 0.6548 Train f_score 0.6903 Xval loss: 0.6858 Xval f_score 0.5676\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:12:10 epoch 100 of 1000 Train loss: 0.6109 Train f_score 0.6849 Xval loss: 0.6900 Xval f_score 0.4848\n", "14:12:10 epoch 150 of 1000 Train loss: 0.5640 Train f_score 0.7005 Xval loss: 0.7080 Xval f_score 0.5075\n", "14:12:10 epoch 200 of 1000 Train loss: 0.5493 Train f_score 0.7070 Xval loss: 0.7314 Xval f_score 0.5075\n", "14:12:10 epoch 250 of 1000 Train loss: 0.5018 Train f_score 0.7415 Xval loss: 0.7424 Xval f_score 0.4776\n", "Best Xval loss epoch 53, value 0.685693\n", "NN units 32\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.805, Train F1 0.813\n", "[[80 18]\n", " [23 89]]\n", "Final Xval Accuracy 0.577, Xval F1 0.500\n", "[[26 20]\n", " [10 15]]\n", "14:12:12 Starting\n", "14:12:14 epoch 0 of 1000 Train loss: 0.6985 Train f_score 0.5114 Xval loss: 0.6967 Xval f_score 0.5479\n", "14:12:15 epoch 50 of 1000 Train loss: 0.6603 Train f_score 0.6979 Xval loss: 0.6953 Xval f_score 0.5676\n", "14:12:15 epoch 100 of 1000 Train loss: 0.6142 Train f_score 0.7005 Xval loss: 0.7051 Xval f_score 0.5429\n", "14:12:15 epoch 150 of 1000 Train loss: 0.5791 Train f_score 0.7273 Xval loss: 0.7208 Xval f_score 0.5075\n", "14:12:15 epoch 200 of 1000 Train loss: 0.5258 Train f_score 0.7488 Xval loss: 0.7406 Xval f_score 0.5294\n", "Best Xval loss epoch 26, value 0.694056\n", "NN units 32\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.786, Train F1 0.803\n", "[[73 15]\n", " [30 92]]\n", "Final Xval Accuracy 0.577, Xval F1 0.516\n", "[[25 19]\n", " [11 16]]\n", "14:12:17 Starting\n", "14:12:19 epoch 0 of 1000 Train loss: 0.7029 Train f_score 0.5959 Xval loss: 0.7033 Xval f_score 0.5843\n", "14:12:19 epoch 50 of 1000 Train loss: 0.6708 Train f_score 0.6609 Xval loss: 0.7035 Xval f_score 0.5479\n", "14:12:19 epoch 100 of 1000 Train loss: 0.6288 Train f_score 0.6941 Xval loss: 0.7108 Xval f_score 0.5882\n", "14:12:20 epoch 150 of 1000 Train loss: 0.6000 Train f_score 0.7130 Xval loss: 0.7203 Xval f_score 0.5588\n", "14:12:20 epoch 200 of 1000 Train loss: 0.5742 Train f_score 0.7373 Xval loss: 0.7386 Xval f_score 0.5429\n", "14:12:20 epoch 250 of 1000 Train loss: 0.5374 Train f_score 0.7477 Xval loss: 0.7681 Xval f_score 0.5352\n", "Best Xval loss epoch 25, value 0.701768\n", "NN units 32\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.771, Train F1 0.730\n", "[[97 42]\n", " [ 6 65]]\n", "Final Xval Accuracy 0.592, Xval F1 0.540\n", "[[25 18]\n", " [11 17]]\n", "14:12:22 Starting\n", "14:12:24 epoch 0 of 1000 Train loss: 0.7332 Train f_score 0.4456 Xval loss: 0.7303 Xval f_score 0.4762\n", "14:12:24 epoch 50 of 1000 Train loss: 0.6916 Train f_score 0.6693 Xval loss: 0.7119 Xval f_score 0.5679\n", "14:12:24 epoch 100 of 1000 Train loss: 0.6672 Train f_score 0.6724 Xval loss: 0.7143 Xval f_score 0.5429\n", "14:12:24 epoch 150 of 1000 Train loss: 0.6379 Train f_score 0.7149 Xval loss: 0.7221 Xval f_score 0.5797\n", "14:12:25 epoch 200 of 1000 Train loss: 0.6381 Train f_score 0.6875 Xval 
loss: 0.7313 Xval f_score 0.5000\n", "14:12:25 epoch 250 of 1000 Train loss: 0.6149 Train f_score 0.7085 Xval loss: 0.7407 Xval f_score 0.5152\n", "14:12:25 epoch 300 of 1000 Train loss: 0.5935 Train f_score 0.7220 Xval loss: 0.7446 Xval f_score 0.4848\n", "14:12:25 epoch 350 of 1000 Train loss: 0.5792 Train f_score 0.7248 Xval loss: 0.7496 Xval f_score 0.5152\n", "14:12:25 epoch 400 of 1000 Train loss: 0.5724 Train f_score 0.7281 Xval loss: 0.7581 Xval f_score 0.5294\n", "14:12:25 epoch 450 of 1000 Train loss: 0.5362 Train f_score 0.7685 Xval loss: 0.7682 Xval f_score 0.5373\n", "Best Xval loss epoch 68, value 0.710715\n", "NN units 32\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.805, Train F1 0.800\n", "[[87 25]\n", " [16 82]]\n", "Final Xval Accuracy 0.577, Xval F1 0.667\n", "[[11 5]\n", " [25 30]]\n", "14:12:28 Starting\n", "14:12:30 epoch 0 of 1000 Train loss: 0.7966 Train f_score 0.5320 Xval loss: 0.7972 Xval f_score 0.4516\n", "14:12:30 epoch 50 of 1000 Train loss: 0.7123 Train f_score 0.6714 Xval loss: 0.7168 Xval f_score 0.6292\n", "14:12:30 epoch 100 of 1000 Train loss: 0.6882 Train f_score 0.7004 Xval loss: 0.7030 Xval f_score 0.5897\n", "14:12:30 epoch 150 of 1000 Train loss: 0.6820 Train f_score 0.6608 Xval loss: 0.7045 Xval f_score 0.5714\n", "14:12:30 epoch 200 of 1000 Train loss: 0.6749 Train f_score 0.6870 Xval loss: 0.7084 Xval f_score 0.5373\n", "14:12:30 epoch 250 of 1000 Train loss: 0.6504 Train f_score 0.7013 Xval loss: 0.7117 Xval f_score 0.5455\n", "14:12:31 epoch 300 of 1000 Train loss: 0.6695 Train f_score 0.6991 Xval loss: 0.7171 Xval f_score 0.5373\n", "14:12:31 epoch 350 of 1000 Train loss: 0.6667 Train f_score 0.6726 Xval loss: 0.7195 Xval f_score 0.5455\n", "14:12:31 epoch 400 of 1000 Train loss: 0.6495 Train f_score 0.6933 Xval loss: 0.7206 Xval f_score 0.5588\n", "14:12:31 epoch 450 of 1000 Train loss: 0.6428 Train f_score 0.6960 Xval loss: 0.7199 Xval f_score 0.5294\n", "14:12:31 epoch 500 of 1000 Train loss: 0.6387 Train f_score 0.6575 Xval loss: 0.7225 Xval f_score 0.5507\n", "14:12:31 epoch 550 of 1000 Train loss: 0.6376 Train f_score 0.6906 Xval loss: 0.7263 Xval f_score 0.5507\n", "14:12:31 epoch 600 of 1000 Train loss: 0.6224 Train f_score 0.7009 Xval loss: 0.7271 Xval f_score 0.5507\n", "14:12:32 epoch 650 of 1000 Train loss: 0.6499 Train f_score 0.7022 Xval loss: 0.7278 Xval f_score 0.5634\n", "14:12:32 epoch 700 of 1000 Train loss: 0.6428 Train f_score 0.6667 Xval loss: 0.7309 Xval f_score 0.5714\n", "14:12:32 epoch 750 of 1000 Train loss: 0.6545 Train f_score 0.6518 Xval loss: 0.7281 Xval f_score 0.5429\n", "14:12:32 epoch 800 of 1000 Train loss: 0.6333 Train f_score 0.6697 Xval loss: 0.7297 Xval f_score 0.5634\n", "14:12:32 epoch 850 of 1000 Train loss: 0.6378 Train f_score 0.6872 Xval loss: 0.7292 Xval f_score 0.5634\n", "14:12:32 epoch 900 of 1000 Train loss: 0.6438 Train f_score 0.6857 Xval loss: 0.7281 Xval f_score 0.5714\n", "14:12:32 epoch 950 of 1000 Train loss: 0.6312 Train f_score 0.7037 Xval loss: 0.7274 Xval f_score 0.5634\n", "Best Xval loss epoch 110, value 0.702660\n", "NN units 32\n", "Reg_penalty 0.00300000\n", "Dropout 0.3330\n", "Activation relu\n", "Final Train Accuracy 0.714, Train F1 0.694\n", "[[82 39]\n", " [21 68]]\n", "Final Xval Accuracy 0.606, Xval F1 0.632\n", "[[19 11]\n", " [17 24]]\n", "14:12:35 Starting\n", "14:12:37 epoch 0 of 1000 Train loss: 0.6907 Train f_score 0.6645 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:12:37 epoch 50 of 1000 Train loss: 0.6897 
Train f_score 0.6473 Xval loss: 0.6931 Xval f_score 0.6602\n", "14:12:37 epoch 100 of 1000 Train loss: 0.6900 Train f_score 0.6070 Xval loss: 0.6924 Xval f_score 0.6327\n", "14:12:37 epoch 150 of 1000 Train loss: 0.6850 Train f_score 0.6457 Xval loss: 0.6918 Xval f_score 0.5476\n", "14:12:37 epoch 200 of 1000 Train loss: 0.6758 Train f_score 0.6396 Xval loss: 0.6915 Xval f_score 0.5128\n", "14:12:37 epoch 250 of 1000 Train loss: 0.6769 Train f_score 0.6063 Xval loss: 0.6920 Xval f_score 0.5405\n", "14:12:38 epoch 300 of 1000 Train loss: 0.6694 Train f_score 0.6579 Xval loss: 0.6925 Xval f_score 0.5634\n", "14:12:38 epoch 350 of 1000 Train loss: 0.6790 Train f_score 0.6047 Xval loss: 0.6932 Xval f_score 0.5294\n", "14:12:38 epoch 400 of 1000 Train loss: 0.6633 Train f_score 0.6038 Xval loss: 0.6938 Xval f_score 0.5429\n", "14:12:38 epoch 450 of 1000 Train loss: 0.6672 Train f_score 0.6154 Xval loss: 0.6948 Xval f_score 0.5429\n", "14:12:38 epoch 500 of 1000 Train loss: 0.6569 Train f_score 0.6696 Xval loss: 0.6957 Xval f_score 0.5231\n", "14:12:38 epoch 550 of 1000 Train loss: 0.6459 Train f_score 0.6479 Xval loss: 0.6962 Xval f_score 0.5152\n", "14:12:38 epoch 600 of 1000 Train loss: 0.6597 Train f_score 0.6604 Xval loss: 0.6960 Xval f_score 0.4923\n", "14:12:39 epoch 650 of 1000 Train loss: 0.6627 Train f_score 0.6419 Xval loss: 0.6964 Xval f_score 0.4776\n", "14:12:39 epoch 700 of 1000 Train loss: 0.6327 Train f_score 0.6849 Xval loss: 0.6969 Xval f_score 0.4776\n", "14:12:39 epoch 750 of 1000 Train loss: 0.6457 Train f_score 0.6481 Xval loss: 0.6969 Xval f_score 0.4923\n", "14:12:39 epoch 800 of 1000 Train loss: 0.6462 Train f_score 0.5980 Xval loss: 0.6976 Xval f_score 0.4923\n", "14:12:39 epoch 850 of 1000 Train loss: 0.6202 Train f_score 0.6887 Xval loss: 0.6972 Xval f_score 0.4923\n", "14:12:39 epoch 900 of 1000 Train loss: 0.6463 Train f_score 0.6291 Xval loss: 0.6966 Xval f_score 0.4923\n", "14:12:39 epoch 950 of 1000 Train loss: 0.6420 Train f_score 0.6197 Xval loss: 0.6958 Xval f_score 0.4923\n", "Best Xval loss epoch 195, value 0.691517\n", "NN units 4\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.676, Train F1 0.676\n", "[[71 36]\n", " [32 71]]\n", "Final Xval Accuracy 0.577, Xval F1 0.595\n", "[[19 13]\n", " [17 22]]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:12:42 Starting\n", "14:12:45 epoch 0 of 1000 Train loss: 0.6933 Train f_score 0.6277 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:12:45 epoch 50 of 1000 Train loss: 0.6926 Train f_score 0.6465 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:12:45 epoch 100 of 1000 Train loss: 0.6906 Train f_score 0.6963 Xval loss: 0.6930 Xval f_score 0.6600\n", "14:12:46 epoch 150 of 1000 Train loss: 0.6837 Train f_score 0.6390 Xval loss: 0.6922 Xval f_score 0.5747\n", "14:12:46 epoch 200 of 1000 Train loss: 0.6829 Train f_score 0.5753 Xval loss: 0.6915 Xval f_score 0.5500\n", "14:12:46 epoch 250 of 1000 Train loss: 0.6802 Train f_score 0.6244 Xval loss: 0.6914 Xval f_score 0.5641\n", "14:12:46 epoch 300 of 1000 Train loss: 0.6639 Train f_score 0.6476 Xval loss: 0.6922 Xval f_score 0.5753\n", "14:12:46 epoch 350 of 1000 Train loss: 0.6803 Train f_score 0.5936 Xval loss: 0.6929 Xval f_score 0.5676\n", "14:12:46 epoch 400 of 1000 Train loss: 0.6605 Train f_score 0.6452 Xval loss: 0.6943 Xval f_score 0.5556\n", "14:12:46 epoch 450 of 1000 Train loss: 0.6681 Train f_score 0.6637 Xval loss: 0.6952 Xval f_score 0.5294\n", "14:12:47 epoch 500 of 1000 Train loss: 0.6553 
Train f_score 0.6029 Xval loss: 0.6973 Xval f_score 0.5429\n", "14:12:47 epoch 550 of 1000 Train loss: 0.6650 Train f_score 0.6147 Xval loss: 0.6977 Xval f_score 0.5507\n", "14:12:47 epoch 600 of 1000 Train loss: 0.6466 Train f_score 0.6512 Xval loss: 0.6988 Xval f_score 0.5429\n", "14:12:47 epoch 650 of 1000 Train loss: 0.6614 Train f_score 0.6330 Xval loss: 0.6995 Xval f_score 0.5294\n", "14:12:47 epoch 700 of 1000 Train loss: 0.6446 Train f_score 0.6432 Xval loss: 0.6994 Xval f_score 0.5152\n", "14:12:47 epoch 750 of 1000 Train loss: 0.6508 Train f_score 0.6726 Xval loss: 0.7004 Xval f_score 0.5152\n", "14:12:47 epoch 800 of 1000 Train loss: 0.6656 Train f_score 0.6083 Xval loss: 0.6999 Xval f_score 0.5152\n", "14:12:48 epoch 850 of 1000 Train loss: 0.6512 Train f_score 0.6518 Xval loss: 0.7006 Xval f_score 0.5075\n", "14:12:48 epoch 900 of 1000 Train loss: 0.6478 Train f_score 0.6147 Xval loss: 0.7008 Xval f_score 0.5075\n", "14:12:48 epoch 950 of 1000 Train loss: 0.6559 Train f_score 0.6514 Xval loss: 0.7022 Xval f_score 0.5075\n", "Best Xval loss epoch 238, value 0.691286\n", "NN units 4\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.676, Train F1 0.676\n", "[[71 36]\n", " [32 71]]\n", "Final Xval Accuracy 0.592, Xval F1 0.453\n", "[[30 23]\n", " [ 6 12]]\n", "14:12:50 Starting\n", "14:12:52 epoch 0 of 1000 Train loss: 0.6950 Train f_score 0.2953 Xval loss: 0.6943 Xval f_score 0.0000\n", "14:12:52 epoch 50 of 1000 Train loss: 0.6927 Train f_score 0.6598 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:12:53 epoch 100 of 1000 Train loss: 0.6920 Train f_score 0.6300 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:12:53 epoch 150 of 1000 Train loss: 0.6902 Train f_score 0.6212 Xval loss: 0.6936 Xval f_score 0.6400\n", "14:12:53 epoch 200 of 1000 Train loss: 0.6871 Train f_score 0.6393 Xval loss: 0.6934 Xval f_score 0.5652\n", "14:12:53 epoch 250 of 1000 Train loss: 0.6863 Train f_score 0.6033 Xval loss: 0.6933 Xval f_score 0.5610\n", "14:12:53 epoch 300 of 1000 Train loss: 0.6861 Train f_score 0.5753 Xval loss: 0.6931 Xval f_score 0.5750\n", "14:12:53 epoch 350 of 1000 Train loss: 0.6752 Train f_score 0.6188 Xval loss: 0.6932 Xval f_score 0.5974\n", "14:12:53 epoch 400 of 1000 Train loss: 0.6580 Train f_score 0.6481 Xval loss: 0.6935 Xval f_score 0.6027\n", "14:12:54 epoch 450 of 1000 Train loss: 0.6766 Train f_score 0.6071 Xval loss: 0.6938 Xval f_score 0.5429\n", "14:12:54 epoch 500 of 1000 Train loss: 0.6814 Train f_score 0.6404 Xval loss: 0.6941 Xval f_score 0.5429\n", "14:12:54 epoch 550 of 1000 Train loss: 0.6660 Train f_score 0.6083 Xval loss: 0.6945 Xval f_score 0.5429\n", "14:12:54 epoch 600 of 1000 Train loss: 0.6774 Train f_score 0.5929 Xval loss: 0.6959 Xval f_score 0.5217\n", "14:12:54 epoch 650 of 1000 Train loss: 0.6537 Train f_score 0.6578 Xval loss: 0.6974 Xval f_score 0.5217\n", "14:12:54 epoch 700 of 1000 Train loss: 0.6576 Train f_score 0.6359 Xval loss: 0.6984 Xval f_score 0.5429\n", "14:12:54 epoch 750 of 1000 Train loss: 0.6752 Train f_score 0.6204 Xval loss: 0.6996 Xval f_score 0.5507\n", "14:12:55 epoch 800 of 1000 Train loss: 0.6597 Train f_score 0.6484 Xval loss: 0.7008 Xval f_score 0.5294\n", "14:12:55 epoch 850 of 1000 Train loss: 0.6460 Train f_score 0.6518 Xval loss: 0.7018 Xval f_score 0.5294\n", "14:12:55 epoch 900 of 1000 Train loss: 0.6820 Train f_score 0.6204 Xval loss: 0.7031 Xval f_score 0.5507\n", "14:12:55 epoch 950 of 1000 Train loss: 0.6635 Train f_score 0.6222 Xval loss: 0.7034 Xval f_score 
0.5217\n", "Best Xval loss epoch 330, value 0.692995\n", "NN units 4\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.686, Train F1 0.683\n", "[[73 36]\n", " [30 71]]\n", "Final Xval Accuracy 0.592, Xval F1 0.491\n", "[[28 21]\n", " [ 8 14]]\n", "14:12:57 Starting\n", "14:12:59 epoch 0 of 1000 Train loss: 0.6977 Train f_score 0.1008 Xval loss: 0.6970 Xval f_score 0.0000\n", "14:13:00 epoch 50 of 1000 Train loss: 0.6929 Train f_score 0.6756 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:13:00 epoch 100 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:00 epoch 150 of 1000 Train loss: 0.6919 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:00 epoch 200 of 1000 Train loss: 0.6923 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:00 epoch 250 of 1000 Train loss: 0.6924 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:00 epoch 300 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:00 epoch 350 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:01 epoch 400 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:01 epoch 450 of 1000 Train loss: 0.6938 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:01 epoch 500 of 1000 Train loss: 0.6927 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:01 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:01 epoch 600 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:01 epoch 650 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:01 epoch 700 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:02 epoch 750 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:02 epoch 800 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:02 epoch 850 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:02 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:02 epoch 950 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "Best Xval loss epoch 585, value 0.693521\n", "NN units 4\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.552, Train F1 0.678\n", "[[17 8]\n", " [86 99]]\n", "Final Xval Accuracy 0.592, Xval F1 0.431\n", "[[31 24]\n", " [ 5 11]]\n", "14:13:04 Starting\n", "14:13:07 epoch 0 of 1000 Train loss: 0.7069 Train f_score 0.6237 Xval loss: 0.7059 Xval f_score 0.6604\n", "14:13:07 epoch 50 of 1000 Train loss: 0.6943 Train f_score 0.6751 Xval loss: 0.6953 Xval f_score 0.6604\n", "14:13:07 epoch 100 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:07 epoch 150 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:07 epoch 200 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:07 epoch 250 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:08 epoch 300 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval 
loss: 0.6936 Xval f_score 0.6604\n", "14:13:08 epoch 350 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:08 epoch 400 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:08 epoch 450 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:08 epoch 500 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:08 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:13:08 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:08 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:09 epoch 700 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:09 epoch 750 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:09 epoch 800 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:09 epoch 850 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:09 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:09 epoch 950 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "Best Xval loss epoch 873, value 0.693610\n", "NN units 4\n", "Reg_penalty 0.00300000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.519, Train F1 0.466\n", "[[65 63]\n", " [38 44]]\n", "Final Xval Accuracy 0.521, Xval F1 0.646\n", "[[ 6 4]\n", " [30 31]]\n", "14:13:12 Starting\n", "14:13:14 epoch 0 of 1000 Train loss: 0.6940 Train f_score 0.3171 Xval loss: 0.6931 Xval f_score 0.0000\n", "14:13:14 epoch 50 of 1000 Train loss: 0.6900 Train f_score 0.6335 Xval loss: 0.6928 Xval f_score 0.6667\n", "14:13:14 epoch 100 of 1000 Train loss: 0.6849 Train f_score 0.6070 Xval loss: 0.6915 Xval f_score 0.5870\n", "14:13:14 epoch 150 of 1000 Train loss: 0.6820 Train f_score 0.5965 Xval loss: 0.6902 Xval f_score 0.5679\n", "14:13:14 epoch 200 of 1000 Train loss: 0.6819 Train f_score 0.5596 Xval loss: 0.6901 Xval f_score 0.5205\n", "14:13:15 epoch 250 of 1000 Train loss: 0.6662 Train f_score 0.6267 Xval loss: 0.6911 Xval f_score 0.5429\n", "14:13:15 epoch 300 of 1000 Train loss: 0.6638 Train f_score 0.6756 Xval loss: 0.6928 Xval f_score 0.5507\n", "14:13:15 epoch 350 of 1000 Train loss: 0.6645 Train f_score 0.6161 Xval loss: 0.6939 Xval f_score 0.5507\n", "14:13:15 epoch 400 of 1000 Train loss: 0.6603 Train f_score 0.6429 Xval loss: 0.6952 Xval f_score 0.5231\n", "14:13:15 epoch 450 of 1000 Train loss: 0.6433 Train f_score 0.6518 Xval loss: 0.6965 Xval f_score 0.5000\n", "14:13:15 epoch 500 of 1000 Train loss: 0.6317 Train f_score 0.6445 Xval loss: 0.6976 Xval f_score 0.4667\n", "14:13:15 epoch 550 of 1000 Train loss: 0.6529 Train f_score 0.6479 Xval loss: 0.6976 Xval f_score 0.4667\n", "14:13:16 epoch 600 of 1000 Train loss: 0.6423 Train f_score 0.6083 Xval loss: 0.6976 Xval f_score 0.4667\n", "14:13:16 epoch 650 of 1000 Train loss: 0.6300 Train f_score 0.6667 Xval loss: 0.6968 Xval f_score 0.4590\n", "14:13:16 epoch 700 of 1000 Train loss: 0.6251 Train f_score 0.6512 Xval loss: 0.6961 Xval f_score 0.4516\n", "14:13:16 epoch 750 of 1000 Train loss: 0.6419 Train f_score 0.6729 Xval 
loss: 0.6964 Xval f_score 0.4516\n", "14:13:16 epoch 800 of 1000 Train loss: 0.6148 Train f_score 0.7162 Xval loss: 0.6964 Xval f_score 0.4444\n", "14:13:16 epoch 850 of 1000 Train loss: 0.6375 Train f_score 0.6296 Xval loss: 0.6959 Xval f_score 0.4375\n", "14:13:16 epoch 900 of 1000 Train loss: 0.6107 Train f_score 0.6636 Xval loss: 0.6951 Xval f_score 0.4375\n", "14:13:17 epoch 950 of 1000 Train loss: 0.6262 Train f_score 0.6981 Xval loss: 0.6957 Xval f_score 0.4615\n", "Best Xval loss epoch 197, value 0.690071\n", "NN units 8\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.676, Train F1 0.609\n", "[[89 54]\n", " [14 53]]\n", "Final Xval Accuracy 0.577, Xval F1 0.643\n", "[[14 8]\n", " [22 27]]\n", "14:13:19 Starting\n", "14:13:21 epoch 0 of 1000 Train loss: 0.6959 Train f_score 0.5962 Xval loss: 0.6954 Xval f_score 0.6476\n", "14:13:21 epoch 50 of 1000 Train loss: 0.6946 Train f_score 0.5887 Xval loss: 0.6940 Xval f_score 0.6400\n", "14:13:22 epoch 100 of 1000 Train loss: 0.6875 Train f_score 0.6047 Xval loss: 0.6931 Xval f_score 0.5517\n", "14:13:22 epoch 150 of 1000 Train loss: 0.6812 Train f_score 0.5974 Xval loss: 0.6927 Xval f_score 0.5783\n", "14:13:22 epoch 200 of 1000 Train loss: 0.6772 Train f_score 0.5956 Xval loss: 0.6929 Xval f_score 0.5641\n", "14:13:22 epoch 250 of 1000 Train loss: 0.6727 Train f_score 0.6053 Xval loss: 0.6939 Xval f_score 0.5753\n", "14:13:22 epoch 300 of 1000 Train loss: 0.6651 Train f_score 0.6267 Xval loss: 0.6943 Xval f_score 0.5507\n", "14:13:22 epoch 350 of 1000 Train loss: 0.6602 Train f_score 0.6244 Xval loss: 0.6957 Xval f_score 0.5588\n", "14:13:22 epoch 400 of 1000 Train loss: 0.6652 Train f_score 0.6058 Xval loss: 0.6970 Xval f_score 0.5538\n", "14:13:23 epoch 450 of 1000 Train loss: 0.6662 Train f_score 0.6239 Xval loss: 0.6990 Xval f_score 0.5538\n", "14:13:23 epoch 500 of 1000 Train loss: 0.6691 Train f_score 0.6429 Xval loss: 0.6997 Xval f_score 0.5455\n", "14:13:23 epoch 550 of 1000 Train loss: 0.6533 Train f_score 0.6419 Xval loss: 0.7007 Xval f_score 0.5152\n", "14:13:23 epoch 600 of 1000 Train loss: 0.6501 Train f_score 0.6575 Xval loss: 0.7017 Xval f_score 0.4848\n", "14:13:23 epoch 650 of 1000 Train loss: 0.6620 Train f_score 0.6233 Xval loss: 0.7010 Xval f_score 0.4687\n", "14:13:23 epoch 700 of 1000 Train loss: 0.6345 Train f_score 0.6698 Xval loss: 0.7015 Xval f_score 0.4687\n", "14:13:23 epoch 750 of 1000 Train loss: 0.6573 Train f_score 0.6667 Xval loss: 0.7017 Xval f_score 0.4687\n", "14:13:24 epoch 800 of 1000 Train loss: 0.6397 Train f_score 0.6636 Xval loss: 0.7026 Xval f_score 0.4923\n", "14:13:24 epoch 850 of 1000 Train loss: 0.6535 Train f_score 0.6575 Xval loss: 0.7037 Xval f_score 0.4923\n", "14:13:24 epoch 900 of 1000 Train loss: 0.6335 Train f_score 0.6210 Xval loss: 0.7042 Xval f_score 0.4923\n", "14:13:24 epoch 950 of 1000 Train loss: 0.6327 Train f_score 0.6636 Xval loss: 0.7051 Xval f_score 0.4923\n", "Best Xval loss epoch 141, value 0.692669\n", "NN units 8\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.676, Train F1 0.688\n", "[[67 32]\n", " [36 75]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:13:27 Starting\n", "14:13:29 epoch 0 of 1000 Train loss: 0.6996 Train f_score 0.0522 Xval loss: 0.6963 Xval f_score 0.0000\n", "14:13:29 epoch 50 of 1000 Train loss: 0.6966 Train f_score 0.5971 Xval loss: 0.6948 Xval f_score 0.6604\n", "14:13:29 epoch 100 of 1000 Train loss: 0.6919 
Train f_score 0.6468 Xval loss: 0.6945 Xval f_score 0.6535\n", "14:13:29 epoch 150 of 1000 Train loss: 0.6890 Train f_score 0.6111 Xval loss: 0.6944 Xval f_score 0.6122\n", "14:13:29 epoch 200 of 1000 Train loss: 0.6888 Train f_score 0.5907 Xval loss: 0.6944 Xval f_score 0.5610\n", "14:13:30 epoch 250 of 1000 Train loss: 0.6837 Train f_score 0.5948 Xval loss: 0.6947 Xval f_score 0.5432\n", "14:13:30 epoch 300 of 1000 Train loss: 0.6835 Train f_score 0.6121 Xval loss: 0.6953 Xval f_score 0.5714\n", "14:13:30 epoch 350 of 1000 Train loss: 0.6768 Train f_score 0.6140 Xval loss: 0.6959 Xval f_score 0.5974\n", "14:13:30 epoch 400 of 1000 Train loss: 0.6665 Train f_score 0.6607 Xval loss: 0.6969 Xval f_score 0.5634\n", "14:13:30 epoch 450 of 1000 Train loss: 0.6752 Train f_score 0.6460 Xval loss: 0.6975 Xval f_score 0.5634\n", "14:13:30 epoch 500 of 1000 Train loss: 0.6486 Train f_score 0.6847 Xval loss: 0.6986 Xval f_score 0.5634\n", "14:13:30 epoch 550 of 1000 Train loss: 0.6662 Train f_score 0.6355 Xval loss: 0.6988 Xval f_score 0.5634\n", "14:13:31 epoch 600 of 1000 Train loss: 0.6508 Train f_score 0.6844 Xval loss: 0.6998 Xval f_score 0.5217\n", "14:13:31 epoch 650 of 1000 Train loss: 0.6708 Train f_score 0.6348 Xval loss: 0.7005 Xval f_score 0.5294\n", "14:13:31 epoch 700 of 1000 Train loss: 0.6495 Train f_score 0.6699 Xval loss: 0.7011 Xval f_score 0.5373\n", "14:13:31 epoch 750 of 1000 Train loss: 0.6483 Train f_score 0.6301 Xval loss: 0.7011 Xval f_score 0.5294\n", "14:13:31 epoch 800 of 1000 Train loss: 0.6591 Train f_score 0.6667 Xval loss: 0.7023 Xval f_score 0.5075\n", "14:13:31 epoch 850 of 1000 Train loss: 0.6444 Train f_score 0.6577 Xval loss: 0.7038 Xval f_score 0.5152\n", "14:13:31 epoch 900 of 1000 Train loss: 0.6522 Train f_score 0.6364 Xval loss: 0.7041 Xval f_score 0.5152\n", "14:13:32 epoch 950 of 1000 Train loss: 0.6344 Train f_score 0.7018 Xval loss: 0.7046 Xval f_score 0.5075\n", "Best Xval loss epoch 190, value 0.694346\n", "NN units 8\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.681, Train F1 0.688\n", "[[69 33]\n", " [34 74]]\n", "Final Xval Accuracy 0.592, Xval F1 0.473\n", "[[29 22]\n", " [ 7 13]]\n", "14:13:34 Starting\n", "14:13:36 epoch 0 of 1000 Train loss: 0.7039 Train f_score 0.6197 Xval loss: 0.7028 Xval f_score 0.6604\n", "14:13:37 epoch 50 of 1000 Train loss: 0.6934 Train f_score 0.6479 Xval loss: 0.6956 Xval f_score 0.6604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:13:37 epoch 100 of 1000 Train loss: 0.6930 Train f_score 0.6419 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:13:37 epoch 150 of 1000 Train loss: 0.6927 Train f_score 0.6515 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:37 epoch 200 of 1000 Train loss: 0.6931 Train f_score 0.6667 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:37 epoch 250 of 1000 Train loss: 0.6935 Train f_score 0.6667 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:37 epoch 300 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:37 epoch 350 of 1000 Train loss: 0.6926 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:38 epoch 400 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:38 epoch 450 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:38 epoch 500 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:38 epoch 550 of 1000 Train loss: 0.6933 
Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:38 epoch 600 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:38 epoch 650 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:38 epoch 700 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:39 epoch 750 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:39 epoch 800 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:39 epoch 850 of 1000 Train loss: 0.6936 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:39 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:13:39 epoch 950 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "Best Xval loss epoch 944, value 0.693605\n", "NN units 8\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.543, Train F1 0.478\n", "[[70 63]\n", " [33 44]]\n", "Final Xval Accuracy 0.676, Xval F1 0.623\n", "[[29 16]\n", " [ 7 19]]\n", "14:13:42 Starting\n", "14:13:44 epoch 0 of 1000 Train loss: 0.7192 Train f_score 0.5809 Xval loss: 0.7179 Xval f_score 0.6538\n", "14:13:44 epoch 50 of 1000 Train loss: 0.6990 Train f_score 0.6286 Xval loss: 0.6980 Xval f_score 0.6604\n", "14:13:44 epoch 100 of 1000 Train loss: 0.6934 Train f_score 0.6602 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:13:45 epoch 150 of 1000 Train loss: 0.6941 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:13:45 epoch 200 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:45 epoch 250 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:45 epoch 300 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:45 epoch 350 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:45 epoch 400 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:45 epoch 450 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:13:46 epoch 500 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:46 epoch 550 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:46 epoch 600 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:46 epoch 650 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:46 epoch 700 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:46 epoch 750 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:46 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:47 epoch 850 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:47 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:13:47 epoch 950 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "Best Xval loss epoch 682, value 0.693624\n", "NN units 8\n", "Reg_penalty 0.00300000\n", "Dropout 
0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.538, Train F1 0.673\n", "[[ 13 7]\n", " [ 90 100]]\n", "Final Xval Accuracy 0.507, Xval F1 0.667\n", "[[ 1 0]\n", " [35 35]]\n", "14:13:50 Starting\n", "14:13:52 epoch 0 of 1000 Train loss: 0.6974 Train f_score 0.1719 Xval loss: 0.6931 Xval f_score 0.0000\n", "14:13:52 epoch 50 of 1000 Train loss: 0.6857 Train f_score 0.6529 Xval loss: 0.6921 Xval f_score 0.6087\n", "14:13:52 epoch 100 of 1000 Train loss: 0.6771 Train f_score 0.6435 Xval loss: 0.6907 Xval f_score 0.5714\n", "14:13:52 epoch 150 of 1000 Train loss: 0.6624 Train f_score 0.6484 Xval loss: 0.6910 Xval f_score 0.5070\n", "14:13:52 epoch 200 of 1000 Train loss: 0.6602 Train f_score 0.6267 Xval loss: 0.6929 Xval f_score 0.5217\n", "14:13:52 epoch 250 of 1000 Train loss: 0.6596 Train f_score 0.6267 Xval loss: 0.6949 Xval f_score 0.5373\n", "14:13:53 epoch 300 of 1000 Train loss: 0.6618 Train f_score 0.6216 Xval loss: 0.6970 Xval f_score 0.5455\n", "14:13:53 epoch 350 of 1000 Train loss: 0.6342 Train f_score 0.6449 Xval loss: 0.6992 Xval f_score 0.5397\n", "14:13:53 epoch 400 of 1000 Train loss: 0.6554 Train f_score 0.6479 Xval loss: 0.7005 Xval f_score 0.5312\n", "14:13:53 epoch 450 of 1000 Train loss: 0.6415 Train f_score 0.6385 Xval loss: 0.7019 Xval f_score 0.5000\n", "14:13:53 epoch 500 of 1000 Train loss: 0.6363 Train f_score 0.6575 Xval loss: 0.7035 Xval f_score 0.5000\n", "14:13:53 epoch 550 of 1000 Train loss: 0.6437 Train f_score 0.6047 Xval loss: 0.7035 Xval f_score 0.5000\n", "14:13:53 epoch 600 of 1000 Train loss: 0.6335 Train f_score 0.6667 Xval loss: 0.7041 Xval f_score 0.5000\n", "14:13:54 epoch 650 of 1000 Train loss: 0.6287 Train f_score 0.6759 Xval loss: 0.7039 Xval f_score 0.5000\n", "14:13:54 epoch 700 of 1000 Train loss: 0.6429 Train f_score 0.6937 Xval loss: 0.7056 Xval f_score 0.4923\n", "14:13:54 epoch 750 of 1000 Train loss: 0.6276 Train f_score 0.6514 Xval loss: 0.7053 Xval f_score 0.4923\n", "14:13:54 epoch 800 of 1000 Train loss: 0.6305 Train f_score 0.6351 Xval loss: 0.7057 Xval f_score 0.4923\n", "14:13:54 epoch 850 of 1000 Train loss: 0.6146 Train f_score 0.6948 Xval loss: 0.7065 Xval f_score 0.4687\n", "14:13:54 epoch 900 of 1000 Train loss: 0.6000 Train f_score 0.6759 Xval loss: 0.7059 Xval f_score 0.4923\n", "14:13:54 epoch 950 of 1000 Train loss: 0.6179 Train f_score 0.6759 Xval loss: 0.7061 Xval f_score 0.4923\n", "Best Xval loss epoch 119, value 0.690468\n", "NN units 16\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.681, Train F1 0.617\n", "[[89 53]\n", " [14 54]]\n", "Final Xval Accuracy 0.592, Xval F1 0.603\n", "[[20 13]\n", " [16 22]]\n", "14:13:57 Starting\n", "14:14:00 epoch 0 of 1000 Train loss: 0.6971 Train f_score 0.6465 Xval loss: 0.6962 Xval f_score 0.6604\n", "14:14:00 epoch 50 of 1000 Train loss: 0.6907 Train f_score 0.6212 Xval loss: 0.6937 Xval f_score 0.6400\n", "14:14:00 epoch 100 of 1000 Train loss: 0.6846 Train f_score 0.6375 Xval loss: 0.6928 Xval f_score 0.5714\n", "14:14:00 epoch 150 of 1000 Train loss: 0.6771 Train f_score 0.6396 Xval loss: 0.6928 Xval f_score 0.5405\n", "14:14:00 epoch 200 of 1000 Train loss: 0.6698 Train f_score 0.6106 Xval loss: 0.6939 Xval f_score 0.5676\n", "14:14:00 epoch 250 of 1000 Train loss: 0.6780 Train f_score 0.6216 Xval loss: 0.6957 Xval f_score 0.5507\n", "14:14:00 epoch 300 of 1000 Train loss: 0.6629 Train f_score 0.6326 Xval loss: 0.6973 Xval f_score 0.5588\n", "14:14:01 epoch 350 of 1000 Train loss: 0.6506 Train f_score 0.6638 
Xval loss: 0.6993 Xval f_score 0.5455\n", "14:14:01 epoch 400 of 1000 Train loss: 0.6535 Train f_score 0.6636 Xval loss: 0.7010 Xval f_score 0.5455\n", "14:14:01 epoch 450 of 1000 Train loss: 0.6471 Train f_score 0.6636 Xval loss: 0.7027 Xval f_score 0.5231\n", "14:14:01 epoch 500 of 1000 Train loss: 0.6386 Train f_score 0.6574 Xval loss: 0.7039 Xval f_score 0.5000\n", "14:14:01 epoch 550 of 1000 Train loss: 0.6399 Train f_score 0.6606 Xval loss: 0.7051 Xval f_score 0.4923\n", "14:14:01 epoch 600 of 1000 Train loss: 0.6385 Train f_score 0.6452 Xval loss: 0.7060 Xval f_score 0.4923\n", "14:14:01 epoch 650 of 1000 Train loss: 0.6366 Train f_score 0.6449 Xval loss: 0.7063 Xval f_score 0.4848\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:14:02 epoch 700 of 1000 Train loss: 0.6318 Train f_score 0.6729 Xval loss: 0.7069 Xval f_score 0.4848\n", "14:14:02 epoch 750 of 1000 Train loss: 0.6330 Train f_score 0.6890 Xval loss: 0.7074 Xval f_score 0.4848\n", "14:14:02 epoch 800 of 1000 Train loss: 0.6472 Train f_score 0.6449 Xval loss: 0.7083 Xval f_score 0.4848\n", "14:14:02 epoch 850 of 1000 Train loss: 0.6144 Train f_score 0.7014 Xval loss: 0.7094 Xval f_score 0.5075\n", "14:14:02 epoch 900 of 1000 Train loss: 0.6486 Train f_score 0.6396 Xval loss: 0.7106 Xval f_score 0.4848\n", "14:14:02 epoch 950 of 1000 Train loss: 0.6367 Train f_score 0.6607 Xval loss: 0.7106 Xval f_score 0.4848\n", "Best Xval loss epoch 112, value 0.692562\n", "NN units 16\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.671, Train F1 0.679\n", "[[68 34]\n", " [35 73]]\n", "Final Xval Accuracy 0.577, Xval F1 0.571\n", "[[21 15]\n", " [15 20]]\n", "14:14:05 Starting\n", "14:14:07 epoch 0 of 1000 Train loss: 0.6997 Train f_score 0.6689 Xval loss: 0.7001 Xval f_score 0.6604\n", "14:14:08 epoch 50 of 1000 Train loss: 0.6950 Train f_score 0.6000 Xval loss: 0.6949 Xval f_score 0.6327\n", "14:14:08 epoch 100 of 1000 Train loss: 0.6932 Train f_score 0.5772 Xval loss: 0.6945 Xval f_score 0.5870\n", "14:14:08 epoch 150 of 1000 Train loss: 0.6796 Train f_score 0.6667 Xval loss: 0.6947 Xval f_score 0.5679\n", "14:14:08 epoch 200 of 1000 Train loss: 0.6823 Train f_score 0.6102 Xval loss: 0.6956 Xval f_score 0.5641\n", "14:14:08 epoch 250 of 1000 Train loss: 0.6892 Train f_score 0.6452 Xval loss: 0.6965 Xval f_score 0.5946\n", "14:14:08 epoch 300 of 1000 Train loss: 0.6792 Train f_score 0.6161 Xval loss: 0.6982 Xval f_score 0.5634\n", "14:14:08 epoch 350 of 1000 Train loss: 0.6644 Train f_score 0.6359 Xval loss: 0.6991 Xval f_score 0.5588\n", "14:14:09 epoch 400 of 1000 Train loss: 0.6679 Train f_score 0.6226 Xval loss: 0.7010 Xval f_score 0.5672\n", "14:14:09 epoch 450 of 1000 Train loss: 0.6745 Train f_score 0.6637 Xval loss: 0.7031 Xval f_score 0.5758\n", "14:14:09 epoch 500 of 1000 Train loss: 0.6715 Train f_score 0.6326 Xval loss: 0.7043 Xval f_score 0.5758\n", "14:14:09 epoch 550 of 1000 Train loss: 0.6635 Train f_score 0.6111 Xval loss: 0.7057 Xval f_score 0.5758\n", "14:14:09 epoch 600 of 1000 Train loss: 0.6552 Train f_score 0.6291 Xval loss: 0.7067 Xval f_score 0.5672\n", "14:14:09 epoch 650 of 1000 Train loss: 0.6536 Train f_score 0.6489 Xval loss: 0.7078 Xval f_score 0.5588\n", "14:14:09 epoch 700 of 1000 Train loss: 0.6499 Train f_score 0.6355 Xval loss: 0.7082 Xval f_score 0.5455\n", "14:14:10 epoch 750 of 1000 Train loss: 0.6480 Train f_score 0.6575 Xval loss: 0.7093 Xval f_score 0.5152\n", "14:14:10 epoch 800 of 1000 Train loss: 0.6485 Train f_score 
0.6419 Xval loss: 0.7098 Xval f_score 0.4923\n", "14:14:10 epoch 850 of 1000 Train loss: 0.6520 Train f_score 0.6381 Xval loss: 0.7113 Xval f_score 0.5152\n", "14:14:10 epoch 900 of 1000 Train loss: 0.6683 Train f_score 0.5922 Xval loss: 0.7115 Xval f_score 0.4762\n", "14:14:10 epoch 950 of 1000 Train loss: 0.6584 Train f_score 0.6351 Xval loss: 0.7123 Xval f_score 0.5000\n", "Best Xval loss epoch 115, value 0.694373\n", "NN units 16\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.667, Train F1 0.670\n", "[[69 36]\n", " [34 71]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:14:15 Starting\n", "14:14:17 epoch 0 of 1000 Train loss: 0.7150 Train f_score 0.6558 Xval loss: 0.7127 Xval f_score 0.6604\n", "14:14:17 epoch 50 of 1000 Train loss: 0.6998 Train f_score 0.5659 Xval loss: 0.6972 Xval f_score 0.6604\n", "14:14:17 epoch 100 of 1000 Train loss: 0.6908 Train f_score 0.6370 Xval loss: 0.6946 Xval f_score 0.6604\n", "14:14:18 epoch 150 of 1000 Train loss: 0.6942 Train f_score 0.5891 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:14:18 epoch 200 of 1000 Train loss: 0.6933 Train f_score 0.6573 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:14:18 epoch 250 of 1000 Train loss: 0.6928 Train f_score 0.6667 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:14:18 epoch 300 of 1000 Train loss: 0.6928 Train f_score 0.6548 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:14:18 epoch 350 of 1000 Train loss: 0.6928 Train f_score 0.6551 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:14:18 epoch 400 of 1000 Train loss: 0.6926 Train f_score 0.6456 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:14:18 epoch 450 of 1000 Train loss: 0.6932 Train f_score 0.6711 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:14:19 epoch 500 of 1000 Train loss: 0.6929 Train f_score 0.6801 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:14:19 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6389 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:14:19 epoch 600 of 1000 Train loss: 0.6917 Train f_score 0.6421 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:14:19 epoch 650 of 1000 Train loss: 0.6933 Train f_score 0.5992 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:14:19 epoch 700 of 1000 Train loss: 0.6908 Train f_score 0.6332 Xval loss: 0.6945 Xval f_score 0.6604\n", "14:14:19 epoch 750 of 1000 Train loss: 0.6961 Train f_score 0.5425 Xval loss: 0.6944 Xval f_score 0.6408\n", "14:14:20 epoch 800 of 1000 Train loss: 0.6926 Train f_score 0.5920 Xval loss: 0.6947 Xval f_score 0.6408\n", "14:14:20 epoch 850 of 1000 Train loss: 0.6938 Train f_score 0.5656 Xval loss: 0.6948 Xval f_score 0.6400\n", "14:14:20 epoch 900 of 1000 Train loss: 0.6924 Train f_score 0.5761 Xval loss: 0.6947 Xval f_score 0.6186\n", "14:14:20 epoch 950 of 1000 Train loss: 0.6931 Train f_score 0.5641 Xval loss: 0.6944 Xval f_score 0.5581\n", "Best Xval loss epoch 389, value 0.693982\n", "NN units 16\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.657, Train F1 0.690\n", "[[58 27]\n", " [45 80]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:14:23 Starting\n", "14:14:25 epoch 0 of 1000 Train loss: 0.7508 Train f_score 0.3864 Xval loss: 0.7482 Xval f_score 0.0541\n", "14:14:25 epoch 50 of 1000 Train loss: 0.7057 Train f_score 0.5692 Xval loss: 0.7039 Xval f_score 0.6604\n", "14:14:25 epoch 100 of 1000 Train loss: 0.6939 Train f_score 0.6414 Xval loss: 0.6948 Xval f_score 0.6604\n", "14:14:26 epoch 150 of 1000 
Train loss: 0.6938 Train f_score 0.6535 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:14:26 epoch 200 of 1000 Train loss: 0.6936 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:14:26 epoch 250 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:14:26 epoch 300 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:14:26 epoch 350 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:14:26 epoch 400 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:14:27 epoch 450 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:14:27 epoch 500 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:14:27 epoch 550 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:14:27 epoch 600 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:14:27 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:14:27 epoch 700 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:14:27 epoch 750 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:14:28 epoch 800 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:14:28 epoch 850 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:14:28 epoch 900 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:14:28 epoch 950 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "Best Xval loss epoch 395, value 0.693689\n", "NN units 16\n", "Reg_penalty 0.00300000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.514, Train F1 0.677\n", "[[ 1 0]\n", " [102 107]]\n", "Final Xval Accuracy 0.507, Xval F1 0.653\n", "[[ 3 2]\n", " [33 33]]\n", "14:14:31 Starting\n", "14:14:33 epoch 0 of 1000 Train loss: 0.6987 Train f_score 0.4022 Xval loss: 0.6923 Xval f_score 0.1053\n", "14:14:33 epoch 50 of 1000 Train loss: 0.6860 Train f_score 0.6076 Xval loss: 0.6907 Xval f_score 0.5476\n", "14:14:34 epoch 100 of 1000 Train loss: 0.6645 Train f_score 0.6344 Xval loss: 0.6909 Xval f_score 0.5405\n", "14:14:34 epoch 150 of 1000 Train loss: 0.6589 Train f_score 0.6516 Xval loss: 0.6933 Xval f_score 0.5294\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:14:34 epoch 200 of 1000 Train loss: 0.6536 Train f_score 0.6140 Xval loss: 0.6962 Xval f_score 0.5455\n", "14:14:34 epoch 250 of 1000 Train loss: 0.6514 Train f_score 0.6449 Xval loss: 0.6996 Xval f_score 0.5312\n", "14:14:34 epoch 300 of 1000 Train loss: 0.6413 Train f_score 0.6190 Xval loss: 0.7037 Xval f_score 0.5161\n", "14:14:34 epoch 350 of 1000 Train loss: 0.6487 Train f_score 0.6055 Xval loss: 0.7052 Xval f_score 0.4918\n", "14:14:35 epoch 400 of 1000 Train loss: 0.6344 Train f_score 0.6455 Xval loss: 0.7077 Xval f_score 0.5397\n", "14:14:35 epoch 450 of 1000 Train loss: 0.6475 Train f_score 0.6606 Xval loss: 0.7082 Xval f_score 0.5000\n", "14:14:35 epoch 500 of 1000 Train loss: 0.6365 Train f_score 0.6385 Xval loss: 0.7107 Xval f_score 0.5152\n", "14:14:35 epoch 550 of 1000 Train loss: 0.6298 Train f_score 0.6481 Xval loss: 0.7115 Xval f_score 0.4848\n", "14:14:35 epoch 600 of 
1000 Train loss: 0.6132 Train f_score 0.6698 Xval loss: 0.7118 Xval f_score 0.4923\n", "14:14:35 epoch 650 of 1000 Train loss: 0.6214 Train f_score 0.6575 Xval loss: 0.7127 Xval f_score 0.4848\n", "14:14:36 epoch 700 of 1000 Train loss: 0.6014 Train f_score 0.6574 Xval loss: 0.7129 Xval f_score 0.4848\n", "14:14:36 epoch 750 of 1000 Train loss: 0.5982 Train f_score 0.6667 Xval loss: 0.7149 Xval f_score 0.4848\n", "14:14:36 epoch 800 of 1000 Train loss: 0.6182 Train f_score 0.6698 Xval loss: 0.7172 Xval f_score 0.4848\n", "14:14:36 epoch 850 of 1000 Train loss: 0.6003 Train f_score 0.6818 Xval loss: 0.7174 Xval f_score 0.4848\n", "14:14:36 epoch 900 of 1000 Train loss: 0.6017 Train f_score 0.6697 Xval loss: 0.7210 Xval f_score 0.4848\n", "14:14:36 epoch 950 of 1000 Train loss: 0.6001 Train f_score 0.6820 Xval loss: 0.7222 Xval f_score 0.5075\n", "Best Xval loss epoch 78, value 0.690266\n", "NN units 32\n", "Reg_penalty 0.00000000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.695, Train F1 0.744\n", "[[53 14]\n", " [50 93]]\n", "Final Xval Accuracy 0.592, Xval F1 0.659\n", "[[14 7]\n", " [22 28]]\n", "14:14:39 Starting\n", "14:14:42 epoch 0 of 1000 Train loss: 0.6979 Train f_score 0.6533 Xval loss: 0.6978 Xval f_score 0.6604\n", "14:14:42 epoch 50 of 1000 Train loss: 0.6872 Train f_score 0.6186 Xval loss: 0.6936 Xval f_score 0.5682\n", "14:14:42 epoch 100 of 1000 Train loss: 0.6754 Train f_score 0.6581 Xval loss: 0.6936 Xval f_score 0.5679\n", "14:14:42 epoch 150 of 1000 Train loss: 0.6733 Train f_score 0.6491 Xval loss: 0.6950 Xval f_score 0.5714\n", "14:14:42 epoch 200 of 1000 Train loss: 0.6612 Train f_score 0.6400 Xval loss: 0.6973 Xval f_score 0.5429\n", "14:14:42 epoch 250 of 1000 Train loss: 0.6577 Train f_score 0.6553 Xval loss: 0.7005 Xval f_score 0.5588\n", "14:14:43 epoch 300 of 1000 Train loss: 0.6688 Train f_score 0.6273 Xval loss: 0.7031 Xval f_score 0.5538\n", "14:14:43 epoch 350 of 1000 Train loss: 0.6538 Train f_score 0.6339 Xval loss: 0.7055 Xval f_score 0.5758\n", "14:14:43 epoch 400 of 1000 Train loss: 0.6574 Train f_score 0.6140 Xval loss: 0.7077 Xval f_score 0.5455\n", "14:14:43 epoch 450 of 1000 Train loss: 0.6394 Train f_score 0.6697 Xval loss: 0.7095 Xval f_score 0.5231\n", "14:14:43 epoch 500 of 1000 Train loss: 0.6428 Train f_score 0.6575 Xval loss: 0.7104 Xval f_score 0.5231\n", "14:14:43 epoch 550 of 1000 Train loss: 0.6417 Train f_score 0.6637 Xval loss: 0.7123 Xval f_score 0.5231\n", "14:14:43 epoch 600 of 1000 Train loss: 0.6466 Train f_score 0.6542 Xval loss: 0.7125 Xval f_score 0.5152\n", "14:14:44 epoch 650 of 1000 Train loss: 0.6366 Train f_score 0.6445 Xval loss: 0.7134 Xval f_score 0.5373\n", "14:14:44 epoch 700 of 1000 Train loss: 0.6392 Train f_score 0.6514 Xval loss: 0.7146 Xval f_score 0.5152\n", "14:14:44 epoch 750 of 1000 Train loss: 0.6564 Train f_score 0.6111 Xval loss: 0.7142 Xval f_score 0.4923\n", "14:14:44 epoch 800 of 1000 Train loss: 0.6360 Train f_score 0.6636 Xval loss: 0.7160 Xval f_score 0.4848\n", "14:14:44 epoch 850 of 1000 Train loss: 0.6371 Train f_score 0.6452 Xval loss: 0.7161 Xval f_score 0.4848\n", "14:14:44 epoch 900 of 1000 Train loss: 0.6287 Train f_score 0.6326 Xval loss: 0.7178 Xval f_score 0.5075\n", "14:14:45 epoch 950 of 1000 Train loss: 0.6506 Train f_score 0.6368 Xval loss: 0.7202 Xval f_score 0.5075\n", "Best Xval loss epoch 75, value 0.693242\n", "NN units 32\n", "Reg_penalty 0.00010000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.671, Train F1 0.639\n", "[[80 
46]\n", " [23 61]]\n", "Final Xval Accuracy 0.577, Xval F1 0.483\n", "[[27 21]\n", " [ 9 14]]\n", "14:14:48 Starting\n", "14:14:50 epoch 0 of 1000 Train loss: 0.7023 Train f_score 0.6709 Xval loss: 0.7084 Xval f_score 0.6604\n", "14:14:50 epoch 50 of 1000 Train loss: 0.6966 Train f_score 0.6341 Xval loss: 0.6975 Xval f_score 0.6186\n", "14:14:50 epoch 100 of 1000 Train loss: 0.6914 Train f_score 0.5984 Xval loss: 0.6964 Xval f_score 0.5714\n", "14:14:51 epoch 150 of 1000 Train loss: 0.6862 Train f_score 0.5778 Xval loss: 0.6970 Xval f_score 0.5570\n", "14:14:51 epoch 200 of 1000 Train loss: 0.6845 Train f_score 0.5894 Xval loss: 0.6986 Xval f_score 0.5714\n", "14:14:51 epoch 250 of 1000 Train loss: 0.6872 Train f_score 0.5963 Xval loss: 0.7000 Xval f_score 0.5556\n", "14:14:51 epoch 300 of 1000 Train loss: 0.6592 Train f_score 0.6667 Xval loss: 0.7014 Xval f_score 0.5556\n", "14:14:51 epoch 350 of 1000 Train loss: 0.6716 Train f_score 0.6222 Xval loss: 0.7039 Xval f_score 0.5634\n", "14:14:51 epoch 400 of 1000 Train loss: 0.6769 Train f_score 0.6175 Xval loss: 0.7057 Xval f_score 0.5882\n", "14:14:52 epoch 450 of 1000 Train loss: 0.6894 Train f_score 0.5946 Xval loss: 0.7069 Xval f_score 0.5588\n", "14:14:52 epoch 500 of 1000 Train loss: 0.6641 Train f_score 0.6667 Xval loss: 0.7094 Xval f_score 0.5588\n", "14:14:52 epoch 550 of 1000 Train loss: 0.6712 Train f_score 0.6306 Xval loss: 0.7113 Xval f_score 0.5373\n", "14:14:52 epoch 600 of 1000 Train loss: 0.6638 Train f_score 0.6422 Xval loss: 0.7121 Xval f_score 0.5588\n", "14:14:52 epoch 650 of 1000 Train loss: 0.6510 Train f_score 0.6849 Xval loss: 0.7140 Xval f_score 0.5588\n", "14:14:52 epoch 700 of 1000 Train loss: 0.6630 Train f_score 0.6603 Xval loss: 0.7142 Xval f_score 0.5373\n", "14:14:52 epoch 750 of 1000 Train loss: 0.6574 Train f_score 0.6473 Xval loss: 0.7163 Xval f_score 0.5152\n", "14:14:53 epoch 800 of 1000 Train loss: 0.6459 Train f_score 0.6849 Xval loss: 0.7170 Xval f_score 0.5152\n", "14:14:53 epoch 850 of 1000 Train loss: 0.6679 Train f_score 0.6233 Xval loss: 0.7175 Xval f_score 0.5000\n", "14:14:53 epoch 900 of 1000 Train loss: 0.6661 Train f_score 0.6385 Xval loss: 0.7180 Xval f_score 0.5231\n", "14:14:53 epoch 950 of 1000 Train loss: 0.6533 Train f_score 0.6311 Xval loss: 0.7188 Xval f_score 0.5373\n", "Best Xval loss epoch 95, value 0.696305\n", "NN units 32\n", "Reg_penalty 0.00030000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.676, Train F1 0.682\n", "[[69 34]\n", " [34 73]]\n", "Final Xval Accuracy 0.592, Xval F1 0.580\n", "[[22 15]\n", " [14 20]]\n", "14:14:56 Starting\n", "14:14:59 epoch 0 of 1000 Train loss: 0.7312 Train f_score 0.6202 Xval loss: 0.7284 Xval f_score 0.6604\n", "14:14:59 epoch 50 of 1000 Train loss: 0.7018 Train f_score 0.5891 Xval loss: 0.7010 Xval f_score 0.6604\n", "14:14:59 epoch 100 of 1000 Train loss: 0.6961 Train f_score 0.5703 Xval loss: 0.6955 Xval f_score 0.6604\n", "14:14:59 epoch 150 of 1000 Train loss: 0.6924 Train f_score 0.6270 Xval loss: 0.6949 Xval f_score 0.6604\n", "14:14:59 epoch 200 of 1000 Train loss: 0.6950 Train f_score 0.6029 Xval loss: 0.6949 Xval f_score 0.6604\n", "14:14:59 epoch 250 of 1000 Train loss: 0.6931 Train f_score 0.6061 Xval loss: 0.6945 Xval f_score 0.6604\n", "14:15:00 epoch 300 of 1000 Train loss: 0.6942 Train f_score 0.5932 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:15:00 epoch 350 of 1000 Train loss: 0.6937 Train f_score 0.6485 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:15:00 epoch 400 of 1000 Train loss: 0.6943 
Train f_score 0.6301 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:15:00 epoch 450 of 1000 Train loss: 0.6937 Train f_score 0.6794 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:15:00 epoch 500 of 1000 Train loss: 0.6928 Train f_score 0.6667 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:00 epoch 550 of 1000 Train loss: 0.6938 Train f_score 0.6580 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:01 epoch 600 of 1000 Train loss: 0.6919 Train f_score 0.6710 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:01 epoch 650 of 1000 Train loss: 0.6943 Train f_score 0.6772 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:01 epoch 700 of 1000 Train loss: 0.6929 Train f_score 0.6730 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:15:01 epoch 750 of 1000 Train loss: 0.6928 Train f_score 0.6730 Xval loss: 0.6939 Xval f_score 0.6604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:15:01 epoch 800 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:15:01 epoch 850 of 1000 Train loss: 0.6937 Train f_score 0.6667 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:15:02 epoch 900 of 1000 Train loss: 0.6939 Train f_score 0.6645 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:15:02 epoch 950 of 1000 Train loss: 0.6925 Train f_score 0.6731 Xval loss: 0.6941 Xval f_score 0.6604\n", "Best Xval loss epoch 798, value 0.693807\n", "NN units 32\n", "Reg_penalty 0.00100000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.590, Train F1 0.606\n", "[[58 41]\n", " [45 66]]\n", "Final Xval Accuracy 0.563, Xval F1 0.635\n", "[[13 8]\n", " [23 27]]\n", "14:15:05 Starting\n", "14:15:07 epoch 0 of 1000 Train loss: 0.8028 Train f_score 0.4623 Xval loss: 0.7960 Xval f_score 0.2500\n", "14:15:07 epoch 50 of 1000 Train loss: 0.7094 Train f_score 0.6190 Xval loss: 0.7114 Xval f_score 0.6604\n", "14:15:07 epoch 100 of 1000 Train loss: 0.6967 Train f_score 0.5978 Xval loss: 0.6959 Xval f_score 0.6604\n", "14:15:08 epoch 150 of 1000 Train loss: 0.6944 Train f_score 0.6441 Xval loss: 0.6947 Xval f_score 0.6604\n", "14:15:08 epoch 200 of 1000 Train loss: 0.6944 Train f_score 0.6624 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:15:08 epoch 250 of 1000 Train loss: 0.6938 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:15:08 epoch 300 of 1000 Train loss: 0.6937 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:08 epoch 350 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:15:08 epoch 400 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:15:09 epoch 450 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:15:09 epoch 500 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:09 epoch 550 of 1000 Train loss: 0.6939 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:09 epoch 600 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:15:09 epoch 650 of 1000 Train loss: 0.6937 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:09 epoch 700 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:15:09 epoch 750 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:10 epoch 800 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:15:10 epoch 850 of 1000 Train loss: 0.6934 
Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:10 epoch 900 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:15:10 epoch 950 of 1000 Train loss: 0.6939 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "Best Xval loss epoch 582, value 0.693906\n", "NN units 32\n", "Reg_penalty 0.00300000\n", "Dropout 0.3330\n", "Activation sigmoid\n", "Final Train Accuracy 0.571, Train F1 0.628\n", "[[44 31]\n", " [59 76]]\n", "Final Xval Accuracy 0.606, Xval F1 0.548\n", "[[26 18]\n", " [10 17]]\n", "14:15:13 Starting\n", "14:15:16 epoch 0 of 1000 Train loss: 0.6933 Train f_score 0.3977 Xval loss: 0.6933 Xval f_score 0.4062\n", "14:15:16 epoch 50 of 1000 Train loss: 0.6825 Train f_score 0.6595 Xval loss: 0.6926 Xval f_score 0.5934\n", "14:15:16 epoch 100 of 1000 Train loss: 0.6689 Train f_score 0.6667 Xval loss: 0.6922 Xval f_score 0.5882\n", "14:15:16 epoch 150 of 1000 Train loss: 0.6616 Train f_score 0.6615 Xval loss: 0.6945 Xval f_score 0.5500\n", "14:15:16 epoch 200 of 1000 Train loss: 0.6520 Train f_score 0.6864 Xval loss: 0.6967 Xval f_score 0.5526\n", "14:15:16 epoch 250 of 1000 Train loss: 0.6691 Train f_score 0.6400 Xval loss: 0.6970 Xval f_score 0.5753\n", "14:15:16 epoch 300 of 1000 Train loss: 0.6395 Train f_score 0.6529 Xval loss: 0.6956 Xval f_score 0.5429\n", "14:15:17 epoch 350 of 1000 Train loss: 0.6321 Train f_score 0.6452 Xval loss: 0.6937 Xval f_score 0.6176\n", "14:15:17 epoch 400 of 1000 Train loss: 0.6233 Train f_score 0.6942 Xval loss: 0.6977 Xval f_score 0.5714\n", "14:15:17 epoch 450 of 1000 Train loss: 0.6433 Train f_score 0.6638 Xval loss: 0.7005 Xval f_score 0.5672\n", "14:15:17 epoch 500 of 1000 Train loss: 0.6384 Train f_score 0.6583 Xval loss: 0.7028 Xval f_score 0.5507\n", "14:15:17 epoch 550 of 1000 Train loss: 0.6556 Train f_score 0.6615 Xval loss: 0.7069 Xval f_score 0.5373\n", "14:15:17 epoch 600 of 1000 Train loss: 0.6439 Train f_score 0.6748 Xval loss: 0.7124 Xval f_score 0.5294\n", "14:15:18 epoch 650 of 1000 Train loss: 0.6522 Train f_score 0.6504 Xval loss: 0.7154 Xval f_score 0.4923\n", "14:15:18 epoch 700 of 1000 Train loss: 0.6326 Train f_score 0.6696 Xval loss: 0.7169 Xval f_score 0.5075\n", "14:15:18 epoch 750 of 1000 Train loss: 0.6250 Train f_score 0.6532 Xval loss: 0.7198 Xval f_score 0.5507\n", "14:15:18 epoch 800 of 1000 Train loss: 0.6150 Train f_score 0.6639 Xval loss: 0.7183 Xval f_score 0.4848\n", "14:15:18 epoch 850 of 1000 Train loss: 0.6080 Train f_score 0.6875 Xval loss: 0.7191 Xval f_score 0.4615\n", "14:15:18 epoch 900 of 1000 Train loss: 0.6247 Train f_score 0.6853 Xval loss: 0.7210 Xval f_score 0.4545\n", "14:15:18 epoch 950 of 1000 Train loss: 0.6373 Train f_score 0.6822 Xval loss: 0.7249 Xval f_score 0.4375\n", "Best Xval loss epoch 89, value 0.691892\n", "NN units 4\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.705, Train F1 0.752\n", "[[54 13]\n", " [49 94]]\n", "Final Xval Accuracy 0.549, Xval F1 0.568\n", "[[18 14]\n", " [18 21]]\n", "14:15:21 Starting\n", "14:15:24 epoch 0 of 1000 Train loss: 0.6946 Train f_score 0.3804 Xval loss: 0.6935 Xval f_score 0.3871\n", "14:15:24 epoch 50 of 1000 Train loss: 0.6888 Train f_score 0.6667 Xval loss: 0.6931 Xval f_score 0.6667\n", "14:15:24 epoch 100 of 1000 Train loss: 0.6812 Train f_score 0.6590 Xval loss: 0.6929 Xval f_score 0.5366\n", "14:15:24 epoch 150 of 1000 Train loss: 0.6714 Train f_score 0.6877 Xval loss: 0.6941 Xval f_score 0.5195\n", "14:15:25 epoch 200 of 
1000 Train loss: 0.6642 Train f_score 0.6891 Xval loss: 0.6958 Xval f_score 0.5000\n", "14:15:25 epoch 250 of 1000 Train loss: 0.6587 Train f_score 0.6778 Xval loss: 0.6973 Xval f_score 0.5526\n", "14:15:25 epoch 300 of 1000 Train loss: 0.6534 Train f_score 0.6581 Xval loss: 0.6984 Xval f_score 0.5676\n", "14:15:25 epoch 350 of 1000 Train loss: 0.6314 Train f_score 0.6608 Xval loss: 0.6979 Xval f_score 0.4928\n", "14:15:25 epoch 400 of 1000 Train loss: 0.6464 Train f_score 0.6550 Xval loss: 0.7004 Xval f_score 0.5429\n", "14:15:25 epoch 450 of 1000 Train loss: 0.6445 Train f_score 0.6829 Xval loss: 0.7026 Xval f_score 0.4928\n", "14:15:25 epoch 500 of 1000 Train loss: 0.6322 Train f_score 0.6862 Xval loss: 0.7032 Xval f_score 0.5217\n", "14:15:26 epoch 550 of 1000 Train loss: 0.6366 Train f_score 0.6667 Xval loss: 0.7044 Xval f_score 0.5429\n", "14:15:26 epoch 600 of 1000 Train loss: 0.6414 Train f_score 0.6639 Xval loss: 0.7045 Xval f_score 0.5429\n", "14:15:26 epoch 650 of 1000 Train loss: 0.6396 Train f_score 0.6777 Xval loss: 0.7068 Xval f_score 0.5217\n", "14:15:26 epoch 700 of 1000 Train loss: 0.6475 Train f_score 0.6609 Xval loss: 0.7071 Xval f_score 0.5217\n", "14:15:26 epoch 750 of 1000 Train loss: 0.6354 Train f_score 0.6803 Xval loss: 0.7102 Xval f_score 0.5217\n", "14:15:26 epoch 800 of 1000 Train loss: 0.6070 Train f_score 0.6805 Xval loss: 0.7094 Xval f_score 0.5152\n", "14:15:26 epoch 850 of 1000 Train loss: 0.6363 Train f_score 0.6885 Xval loss: 0.7117 Xval f_score 0.5373\n", "14:15:27 epoch 900 of 1000 Train loss: 0.6130 Train f_score 0.6862 Xval loss: 0.7104 Xval f_score 0.5373\n", "14:15:27 epoch 950 of 1000 Train loss: 0.6368 Train f_score 0.6802 Xval loss: 0.7107 Xval f_score 0.5294\n", "Best Xval loss epoch 91, value 0.692770\n", "NN units 4\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.729, Train F1 0.682\n", "[[92 46]\n", " [11 61]]\n", "Final Xval Accuracy 0.577, Xval F1 0.500\n", "[[26 20]\n", " [10 15]]\n", "14:15:30 Starting\n", "14:15:32 epoch 0 of 1000 Train loss: 0.6941 Train f_score 0.0000 Xval loss: 0.6945 Xval f_score 0.2917\n", "14:15:33 epoch 50 of 1000 Train loss: 0.6889 Train f_score 0.6765 Xval loss: 0.6938 Xval f_score 0.5542\n", "14:15:33 epoch 100 of 1000 Train loss: 0.6802 Train f_score 0.6694 Xval loss: 0.6957 Xval f_score 0.5143\n", "14:15:33 epoch 150 of 1000 Train loss: 0.6659 Train f_score 0.6724 Xval loss: 0.7007 Xval f_score 0.5143\n", "14:15:33 epoch 200 of 1000 Train loss: 0.6626 Train f_score 0.6778 Xval loss: 0.7037 Xval f_score 0.5294\n", "14:15:33 epoch 250 of 1000 Train loss: 0.6594 Train f_score 0.6694 Xval loss: 0.7096 Xval f_score 0.5294\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:15:33 epoch 300 of 1000 Train loss: 0.6526 Train f_score 0.6939 Xval loss: 0.7154 Xval f_score 0.5075\n", "14:15:34 epoch 350 of 1000 Train loss: 0.6584 Train f_score 0.6444 Xval loss: 0.7183 Xval f_score 0.5294\n", "14:15:34 epoch 400 of 1000 Train loss: 0.6428 Train f_score 0.6803 Xval loss: 0.7227 Xval f_score 0.5217\n", "14:15:34 epoch 450 of 1000 Train loss: 0.6459 Train f_score 0.6942 Xval loss: 0.7256 Xval f_score 0.5075\n", "14:15:34 epoch 500 of 1000 Train loss: 0.6469 Train f_score 0.6888 Xval loss: 0.7290 Xval f_score 0.5373\n", "14:15:34 epoch 550 of 1000 Train loss: 0.6441 Train f_score 0.7016 Xval loss: 0.7277 Xval f_score 0.5373\n", "14:15:34 epoch 600 of 1000 Train loss: 0.6377 Train f_score 0.7083 Xval loss: 0.7298 Xval f_score 0.5373\n", "14:15:34 epoch 650 of 
1000 Train loss: 0.6323 Train f_score 0.7160 Xval loss: 0.7359 Xval f_score 0.5373\n", "14:15:35 epoch 700 of 1000 Train loss: 0.6407 Train f_score 0.6979 Xval loss: 0.7335 Xval f_score 0.5373\n", "14:15:35 epoch 750 of 1000 Train loss: 0.6260 Train f_score 0.7184 Xval loss: 0.7357 Xval f_score 0.5373\n", "14:15:35 epoch 800 of 1000 Train loss: 0.6433 Train f_score 0.7016 Xval loss: 0.7326 Xval f_score 0.5373\n", "14:15:35 epoch 850 of 1000 Train loss: 0.6280 Train f_score 0.7213 Xval loss: 0.7344 Xval f_score 0.5294\n", "14:15:35 epoch 900 of 1000 Train loss: 0.6373 Train f_score 0.6880 Xval loss: 0.7408 Xval f_score 0.4923\n", "14:15:35 epoch 950 of 1000 Train loss: 0.6504 Train f_score 0.7339 Xval loss: 0.7442 Xval f_score 0.5152\n", "Best Xval loss epoch 53, value 0.693828\n", "NN units 4\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.710, Train F1 0.705\n", "[[76 34]\n", " [27 73]]\n", "Final Xval Accuracy 0.563, Xval F1 0.551\n", "[[21 16]\n", " [15 19]]\n", "14:15:39 Starting\n", "14:15:41 epoch 0 of 1000 Train loss: 0.6965 Train f_score 0.0000 Xval loss: 0.6974 Xval f_score 0.2791\n", "14:15:41 epoch 50 of 1000 Train loss: 0.6938 Train f_score 0.6691 Xval loss: 0.6971 Xval f_score 0.6087\n", "14:15:41 epoch 100 of 1000 Train loss: 0.6874 Train f_score 0.6642 Xval loss: 0.6996 Xval f_score 0.5195\n", "14:15:42 epoch 150 of 1000 Train loss: 0.6730 Train f_score 0.6446 Xval loss: 0.7036 Xval f_score 0.5373\n", "14:15:42 epoch 200 of 1000 Train loss: 0.6674 Train f_score 0.6667 Xval loss: 0.7064 Xval f_score 0.5152\n", "14:15:42 epoch 250 of 1000 Train loss: 0.6589 Train f_score 0.6750 Xval loss: 0.7128 Xval f_score 0.5075\n", "14:15:42 epoch 300 of 1000 Train loss: 0.6504 Train f_score 0.6883 Xval loss: 0.7161 Xval f_score 0.5152\n", "14:15:42 epoch 350 of 1000 Train loss: 0.6434 Train f_score 0.6917 Xval loss: 0.7192 Xval f_score 0.5373\n", "14:15:42 epoch 400 of 1000 Train loss: 0.6390 Train f_score 0.6975 Xval loss: 0.7232 Xval f_score 0.5373\n", "14:15:42 epoch 450 of 1000 Train loss: 0.6389 Train f_score 0.6696 Xval loss: 0.7274 Xval f_score 0.5373\n", "14:15:43 epoch 500 of 1000 Train loss: 0.6367 Train f_score 0.6908 Xval loss: 0.7270 Xval f_score 0.5373\n", "14:15:43 epoch 550 of 1000 Train loss: 0.6308 Train f_score 0.7206 Xval loss: 0.7297 Xval f_score 0.5373\n", "14:15:43 epoch 600 of 1000 Train loss: 0.6489 Train f_score 0.6640 Xval loss: 0.7302 Xval f_score 0.5373\n", "14:15:43 epoch 650 of 1000 Train loss: 0.6512 Train f_score 0.6667 Xval loss: 0.7323 Xval f_score 0.5294\n", "14:15:43 epoch 700 of 1000 Train loss: 0.6568 Train f_score 0.6862 Xval loss: 0.7311 Xval f_score 0.5294\n", "14:15:43 epoch 750 of 1000 Train loss: 0.6566 Train f_score 0.7137 Xval loss: 0.7344 Xval f_score 0.5588\n", "14:15:44 epoch 800 of 1000 Train loss: 0.6278 Train f_score 0.6996 Xval loss: 0.7367 Xval f_score 0.5507\n", "14:15:44 epoch 850 of 1000 Train loss: 0.6353 Train f_score 0.7213 Xval loss: 0.7383 Xval f_score 0.5507\n", "14:15:44 epoch 900 of 1000 Train loss: 0.6338 Train f_score 0.6992 Xval loss: 0.7394 Xval f_score 0.5507\n", "14:15:44 epoch 950 of 1000 Train loss: 0.6290 Train f_score 0.7029 Xval loss: 0.7396 Xval f_score 0.5507\n", "Best Xval loss epoch 29, value 0.696557\n", "NN units 4\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.729, Train F1 0.755\n", "[[65 19]\n", " [38 88]]\n", "Final Xval Accuracy 0.577, Xval F1 0.559\n", "[[22 16]\n", " [14 19]]\n", "14:15:47 Starting\n", 
"14:15:50 epoch 0 of 1000 Train loss: 0.7080 Train f_score 0.1818 Xval loss: 0.7077 Xval f_score 0.2143\n", "14:15:50 epoch 50 of 1000 Train loss: 0.6963 Train f_score 0.6709 Xval loss: 0.6969 Xval f_score 0.6604\n", "14:15:50 epoch 100 of 1000 Train loss: 0.6924 Train f_score 0.6817 Xval loss: 0.6945 Xval f_score 0.6476\n", "14:15:50 epoch 150 of 1000 Train loss: 0.6916 Train f_score 0.6667 Xval loss: 0.6948 Xval f_score 0.6465\n", "14:15:50 epoch 200 of 1000 Train loss: 0.6910 Train f_score 0.6594 Xval loss: 0.6952 Xval f_score 0.6207\n", "14:15:51 epoch 250 of 1000 Train loss: 0.6831 Train f_score 0.6739 Xval loss: 0.6951 Xval f_score 0.6024\n", "14:15:51 epoch 300 of 1000 Train loss: 0.6804 Train f_score 0.6691 Xval loss: 0.6953 Xval f_score 0.6173\n", "14:15:51 epoch 350 of 1000 Train loss: 0.6743 Train f_score 0.6692 Xval loss: 0.6956 Xval f_score 0.6173\n", "14:15:51 epoch 400 of 1000 Train loss: 0.6732 Train f_score 0.6615 Xval loss: 0.6949 Xval f_score 0.6250\n", "14:15:51 epoch 450 of 1000 Train loss: 0.6711 Train f_score 0.6947 Xval loss: 0.6960 Xval f_score 0.6329\n", "14:15:51 epoch 500 of 1000 Train loss: 0.6767 Train f_score 0.6716 Xval loss: 0.6965 Xval f_score 0.6076\n", "14:15:51 epoch 550 of 1000 Train loss: 0.6936 Train f_score 0.6215 Xval loss: 0.6961 Xval f_score 0.6154\n", "14:15:52 epoch 600 of 1000 Train loss: 0.6744 Train f_score 0.6795 Xval loss: 0.6959 Xval f_score 0.6027\n", "14:15:52 epoch 650 of 1000 Train loss: 0.6812 Train f_score 0.6590 Xval loss: 0.6957 Xval f_score 0.5833\n", "14:15:52 epoch 700 of 1000 Train loss: 0.6921 Train f_score 0.6567 Xval loss: 0.6964 Xval f_score 0.5946\n", "14:15:52 epoch 750 of 1000 Train loss: 0.6846 Train f_score 0.6308 Xval loss: 0.6964 Xval f_score 0.5753\n", "14:15:52 epoch 800 of 1000 Train loss: 0.6903 Train f_score 0.6491 Xval loss: 0.6982 Xval f_score 0.5352\n", "14:15:52 epoch 850 of 1000 Train loss: 0.6817 Train f_score 0.6565 Xval loss: 0.6990 Xval f_score 0.5352\n", "14:15:52 epoch 900 of 1000 Train loss: 0.6647 Train f_score 0.6692 Xval loss: 0.7002 Xval f_score 0.5429\n", "14:15:53 epoch 950 of 1000 Train loss: 0.6748 Train f_score 0.6562 Xval loss: 0.6997 Xval f_score 0.5429\n", "Best Xval loss epoch 99, value 0.694526\n", "NN units 4\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.676, Train F1 0.646\n", "[[80 45]\n", " [23 62]]\n", "Final Xval Accuracy 0.634, Xval F1 0.500\n", "[[32 22]\n", " [ 4 13]]\n", "14:15:56 Starting\n", "14:15:58 epoch 0 of 1000 Train loss: 0.6953 Train f_score 0.4100 Xval loss: 0.6956 Xval f_score 0.3582\n", "14:15:59 epoch 50 of 1000 Train loss: 0.6842 Train f_score 0.6620 Xval loss: 0.6923 Xval f_score 0.5934\n", "14:15:59 epoch 100 of 1000 Train loss: 0.6611 Train f_score 0.6883 Xval loss: 0.6901 Xval f_score 0.5455\n", "14:15:59 epoch 150 of 1000 Train loss: 0.6531 Train f_score 0.6355 Xval loss: 0.6907 Xval f_score 0.5152\n", "14:15:59 epoch 200 of 1000 Train loss: 0.6447 Train f_score 0.5816 Xval loss: 0.6941 Xval f_score 0.5231\n", "14:15:59 epoch 250 of 1000 Train loss: 0.6153 Train f_score 0.6332 Xval loss: 0.6988 Xval f_score 0.5455\n", "14:15:59 epoch 300 of 1000 Train loss: 0.6161 Train f_score 0.5859 Xval loss: 0.7021 Xval f_score 0.5588\n", "14:15:59 epoch 350 of 1000 Train loss: 0.6551 Train f_score 0.5600 Xval loss: 0.7058 Xval f_score 0.5672\n", "14:16:00 epoch 400 of 1000 Train loss: 0.6075 Train f_score 0.6604 Xval loss: 0.7068 Xval f_score 0.5672\n", "14:16:00 epoch 450 of 1000 Train loss: 0.6120 Train f_score 
0.6131 Xval loss: 0.7093 Xval f_score 0.5672\n", "14:16:00 epoch 500 of 1000 Train loss: 0.6187 Train f_score 0.6275 Xval loss: 0.7102 Xval f_score 0.5588\n", "14:16:00 epoch 550 of 1000 Train loss: 0.5975 Train f_score 0.6250 Xval loss: 0.7159 Xval f_score 0.5507\n", "14:16:00 epoch 600 of 1000 Train loss: 0.5768 Train f_score 0.6495 Xval loss: 0.7168 Xval f_score 0.5588\n", "14:16:00 epoch 650 of 1000 Train loss: 0.5988 Train f_score 0.6562 Xval loss: 0.7230 Xval f_score 0.5373\n", "14:16:01 epoch 700 of 1000 Train loss: 0.5774 Train f_score 0.6732 Xval loss: 0.7284 Xval f_score 0.5373\n", "14:16:01 epoch 750 of 1000 Train loss: 0.5887 Train f_score 0.5918 Xval loss: 0.7331 Xval f_score 0.5294\n", "14:16:01 epoch 800 of 1000 Train loss: 0.6287 Train f_score 0.6042 Xval loss: 0.7437 Xval f_score 0.5294\n", "14:16:01 epoch 850 of 1000 Train loss: 0.5816 Train f_score 0.6429 Xval loss: 0.7466 Xval f_score 0.5217\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:16:01 epoch 900 of 1000 Train loss: 0.5944 Train f_score 0.6321 Xval loss: 0.7485 Xval f_score 0.5217\n", "14:16:01 epoch 950 of 1000 Train loss: 0.5964 Train f_score 0.6000 Xval loss: 0.7531 Xval f_score 0.5000\n", "Best Xval loss epoch 118, value 0.689723\n", "NN units 8\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.771, Train F1 0.795\n", "[[69 14]\n", " [34 93]]\n", "Final Xval Accuracy 0.563, Xval F1 0.508\n", "[[24 19]\n", " [12 16]]\n", "14:16:05 Starting\n", "14:16:09 epoch 0 of 1000 Train loss: 0.6947 Train f_score 0.5504 Xval loss: 0.6944 Xval f_score 0.5918\n", "14:16:09 epoch 50 of 1000 Train loss: 0.6888 Train f_score 0.6844 Xval loss: 0.6930 Xval f_score 0.6731\n", "14:16:10 epoch 100 of 1000 Train loss: 0.6748 Train f_score 0.6311 Xval loss: 0.6925 Xval f_score 0.5974\n", "14:16:10 epoch 150 of 1000 Train loss: 0.6506 Train f_score 0.6452 Xval loss: 0.6951 Xval f_score 0.5556\n", "14:16:10 epoch 200 of 1000 Train loss: 0.6538 Train f_score 0.5981 Xval loss: 0.6970 Xval f_score 0.5294\n", "14:16:10 epoch 250 of 1000 Train loss: 0.6211 Train f_score 0.6471 Xval loss: 0.7001 Xval f_score 0.5294\n", "14:16:10 epoch 300 of 1000 Train loss: 0.6014 Train f_score 0.6893 Xval loss: 0.7014 Xval f_score 0.5538\n", "14:16:10 epoch 350 of 1000 Train loss: 0.6443 Train f_score 0.6117 Xval loss: 0.7013 Xval f_score 0.5672\n", "14:16:10 epoch 400 of 1000 Train loss: 0.6137 Train f_score 0.6633 Xval loss: 0.7031 Xval f_score 0.5455\n", "14:16:11 epoch 450 of 1000 Train loss: 0.5813 Train f_score 0.6600 Xval loss: 0.7010 Xval f_score 0.5797\n", "14:16:11 epoch 500 of 1000 Train loss: 0.5790 Train f_score 0.6939 Xval loss: 0.7061 Xval f_score 0.5588\n", "14:16:11 epoch 550 of 1000 Train loss: 0.5813 Train f_score 0.7024 Xval loss: 0.7079 Xval f_score 0.5373\n", "14:16:11 epoch 600 of 1000 Train loss: 0.5640 Train f_score 0.6829 Xval loss: 0.7116 Xval f_score 0.5373\n", "14:16:11 epoch 650 of 1000 Train loss: 0.5685 Train f_score 0.6981 Xval loss: 0.7121 Xval f_score 0.5507\n", "14:16:11 epoch 700 of 1000 Train loss: 0.5532 Train f_score 0.7255 Xval loss: 0.7128 Xval f_score 0.5588\n", "14:16:12 epoch 750 of 1000 Train loss: 0.5706 Train f_score 0.6486 Xval loss: 0.7105 Xval f_score 0.5797\n", "14:16:12 epoch 800 of 1000 Train loss: 0.5881 Train f_score 0.6734 Xval loss: 0.7125 Xval f_score 0.5797\n", "14:16:12 epoch 850 of 1000 Train loss: 0.5609 Train f_score 0.7030 Xval loss: 0.7155 Xval f_score 0.5797\n", "14:16:12 epoch 900 of 1000 Train loss: 0.5764 Train f_score 
0.6875 Xval loss: 0.7130 Xval f_score 0.5797\n", "14:16:12 epoch 950 of 1000 Train loss: 0.5816 Train f_score 0.7035 Xval loss: 0.7202 Xval f_score 0.6000\n", "Best Xval loss epoch 94, value 0.692189\n", "NN units 8\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.824, Train F1 0.828\n", "[[84 18]\n", " [19 89]]\n", "Final Xval Accuracy 0.634, Xval F1 0.658\n", "[[20 10]\n", " [16 25]]\n", "14:16:15 Starting\n", "14:16:18 epoch 0 of 1000 Train loss: 0.6961 Train f_score 0.5041 Xval loss: 0.6961 Xval f_score 0.5435\n", "14:16:18 epoch 50 of 1000 Train loss: 0.6903 Train f_score 0.6926 Xval loss: 0.6953 Xval f_score 0.6604\n", "14:16:18 epoch 100 of 1000 Train loss: 0.6755 Train f_score 0.6531 Xval loss: 0.6964 Xval f_score 0.5455\n", "14:16:19 epoch 150 of 1000 Train loss: 0.6536 Train f_score 0.6476 Xval loss: 0.6982 Xval f_score 0.5373\n", "14:16:19 epoch 200 of 1000 Train loss: 0.6471 Train f_score 0.6234 Xval loss: 0.6983 Xval f_score 0.5429\n", "14:16:19 epoch 250 of 1000 Train loss: 0.6271 Train f_score 0.6452 Xval loss: 0.6960 Xval f_score 0.5373\n", "14:16:19 epoch 300 of 1000 Train loss: 0.6147 Train f_score 0.6607 Xval loss: 0.6988 Xval f_score 0.5588\n", "14:16:19 epoch 350 of 1000 Train loss: 0.6395 Train f_score 0.6637 Xval loss: 0.6998 Xval f_score 0.5312\n", "14:16:19 epoch 400 of 1000 Train loss: 0.6280 Train f_score 0.6852 Xval loss: 0.6994 Xval f_score 0.5312\n", "14:16:19 epoch 450 of 1000 Train loss: 0.5892 Train f_score 0.7018 Xval loss: 0.7023 Xval f_score 0.5758\n", "14:16:20 epoch 500 of 1000 Train loss: 0.6235 Train f_score 0.6840 Xval loss: 0.7025 Xval f_score 0.5758\n", "14:16:20 epoch 550 of 1000 Train loss: 0.6054 Train f_score 0.6724 Xval loss: 0.7022 Xval f_score 0.5538\n", "14:16:20 epoch 600 of 1000 Train loss: 0.6258 Train f_score 0.6805 Xval loss: 0.7035 Xval f_score 0.5455\n", "14:16:20 epoch 650 of 1000 Train loss: 0.5881 Train f_score 0.7117 Xval loss: 0.7041 Xval f_score 0.5455\n", "14:16:20 epoch 700 of 1000 Train loss: 0.5937 Train f_score 0.6897 Xval loss: 0.7061 Xval f_score 0.5455\n", "14:16:20 epoch 750 of 1000 Train loss: 0.5797 Train f_score 0.6870 Xval loss: 0.7070 Xval f_score 0.5373\n", "14:16:21 epoch 800 of 1000 Train loss: 0.6294 Train f_score 0.6525 Xval loss: 0.7119 Xval f_score 0.4762\n", "14:16:21 epoch 850 of 1000 Train loss: 0.5848 Train f_score 0.7094 Xval loss: 0.7150 Xval f_score 0.4687\n", "14:16:21 epoch 900 of 1000 Train loss: 0.5628 Train f_score 0.7426 Xval loss: 0.7194 Xval f_score 0.4923\n", "14:16:21 epoch 950 of 1000 Train loss: 0.5757 Train f_score 0.7078 Xval loss: 0.7204 Xval f_score 0.4848\n", "Best Xval loss epoch 30, value 0.695121\n", "NN units 8\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.786, Train F1 0.812\n", "[[68 10]\n", " [35 97]]\n", "Final Xval Accuracy 0.592, Xval F1 0.633\n", "[[17 10]\n", " [19 25]]\n", "14:16:24 Starting\n", "14:16:27 epoch 0 of 1000 Train loss: 0.7025 Train f_score 0.4953 Xval loss: 0.7013 Xval f_score 0.5897\n", "14:16:27 epoch 50 of 1000 Train loss: 0.6927 Train f_score 0.6863 Xval loss: 0.6969 Xval f_score 0.6667\n", "14:16:27 epoch 100 of 1000 Train loss: 0.6790 Train f_score 0.6905 Xval loss: 0.6960 Xval f_score 0.5823\n", "14:16:28 epoch 150 of 1000 Train loss: 0.6635 Train f_score 0.6111 Xval loss: 0.6979 Xval f_score 0.5507\n", "14:16:28 epoch 200 of 1000 Train loss: 0.6568 Train f_score 0.6728 Xval loss: 0.6999 Xval f_score 0.5000\n", "14:16:28 epoch 250 of 1000 Train loss: 
0.6319 Train f_score 0.6852 Xval loss: 0.7008 Xval f_score 0.5714\n", "14:16:28 epoch 300 of 1000 Train loss: 0.6270 Train f_score 0.6226 Xval loss: 0.7030 Xval f_score 0.5507\n", "14:16:28 epoch 350 of 1000 Train loss: 0.6385 Train f_score 0.6204 Xval loss: 0.7002 Xval f_score 0.5915\n", "14:16:28 epoch 400 of 1000 Train loss: 0.6314 Train f_score 0.6635 Xval loss: 0.7010 Xval f_score 0.5915\n", "14:16:28 epoch 450 of 1000 Train loss: 0.6356 Train f_score 0.7037 Xval loss: 0.7006 Xval f_score 0.6000\n", "14:16:29 epoch 500 of 1000 Train loss: 0.6089 Train f_score 0.6827 Xval loss: 0.7015 Xval f_score 0.5797\n", "14:16:29 epoch 550 of 1000 Train loss: 0.6083 Train f_score 0.7014 Xval loss: 0.7020 Xval f_score 0.5882\n", "14:16:29 epoch 600 of 1000 Train loss: 0.6607 Train f_score 0.6066 Xval loss: 0.6990 Xval f_score 0.6087\n", "14:16:29 epoch 650 of 1000 Train loss: 0.6274 Train f_score 0.6078 Xval loss: 0.6975 Xval f_score 0.6000\n", "14:16:29 epoch 700 of 1000 Train loss: 0.5966 Train f_score 0.6759 Xval loss: 0.6984 Xval f_score 0.6000\n", "14:16:29 epoch 750 of 1000 Train loss: 0.6044 Train f_score 0.6798 Xval loss: 0.6946 Xval f_score 0.6197\n", "14:16:30 epoch 800 of 1000 Train loss: 0.6472 Train f_score 0.6139 Xval loss: 0.6954 Xval f_score 0.6286\n", "14:16:30 epoch 850 of 1000 Train loss: 0.6255 Train f_score 0.6452 Xval loss: 0.6931 Xval f_score 0.6197\n", "14:16:30 epoch 900 of 1000 Train loss: 0.6267 Train f_score 0.6238 Xval loss: 0.6928 Xval f_score 0.6286\n", "14:16:30 epoch 950 of 1000 Train loss: 0.6240 Train f_score 0.6377 Xval loss: 0.6962 Xval f_score 0.6087\n", "Best Xval loss epoch 885, value 0.691002\n", "NN units 8\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.762, Train F1 0.752\n", "[[84 31]\n", " [19 76]]\n", "Final Xval Accuracy 0.662, Xval F1 0.647\n", "[[25 13]\n", " [11 22]]\n", "14:16:33 Starting\n", "14:16:36 epoch 0 of 1000 Train loss: 0.7198 Train f_score 0.5650 Xval loss: 0.7210 Xval f_score 0.5063\n", "14:16:36 epoch 50 of 1000 Train loss: 0.6972 Train f_score 0.6890 Xval loss: 0.7019 Xval f_score 0.6200\n", "14:16:37 epoch 100 of 1000 Train loss: 0.6926 Train f_score 0.6806 Xval loss: 0.6989 Xval f_score 0.5895\n", "14:16:37 epoch 150 of 1000 Train loss: 0.6902 Train f_score 0.6394 Xval loss: 0.7003 Xval f_score 0.5301\n", "14:16:37 epoch 200 of 1000 Train loss: 0.6883 Train f_score 0.6340 Xval loss: 0.7015 Xval f_score 0.5526\n", "14:16:37 epoch 250 of 1000 Train loss: 0.6816 Train f_score 0.6540 Xval loss: 0.7009 Xval f_score 0.5753\n", "14:16:37 epoch 300 of 1000 Train loss: 0.6893 Train f_score 0.6364 Xval loss: 0.7006 Xval f_score 0.5714\n", "14:16:37 epoch 350 of 1000 Train loss: 0.6540 Train f_score 0.7027 Xval loss: 0.7026 Xval f_score 0.5714\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:16:37 epoch 400 of 1000 Train loss: 0.6850 Train f_score 0.6468 Xval loss: 0.7016 Xval f_score 0.5714\n", "14:16:38 epoch 450 of 1000 Train loss: 0.6699 Train f_score 0.6484 Xval loss: 0.7015 Xval f_score 0.5217\n", "14:16:38 epoch 500 of 1000 Train loss: 0.6531 Train f_score 0.7015 Xval loss: 0.7012 Xval f_score 0.5217\n", "14:16:38 epoch 550 of 1000 Train loss: 0.6713 Train f_score 0.6519 Xval loss: 0.7010 Xval f_score 0.5634\n", "14:16:38 epoch 600 of 1000 Train loss: 0.6741 Train f_score 0.6615 Xval loss: 0.7011 Xval f_score 0.5429\n", "14:16:38 epoch 650 of 1000 Train loss: 0.6595 Train f_score 0.6692 Xval loss: 0.7001 Xval f_score 0.5429\n", "14:16:38 epoch 700 of 1000 Train loss: 
0.6703 Train f_score 0.6790 Xval loss: 0.6998 Xval f_score 0.5217\n", "14:16:39 epoch 750 of 1000 Train loss: 0.6781 Train f_score 0.6541 Xval loss: 0.7000 Xval f_score 0.5217\n", "14:16:39 epoch 800 of 1000 Train loss: 0.6716 Train f_score 0.6692 Xval loss: 0.6998 Xval f_score 0.5429\n", "14:16:39 epoch 850 of 1000 Train loss: 0.6771 Train f_score 0.6868 Xval loss: 0.7016 Xval f_score 0.5429\n", "14:16:39 epoch 900 of 1000 Train loss: 0.6801 Train f_score 0.6592 Xval loss: 0.7029 Xval f_score 0.5429\n", "14:16:39 epoch 950 of 1000 Train loss: 0.6562 Train f_score 0.6894 Xval loss: 0.7027 Xval f_score 0.5429\n", "Best Xval loss epoch 94, value 0.698711\n", "NN units 8\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.681, Train F1 0.717\n", "[[58 22]\n", " [45 85]]\n", "Final Xval Accuracy 0.592, Xval F1 0.491\n", "[[28 21]\n", " [ 8 14]]\n", "14:16:43 Starting\n", "14:16:45 epoch 0 of 1000 Train loss: 0.6931 Train f_score 0.5565 Xval loss: 0.6915 Xval f_score 0.6341\n", "14:16:45 epoch 50 of 1000 Train loss: 0.6700 Train f_score 0.6812 Xval loss: 0.6899 Xval f_score 0.5333\n", "14:16:46 epoch 100 of 1000 Train loss: 0.6359 Train f_score 0.6847 Xval loss: 0.6957 Xval f_score 0.5507\n", "14:16:46 epoch 150 of 1000 Train loss: 0.6300 Train f_score 0.6728 Xval loss: 0.7036 Xval f_score 0.5278\n", "14:16:46 epoch 200 of 1000 Train loss: 0.5850 Train f_score 0.6912 Xval loss: 0.7109 Xval f_score 0.5278\n", "14:16:46 epoch 250 of 1000 Train loss: 0.5859 Train f_score 0.7200 Xval loss: 0.7222 Xval f_score 0.4928\n", "14:16:46 epoch 300 of 1000 Train loss: 0.5657 Train f_score 0.7545 Xval loss: 0.7291 Xval f_score 0.5429\n", "14:16:46 epoch 350 of 1000 Train loss: 0.5641 Train f_score 0.7354 Xval loss: 0.7376 Xval f_score 0.5429\n", "14:16:46 epoch 400 of 1000 Train loss: 0.5533 Train f_score 0.7387 Xval loss: 0.7439 Xval f_score 0.5507\n", "14:16:47 epoch 450 of 1000 Train loss: 0.5303 Train f_score 0.7391 Xval loss: 0.7574 Xval f_score 0.5507\n", "Best Xval loss epoch 48, value 0.689913\n", "NN units 16\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.790, Train F1 0.776\n", "[[90 31]\n", " [13 76]]\n", "Final Xval Accuracy 0.563, Xval F1 0.563\n", "[[20 15]\n", " [16 20]]\n", "14:16:50 Starting\n", "14:16:53 epoch 0 of 1000 Train loss: 0.6954 Train f_score 0.4811 Xval loss: 0.6941 Xval f_score 0.4872\n", "14:16:53 epoch 50 of 1000 Train loss: 0.6845 Train f_score 0.6667 Xval loss: 0.6919 Xval f_score 0.6316\n", "14:16:53 epoch 100 of 1000 Train loss: 0.6516 Train f_score 0.6756 Xval loss: 0.6889 Xval f_score 0.5753\n", "14:16:53 epoch 150 of 1000 Train loss: 0.6306 Train f_score 0.6509 Xval loss: 0.6925 Xval f_score 0.5714\n", "14:16:53 epoch 200 of 1000 Train loss: 0.6161 Train f_score 0.6996 Xval loss: 0.6963 Xval f_score 0.5294\n", "14:16:53 epoch 250 of 1000 Train loss: 0.5813 Train f_score 0.6952 Xval loss: 0.7013 Xval f_score 0.5373\n", "14:16:54 epoch 300 of 1000 Train loss: 0.5625 Train f_score 0.7042 Xval loss: 0.7109 Xval f_score 0.4615\n", "14:16:54 epoch 350 of 1000 Train loss: 0.5659 Train f_score 0.6897 Xval loss: 0.7209 Xval f_score 0.5588\n", "14:16:54 epoch 400 of 1000 Train loss: 0.5872 Train f_score 0.6852 Xval loss: 0.7211 Xval f_score 0.5915\n", "14:16:54 epoch 450 of 1000 Train loss: 0.5883 Train f_score 0.7143 Xval loss: 0.7273 Xval f_score 0.5797\n", "14:16:54 epoch 500 of 1000 Train loss: 0.5504 Train f_score 0.7570 Xval loss: 0.7310 Xval f_score 0.5882\n", "14:16:54 epoch 550 
of 1000 Train loss: 0.5727 Train f_score 0.7136 Xval loss: 0.7406 Xval f_score 0.6087\n", "14:16:55 epoch 600 of 1000 Train loss: 0.5359 Train f_score 0.7489 Xval loss: 0.7430 Xval f_score 0.5588\n", "14:16:55 epoch 650 of 1000 Train loss: 0.5316 Train f_score 0.7721 Xval loss: 0.7559 Xval f_score 0.5672\n", "Best Xval loss epoch 98, value 0.688846\n", "NN units 16\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.800, Train F1 0.817\n", "[[74 13]\n", " [29 94]]\n", "Final Xval Accuracy 0.634, Xval F1 0.649\n", "[[21 11]\n", " [15 24]]\n", "14:16:58 Starting\n", "14:17:01 epoch 0 of 1000 Train loss: 0.6975 Train f_score 0.6456 Xval loss: 0.6983 Xval f_score 0.5918\n", "14:17:01 epoch 50 of 1000 Train loss: 0.6807 Train f_score 0.6667 Xval loss: 0.6949 Xval f_score 0.5610\n", "14:17:01 epoch 100 of 1000 Train loss: 0.6626 Train f_score 0.6759 Xval loss: 0.6977 Xval f_score 0.5000\n", "14:17:02 epoch 150 of 1000 Train loss: 0.6471 Train f_score 0.6445 Xval loss: 0.7057 Xval f_score 0.5714\n", "14:17:02 epoch 200 of 1000 Train loss: 0.6310 Train f_score 0.6763 Xval loss: 0.7098 Xval f_score 0.5882\n", "14:17:02 epoch 250 of 1000 Train loss: 0.6193 Train f_score 0.7014 Xval loss: 0.7157 Xval f_score 0.5758\n", "14:17:02 epoch 300 of 1000 Train loss: 0.6113 Train f_score 0.7091 Xval loss: 0.7207 Xval f_score 0.5758\n", "14:17:02 epoch 350 of 1000 Train loss: 0.5907 Train f_score 0.7019 Xval loss: 0.7238 Xval f_score 0.5970\n", "14:17:02 epoch 400 of 1000 Train loss: 0.6037 Train f_score 0.6798 Xval loss: 0.7251 Xval f_score 0.5758\n", "14:17:02 epoch 450 of 1000 Train loss: 0.6034 Train f_score 0.6919 Xval loss: 0.7316 Xval f_score 0.6087\n", "14:17:03 epoch 500 of 1000 Train loss: 0.5568 Train f_score 0.6890 Xval loss: 0.7311 Xval f_score 0.5797\n", "14:17:03 epoch 550 of 1000 Train loss: 0.5584 Train f_score 0.6887 Xval loss: 0.7319 Xval f_score 0.6000\n", "14:17:03 epoch 600 of 1000 Train loss: 0.5714 Train f_score 0.7081 Xval loss: 0.7297 Xval f_score 0.5507\n", "14:17:03 epoch 650 of 1000 Train loss: 0.5473 Train f_score 0.7455 Xval loss: 0.7354 Xval f_score 0.5714\n", "14:17:03 epoch 700 of 1000 Train loss: 0.5451 Train f_score 0.7451 Xval loss: 0.7422 Xval f_score 0.5588\n", "14:17:03 epoch 750 of 1000 Train loss: 0.5520 Train f_score 0.7130 Xval loss: 0.7471 Xval f_score 0.5714\n", "Best Xval loss epoch 64, value 0.694354\n", "NN units 16\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.824, Train F1 0.810\n", "[[94 28]\n", " [ 9 79]]\n", "Final Xval Accuracy 0.606, Xval F1 0.622\n", "[[20 12]\n", " [16 23]]\n", "14:17:07 Starting\n", "14:17:10 epoch 0 of 1000 Train loss: 0.7118 Train f_score 0.6212 Xval loss: 0.7105 Xval f_score 0.6733\n", "14:17:10 epoch 50 of 1000 Train loss: 0.6903 Train f_score 0.6805 Xval loss: 0.7006 Xval f_score 0.6429\n", "14:17:10 epoch 100 of 1000 Train loss: 0.6756 Train f_score 0.6239 Xval loss: 0.7012 Xval f_score 0.4923\n", "14:17:10 epoch 150 of 1000 Train loss: 0.6620 Train f_score 0.6411 Xval loss: 0.7022 Xval f_score 0.5075\n", "14:17:11 epoch 200 of 1000 Train loss: 0.6388 Train f_score 0.7070 Xval loss: 0.7050 Xval f_score 0.5152\n", "14:17:11 epoch 250 of 1000 Train loss: 0.6542 Train f_score 0.6425 Xval loss: 0.7064 Xval f_score 0.5152\n", "14:17:11 epoch 300 of 1000 Train loss: 0.6432 Train f_score 0.6903 Xval loss: 0.7088 Xval f_score 0.5588\n", "14:17:11 epoch 350 of 1000 Train loss: 0.6241 Train f_score 0.6900 Xval loss: 0.7189 Xval f_score 
0.5373\n", "14:17:11 epoch 400 of 1000 Train loss: 0.6238 Train f_score 0.6897 Xval loss: 0.7206 Xval f_score 0.5152\n", "14:17:11 epoch 450 of 1000 Train loss: 0.6140 Train f_score 0.6754 Xval loss: 0.7271 Xval f_score 0.5373\n", "14:17:11 epoch 500 of 1000 Train loss: 0.6036 Train f_score 0.7456 Xval loss: 0.7337 Xval f_score 0.4923\n", "14:17:12 epoch 550 of 1000 Train loss: 0.6069 Train f_score 0.7378 Xval loss: 0.7374 Xval f_score 0.4923\n", "14:17:12 epoch 600 of 1000 Train loss: 0.5829 Train f_score 0.7391 Xval loss: 0.7463 Xval f_score 0.4923\n", "14:17:12 epoch 650 of 1000 Train loss: 0.5980 Train f_score 0.7225 Xval loss: 0.7499 Xval f_score 0.5373\n", "14:17:12 epoch 700 of 1000 Train loss: 0.6093 Train f_score 0.7009 Xval loss: 0.7508 Xval f_score 0.5152\n", "14:17:12 epoch 750 of 1000 Train loss: 0.5665 Train f_score 0.7534 Xval loss: 0.7574 Xval f_score 0.5588\n", "14:17:12 epoch 800 of 1000 Train loss: 0.5959 Train f_score 0.7162 Xval loss: 0.7605 Xval f_score 0.5373\n", "14:17:13 epoch 850 of 1000 Train loss: 0.5766 Train f_score 0.7373 Xval loss: 0.7682 Xval f_score 0.5373\n", "Best Xval loss epoch 64, value 0.699568\n", "NN units 16\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation relu\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Final Train Accuracy 0.795, Train F1 0.805\n", "[[78 18]\n", " [25 89]]\n", "Final Xval Accuracy 0.592, Xval F1 0.613\n", "[[19 12]\n", " [17 23]]\n", "14:17:16 Starting\n", "14:17:19 epoch 0 of 1000 Train loss: 0.7487 Train f_score 0.3152 Xval loss: 0.7479 Xval f_score 0.3404\n", "14:17:19 epoch 50 of 1000 Train loss: 0.7057 Train f_score 0.6890 Xval loss: 0.7076 Xval f_score 0.6471\n", "14:17:19 epoch 100 of 1000 Train loss: 0.6928 Train f_score 0.6644 Xval loss: 0.6983 Xval f_score 0.5843\n", "14:17:19 epoch 150 of 1000 Train loss: 0.6929 Train f_score 0.6491 Xval loss: 0.7001 Xval f_score 0.5526\n", "14:17:20 epoch 200 of 1000 Train loss: 0.6829 Train f_score 0.6667 Xval loss: 0.7017 Xval f_score 0.5556\n", "14:17:20 epoch 250 of 1000 Train loss: 0.6829 Train f_score 0.6557 Xval loss: 0.7055 Xval f_score 0.5714\n", "14:17:20 epoch 300 of 1000 Train loss: 0.6721 Train f_score 0.6800 Xval loss: 0.7083 Xval f_score 0.5429\n", "14:17:20 epoch 350 of 1000 Train loss: 0.6643 Train f_score 0.6809 Xval loss: 0.7103 Xval f_score 0.5455\n", "14:17:20 epoch 400 of 1000 Train loss: 0.6788 Train f_score 0.6880 Xval loss: 0.7110 Xval f_score 0.5294\n", "14:17:20 epoch 450 of 1000 Train loss: 0.6681 Train f_score 0.6748 Xval loss: 0.7125 Xval f_score 0.5294\n", "14:17:21 epoch 500 of 1000 Train loss: 0.6824 Train f_score 0.6667 Xval loss: 0.7137 Xval f_score 0.5294\n", "14:17:21 epoch 550 of 1000 Train loss: 0.6630 Train f_score 0.6946 Xval loss: 0.7143 Xval f_score 0.5294\n", "14:17:21 epoch 600 of 1000 Train loss: 0.6715 Train f_score 0.6560 Xval loss: 0.7153 Xval f_score 0.5294\n", "14:17:21 epoch 650 of 1000 Train loss: 0.6534 Train f_score 0.6971 Xval loss: 0.7144 Xval f_score 0.5152\n", "14:17:21 epoch 700 of 1000 Train loss: 0.6684 Train f_score 0.6802 Xval loss: 0.7165 Xval f_score 0.5152\n", "14:17:21 epoch 750 of 1000 Train loss: 0.6648 Train f_score 0.6640 Xval loss: 0.7165 Xval f_score 0.5373\n", "14:17:22 epoch 800 of 1000 Train loss: 0.6842 Train f_score 0.6720 Xval loss: 0.7176 Xval f_score 0.4928\n", "14:17:22 epoch 850 of 1000 Train loss: 0.6676 Train f_score 0.6853 Xval loss: 0.7171 Xval f_score 0.5217\n", "14:17:22 epoch 900 of 1000 Train loss: 0.6576 Train f_score 0.6805 Xval loss: 0.7172 Xval f_score 
0.5000\n", "14:17:22 epoch 950 of 1000 Train loss: 0.6321 Train f_score 0.7280 Xval loss: 0.7199 Xval f_score 0.5152\n", "Best Xval loss epoch 111, value 0.698225\n", "NN units 16\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.700, Train F1 0.707\n", "[[71 31]\n", " [32 76]]\n", "Final Xval Accuracy 0.592, Xval F1 0.431\n", "[[31 24]\n", " [ 5 11]]\n", "14:17:26 Starting\n", "14:17:28 epoch 0 of 1000 Train loss: 0.6929 Train f_score 0.4674 Xval loss: 0.6964 Xval f_score 0.3158\n", "14:17:29 epoch 50 of 1000 Train loss: 0.6675 Train f_score 0.7004 Xval loss: 0.6964 Xval f_score 0.5000\n", "14:17:29 epoch 100 of 1000 Train loss: 0.6214 Train f_score 0.7111 Xval loss: 0.7110 Xval f_score 0.5217\n", "14:17:29 epoch 150 of 1000 Train loss: 0.5852 Train f_score 0.7434 Xval loss: 0.7245 Xval f_score 0.4928\n", "14:17:29 epoch 200 of 1000 Train loss: 0.5589 Train f_score 0.7593 Xval loss: 0.7333 Xval f_score 0.4776\n", "14:17:29 epoch 250 of 1000 Train loss: 0.5300 Train f_score 0.7593 Xval loss: 0.7488 Xval f_score 0.5075\n", "14:17:29 epoch 300 of 1000 Train loss: 0.5042 Train f_score 0.7558 Xval loss: 0.7624 Xval f_score 0.4776\n", "Best Xval loss epoch 32, value 0.694178\n", "NN units 32\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.786, Train F1 0.785\n", "[[83 25]\n", " [20 82]]\n", "Final Xval Accuracy 0.577, Xval F1 0.516\n", "[[25 19]\n", " [11 16]]\n", "14:17:33 Starting\n", "14:17:36 epoch 0 of 1000 Train loss: 0.6981 Train f_score 0.4162 Xval loss: 0.6968 Xval f_score 0.3333\n", "14:17:36 epoch 50 of 1000 Train loss: 0.6673 Train f_score 0.6810 Xval loss: 0.6947 Xval f_score 0.5867\n", "14:17:36 epoch 100 of 1000 Train loss: 0.6171 Train f_score 0.7005 Xval loss: 0.7056 Xval f_score 0.5429\n", "14:17:36 epoch 150 of 1000 Train loss: 0.5903 Train f_score 0.7075 Xval loss: 0.7211 Xval f_score 0.5294\n", "14:17:36 epoch 200 of 1000 Train loss: 0.5731 Train f_score 0.7032 Xval loss: 0.7335 Xval f_score 0.4615\n", "14:17:37 epoch 250 of 1000 Train loss: 0.5675 Train f_score 0.7364 Xval loss: 0.7475 Xval f_score 0.4687\n", "14:17:37 epoch 300 of 1000 Train loss: 0.5415 Train f_score 0.7602 Xval loss: 0.7586 Xval f_score 0.4848\n", "Best Xval loss epoch 40, value 0.693992\n", "NN units 32\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.790, Train F1 0.798\n", "[[79 20]\n", " [24 87]]\n", "Final Xval Accuracy 0.549, Xval F1 0.680\n", "[[ 5 1]\n", " [31 34]]\n", "14:17:40 Starting\n", "14:17:43 epoch 0 of 1000 Train loss: 0.7038 Train f_score 0.6113 Xval loss: 0.7044 Xval f_score 0.5934\n", "14:17:43 epoch 50 of 1000 Train loss: 0.6772 Train f_score 0.6429 Xval loss: 0.7007 Xval f_score 0.5205\n", "14:17:43 epoch 100 of 1000 Train loss: 0.6421 Train f_score 0.6573 Xval loss: 0.7082 Xval f_score 0.5075\n", "14:17:44 epoch 150 of 1000 Train loss: 0.6058 Train f_score 0.6952 Xval loss: 0.7233 Xval f_score 0.5373\n", "14:17:44 epoch 200 of 1000 Train loss: 0.5982 Train f_score 0.7014 Xval loss: 0.7388 Xval f_score 0.5217\n", "14:17:44 epoch 250 of 1000 Train loss: 0.5733 Train f_score 0.7281 Xval loss: 0.7462 Xval f_score 0.5429\n", "14:17:44 epoch 300 of 1000 Train loss: 0.5408 Train f_score 0.7650 Xval loss: 0.7658 Xval f_score 0.5429\n", "Best Xval loss epoch 43, value 0.700504\n", "NN units 32\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.767, Train F1 0.768\n", "[[80 26]\n", " [23 81]]\n", "Final 
Xval Accuracy 0.577, Xval F1 0.531\n", "[[24 18]\n", " [12 17]]\n", "14:17:48 Starting\n", "14:17:51 epoch 0 of 1000 Train loss: 0.7262 Train f_score 0.5281 Xval loss: 0.7269 Xval f_score 0.5581\n", "14:17:51 epoch 50 of 1000 Train loss: 0.6954 Train f_score 0.6996 Xval loss: 0.7070 Xval f_score 0.5882\n", "14:17:51 epoch 100 of 1000 Train loss: 0.6725 Train f_score 0.6987 Xval loss: 0.7042 Xval f_score 0.5588\n", "14:17:51 epoch 150 of 1000 Train loss: 0.6523 Train f_score 0.6933 Xval loss: 0.7087 Xval f_score 0.5714\n", "14:17:52 epoch 200 of 1000 Train loss: 0.6275 Train f_score 0.7200 Xval loss: 0.7145 Xval f_score 0.5231\n", "14:17:52 epoch 250 of 1000 Train loss: 0.6300 Train f_score 0.6789 Xval loss: 0.7191 Xval f_score 0.5588\n", "14:17:52 epoch 300 of 1000 Train loss: 0.6140 Train f_score 0.7175 Xval loss: 0.7239 Xval f_score 0.5152\n", "14:17:52 epoch 350 of 1000 Train loss: 0.6062 Train f_score 0.7297 Xval loss: 0.7287 Xval f_score 0.5075\n", "14:17:52 epoch 400 of 1000 Train loss: 0.5951 Train f_score 0.7064 Xval loss: 0.7290 Xval f_score 0.5075\n", "14:17:52 epoch 450 of 1000 Train loss: 0.5913 Train f_score 0.7306 Xval loss: 0.7332 Xval f_score 0.4848\n", "14:17:52 epoch 500 of 1000 Train loss: 0.5917 Train f_score 0.7170 Xval loss: 0.7421 Xval f_score 0.4848\n", "14:17:53 epoch 550 of 1000 Train loss: 0.6030 Train f_score 0.7064 Xval loss: 0.7432 Xval f_score 0.5075\n", "14:17:53 epoch 600 of 1000 Train loss: 0.5815 Train f_score 0.7264 Xval loss: 0.7478 Xval f_score 0.5000\n", "14:17:53 epoch 650 of 1000 Train loss: 0.6008 Train f_score 0.7149 Xval loss: 0.7545 Xval f_score 0.5000\n", "14:17:53 epoch 700 of 1000 Train loss: 0.5374 Train f_score 0.8053 Xval loss: 0.7592 Xval f_score 0.4545\n", "14:17:53 epoch 750 of 1000 Train loss: 0.5736 Train f_score 0.7434 Xval loss: 0.7617 Xval f_score 0.4848\n", "14:17:54 epoch 800 of 1000 Train loss: 0.5540 Train f_score 0.7838 Xval loss: 0.7667 Xval f_score 0.4687\n", "14:17:54 epoch 850 of 1000 Train loss: 0.5548 Train f_score 0.7658 Xval loss: 0.7691 Xval f_score 0.4848\n", "14:17:54 epoch 900 of 1000 Train loss: 0.5779 Train f_score 0.7636 Xval loss: 0.7731 Xval f_score 0.4687\n", "Best Xval loss epoch 102, value 0.704075\n", "NN units 32\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.867, Train F1 0.869\n", "[[89 14]\n", " [14 93]]\n", "Final Xval Accuracy 0.606, Xval F1 0.600\n", "[[22 14]\n", " [14 21]]\n", "14:17:58 Starting\n", "14:18:01 epoch 0 of 1000 Train loss: 0.7994 Train f_score 0.5128 Xval loss: 0.8032 Xval f_score 0.2800\n", "14:18:01 epoch 50 of 1000 Train loss: 0.7138 Train f_score 0.6914 Xval loss: 0.7226 Xval f_score 0.6047\n", "14:18:01 epoch 100 of 1000 Train loss: 0.6943 Train f_score 0.6534 Xval loss: 0.7053 Xval f_score 0.6053\n", "14:18:01 epoch 150 of 1000 Train loss: 0.6838 Train f_score 0.7097 Xval loss: 0.7079 Xval f_score 0.6197\n", "14:18:01 epoch 200 of 1000 Train loss: 0.6713 Train f_score 0.6809 Xval loss: 0.7117 Xval f_score 0.5882\n", "14:18:01 epoch 250 of 1000 Train loss: 0.6783 Train f_score 0.6914 Xval loss: 0.7151 Xval f_score 0.5588\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:18:02 epoch 300 of 1000 Train loss: 0.6743 Train f_score 0.6494 Xval loss: 0.7195 Xval f_score 0.5373\n", "14:18:02 epoch 350 of 1000 Train loss: 0.6800 Train f_score 0.6410 Xval loss: 0.7180 Xval f_score 0.5373\n", "14:18:02 epoch 400 of 1000 Train loss: 0.6723 Train f_score 0.6610 Xval loss: 0.7197 Xval f_score 0.5455\n", "14:18:02 epoch 450 of 
1000 Train loss: 0.6594 Train f_score 0.6987 Xval loss: 0.7205 Xval f_score 0.5538\n", "14:18:02 epoch 500 of 1000 Train loss: 0.6650 Train f_score 0.6784 Xval loss: 0.7225 Xval f_score 0.5161\n", "14:18:02 epoch 550 of 1000 Train loss: 0.6699 Train f_score 0.6667 Xval loss: 0.7247 Xval f_score 0.5538\n", "14:18:03 epoch 600 of 1000 Train loss: 0.6581 Train f_score 0.6725 Xval loss: 0.7243 Xval f_score 0.5538\n", "14:18:03 epoch 650 of 1000 Train loss: 0.6602 Train f_score 0.6891 Xval loss: 0.7279 Xval f_score 0.5312\n", "14:18:03 epoch 700 of 1000 Train loss: 0.6708 Train f_score 0.6695 Xval loss: 0.7263 Xval f_score 0.5312\n", "14:18:03 epoch 750 of 1000 Train loss: 0.6328 Train f_score 0.7080 Xval loss: 0.7285 Xval f_score 0.5000\n", "14:18:03 epoch 800 of 1000 Train loss: 0.6635 Train f_score 0.6923 Xval loss: 0.7258 Xval f_score 0.4923\n", "14:18:03 epoch 850 of 1000 Train loss: 0.6617 Train f_score 0.6723 Xval loss: 0.7300 Xval f_score 0.4848\n", "14:18:04 epoch 900 of 1000 Train loss: 0.6596 Train f_score 0.6522 Xval loss: 0.7264 Xval f_score 0.5152\n", "14:18:04 epoch 950 of 1000 Train loss: 0.6766 Train f_score 0.6667 Xval loss: 0.7318 Xval f_score 0.5152\n", "Best Xval loss epoch 98, value 0.705290\n", "NN units 32\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation relu\n", "Final Train Accuracy 0.690, Train F1 0.673\n", "[[78 40]\n", " [25 67]]\n", "Final Xval Accuracy 0.577, Xval F1 0.464\n", "[[28 22]\n", " [ 8 13]]\n", "14:18:07 Starting\n", "14:18:10 epoch 0 of 1000 Train loss: 0.6956 Train f_score 0.1176 Xval loss: 0.6930 Xval f_score 0.0000\n", "14:18:10 epoch 50 of 1000 Train loss: 0.6916 Train f_score 0.6017 Xval loss: 0.6929 Xval f_score 0.6667\n", "14:18:11 epoch 100 of 1000 Train loss: 0.6901 Train f_score 0.5837 Xval loss: 0.6926 Xval f_score 0.6465\n", "14:18:11 epoch 150 of 1000 Train loss: 0.6873 Train f_score 0.5882 Xval loss: 0.6918 Xval f_score 0.5957\n", "14:18:11 epoch 200 of 1000 Train loss: 0.6857 Train f_score 0.6025 Xval loss: 0.6914 Xval f_score 0.5610\n", "14:18:11 epoch 250 of 1000 Train loss: 0.6691 Train f_score 0.6182 Xval loss: 0.6911 Xval f_score 0.5316\n", "14:18:11 epoch 300 of 1000 Train loss: 0.6779 Train f_score 0.5804 Xval loss: 0.6912 Xval f_score 0.5278\n", "14:18:11 epoch 350 of 1000 Train loss: 0.6701 Train f_score 0.6106 Xval loss: 0.6908 Xval f_score 0.5507\n", "14:18:12 epoch 400 of 1000 Train loss: 0.6755 Train f_score 0.6091 Xval loss: 0.6909 Xval f_score 0.5429\n", "14:18:12 epoch 450 of 1000 Train loss: 0.6671 Train f_score 0.5288 Xval loss: 0.6911 Xval f_score 0.5538\n", "14:18:12 epoch 500 of 1000 Train loss: 0.6623 Train f_score 0.6288 Xval loss: 0.6919 Xval f_score 0.5294\n", "14:18:12 epoch 550 of 1000 Train loss: 0.6606 Train f_score 0.6063 Xval loss: 0.6926 Xval f_score 0.5294\n", "14:18:12 epoch 600 of 1000 Train loss: 0.6510 Train f_score 0.6244 Xval loss: 0.6931 Xval f_score 0.5000\n", "14:18:12 epoch 650 of 1000 Train loss: 0.6448 Train f_score 0.6522 Xval loss: 0.6935 Xval f_score 0.5373\n", "14:18:13 epoch 700 of 1000 Train loss: 0.6630 Train f_score 0.6326 Xval loss: 0.6941 Xval f_score 0.4762\n", "14:18:13 epoch 750 of 1000 Train loss: 0.6770 Train f_score 0.5893 Xval loss: 0.6945 Xval f_score 0.4762\n", "14:18:13 epoch 800 of 1000 Train loss: 0.6636 Train f_score 0.5687 Xval loss: 0.6944 Xval f_score 0.4687\n", "14:18:13 epoch 850 of 1000 Train loss: 0.6508 Train f_score 0.6579 Xval loss: 0.6944 Xval f_score 0.4687\n", "14:18:13 epoch 900 of 1000 Train loss: 0.6624 Train f_score 0.6233 Xval loss: 
0.6942 Xval f_score 0.4687\n", "14:18:13 epoch 950 of 1000 Train loss: 0.6427 Train f_score 0.6383 Xval loss: 0.6943 Xval f_score 0.4687\n", "Best Xval loss epoch 348, value 0.690776\n", "NN units 4\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.686, Train F1 0.653\n", "[[82 45]\n", " [21 62]]\n", "Final Xval Accuracy 0.592, Xval F1 0.508\n", "[[27 20]\n", " [ 9 15]]\n", "14:18:17 Starting\n", "14:18:20 epoch 0 of 1000 Train loss: 0.6915 Train f_score 0.5872 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:20 epoch 50 of 1000 Train loss: 0.6926 Train f_score 0.6107 Xval loss: 0.6935 Xval f_score 0.6604\n", "14:18:21 epoch 100 of 1000 Train loss: 0.6916 Train f_score 0.6008 Xval loss: 0.6932 Xval f_score 0.6731\n", "14:18:21 epoch 150 of 1000 Train loss: 0.6942 Train f_score 0.5610 Xval loss: 0.6929 Xval f_score 0.6535\n", "14:18:21 epoch 200 of 1000 Train loss: 0.6909 Train f_score 0.5923 Xval loss: 0.6925 Xval f_score 0.6087\n", "14:18:21 epoch 250 of 1000 Train loss: 0.6849 Train f_score 0.6180 Xval loss: 0.6925 Xval f_score 0.5714\n", "14:18:21 epoch 300 of 1000 Train loss: 0.6822 Train f_score 0.5586 Xval loss: 0.6924 Xval f_score 0.5432\n", "14:18:21 epoch 350 of 1000 Train loss: 0.6870 Train f_score 0.5388 Xval loss: 0.6918 Xval f_score 0.5714\n", "14:18:21 epoch 400 of 1000 Train loss: 0.6784 Train f_score 0.5899 Xval loss: 0.6912 Xval f_score 0.5867\n", "14:18:22 epoch 450 of 1000 Train loss: 0.6701 Train f_score 0.6027 Xval loss: 0.6910 Xval f_score 0.5753\n", "14:18:22 epoch 500 of 1000 Train loss: 0.6748 Train f_score 0.5567 Xval loss: 0.6908 Xval f_score 0.5507\n", "14:18:22 epoch 550 of 1000 Train loss: 0.6641 Train f_score 0.5600 Xval loss: 0.6910 Xval f_score 0.5507\n", "14:18:22 epoch 600 of 1000 Train loss: 0.6813 Train f_score 0.5359 Xval loss: 0.6905 Xval f_score 0.5588\n", "14:18:22 epoch 650 of 1000 Train loss: 0.6617 Train f_score 0.6547 Xval loss: 0.6908 Xval f_score 0.5152\n", "14:18:22 epoch 700 of 1000 Train loss: 0.6591 Train f_score 0.6578 Xval loss: 0.6908 Xval f_score 0.5507\n", "14:18:23 epoch 750 of 1000 Train loss: 0.6547 Train f_score 0.6154 Xval loss: 0.6902 Xval f_score 0.5294\n", "14:18:23 epoch 800 of 1000 Train loss: 0.6748 Train f_score 0.5778 Xval loss: 0.6902 Xval f_score 0.5075\n", "14:18:23 epoch 850 of 1000 Train loss: 0.6683 Train f_score 0.6076 Xval loss: 0.6898 Xval f_score 0.5000\n", "14:18:23 epoch 900 of 1000 Train loss: 0.6764 Train f_score 0.5856 Xval loss: 0.6899 Xval f_score 0.4923\n", "14:18:23 epoch 950 of 1000 Train loss: 0.6503 Train f_score 0.6550 Xval loss: 0.6897 Xval f_score 0.4923\n", "Best Xval loss epoch 999, value 0.689429\n", "NN units 4\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.681, Train F1 0.691\n", "[[68 32]\n", " [35 75]]\n", "Final Xval Accuracy 0.606, Xval F1 0.462\n", "[[31 23]\n", " [ 5 12]]\n", "14:18:27 Starting\n", "14:18:33 epoch 0 of 1000 Train loss: 0.6936 Train f_score 0.5514 Xval loss: 0.6947 Xval f_score 0.6604\n", "14:18:33 epoch 50 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:18:33 epoch 100 of 1000 Train loss: 0.6948 Train f_score 0.6424 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:18:33 epoch 150 of 1000 Train loss: 0.6900 Train f_score 0.6517 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:18:33 epoch 200 of 1000 Train loss: 0.6909 Train f_score 0.5392 Xval loss: 0.6938 Xval f_score 0.6667\n", "14:18:34 epoch 250 of 1000 Train loss: 0.6951 
Train f_score 0.4739 Xval loss: 0.6937 Xval f_score 0.6535\n", "14:18:34 epoch 300 of 1000 Train loss: 0.6907 Train f_score 0.4878 Xval loss: 0.6936 Xval f_score 0.6465\n", "14:18:34 epoch 350 of 1000 Train loss: 0.6868 Train f_score 0.5581 Xval loss: 0.6936 Xval f_score 0.5556\n", "14:18:34 epoch 400 of 1000 Train loss: 0.6861 Train f_score 0.5300 Xval loss: 0.6934 Xval f_score 0.5581\n", "14:18:34 epoch 450 of 1000 Train loss: 0.6828 Train f_score 0.5354 Xval loss: 0.6935 Xval f_score 0.5581\n", "14:18:34 epoch 500 of 1000 Train loss: 0.6824 Train f_score 0.5126 Xval loss: 0.6934 Xval f_score 0.5542\n", "14:18:35 epoch 550 of 1000 Train loss: 0.6813 Train f_score 0.5285 Xval loss: 0.6934 Xval f_score 0.5679\n", "14:18:35 epoch 600 of 1000 Train loss: 0.6850 Train f_score 0.4481 Xval loss: 0.6928 Xval f_score 0.5750\n", "14:18:35 epoch 650 of 1000 Train loss: 0.6953 Train f_score 0.4505 Xval loss: 0.6925 Xval f_score 0.5867\n", "14:18:35 epoch 700 of 1000 Train loss: 0.6817 Train f_score 0.4699 Xval loss: 0.6924 Xval f_score 0.5753\n", "14:18:35 epoch 750 of 1000 Train loss: 0.6869 Train f_score 0.4607 Xval loss: 0.6920 Xval f_score 0.5753\n", "14:18:35 epoch 800 of 1000 Train loss: 0.6736 Train f_score 0.5079 Xval loss: 0.6920 Xval f_score 0.5753\n", "14:18:36 epoch 850 of 1000 Train loss: 0.6872 Train f_score 0.4624 Xval loss: 0.6921 Xval f_score 0.5753\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:18:36 epoch 900 of 1000 Train loss: 0.6775 Train f_score 0.4886 Xval loss: 0.6912 Xval f_score 0.5634\n", "14:18:36 epoch 950 of 1000 Train loss: 0.6905 Train f_score 0.4762 Xval loss: 0.6913 Xval f_score 0.5556\n", "Best Xval loss epoch 963, value 0.691020\n", "NN units 4\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.681, Train F1 0.656\n", "[[79 43]\n", " [24 64]]\n", "Final Xval Accuracy 0.606, Xval F1 0.588\n", "[[23 15]\n", " [13 20]]\n", "14:18:40 Starting\n", "14:18:43 epoch 0 of 1000 Train loss: 0.6964 Train f_score 0.6312 Xval loss: 0.6984 Xval f_score 0.6604\n", "14:18:43 epoch 50 of 1000 Train loss: 0.6937 Train f_score 0.6781 Xval loss: 0.6945 Xval f_score 0.6604\n", "14:18:43 epoch 100 of 1000 Train loss: 0.6919 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:43 epoch 150 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:44 epoch 200 of 1000 Train loss: 0.6925 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:44 epoch 250 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:44 epoch 300 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:44 epoch 350 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:44 epoch 400 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:44 epoch 450 of 1000 Train loss: 0.6936 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:45 epoch 500 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:45 epoch 550 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:45 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:45 epoch 650 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:45 epoch 700 of 1000 Train loss: 0.6933 
Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:45 epoch 750 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:46 epoch 800 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:46 epoch 850 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:46 epoch 900 of 1000 Train loss: 0.6927 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:46 epoch 950 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "Best Xval loss epoch 614, value 0.693598\n", "NN units 4\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.571, Train F1 0.516\n", "[[72 59]\n", " [31 48]]\n", "Final Xval Accuracy 0.577, Xval F1 0.595\n", "[[19 13]\n", " [17 22]]\n", "14:18:50 Starting\n", "14:18:53 epoch 0 of 1000 Train loss: 0.7054 Train f_score 0.3057 Xval loss: 0.7048 Xval f_score 0.0000\n", "14:18:53 epoch 50 of 1000 Train loss: 0.6941 Train f_score 0.6751 Xval loss: 0.6955 Xval f_score 0.6604\n", "14:18:54 epoch 100 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:54 epoch 150 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:54 epoch 200 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:54 epoch 250 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:54 epoch 300 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:54 epoch 350 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:55 epoch 400 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:55 epoch 450 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:55 epoch 500 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:55 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:55 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:55 epoch 650 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:55 epoch 700 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:56 epoch 750 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:18:56 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:56 epoch 850 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:56 epoch 900 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:18:56 epoch 950 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "Best Xval loss epoch 502, value 0.693600\n", "NN units 4\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.543, Train F1 0.682\n", "[[ 11 4]\n", " [ 92 103]]\n", "Final Xval Accuracy 0.535, Xval F1 0.108\n", "[[36 33]\n", " [ 0 2]]\n", "14:19:00 Starting\n", "14:19:03 epoch 0 of 1000 Train loss: 0.6943 Train f_score 0.3129 Xval loss: 0.6933 Xval f_score 0.0000\n", "14:19:04 epoch 
50 of 1000 Train loss: 0.6879 Train f_score 0.6220 Xval loss: 0.6930 Xval f_score 0.6667\n", "14:19:04 epoch 100 of 1000 Train loss: 0.6875 Train f_score 0.6192 Xval loss: 0.6920 Xval f_score 0.6170\n", "14:19:04 epoch 150 of 1000 Train loss: 0.6805 Train f_score 0.6167 Xval loss: 0.6908 Xval f_score 0.5500\n", "14:19:04 epoch 200 of 1000 Train loss: 0.6769 Train f_score 0.6036 Xval loss: 0.6904 Xval f_score 0.5333\n", "14:19:04 epoch 250 of 1000 Train loss: 0.6722 Train f_score 0.5888 Xval loss: 0.6905 Xval f_score 0.5556\n", "14:19:04 epoch 300 of 1000 Train loss: 0.6643 Train f_score 0.6133 Xval loss: 0.6908 Xval f_score 0.5294\n", "14:19:05 epoch 350 of 1000 Train loss: 0.6512 Train f_score 0.6301 Xval loss: 0.6911 Xval f_score 0.5455\n", "14:19:05 epoch 400 of 1000 Train loss: 0.6787 Train f_score 0.5581 Xval loss: 0.6917 Xval f_score 0.5588\n", "14:19:05 epoch 450 of 1000 Train loss: 0.6492 Train f_score 0.6301 Xval loss: 0.6921 Xval f_score 0.5507\n", "14:19:05 epoch 500 of 1000 Train loss: 0.6568 Train f_score 0.6385 Xval loss: 0.6929 Xval f_score 0.5075\n", "14:19:05 epoch 550 of 1000 Train loss: 0.6581 Train f_score 0.6725 Xval loss: 0.6933 Xval f_score 0.5000\n", "14:19:05 epoch 600 of 1000 Train loss: 0.6582 Train f_score 0.6239 Xval loss: 0.6935 Xval f_score 0.5000\n", "14:19:06 epoch 650 of 1000 Train loss: 0.6466 Train f_score 0.6787 Xval loss: 0.6945 Xval f_score 0.4839\n", "14:19:06 epoch 700 of 1000 Train loss: 0.6428 Train f_score 0.6256 Xval loss: 0.6945 Xval f_score 0.5079\n", "14:19:06 epoch 750 of 1000 Train loss: 0.6652 Train f_score 0.6457 Xval loss: 0.6946 Xval f_score 0.5079\n", "14:19:06 epoch 800 of 1000 Train loss: 0.6494 Train f_score 0.6291 Xval loss: 0.6951 Xval f_score 0.5079\n", "14:19:06 epoch 850 of 1000 Train loss: 0.6688 Train f_score 0.6330 Xval loss: 0.6953 Xval f_score 0.5000\n", "14:19:06 epoch 900 of 1000 Train loss: 0.6450 Train f_score 0.6250 Xval loss: 0.6958 Xval f_score 0.5000\n", "14:19:06 epoch 950 of 1000 Train loss: 0.6520 Train f_score 0.6577 Xval loss: 0.6961 Xval f_score 0.5000\n", "Best Xval loss epoch 244, value 0.690421\n", "NN units 8\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.681, Train F1 0.679\n", "[[72 36]\n", " [31 71]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:19:11 Starting\n", "14:19:14 epoch 0 of 1000 Train loss: 0.6999 Train f_score 0.1600 Xval loss: 0.6960 Xval f_score 0.0000\n", "14:19:14 epoch 50 of 1000 Train loss: 0.6900 Train f_score 0.6102 Xval loss: 0.6942 Xval f_score 0.5833\n", "14:19:14 epoch 100 of 1000 Train loss: 0.6880 Train f_score 0.6104 Xval loss: 0.6940 Xval f_score 0.5979\n", "14:19:14 epoch 150 of 1000 Train loss: 0.6867 Train f_score 0.5941 Xval loss: 0.6936 Xval f_score 0.5714\n", "14:19:14 epoch 200 of 1000 Train loss: 0.6875 Train f_score 0.6076 Xval loss: 0.6935 Xval f_score 0.5610\n", "14:19:15 epoch 250 of 1000 Train loss: 0.6863 Train f_score 0.5611 Xval loss: 0.6936 Xval f_score 0.5714\n", "14:19:15 epoch 300 of 1000 Train loss: 0.6670 Train f_score 0.6019 Xval loss: 0.6936 Xval f_score 0.5789\n", "14:19:15 epoch 350 of 1000 Train loss: 0.6731 Train f_score 0.5804 Xval loss: 0.6942 Xval f_score 0.5753\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:19:15 epoch 400 of 1000 Train loss: 0.6690 Train f_score 0.6368 Xval loss: 0.6948 Xval f_score 0.5429\n", "14:19:15 epoch 450 of 1000 Train loss: 0.6721 Train f_score 0.6066 Xval loss: 0.6955 Xval f_score 0.5588\n", "14:19:15 epoch 
500 of 1000 Train loss: 0.6787 Train f_score 0.5936 Xval loss: 0.6967 Xval f_score 0.5507\n", "14:19:16 epoch 550 of 1000 Train loss: 0.6793 Train f_score 0.6083 Xval loss: 0.6976 Xval f_score 0.5429\n", "14:19:16 epoch 600 of 1000 Train loss: 0.6556 Train f_score 0.6575 Xval loss: 0.6981 Xval f_score 0.5714\n", "14:19:16 epoch 650 of 1000 Train loss: 0.6797 Train f_score 0.5905 Xval loss: 0.6984 Xval f_score 0.5882\n", "14:19:16 epoch 700 of 1000 Train loss: 0.6692 Train f_score 0.5860 Xval loss: 0.6990 Xval f_score 0.5882\n", "14:19:16 epoch 750 of 1000 Train loss: 0.6552 Train f_score 0.6514 Xval loss: 0.7001 Xval f_score 0.5455\n", "14:19:16 epoch 800 of 1000 Train loss: 0.6511 Train f_score 0.6049 Xval loss: 0.7009 Xval f_score 0.5231\n", "14:19:17 epoch 850 of 1000 Train loss: 0.6585 Train f_score 0.6486 Xval loss: 0.7018 Xval f_score 0.5152\n", "14:19:17 epoch 900 of 1000 Train loss: 0.6548 Train f_score 0.6330 Xval loss: 0.7021 Xval f_score 0.4923\n", "14:19:17 epoch 950 of 1000 Train loss: 0.6425 Train f_score 0.6330 Xval loss: 0.7026 Xval f_score 0.5152\n", "Best Xval loss epoch 217, value 0.693415\n", "NN units 8\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.676, Train F1 0.682\n", "[[69 34]\n", " [34 73]]\n", "Final Xval Accuracy 0.592, Xval F1 0.473\n", "[[29 22]\n", " [ 7 13]]\n", "14:19:21 Starting\n", "14:19:24 epoch 0 of 1000 Train loss: 0.6947 Train f_score 0.4444 Xval loss: 0.6962 Xval f_score 0.0541\n", "14:19:24 epoch 50 of 1000 Train loss: 0.6930 Train f_score 0.6104 Xval loss: 0.6954 Xval f_score 0.6731\n", "14:19:25 epoch 100 of 1000 Train loss: 0.6937 Train f_score 0.6122 Xval loss: 0.6947 Xval f_score 0.6400\n", "14:19:25 epoch 150 of 1000 Train loss: 0.6881 Train f_score 0.5950 Xval loss: 0.6943 Xval f_score 0.5979\n", "14:19:25 epoch 200 of 1000 Train loss: 0.6867 Train f_score 0.5678 Xval loss: 0.6943 Xval f_score 0.5393\n", "14:19:25 epoch 250 of 1000 Train loss: 0.6972 Train f_score 0.5046 Xval loss: 0.6942 Xval f_score 0.5714\n", "14:19:25 epoch 300 of 1000 Train loss: 0.6911 Train f_score 0.5611 Xval loss: 0.6942 Xval f_score 0.5316\n", "14:19:25 epoch 350 of 1000 Train loss: 0.6807 Train f_score 0.5965 Xval loss: 0.6940 Xval f_score 0.5333\n", "14:19:26 epoch 400 of 1000 Train loss: 0.6727 Train f_score 0.5991 Xval loss: 0.6941 Xval f_score 0.5600\n", "14:19:26 epoch 450 of 1000 Train loss: 0.6795 Train f_score 0.6422 Xval loss: 0.6944 Xval f_score 0.5915\n", "14:19:26 epoch 500 of 1000 Train loss: 0.6855 Train f_score 0.5900 Xval loss: 0.6946 Xval f_score 0.5714\n", "14:19:26 epoch 550 of 1000 Train loss: 0.6826 Train f_score 0.6000 Xval loss: 0.6944 Xval f_score 0.5294\n", "14:19:26 epoch 600 of 1000 Train loss: 0.6651 Train f_score 0.6239 Xval loss: 0.6948 Xval f_score 0.5294\n", "14:19:26 epoch 650 of 1000 Train loss: 0.6651 Train f_score 0.6119 Xval loss: 0.6955 Xval f_score 0.5294\n", "14:19:27 epoch 700 of 1000 Train loss: 0.6597 Train f_score 0.6385 Xval loss: 0.6955 Xval f_score 0.5294\n", "14:19:27 epoch 750 of 1000 Train loss: 0.6526 Train f_score 0.6385 Xval loss: 0.6961 Xval f_score 0.5373\n", "14:19:27 epoch 800 of 1000 Train loss: 0.6821 Train f_score 0.6175 Xval loss: 0.6971 Xval f_score 0.5294\n", "14:19:27 epoch 850 of 1000 Train loss: 0.6908 Train f_score 0.5505 Xval loss: 0.6977 Xval f_score 0.5373\n", "14:19:27 epoch 900 of 1000 Train loss: 0.6641 Train f_score 0.6311 Xval loss: 0.6981 Xval f_score 0.5152\n", "14:19:27 epoch 950 of 1000 Train loss: 0.6682 Train f_score 0.5822 Xval 
loss: 0.6995 Xval f_score 0.5373\n", "Best Xval loss epoch 382, value 0.693961\n", "NN units 8\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.671, Train F1 0.660\n", "[[74 40]\n", " [29 67]]\n", "Final Xval Accuracy 0.606, Xval F1 0.481\n", "[[30 22]\n", " [ 6 13]]\n", "14:19:32 Starting\n", "14:19:35 epoch 0 of 1000 Train loss: 0.7025 Train f_score 0.5591 Xval loss: 0.7017 Xval f_score 0.6604\n", "14:19:35 epoch 50 of 1000 Train loss: 0.6987 Train f_score 0.5971 Xval loss: 0.6953 Xval f_score 0.6604\n", "14:19:35 epoch 100 of 1000 Train loss: 0.6920 Train f_score 0.6732 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:19:35 epoch 150 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:35 epoch 200 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:19:36 epoch 250 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:36 epoch 300 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:36 epoch 350 of 1000 Train loss: 0.6936 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:36 epoch 400 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:19:36 epoch 450 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:36 epoch 500 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:19:37 epoch 550 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:37 epoch 600 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:37 epoch 650 of 1000 Train loss: 0.6939 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:37 epoch 700 of 1000 Train loss: 0.6938 Train f_score 0.6751 Xval loss: 0.6936 Xval f_score 0.6604\n", "14:19:37 epoch 750 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:37 epoch 800 of 1000 Train loss: 0.6927 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:38 epoch 850 of 1000 Train loss: 0.6927 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:38 epoch 900 of 1000 Train loss: 0.6923 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:38 epoch 950 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "Best Xval loss epoch 405, value 0.693596\n", "NN units 8\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.552, Train F1 0.687\n", "[[ 13 4]\n", " [ 90 103]]\n", "Final Xval Accuracy 0.563, Xval F1 0.523\n", "[[23 18]\n", " [13 17]]\n", "14:19:42 Starting\n", "14:19:45 epoch 0 of 1000 Train loss: 0.7187 Train f_score 0.5078 Xval loss: 0.7194 Xval f_score 0.0000\n", "14:19:45 epoch 50 of 1000 Train loss: 0.6973 Train f_score 0.6580 Xval loss: 0.6982 Xval f_score 0.6604\n", "14:19:46 epoch 100 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:19:46 epoch 150 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:19:46 epoch 200 of 1000 Train loss: 0.6926 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:19:46 epoch 250 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:46 epoch 300 of 1000 Train loss: 
0.6934 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:46 epoch 350 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:19:47 epoch 400 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:47 epoch 450 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:19:47 epoch 500 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:19:47 epoch 550 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:47 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:47 epoch 650 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:48 epoch 700 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:48 epoch 750 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:48 epoch 800 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:48 epoch 850 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:19:48 epoch 900 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n", "14:19:48 epoch 950 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6937 Xval f_score 0.6604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Best Xval loss epoch 274, value 0.693638\n", "NN units 8\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.529, Train F1 0.256\n", "[[94 90]\n", " [ 9 17]]\n", "Final Xval Accuracy 0.549, Xval F1 0.673\n", "[[ 6 2]\n", " [30 33]]\n", "14:19:53 Starting\n", "14:19:56 epoch 0 of 1000 Train loss: 0.6968 Train f_score 0.6536 Xval loss: 0.6953 Xval f_score 0.6604\n", "14:19:56 epoch 50 of 1000 Train loss: 0.6921 Train f_score 0.5771 Xval loss: 0.6923 Xval f_score 0.6600\n", "14:19:56 epoch 100 of 1000 Train loss: 0.6786 Train f_score 0.6348 Xval loss: 0.6907 Xval f_score 0.5647\n", "14:19:56 epoch 150 of 1000 Train loss: 0.6835 Train f_score 0.5520 Xval loss: 0.6901 Xval f_score 0.5500\n", "14:19:57 epoch 200 of 1000 Train loss: 0.6664 Train f_score 0.6522 Xval loss: 0.6906 Xval f_score 0.5479\n", "14:19:57 epoch 250 of 1000 Train loss: 0.6683 Train f_score 0.6204 Xval loss: 0.6917 Xval f_score 0.5634\n", "14:19:57 epoch 300 of 1000 Train loss: 0.6543 Train f_score 0.6400 Xval loss: 0.6931 Xval f_score 0.5429\n", "14:19:57 epoch 350 of 1000 Train loss: 0.6389 Train f_score 0.6509 Xval loss: 0.6941 Xval f_score 0.5373\n", "14:19:57 epoch 400 of 1000 Train loss: 0.6549 Train f_score 0.6491 Xval loss: 0.6951 Xval f_score 0.5538\n", "14:19:57 epoch 450 of 1000 Train loss: 0.6398 Train f_score 0.6389 Xval loss: 0.6964 Xval f_score 0.5538\n", "14:19:58 epoch 500 of 1000 Train loss: 0.6619 Train f_score 0.6182 Xval loss: 0.6974 Xval f_score 0.5079\n", "14:19:58 epoch 550 of 1000 Train loss: 0.6564 Train f_score 0.6226 Xval loss: 0.6989 Xval f_score 0.5079\n", "14:19:58 epoch 600 of 1000 Train loss: 0.6518 Train f_score 0.6545 Xval loss: 0.6987 Xval f_score 0.4839\n", "14:19:58 epoch 650 of 1000 Train loss: 0.6451 Train f_score 0.6476 Xval loss: 0.6995 Xval f_score 0.4762\n", "14:19:58 epoch 700 of 1000 Train loss: 0.6385 Train f_score 0.6419 Xval loss: 0.7000 Xval f_score 0.5079\n", "14:19:58 epoch 750 of 1000 Train loss: 
0.6462 Train f_score 0.6190 Xval loss: 0.7006 Xval f_score 0.5000\n", "14:19:59 epoch 800 of 1000 Train loss: 0.6509 Train f_score 0.6389 Xval loss: 0.7016 Xval f_score 0.4848\n", "14:19:59 epoch 850 of 1000 Train loss: 0.6284 Train f_score 0.7059 Xval loss: 0.7029 Xval f_score 0.4848\n", "14:19:59 epoch 900 of 1000 Train loss: 0.6416 Train f_score 0.5794 Xval loss: 0.7028 Xval f_score 0.4615\n", "14:19:59 epoch 950 of 1000 Train loss: 0.6145 Train f_score 0.6575 Xval loss: 0.7026 Xval f_score 0.4848\n", "Best Xval loss epoch 149, value 0.690139\n", "NN units 16\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.681, Train F1 0.691\n", "[[68 32]\n", " [35 75]]\n", "Final Xval Accuracy 0.577, Xval F1 0.583\n", "[[20 14]\n", " [16 21]]\n", "14:20:04 Starting\n", "14:20:07 epoch 0 of 1000 Train loss: 0.6964 Train f_score 0.6755 Xval loss: 0.6981 Xval f_score 0.6604\n", "14:20:07 epoch 50 of 1000 Train loss: 0.6936 Train f_score 0.5726 Xval loss: 0.6939 Xval f_score 0.6327\n", "14:20:07 epoch 100 of 1000 Train loss: 0.6885 Train f_score 0.6167 Xval loss: 0.6934 Xval f_score 0.5934\n", "14:20:07 epoch 150 of 1000 Train loss: 0.6924 Train f_score 0.5726 Xval loss: 0.6928 Xval f_score 0.5783\n", "14:20:07 epoch 200 of 1000 Train loss: 0.6823 Train f_score 0.6106 Xval loss: 0.6925 Xval f_score 0.5714\n", "14:20:08 epoch 250 of 1000 Train loss: 0.6785 Train f_score 0.5822 Xval loss: 0.6928 Xval f_score 0.5600\n", "14:20:08 epoch 300 of 1000 Train loss: 0.6667 Train f_score 0.6311 Xval loss: 0.6936 Xval f_score 0.5867\n", "14:20:08 epoch 350 of 1000 Train loss: 0.6597 Train f_score 0.6288 Xval loss: 0.6943 Xval f_score 0.5429\n", "14:20:08 epoch 400 of 1000 Train loss: 0.6768 Train f_score 0.5646 Xval loss: 0.6954 Xval f_score 0.5588\n", "14:20:08 epoch 450 of 1000 Train loss: 0.6765 Train f_score 0.6047 Xval loss: 0.6963 Xval f_score 0.5507\n", "14:20:08 epoch 500 of 1000 Train loss: 0.6660 Train f_score 0.5701 Xval loss: 0.6978 Xval f_score 0.5588\n", "14:20:09 epoch 550 of 1000 Train loss: 0.6622 Train f_score 0.6457 Xval loss: 0.6989 Xval f_score 0.5588\n", "14:20:09 epoch 600 of 1000 Train loss: 0.6324 Train f_score 0.7005 Xval loss: 0.6998 Xval f_score 0.5373\n", "14:20:09 epoch 650 of 1000 Train loss: 0.6554 Train f_score 0.6250 Xval loss: 0.7006 Xval f_score 0.5588\n", "14:20:09 epoch 700 of 1000 Train loss: 0.6494 Train f_score 0.6481 Xval loss: 0.7013 Xval f_score 0.5000\n", "14:20:09 epoch 750 of 1000 Train loss: 0.6566 Train f_score 0.6182 Xval loss: 0.7024 Xval f_score 0.4923\n", "14:20:10 epoch 800 of 1000 Train loss: 0.6510 Train f_score 0.6210 Xval loss: 0.7038 Xval f_score 0.4923\n", "14:20:10 epoch 850 of 1000 Train loss: 0.6513 Train f_score 0.6637 Xval loss: 0.7038 Xval f_score 0.5152\n", "14:20:10 epoch 900 of 1000 Train loss: 0.6536 Train f_score 0.6516 Xval loss: 0.7053 Xval f_score 0.4923\n", "14:20:10 epoch 950 of 1000 Train loss: 0.6550 Train f_score 0.6262 Xval loss: 0.7060 Xval f_score 0.4923\n", "Best Xval loss epoch 200, value 0.692491\n", "NN units 16\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.667, Train F1 0.663\n", "[[71 38]\n", " [32 69]]\n", "Final Xval Accuracy 0.592, Xval F1 0.580\n", "[[22 15]\n", " [14 20]]\n", "14:20:14 Starting\n", "14:20:18 epoch 0 of 1000 Train loss: 0.7018 Train f_score 0.6469 Xval loss: 0.7000 Xval f_score 0.6604\n", "14:20:18 epoch 50 of 1000 Train loss: 0.6924 Train f_score 0.6245 Xval loss: 0.6952 Xval f_score 0.6667\n", "14:20:18 
epoch 100 of 1000 Train loss: 0.6890 Train f_score 0.5959 Xval loss: 0.6945 Xval f_score 0.6400\n", "14:20:18 epoch 150 of 1000 Train loss: 0.6855 Train f_score 0.6179 Xval loss: 0.6944 Xval f_score 0.6400\n", "14:20:18 epoch 200 of 1000 Train loss: 0.6913 Train f_score 0.5847 Xval loss: 0.6947 Xval f_score 0.5652\n", "14:20:18 epoch 250 of 1000 Train loss: 0.6921 Train f_score 0.5430 Xval loss: 0.6947 Xval f_score 0.5581\n", "14:20:19 epoch 300 of 1000 Train loss: 0.6840 Train f_score 0.6154 Xval loss: 0.6950 Xval f_score 0.5610\n", "14:20:19 epoch 350 of 1000 Train loss: 0.6846 Train f_score 0.6106 Xval loss: 0.6947 Xval f_score 0.5641\n", "14:20:19 epoch 400 of 1000 Train loss: 0.6750 Train f_score 0.6111 Xval loss: 0.6956 Xval f_score 0.6053\n", "14:20:19 epoch 450 of 1000 Train loss: 0.6986 Train f_score 0.5438 Xval loss: 0.6955 Xval f_score 0.5833\n", "14:20:19 epoch 500 of 1000 Train loss: 0.6801 Train f_score 0.6364 Xval loss: 0.6960 Xval f_score 0.6027\n", "14:20:19 epoch 550 of 1000 Train loss: 0.6820 Train f_score 0.6027 Xval loss: 0.6968 Xval f_score 0.5634\n", "14:20:20 epoch 600 of 1000 Train loss: 0.6792 Train f_score 0.5990 Xval loss: 0.6971 Xval f_score 0.5634\n", "14:20:20 epoch 650 of 1000 Train loss: 0.6587 Train f_score 0.6000 Xval loss: 0.6981 Xval f_score 0.5797\n", "14:20:20 epoch 700 of 1000 Train loss: 0.6665 Train f_score 0.5888 Xval loss: 0.6985 Xval f_score 0.5294\n", "14:20:20 epoch 750 of 1000 Train loss: 0.6634 Train f_score 0.6124 Xval loss: 0.6993 Xval f_score 0.5294\n", "14:20:20 epoch 800 of 1000 Train loss: 0.6806 Train f_score 0.5782 Xval loss: 0.7004 Xval f_score 0.5373\n", "14:20:21 epoch 850 of 1000 Train loss: 0.6675 Train f_score 0.6244 Xval loss: 0.7012 Xval f_score 0.5455\n", "14:20:21 epoch 900 of 1000 Train loss: 0.6563 Train f_score 0.6603 Xval loss: 0.7015 Xval f_score 0.5455\n", "14:20:21 epoch 950 of 1000 Train loss: 0.6588 Train f_score 0.6389 Xval loss: 0.7018 Xval f_score 0.5373\n", "Best Xval loss epoch 152, value 0.694416\n", "NN units 16\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.671, Train F1 0.679\n", "[[68 34]\n", " [35 73]]\n", "Final Xval Accuracy 0.592, Xval F1 0.473\n", "[[29 22]\n", " [ 7 13]]\n", "14:20:25 Starting\n", "14:20:29 epoch 0 of 1000 Train loss: 0.7135 Train f_score 0.5067 Xval loss: 0.7104 Xval f_score 0.6604\n", "14:20:29 epoch 50 of 1000 Train loss: 0.6898 Train f_score 0.6364 Xval loss: 0.6979 Xval f_score 0.6604\n", "14:20:29 epoch 100 of 1000 Train loss: 0.6933 Train f_score 0.5920 Xval loss: 0.6952 Xval f_score 0.6604\n", "14:20:29 epoch 150 of 1000 Train loss: 0.6939 Train f_score 0.5926 Xval loss: 0.6947 Xval f_score 0.6604\n", "14:20:29 epoch 200 of 1000 Train loss: 0.6944 Train f_score 0.6135 Xval loss: 0.6946 Xval f_score 0.6604\n", "14:20:29 epoch 250 of 1000 Train loss: 0.6953 Train f_score 0.5873 Xval loss: 0.6945 Xval f_score 0.6604\n", "14:20:30 epoch 300 of 1000 Train loss: 0.6985 Train f_score 0.5817 Xval loss: 0.6944 Xval f_score 0.6604\n", "14:20:30 epoch 350 of 1000 Train loss: 0.6921 Train f_score 0.6058 Xval loss: 0.6944 Xval f_score 0.6604\n", "14:20:30 epoch 400 of 1000 Train loss: 0.6901 Train f_score 0.6087 Xval loss: 0.6945 Xval f_score 0.6604\n", "14:20:30 epoch 450 of 1000 Train loss: 0.6926 Train f_score 0.6160 Xval loss: 0.6943 Xval f_score 0.6604\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "14:20:30 epoch 500 of 1000 Train loss: 0.6935 Train f_score 0.5714 Xval loss: 0.6945 Xval f_score 0.6604\n", 
"14:20:30 epoch 550 of 1000 Train loss: 0.6935 Train f_score 0.5844 Xval loss: 0.6944 Xval f_score 0.6604\n", "14:20:31 epoch 600 of 1000 Train loss: 0.6917 Train f_score 0.6614 Xval loss: 0.6946 Xval f_score 0.6604\n", "14:20:31 epoch 650 of 1000 Train loss: 0.6908 Train f_score 0.6459 Xval loss: 0.6947 Xval f_score 0.6604\n", "14:20:31 epoch 700 of 1000 Train loss: 0.6977 Train f_score 0.5455 Xval loss: 0.6947 Xval f_score 0.6604\n", "14:20:31 epoch 750 of 1000 Train loss: 0.6927 Train f_score 0.6316 Xval loss: 0.6946 Xval f_score 0.6476\n", "14:20:31 epoch 800 of 1000 Train loss: 0.6950 Train f_score 0.5774 Xval loss: 0.6946 Xval f_score 0.6604\n", "14:20:31 epoch 850 of 1000 Train loss: 0.6951 Train f_score 0.5920 Xval loss: 0.6947 Xval f_score 0.6604\n", "14:20:32 epoch 900 of 1000 Train loss: 0.6949 Train f_score 0.5906 Xval loss: 0.6949 Xval f_score 0.6604\n", "14:20:32 epoch 950 of 1000 Train loss: 0.6941 Train f_score 0.6031 Xval loss: 0.6949 Xval f_score 0.6604\n", "Best Xval loss epoch 445, value 0.694278\n", "NN units 16\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.595, Train F1 0.632\n", "[[52 34]\n", " [51 73]]\n", "Final Xval Accuracy 0.563, Xval F1 0.415\n", "[[29 24]\n", " [ 7 11]]\n", "14:20:36 Starting\n", "14:20:40 epoch 0 of 1000 Train loss: 0.7517 Train f_score 0.4749 Xval loss: 0.7461 Xval f_score 0.6408\n", "14:20:40 epoch 50 of 1000 Train loss: 0.7009 Train f_score 0.6129 Xval loss: 0.7041 Xval f_score 0.6604\n", "14:20:40 epoch 100 of 1000 Train loss: 0.6967 Train f_score 0.6131 Xval loss: 0.6949 Xval f_score 0.6604\n", "14:20:40 epoch 150 of 1000 Train loss: 0.6956 Train f_score 0.5882 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:20:40 epoch 200 of 1000 Train loss: 0.6939 Train f_score 0.6441 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:20:40 epoch 250 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:41 epoch 300 of 1000 Train loss: 0.6937 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:41 epoch 350 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:20:41 epoch 400 of 1000 Train loss: 0.6928 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:20:41 epoch 450 of 1000 Train loss: 0.6932 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:41 epoch 500 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:20:41 epoch 550 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:42 epoch 600 of 1000 Train loss: 0.6930 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:42 epoch 650 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:20:42 epoch 700 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:42 epoch 750 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:20:42 epoch 800 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:20:43 epoch 850 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:43 epoch 900 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:20:43 epoch 950 of 1000 Train loss: 0.6931 Train f_score 0.6751 Xval loss: 0.6939 Xval f_score 0.6604\n", "Best Xval loss epoch 636, value 0.693763\n", "NN units 
16\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.514, Train F1 0.589\n", "[[35 34]\n", " [68 73]]\n", "Final Xval Accuracy 0.549, Xval F1 0.333\n", "[[31 27]\n", " [ 5 8]]\n", "14:20:47 Starting\n", "14:20:51 epoch 0 of 1000 Train loss: 0.6964 Train f_score 0.5025 Xval loss: 0.6928 Xval f_score 0.3200\n", "14:20:51 epoch 50 of 1000 Train loss: 0.6900 Train f_score 0.5823 Xval loss: 0.6912 Xval f_score 0.6067\n", "14:20:51 epoch 100 of 1000 Train loss: 0.6837 Train f_score 0.6174 Xval loss: 0.6904 Xval f_score 0.5610\n", "14:20:51 epoch 150 of 1000 Train loss: 0.6739 Train f_score 0.6213 Xval loss: 0.6904 Xval f_score 0.5526\n", "14:20:51 epoch 200 of 1000 Train loss: 0.6611 Train f_score 0.5909 Xval loss: 0.6919 Xval f_score 0.5352\n", "14:20:51 epoch 250 of 1000 Train loss: 0.6605 Train f_score 0.6606 Xval loss: 0.6936 Xval f_score 0.5217\n", "14:20:52 epoch 300 of 1000 Train loss: 0.6561 Train f_score 0.5943 Xval loss: 0.6954 Xval f_score 0.5373\n", "14:20:52 epoch 350 of 1000 Train loss: 0.6462 Train f_score 0.6038 Xval loss: 0.6976 Xval f_score 0.5312\n", "14:20:52 epoch 400 of 1000 Train loss: 0.6553 Train f_score 0.6330 Xval loss: 0.6995 Xval f_score 0.5079\n", "14:20:52 epoch 450 of 1000 Train loss: 0.6551 Train f_score 0.5991 Xval loss: 0.7014 Xval f_score 0.5079\n", "14:20:52 epoch 500 of 1000 Train loss: 0.6523 Train f_score 0.6512 Xval loss: 0.7028 Xval f_score 0.5079\n", "14:20:52 epoch 550 of 1000 Train loss: 0.6502 Train f_score 0.6404 Xval loss: 0.7032 Xval f_score 0.5000\n", "14:20:53 epoch 600 of 1000 Train loss: 0.6348 Train f_score 0.6204 Xval loss: 0.7034 Xval f_score 0.5000\n", "14:20:53 epoch 650 of 1000 Train loss: 0.6382 Train f_score 0.6419 Xval loss: 0.7048 Xval f_score 0.5000\n", "14:20:53 epoch 700 of 1000 Train loss: 0.6384 Train f_score 0.6452 Xval loss: 0.7069 Xval f_score 0.5000\n", "14:20:53 epoch 750 of 1000 Train loss: 0.6350 Train f_score 0.6326 Xval loss: 0.7070 Xval f_score 0.5000\n", "14:20:53 epoch 800 of 1000 Train loss: 0.6236 Train f_score 0.6605 Xval loss: 0.7067 Xval f_score 0.4848\n", "14:20:54 epoch 850 of 1000 Train loss: 0.6563 Train f_score 0.6019 Xval loss: 0.7072 Xval f_score 0.4848\n", "14:20:54 epoch 900 of 1000 Train loss: 0.6369 Train f_score 0.6442 Xval loss: 0.7082 Xval f_score 0.4848\n", "14:20:54 epoch 950 of 1000 Train loss: 0.6533 Train f_score 0.6393 Xval loss: 0.7089 Xval f_score 0.5075\n", "Best Xval loss epoch 120, value 0.690116\n", "NN units 32\n", "Reg_penalty 0.00000000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.676, Train F1 0.694\n", "[[65 30]\n", " [38 77]]\n", "Final Xval Accuracy 0.592, Xval F1 0.592\n", "[[21 14]\n", " [15 21]]\n", "14:20:58 Starting\n", "14:21:02 epoch 0 of 1000 Train loss: 0.6935 Train f_score 0.5789 Xval loss: 0.6973 Xval f_score 0.6667\n", "14:21:02 epoch 50 of 1000 Train loss: 0.6810 Train f_score 0.6154 Xval loss: 0.6949 Xval f_score 0.6087\n", "14:21:02 epoch 100 of 1000 Train loss: 0.6885 Train f_score 0.5639 Xval loss: 0.6942 Xval f_score 0.5610\n", "14:21:02 epoch 150 of 1000 Train loss: 0.6858 Train f_score 0.6126 Xval loss: 0.6946 Xval f_score 0.5676\n", "14:21:03 epoch 200 of 1000 Train loss: 0.6685 Train f_score 0.6637 Xval loss: 0.6958 Xval f_score 0.5634\n", "14:21:03 epoch 250 of 1000 Train loss: 0.6768 Train f_score 0.5936 Xval loss: 0.6980 Xval f_score 0.5634\n", "14:21:03 epoch 300 of 1000 Train loss: 0.6700 Train f_score 0.6273 Xval loss: 0.6999 Xval f_score 0.5507\n", "14:21:03 epoch 350 of 1000 
Train loss: 0.6706 Train f_score 0.6083 Xval loss: 0.7016 Xval f_score 0.5758\n", "14:21:03 epoch 400 of 1000 Train loss: 0.6494 Train f_score 0.6762 Xval loss: 0.7025 Xval f_score 0.5758\n", "14:21:03 epoch 450 of 1000 Train loss: 0.6712 Train f_score 0.6516 Xval loss: 0.7029 Xval f_score 0.5312\n", "14:21:04 epoch 500 of 1000 Train loss: 0.6660 Train f_score 0.6091 Xval loss: 0.7046 Xval f_score 0.5538\n", "14:21:04 epoch 550 of 1000 Train loss: 0.6665 Train f_score 0.6326 Xval loss: 0.7068 Xval f_score 0.5373\n", "14:21:04 epoch 600 of 1000 Train loss: 0.6582 Train f_score 0.6393 Xval loss: 0.7082 Xval f_score 0.5373\n", "14:21:04 epoch 650 of 1000 Train loss: 0.6755 Train f_score 0.5700 Xval loss: 0.7093 Xval f_score 0.5152\n", "14:21:04 epoch 700 of 1000 Train loss: 0.6390 Train f_score 0.6697 Xval loss: 0.7102 Xval f_score 0.5231\n", "14:21:04 epoch 750 of 1000 Train loss: 0.6556 Train f_score 0.5981 Xval loss: 0.7114 Xval f_score 0.5152\n", "14:21:05 epoch 800 of 1000 Train loss: 0.6597 Train f_score 0.6393 Xval loss: 0.7119 Xval f_score 0.5231\n", "14:21:05 epoch 850 of 1000 Train loss: 0.6553 Train f_score 0.6355 Xval loss: 0.7132 Xval f_score 0.5152\n", "14:21:05 epoch 900 of 1000 Train loss: 0.6547 Train f_score 0.6385 Xval loss: 0.7142 Xval f_score 0.5152\n", "14:21:05 epoch 950 of 1000 Train loss: 0.6397 Train f_score 0.6415 Xval loss: 0.7150 Xval f_score 0.5152\n", "Best Xval loss epoch 91, value 0.694011\n", "NN units 32\n", "Reg_penalty 0.00010000\n", "Dropout 0.5000\n", "Activation sigmoid\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Final Train Accuracy 0.667, Train F1 0.676\n", "[[67 34]\n", " [36 73]]\n", "Final Xval Accuracy 0.577, Xval F1 0.583\n", "[[20 14]\n", " [16 21]]\n", "14:21:10 Starting\n", "14:21:13 epoch 0 of 1000 Train loss: 0.7131 Train f_score 0.5217 Xval loss: 0.7059 Xval f_score 0.6337\n", "14:21:13 epoch 50 of 1000 Train loss: 0.6926 Train f_score 0.5858 Xval loss: 0.6994 Xval f_score 0.6042\n", "14:21:14 epoch 100 of 1000 Train loss: 0.6875 Train f_score 0.6376 Xval loss: 0.6976 Xval f_score 0.5926\n", "14:21:14 epoch 150 of 1000 Train loss: 0.6946 Train f_score 0.5766 Xval loss: 0.6978 Xval f_score 0.5854\n", "14:21:14 epoch 200 of 1000 Train loss: 0.6916 Train f_score 0.6325 Xval loss: 0.6983 Xval f_score 0.5432\n", "14:21:14 epoch 250 of 1000 Train loss: 0.6893 Train f_score 0.5867 Xval loss: 0.6985 Xval f_score 0.5789\n", "14:21:14 epoch 300 of 1000 Train loss: 0.6875 Train f_score 0.6071 Xval loss: 0.6991 Xval f_score 0.6053\n", "14:21:14 epoch 350 of 1000 Train loss: 0.6901 Train f_score 0.6335 Xval loss: 0.6997 Xval f_score 0.5753\n", "14:21:15 epoch 400 of 1000 Train loss: 0.6714 Train f_score 0.6372 Xval loss: 0.7005 Xval f_score 0.5556\n", "14:21:15 epoch 450 of 1000 Train loss: 0.6794 Train f_score 0.6239 Xval loss: 0.7017 Xval f_score 0.5672\n", "14:21:15 epoch 500 of 1000 Train loss: 0.6830 Train f_score 0.6220 Xval loss: 0.7017 Xval f_score 0.5672\n", "14:21:15 epoch 550 of 1000 Train loss: 0.6819 Train f_score 0.5845 Xval loss: 0.7024 Xval f_score 0.5588\n", "14:21:15 epoch 600 of 1000 Train loss: 0.6700 Train f_score 0.6099 Xval loss: 0.7026 Xval f_score 0.5455\n", "14:21:15 epoch 650 of 1000 Train loss: 0.6830 Train f_score 0.5953 Xval loss: 0.7033 Xval f_score 0.5455\n", "14:21:16 epoch 700 of 1000 Train loss: 0.7049 Train f_score 0.6278 Xval loss: 0.7042 Xval f_score 0.5373\n", "14:21:16 epoch 750 of 1000 Train loss: 0.6801 Train f_score 0.6335 Xval loss: 0.7044 Xval f_score 0.5373\n", "14:21:16 epoch 800 of 1000 
Train loss: 0.6592 Train f_score 0.6296 Xval loss: 0.7051 Xval f_score 0.5373\n", "14:21:16 epoch 850 of 1000 Train loss: 0.6681 Train f_score 0.6368 Xval loss: 0.7049 Xval f_score 0.5373\n", "14:21:16 epoch 900 of 1000 Train loss: 0.6915 Train f_score 0.5660 Xval loss: 0.7054 Xval f_score 0.5373\n", "14:21:17 epoch 950 of 1000 Train loss: 0.6777 Train f_score 0.6071 Xval loss: 0.7059 Xval f_score 0.5373\n", "Best Xval loss epoch 104, value 0.697482\n", "NN units 32\n", "Reg_penalty 0.00030000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.662, Train F1 0.664\n", "[[69 37]\n", " [34 70]]\n", "Final Xval Accuracy 0.592, Xval F1 0.473\n", "[[29 22]\n", " [ 7 13]]\n", "14:21:21 Starting\n", "14:21:24 epoch 0 of 1000 Train loss: 0.7248 Train f_score 0.5714 Xval loss: 0.7298 Xval f_score 0.0513\n", "14:21:25 epoch 50 of 1000 Train loss: 0.7027 Train f_score 0.5333 Xval loss: 0.7019 Xval f_score 0.6604\n", "14:21:25 epoch 100 of 1000 Train loss: 0.6965 Train f_score 0.6118 Xval loss: 0.6954 Xval f_score 0.6604\n", "14:21:25 epoch 150 of 1000 Train loss: 0.6953 Train f_score 0.5726 Xval loss: 0.6946 Xval f_score 0.6604\n", "14:21:25 epoch 200 of 1000 Train loss: 0.6944 Train f_score 0.5948 Xval loss: 0.6945 Xval f_score 0.6604\n", "14:21:25 epoch 250 of 1000 Train loss: 0.6940 Train f_score 0.5978 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:25 epoch 300 of 1000 Train loss: 0.6926 Train f_score 0.6447 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:26 epoch 350 of 1000 Train loss: 0.6966 Train f_score 0.6219 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:21:26 epoch 400 of 1000 Train loss: 0.6947 Train f_score 0.6421 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:21:26 epoch 450 of 1000 Train loss: 0.6931 Train f_score 0.6553 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:21:26 epoch 500 of 1000 Train loss: 0.6939 Train f_score 0.6601 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:21:26 epoch 550 of 1000 Train loss: 0.6923 Train f_score 0.6818 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:21:27 epoch 600 of 1000 Train loss: 0.6943 Train f_score 0.6645 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:21:27 epoch 650 of 1000 Train loss: 0.6932 Train f_score 0.6645 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:21:27 epoch 700 of 1000 Train loss: 0.6943 Train f_score 0.6624 Xval loss: 0.6940 Xval f_score 0.6604\n", "14:21:27 epoch 750 of 1000 Train loss: 0.6938 Train f_score 0.6688 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:21:27 epoch 800 of 1000 Train loss: 0.6929 Train f_score 0.6645 Xval loss: 0.6939 Xval f_score 0.6604\n", "14:21:27 epoch 850 of 1000 Train loss: 0.6950 Train f_score 0.6579 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:21:28 epoch 900 of 1000 Train loss: 0.6929 Train f_score 0.6667 Xval loss: 0.6938 Xval f_score 0.6604\n", "14:21:28 epoch 950 of 1000 Train loss: 0.6933 Train f_score 0.6815 Xval loss: 0.6940 Xval f_score 0.6604\n", "Best Xval loss epoch 791, value 0.693786\n", "NN units 32\n", "Reg_penalty 0.00100000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.595, Train F1 0.605\n", "[[60 42]\n", " [43 65]]\n", "Final Xval Accuracy 0.577, Xval F1 0.605\n", "[[18 12]\n", " [18 23]]\n", "14:21:32 Starting\n", "14:21:36 epoch 0 of 1000 Train loss: 0.8036 Train f_score 0.5304 Xval loss: 0.8002 Xval f_score 0.4068\n", "14:21:36 epoch 50 of 1000 Train loss: 0.7154 Train f_score 0.5726 Xval loss: 0.7122 Xval f_score 0.6604\n", "14:21:36 epoch 100 of 1000 Train loss: 0.6930 Train f_score 0.6255 Xval loss: 0.6954 Xval f_score 
0.6604\n", "14:21:36 epoch 150 of 1000 Train loss: 0.6955 Train f_score 0.6411 Xval loss: 0.6947 Xval f_score 0.6604\n", "14:21:36 epoch 200 of 1000 Train loss: 0.6927 Train f_score 0.6840 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:21:37 epoch 250 of 1000 Train loss: 0.6944 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:37 epoch 300 of 1000 Train loss: 0.6943 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:21:37 epoch 350 of 1000 Train loss: 0.6943 Train f_score 0.6537 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:37 epoch 400 of 1000 Train loss: 0.6945 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:21:37 epoch 450 of 1000 Train loss: 0.6937 Train f_score 0.6751 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:21:37 epoch 500 of 1000 Train loss: 0.6936 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:38 epoch 550 of 1000 Train loss: 0.6933 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:21:38 epoch 600 of 1000 Train loss: 0.6935 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:38 epoch 650 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:38 epoch 700 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:38 epoch 750 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6943 Xval f_score 0.6604\n", "14:21:39 epoch 800 of 1000 Train loss: 0.6939 Train f_score 0.6751 Xval loss: 0.6941 Xval f_score 0.6604\n", "14:21:39 epoch 850 of 1000 Train loss: 0.6936 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:39 epoch 900 of 1000 Train loss: 0.6929 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "14:21:39 epoch 950 of 1000 Train loss: 0.6934 Train f_score 0.6751 Xval loss: 0.6942 Xval f_score 0.6604\n", "Best Xval loss epoch 739, value 0.694027\n", "NN units 32\n", "Reg_penalty 0.00300000\n", "Dropout 0.5000\n", "Activation sigmoid\n", "Final Train Accuracy 0.529, Train F1 0.671\n", "[[ 10 6]\n", " [ 93 101]]\n", "Final Xval Accuracy 0.592, Xval F1 0.540\n", "[[25 18]\n", " [11 17]]\n", "14:21:43 Finishing\n" ] } ], "source": [ "# hyperparameter options to evaluate\n", "hidden_layer_hp = [4, 8, 16, 32]\n", "dropout_hp = [0, 0.333, 0.5]\n", "reg_penalty_hp = [0, 0.0001, 0.0003, 0.001, 0.003]\n", "activation_hp = ['relu', 'sigmoid']\n", "\n", "fscores = {}\n", "\n", "for dr in dropout_hp:\n", " for ac in activation_hp:\n", " for hl in hidden_layer_hp:\n", " for rp in reg_penalty_hp:\n", " # print(\"\\n %s\\n\" % (time.strftime(\"%H:%M:%S\"))) \n", "\n", " model = create_model(num_components=num_features,\n", " hidden_layer_size=hl, \n", " dropout=dr, \n", " reg_penalty=rp,\n", " activation=ac)\n", " \n", " models = []\n", " losses = []\n", " \n", " \n", " epochs = 1000 # increase this if Xval_loss doesn't always reach a minimum in this many epochs\n", " print('%s Starting' % time.strftime(\"%H:%M:%S\")) \n", " \n", " for i in range(epochs):\n", " fit = model.fit(X_train, y_train, validation_data=(X_xval, y_xval), \n", " epochs=1, \n", " batch_size=X_train.shape[0], # small data, full batch, no sgd \n", " verbose=False)\n", " train_loss = fit.history['loss'][-1]\n", " train_acc = fit.history['acc'][-1]\n", " train_f_score = fit.history['f_score'][-1]\n", " current_loss = fit.history['val_loss'][-1]\n", " current_acc = fit.history['val_acc'][-1]\n", " current_f_score = fit.history['val_f_score'][-1]\n", " if i % 100 == 0:\n", " print('%s epoch %d 
of %d Train loss: %.4f Train f_score %.4f Xval loss: %.4f Xval f_score %.4f' % \n", "                          (time.strftime(\"%H:%M:%S\"), i, epochs,\n", "                           train_loss, train_f_score,\n", "                           current_loss, current_f_score))\n", "\n", "                    losses.append(current_loss)\n", "                    # snapshot the weights; copy.copy(model) would only make a shallow\n", "                    # copy that aliases the live weights, so every saved snapshot would\n", "                    # end up identical to the final epoch\n", "                    models.append(model.get_weights())\n", "\n", "                    bestloss_index = np.argmin(losses)\n", "                    bestloss_value = losses[bestloss_index]\n", "                    # stop early if loss rises by 10% from the best seen so far\n", "                    if current_loss / bestloss_value > 1.1:\n", "                        break\n", "\n", "                # restore the weights from the epoch with the best xval loss\n", "                print(\"Best Xval loss epoch %d, value %f\" % (bestloss_index, bestloss_value))\n", "                model.set_weights(models[bestloss_index])\n", "\n", "                # evaluate model\n", "                print(\"NN units %d\" % hl)\n", "                print(\"Reg_penalty %.8f\" % rp)\n", "                print(\"Dropout %.4f\" % dr)\n", "                print(\"Activation %s\" % ac)\n", "\n", "                y_train_prob = model.predict(X_train)\n", "\n", "                thresh, score = selectThresholdAcc(y_train_prob, y_train)\n", "                y_train_pred = y_train_prob >= thresh\n", "\n", "                print(\"Final Train Accuracy %.3f, Train F1 %.3f\" % \n", "                      (sklearn.metrics.accuracy_score(y_train_pred, y_train), \n", "                       sklearn.metrics.f1_score(y_train_pred, y_train)))\n", "\n", "                print(sklearn.metrics.confusion_matrix(y_train_pred, y_train))\n", "\n", "                y_xval_prob = model.predict(X_xval)\n", "\n", "                thresh, score = selectThresholdAcc(y_xval_prob, y_xval)\n", "                y_xval_pred = y_xval_prob >= thresh\n", "\n", "                print(\"Final Xval Accuracy %.3f, Xval F1 %.3f\" % \n", "                      (sklearn.metrics.accuracy_score(y_xval_pred, y_xval), \n", "                       sklearn.metrics.f1_score(y_xval_pred, y_xval)))\n", "\n", "                confusion_matrix = sklearn.metrics.confusion_matrix(y_xval_pred, y_xval)\n", "                print(confusion_matrix)\n", "                true_negative = confusion_matrix[0][0]\n", "                false_negative = confusion_matrix[0][1]\n", "                false_positive = confusion_matrix[1][0]\n", "                true_positive = confusion_matrix[1][1]\n", "\n", "                fscores[(hl, rp, dr, ac)] = score\n", "\n", "                # save model to disk\n", "                modelname = \"model_%s_%d_%.3f_%.6f\" % (ac, hl, dr, rp)\n", "                model.save(\"%s.h5\" % modelname)\n", "                model.save_weights(\"%s_weights.h5\" % modelname)\n", "                with open(\"%s.json\" % modelname, \"wb\") as fjson:\n", "                    fjson.write(model.to_json())\n", "\n", "print('%s Finishing' % time.strftime(\"%H:%M:%S\"))\n" ] },
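{ "cell_type": "markdown", "metadata": {}, "source": [ "The loop above implements early stopping by hand: it snapshots the weights every epoch, tracks the Xval loss, and restores the snapshot from the best epoch. For comparison, the next cell is a minimal sketch of an equivalent setup using Keras's built-in `EarlyStopping` and `ModelCheckpoint` callbacks. It is not what was run above: `best_weights.h5` is a hypothetical scratch file, and the `patience` value is an assumption standing in for the manual 10%-above-best stopping rule.\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Sketch only (not executed in this notebook): early stopping via built-in\n", "# Keras callbacks. Assumes model, X_train, y_train, X_xval, y_xval are\n", "# defined as above; 'best_weights.h5' is a hypothetical scratch file.\n", "from keras.callbacks import EarlyStopping, ModelCheckpoint\n", "\n", "callbacks = [\n", "    # stop once val_loss has not improved for 100 consecutive epochs\n", "    EarlyStopping(monitor='val_loss', patience=100),\n", "    # keep only the weights from the epoch with the best val_loss\n", "    ModelCheckpoint('best_weights.h5', monitor='val_loss', save_best_only=True),\n", "]\n", "\n", "fit = model.fit(X_train, y_train,\n", "                validation_data=(X_xval, y_xval),\n", "                epochs=1000,\n", "                batch_size=X_train.shape[0],  # small data, full batch\n", "                verbose=0,\n", "                callbacks=callbacks)\n", "\n", "# restore the best-epoch weights before evaluating\n", "model.load_weights('best_weights.h5')\n" ] },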
"iVBORw0KGgoAAAANSUhEUgAAATAAAAD+CAYAAAC9blmDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYFeWV/z+Hhm6WZqdFAbURFRdkcd8nEk2IkWiMeYJL\nohMNMXnyMwYdTWKiqDPGmMQYZ5KMxjjgPr+fOhoZExMjhhBFWQKERgHRRlkEBLG7aaC38/vjvJeu\nvty+93bfvre72vN5nnq6b731vvVW1fecd6mqU6KqOI7jxJEenV0Bx3Gc9uIOzHGc2OIOzHGc2OIO\nzHGc2OIOzHGc2OIOzHGc2NKpDkxEZorII51ZB8eJIiLfF5EHutp+RaRSRM4uZJ3iQGx7YGL8WES2\nheXHIiJptv+kiLwpIrUiMldEDs62LBEpD3lqQxlnJ5V9iYisE5GdIvKMiAyJpJWIyIMiUiUi74vI\njKS8E0VkcSh7sYhM7Jgz5LQHVb1DVa+K034/1ragqnlZgJ4p1k0BVgFvAd8FZgKPZFneg8AWYEX4\n/fVQ1ihgJLASuLqVvMOAKmAF8AbwAVAZSU9bFvAq8AtgIVAJNAI/CWlHA9XAmUAp8BjwRCTvj4C/\nAoOBI4H3w/HPAYqBdcB3gBLgmvC7OF/XxZe86b2FttuYt4W2W9mmEji7lbTvArXA6lCH9zPYwkfA\nF4HewE+A14DXgWXAxmAf6WzhbqAP8AVgB1AGFAXbamijLUwJae2yhY6+iJXAjcByYA/QExgBPAVs\nBeqBW0NllwG/JDgw4BPA+tYuWjgpx9LswF4Bpke2/SqwoJV6TQ/O59jwezjQBHw2U1nA4eFY+gOl\nYd1fgXeAk4E7gMcieccAdUD/8Hsj8KlI+h/DhZkDfArYAEgk/d3ERfUlf0vQ6Qas8VkFfDKsn0mk\nUQW+Eq7XNuCHSZqcCfw/4NGgpzexBm1T2P69pGs/AvgdsB1zdF+LaPs+YEdk2y9H9nsT6R3YQuBf\nw//9McewLI0tvBL53Q/YFbGNV4G3gZPT2UIk/zzgamAGUAFsaIMt3EZwcO21hXwMIS8GPgsMwi7q\nc5izujCcnC8DZwFPAGOzLVRV52EXPsHRodwEy8K6VBwNLFLVJaGszUANcGIWZR0NvK2q1apaE9at\nwC68JudV1bXYRT5cRAYDByTSRWQUsD/Wg0uUvVzD1criOJwOQETGAt8CTlDV/sCnMQeRvN1RwK+A\nS7HrOBDrlUSZijmQF7FezBysAb8bM9D7Its+AazHHNlFwB0iMjloe1fSfn+N2coIYCjWI2qNwzE7\nQ1WrMUd6WCvbJut1J+ZMR4dVR2FOJ6HJlLYQKW8ZZkefxXpjVZGyW7WFVspusy3kw4Hdq6rvqeou\n4ASgTFVvw3o9q4HfANOwCzkgh/2UYl3hBFVAaStj/xbbikg51k3dlEVZe9NEpEhElgJXAptV9bUU\neRP5+4c0Iun3AP8J9G1lv9G8Tv5oxK7/USLSS1Urg7ElcxHwnKrOV9U64GaaDTvBXzEtv4v1xsqw\nHtkBmMMqF5FBInIgcBpwo6ruVtWlwANYDy/Vfueo6jxV3YP1/JrSHE9Uo+WYQ+uTjS0EqoABQdsD\ngL8GbSfS9rGFpLyTgRuwIWl9ivRUthBNS1evtLaQDwf2XuT/g4ERIrIDmI21KN/HnFmu1NDSAQ4E\napI8+D7bikgpNqTdjA1rM5W1N01VG1V1IjALGCwi41LkTeSvDmlg4jgPm+fYgs1XpNpvNK+TJ1T1\nLeBabAi4RUSeEJERKTYdQUTPqlqLDemibI78vwubP9LIbzDjHAFsT+q9rGPfHl2q/e5Msd8oNZjG\nEtr+OVnYQoSBQFXQdhUwKWg7kbaPLUSYEPIuDsfbK0XZLWwhRVq6eqW1hXw4sOhJew94R1UHAWcD\nL6tqf1U9F+sSV0W23UlzzwQRKcJas9aowE5egglhXavbikgv7AInWsqKaHorZVUAh4hItCU4EliC\nTdy2yCsiY7A5vtWq+iHWy5uAtb6fw1rdA7BW6zxgfFJLOT7NcTgdhKo+pqqnY42sAj9OsdkmIkM3\nEemDDeeS2QAcGPk9KqyLshEYkqSjg1Jsl9jv3vJEpG8r+01Qgc0PP4X1/raTwRYiZffD5qqiet+A\naRsy28IE4EARqQQmAodJeDQqjS1E80bLbrstdPDEaCWRiUbszsQSbMK0FJsc/DRwCvtO4g/Eeiaf\nxbz4LdgdjWh55TRP4l+N3fUYSea7kGVY9/Rl4N+Bu4hM+GcqC1gQ6jocm8vbEdadh43Rq4AzsHmx\n5DsvdwJ/oeWdlxtoeRfy2/hdyIIt2Nzr5HDOi7G7gLND2syIJhN3mE8N2/0YGyJFJ/EfwW5WvQ1c\nFmwgMXfTE3OOo8L2fwX+Axtqjcd6b4my7iFM4oe8NcDpYb8/TbaFpOO5OmjyN22whS+EetwFLAIG\nhfT/g9nhV9LYwk9D3oQtlIW0K8L5aYstJN+FbJMtdLQwKpNPMtYdfjxUthrrZm7E7qzsFUvkBGzC\nhlnX0/KOz+MhrR6bc7gynPztYbmLlncwKoBLI7+vDWJqCuJYCZwb0iRDWeXYRG0TsBu7A3lzJP0S\nbA5kJ/AsMCSSVoIZSBUm2BnYHdc5IX0SkOh+LwEmdbaBd/cFcx6vBz1uxxqTESEtlSbfpfku5Abg\njORtgXODLuuBm8K6ZAc2KuxrO7CW4BiCtquDvhLavjyy30x3IU8P+2kIy+aEtluxhbOxif5dWKM+\nBfg7dvNhBTA/gy28HPKuomUH4xNBw1nbQtJxtNkWJGR0HCcDYY5pB3CYqr7T2fVxYvwkvuMUAhGZ\nKiJ9w1zRT4F/kOKRC6dzcAfmOOk5H5vy2Ig9WzVNfdjSZfAhpOM4scV7YI7jxBZ3YI7jxJZOcWAi\nMt3zO92NztZF3PO3h87qgeV6oB/3/E7XpLN1Eff8bcaHkI7jxJa83IUc1kO0PI1r3NoEZWnS6xpb\nTwN7NDndi2HFpWkSga31UJb8ymmUXWnSyFz/jRnqX0vkpc8UbIIPVDXde6BOJzCsj2h5mvgpW3dB\nWZ80BdSmSSMLXaYTTTb7T5cGbK2BsjS2s3Zd+vx7sEftW2NHHnTdsyMLS1DeAxZlcCLpqEwOqtHW\n/R+bW/4WEYvawa051n+mvQPmdDHKB8CiaTkUsDTHCuQabHxC5k3SceGVueX/nzzo2oeQjuPEFndg\njuPElqwcmIhMEZFVIvKWiHw335VynELguo4/GR1YCCz4S+AzWLzsi0PMbseJLa7r7kE2PbATgbdU\n9W21uOBPYC+4Ok6ccV13A7JxYCNpGed+PanjeDtOnHBddwM6bBJfRKaLyCIRWbQ13fdTHCdGtNB1\nhucDncKTjQPL5oMFqOr9qnq8qh6f7iFPx+kitF3XGR4EdQpPNq5mIfalkdEiUox90/F3+a2W4+Qd\n13U3IOOT+KraICLfAl7AvjL0oKr6Z7+cWOO67h5k9SqR
qj4PPJ/nujhOQXFdxx+frXIcJ7a4A3Mc\nJ7a4A3McJ7bkJZwOQ7F7Ou3l3tx2/8y83PJfMDC3/LecmVv+mTnW38kTfcg5JE1OzM4x/925ZX8h\nx93nA++BOY4TW9yBOY4TW9yBOY4TW7IJp/OgiGwRkRWFqJDjFArXdvzJpgc2C5iS53o4TmcwC9d2\nrMnowFR1HrC9AHVxnILi2o4/PgfmOE5syU88MI+b5HQTWui6prNr4yTTYQ7M4yY53ZEWus7hW6dO\nfvAhpOM4sSWbxygeB14FxorIehHJ8fu8jtM1cG3Hn2wCGl5ciIo4TqFxbccfH0I6jhNb3IE5jhNb\n3IE5jhNb8hIPrG4LVOYQ02tpjvvPNZ5XzjGfLs8xv8cD65qsB2bkkD9XXeSoy2e64S0K74E5jhNb\n3IE5jhNb3IE5jhNbsnmQ9UARmSsiK0WkQkS+XYiKOU6+cW3Hn2wm8RuA61R1iYj0BxaLyJ9UdWWe\n6+Y4+ca1HXOyiQe2SVWXhP+rgTeAkfmumOPkG9d2/GnTHJiIlAOTgNfyURnH6Sxc2/Ek6+fARKQU\neAq4VlWrUqRPB6YDjOiw6jlO/kmn7aiuD5JOqJyTlqx6YCLSC7vAj6rq06m2icZNGtqRNXScPJJJ\n2y3igfk9+y5HNnchBfgt8Iaq5vhtX8fpOri24082bcppwJeBySKyNCzn5rlejlMIXNsxJ5t4YPMB\nH/073Q7XdvzxUb3jOLHFHZjjOLHFHZjjOLElL/HAciXneF65xl3KNSDZ7BzzO12Sukao/Kj9+ctz\n1dWy3LJfcGZu+R/NMU7d53PLnhLvgTmOE1vcgTmOE1vcgTmOE1uyeRK/t4i8LiLLQsykWwtRMcfJ\nN67t+JPNJP4eYLKq1oT3xuaLyO9VdUGe6+Y4+ca1HXOyeRJfgZrws1dYNJ+VcpxC4NqOP9lGoygS\nkaXAFuBPquoxk5xugWs73mTlwFS1UVUnAqOAE0VkXPI2IjJdRBaJyKJtHV1Lx8kTmbTtuu7atOku\npKruAOYCU1KkeTwwJ7a0pm3Xddcmm7uQZSIyKPzfBzgHeLMjdn4PcG1HFOQ47aAVbR8jIg8Uui53\nvAtXrW49vfw1eLGhcPWJC9n0wA4A5orIcmAhNk8wJ7/Vyowq3LgLhlbZcuMuW5eKldvg+Cdg8H22\nnP0/ti7Bz/8Oh8yCAb+GEb+F76yFhkhZZy2DsldhwN9gwmJ49oOW5T+2BQ5+DfrNhwsqYHt9c9qe\nJvjqKsu7/6tw9/qWeZfWwHFLoO98+7u0BqdwpNL25ap6VaEr8v2D4IHD25e3TbawE45fAoNfseXs\n5bYuwc/XwyGvm15HLNjXFn6Ival3CfAd9v2AwDws/vY04EdAdctkEZEHRaRKRN4XkRlJiRNFZLGI\n1Ia/EzMdezZfJVquqpNUdbyqjlPV2zLlAfteVTJ/ASYDnwB+nU0hEb5aC/tVwbhwRu6vg2caYFkp\nLC+F5xrgvrrUeUeUws/PgAlDYf8+sGIbnP1Mc/rnRsPCL0HVN2DFpbCsBu7d0Jx+zxh46wQ4oi/U\nNsKFK2HGWkur2AlfXwMPj4XNp0DfHvDNt5rzzlwHa3bBuhNh7gS46z0Y8xqctwLqmuD8CrhsP/jw\nVLh8uP3GY1QVhPZqOxU5aXsV7PcqjFvUvn3fuQf+vQ6GAEOB2fVpbKEE/vtI+OAUWz43FL70Bpz4\nd2ucf7URvjAUqk6DFcftawtfBR4AHgO+gY2itgONwLeAXwDfBv4LKAHuT9o9cBhwMHAWcIOITAEQ\nkWLgWeARYDD2RvGzYX2rdOiT+CJSKSI3rgGOxpzYZuxAjwWuBM4D/gj8DohOii4ATkkqr7yqudt8\nRTH8oV9z2ux6uK4ERvWAkT3g+hKYVU9KBpXAoYPg7jPhH5fBvxwL7+9s7oWNGQRD+9j/qtBD4K1d\nzfknlMKAInhpPDx6BPQSeOFDWFAFj26BqUPgzEFQWgS3l8PTH0B1qPfszfDDg2BwLziyL4zv1+zc\nX95hrdu1I6GkB1wzcu89/P5ZnG4nB0TkRhHZICLVIrJKRD4Z1s8UkUci231FRNaJyLbNwOnA/JB2\nD/BNzGD/Oay7DzPeicCpWI8kwcY98LkVMOQVOPR1+M0mW3/FcPj8UFi/p3nbhzdbr37oK/Bv76Y/\nlqfqYUYJrBkAi/ubhn7digMb1BPG9IEise2KBNbuNm0vOw5WHg/zqkzbyr62MBp71iRBI/ABMAdz\nJgMx2+8DXIzZdST7UOB2Vf1QVd/A/NsVIe0T2GNd96jqHlW9F2vIJ6c79nxEo7j4YOAZ7ICuwiYW\nrgB+GtYfB0wF/gaUZVnomT2hsqn5d0UjTIi43wlFtq41DugHRz4MNfXQpDB2EGzYCUeFmdnHVsHV\nL0F1PQzrBT87pGX+qRXw4oewR+HsQfBBvZ3dilo4dUDzdmP6mDNavQsO6Q2b6swBggl0Ux0UJY6h\n1hyaRPpbE/rBe3vok+VpcdqBiIzFOgwnqOrG8Em1ohTbHQX8CpvYf70J9mxO2ubPwA1YL6QM0/tY\n4ERgGHAT8New7bQ3YFw/2HgyvFkL5/wDxvSGyYOt0Uuwcid8Yw08Pw5OGgDfeyc4t1asdU0T/DKk\n9Rc4ogcsTGMLAIP+BjWN0ATcdrA1vgD1Cpvr4JPLobYptS38K7AcqMe+QzcIWAz0xcpLcECo8kZg\nuK3qRcuYGstoDlJxNLA8PJsXTT8a+ENrx5GPdyHvLQZ6h71vB67BeluHYGPjOcD+ND9B2B5qgIER\nwx8Q1rU29gfYcTV8dDXcejJ8sBtOGt6cdslYG0Ku/gpcfQAMT+q4zhkHO06F0SUw7yP41GATV00j\nDEyS/oAiqG60NICBQVzXrrWya8NVrmlsTovmJYUxOR1KIzbCOUpEeqlqpaquTbHdRcBzqjpfVeuG\ns+/Y/gTMUEcA52J6Px/YijXS64EqzIj/VgU/Hg29e8DEUrhqf3hoy747ffIDOG+o9epLelivvkea\nSYWoLVQ2wepG6/WktYXT4KPT4D8OhUml0KgwcbENZb9UBjtPh9UnpLaFH2BDyB8AE4BZ2LxYPfsK\nty9Wl93Nq6IBiapoHm2UJqUlp6dENN1RthERqQS+hh3fOmwsewgmGAmLYq9wvI855t3AO6GiozHn\nnuAYoJLmucBibAy9GXtuZzVQG9L6Yo3f3zNUswfm1XsC/yD1dN1gbEohlagJ+RV4G/uSc02oU4JJ\nwKpwnBMxX94P62FXAwcBO7ELNACIzJpxKNBTVUszHIeTAyJyCTYCPBp4AZgRemMzgUNV9TIR+U+g\nSlVvCHm2YtfwHew6jsAc4Q7sOjZi2tmAGeS72IBjOabdQ2nZAynDOjBrgAOxTtvfMX00Yf4vwfjI\nfpOZiNlCv1DG9lC3TLYQzb8i1OVDYEyo+24y28KRmO96CxiH2UVFJD3ZFoar6hYAEbkIuEVVjxGR\n7wDnqOrej6q
IyBxgrqr+rNWaq2qHLZizOTvy+xRgTeT/F8L/i4DvAS8Cj4R1JwDbI3mLMCOPllce\nTvQi4BXga5G0K4EFGerXCxPre1jDMKmV7S4DlqUppwr4PXA9cAf2TcFE2higDugffm/ERtE/wgS5\nAxNnLTYCWU9oSML27wJTOvK6+JJWEwOAx4GHw++ZEU3eAjwW2bZPuLZnR7dNaDsYamXQ9vewRlKx\nxvZAzMH1j5R3BzAr/H8P0BDZ7xOR7fpG95viGF4BrsZ6MDOysYVI3p4JWwAWhXU3A9dnaQvvBE1X\nhuNriJy/lLYQyXt74jiBT7XHFvIdTud1oFpEbsR6O4eJyKexlmIadsETrAZ6i8hnw4u1P8Bat9Z4\nCJghIiNFZCRwHdab3QcROUdEJgEPYi1JMdbSvBHSrxKR/cL/R2Hi+3P4fYSIfEZEDhSRYSJyGda6\nDseeh3sUmCoiZ4hIP+yiPK2qiZbyoXAsd2GObDfWQr8EfAa76NeISImIXIMJ/qW0Z9XJCREZKyKT\nRaQEux67aDl9k+BJ7NqeGu6GzST1HeKF2MggMb89DbtPtRdVfQ9zND8KUTDGY47mEfblSeA8ETk9\n7Pc20k/3PATciTmL/yYLWwivUA0A7sYc38aQ/g1sNPxmGlvoIyK9gi2MxF6IL8ccjgL3pbMFERks\nIkdio7VEPV+mPbbQwa1ZJUmtBNaVfRwbMlZjYqnD5jdnErx12PYKYBP2Xtr10fJCGZuw7moddvHv\nwrrL28P/Ue9dAVwa/v8iNqRVrIVoxJzPuSH9v7Ah4M6wz58AvUPakdjjLjtD3lqsO3xzZF+XhIu3\nE7sVPCSSVoI5zqqwjxmY454T0idhc6C7gCW00iv0pUN1Op7QuAbtzAFGhLRUmnwXm8b9Idb4nJG8\nLWb0dUGfN4V1e3tg4feosK/tWEN6dUTb1WHb9UHbl0f2exMpbCtSx9ND3oS+Nye03YotvIlNe2wF\n/hf4AjbcrMUa9poMtlCN9boWAp+P7GdV0HDWtpB0HG22hc4S0HTPX/jz7ktuC9bzbgBG5+O6ftzz\nt2fp0El8x+luiMhUbAglwM+Ak4Bj1Q2nS+AhpR0nPedj80MbsXmuae68ug7eA3McJ7Z4D8xxnNji\nDsxxnNjSKQ5MRKZ7fqe70dm6iHv+9tBZPbBcD/Tjnt/pmnS2LuKev834ENJxnNiSl7uQQ0R0VJr0\nbVhgoNYozhCLYWsTlKVzvRmC0Wyth7Je6bfJJX9dhjAbmY7/H/CBqmYbacgpEMN6iJan0V1X12Wu\n+ddm0PUe0r/7tyMPus5HPDBGkfQiWBspzzUOw4Qc8+dI5bzM26RjtL325HQxynvAoly02cm6zJUL\nc9T1/+RB1z6EdBwntrgDcxwntmT7Ze4pIW74WyLy3XxXynEKges6/mTzXcgi4JdY7KqjgItDnCDH\niS2u6+5BNj2wE4G3VPVtVa0DnsBecHWcOOO67gZk48BGYiGYE6wP6xwnzriuuwEd9hhFeI1gOlgI\nVsfpDkR1fZB/brjLkU0PbAP2QYIEo8K6Fqjq/ap6vKoen+4hTcfpIrRZ12kfUnU6hWwuyULsYxyj\nwwcG9vlggePEENd1NyDjEFJVG0TkW9hno4qAB1W1IkM2x+nSuK67B1nNganq88Dzea6L4xQU13X8\n8VG94zixxR2Y4zixxR2Y4zixJS/hdIoPhvKbcyhgdodVpV08k2PYkAvOzLECOe7fcVLylztzyr6s\nC74u6j0wx3Fiizswx3Fiizswx3FiSzbhdB4UkS0isqIQFXKcQuHajj/Z9MBmAVPyXA/H6Qxm4dqO\nNRkdmKrOA7YXoC6OU1Bc2/HH58Acx4ktHebARGS6iCwSkUVbM3w/znHiQgtdN3V2bZxkOsyBtYib\nlOt3HR2ni+DxwLo2fkkcx4kt2TxG8TjwKjBWRNaLyJX5r5bj5B/XdvzJJqDhxYWoiOMUGtd2/PEh\npOM4scUdmOM4scUdmOM4sSUv8cB2rINncpgOvWBgx9WlXfvPNZ7Xsg6phtPV6ANMyCF/rrrIZd8A\n/9T14nnlivfAHMeJLe7AHMeJLe7AHMeJLdk8yHqgiMwVkZUiUiEi3y5ExRwn37i24082k/gNwHWq\nukRE+gOLReRPqroyz3VznHzj2o452cQD26SqS8L/1cAbwMh8V8xx8o1rO/60aQ5MRMqBScBr+aiM\n43QWru14krUDE5FS4CngWlWtSpG+N27SPomO04VJp+0W8cDqO6d+Tutk5cBEpBd2gR9V1adTbRON\nmzSgI2voOHkkk7ZbxAPrVfj6OenJ5i6kAL8F3lDVu/NfJccpDK7t+JNND+w04MvAZBFZGpZz81wv\nxykEru2Yk008sPmAFKAujlNQXNvxx5/EdxwntrgDcxwntrgDcxwntuQlHtigIrigMz+tlmvcpFzj\nNl2eY/57c8zvOCmonJdb/rXX5JZf8qBr74E5jhNb3IE5jhNb3IE5jhNbsnkSv7eIvC4iy0LMpFsL\nUTHHyTeu7fiTzST+HmCyqtaE98bmi8jvVXVBnuvmOPnGtR1zsnkSX4Ga8LNXWDSflXKcQuDajj/Z\nRqMoEpGlwBbgT6rqMZOcboFrO95k5cBUtVFVJwKjgBNFZFzyNi3iJjV1dDUdJz9k0rbHA+vatOku\npKruAOYCU1KkNcdN8nubTsxoTdseD6xrk81dyDIRGRT+7wOcA7zZETufuRsuq+2Ikhyn7bSi7WNE\n5IFC1+WO3XBVGlsor4IXPyxcfeJCNn2lA4C5IrIcWIjNE8zJb7Uyowo37oKhVbbcuMvWpaJyN8g8\nKJ3fvNy+Lqmst2HoK7Ykl1XZBGfVQN+P4IhqeLGhZfmP1cHBVdDvI7hgJ2zf3Zy2pwG++iIM+DXs\n/wDcvaRl3qVb4bjHoe+v7O/SrbmdF6dNpNL25ap6VaEr8v3e8EDf9uXdR79vt24L64HRwNGRJfqG\njwJ3Yh8HmBT+jxZVWQVnPWV6PeJhePHdluU/tgoO/i/o9yu4YE5LW8BiSD4oIlUi8r6IzEhKnCgi\ni0WkNvydmOnYs/kq0XJVnaSq41V1nKrelikPQEOKE/iHehhbDYdWw527901Px1drYb8qGFdtv++v\ng2caYFkpLC+F5xrgvrr0ZRxXCgf1hoN7w4DI/df7N8Ez22DZcbD8uH3LurgWjimCo3rAboVP74Tr\nd1laRSN8fRc83Bc2D4C+At+c25x35muwZges+2eYeyHctQTGzILzfgd1jXD+HLjsCPhwOlx+pP3G\nY1QVhPZqOxV/2A5jF8Khr8Od72bePkqyttvKne/Bv2+EIT1haE+YvRnu25Q+zzKgIixfB84HPgOc\nDPw38Dzwe+DPwGORfBf/ASaVwbbp8G+nwEXPw9ZaaGyCIx6Cr/wRHv4UbL4K+vZsaQvACOAw4GDg\nLOAGEZkCICLFwLPAI8BgYDbwbFjfKh06WyUilSJy48pG6FdlTmxjE3xh
J5RVwdRa+FIvWFkKj9dD\ndLL/5QYYlfQ1kPKq5t7OFcXwh37NabPr4boSGNUDRvaA60tgVoZJ1p8cAiuPhwUT4ZcbYeXOUNYW\nuG4UjCqBkSUty1rdCEsa4V9L4OVSqBwApxbBU/WwoAEerYepveDMnlAqcHsJPL0WqoMDnP0m/PAE\nGNwbjhwC44c2O/eX10NDE1w7EUp6wjUT97ac/dtx+p02ICI3isgGEakWkVUi8smwfqaIPBLZ7isi\nsk5Etm2qg/LXmodyMyvhiyvh0jfg3BW27pmj4RcbrCd04AL44/bmfW5sgs/thCFV1oj/JmjkimL4\nfE9YH7GHh0OvfmgV/FuGxv6pD2DGSFhzIiw+1npMv87gwKIUY07q99g35UqB94H9ga8BT4btVn8I\nS7bArSdDn57whUNh/DB4ai38YikU9YDhfeHMkVBaDLef3NIWgKHA7ar6oaq+AdwPXBHSPoE91nWP\nqu5R1XuxhnxyurrnY7r94kN7wI4BVvjUnTChCJ7qAycXwSN1MLcBpvWCN9twt/LMnjAk0i+paIQJ\nkdpPKLJ16fj8Shi1AK5ZC4f0hg3hxFbshAkR5xgtq6IJDukBA3qYgwIY1wNq1M5uRaNtn2BMEZQU\nweod8OHc3f42AAALp0lEQVRu2LQTJpRZ2vpq+10UyqnYbgKQyHFNGAZAn+zOitMeRGQs8C3gBFXt\nD3waqEyx3VHAr4BLgQMatVkzCZ7bBsf3h08OgpP6w9QVcEw/+M5IuPlg+Pqa5m2n1VqDu7E/PNkX\nvr8bXmowbfeJaGBlI3wj9Oo39odtCuvTPJ22ZhdMHWr/9+8JR/S1dek4HTgF+BfgQyAh/zXYw3AJ\njgzrwPR6yEDoH+kTTRgGr70P/1sJg4phQCRtzKCWthCKjsZ6WYaNYgl/l4dn81KlpyQfDuzeYrEL\nsrARtirc3NseshnbA75WDE/U24WszuGRwRpgYOSiDwjrUo39h/WChZNg3UnWQr1fB3/ZYYIDqGmE\ngT1Tl1WjzftpVJhYDb+th/17wEk9Q3rS/gYUW6tTE3pxA8NFvXYeXD0eakOvsqa+OW1v3hIAinDy\nSSNQAhwlIr1UtVJV16bY7iLgOVWdr6p1I4r3HdufMRAOLLFpiS+WwdZ6uHQ/eL8eppVB5R7Y0QDv\n7Ya/NcKPe0NvgYlFcFUxPJRi2uPJejgv9OpLBG7vnd5Qo/qt3A2ra2FXU2pbGIyN0+YDv8N0fm04\nIeeG3ydg819gvbGdWK+upi6FXoth7nq463TY1Qg9Zd/0qC0AH0WSq2gebZQmpSWnp0S0tdm+diAi\nlViv8zFgHXa+DsHOj4RFsVc43geGA7uBd0JFRwPLI0Ueg7WMidmBYmwMvRl7bmc1kLh30xcYC/w9\nQzV7YF69OGzbBExMU9YgrGddESljLGYAq0NaTahTgknAqnCcE7GWpB/m66qBgzBdVGH+8q1I3kOB\nnqramRHVuj0icgnwTUwLLwAzVHWjiMwEDlXVy0TkP4EqVb0h5NmKXcN3sOs4AtPBDuw6NmKa34AZ\n5LvAcZimi7FrG+2BlGH6WgMcCAzDNHcQpsv1kW3HR/abTEK//UIZ20PdMtkC2LBtQth2SKjHLszu\ndpPZFg4Nx7YSGIfZdzQ92RaGq+oWABG5CLhFVY8Rke8A56jq3o+qiMgcYK6q/qzV2qtqhy3hoM+O\n/D4FWBP5/4Xw/yLge8CLwCNh3QnA9kjeIszIo+WVAytC/leAr0XSrgQWZKhfL0ysG8KJHhjWt1oW\ncHi4kP0j6dVYA3Y9cAf2TcFE2higLrE9sBG7Pf8jTJA7MHHWYnOk6wkNSdj+XWBKR14XX9JqYgDw\nOPBw+D0zoslbgMci2/YJ1/bs6LYJbWOGWhm0/T3MOSjW2B6IObioju4AZoX/7wEaIvt9IrJd3+h+\nUxzDK8DVWA9mRja2EMk7PGELEbt6Frg+S1t4DxuFVobja4icv5S2EMl7e+I4gU+1xxby/cjp60C1\niNwI/AM4TEQ+jbUU07ALnmA10FtEPhterP0B1rq1xkPADBEZKSIjgeuAWak2FJGTwrzHb7FWrA/w\nsqp+lKksVV0NLAXuFJHhInJhyL8f9jzco8BUETlDRPphF+VpVa2OlP0D4C7Mke3GHOhL2I2fRuAa\nESkRkWswMb2U5ridHBGRsSIyWURKsOuxC2tUknkSu7anhrthM0l9h3ghNjJITB9Nwxq4vajqe5hz\n+FGIgjEecw6PsC9PAueJyOlhv7eRfhT5EPbEQx12EzGjLYhIDxEZij1F8bfIcT2O6XRLGlu4JRzD\nhdjI6XBVLcccjgL3pbMFERksIkdio7VEPV+mPbbQwa1ZJUmtBNaVfRwbMlZjYqkDbiLS2oVtrwA2\nYVNm10fLC2VsAupD/isxp7A9LHfR0ntXAJeG/y/GvL+G/A1YT+7ckC4ZyirHRNqECX4PcHMk/ZJw\n8XZirdeQSFoJ8CA2XNyMtZCrgDkhfRKwOJyXJcCkzu6VdPcFG469HvS4HZgDjAhpqTT5LrAN+CHW\n+JyRvC02hVQX9HVTWLe3BxZ+jwr72g6sBa6OaLs6bLs+aPvyyH5vSmVbkTqeHvJq0PbmhLZbsYV3\nglY3YU7lLGyIWBvsYn4GW3g56HUVLUdIq4KGs7aFpONosy10loCme/7Cn3dfcluwea0GYHQ+ruvH\nPX97lg6dxHec7oaITMXmKgX4GXAScKy64XQJ/LVrx0nP+dj0w0ZsnmuaO6+ug/fAHMeJLd4Dcxwn\ntnSKAxOR6Z7f6W50ti7inr89dFYPLNcD/bjnd7omna2LuOdvMz6EdBwntuRlEn+AiO6XJj3xAmBr\nDMrwKvPWJkgXtrouQ1SKbVhcj9YoznH/HJ4h/4dQNrj19MVv8IGqlqUvxSk0w3qJlvduPX1rPaQN\nO50hYOHWXVCWQxySjPm3ZcifQdcbM9hVLekPcRMdr+tsvgvZZvYDfppD/gtyfI25Mvmd9jZSnutr\n1A/nll2OZ13mrZxCU94bFh2bQwEZ44vmmdm5Zb81R7uaScfr2oeQjuPEFndgjuPEFndgjuPElmy/\nzD0lxA1/S0S+m+9KOU4hcF3Hn2y+C1kE/BKLXXUUcHGIFe44scV13T3Ipgd2IvCWqr6tqnXAE9gL\nro4TZ1zX3YBsHNhILGxsgvVhXQtEZLqILBKRRVXJiY7T9Wizrrdm+GyfU3g6bBJfVe9X1eNV9fh0\nD6k6TpyI6jrtQ6pOp5CNA9uAfZAgwaiwznHijOu6G5CNA1uIfYxjdPjAwD4fLHCcGOK67gZkfJVI\nVRtE5FvYZ6OKgAdVtSJDNsfp0riuuwdZvQupqs8Dz+e5Lo5TUFzX8cefxHccJ7a4A3McJ7bkJZxO\nX3KMHPLn3PZfPiO3/PzlrNzy/9PcHCvgdEuW5pg/x3A8uYaZmpBb9rzgPTDHcWKLOzDHcWKLOzDH\ncWKLOzDHcWJLNuF0HhSRLSK
yohAVcpxC4dqOP9n0wGYBU/JcD8fpDGbh2o41GR2Yqs4DthegLo5T\nUFzb8afD5sCicZMyfH7OcWKDxwPr2uQlHli6j8Y6TpzweGBdG78L6ThObHEH5jhObMnmMYrHgVeB\nsSKyXkSuzH+1HCf/uLbjTzYBDS8uREUcp9C4tuOPDyEdx4kt7sAcx4kteYkH1unkGDfJ43k5KdkF\nLMshf64BtXKNJ9YN8R6Y4zixxR2Y4zixxR2Y4zixxR2Y4zixJZsHWQ8UkbkislJEKkTk24WomOPk\nG9d2/MnmLmQDcJ2qLhGR/sBiEfmTqq7Mc90cJ9+4tmNONvHANqnqkvB/NfAGMDLfFXOcfOPajj9t\neg5MRMqBScBrKdKmA9MBRnRAxRynkLSm7aiuD5KCV8vJQNaT+CJSCjwFXKuqVcnpHg/MiSvptN0i\nHpjf8upyZHVJRKQXdoEfVdWn81slxykcru14k81dSAF+C7yhqnfnv0qOUxhc2/Enmx7YacCXgcki\nsjQs5+a5Xo5TCFzbMSebeGDzAZ++dLodru3449OSjuPEFndgjuPElrzEAysugvLS9ud/5vjc9n/B\nwNzyc3mO+WfnmN/pmvQht5heucQSI8d9A+U52kV5rvHM5uWYPwXeA3McJ7a4A3McJ7a4A3McJ7a4\nA3McJ7Zk8yR+bxF5XUSWhZhJtxaiYo6Tb1zb8Sebu5B7gMmqWhPeG5svIr9X1QV5rpvj5BvXdszJ\n5kl8BWrCz15h0XxWynEKgWs7/mQbjaJIRJYCW4A/qWrKeGAiskhEFm1t6uhqOk5+yKTtFrqu75w6\nOq2TlQNT1UZVnQiMAk4UkXEptvG4SU7syKTtFrru1Tl1dFqnTa5GVXcAc4Ep+amO43QOru14ks1d\nyDIRGRT+7wOcA7yZ74o5Tr5xbcefbO5CHgDMFpEizOH9X1Wdk99qOU5BcG3HnGzuQi7HPnbgON0K\n13b88el2x3Fiizswx3Fii9izfB1cqMhWYF2aTYYBH+Swi+6e/2BVLcuhfCcPuK5zzt/hus6LA8u4\nU5FFqtrusIUf9/xO16SzdRH3/O3Bh5CO48QWd2CO48SWznJg93t+pxvS2bqIe/420ylzYI7jOB2B\nDyEdx4kt7sAcx4kt7sAcx4kt7sAcx4kt7sAcx4kt/x+dwqc11s2WUAAAAABJRU5ErkJggg==\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "\n", "\n", "Dropout 0.000000 Activation: relu \n", "array([[ 0.57746479, 0.56338028, 0.5915493 , 0.57746479, 0.53521127],\n", " [ 0.6056338 , 0.56338028, 0.54929577, 0.57746479, 0.5915493 ],\n", " [ 0.56338028, 0.54929577, 0.57746479, 0.5915493 , 0.61971831],\n", " [ 0.56338028, 0.57746479, 0.6056338 , 0.5915493 , 0.5915493 ]])\n", "\n", "Max: 0.619718\n", "Avg: 0.578169\n", "Col Avg: array([ 0.57746479, 0.56338028, 0.58098592, 0.58450704, 0.58450704])\n", "Row Avg: array([ 0.56901408, 0.57746479, 0.58028169, 0.58591549])\n", "\n", "\n", "Dropout 0.000000 Activation: sigmoid \n", "array([[ 0.6056338 , 0.5915493 , 0.6056338 , 0.61971831, 0.52112676],\n", " [ 0.61971831, 0.6056338 , 0.57746479, 0.61971831, 0.49295775],\n", " [ 0.57746479, 0.6056338 , 0.5915493 , 0.54929577, 0.49295775],\n", " [ 0.56338028, 0.5915493 , 0.57746479, 0.5915493 , 0.54929577]])\n", "\n", "Max: 0.619718\n", "Avg: 0.577465\n", "Col Avg: array([ 0.5915493 , 0.59859155, 0.58802817, 0.59507042, 0.51408451])\n", "Row Avg: array([ 0.58873239, 0.58309859, 0.56338028, 0.57464789])\n", "\n", "\n", "Dropout 0.333000 Activation: relu \n", "array([[ 0.56338028, 0.57746479, 0.5915493 , 0.5915493 , 0.56338028],\n", " [ 0.61971831, 0.5915493 , 0.5915493 , 0.54929577, 0.5915493 ],\n", " [ 0.54929577, 0.57746479, 0.57746479, 0.5915493 , 0.57746479],\n", " [ 0.57746479, 0.57746479, 0.5915493 , 0.57746479, 0.6056338 ]])\n", "\n", "Max: 0.619718\n", "Avg: 0.581690\n", "Col Avg: array([ 0.57746479, 0.58098592, 0.58802817, 0.57746479, 0.58450704])\n", "Row Avg: array([ 0.57746479, 0.58873239, 0.57464789, 0.58591549])\n", "\n", "\n", "Dropout 0.333000 Activation: sigmoid \n", "array([[ 0.57746479, 0.5915493 , 0.5915493 , 0.5915493 , 0.52112676],\n", " [ 0.57746479, 0.5915493 , 0.5915493 , 0.67605634, 0.50704225],\n", " [ 0.5915493 , 0.57746479, 0.5915493 , 0.5915493 , 0.50704225],\n", " [ 0.5915493 , 0.57746479, 0.5915493 , 0.56338028, 0.6056338 ]])\n", "\n", "Max: 0.676056\n", "Avg: 0.580282\n", "Col Avg: array([ 0.58450704, 0.58450704, 0.5915493 , 0.6056338 , 0.53521127])\n", "Row Avg: array([ 0.57464789, 0.58873239, 0.57183099, 0.58591549])\n", "\n", "\n", "Dropout 0.500000 Activation: relu \n", "array([[ 
"\n", "\n", "Dropout 0.500000 Activation: relu \n", "array([[ 0.54929577, 0.57746479, 0.56338028, 0.57746479, 0.63380282],\n", "       [ 0.56338028, 0.63380282, 0.5915493 , 0.66197183, 0.5915493 ],\n", "       [ 0.56338028, 0.63380282, 0.6056338 , 0.5915493 , 0.5915493 ],\n", "       [ 0.57746479, 0.54929577, 0.57746479, 0.6056338 , 0.57746479]])\n", "\n", "Max: 0.661972\n", "Avg: 0.590845\n", "Col Avg: array([ 0.56338028, 0.59859155, 0.58450704, 0.60915493, 0.59859155])\n", "Row Avg: array([ 0.58028169, 0.6084507 , 0.5971831 , 0.57746479])\n", "\n", "\n", "Dropout 0.500000 Activation: sigmoid \n", "array([[ 0.5915493 , 0.6056338 , 0.6056338 , 0.57746479, 0.53521127],\n", "       [ 0.5915493 , 0.5915493 , 0.6056338 , 0.56338028, 0.54929577],\n", "       [ 0.57746479, 0.5915493 , 0.5915493 , 0.56338028, 0.54929577],\n", "       [ 0.5915493 , 0.57746479, 0.5915493 , 0.57746479, 0.5915493 ]])\n", "\n", "Max: 0.605634\n", "Avg: 0.580986\n", "Col Avg: array([ 0.58802817, 0.5915493 , 0.59859155, 0.57042254, 0.55633803])\n", "Row Avg: array([ 0.58309859, 0.58028169, 0.57464789, 0.58591549])\n", "\n" ] } ], "source": [ "# visualize the grid-search results as heatmaps to pick a configuration\n", "\n", "f, subplots = plt.subplots(len(dropout_hp), len(activation_hp))\n", "\n", "outstr = \"\"\n", "\n", "rownum = 0\n", "for dr in dropout_hp:\n", "    colnum = 0\n", "    for ac in activation_hp:\n", "        # one matrix per (dropout, activation) pair:\n", "        # rows = hidden-layer sizes, columns = regularization penalties\n", "        matrix1 = []\n", "        for hl in hidden_layer_hp:\n", "            row = []\n", "            for reg_penalty in reg_penalty_hp:\n", "                try:\n", "                    row.append(fscores[(hl, reg_penalty, dr, ac)])\n", "                except KeyError:\n", "                    row.append(0.00)\n", "            matrix1.append(row)\n", "\n", "        outstr += \"\\n\\nDropout %f Activation: %s \\n\" % (dr, ac)\n", "        outstr += repr(np.array(matrix1))\n", "        outstr += \"\\n\\nMax: %f\\n\" % (np.max(np.array(matrix1)))\n", "        outstr += \"Avg: %f\\n\" % (np.average(np.array(matrix1)))\n", "        outstr += \"Col Avg: %s\\n\" % repr(np.average(np.array(matrix1), axis=0))\n", "        outstr += \"Row Avg: %s\\n\" % repr(np.average(np.array(matrix1), axis=1))\n", "\n", "        subplots[rownum, colnum].matshow(matrix1, cmap='hot', vmin=0.5, vmax=0.7, interpolation='nearest')\n", "        subplots[rownum, colnum].set_title('%s %f' % (ac, dr))\n", "\n", "        colnum += 1\n", "    rownum += 1\n", "\n", "plt.show()\n", "\n", "print(outstr)\n" ] }, { "cell_type": "code", "execution_count": 18, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Loaded model from disk\n" ] } ], "source": [ "# pick this model and load it from disk\n", "ac = 'sigmoid'\n", "hl = 8\n", "dr = 0.333\n", "rp = 0.001\n", "modelname = \"model_%s_%d_%.3f_%.6f\" % (ac, hl, dr, rp)\n", "\n", "# keras.models.load_model(\"%s.h5\" % modelname) fails here because the model\n", "# was compiled with the custom f_score metric, so rebuild the architecture\n", "# from JSON and load the weights separately\n", "with open(\"%s.json\" % modelname, 'r') as json_file:\n", "    model_json = json_file.read()\n", "model = model_from_json(model_json)\n", "model.load_weights(\"%s_weights.h5\" % modelname)\n", "print(\"Loaded model from disk\")\n", "\n" ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Final Xval Accuracy 0.676, Xval F1 0.623, f_score 0.676\n", "[[29 16]\n", " [ 7 19]]\n" ] } ], "source": [ "# re-evaluate on xval to confirm, and pick the threshold value\n", "y_xval_prob = model.predict(X_xval)\n", "\n", "thresh, score = selectThresholdAcc(y_xval_prob, y_xval)\n", "y_xval_pred = y_xval_prob >= thresh\n", "\n", "print(\"Final Xval Accuracy %.3f, Xval F1 %.3f, f_score %.3f\" % \n", "      (sklearn.metrics.accuracy_score(y_xval_pred, y_xval), \n", "       sklearn.metrics.f1_score(y_xval_pred, y_xval),\n", "       score))\n", "\n",
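"# the threshold chosen here on xval is reused unchanged on the test set below,\n", "# so print it to make the cutoff being carried forward explicit\n", "print(\"Chosen probability threshold: %.3f\" % thresh)\n", "\n",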
"confusion_matrix = sklearn.metrics.confusion_matrix(y_xval_pred, y_xval)\n", "print(confusion_matrix)\n", "false_positive = confusion_matrix[1][0]\n", "false_negative = confusion_matrix[0][1]\n", "true_positive = confusion_matrix[1][1]\n" ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Test Accuracy 0.563, Test F1 0.475\n", "[[26 20]\n", " [11 14]]\n" ] } ], "source": [ "# evaluate in test set\n", "# xval not a good measure of expected performance since we used it to pick threshold\n", "# also tested many times in xval and picked best model, which is probably model that's lucky in xval\n", "y_test_prob = model.predict(X_test)\n", "\n", "y_test_pred = y_test_prob >= thresh\n", "\n", "print(\"Test Accuracy %.3f, Test F1 %.3f\" % \n", " (sklearn.metrics.accuracy_score(y_test_pred, y_test), \n", " sklearn.metrics.f1_score(y_test_pred, y_test)))\n", "\n", "print(sklearn.metrics.confusion_matrix(y_test_pred, y_test))" ] }, { "cell_type": "code", "execution_count": 21, "metadata": { "collapsed": true }, "outputs": [], "source": [ "model.save(\"finalmodel.h5\")\n", "model.save_weights(\"modelweights.h5\")\n", "\n", "with open(\"finalmodel.json\", \"wb\") as fjson:\n", " fjson.write(model.to_json())" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "TensorFlow", "language": "python", "name": "tensorflow" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 2 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", "version": "2.7.13" } }, "nbformat": 4, "nbformat_minor": 2 }