{ "cells": [ { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "import pandas as pd\n", "import numpy as np\n", "\n", "from sklearn.datasets import load_iris\n", "from sklearn.pipeline import Pipeline\n", "from sklearn.preprocessing import StandardScaler\n", "from sklearn.svm import LinearSVC\n", "from sklearn.svm import SVC\n", "from sklearn.preprocessing import StandardScaler\n", "from sklearn.decomposition import PCA\n", "from sklearn.metrics import roc_curve,auc\n", "from sklearn.model_selection import StratifiedKFold\n", "from sklearn.pipeline import Pipeline\n", "from matplotlib import pyplot as plt\n", "\n", "from sklearn.model_selection import train_test_split\n", "from sklearn.linear_model import LogisticRegression\n", "from sklearn.metrics import accuracy_score\n", "from sklearn.metrics import precision_score\n", "from sklearn.metrics import recall_score\n", "from sklearn.metrics import roc_auc_score\n", "from sklearn.metrics import f1_score\n" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
agejobmaritaleducationdefaultbalancehousingloancontactdaymonthdurationcampaignpdayspreviouspoutcomey
058managementmarriedtertiaryno2143yesnounknown5may2611-10unknownno
144techniciansinglesecondaryno29yesnounknown5may1511-10unknownno
233entrepreneurmarriedsecondaryno2yesyesunknown5may761-10unknownno
347blue-collarmarriedunknownno1506yesnounknown5may921-10unknownno
433unknownsingleunknownno1nonounknown5may1981-10unknownno
535managementmarriedtertiaryno231yesnounknown5may1391-10unknownno
628managementsingletertiaryno447yesyesunknown5may2171-10unknownno
742entrepreneurdivorcedtertiaryyes2yesnounknown5may3801-10unknownno
858retiredmarriedprimaryno121yesnounknown5may501-10unknownno
943techniciansinglesecondaryno593yesnounknown5may551-10unknownno
\n", "
" ], "text/plain": [ " age job marital education default balance housing loan \\\n", "0 58 management married tertiary no 2143 yes no \n", "1 44 technician single secondary no 29 yes no \n", "2 33 entrepreneur married secondary no 2 yes yes \n", "3 47 blue-collar married unknown no 1506 yes no \n", "4 33 unknown single unknown no 1 no no \n", "5 35 management married tertiary no 231 yes no \n", "6 28 management single tertiary no 447 yes yes \n", "7 42 entrepreneur divorced tertiary yes 2 yes no \n", "8 58 retired married primary no 121 yes no \n", "9 43 technician single secondary no 593 yes no \n", "\n", " contact day month duration campaign pdays previous poutcome y \n", "0 unknown 5 may 261 1 -1 0 unknown no \n", "1 unknown 5 may 151 1 -1 0 unknown no \n", "2 unknown 5 may 76 1 -1 0 unknown no \n", "3 unknown 5 may 92 1 -1 0 unknown no \n", "4 unknown 5 may 198 1 -1 0 unknown no \n", "5 unknown 5 may 139 1 -1 0 unknown no \n", "6 unknown 5 may 217 1 -1 0 unknown no \n", "7 unknown 5 may 380 1 -1 0 unknown no \n", "8 unknown 5 may 50 1 -1 0 unknown no \n", "9 unknown 5 may 55 1 -1 0 unknown no " ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df=pd.read_csv('bank-full.csv',sep=';')\n", "df.head(10)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "# df.columns: returns all the column names" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "# Data cleaning\n", "\n", "# s stores all value_counts\n", "# value_counts() counts unique objects\n", "s=[]\n", "for i in df.columns:\n", " s.append(df[str(i)].value_counts())\n" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "# Transform string to number \n", "\n", "# s[1].index is an object and should be transformed to list\n", "s_index1=list(s[1].index)\n", "# Find the object that equals to one in s[].index in every column, then replace the value with the index of it in s list.\n", "for i in s[1].index:\n", " df.loc[df[str(df.columns[1])]==str(i),str(df.columns[1])]=s_index1.index(str(i))\n", " \n", "s_index2=list(s[2].index)\n", "for i in s[2].index:\n", " df.loc[df[str(df.columns[2])]==str(i),str(df.columns[2])]=s_index2.index(str(i))\n", " \n", "s_index3=list(s[3].index)\n", "for i in s[3].index:\n", " df.loc[df[str(df.columns[3])]==str(i),str(df.columns[3])]=s_index3.index(str(i))\n", " \n", "s_index4=list(s[4].index)\n", "for i in s[4].index:\n", " df.loc[df[str(df.columns[4])]==str(i),str(df.columns[4])]=s_index4.index(str(i))\n", " \n", "s_index6=list(s[6].index)\n", "for i in s[6].index:\n", " df.loc[df[str(df.columns[6])]==str(i),str(df.columns[6])]=s_index6.index(str(i))\n", " \n", "s_index7=list(s[7].index)\n", "for i in s[7].index:\n", " df.loc[df[str(df.columns[7])]==str(i),str(df.columns[7])]=s_index7.index(str(i))\n", " \n", "s_index8=list(s[8].index)\n", "for i in s[8].index:\n", " df.loc[df[str(df.columns[8])]==str(i),str(df.columns[8])]=s_index8.index(str(i))\n", " \n", "s_index10=list(s[10].index)\n", "for i in s[10].index:\n", " df.loc[df[str(df.columns[10])]==str(i),str(df.columns[10])]=s_index10.index(str(i))\n", " \n", "s_index15=list(s[15].index)\n", "for i in s[15].index:\n", " df.loc[df[str(df.columns[15])]==str(i),str(df.columns[15])]=s_index15.index(str(i))\n", " \n", "s_index16=list(s[16].index)\n", "for i in s[16].index:\n", " df.loc[df[str(df.columns[16])]==str(i),str(df.columns[16])]=s_index16.index(str(i))\n" ] }, { "cell_type": "code", 
"execution_count": 9, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", 
" \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
agejobmaritaleducationdefaultbalancehousingloancontactdaymonthdurationcampaignpdayspreviouspoutcomey
00.151331-0.238980-1.0688500.372229-1.0183350.000084-1.799045-1.1907711.570163-0.156019-0.3905170.000043-0.183777-0.004109-0.109374-0.594561-1.132458
10.027171-0.0984881.013713-0.959482-1.018335-0.000144-1.799045-1.1907711.570163-0.156019-0.390517-0.001616-0.183777-0.004109-0.109374-0.594561-1.132458
2-0.0703830.603972-1.068850-0.959482-1.018335-0.000147-1.7990456.2410271.570163-0.156019-0.390517-0.002747-0.183777-0.004109-0.109374-0.594561-1.132458
30.053777-0.379472-1.0688503.035650-1.0183350.000016-1.799045-1.1907711.570163-0.156019-0.390517-0.002505-0.183777-0.004109-0.109374-0.594561-1.132458
4-0.0703831.1659401.0137133.035650-1.018335-0.0001472.251382-1.1907711.570163-0.156019-0.390517-0.000907-0.183777-0.004109-0.109374-0.594561-1.132458
5-0.052646-0.238980-1.0688500.372229-1.018335-0.000122-1.799045-1.1907711.570163-0.156019-0.390517-0.001797-0.183777-0.004109-0.109374-0.594561-1.132458
6-0.114725-0.2389801.0137130.372229-1.018335-0.000099-1.7990456.2410271.570163-0.156019-0.390517-0.000621-0.183777-0.004109-0.109374-0.594561-1.132458
70.0094340.6039723.0962760.37222955.472393-0.000147-1.799045-1.1907711.570163-0.156019-0.3905170.001837-0.183777-0.004109-0.109374-0.594561-1.132458
80.1513310.322988-1.0688501.703939-1.018335-0.000134-1.799045-1.1907711.570163-0.156019-0.390517-0.003139-0.183777-0.004109-0.109374-0.594561-1.132458
90.018303-0.0984881.013713-0.959482-1.018335-0.000083-1.799045-1.1907711.570163-0.156019-0.390517-0.003063-0.183777-0.004109-0.109374-0.594561-1.132458
100.0005660.0420043.096276-0.959482-1.018335-0.000118-1.799045-1.1907711.570163-0.156019-0.390517-0.000545-0.183777-0.004109-0.109374-0.594561-1.132458
11-0.1058570.0420041.013713-0.959482-1.018335-0.000105-1.799045-1.1907711.570163-0.156019-0.390517-0.001827-0.183777-0.004109-0.109374-0.594561-1.132458
120.106988-0.098488-1.068850-0.959482-1.018335-0.000146-1.799045-1.1907711.570163-0.156019-0.3905170.003903-0.183777-0.004109-0.109374-0.594561-1.132458
130.151331-0.098488-1.0688503.035650-1.018335-0.000139-1.799045-1.1907711.570163-0.156019-0.390517-0.002822-0.183777-0.004109-0.109374-0.594561-1.132458
140.1424620.182496-1.068850-0.959482-1.018335-0.000129-1.799045-1.1907711.570163-0.156019-0.390517-0.001269-0.183777-0.004109-0.109374-0.594561-1.132458
150.0892510.322988-1.0688501.703939-1.018335-0.000122-1.799045-1.1907711.570163-0.156019-0.3905170.001430-0.183777-0.004109-0.109374-0.594561-1.132458
160.0360400.0420041.0137133.035650-1.018335-0.000146-1.799045-1.1907711.570163-0.156019-0.390517-0.002415-0.183777-0.004109-0.109374-0.594561-1.132458
170.142462-0.379472-1.0688501.703939-1.018335-0.000141-1.799045-1.1907711.570163-0.156019-0.390517-0.003320-0.183777-0.004109-0.109374-0.594561-1.132458
180.1690680.322988-1.0688501.703939-1.018335-0.000140-1.799045-1.1907711.570163-0.156019-0.390517-0.000591-0.183777-0.004109-0.109374-0.594561-1.132458
19-0.0703830.182496-1.068850-0.959482-1.018335-0.000147-1.799045-1.1907711.570163-0.156019-0.390517-0.003078-0.183777-0.004109-0.109374-0.594561-1.132458
20-0.114725-0.379472-1.068850-0.959482-1.018335-0.000069-1.7990456.2410271.570163-0.156019-0.3905170.000058-0.183777-0.004109-0.109374-0.594561-1.132458
210.133594-0.238980-1.0688500.372229-1.018335-0.000063-1.799045-1.1907711.570163-0.156019-0.390517-0.001420-0.183777-0.004109-0.109374-0.594561-1.132458
22-0.079251-0.3794721.0137131.703939-1.018335-0.000144-1.7990456.2410271.570163-0.156019-0.390517-0.001480-0.183777-0.004109-0.109374-0.594561-1.132458
23-0.1413310.182496-1.068850-0.959482-1.018335-0.000142-1.799045-1.1907711.570163-0.156019-0.3905170.001264-0.183777-0.004109-0.109374-0.594561-1.132458
24-0.0083030.322988-1.0688501.703939-1.018335-0.000147-1.7990456.2410271.570163-0.156019-0.390517-0.001163-0.183777-0.004109-0.109374-0.594561-1.132458
250.0271710.042004-1.068850-0.959482-1.018335-0.000187-1.799045-1.1907711.570163-0.156019-0.390517-0.001299-0.183777-0.004109-0.109374-0.594561-1.132458
26-0.017171-0.2389801.0137130.372229-1.018335-0.000119-1.799045-1.1907711.570163-0.156019-0.3905170.000571-0.183777-0.004109-0.109374-0.594561-1.132458
270.0981200.603972-1.068850-0.959482-1.018335-0.000135-1.7990456.2410271.570163-0.156019-0.390517-0.001978-0.183777-0.004109-0.109374-0.594561-1.132458
280.044908-0.2389801.013713-0.959482-1.018335-0.000173-1.799045-1.1907711.570163-0.156019-0.390517-0.000048-0.079586-0.004109-0.109374-0.594561-1.132458
29-0.043777-0.0984881.013713-0.959482-1.018335-0.000118-1.7990456.2410271.570163-0.156019-0.3905170.001355-0.183777-0.004109-0.109374-0.594561-1.132458
......................................................
451810.044908-0.379472-1.068850-0.959482-1.0183350.0005952.251382-1.190771-1.120941-0.0116430.252056-0.002777-0.0795860.0077600.4560401.455210-1.132458
45182-0.061514-0.098488-1.068850-0.959482-1.018335-0.0001332.251382-1.190771-1.120941-0.0116430.2520560.002154-0.0795860.0146420.8329835.5547518.547930
451830.2577530.322988-1.0688501.703939-1.018335-0.0001122.251382-1.190771-1.120941-0.0116430.252056-0.002717-0.1837770.0055661.2099265.554751-1.132458
451840.1956740.322988-1.068850-0.959482-1.0183350.0000142.251382-1.190771-1.1209410.0027950.252056-0.001812-0.183777-0.0018150.8329835.554751-1.132458
451850.1690680.182496-1.0688500.372229-1.0183350.000312-1.799045-1.190771-1.1209410.0027950.252056-0.000877-0.1837770.0051670.6445125.5547518.547930
451860.1601991.165940-1.0688503.035650-1.0183350.0000152.251382-1.190771-1.1209410.0027950.2520560.000329-0.1837770.0063640.2675691.455210-1.132458
45187-0.0792510.1824961.013713-0.959482-1.018335-0.000021-1.799045-1.190771-1.1209410.0027950.2520560.002305-0.183777-0.004109-0.109374-0.5945618.547930
45188-0.105857-0.2389801.013713-0.959482-1.018335-0.000071-1.799045-1.190771-1.1209410.0027950.252056-0.000334-0.1837770.0508490.2675695.5547518.547930
45189-0.1413310.1824961.013713-0.959482-1.018335-0.0001252.251382-1.190771-1.1209410.0027950.252056-0.001284-0.1837770.0051670.8329831.455210-1.132458
45190-0.079251-0.379472-1.068850-0.959482-1.018335-0.0001322.251382-1.190771-1.1209410.0027950.252056-0.000787-0.1837770.0147420.4560405.5547518.547930
451910.3020960.3229883.0962760.372229-1.0183350.000264-1.799045-1.190771-1.1209410.0027950.2520560.000058-0.1837770.0142440.0790971.4552108.547930
45192-0.105857-0.2389801.0137130.372229-1.018335-0.0000642.251382-1.190771-1.1209410.0027950.252056-0.000304-0.183777-0.004109-0.109374-0.5945618.547930
45193-0.1147250.4634801.0137130.372229-1.018335-0.0001302.251382-1.190771-1.1209410.0027950.2520560.002877-0.079586-0.0007180.6445125.5547518.547930
451940.160199-0.238980-1.0688500.372229-1.018335-0.000132-1.7990456.241027-1.1209410.0027950.252056-0.001450-0.0795860.0146420.8329831.455210-1.132458
451950.2400160.322988-1.068850-0.959482-1.018335-0.0000232.251382-1.190771-1.1209410.0027950.252056-0.000696-0.1837770.0146421.0214555.5547518.547930
45196-0.1413311.0254481.013713-0.959482-1.018335-0.0001082.251382-1.190771-1.1209410.0027950.2520560.001083-0.183777-0.004109-0.109374-0.5945618.547930
45197-0.043777-0.2389801.013713-0.959482-1.0183350.000016-1.799045-1.190771-1.1209410.0027950.2520560.000178-0.183777-0.004109-0.109374-0.5945618.547930
45198-0.034908-0.238980-1.0688500.372229-1.0183350.0000072.251382-1.190771-1.1209410.0027950.2520560.001128-0.079586-0.004109-0.109374-0.594561-1.132458
45199-0.061514-0.3794721.013713-0.959482-1.0183350.000012-1.799045-1.190771-1.1209410.0027950.2520560.0136890.0246060.0488542.1522843.504981-1.132458
45200-0.026040-0.098488-1.068850-0.959482-1.018335-0.000087-1.799045-1.190771-1.1209410.0027950.2520560.0195690.128797-0.004109-0.109374-0.5945618.547930
452010.106988-0.238980-1.0688500.372229-1.018335-0.0000842.251382-1.190771-1.1209410.0172320.252056-0.000485-0.1837770.0143430.6445125.5547518.547930
45202-0.0615140.0420041.013713-0.959482-1.018335-0.0000872.251382-1.190771-1.1209410.0172320.252056-0.000515-0.183777-0.004109-0.109374-0.5945618.547930
45203-0.1590681.0254481.0137130.372229-1.018335-0.0001352.251382-1.190771-1.1209410.0172320.2520560.000118-0.183777-0.004109-0.109374-0.5945618.547930
452040.2843590.322988-1.068850-0.959482-1.0183350.0001602.251382-1.190771-1.1209410.0172320.2520560.000631-0.183777-0.0000201.3983981.4552108.547930
45205-0.141331-0.0984881.013713-0.959482-1.018335-0.0000922.2513826.241027-1.1209410.0172320.2520560.001928-0.079586-0.004109-0.109374-0.5945618.547930
452060.089251-0.098488-1.0688500.372229-1.018335-0.0000582.251382-1.190771-1.1209410.0172320.2520560.0108390.024606-0.004109-0.109374-0.5945618.547930
452070.2666220.3229883.0962761.703939-1.0183350.0000402.251382-1.190771-1.1209410.0172320.2520560.002983-0.079586-0.004109-0.109374-0.5945618.547930
452080.2754910.322988-1.068850-0.959482-1.0183350.0004702.251382-1.190771-1.1209410.0172320.2520560.0131010.2329880.0143430.4560405.5547518.547930
452090.142462-0.379472-1.068850-0.959482-1.018335-0.0000752.251382-1.1907714.2612670.0172320.2520560.0037670.128797-0.004109-0.109374-0.594561-1.132458
45210-0.0349080.603972-1.068850-0.959482-1.0183350.0001742.251382-1.190771-1.1209410.0172320.2520560.001551-0.0795860.0147421.9638133.504981-1.132458
\n", "

45211 rows × 17 columns

\n", "
" ], "text/plain": [ " age job marital education default balance housing \\\n", "0 0.151331 -0.238980 -1.068850 0.372229 -1.018335 0.000084 -1.799045 \n", "1 0.027171 -0.098488 1.013713 -0.959482 -1.018335 -0.000144 -1.799045 \n", "2 -0.070383 0.603972 -1.068850 -0.959482 -1.018335 -0.000147 -1.799045 \n", "3 0.053777 -0.379472 -1.068850 3.035650 -1.018335 0.000016 -1.799045 \n", "4 -0.070383 1.165940 1.013713 3.035650 -1.018335 -0.000147 2.251382 \n", "5 -0.052646 -0.238980 -1.068850 0.372229 -1.018335 -0.000122 -1.799045 \n", "6 -0.114725 -0.238980 1.013713 0.372229 -1.018335 -0.000099 -1.799045 \n", "7 0.009434 0.603972 3.096276 0.372229 55.472393 -0.000147 -1.799045 \n", "8 0.151331 0.322988 -1.068850 1.703939 -1.018335 -0.000134 -1.799045 \n", "9 0.018303 -0.098488 1.013713 -0.959482 -1.018335 -0.000083 -1.799045 \n", "10 0.000566 0.042004 3.096276 -0.959482 -1.018335 -0.000118 -1.799045 \n", "11 -0.105857 0.042004 1.013713 -0.959482 -1.018335 -0.000105 -1.799045 \n", "12 0.106988 -0.098488 -1.068850 -0.959482 -1.018335 -0.000146 -1.799045 \n", "13 0.151331 -0.098488 -1.068850 3.035650 -1.018335 -0.000139 -1.799045 \n", "14 0.142462 0.182496 -1.068850 -0.959482 -1.018335 -0.000129 -1.799045 \n", "15 0.089251 0.322988 -1.068850 1.703939 -1.018335 -0.000122 -1.799045 \n", "16 0.036040 0.042004 1.013713 3.035650 -1.018335 -0.000146 -1.799045 \n", "17 0.142462 -0.379472 -1.068850 1.703939 -1.018335 -0.000141 -1.799045 \n", "18 0.169068 0.322988 -1.068850 1.703939 -1.018335 -0.000140 -1.799045 \n", "19 -0.070383 0.182496 -1.068850 -0.959482 -1.018335 -0.000147 -1.799045 \n", "20 -0.114725 -0.379472 -1.068850 -0.959482 -1.018335 -0.000069 -1.799045 \n", "21 0.133594 -0.238980 -1.068850 0.372229 -1.018335 -0.000063 -1.799045 \n", "22 -0.079251 -0.379472 1.013713 1.703939 -1.018335 -0.000144 -1.799045 \n", "23 -0.141331 0.182496 -1.068850 -0.959482 -1.018335 -0.000142 -1.799045 \n", "24 -0.008303 0.322988 -1.068850 1.703939 -1.018335 -0.000147 -1.799045 \n", "25 0.027171 0.042004 -1.068850 -0.959482 -1.018335 -0.000187 -1.799045 \n", "26 -0.017171 -0.238980 1.013713 0.372229 -1.018335 -0.000119 -1.799045 \n", "27 0.098120 0.603972 -1.068850 -0.959482 -1.018335 -0.000135 -1.799045 \n", "28 0.044908 -0.238980 1.013713 -0.959482 -1.018335 -0.000173 -1.799045 \n", "29 -0.043777 -0.098488 1.013713 -0.959482 -1.018335 -0.000118 -1.799045 \n", "... ... ... ... ... ... ... ... 
\n", "45181 0.044908 -0.379472 -1.068850 -0.959482 -1.018335 0.000595 2.251382 \n", "45182 -0.061514 -0.098488 -1.068850 -0.959482 -1.018335 -0.000133 2.251382 \n", "45183 0.257753 0.322988 -1.068850 1.703939 -1.018335 -0.000112 2.251382 \n", "45184 0.195674 0.322988 -1.068850 -0.959482 -1.018335 0.000014 2.251382 \n", "45185 0.169068 0.182496 -1.068850 0.372229 -1.018335 0.000312 -1.799045 \n", "45186 0.160199 1.165940 -1.068850 3.035650 -1.018335 0.000015 2.251382 \n", "45187 -0.079251 0.182496 1.013713 -0.959482 -1.018335 -0.000021 -1.799045 \n", "45188 -0.105857 -0.238980 1.013713 -0.959482 -1.018335 -0.000071 -1.799045 \n", "45189 -0.141331 0.182496 1.013713 -0.959482 -1.018335 -0.000125 2.251382 \n", "45190 -0.079251 -0.379472 -1.068850 -0.959482 -1.018335 -0.000132 2.251382 \n", "45191 0.302096 0.322988 3.096276 0.372229 -1.018335 0.000264 -1.799045 \n", "45192 -0.105857 -0.238980 1.013713 0.372229 -1.018335 -0.000064 2.251382 \n", "45193 -0.114725 0.463480 1.013713 0.372229 -1.018335 -0.000130 2.251382 \n", "45194 0.160199 -0.238980 -1.068850 0.372229 -1.018335 -0.000132 -1.799045 \n", "45195 0.240016 0.322988 -1.068850 -0.959482 -1.018335 -0.000023 2.251382 \n", "45196 -0.141331 1.025448 1.013713 -0.959482 -1.018335 -0.000108 2.251382 \n", "45197 -0.043777 -0.238980 1.013713 -0.959482 -1.018335 0.000016 -1.799045 \n", "45198 -0.034908 -0.238980 -1.068850 0.372229 -1.018335 0.000007 2.251382 \n", "45199 -0.061514 -0.379472 1.013713 -0.959482 -1.018335 0.000012 -1.799045 \n", "45200 -0.026040 -0.098488 -1.068850 -0.959482 -1.018335 -0.000087 -1.799045 \n", "45201 0.106988 -0.238980 -1.068850 0.372229 -1.018335 -0.000084 2.251382 \n", "45202 -0.061514 0.042004 1.013713 -0.959482 -1.018335 -0.000087 2.251382 \n", "45203 -0.159068 1.025448 1.013713 0.372229 -1.018335 -0.000135 2.251382 \n", "45204 0.284359 0.322988 -1.068850 -0.959482 -1.018335 0.000160 2.251382 \n", "45205 -0.141331 -0.098488 1.013713 -0.959482 -1.018335 -0.000092 2.251382 \n", "45206 0.089251 -0.098488 -1.068850 0.372229 -1.018335 -0.000058 2.251382 \n", "45207 0.266622 0.322988 3.096276 1.703939 -1.018335 0.000040 2.251382 \n", "45208 0.275491 0.322988 -1.068850 -0.959482 -1.018335 0.000470 2.251382 \n", "45209 0.142462 -0.379472 -1.068850 -0.959482 -1.018335 -0.000075 2.251382 \n", "45210 -0.034908 0.603972 -1.068850 -0.959482 -1.018335 0.000174 2.251382 \n", "\n", " loan contact day month duration campaign pdays \\\n", "0 -1.190771 1.570163 -0.156019 -0.390517 0.000043 -0.183777 -0.004109 \n", "1 -1.190771 1.570163 -0.156019 -0.390517 -0.001616 -0.183777 -0.004109 \n", "2 6.241027 1.570163 -0.156019 -0.390517 -0.002747 -0.183777 -0.004109 \n", "3 -1.190771 1.570163 -0.156019 -0.390517 -0.002505 -0.183777 -0.004109 \n", "4 -1.190771 1.570163 -0.156019 -0.390517 -0.000907 -0.183777 -0.004109 \n", "5 -1.190771 1.570163 -0.156019 -0.390517 -0.001797 -0.183777 -0.004109 \n", "6 6.241027 1.570163 -0.156019 -0.390517 -0.000621 -0.183777 -0.004109 \n", "7 -1.190771 1.570163 -0.156019 -0.390517 0.001837 -0.183777 -0.004109 \n", "8 -1.190771 1.570163 -0.156019 -0.390517 -0.003139 -0.183777 -0.004109 \n", "9 -1.190771 1.570163 -0.156019 -0.390517 -0.003063 -0.183777 -0.004109 \n", "10 -1.190771 1.570163 -0.156019 -0.390517 -0.000545 -0.183777 -0.004109 \n", "11 -1.190771 1.570163 -0.156019 -0.390517 -0.001827 -0.183777 -0.004109 \n", "12 -1.190771 1.570163 -0.156019 -0.390517 0.003903 -0.183777 -0.004109 \n", "13 -1.190771 1.570163 -0.156019 -0.390517 -0.002822 -0.183777 -0.004109 \n", "14 -1.190771 1.570163 -0.156019 
-0.390517 -0.001269 -0.183777 -0.004109 \n", "15 -1.190771 1.570163 -0.156019 -0.390517 0.001430 -0.183777 -0.004109 \n", "16 -1.190771 1.570163 -0.156019 -0.390517 -0.002415 -0.183777 -0.004109 \n", "17 -1.190771 1.570163 -0.156019 -0.390517 -0.003320 -0.183777 -0.004109 \n", "18 -1.190771 1.570163 -0.156019 -0.390517 -0.000591 -0.183777 -0.004109 \n", "19 -1.190771 1.570163 -0.156019 -0.390517 -0.003078 -0.183777 -0.004109 \n", "20 6.241027 1.570163 -0.156019 -0.390517 0.000058 -0.183777 -0.004109 \n", "21 -1.190771 1.570163 -0.156019 -0.390517 -0.001420 -0.183777 -0.004109 \n", "22 6.241027 1.570163 -0.156019 -0.390517 -0.001480 -0.183777 -0.004109 \n", "23 -1.190771 1.570163 -0.156019 -0.390517 0.001264 -0.183777 -0.004109 \n", "24 6.241027 1.570163 -0.156019 -0.390517 -0.001163 -0.183777 -0.004109 \n", "25 -1.190771 1.570163 -0.156019 -0.390517 -0.001299 -0.183777 -0.004109 \n", "26 -1.190771 1.570163 -0.156019 -0.390517 0.000571 -0.183777 -0.004109 \n", "27 6.241027 1.570163 -0.156019 -0.390517 -0.001978 -0.183777 -0.004109 \n", "28 -1.190771 1.570163 -0.156019 -0.390517 -0.000048 -0.079586 -0.004109 \n", "29 6.241027 1.570163 -0.156019 -0.390517 0.001355 -0.183777 -0.004109 \n", "... ... ... ... ... ... ... ... \n", "45181 -1.190771 -1.120941 -0.011643 0.252056 -0.002777 -0.079586 0.007760 \n", "45182 -1.190771 -1.120941 -0.011643 0.252056 0.002154 -0.079586 0.014642 \n", "45183 -1.190771 -1.120941 -0.011643 0.252056 -0.002717 -0.183777 0.005566 \n", "45184 -1.190771 -1.120941 0.002795 0.252056 -0.001812 -0.183777 -0.001815 \n", "45185 -1.190771 -1.120941 0.002795 0.252056 -0.000877 -0.183777 0.005167 \n", "45186 -1.190771 -1.120941 0.002795 0.252056 0.000329 -0.183777 0.006364 \n", "45187 -1.190771 -1.120941 0.002795 0.252056 0.002305 -0.183777 -0.004109 \n", "45188 -1.190771 -1.120941 0.002795 0.252056 -0.000334 -0.183777 0.050849 \n", "45189 -1.190771 -1.120941 0.002795 0.252056 -0.001284 -0.183777 0.005167 \n", "45190 -1.190771 -1.120941 0.002795 0.252056 -0.000787 -0.183777 0.014742 \n", "45191 -1.190771 -1.120941 0.002795 0.252056 0.000058 -0.183777 0.014244 \n", "45192 -1.190771 -1.120941 0.002795 0.252056 -0.000304 -0.183777 -0.004109 \n", "45193 -1.190771 -1.120941 0.002795 0.252056 0.002877 -0.079586 -0.000718 \n", "45194 6.241027 -1.120941 0.002795 0.252056 -0.001450 -0.079586 0.014642 \n", "45195 -1.190771 -1.120941 0.002795 0.252056 -0.000696 -0.183777 0.014642 \n", "45196 -1.190771 -1.120941 0.002795 0.252056 0.001083 -0.183777 -0.004109 \n", "45197 -1.190771 -1.120941 0.002795 0.252056 0.000178 -0.183777 -0.004109 \n", "45198 -1.190771 -1.120941 0.002795 0.252056 0.001128 -0.079586 -0.004109 \n", "45199 -1.190771 -1.120941 0.002795 0.252056 0.013689 0.024606 0.048854 \n", "45200 -1.190771 -1.120941 0.002795 0.252056 0.019569 0.128797 -0.004109 \n", "45201 -1.190771 -1.120941 0.017232 0.252056 -0.000485 -0.183777 0.014343 \n", "45202 -1.190771 -1.120941 0.017232 0.252056 -0.000515 -0.183777 -0.004109 \n", "45203 -1.190771 -1.120941 0.017232 0.252056 0.000118 -0.183777 -0.004109 \n", "45204 -1.190771 -1.120941 0.017232 0.252056 0.000631 -0.183777 -0.000020 \n", "45205 6.241027 -1.120941 0.017232 0.252056 0.001928 -0.079586 -0.004109 \n", "45206 -1.190771 -1.120941 0.017232 0.252056 0.010839 0.024606 -0.004109 \n", "45207 -1.190771 -1.120941 0.017232 0.252056 0.002983 -0.079586 -0.004109 \n", "45208 -1.190771 -1.120941 0.017232 0.252056 0.013101 0.232988 0.014343 \n", "45209 -1.190771 4.261267 0.017232 0.252056 0.003767 0.128797 -0.004109 \n", "45210 -1.190771 -1.120941 
0.017232 0.252056 0.001551 -0.079586 0.014742 \n", "\n", " previous poutcome y \n", "0 -0.109374 -0.594561 -1.132458 \n", "1 -0.109374 -0.594561 -1.132458 \n", "2 -0.109374 -0.594561 -1.132458 \n", "3 -0.109374 -0.594561 -1.132458 \n", "4 -0.109374 -0.594561 -1.132458 \n", "5 -0.109374 -0.594561 -1.132458 \n", "6 -0.109374 -0.594561 -1.132458 \n", "7 -0.109374 -0.594561 -1.132458 \n", "8 -0.109374 -0.594561 -1.132458 \n", "9 -0.109374 -0.594561 -1.132458 \n", "10 -0.109374 -0.594561 -1.132458 \n", "11 -0.109374 -0.594561 -1.132458 \n", "12 -0.109374 -0.594561 -1.132458 \n", "13 -0.109374 -0.594561 -1.132458 \n", "14 -0.109374 -0.594561 -1.132458 \n", "15 -0.109374 -0.594561 -1.132458 \n", "16 -0.109374 -0.594561 -1.132458 \n", "17 -0.109374 -0.594561 -1.132458 \n", "18 -0.109374 -0.594561 -1.132458 \n", "19 -0.109374 -0.594561 -1.132458 \n", "20 -0.109374 -0.594561 -1.132458 \n", "21 -0.109374 -0.594561 -1.132458 \n", "22 -0.109374 -0.594561 -1.132458 \n", "23 -0.109374 -0.594561 -1.132458 \n", "24 -0.109374 -0.594561 -1.132458 \n", "25 -0.109374 -0.594561 -1.132458 \n", "26 -0.109374 -0.594561 -1.132458 \n", "27 -0.109374 -0.594561 -1.132458 \n", "28 -0.109374 -0.594561 -1.132458 \n", "29 -0.109374 -0.594561 -1.132458 \n", "... ... ... ... \n", "45181 0.456040 1.455210 -1.132458 \n", "45182 0.832983 5.554751 8.547930 \n", "45183 1.209926 5.554751 -1.132458 \n", "45184 0.832983 5.554751 -1.132458 \n", "45185 0.644512 5.554751 8.547930 \n", "45186 0.267569 1.455210 -1.132458 \n", "45187 -0.109374 -0.594561 8.547930 \n", "45188 0.267569 5.554751 8.547930 \n", "45189 0.832983 1.455210 -1.132458 \n", "45190 0.456040 5.554751 8.547930 \n", "45191 0.079097 1.455210 8.547930 \n", "45192 -0.109374 -0.594561 8.547930 \n", "45193 0.644512 5.554751 8.547930 \n", "45194 0.832983 1.455210 -1.132458 \n", "45195 1.021455 5.554751 8.547930 \n", "45196 -0.109374 -0.594561 8.547930 \n", "45197 -0.109374 -0.594561 8.547930 \n", "45198 -0.109374 -0.594561 -1.132458 \n", "45199 2.152284 3.504981 -1.132458 \n", "45200 -0.109374 -0.594561 8.547930 \n", "45201 0.644512 5.554751 8.547930 \n", "45202 -0.109374 -0.594561 8.547930 \n", "45203 -0.109374 -0.594561 8.547930 \n", "45204 1.398398 1.455210 8.547930 \n", "45205 -0.109374 -0.594561 8.547930 \n", "45206 -0.109374 -0.594561 8.547930 \n", "45207 -0.109374 -0.594561 8.547930 \n", "45208 0.456040 5.554751 8.547930 \n", "45209 -0.109374 -0.594561 -1.132458 \n", "45210 1.963813 3.504981 -1.132458 \n", "\n", "[45211 rows x 17 columns]" ] }, "execution_count": 9, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# drop NA, Keep rows with at least 17 Non-Null values\n", "df=df.dropna(thresh=17)\n", "# fill NA\n", "df=df.fillna(method='ffill',limit=3)\n", "df.apply(lambda x:((x-x.mean())/x.var()))" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "# select X and y from dataframe\n", "X=df.iloc[:,0:16]\n", "# if y=df.iloc[:,16:17], we get a dataframe,otherwise we get a series. 
Here is a series object\n", "y=df.iloc[:,16]\n", "# By default train_test_split holds out 25% of the rows as the test set; pass the 'test_size' argument to change that fraction\n", "X_train,X_test,y_train,y_test=train_test_split(X,y)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#################################################### logistic regression ####################################################" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n", " FutureWarning)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Logistic classification results:\n", "accuracy_score: 0.8971069627532513\n", "precision_score: 0.6228748068006182\n", "recall_score 0.3048411497730711\n", "auc: 0.8887919573665001\n", "f1_score(weighted): 0.8811531211378714\n", "f1_score(macro): 0.6764948773636862\n", "f1_score(micro): 0.8971069627532513\n", "f1_score(None): 0.409344845099035\n" ] } ], "source": [ "log_reg=LogisticRegression()\n", "log_reg.fit(X_train,y_train)\n", "pred_log=log_reg.predict(X_test)\n", "# Use 'predict_proba' to get scores for the AUC; it returns one probability column per class (two columns for binary classification)\n", "pred_proba_log=log_reg.predict_proba(X_test)\n", "\n", "print(\"Logistic classification results:\")\n", "# accuracy_score is the fraction of all predictions that are correct\n", "print(\"accuracy_score:\",accuracy_score(y_test,pred_log))\n", "# precision_score is the fraction of predicted positives that are truly positive\n", "print(\"precision_score:\",precision_score(y_test,pred_log))\n", "# recall_score is the fraction of actual positives that are correctly identified\n", "print(\"recall_score\",recall_score(y_test,pred_log))\n", "print(\"auc:\",roc_auc_score(y_test,pred_proba_log[:,1]))\n", "print(\"f1_score(weighted):\",f1_score(y_test,pred_log,average='weighted'))\n", "print(\"f1_score(macro):\",f1_score(y_test,pred_log,average='macro'))\n", "print(\"f1_score(micro):\",f1_score(y_test,pred_log,average='micro'))\n", "print(\"f1_score(None):\",f1_score(y_test,pred_log))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#################################################### svm ####################################################" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\preprocessing\\data.py:625: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " return self.partial_fit(X, y)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\base.py:465: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " return self.fit(X, y, **fit_params).transform(X)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "svm classification result\n", "accuracy_score: 0.8945412722286119\n", "precision_score: 0.6805555555555556\n", "recall_score 0.18532526475037822\n", "f1_score(weighted): 0.8668076170408332\n", "f1_score(macro): 0.6171758911874263\n", "f1_score(micro): 0.8945412722286119\n", "f1_score(None): 0.291319857312723\n" ] }, { "name": "stderr", "output_type": "stream", "text": [
"C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\svm\\base.py:931: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n", " \"the number of iterations.\", ConvergenceWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\pipeline.py:331: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " Xt = transform.transform(Xt)\n" ] } ], "source": [ "import numpy as np\n", "from sklearn.pipeline import Pipeline\n", "from sklearn.preprocessing import StandardScaler\n", "from sklearn.svm import LinearSVC\n", "from sklearn.svm import SVC\n", "\n", "# method without pipeline \n", "# scaler=StandardScaler()\n", "# scaler.fit(df)\n", "# svm_clf=SVC(C=1,probability=True,verbose=1)\n", "\n", "# standardize by column\n", "svm_clf=Pipeline((\n", " ('scaler',StandardScaler()),\n", " ('linear_svc',LinearSVC(C=1,loss='hinge'))\n", " ))\n", "svm_clf.fit(X_train,y_train)\n", "pred_svm=svm_clf.predict(X_test)\n", "\n", "print(\"svm classification result\")\n", "print(\"accuracy_score:\",accuracy_score(y_test,pred_svm))\n", "print(\"precision_score:\",precision_score(y_test,pred_svm))\n", "print(\"recall_score\",recall_score(y_test,pred_svm))\n", "#print(\"auc:\",roc_auc_score(y_test,pred_proba_svm[:,1]))#auc\n", "print(\"f1_score(weighted):\",f1_score(y_test,pred_svm,average='weighted'))\n", "print(\"f1_score(macro):\",f1_score(y_test,pred_svm,average='macro'))\n", "print(\"f1_score(micro):\",f1_score(y_test,pred_svm,average='micro'))\n", "print(\"f1_score(None):\",f1_score(y_test,pred_svm))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#################################################### random forest ####################################################" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "random forest classification result:\n", "accuracy_score: 0.8969300185791382\n", "precision_score: 0.755700325732899\n", "recall_score 0.17549167927382753\n", "auc: 0.9073402475293071\n", "f1_score(weighted): 0.8673131007408489\n", "f1_score(macro): 0.6146501533893725\n", "f1_score(micro): 0.8969300185791382\n", "f1_score(None): 0.2848373235113566\n" ] } ], "source": [ "from sklearn.ensemble import RandomForestClassifier\n", "from sklearn.model_selection import train_test_split\n", "\n", "print(\"random forest classification result:\")\n", "rnd_clf=RandomForestClassifier(n_estimators=500,max_leaf_nodes=16,n_jobs=-1)\n", "rnd_clf.fit(X_train,y_train)\n", "pred_rf=rnd_clf.predict(X_test)\n", "pred_proba_rf=rnd_clf.predict_proba(X_test)\n", "print(\"accuracy_score:\",accuracy_score(y_test,pred_rf))\n", "print(\"precision_score:\",precision_score(y_test,pred_rf))\n", "print(\"recall_score\",recall_score(y_test,pred_rf))\n", "print(\"auc:\",roc_auc_score(y_test,pred_proba_rf[:,1]))#auc\n", "print(\"f1_score(weighted):\",f1_score(y_test,pred_rf,average='weighted'))\n", "print(\"f1_score(macro):\",f1_score(y_test,pred_rf,average='macro'))\n", "print(\"f1_score(micro):\",f1_score(y_test,pred_rf,average='micro'))\n", "print(\"f1_score(None):\",f1_score(y_test,pred_rf))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#################################################### stacking classifier ####################################################" ] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "from sklearn import 
model_selection\n", "from sklearn.linear_model import LogisticRegression\n", "from sklearn.neighbors import KNeighborsClassifier\n", "from sklearn.naive_bayes import GaussianNB\n", "from sklearn.ensemble import RandomForestClassifier\n", "from mlxtend.classifier import StackingClassifier\n", "import numpy as np" ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Stacking:\n", "\n", "3-fold cross validation:\n", "\n", "Accuracy:0.81(+/- 0.07) [KNN]\n", "Auc:0.59(+/- 0.00) [KNN]\n", "f1:0.27(+/- 0.01) [KNN]\n", "f1_micro:0.81(+/- 0.07) [KNN]\n", "f1_macro:0.58(+/- 0.03) [KNN]\n", "f1_weighted:0.81(+/- 0.04) [KNN]\n", "\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in 
version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Accuracy:0.65(+/- 0.26) [Random Forest]\n", "Auc:0.58(+/- 0.06) [Random Forest]\n", "f1:0.17(+/- 0.04) [Random Forest]\n", "f1_micro:0.65(+/- 0.26) [Random Forest]\n", "f1_macro:0.44(+/- 0.12) [Random Forest]\n", "f1_weighted:0.65(+/- 0.24) [Random Forest]\n", "\n", "Accuracy:0.79(+/- 0.16) [Naive Bayes]\n", "Auc:0.78(+/- 0.06) [Naive Bayes]\n", "f1:0.42(+/- 0.08) [Naive Bayes]\n", "f1_micro:0.79(+/- 0.16) [Naive Bayes]\n", "f1_macro:0.64(+/- 0.10) [Naive Bayes]\n", "f1_weighted:0.81(+/- 0.12) [Naive Bayes]\n", "\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n", " FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n", " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. 
Specify a solver to silence this warning.\n",
      "  FutureWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Accuracy:0.81(+/- 0.07) [StackingClassifier]\n",
      "Auc:0.68(+/- 0.02) [StackingClassifier]\n",
      "f1:0.27(+/- 0.01) [StackingClassifier]\n",
      "f1_micro:0.81(+/- 0.07) [StackingClassifier]\n",
      "f1_macro:0.58(+/- 0.03) [StackingClassifier]\n",
      "f1_weighted:0.81(+/- 0.04) [StackingClassifier]\n",
      "\n"
     ]
    }
   ],
   "source": [
    "print(\"Stacking:\\n\")\n",
    "# base learners\n",
    "clf1=KNeighborsClassifier(n_neighbors=1)\n",
    "clf2=RandomForestClassifier(random_state=1)\n",
    "clf3=GaussianNB()\n",
    "# meta-classifier: logistic regression\n",
    "lr=LogisticRegression()\n",
    "sclf=StackingClassifier(classifiers=[clf1,clf2,clf3],meta_classifier=lr)\n",
    "\n",
    "print('3-fold cross validation:\\n')\n",
    "\n",
    "# score every base learner and the stacked model with 3-fold cross-validation\n",
    "for clf,label in zip([clf1,clf2,clf3,sclf],\n",
    "                     ['KNN',\n",
    "                      'Random Forest',\n",
    "                      'Naive Bayes',\n",
    "                      'StackingClassifier']):\n",
    "    scores_acc=model_selection.cross_val_score(clf,X,y,cv=3,scoring='accuracy')\n",
    "    scores_auc=model_selection.cross_val_score(clf,X,y,cv=3,scoring='roc_auc')\n",
    "    scores_f1=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1')\n",
    "    scores_f1_macro=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_macro')\n",
    "    scores_f1_micro=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_micro')\n",
    "    scores_f1_weighted=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_weighted')\n",
    "    print(\"Accuracy:%0.2f(+/- %0.2f) [%s]\\nAuc:%0.2f(+/- %0.2f) [%s]\\nf1:%0.2f(+/- %0.2f) [%s]\\nf1_micro:%0.2f(+/- %0.2f) [%s]\\nf1_macro:%0.2f(+/- %0.2f) [%s]\\nf1_weighted:%0.2f(+/- %0.2f) [%s]\\n\"\n",
    "          %(scores_acc.mean(),scores_acc.std(),label,\n",
    "            scores_auc.mean(),scores_auc.std(),label,\n",
    "            scores_f1.mean(),scores_f1.std(),label,\n",
    "            scores_f1_micro.mean(),scores_f1_micro.std(),label,\n",
    "            scores_f1_macro.mean(),scores_f1_macro.std(),label,\n",
    "            scores_f1_weighted.mean(),scores_f1_weighted.std(),label\n",
    "            ))\n",
    "\n",
    "# Alternative kept for reference but disabled (triple-quoted string): soft-voting ensemble\n",
    "\"\"\"\n",
    "print(\"Normal stacking:\\n\")\n",
    "import numpy as np\n",
    "\n",
    "from sklearn import model_selection\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "from sklearn.svm import SVC\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "from mlxtend.classifier import EnsembleVoteClassifier\n",
    "# initializing classifiers\n",
    "clf1=LogisticRegression(random_state=0)\n",
    "clf2=RandomForestClassifier(random_state=0)\n",
    "clf3=SVC(random_state=0,probability=True)\n",
    "eclf=EnsembleVoteClassifier(clfs=[clf1,clf2,clf3],weights=[2,1,1],voting='soft')\n",
    "\n",
    "# evaluate each base classifier and the soft-voting ensemble\n",
    "for clf,lab in zip([clf1,clf2,clf3,eclf],\n",
    "                   ['Logistic Regression','Random Forest','SVC','Ensemble']):\n",
    "    scores_acc=model_selection.cross_val_score(clf,X,y,cv=3,scoring='accuracy')\n",
    "    scores_auc=model_selection.cross_val_score(clf,X,y,cv=3,scoring='roc_auc')\n",
    "    scores_f1=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1')\n",
    "    scores_f1_macro=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_macro')\n",
    "    scores_f1_micro=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_micro')\n",
    "    scores_f1_weighted=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_weighted')\n",
    "    print(\"Accuracy:%0.2f(+/- %0.2f) [%s]\\nAuc:%0.2f(+/- %0.2f) [%s]\\nf1:%0.2f(+/- %0.2f) [%s]\\nf1_micro:%0.2f(+/- %0.2f) [%s]\\nf1_macro:%0.2f(+/- %0.2f) [%s]\\nf1_weighted:%0.2f(+/- %0.2f) [%s]\\n\"\n",
    "          %(scores_acc.mean(),scores_acc.std(),lab,\n",
    "            scores_auc.mean(),scores_auc.std(),lab,\n",
    "            scores_f1.mean(),scores_f1.std(),lab,\n",
    "            scores_f1_micro.mean(),scores_f1_micro.std(),lab,\n",
    "            scores_f1_macro.mean(),scores_f1_macro.std(),lab,\n",
    "            scores_f1_weighted.mean(),scores_f1_weighted.std(),lab\n",
    "            ))\n",
    "\"\"\"\n"
   ]
  },
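  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `FutureWarning` messages above come from leaving `LogisticRegression`'s `solver` and `RandomForestClassifier`'s `n_estimators` at their defaults. The next cell is a minimal sketch (added for clarity, not part of the original run) of the same stacking setup with those parameters pinned explicitly; it assumes the classifier imports used by the previous cell and the `X`, `y` arrays defined earlier are still available."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch only (assumes X, y and the imports used in the cell above): the same\n",
    "# stacking ensemble, but with the parameters named in the FutureWarnings set\n",
    "# explicitly so the warnings disappear and behaviour does not depend on\n",
    "# changing sklearn defaults.\n",
    "clf1 = KNeighborsClassifier(n_neighbors=1)\n",
    "clf2 = RandomForestClassifier(n_estimators=100, random_state=1)  # explicit n_estimators\n",
    "clf3 = GaussianNB()\n",
    "lr = LogisticRegression(solver='lbfgs', max_iter=1000)           # explicit solver\n",
    "sclf = StackingClassifier(classifiers=[clf1, clf2, clf3], meta_classifier=lr)\n",
    "\n",
    "scores = model_selection.cross_val_score(sclf, X, y, cv=3, scoring='roc_auc')\n",
    "print(\"Auc:%0.2f(+/- %0.2f) [StackingClassifier]\" % (scores.mean(), scores.std()))\n"
   ]
  },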
data\n", "for clf,lab in zip([clf1,clf2,clf3,eclf],\n", " ['Logistic Regression','Random Forest','Naive Bayes','Ensemble']):\n", " scores_acc=model_selection.cross_val_score(clf,X,y,cv=3,scoring='accuracy')\n", " scores_auc=model_selection.cross_val_score(clf,X,y,cv=3,scoring='roc_auc')\n", " scores_f1=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1')\n", " scores_f1_macro=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_macro') \n", " scores_f1_micro=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_micro')\n", " scores_f1_weighted=model_selection.cross_val_score(clf,X,y,cv=3,scoring='f1_weighted')\n", " print(\"Accuracy:%0.2f(+/- %0.2f) [%s]\\nAuc:%0.2f(+/- %0.2f) [%s]\\nf1:%0.2f(+/- %0.2f) [%s]\\nf1_micro:%0.2f(+/- %0.2f) [%s]\\nf1_macro:%0.2f(+/- %0.2f) [%s]\\nf1_weighted:%0.2f(+/- %0.2f) [%s]\\n\"\n", " %(scores_acc.mean(),scores_acc.std(),lab,\n", " scores_auc.mean(),scores_auc.std(),lab,\n", " scores_f1.mean(),scores_f1.std(),lab,\n", " scores_f1_micro.mean(),scores_f1_micro.std(),lab,\n", " scores_f1_macro.mean(),scores_f1_macro.std(),lab,\n", " scores_f1_weighted.mean(),scores_f1_weighted.std(),lab\n", " ))\"\"\"\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#################################################### ROC Curve ####################################################" ] }, { "cell_type": "code", "execution_count": 22, "metadata": {}, "outputs": [], "source": [ "from sklearn.preprocessing import StandardScaler\n", "from sklearn.decomposition import PCA\n", "from sklearn.metrics import roc_curve,auc\n", "from sklearn.model_selection import StratifiedKFold\n", "from sklearn.pipeline import Pipeline\n", "from matplotlib import pyplot as plt" ] }, { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "ROC curve\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\preprocessing\\data.py:625: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " return self.partial_fit(X, y)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\base.py:465: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " return self.fit(X, y, **fit_params).transform(X)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n", " FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\pipeline.py:381: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " Xt = transform.transform(Xt)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\preprocessing\\data.py:625: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " return self.partial_fit(X, y)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\base.py:465: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " return self.fit(X, y, **fit_params).transform(X)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. 
Specify a solver to silence this warning.\n", " FutureWarning)\n", "C:\\Users\\dizhe\\Anaconda3\\envs\\mcm\\lib\\site-packages\\sklearn\\pipeline.py:381: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n", " Xt = transform.transform(Xt)\n" ] }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzt3Xl8FdX9//HXSUIChJ0EAgQIS8K+R0QEQUUEVHBBC+4rLrXWr7bVX9Uu1i7aqlWLC1qXuhRxpxZFUUGKggTZ94AsCZCEkH1P7vn9MZGEEMgF7p738/Hg4Z2Zc+d+xoS347lnzjHWWkREJLSE+bsAERHxPIW7iEgIUriLiIQghbuISAhSuIuIhCCFu4hICFK4i4iEIIW7iEgIUriLiISgCH99cExMjE1ISPDXx4uIBKVVq1YdtNbGNtTOb+GekJBASkqKvz5eRCQoGWN2u9NO3TIiIiFI4S4iEoIU7iIiIUjhLiISghTuIiIhqMFwN8a8bIzJNMZsOMZxY4x52hiTaoxZZ4wZ7vkyRUTkRLhz5/4qMOk4xycDidV/ZgHPnXpZIiJyKhoc526t/doYk3CcJtOAf1lnvb7lxpg2xphO1tr9HqpRRCTolFZUkZlfRmFZJbv37aftgf+Rs38XbUwx7YdPI2n4WV79fE88xNQF2FtrO61631HhboyZhXN3T7du3Tzw0SIivpVfWkF2YTkl5VVszyxg9Z5cAFbtzqFVswialR6k6f4VdCSbUWFbGBa2nf4m/4hzrGgVB0EQ7qaeffWuum2tnQPMAUhOTtbK3CISMApKK/juh0Os3ZtLWk4Ju7KLiIwIo7zSxfd7cmgfWUV4ZTEDSaW7ycAAEVTSnCoicPF4k+/oSToRVEGTmvNWREST12IAFQMvJ6LvFNp07s3pYeFevx5PhHsa0LXWdjywzwPnFRHxmLScYvYcKmZdWh4b0vMIDzNUVLk4kFfKjqwiykoKSTJpjAjbxoURmym2kXSILKOvK5VWTfOck0Q29CkGBlwCSZOh62nQpjtNwsJp7e2Lq4cnwn0+cKcxZi5wOpCn/nYRCQQ/HCzi0U+2sHhbJqUVrlpHLBdGrmZs012Mq1xGZ7sfmtZ5c0QUtOsFxEPLEdCqC8QNhrBw6D4aWsZBWBMIbwJhEc7+ANJguBtj/g2MB2KMMWnAb6n+nw5r7fPAAmAKkAoUAzd4q1gRkfoUl1eSllPCmr25zF+zj/XpeeSVVBw+3tYU8EhSOqOrvqN94Tai8nY6B8p/bJAAXUc5d9uxfaHbaAgL7seA3BktM7OB4xb4qccqEhE5hiqXpbCskvySCpZuP8iSbZkcLCxn1e6cw21iyOOWyC8Y3jWMXiXraVeWTpOKfNhT60QdB0HPcXD6bdA6Hkx9Xx0GN79N+SsicixZBWVsyyhg1e4c1u7NJapJGNszCtmeWXhU2xZREczoWcrFzdcxPP1NIkuyqk8CRMdC02joOhwG/wS6neHcpYdgmNelcBcRv6pyWVbszGZeyl62ZhSyeX/+UW2iIsJI7NiCfp1aMTKhLX3iWhEdaZic/gyRKS8cOYSjx1kwcDr0nwrN2vruQgKMwl1EfGbRpgy+2JLJ19uyOFhYRnmVC1tnUHSv2GiGd2vLmPgIhjfZQ5fIIsJyd4EJg8oy2Pc9pOyBrC3OG9p0h97nQuL50HM8NKn7zWjjpHAXEY/LyC9l/pp9LNx4AAvklVSQWqtLxRiIaRHFpAFxtG8RSbvoSM5Jak88mZC9AzbPgYVvHPsDwiKcIB9yJQy+olF0s5wohbuIeER2YRl/+WQL3+zIJj235PD+lk0jOL1HO3rGRNMyEn45oJC4XR9A5hbYXwQZ6wEDC+t5rnHcfZA0CZq1gZadIDwq6Eex+IrCXUROWGWVi6+3Z/H84p2UVblYuzf3iONjE2O4IrkrE/p1pFlYFax/Bza8C1u+hC21GiaMdbpTmreDtj3AVQFdkp0x5HGDAm7seDBRuIvICXlj+W4e/PDIGcAnD4yjZdMIxiV1YMqgOExlKaR+Aa8/DXtX1DSMaAqj73K+7Ow4UN0pXqRwF5Hj2nIgn9/P38Tu7CKyCsuoqHK6T87p24H7JvWlT1zLmsb71sA798Gmj2r2dRwI/S+Gkbc43SviEwp3ETnMWsuq3Tmk55bwTWo2732fRqXLCfO+cS2Z0L8jzSMjuGVsD9q3iAJXFeSlwfp3YdFva07UZYQzv8qwq6FVJz9dTeOmcBdphApKKygsq6SwtJJH/ruZ/NIKSitc7MgspLzKdUTb60cnMHFAR0b3inF2lObDxrnw7T/g4LaahuFR0Lw93PgptO3uw6uR+ijcRRqBwrJKvtqSyf+2H+TtlL31tpnQryO9YqPp3r455w+Io32LKDq0jKJJeBiUF8P3r8OqVyB9Vc2bYpIgcSIkjHFGtagPPWAo3EVCVG5xOf9Zu4+5K/eycV/NU59Nwg0Du7TmsuHxNI8MJ7ZlFGMTY2ve6HJBfhp8+yzkp8OB9ZDzQ83xmCQYdAWc8VOIbO7DK5IToXAXCRHWWlbvzWVjeh7PLd7BvrzSw8f6dWrF9BHxTBoYR5c2zY58Y/EhWP0mHNwK2xdB1mawtbpmuoyAFh2h7xTnsf7WXXx0RXIqFO4iQS63uJyUXTnc/uaqwyNZwLlDf/SywUwZ1ImmTWqNFy86CFsXwIb3YedXR5+w40DofqYzR0vSJAhXTAQj/dREgkhmfilr0/IorahiybYslm7PIiO/7PDxi4Z05uYxPUjs2ILmkdV/vbd9Bkv/5vSbH9oBFcU1J4zpA91GQdeR0GcKRLVSmIcI/RRFAlRJeRUfr9vH68t3U17pYsuBgnrbDYlvzYyR3TizVwzd2jd3Jtfa8z8oK4Cdi2HlS07DbqMh6Xyny6XPBU43S1TLes8pwU/hLhJAyiqr+Gj1Pl75ZtcRU982jwznkmFdCA8znN6jHUO6tqFpRDjxbZsRFmacQP92Nnz/ryO//ARnNsU7U6B9Lx9fjfiTwl3EzwrLKvnHl6m8k7KX7KLyI479fuoALhjciZgWUfW/2Vrnzvy/99bs63Wu082SeB40bQOtOkOTZvW/X0KWwl3EDxZtymDlrkNs2p/P0u0HD+/vGRPNjWN6cPGwLkRHhmPqGzdelA27ljqTcW35uGb/kCvhwic1n7kACncRn9iXW8KLS3eyek8uu7KLyC2uWbx5bGIM45Ji
ufHMHk4XS21lhZC20hnVcmA97Pjy6JP3nwYXPdWoVx2SoyncRbzkUFE5v3xnLV9syTxif5c2zZg+PJ7bx/eibfPIowMdoKIEnhwAxdlH7m/aGjoNhaFXQcKZzuLOIvVQuIt4UGZ+KQs3ZfD0F9vJKqgZojhtaGfO7BXD9BHx9Yf5j6oqYe5M2P6Zsx3ZEs55EHqOgw79vFy9hBKFu4gHlJRX8dv5G5iXknZ4X1yrpvxqUh8uGdal/r7z2irLYNnT8NUjznbLTjDxERg03YtVSyhTuIucguzCMt5csYdXv9nFoeqRLvdP7suVp3ejVdMmDbx5h9OHvv2zmjt1cLpcLn7Wi1VLY6BwFzkBJeVVpOw+xD//9wNr9uYe8cXozWN6cN/kvs4sisc8QQ588QdIeRmos2bo2Q/AuF95p3BpdBTuIsdRXuliW0YBC9bvZ9HmDLZlFB5xfFxSLGf2bs91oxOIijjGep/Fh5yViTI2wsoXa/YPuxr6TYNupzuP/Wu6XPEghbtIPb7fk8M7KWn8+7s9R+xP7NCCKYM6cfGwLvSIia7/zdbCd3Ng+XNQcAAqS2qORbaEUbc7d+jhDXTbiJwChbs0egWlFby7Ko1lqdlsOZDPvtwSqleWo3WzJlw+Ip7LRsTTN67l8b8YLc2Hzx6EtXOhqnqkTOtuMOgyaJ8I/S5y5nLRHbr4gMJdGq3Siipe/Honj39es1Rc88hw+sS1YmjXNtw2rifd2x/j7hygqgJ2fAWrXnWeGC2rmQuGLiPg2o80MZf4jcJdGqWX//cDD3+8CYDoyHDunpDEtaO7H7vfvC5XFfwtCUoO1exLmgQjroc+kz1fsMgJcivcjTGTgKeAcOAla+1f6hzvBrwGtKluc7+1doGHaxU5aXsPFfPRmnQWrD9Aem4JeSXOKJc7z+7NzyckHn+ES22ZW+DT+5ypdAF6nePM59I2wSt1i5ysBsPdGBMOzAbOA9KAlcaY+dbaTbWaPQjMs9Y+Z4zpDywAErxQr4hbKqpc7M4uZl9uCYs2Z/Cvb3cfPjYkvjVn9o5h1lk9adM80r0Trn4TPnvAGcoI0KwdDP4JTP7L8d8n4ifu3LmPBFKttTsBjDFzgWlA7XC3QKvq162BfZ4sUuREfLQmnZ/PXXPU/heuGcH4PrEn0PXiguxUeOncmv70xIkw+i7oMdaDFYt4njvh3gXYW2s7DTi9TpvfAZ8ZY34GRAMTPFKdiJsqq1y8uWIPy3dm88mGAwDMHNmVM3rF0Du2Bd3bNyc6yo1f98Is+GEJpH8Py2fX7G8RBz9bBVEtvHQFIp7lTrjXN26rzqN1zARetdY+bow5A3jdGDPQ2tpLqIMxZhYwC6Bbt24nU6/IEQ4VlfP3RduO6HYZlxTL76cOIOFY49Drk7kZ3p8FB9bV7AuPhAGXwqDLodfZEObmHb9IAHAn3NOArrW24zm62+UmYBKAtfZbY0xTIAY4Yq5Ta+0cYA5AcnJy3f9AiLht18EiXv1mF69+s+vwvhmndeV3UwfQtMkJhvDiR2Hxn5zX3c5wRrx0H+3crUe42ScvEmDcCfeVQKIxpgeQDswArqzTZg9wLvCqMaYf0BTI8mShIuB0v0x+ainbM2umAbj3vCTuOLs34cebSvdYNn5YE+yXvwYDLvZQpSL+1WC4W2srjTF3Agtxhjm+bK3daIx5GEix1s4H7gVeNMb8H06XzfXWWt2Zi8fszi7isU+38t/1+wGIb9uMP14yiDG9Y0481IsOwtLHnSXqcqunF7hns7PWqEiIcGuce/WY9QV19v2m1utNwJmeLU3E8dM3vz8c6nGtmnLPxCQuHxHf8BzpdbmqYP5dsH4eVJVDWASM+T9nil0Fu4QYPaEqAWv5zmzunbeW9Fxn4q1Xrj+Ns5JiT677pawAnh0NedV36le87jxRqj51CVEKdwlI81L28qt3nZErYxNjePSywXRu0+zET+RywTPDIGeXs50wFq5+DyKiPFesSABSuEtAySoo45Z/pbBmby4AH/9sDAO7tD65k+Xshn+eB4UZzvblr8KASzxTqEiAU7hLwHhp6U4e+e9mAPrGteTFa5Pp2q75yZ1sywJnoWmA3ufBjLfUBSONisJd/O6Hg0Vc8PRSisurALjujO78ftrAEz9RXjp88ivY9im4Kp19Fz8PQ2d6sFqR4KBwF78qrajiwupgT+7eljnXJtMu+gTusCvLYOEDsOYtqChy9rWKh44D4NyHIG6QdwoXCXAKd/GL4vJKHvnvZt5a4YxeufaM7jx8Infr2xfBgntrvigF6DcVhl0DSRM9W6xIEFK4i09lFpRy77y1LN1+8PC+x6YP5orkrsd5Vy3ZO2DuVZDl9M0T3QFG3wkjZ0GTkxhNIxKiFO7iM0u2ZXHdy98BkNSxBRcN7sy1ZyTQurkbC0XvWQGf3g/7vne2wyLgF9uheTsvViwSvBTu4hMvfr2TPy5w7rZvPasn/29KP/feeOgHmDMOSvOc7RYdYeo/1PUi0gCFu3hVaUUVv5u/kbkrnSUBlv7qbPeGN1aUwEc/hQ3vOdt9psBpN0FvLRUg4g6Fu3hNyq5DTH/+28Pbc2eNci/YV7/hBDtAdKzz8FHCGO8UKRKiFO7iFc8v2cFfPtkCwE1jevDgBf0anujLVQUPt+fwWjBDZsIlz3u3UJEQpXAXj3K5LL/+YP3hbpgXrhnB+QPiGn5jYRa8cBaHg/2BDGjS1HuFioQ4hbt4zKGics7+22LySioA+P6h89x7IGn/OnihesHp/tNg2rMKdpFTpHAXj9iRVci0fyyjsKySi4d25tdT+rkX7Omr4MVznNcTfg9j7vZuoSKNhMJdTklpRRV3z13DpxsPADBzZDf+fKmbj/zXntzr4udgaN3VG0XkZCnc5ZTc+85aPt14gKiIMB6/YggXDnZzRaP3bnFWRAK45gPodY73ihRphBTuclJyi8u57pWVrN2by6ie7Zg76wz33liYBa9eAAe3OtuzlkDnod4rVKSRUrjLCckpKmfmi8vZcqAAgO7tm/PcVSMafqO18O8ZznS8P3roIIS7MfWAiJwwhbu4beO+PC54+n+Ht5+aMZRpQ7s0/MasrfDcmeByRtHwkzeg74Vwogtci4jbFO7SoAN5pdz6xirWVi99d+0Z3fndRQMIa2ih6gPr4V8XQ3H1DJBxg+HmL7QikogPKNzluD7dsJ/b3nBmYkzq2IK/XDaY4d3aNvzGggPwfPWUAZ2HOWPXO/b3YqUiUpvCXerlcllufWMVn29yFpd+euYwpg5pYCTMunmw7ClnYepyp0+eSY/CqNu8XK2I1KVwl6McyCvl7L8tpqSiitiWUSy4ayyxLaPqb2wtfPMMrHkTsrY486x3HAhJ5zt37H0m+7Z4EQEU7lJHfmkFE55YQklFFZMGxPHMlcNoEh5Wf+OdS+CtK6Cy1NkedQec97BGwIgEAIW7HLZmby4Xz14GwPkDOvL8NccY4uhywX/ugtWvO9v9LoJLX9QydyIBROEugPNQ0o/B/vC0AVwzqnv9DcuL4E+1+t5/+h3E9vFBhSJyIhT
ugstluf6VlQBMHhjHtWck1N9w52JnaCNA625w9zqNVRcJUAr3Rq680kXSg58AcOOZPfjNRccYrlh7kq8RN8DkxxTsIgFM4d6IbcsoYOKTXwPQuXXTYwf7qlfhPz93Xs94C/pe4JsCReSkHWMYxJGMMZOMMVuNManGmPuP0eYKY8wmY8xGY8xbni1TPG17rWAf3yeWZfcfY1bGD++oCfa71ijYRYJEg3fuxphwYDZwHpAGrDTGzLfWbqrVJhH4f8CZ1tocY0wHbxUspy49t4TzqoP9semDuSK5a/0NP3vQGb8O8H8boXW8jyoUkVPlzp37SCDVWrvTWlsOzAWm1WlzCzDbWpsDYK3N9GyZ4inbMgo48y9fAvDQhf2PHew7lzgPJwH8coeCXSTIuNPn3gXYW2s7DTi9TpskAGPMMiAc+J219tM6bTDGzAJmAXTr1u1k6pVTsHDjAX7xzloAHrtsMFecVk+wlxXAO9dD6iJn++YvIDrGd0WKiEe4E+71DYmw9ZwnERgPxANLjTEDrbW5R7zJ2jnAHIDk5OS65xAvqqxycevrqwC4e0Ji/cFeUQqP9nCm5g2PgplvQXyyjysVEU9wJ9zTgNpJEA/sq6fNcmttBfCDMWYrTtiv9EiVckp2Zxdx0TPOPOx/mDaAa+obx56X5iym4aqA+JFw8+e+LVJEPMqdPveVQKIxpocxJhKYAcyv0+ZD4GwAY0wMTjfNTk8WKifn/e/TGPfXxeSXVjJ5YFz9wZ61FZ4d7cy/fvYDCnaRENDgnbu1ttIYcyewEKc//WVr7UZjzMNAirV2fvWxicaYTUAV8EtrbbY3C5eGbUjP4555Th/7kz8ZwiXD6vlSdOkT8MXvnddTn4Hh1/qwQhHxFmOtf7q+k5OTbUpKil8+uzGocll6/XoBAJ/8fCz9OrU6utG7N8KG96BpG2fpux5jfVyliJwoY8wqa22DX4bpCdUQVFpRxR8+dh5DmHFa16OD3Vp47SLYtRTa9oCfroCIY8zXLiJBSeEeYlwuS//ffIrLwrl9O/CnSwYd3eijO51g7z4GrpyrYBcJQQr3EPOXT7fgspDcvS0vXZeMqTu518f3wJo3nNfXzYewcN8XKSJe59bcMhIcSiuqeHHpTsIMzLv1jKOD/YelkPJPaNIcfpGqYBcJYQr3EGGt5fY3VmEtPDVjGGFhtYLdVQXfPguvXQgY+NkqaBHrt1pFxPvULRMiRv35CzLyy0jq2IILBnU68uC8a2HLx87rS16AVp2PPoGIhBSFewi45+01ZOSX0axJOAvuGnvkXfsPS51g7zQUrv8Yolr6r1AR8Rl1ywS5TfvyeX91OgApD04gIrzWj7SqEt67GUwYXPOBgl2kEVG4B7Hd2UVMeXopAB/cMZroqDr/I/bCWVB4AMbdB83b+aFCEfEXhXuQyi0uZ9xfFwNwx/heDOvW9sgGc6+CzI3QvjeM/YXvCxQRv1KfexCy1jL0YWdyrwcv6MfNY3se2eDLR2q+QL31awjXj1mksdHf+iD04Rqnjz2uVdMjg72yHP57D6x+HSJbwi+3Q5NmfqpSRPxJ4R5kNqTn8X9vOzM9Lrz7rJoDZQXw9DAoyoLYvjDz3wp2kUZM4R5ESsqruLB60Y37J/eldfMmzoGibPhr9R18v4ucGR5FpFHTF6pBwlrLxL8vAeDOs3tz27hezoGygppgH3mrgl1EAIV70Jj1+ir2HiphyqA4fnF+H2dnWQG8eI7zumkbmPKY/woUkYCicA8CWw7k8/mmDACe/MnQmgNfPgIHt8GYe+D+3X6qTkQCkcI9wFlrmfrMMgDenjWKqIjqmRzz98OK56FVPEz4rR8rFJFApHAPcFf/cwXlVS7uGN+L03u2d3ZWlMATfZ3Xw6/xX3EiErAU7gHsg9VpLEt11hn/5Y/97KX58Mc45/WAS2H8/X6qTkQCmcI9QC3emnl4PPvcWaNqFt74+0DnnyNugOkv+6k6EQl0GucegCqqXFz/ykoA3r9jNMN/nDfm/VuhNA/a9YILn4S6Ky2JiFTTnXuAycwvZfDvPgNg6pDONcG++WNYN9eZvvfWJQp2ETkuhXuA+fUHGyipqOKK5HienjnM2bn3O3j7Kuf1zYs0L7uINEjdMgHkqy2ZLNqcwciEdjw2fQhYCx/eAWvfchpMfQa6jPBvkSISFBTuAWLjvjxueNXpZ3/8iiHOzs8fcoI9PNKZCKz3BD9WKCLBROEeAIrLK7lk9jcAvHXz6XRt1xy+/CN884yz2MatSyGyuZ+rFJFgonAPAM8t3kF5lYtpQzszuncMfPEwLH3cOThzroJdRE6Ywt3PNu7L45kvU+nfqRVPndMMnh3tLI8HcMcKiEn0b4EiEpQ0WsaPrLXcUD2efXbnT+HZ051gH/wTuGczdOjr5wpFJFi5Fe7GmEnGmK3GmFRjzDGfdzfGTDfGWGNMsudKDF23vbGKzIIybu+6ix4b/+HsnP4KXDoHWnX2b3EiEtQa7JYxxoQDs4HzgDRgpTFmvrV2U512LYG7gBXeKDTU5JdWsHBjBoM7RnJf1q+dnT9fC20T/FqXiIQGd+7cRwKp1tqd1tpyYC4wrZ52fwAeA0o9WF/I+nyjMz/72xV3OTvOuFPBLiIe4064dwH21tpOq953mDFmGNDVWvuxB2sLaUu3Z3Fz+H9pVrwPYvrAxEf8XZKIhBB3wr2+SUzs4YPGhAFPAvc2eCJjZhljUowxKVlZWe5XGYJ2ZBVxabiz2DWzvtJcMSLiUe6EexrQtdZ2PLCv1nZLYCCw2BizCxgFzK/vS1Vr7RxrbbK1Njk2Nvbkqw4BLQp/oH/Ybqc7JjLa3+WISIhxJ9xXAonGmB7GmEhgBjD/x4PW2jxrbYy1NsFamwAsB6Zaa1O8UnEIyCuu4OGSPzsbAy7xbzEiEpIaDHdrbSVwJ7AQ2AzMs9ZuNMY8bIyZ6u0CQ9HOrHwSw9Ipiu4G8Ro1KiKe59YTqtbaBcCCOvt+c4y240+9rNBWsN2ZR6Zo0LWoQ0ZEvEFPqPpBq+3vA9A6+XI/VyIioUrh7mN5JRW03e+MkomKSfBvMSISsjRxmI+k5RTzwffpPPX5JlKbZrInLJ5u/i5KREKWwt0Hbn09hYXVT6Qmm1QAuo2/3o8ViUioU7eMlz3zxXYWbswgrlVTXrhmBG+dXeQc6H2efwsTkZCmO3cvcbksT3y+jX985dypv3fHaLq0ioQPn4PIltBpiJ8rFJFQpnD3gi+3ZHDjqzXPcD1y8UC6rJsNS5+AimIYdYemGxARr1K4e5C1lmcX7+CvC7cCcNOYHtyXbIh8bTyUHHIa9RgH5/3Bf0WKSKOgcPeQ7MIybnh1JevS8gB47caRjOvkgseTnAbNY2DWYmjT9ZjnEBHxFIX7KSqtqOLPCzbz2re7ARgc35rXbhhJ20gX/LGj0yhxIlz1jh+rFJHGRuF+Cjbty2fK00sPb/9+6gCuG50ARdnw7DnOzg794cp5/ilQRBothftJKCmv4vXlu/jTgi0A3DymB/dN7kuT9JXw0i2Q9p3TMLIF3LZMX56KiM8p3E
9AamYBt76+ih1ZRYf3PT1zGFMHd4K5V8LW6rnVuiTD8Gtg4HQI06MEIuJ7Cnc3vf99GvfMWwvA6F7tObdfRy4Z1oV2ZWnwcDuwLmjdFa5+D2L7+LlaEWnsFO4N+HJLBrP+tYpKl7Oy4CMXD+TqUd2dg1sWwNyZzuvEiTDzbd2pi0hAULgfwzc7DrIxPZ8/LtgMwE+Su/Lghf1oGRUBr10EP3xd0/iSF2DIDD9VKiJyNIV7Pa56aTnLUrMBCA8zPHhBP244rQMs/TMsfbym4dCrYfz9GrsuIgFH4V5LfmkF5z2xhIz8MgAW3XMWPWNaEPbtM/Cnh2oaJt8EFzyuUTAiErAU7tVW7c7hsuec5e86toris7vH0bpZBCz+Myx51Gl0zkMw7Bpo2dGPlYqINEzhDixLPchVL60A4LcX9eeGM3vAgQ3wz+shezu0TYCr3oOY3n6tU0TEXY0+3A8Wlh0O9sMjYTa8B+/e6DRIPB9mztUoGBEJKo063HOLyzntj4sAuGN8LyfYiw/Bezc7Da5+H3qf68cKRUROTqO+HZ0xZznWwvQR8fzy/D7gqoK/D3IeSJrxloJdRIJWo71zf+2bXWw5UMCY3jH87fIhkL0DnhnuHBx4GfS9wL8FioicgkZ5515SXsWfFmymZ2w0L19/mvPl6bNnOAdbdoaLn/NvgSIip6hR3rlf888VlFW6uHtCEpGb34f3bnIOTH8FBl7q3+JERDyg0YV7Wk4xKbtziGkRxVSzrObL0yvfgaSJ/i1ORMRDGl1Rl9k4AAAJ10lEQVS4P/qps77p34fshffvdnbetRra9fRjVSIintWo+tzX7s3lP2v3EU0JY1ZVB/vt3yjYRSTkNKpwf3LRNgD+1/c9Z8eYe6DjAD9WJCLiHY0m3NfszWXx1iziyKbtrgXQPhEm/NbfZYmIeIVb4W6MmWSM2WqMSTXG3F/P8XuMMZuMMeuMMV8YY7p7vtRT89M3vwcsS1pXB/oFf/NrPSIi3tRguBtjwoHZwGSgPzDTGNO/TrPVQLK1djDwLvCYpws9FY99uoX03BIeG3qQqLJD0GUE9Bzv77JERLzGnTv3kUCqtXantbYcmAtMq93AWvuVtba4enM5EO/ZMk9eamYBzy7eQRgupmf9w9l51bv+LUpExMvcCfcuwN5a22nV+47lJuCT+g4YY2YZY1KMMSlZWVnuV3kKPl63H4CvR64gLHs7xA2C5u188tkiIv7iTrjXt9yQrbehMVcDycBf6zturZ1jrU221ibHxsa6X+Up+GpLJj1jo4mv2OXsmLXEJ58rIuJP7oR7GlB7kdB4YF/dRsaYCcADwFRrbZlnyjs13+/JYW1aHhf2bwdbP4FBl0NYuL/LEhHxOnfCfSWQaIzpYYyJBGYA82s3MMYMA17ACfZMz5d5cv68YDMAd26/CVyVzsIbIiKNQIPhbq2tBO4EFgKbgXnW2o3GmIeNMVOrm/0VaAG8Y4xZY4yZf4zT+UxGfikrd+VwXnwlkYech5cYNN2/RYmI+Ihbc8tYaxcAC+rs+02t1xM8XNcpe+IzJ9D/VPWks2Pas2Dq+/pARCT0hOQTqi6X5e2UvTzQ4Rtic1ZDTBIMvdLfZYmI+ExIhvvatFyGm23ckl89rv2GT3TXLiKNSkiG++cbDzA78mlnY8a/ITrGvwWJiPhYSIZ75ep/08kcggseh75T/F2OiIjPhWS4/6rsGefFEPWzi0jjFHLhvnL5EiKMi4Mt+kBkc3+XIyLiFyEX7lmLXwAg+vp3/FyJiIj/hFS4Z+WXclrJUra3GEmzmICbUl5ExGdCKtz3rF9CrMknvJ++RBWRxi2kwr1g3X8A6NB3tJ8rERHxr9AJ94pSxma8BUCLHqf5uRgREf8KnXBf+jjhVPFK+3sgLHQuS0TkZLg1cVjAO/QD9uu/scPVmQM9L/V3NSIifhcat7jfvYjBxc8r7uTGsYn+rkZExO9CJNydse1lsQPp2Kqpn4sREfG/4A/3wkxwVZJi+zCmtyYIExGBUAj31C8AeLxiOj1iov1cjIhIYAj+cD+wjnITxQpXP87s3d7f1YiIBITgD/edS8gLa4OLMHp3aOnvakREAkLQh7stLySsqpQLBnXydykiIgEjuMPdVYUtOkhKVSKjeqlLRkTkR8Ed7iU5hFUUsdbVi5EJ7fxdjYhIwAjqcC8/sAmAjCad6ROn/nYRkR8Fdbhv2+qE+5TTB/m5EhGRwBLU4Z6engZAn34KdxGR2oI63KNzNpNjW9ClWy9/lyIiElCCOtzHFC9iZ3gCJizc36WIiASUoA33vPxCANq1aO7nSkREAk/QhvvKbz4HoLTXJD9XIiISeII23KO+mw1An7Ov8nMlIiKBx61wN8ZMMsZsNcakGmPur+d4lDHm7erjK4wxCZ4utLaC0goSq1IpIJqwVnHe/CgRkaDUYLgbY8KB2cBkoD8w0xjTv06zm4Aca21v4EngUU8XWtvSlauJMzkc7HSWNz9GRCRouXPnPhJItdbutNaWA3OBaXXaTANeq379LnCuMcZ4rsxarKX3dw8B0GHiPV75CBGRYOdOuHcB9tbaTqveV28ba20lkAd4ZSavFR8+S1LBcpY3HUN0j5He+AgRkaDnTrjXdwduT6INxphZxpgUY0xKVlaWO/UdJTK6NduaDaHJFS+f1PtFRBqDCDfapAFda23HA/uO0SbNGBMBtAYO1T2RtXYOMAcgOTn5qPB3x7CJV8PEq0/mrSIijYY7d+4rgURjTA9jTCQwA5hfp8184Lrq19OBL621JxXeIiJy6hq8c7fWVhpj7gQWAuHAy9bajcaYh4EUa+184J/A68aYVJw79hneLFpERI7PnW4ZrLULgAV19v2m1utS4HLPliYiIicraJ9QFRGRY1O4i4iEIIW7iEgIUriLiIQghbuISAgy/hqObozJAnaf5NtjgIMeLCcY6JobB11z43Aq19zdWhvbUCO/hfupMMakWGuT/V2HL+maGwddc+Pgi2tWt4yISAhSuIuIhKBgDfc5/i7AD3TNjYOuuXHw+jUHZZ+7iIgcX7DeuYuIyHEEdLgH2sLcvuDGNd9jjNlkjFlnjPnCGNPdH3V6UkPXXKvddGOMNcYE/cgKd67ZGHNF9c96ozHmLV/X6Glu/G53M8Z8ZYxZXf37PcUfdXqKMeZlY0ymMWbDMY4bY8zT1f8+1hljhnu0AGttQP7BmV54B9ATiATWAv3rtLkDeL769QzgbX/X7YNrPhtoXv369sZwzdXtWgJfA8uBZH/X7YOfcyKwGmhbvd3B33X74JrnALdXv+4P7PJ33ad4zWcBw4ENxzg+BfgEZyW7UcAKT35+IN+5B9bC3L7R4DVba7+y1hZXby7HWRkrmLnzcwb4A/AYUOrL4rzEnWu+BZhtrc0BsNZm+rhGT3Pnmi3Qqvp1a45e8S2oWGu/pp4V6WqZBvzLOpYDbYwxnTz1+YEc7gG1MLePuHPNtd2E81/+YNbgNRtjhgFdrbUf+7IwL3Ln55wEJBljlhljlhtjJvmsOu9w55p/B1xtjEnDWT/iZ74pzW9O9
O/7CXFrsQ4/8djC3EHE7esxxlwNJAPjvFqR9x33mo0xYcCTwPW+KsgH3Pk5R+B0zYzH+b+zpcaYgdbaXC/X5i3uXPNM4FVr7ePGmDNwVncbaK11eb88v/BqfgXynfuJLMzN8RbmDiLuXDPGmAnAA8BUa22Zj2rzloauuSUwEFhsjNmF0zc5P8i/VHX3d/sja22FtfYHYCtO2Acrd675JmAegLX2W6Apzhwsocqtv+8nK5DDvTEuzN3gNVd3UbyAE+zB3g8LDVyztTbPWhtjrU2w1ibgfM8w1Vqb4p9yPcKd3+0Pcb48xxgTg9NNs9OnVXqWO9e8BzgXwBjTDyfcs3xapW/NB66tHjUzCsiz1u732Nn9/Y1yA982TwG24XzL/kD1vodx/nKD88N/B0gFvgN6+rtmH1zzIiADWFP9Z76/a/b2Nddpu5ggHy3j5s/ZAE8Am4D1wAx/1+yDa+4PLMMZSbMGmOjvmk/xev8N7AcqcO7SbwJuA26r9TOeXf3vY72nf6/1hKqISAgK5G4ZERE5SQp3EZEQpHAXEQlBCncRkRCkcBcRCUEKdxGREKRwFxEJQQp3EZEQ9P8B92ZGNkH3GDQAAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "# roc curve\n", "print('ROC curve')\n", "kfold=StratifiedKFold(n_splits=2,random_state=1)\n", "'''pipeline,turtle or list is outermost,inside must be turtle'''\n", "pipe_lr=Pipeline([('scl',StandardScaler()),('pca',PCA(n_components=1)),('clf',LogisticRegression(random_state=1))])\n", "for i, (train,test) in enumerate(kfold.split(X_train,y_train)): \n", " prob=pipe_lr.fit(X_train.iloc[train],y_train.iloc[train]).predict_proba(X_train.iloc[test])\n", " fpr,tqr,thresholds=roc_curve(y_train.iloc[test],prob[:,1],pos_label=1)\n", " roc_auc=auc(fpr,tqr)\n", " plt.plot(fpr,tqr,label='ROC fold:{},auc:{}'.format(i,roc_auc))" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.7" } }, "nbformat": 4, "nbformat_minor": 2 }