{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "\n# Plot the decision boundaries of a VotingClassifier\n\n.. currentmodule:: sklearn\n\nPlot the decision boundaries of a :class:`~ensemble.VotingClassifier` for two\nfeatures of the Iris dataset.\n\nPlot the class probabilities of the first sample in a toy dataset predicted by\nthree different classifiers and averaged by the\n:class:`~ensemble.VotingClassifier`.\n\nFirst, three exemplary classifiers are initialized\n(:class:`~tree.DecisionTreeClassifier`,\n:class:`~neighbors.KNeighborsClassifier`, and :class:`~svm.SVC`) and used to\ninitialize a soft-voting :class:`~ensemble.VotingClassifier` with weights `[2,\n1, 2]`, which means that the predicted probabilities of the\n:class:`~tree.DecisionTreeClassifier` and :class:`~svm.SVC` each count 2 times\nas much as the weights of the :class:`~neighbors.KNeighborsClassifier`\nclassifier when the averaged probability is calculated.\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# Authors: The scikit-learn developers\n# SPDX-License-Identifier: BSD-3-Clause\n\nfrom itertools import product\n\nimport matplotlib.pyplot as plt\n\nfrom sklearn import datasets\nfrom sklearn.ensemble import VotingClassifier\nfrom sklearn.inspection import DecisionBoundaryDisplay\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.svm import SVC\nfrom sklearn.tree import DecisionTreeClassifier\n\n# Loading some example data\niris = datasets.load_iris()\nX = iris.data[:, [0, 2]]\ny = iris.target\n\n# Training classifiers\nclf1 = DecisionTreeClassifier(max_depth=4)\nclf2 = KNeighborsClassifier(n_neighbors=7)\nclf3 = SVC(gamma=0.1, kernel=\"rbf\", probability=True)\neclf = VotingClassifier(\n estimators=[(\"dt\", clf1), (\"knn\", clf2), (\"svc\", clf3)],\n voting=\"soft\",\n weights=[2, 1, 2],\n)\n\nclf1.fit(X, y)\nclf2.fit(X, y)\nclf3.fit(X, y)\neclf.fit(X, y)\n\n# Plotting decision regions\nf, axarr = plt.subplots(2, 2, sharex=\"col\", sharey=\"row\", figsize=(10, 8))\nfor idx, clf, tt in zip(\n product([0, 1], [0, 1]),\n [clf1, clf2, clf3, eclf],\n [\"Decision Tree (depth=4)\", \"KNN (k=7)\", \"Kernel SVM\", \"Soft Voting\"],\n):\n DecisionBoundaryDisplay.from_estimator(\n clf, X, alpha=0.4, ax=axarr[idx[0], idx[1]], response_method=\"predict\"\n )\n axarr[idx[0], idx[1]].scatter(X[:, 0], X[:, 1], c=y, s=20, edgecolor=\"k\")\n axarr[idx[0], idx[1]].set_title(tt)\n\nplt.show()" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.21" } }, "nbformat": 4, "nbformat_minor": 0 }