{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "\n# Lasso, Lasso-LARS, and Elastic Net paths\n\nThis example shows how to compute the \"paths\" of coefficients along the Lasso,\nLasso-LARS, and Elastic Net regularization paths. In other words, it shows the\nrelationship between the regularization parameter (alpha) and the coefficients.\n\nLasso and Lasso-LARS impose a sparsity constraint on the coefficients,\nencouraging some of them to be zero. Elastic Net is a generalization of\nLasso that adds an L2 penalty term to the L1 penalty term. This allows for\nsome coefficients to be non-zero while still encouraging sparsity.\n\nLasso and Elastic Net use a coordinate descent method to compute the paths, while\nLasso-LARS uses the LARS algorithm to compute the paths.\n\nThe paths are computed using :func:`~sklearn.linear_model.lasso_path`,\n:func:`~sklearn.linear_model.lars_path`, and :func:`~sklearn.linear_model.enet_path`.\n\nThe results show different comparison plots:\n\n- Compare Lasso and Lasso-LARS\n- Compare Lasso and Elastic Net\n- Compare Lasso with positive Lasso\n- Compare LARS and Positive LARS\n- Compare Elastic Net and positive Elastic Net\n\nEach plot shows how the model coefficients vary as the regularization strength changes,\noffering insight into the behavior of these models\nunder different constraints.\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# Authors: The scikit-learn developers\n# SPDX-License-Identifier: BSD-3-Clause\n\nfrom itertools import cycle\n\nimport matplotlib.pyplot as plt\n\nfrom sklearn.datasets import load_diabetes\nfrom sklearn.linear_model import enet_path, lars_path, lasso_path\n\nX, y = load_diabetes(return_X_y=True)\nX /= X.std(axis=0) # Standardize data (easier to set the l1_ratio parameter)\n\n# Compute paths\n\neps = 5e-3 # the smaller it is the longer is the path\n\nprint(\"Computing regularization path using the lasso...\")\nalphas_lasso, coefs_lasso, _ = lasso_path(X, y, eps=eps)\n\nprint(\"Computing regularization path using the positive lasso...\")\nalphas_positive_lasso, coefs_positive_lasso, _ = lasso_path(\n X, y, eps=eps, positive=True\n)\n\nprint(\"Computing regularization path using the LARS...\")\nalphas_lars, _, coefs_lars = lars_path(X, y, method=\"lasso\")\n\nprint(\"Computing regularization path using the positive LARS...\")\nalphas_positive_lars, _, coefs_positive_lars = lars_path(\n X, y, method=\"lasso\", positive=True\n)\n\nprint(\"Computing regularization path using the elastic net...\")\nalphas_enet, coefs_enet, _ = enet_path(X, y, eps=eps, l1_ratio=0.8)\n\nprint(\"Computing regularization path using the positive elastic net...\")\nalphas_positive_enet, coefs_positive_enet, _ = enet_path(\n X, y, eps=eps, l1_ratio=0.8, positive=True\n)\n\n# Display results\n\nplt.figure(1)\ncolors = cycle([\"b\", \"r\", \"g\", \"c\", \"k\"])\nfor coef_lasso, coef_lars, c in zip(coefs_lasso, coefs_lars, colors):\n l1 = plt.semilogx(alphas_lasso, coef_lasso, c=c)\n l2 = plt.semilogx(alphas_lars, coef_lars, linestyle=\"--\", c=c)\n\nplt.xlabel(\"alpha\")\nplt.ylabel(\"coefficients\")\nplt.title(\"Lasso and LARS Paths\")\nplt.legend((l1[-1], l2[-1]), (\"Lasso\", \"LARS\"), loc=\"lower right\")\nplt.axis(\"tight\")\n\nplt.figure(2)\ncolors = cycle([\"b\", \"r\", \"g\", \"c\", \"k\"])\nfor coef_l, coef_e, c in zip(coefs_lasso, coefs_enet, colors):\n l1 = plt.semilogx(alphas_lasso, coef_l, c=c)\n l2 = plt.semilogx(alphas_enet, coef_e, linestyle=\"--\", 
c=c)\n\nplt.xlabel(\"alpha\")\nplt.ylabel(\"coefficients\")\nplt.title(\"Lasso and Elastic-Net Paths\")\nplt.legend((l1[-1], l2[-1]), (\"Lasso\", \"Elastic-Net\"), loc=\"lower right\")\nplt.axis(\"tight\")\n\n\nplt.figure(3)\nfor coef_l, coef_pl, c in zip(coefs_lasso, coefs_positive_lasso, colors):\n l1 = plt.semilogy(alphas_lasso, coef_l, c=c)\n l2 = plt.semilogy(alphas_positive_lasso, coef_pl, linestyle=\"--\", c=c)\n\nplt.xlabel(\"alpha\")\nplt.ylabel(\"coefficients\")\nplt.title(\"Lasso and positive Lasso\")\nplt.legend((l1[-1], l2[-1]), (\"Lasso\", \"positive Lasso\"), loc=\"lower right\")\nplt.axis(\"tight\")\n\n\nplt.figure(4)\ncolors = cycle([\"b\", \"r\", \"g\", \"c\", \"k\"])\nfor coef_lars, coef_positive_lars, c in zip(coefs_lars, coefs_positive_lars, colors):\n l1 = plt.semilogx(alphas_lars, coef_lars, c=c)\n l2 = plt.semilogx(alphas_positive_lars, coef_positive_lars, linestyle=\"--\", c=c)\n\nplt.xlabel(\"alpha\")\nplt.ylabel(\"coefficients\")\nplt.title(\"LARS and Positive LARS\")\nplt.legend((l1[-1], l2[-1]), (\"LARS\", \"Positive LARS\"), loc=\"lower right\")\nplt.axis(\"tight\")\n\nplt.figure(5)\nfor coef_e, coef_pe, c in zip(coefs_enet, coefs_positive_enet, colors):\n l1 = plt.semilogx(alphas_enet, coef_e, c=c)\n l2 = plt.semilogx(alphas_positive_enet, coef_pe, linestyle=\"--\", c=c)\n\nplt.xlabel(\"alpha\")\nplt.ylabel(\"coefficients\")\nplt.title(\"Elastic-Net and positive Elastic-Net\")\nplt.legend((l1[-1], l2[-1]), (\"Elastic-Net\", \"positive Elastic-Net\"), loc=\"lower right\")\nplt.axis(\"tight\")\nplt.show()" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.21" } }, "nbformat": 4, "nbformat_minor": 0 }