{ "metadata": { "name": "", "signature": "sha256:4e49430d14da775f781b66225695e4ec1273fd081b7cf8d18a00bf840a60842c" }, "nbformat": 3, "nbformat_minor": 0, "worksheets": [ { "cells": [ { "cell_type": "heading", "level": 1, "metadata": {}, "source": [ "Robust Linear Models" ] }, { "cell_type": "code", "collapsed": false, "input": [ "from __future__ import print_function\n", "import numpy as np\n", "import statsmodels.api as sm\n", "import matplotlib.pyplot as plt\n", "from statsmodels.sandbox.regression.predstd import wls_prediction_std" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Estimation\n", "\n", "Load data:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "data = sm.datasets.stackloss.load()\n", "data.exog = sm.add_constant(data.exog)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Huber's T norm with the (default) median absolute deviation scaling" ] }, { "cell_type": "code", "collapsed": false, "input": [ "huber_t = sm.RLM(data.endog, data.exog, M=sm.robust.norms.HuberT())\n", "hub_results = huber_t.fit()\n", "print(hub_results.params)\n", "print(hub_results.bse)\n", "print(hub_results.summary(yname='y',\n", " xname=['var_%d' % i for i in range(len(hub_results.params))]))" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Huber's T norm with 'H2' covariance matrix" ] }, { "cell_type": "code", "collapsed": false, "input": [ "hub_results2 = huber_t.fit(cov=\"H2\")\n", "print(hub_results2.params)\n", "print(hub_results2.bse)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Andrew's Wave norm with Huber's Proposal 2 scaling and 'H3' covariance matrix" ] }, { "cell_type": "code", "collapsed": false, "input": [ "andrew_mod = sm.RLM(data.endog, data.exog, M=sm.robust.norms.AndrewWave())\n", "andrew_results = andrew_mod.fit(scale_est=sm.robust.scale.HuberScale(), cov=\"H3\")\n", "print('Parameters: ', andrew_results.params)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "See ``help(sm.RLM.fit)`` for more options and ``module sm.robust.scale`` for scale options\n", "\n", "## Comparing OLS and RLM\n", "\n", "Artificial data with outliers:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "nsample = 50\n", "x1 = np.linspace(0, 20, nsample)\n", "X = np.column_stack((x1, (x1-5)**2))\n", "X = sm.add_constant(X)\n", "sig = 0.3 # smaller error variance makes OLS<->RLM contrast bigger\n", "beta = [5, 0.5, -0.0]\n", "y_true2 = np.dot(X, beta)\n", "y2 = y_true2 + sig*1. * np.random.normal(size=nsample)\n", "y2[[39,41,43,45,48]] -= 5 # add some outliers (10% of nsample)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Example 1: quadratic function with linear truth\n", "\n", "Note that the quadratic term in OLS regression will capture outlier effects. 
" ] }, { "cell_type": "code", "collapsed": false, "input": [ "res = sm.OLS(y2, X).fit()\n", "print(res.params)\n", "print(res.bse)\n", "print(res.predict())" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Estimate RLM:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "resrlm = sm.RLM(y2, X).fit()\n", "print(resrlm.params)\n", "print(resrlm.bse)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Draw a plot to compare OLS estimates to the robust estimates:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "fig = plt.figure(figsize=(12,8))\n", "ax = fig.add_subplot(111)\n", "ax.plot(x1, y2, 'o',label=\"data\")\n", "ax.plot(x1, y_true2, 'b-', label=\"True\")\n", "prstd, iv_l, iv_u = wls_prediction_std(res)\n", "ax.plot(x1, res.fittedvalues, 'r-', label=\"OLS\")\n", "ax.plot(x1, iv_u, 'r--')\n", "ax.plot(x1, iv_l, 'r--')\n", "ax.plot(x1, resrlm.fittedvalues, 'g.-', label=\"RLM\")\n", "ax.legend(loc=\"best\")" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Example 2: linear function with linear truth\n", "\n", "Fit a new OLS model using only the linear term and the constant:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "X2 = X[:,[0,1]] \n", "res2 = sm.OLS(y2, X2).fit()\n", "print(res2.params)\n", "print(res2.bse)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Estimate RLM:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "resrlm2 = sm.RLM(y2, X2).fit()\n", "print(resrlm2.params)\n", "print(resrlm2.bse)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Draw a plot to compare OLS estimates to the robust estimates:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "prstd, iv_l, iv_u = wls_prediction_std(res2)\n", "\n", "fig, ax = plt.subplots(figsize=(8,6))\n", "ax.plot(x1, y2, 'o', label=\"data\")\n", "ax.plot(x1, y_true2, 'b-', label=\"True\")\n", "ax.plot(x1, res2.fittedvalues, 'r-', label=\"OLS\")\n", "ax.plot(x1, iv_u, 'r--')\n", "ax.plot(x1, iv_l, 'r--')\n", "ax.plot(x1, resrlm2.fittedvalues, 'g.-', label=\"RLM\")\n", "legend = ax.legend(loc=\"best\")" ], "language": "python", "metadata": {}, "outputs": [] } ], "metadata": {} } ] }