{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "from sklearn.datasets import load_boston\n",
    "from sklearn.tree import DecisionTreeRegressor\n",
    "from sklearn.utils.stats import _weighted_percentile\n",
    "from sklearn.ensemble import GradientBoostingRegressor as skGradientBoostingRegressor"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Implementation 1\n",
    "- similar to scikit-learn loss=\"ls\""
   ]
  },
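  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick sketch of the math (standard gradient boosting, stated here for reference): each iteration fits a tree to the negative gradient of the loss at the current prediction. For squared-error loss\n",
    "\n",
    "$$L(y, F) = \\tfrac{1}{2}\\,(y - F)^2,$$\n",
    "\n",
    "the negative gradient $-\\partial L/\\partial F = y - F$ is just the residual, and the mean of the residuals in a leaf (what the regression tree stores) is already the loss-optimal constant update, so the tree predictions are used as-is, scaled by the learning rate."
   ]
  },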
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "class GradientBoostingRegressor():\n",
    "    def __init__(self, learning_rate=0.1, n_estimators=100, max_depth=3, random_state=0):\n",
    "        self.learning_rate = learning_rate\n",
    "        self.n_estimators = n_estimators\n",
    "        self.max_depth = max_depth\n",
    "        self.random_state = random_state\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        self.n_features_ = X.shape[1]\n",
    "        self.estimators_ = np.empty((self.n_estimators, 1), dtype=np.object)\n",
    "        raw_predictions = np.zeros(X.shape[0])\n",
    "        rng = np.random.RandomState(0)\n",
    "        for i in range(self.n_estimators):\n",
    "            residual = y - raw_predictions\n",
    "            tree = DecisionTreeRegressor(criterion=\"friedman_mse\", max_depth=self.max_depth,\n",
    "                                         random_state=rng)\n",
    "            tree.fit(X, residual)\n",
    "            raw_predictions += self.learning_rate * tree.predict(X)\n",
    "            self.estimators_[i, 0] = tree\n",
    "        return self\n",
    "\n",
    "    def predict(self, X):\n",
    "        raw_predictions = np.zeros(X.shape[0])\n",
    "        for i in range(self.n_estimators):\n",
    "            raw_predictions += self.learning_rate * self.estimators_[i, 0].predict(X)\n",
    "        return raw_predictions\n",
    "\n",
    "    @property\n",
    "    def feature_importances_(self):\n",
    "        all_importances = np.zeros(self.n_features_)\n",
    "        for i in range(self.n_estimators):\n",
    "            all_importances += self.estimators_[i, 0].tree_.compute_feature_importances(normalize=False)\n",
    "        return all_importances / np.sum(all_importances)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "X, y = load_boston(return_X_y=True)\n",
    "clf1 = GradientBoostingRegressor().fit(X, y)\n",
    "clf2 = skGradientBoostingRegressor(init=\"zero\", presort=False, random_state=0).fit(X, y)\n",
    "assert np.allclose(clf1.feature_importances_, clf2.feature_importances_)\n",
    "pred1 = clf1.predict(X)\n",
    "pred2 = clf2.predict(X)\n",
    "assert np.allclose(pred1, pred2)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Implementation 2\n",
    "- similar to scikit-learn loss=\"lad\""
   ]
  },
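  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For absolute-error (LAD) loss\n",
    "\n",
    "$$L(y, F) = |y - F|,$$\n",
    "\n",
    "the negative gradient is $\\operatorname{sign}(y - F)$, which is the target the tree is fit to. Unlike the squared-error case, the leaf means of this target are not the loss-optimal updates, so each leaf value is overwritten with the median of the raw residuals $y - F$ in that leaf (the constant that minimizes absolute error) before the predictions are updated."
   ]
  },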
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "class GradientBoostingRegressor():\n",
    "    def __init__(self, learning_rate=0.1, n_estimators=100, max_depth=3, random_state=0):\n",
    "        self.learning_rate = learning_rate\n",
    "        self.n_estimators = n_estimators\n",
    "        self.max_depth = max_depth\n",
    "        self.random_state = random_state\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        self.n_features_ = X.shape[1]\n",
    "        self.estimators_ = np.empty((self.n_estimators, 1), dtype=np.object)\n",
    "        raw_predictions = np.zeros(X.shape[0])\n",
    "        rng = np.random.RandomState(0)\n",
    "        for i in range(self.n_estimators):\n",
    "            residual = np.sign(y - raw_predictions)\n",
    "            tree = DecisionTreeRegressor(criterion=\"friedman_mse\", max_depth=self.max_depth,\n",
    "                                         random_state=rng)\n",
    "            tree.fit(X, residual)\n",
    "            terminal_regions = tree.apply(X)\n",
    "            for leaf in np.where(tree.tree_.children_left == -1)[0]:\n",
    "                cur = np.where(terminal_regions == leaf)[0]\n",
    "                # scikit-learn uses _weightef_percentile, which is inconsistent with np.median\n",
    "                tree.tree_.value[leaf, 0, 0] = _weighted_percentile(y[cur] - raw_predictions[cur],\n",
    "                                                                    np.ones(cur.shape[0]))\n",
    "            raw_predictions += self.learning_rate * tree.tree_.value[:, 0, 0][terminal_regions]\n",
    "            self.estimators_[i, 0] = tree\n",
    "        return self\n",
    "\n",
    "    def predict(self, X):\n",
    "        raw_predictions = np.zeros(X.shape[0])\n",
    "        for i in range(self.n_estimators):\n",
    "            raw_predictions += self.learning_rate * self.estimators_[i, 0].predict(X)\n",
    "        return raw_predictions\n",
    "\n",
    "    @property\n",
    "    def feature_importances_(self):\n",
    "        all_importances = np.zeros(self.n_features_)\n",
    "        for i in range(self.n_estimators):\n",
    "            all_importances += self.estimators_[i, 0].tree_.compute_feature_importances(normalize=False)\n",
    "        return all_importances / np.sum(all_importances)"
   ]
  },
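  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal check of the inconsistency noted in the comment above, using the same `_weighted_percentile(array, sample_weight)` call as in `fit`: for an even number of equally weighted samples, `np.median` averages the two middle values, while this version's `_weighted_percentile` should pick the first value whose cumulative weight reaches half the total."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "arr = np.array([1.0, 2.0, 3.0, 4.0])\n",
    "# np.median averages the two middle elements: (2 + 3) / 2 = 2.5\n",
    "print(np.median(arr))\n",
    "# _weighted_percentile is expected to return the lower middle element (2.0)\n",
    "print(_weighted_percentile(arr, np.ones(arr.shape[0])))"
   ]
  },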
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "X, y = load_boston(return_X_y=True)\n",
    "clf1 = GradientBoostingRegressor().fit(X, y)\n",
    "clf2 = skGradientBoostingRegressor(init=\"zero\", loss=\"lad\", presort=False, random_state=0).fit(X, y)\n",
    "assert np.allclose(clf1.feature_importances_, clf2.feature_importances_)\n",
    "pred1 = clf1.predict(X)\n",
    "pred2 = clf2.predict(X)\n",
    "assert np.allclose(pred1, pred2)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Implementation 3\n",
    "- similar to scikit-learn loss=\"huber\""
   ]
  },
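  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For Huber loss with threshold $\\gamma$,\n",
    "\n",
    "$$L(y, F) = \\begin{cases} \\tfrac{1}{2}\\,(y - F)^2 & \\text{if } |y - F| \\le \\gamma \\\\ \\gamma\\,\\bigl(|y - F| - \\tfrac{1}{2}\\,\\gamma\\bigr) & \\text{otherwise,} \\end{cases}$$\n",
    "\n",
    "the negative gradient is the residual $y - F$ clipped to $[-\\gamma, \\gamma]$. Here $\\gamma$ is re-estimated each iteration as the `alpha`-quantile of the absolute residuals, and each leaf value is set to the leaf median of the residuals plus the mean of the $\\gamma$-clipped deviations from that median, mirroring scikit-learn's one-step leaf update for this loss."
   ]
  },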
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "class GradientBoostingRegressor():\n",
    "    def __init__(self, learning_rate=0.1, n_estimators=100, max_depth=3,\n",
    "                 random_state=0, alpha=0.9):\n",
    "        self.learning_rate = learning_rate\n",
    "        self.n_estimators = n_estimators\n",
    "        self.max_depth = max_depth\n",
    "        self.random_state = random_state\n",
    "        self.alpha = alpha\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        self.n_features_ = X.shape[1]\n",
    "        self.estimators_ = np.empty((self.n_estimators, 1), dtype=np.object)\n",
    "        raw_predictions = np.zeros(X.shape[0])\n",
    "        rng = np.random.RandomState(0)\n",
    "        for i in range(self.n_estimators):\n",
    "            residual = np.zeros(X.shape[0])\n",
    "            diff = y - raw_predictions\n",
    "            gamma = _weighted_percentile(np.abs(diff), np.ones(diff.shape[0]), self.alpha * 100)\n",
    "            gamma_mask = np.abs(diff) <= gamma\n",
    "            residual[gamma_mask] = diff[gamma_mask]\n",
    "            residual[~gamma_mask] = gamma * np.sign(diff[~gamma_mask])\n",
    "            tree = DecisionTreeRegressor(criterion=\"friedman_mse\", max_depth=self.max_depth,\n",
    "                                         random_state=rng)\n",
    "            tree.fit(X, residual)\n",
    "            terminal_regions = tree.apply(X)\n",
    "            for leaf in np.where(tree.tree_.children_left == -1)[0]:\n",
    "                cur = np.where(terminal_regions == leaf)[0]\n",
    "                diff = y[cur] - raw_predictions[cur]\n",
    "                # scikit-learn uses _weightef_percentile, which is inconsistent with np.median\n",
    "                median = _weighted_percentile(diff, np.ones(diff.shape[0]))\n",
    "                diff_minus_median = diff - median\n",
    "                tree.tree_.value[leaf, 0, 0] = median + np.mean(np.sign(diff_minus_median)\n",
    "                                                                * np.minimum(np.abs(diff_minus_median), gamma))\n",
    "            raw_predictions += self.learning_rate * tree.tree_.value[:, 0, 0][terminal_regions]\n",
    "            self.estimators_[i, 0] = tree\n",
    "        return self\n",
    "\n",
    "    def predict(self, X):\n",
    "        raw_predictions = np.zeros(X.shape[0])\n",
    "        for i in range(self.n_estimators):\n",
    "            raw_predictions += self.learning_rate * self.estimators_[i, 0].predict(X)\n",
    "        return raw_predictions\n",
    "\n",
    "    @property\n",
    "    def feature_importances_(self):\n",
    "        all_importances = np.zeros(self.n_features_)\n",
    "        for i in range(self.n_estimators):\n",
    "            all_importances += self.estimators_[i, 0].tree_.compute_feature_importances(normalize=False)\n",
    "        return all_importances / np.sum(all_importances)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "X, y = load_boston(return_X_y=True)\n",
    "clf1 = GradientBoostingRegressor().fit(X, y)\n",
    "clf2 = skGradientBoostingRegressor(init=\"zero\", loss=\"huber\", presort=False, random_state=0).fit(X, y)\n",
    "assert np.allclose(clf1.feature_importances_, clf2.feature_importances_)\n",
    "pred1 = clf1.predict(X)\n",
    "pred2 = clf2.predict(X)\n",
    "assert np.allclose(pred1, pred2)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "dev",
   "language": "python",
   "name": "dev"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}