{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "from scipy.special import expit, logsumexp\n",
    "from sklearn.datasets import load_breast_cancer, load_iris\n",
    "from sklearn.tree import DecisionTreeRegressor\n",
    "from sklearn.ensemble import GradientBoostingClassifier as skGradientBoostingClassifier"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "class GradientBoostingClassifier():\n",
    "    \"\"\"Gradient boosting classifier with the (multinomial) deviance loss.\n",
    "\n",
    "    Re-implements the core of scikit-learn's GradientBoostingClassifier:\n",
    "    binary problems fit one regression tree per boosting round, K-class\n",
    "    problems fit K one-vs-rest trees per round, and every leaf value is\n",
    "    replaced by a single Newton-Raphson step on the deviance loss\n",
    "    (Friedman's TreeBoost update).\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    learning_rate : float, shrinkage applied to each tree's contribution.\n",
    "    n_estimators : int, number of boosting rounds.\n",
    "    max_depth : int, maximum depth of the individual regression trees.\n",
    "    random_state : int, seed for the underlying regression trees.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, learning_rate=0.1, n_estimators=100, max_depth=3, random_state=0):\n",
    "        self.learning_rate = learning_rate\n",
    "        self.n_estimators = n_estimators\n",
    "        self.max_depth = max_depth\n",
    "        self.random_state = random_state\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        \"\"\"Fit the ensemble on X of shape (n_samples, n_features) and labels y.\"\"\"\n",
    "        self.n_features_ = X.shape[1]\n",
    "        self.classes_, y = np.unique(y, return_inverse=True)\n",
    "        self.n_classes_ = len(self.classes_)\n",
    "        # Binary classification only needs the score of the positive class,\n",
    "        # so one tree per round suffices; multiclass fits one per class.\n",
    "        if self.n_classes_ == 2:\n",
    "            n_effective_classes = 1\n",
    "        else:\n",
    "            n_effective_classes = self.n_classes_\n",
    "        # np.object was removed in NumPy 1.24; the builtin works everywhere.\n",
    "        self.estimators_ = np.empty((self.n_estimators, n_effective_classes), dtype=object)\n",
    "        raw_predictions = np.zeros((X.shape[0], n_effective_classes))\n",
    "        # Was hard-coded to RandomState(0), silently ignoring random_state.\n",
    "        rng = np.random.RandomState(self.random_state)\n",
    "        for i in range(self.n_estimators):\n",
    "            # Residuals of one round are all computed from the predictions\n",
    "            # as they stood at the start of the round.\n",
    "            raw_predictions_copy = raw_predictions.copy()\n",
    "            for j in range(n_effective_classes):\n",
    "                # residual = negative gradient of the deviance loss\n",
    "                if n_effective_classes == 1:\n",
    "                    # binary classification\n",
    "                    y_enc = y\n",
    "                    residual = y_enc - expit(raw_predictions_copy.ravel())\n",
    "                else:\n",
    "                    # multiclass: one-vs-rest encoding against softmax probs\n",
    "                    # (np.int was removed in NumPy 1.24; use the builtin)\n",
    "                    y_enc = (y == j).astype(int)\n",
    "                    residual = y_enc - np.nan_to_num(np.exp(raw_predictions_copy[:, j] \n",
    "                                                            - logsumexp(raw_predictions_copy, axis=1)))\n",
    "                tree = DecisionTreeRegressor(criterion=\"friedman_mse\", max_depth=self.max_depth,\n",
    "                                             random_state=rng)\n",
    "                tree.fit(X, residual)\n",
    "                # Replace each leaf's mean with a one-step Newton estimate of\n",
    "                # the optimal constant for the deviance loss.\n",
    "                terminal_regions = tree.apply(X)\n",
    "                for leaf in np.where(tree.tree_.children_left == -1)[0]:\n",
    "                    cur = np.where(terminal_regions == leaf)[0]\n",
    "                    numerator = np.sum(residual[cur])\n",
    "                    if n_effective_classes > 1:\n",
    "                        numerator *= (self.n_classes_ - 1) / self.n_classes_\n",
    "                    denominator = np.sum((y_enc[cur] - residual[cur]) * (1 - y_enc[cur] + residual[cur]))\n",
    "                    # Guard against division by (near) zero in pure leaves.\n",
    "                    if np.abs(denominator) < 1e-150:\n",
    "                        tree.tree_.value[leaf, 0, 0] = 0\n",
    "                    else:\n",
    "                        tree.tree_.value[leaf, 0, 0] = numerator / denominator\n",
    "                raw_predictions[:, j] += self.learning_rate * tree.tree_.value[:, 0, 0][terminal_regions]\n",
    "                self.estimators_[i, j] = tree\n",
    "        return self\n",
    "\n",
    "    def _predict(self, X):\n",
    "        # Sum of the shrunken contributions of every fitted tree.\n",
    "        raw_predictions = np.zeros((X.shape[0], self.estimators_.shape[1]))\n",
    "        for i in range(self.estimators_.shape[0]):\n",
    "            for j in range(self.estimators_.shape[1]):\n",
    "                raw_predictions[:, j] += self.learning_rate * self.estimators_[i, j].predict(X)\n",
    "        return raw_predictions\n",
    "\n",
    "    def decision_function(self, X):\n",
    "        \"\"\"Raw scores: shape (n_samples,) if binary, else (n_samples, n_classes).\"\"\"\n",
    "        prob = self._predict(X)\n",
    "        if self.n_classes_ == 2:\n",
    "            return prob.ravel()\n",
    "        else:\n",
    "            return prob\n",
    "\n",
    "    def predict_proba(self, X):\n",
    "        \"\"\"Class probabilities via sigmoid (binary) or softmax (multiclass).\"\"\"\n",
    "        scores = self.decision_function(X)\n",
    "        if len(scores.shape) == 1:\n",
    "            prob = expit(scores)\n",
    "            prob = np.vstack((1 - prob, prob)).T\n",
    "        else:\n",
    "            prob = np.nan_to_num(np.exp(scores - logsumexp(scores, axis=1)[:, np.newaxis]))\n",
    "        return prob\n",
    "\n",
    "    def predict(self, X):\n",
    "        \"\"\"Most probable class label for each sample.\"\"\"\n",
    "        scores = self.decision_function(X)\n",
    "        if len(scores.shape) == 1:\n",
    "            # binary: a positive raw score means the positive class\n",
    "            indices = (scores > 0).astype(int)\n",
    "        else:\n",
    "            indices = np.argmax(scores, axis=1)\n",
    "        return self.classes_[indices]\n",
    "\n",
    "    @property\n",
    "    def feature_importances_(self):\n",
    "        \"\"\"Impurity-based importances summed over all trees, normalized to 1.\"\"\"\n",
    "        all_importances = np.zeros(self.n_features_)\n",
    "        for i in range(self.estimators_.shape[0]):\n",
    "            for j in range(self.estimators_.shape[1]):\n",
    "                all_importances += self.estimators_[i, j].tree_.compute_feature_importances(normalize=False)\n",
    "        return all_importances / np.sum(all_importances)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# binary classification\n",
    "X, y = load_breast_cancer(return_X_y=True)\n",
    "clf1 = GradientBoostingClassifier(n_estimators=10).fit(X, y)\n",
    "# presort was removed in scikit-learn 0.24 and would now raise a TypeError;\n",
    "# init=\"zero\" matches our zero-initialized raw predictions.\n",
    "clf2 = skGradientBoostingClassifier(n_estimators=10, init=\"zero\", random_state=0).fit(X, y)\n",
    "assert np.allclose(clf1.feature_importances_, clf2.feature_importances_)\n",
    "prob1 = clf1.decision_function(X)\n",
    "prob2 = clf2.decision_function(X)\n",
    "assert np.allclose(prob1, prob2)\n",
    "prob1 = clf1.predict_proba(X)\n",
    "prob2 = clf2.predict_proba(X)\n",
    "assert np.allclose(prob1, prob2)\n",
    "pred1 = clf1.predict(X)\n",
    "pred2 = clf2.predict(X)\n",
    "assert np.array_equal(pred1, pred2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# multiclass classification\n",
    "X, y = load_iris(return_X_y=True)\n",
    "clf1 = GradientBoostingClassifier(n_estimators=10).fit(X, y)\n",
    "# presort was removed in scikit-learn 0.24 and would now raise a TypeError;\n",
    "# init=\"zero\" matches our zero-initialized raw predictions.\n",
    "clf2 = skGradientBoostingClassifier(n_estimators=10, init=\"zero\", random_state=0).fit(X, y)\n",
    "assert np.allclose(clf1.feature_importances_, clf2.feature_importances_)\n",
    "prob1 = clf1.decision_function(X)\n",
    "prob2 = clf2.decision_function(X)\n",
    "assert np.allclose(prob1, prob2)\n",
    "prob1 = clf1.predict_proba(X)\n",
    "prob2 = clf2.predict_proba(X)\n",
    "assert np.allclose(prob1, prob2)\n",
    "pred1 = clf1.predict(X)\n",
    "pred2 = clf2.predict(X)\n",
    "assert np.array_equal(pred1, pred2)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "dev",
   "language": "python",
   "name": "dev"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}