{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "from sklearn.datasets import load_boston\n", "from sklearn.tree import DecisionTreeRegressor\n", "from sklearn.ensemble import RandomForestRegressor as skRandomForestRegressor" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "def r2_score(y_true, y_pred):\n", " numerator = np.sum((y_true - y_pred) ** 2)\n", " denominator = np.sum((y_true - np.mean(y_true)) ** 2)\n", " return 1 - numerator / denominator" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "class RandomForestRegressor():\n", " def __init__(self, n_estimators=100, max_depth=None, max_features=\"auto\",\n", " oob_score=False, random_state=0):\n", " self.n_estimators = n_estimators\n", " self.max_depth = max_depth\n", " self.max_features = max_features\n", " self.oob_score = oob_score\n", " self.random_state = random_state\n", "\n", " def fit(self, X, y):\n", " self.n_features_ = X.shape[1]\n", " MAX_INT = np.iinfo(np.int32).max\n", " rng = np.random.RandomState(self.random_state)\n", " self.estimators_ = []\n", " for i in range(self.n_estimators):\n", " est = DecisionTreeRegressor(max_depth=self.max_depth,\n", " max_features=self.max_features,\n", " random_state=rng.randint(MAX_INT))\n", " sample_rng = np.random.RandomState(est.random_state)\n", " sample_indices = sample_rng.randint(0, X.shape[0], X.shape[0])\n", " sample_counts = np.bincount(sample_indices, minlength=X.shape[0])\n", " est.fit(X, y, sample_weight=sample_counts)\n", " self.estimators_.append(est)\n", " if self.oob_score:\n", " self._set_oob_score(X, y)\n", " return self\n", "\n", " def _set_oob_score(self, X, y):\n", " predictions = np.zeros(X.shape[0])\n", " n_predictions = np.zeros(X.shape[0])\n", " for i in range(self.n_estimators):\n", " sample_rng = np.random.RandomState(self.estimators_[i].random_state)\n", " sample_indices = sample_rng.randint(0, X.shape[0], X.shape[0])\n", " mask = np.ones(X.shape[0], dtype=bool)\n", " mask[sample_indices] = False\n", " predictions[mask] += self.estimators_[i].predict(X[mask])\n", " n_predictions[mask] += 1\n", " predictions /= n_predictions\n", " self.oob_prediction_ = predictions\n", " self.oob_score_ = r2_score(y, predictions)\n", "\n", " def predict(self, X):\n", " pred = np.zeros(X.shape[0])\n", " for i in range(self.n_estimators):\n", " pred += self.estimators_[i].predict(X)\n", " pred /= self.n_estimators\n", " return pred\n", "\n", " @property\n", " def feature_importances_(self):\n", " all_importances = np.zeros(self.n_features_)\n", " for i in range(self.n_estimators):\n", " all_importances += self.estimators_[i].feature_importances_\n", " all_importances /= self.n_estimators \n", " return all_importances / np.sum(all_importances)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "X, y = load_boston(return_X_y=True)\n", "clf1 = RandomForestRegressor(random_state=0, oob_score=True).fit(X, y)\n", "clf2 = skRandomForestRegressor(random_state=0, oob_score=True).fit(X, y)\n", "pred1 = clf1.predict(X)\n", "pred2 = clf2.predict(X)\n", "assert np.allclose(pred1, pred2)\n", "assert np.allclose(clf1.oob_prediction_, clf2.oob_prediction_)\n", "assert np.allclose(clf1.oob_score_, clf2.oob_score_)\n", "assert np.allclose(clf1.feature_importances_, clf2.feature_importances_)" ] } ], "metadata": { "kernelspec": { "display_name": "dev", "language": "python", "name": "dev" }, 
"language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.3" } }, "nbformat": 4, "nbformat_minor": 2 }