{
 "cells": [
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# ML logistic regression - assignment 2\n",
    "\n",
    "## Logistic Regression from scratch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "class MyOwnLogisticRegression:\n",
    "    \"\"\"Binary logistic regression trained from scratch with batch gradient descent.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    learning_rate : float, default 0.001\n",
    "        Step size for the gradient-descent updates.\n",
    "    n_iters : int, default 1000\n",
    "        Number of full-batch gradient-descent iterations.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, learning_rate=0.001, n_iters=1000):\n",
    "        self.lr = learning_rate\n",
    "        self.n_iters = n_iters\n",
    "        self.weights = None  # learned coefficients, set by fit()\n",
    "        self.bias = None  # learned intercept, set by fit()\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        \"\"\"Fit the model on X of shape (n_samples, n_features) and 0/1 labels y.\"\"\"\n",
    "        n_samples, n_features = X.shape\n",
    "\n",
    "        # init parameters\n",
    "        self.weights = np.zeros(n_features)\n",
    "        self.bias = 0\n",
    "\n",
    "        # gradient descent\n",
    "        for _ in range(self.n_iters):\n",
    "            # approximate y with linear combination of weights and x, plus bias\n",
    "            linear_model = np.dot(X, self.weights) + self.bias\n",
    "            # apply sigmoid function to map scores into (0, 1) probabilities\n",
    "            y_predicted = self._sigmoid(linear_model)\n",
    "\n",
    "            # gradients of the cross-entropy loss w.r.t. weights and bias\n",
    "            dw = (1 / n_samples) * np.dot(X.T, (y_predicted - y))\n",
    "            db = (1 / n_samples) * np.sum(y_predicted - y)\n",
    "            # update parameters (step against the gradient)\n",
    "            self.weights -= self.lr * dw\n",
    "            self.bias -= self.lr * db\n",
    "\n",
    "    def predict(self, X):\n",
    "        \"\"\"Return hard 0/1 class predictions for X using a 0.5 probability threshold.\"\"\"\n",
    "        linear_model = np.dot(X, self.weights) + self.bias\n",
    "        y_predicted = self._sigmoid(linear_model)\n",
    "        y_predicted_cls = [1 if i > 0.5 else 0 for i in y_predicted]\n",
    "        return np.array(y_predicted_cls)\n",
    "\n",
    "    def _sigmoid(self, x):\n",
    "        \"\"\"Logistic function 1 / (1 + exp(-x)), applied element-wise.\"\"\"\n",
    "        return 1 / (1 + np.exp(-x))"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.16"
  },
  "vscode": {
   "interpreter": {
    "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}