{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "from sklearn.metrics import fbeta_score as skfbeta_score"
   ]
  },
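  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A from-scratch reimplementation of scikit-learn's `fbeta_score`, verified against the library below. For a single label, with precision $P$ and recall $R$,\n",
    "\n",
    "$$F_\\beta = (1 + \\beta^2) \\, \\frac{P \\cdot R}{\\beta^2 P + R}$$\n",
    "\n",
    "where $P = \\mathrm{TP} / (\\mathrm{TP} + \\mathrm{FP})$ and $R = \\mathrm{TP} / (\\mathrm{TP} + \\mathrm{FN})$. Scores with a zero denominator are defined as 0, matching scikit-learn's convention, and the implementation assumes labels are the consecutive integers $0, \\ldots, n_{\\mathrm{labels}} - 1$."
   ]
  },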
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "def fbeta_score(y_true, y_pred, beta, average):\n",
    "    n_labels = len(set(y_true) | set(y_pred))\n",
    "    true_sum = np.bincount(y_true, minlength=n_labels)\n",
    "    pred_sum = np.bincount(y_pred, minlength=n_labels)\n",
    "    tp = np.bincount(y_true[y_true == y_pred], minlength=n_labels)\n",
    "    if average == \"binary\":\n",
    "        tp = np.array([tp[1]])\n",
    "        true_sum = np.array([true_sum[1]])\n",
    "        pred_sum = np.array([pred_sum[1]])\n",
    "    elif average == \"micro\":\n",
    "        tp = np.array([np.sum(tp)])\n",
    "        true_sum = np.array([np.sum(true_sum)])\n",
    "        pred_sum = np.array([np.sum(pred_sum)])\n",
    "    precision = np.zeros(len(pred_sum))\n",
    "    mask = pred_sum != 0\n",
    "    precision[mask] = tp[mask] / pred_sum[mask]\n",
    "    recall = np.zeros(len(true_sum))\n",
    "    mask = true_sum != 0\n",
    "    recall[mask] = tp[mask] / true_sum[mask]\n",
    "    denom = (beta ** 2) * precision + recall\n",
    "    denom[denom == 0.] = 1\n",
    "    fscore = (1 + beta ** 2) * precision * recall / denom\n",
    "    if average == \"weighted\":\n",
    "        fscore = np.average(fscore, weights=true_sum)\n",
    "    elif average is not None:\n",
    "        fscore = np.mean(fscore)\n",
    "    return fscore"
   ]
  },
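  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick hand-worked sanity check before the randomized tests (the values here are illustrative, chosen for this notebook): with 2 true positives, 3 predicted positives, and 3 actual positives, $P = R = 2/3$, and since $F_\\beta = P$ whenever $P = R$, the expected score is $2/3$ for any $\\beta$."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hand-worked binary example: tp = 2, pred_sum = 3, true_sum = 3,\n",
    "# so precision = recall = 2/3 and F0.5 = 2/3.\n",
    "y_true = np.array([0, 1, 1, 0, 1])\n",
    "y_pred = np.array([0, 1, 0, 1, 1])\n",
    "assert np.isclose(fbeta_score(y_true, y_pred, beta=0.5, average=\"binary\"), 2 / 3)"
   ]
  },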
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# binary\n",
    "for i in range(10):\n",
    "    rng = np.random.RandomState(i)\n",
    "    y_true = rng.randint(2, size=10)\n",
    "    y_pred = rng.randint(2, size=10)\n",
    "    score1 = fbeta_score(y_true, y_pred, beta=0.5, average=\"binary\")\n",
    "    score2 = skfbeta_score(y_true, y_pred, beta=0.5, average=\"binary\")\n",
    "    assert np.isclose(score1, score2)"
   ]
  },
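  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For multiclass inputs, all four averaging modes are checked: `None` returns the per-label scores, `\"micro\"` pools the tp/fp/fn counts over all labels before computing a single score, `\"macro\"` takes the unweighted mean of per-label scores, and `\"weighted\"` weights that mean by each label's support. The `UndefinedMetricWarning`s in the output below come from scikit-learn when a label has no true or no predicted samples; both implementations set those scores to 0."
   ]
  },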
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "d:\\github\\scikit-learn\\sklearn\\metrics\\classification.py:1430: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no true samples.\n",
      "  'recall', 'true', average, warn_for)\n",
      "d:\\github\\scikit-learn\\sklearn\\metrics\\classification.py:1428: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n"
     ]
    }
   ],
   "source": [
    "# multiclass\n",
    "for i in range(10):\n",
    "    for average in (None, \"micro\", \"macro\", \"weighted\"):\n",
    "        rng = np.random.RandomState(i)\n",
    "        y_true = rng.randint(3, size=10)\n",
    "        y_pred = rng.randint(3, size=10)\n",
    "        score1 = fbeta_score(y_true, y_pred, beta=0.5, average=average)\n",
    "        score2 = skfbeta_score(y_true, y_pred, beta=0.5, average=average)\n",
    "        if average is None:\n",
    "            assert np.array_equal(score1, score2)\n",
    "        else:\n",
    "            assert np.isclose(score1, score2)"
   ]
  },
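  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "One property worth spot-checking (a known identity, not something exercised by the tests above): in single-label multiclass classification every error is simultaneously a false positive for one label and a false negative for another, so micro-averaged precision, recall, and hence $F_\\beta$ for any $\\beta$ all equal plain accuracy."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Micro-averaged F-beta equals accuracy in the single-label multiclass setting.\n",
    "rng = np.random.RandomState(0)\n",
    "y_true = rng.randint(3, size=100)\n",
    "y_pred = rng.randint(3, size=100)\n",
    "accuracy = np.mean(y_true == y_pred)\n",
    "assert np.isclose(fbeta_score(y_true, y_pred, beta=0.5, average=\"micro\"), accuracy)"
   ]
  }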
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "dev",
   "language": "python",
   "name": "dev"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}