{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#hide\n", "# default_exp conda" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Create conda packages\n", "\n", "> Pure python packages created from nbdev settings.ini" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "from fastcore.script import *\n", "from fastcore.all import *\n", "from fastrelease.core import find_config\n", "\n", "import yaml,subprocess,glob\n", "from copy import deepcopy\n", "try: from packaging.version import parse\n", "except ImportError: from pip._vendor.packaging.version import parse\n", "\n", "_PYPI_URL = 'https://pypi.org/pypi/'" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def pypi_json(s):\n", " \"Dictionary decoded JSON for PYPI path `s`\"\n", " return urljson(f'{_PYPI_URL}{s}/json')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def latest_pypi(name):\n", " \"Latest version of `name` on pypi\"\n", " return max(parse(r) for r,o in pypi_json(name)['releases'].items()\n", " if not parse(r).is_prerelease and not o[0]['yanked'])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def pypi_details(name):\n", " \"Version, URL, and SHA256 for `name` from pypi\"\n", " ver = str(latest_pypi(name))\n", " pypi = pypi_json(f'{name}/{ver}')\n", " info = pypi['info']\n", " rel = [o for o in pypi['urls'] if o['packagetype']=='sdist'][0]\n", " return ver,rel['url'],rel['digests']['sha256']" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def update_meta(name,src_path,dest_path):\n", " \"Update VERSION and SHA256 in meta.yaml from pypi\"\n", " src_path,dest_path = Path(src_path),Path(dest_path)\n", " txt = src_path.read_text()\n", " ver,url,sha = pypi_details(name)\n", " dest_path.write_text(txt.replace('VERSION',ver).replace('SHA256',sha))\n", " return ver" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def conda_output_path(name,ver):\n", " \"Output path for conda build\"\n", " pre = run('conda info --root').strip()\n", " s = f\"{pre}/conda-bld/*/{name}-{ver}-py\"\n", " res = first(glob.glob(f\"{s}_0.tar.bz2\"))\n", " if res: return res\n", " pyver = strcat(sys.version_info[:2])\n", " return first(glob.glob(f\"{s}{pyver}_0.tar.bz2\"))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def _pip_conda_meta(name, path):\n", " ver = str(latest_pypi(name))\n", " pypi = pypi_json(f'{name}/{ver}')\n", " info = pypi['info']\n", " rel = [o for o in pypi['urls'] if o['packagetype']=='sdist'][0]\n", " reqs = ['pip', 'python', 'packaging']\n", "\n", " # Work around conda build bug - 'package' and 'source' must be first\n", " d1 = {\n", " 'package': {'name': name, 'version': ver},\n", " 'source': {'url':rel['url'], 'sha256':rel['digests']['sha256']}\n", " }\n", " d2 = {\n", " 'build': {'number': '0', 'noarch': 'python',\n", " 'script': '{{ PYTHON }} -m pip install . -vv'},\n", " 'test': {'imports': [name]},\n", " 'requirements': {'host':reqs, 'run':reqs},\n", " 'about': {'license': info['license'], 'home': info['project_url'], 'summary': info['summary']}\n", " }\n", " return d1,d2" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def _write_yaml(path, name, d1, d2):\n", " path = Path(path)\n", " p = path/name\n", " p.mkdir(exist_ok=True, parents=True)\n", " yaml.SafeDumper.ignore_aliases = lambda *args : True\n", " with (p/'meta.yaml').open('w') as f:\n", " yaml.safe_dump(d1, f)\n", " yaml.safe_dump(d2, f)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def write_pip_conda_meta(name, path='conda'):\n", " \"Writes a `meta.yaml` file for `name` to the `conda` directory of the current directory\"\n", " _write_yaml(path, name, *_pip_conda_meta(name))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def _get_conda_meta():\n", " cfg,cfg_path = find_config()\n", " name,ver = cfg.get('lib_name'),cfg.get('version')\n", " url = cfg.get('doc_host') or cfg.get('git_url')\n", "\n", " reqs = ['pip', 'python', 'packaging']\n", " if cfg.get('requirements'): reqs += cfg.get('requirements').split()\n", " if cfg.get('conda_requirements'): reqs += cfg.get('conda_requirements').split()\n", "\n", " pypi = pypi_json(f'{name}/{ver}')\n", " rel = [o for o in pypi['urls'] if o['packagetype']=='sdist'][0]\n", "\n", " # Work around conda build bug - 'package' and 'source' must be first\n", " d1 = {\n", " 'package': {'name': name, 'version': ver},\n", " 'source': {'url':rel['url'], 'sha256':rel['digests']['sha256']}\n", " }\n", "\n", " d2 = {\n", " 'build': {'number': '0', 'noarch': 'python',\n", " 'script': '{{ PYTHON }} -m pip install . -vv'},\n", " 'requirements': {'host':reqs, 'run':reqs},\n", " 'test': {'imports': [cfg.get('lib_path')]},\n", " 'about': {\n", " 'license': 'Apache Software',\n", " 'license_family': 'APACHE',\n", " 'home': url, 'doc_url': url, 'dev_url': url,\n", " 'summary': cfg.get('description')\n", " },\n", " 'extra': {'recipe-maintainers': [cfg.get('user')]}\n", " }\n", " return name,d1,d2" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def write_conda_meta(path='conda'):\n", " \"Writes a `meta.yaml` file to the `conda` directory of the current directory\"\n", " _write_yaml(path, *_get_conda_meta())" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "This function is used in the `fastrelease_conda_package` CLI command.\n", "\n", "**NB**: you need to first of all upload your package to PyPi, before creating the conda package." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "def anaconda_upload(name, version, user=None, token=None, env_token=None):\n", " \"Update `name` `version` to anaconda\"\n", " user = f'-u {user} ' if user else ''\n", " if env_token: token = os.getenv(env_token)\n", " token = f'-t {token} ' if token else ''\n", " return run(f'anaconda {token} upload {user} {conda_output_path(name,version)} --skip-existing')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "@call_parse\n", "def fastrelease_conda_package(path:Param(\"Path where package will be created\", str)='conda',\n", " do_build:Param(\"Run `conda build` step\", bool_arg)=True,\n", " build_args:Param(\"Additional args (as str) to send to `conda build`\", str)='',\n", " skip_upload:Param(\"Skip `anaconda upload` step\", store_true)=False,\n", " mambabuild:Param(\"Use `mambabuild` (requires `boa`)\", store_true)=False,\n", " upload_user:Param(\"Optional user to upload package to\")=None):\n", " \"Create a `meta.yaml` file ready to be built into a package, and optionally build and upload it\"\n", " write_conda_meta(path)\n", " cfg,cfg_path = find_config()\n", " out = f\"Done. Next steps:\\n```\\`cd {path}\\n\"\"\"\n", " name,lib_path = cfg.get('lib_name'),cfg.get('lib_path')\n", " loc = conda_output_path(lib_path, cfg.get('version'))\n", " out_upl = f\"anaconda upload {loc}\"\n", " build = 'mambabuild' if mambabuild else 'build'\n", " if not do_build: return print(f\"{out}conda {build} .\\n{out_upl}\\n```\")\n", "\n", " os.chdir(path)\n", " res = run(f\"conda {build} --no-anaconda-upload {build_args} {name}\")\n", " if 'anaconda upload' not in res: return print(f\"{res}\\n\\Failed. Check auto-upload not set in .condarc. Try `--do_build False`.\")\n", " return anaconda_upload(lib_path, cfg.get('version'))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "To build and upload a conda package, cd to the root of your repo, and then:\n", "\n", " fastrelease_conda_package\n", "\n", "Or to do things more manually:\n", "\n", "```\n", "fastrelease_conda_package --do_build false\n", "cd conda\n", "conda build --no-anaconda-upload --output-folder build {name}\n", "anaconda upload build/noarch/{name}-{ver}-*.tar.bz2\n", "```\n", "\n", "Add `--debug` to the `conda build command` to debug any problems that occur. Note that the build step takes a few minutes. Add `-u {org_name}` to the `anaconda upload` command if you wish to upload to an organization, or pass `upload_user` to `fastrelease_conda_package`.\n", "\n", "**NB**: you need to first of all upload your package to PyPi, before creating the conda package." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#export\n", "@call_parse\n", "def chk_conda_rel(nm:Param('Package name on pypi', str),\n", " apkg:Param('Anaconda Package (defaults to {nm})', str)=None,\n", " channel:Param('Anaconda Channel', str)='fastai',\n", " force:Param('Always return github tag', store_true)=False):\n", " \"Prints GitHub tag only if a newer release exists on Pypi compared to an Anaconda Repo.\"\n", " if not apkg: apkg=nm\n", " condavs = L(loads(run(f'mamba repoquery search {apkg} -c {channel} --json'))['result']['pkgs'])\n", " condatag = condavs.attrgot('version').map(parse)\n", " pypitag = latest_pypi(nm)\n", " if force or not condatag or pypitag > max(condatag): return f'{pypitag}'" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Export-" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Converted 00_core.ipynb.\n", "Converted 01_conda.ipynb.\n", "Converted index.ipynb.\n" ] } ], "source": [ "#hide\n", "from nbdev.export import notebook2script\n", "notebook2script()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" } }, "nbformat": 4, "nbformat_minor": 4 }