{ "cells": [ { "cell_type": "markdown", "id": "eligible-induction", "metadata": {}, "source": [ "# OceanParcels recipes\n", "\n", "This notebook contains a complete workflow of using OceanParcels simulations with SalishSeaCast and related model configurations.\n", "\n", "***" ] }, { "cell_type": "code", "execution_count": 1, "id": "rocky-tradition", "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "import xarray as xr\n", "import os\n", "from matplotlib import pyplot as plt, animation\n", "from datetime import datetime, timedelta\n", "from dateutil.parser import parse\n", "from IPython.display import HTML\n", "from salishsea_tools import nc_tools, places\n", "\n", "from parcels import FieldSet, Field, VectorField, ParticleSet, JITParticle, ErrorCode, AdvectionRK4, AdvectionRK4_3D\n", "\n", "%matplotlib inline" ] }, { "cell_type": "code", "execution_count": 2, "id": "handy-occurrence", "metadata": {}, "outputs": [], "source": [ "plt.rcParams['font.size'] = 12" ] }, { "cell_type": "markdown", "id": "reduced-example", "metadata": {}, "source": [ "***\n", "\n", "### Functions\n", "\n", "#### Fieldset functions\n", "\n", "OceanParcels loads forcing data on-the-fly, and handles all of the interpolation. The NEMO C-grid interpolation is especially awesome (for more info see [Delandmeter and van Sebille 2019, *Geosci. Model Dev.*](https://gmd.copernicus.org/articles/12/3571/2019/gmd-12-3571-2019.html)). The following functions handle the syntax of defining the forcing files into a `parcels.FieldSet` object.\n", "\n", "For more context, see the OceanParcels [Nemo 3D Tutorial](https://nbviewer.jupyter.org/github/OceanParcels/parcels/blob/master/parcels/examples/tutorial_nemo_3D.ipynb) and [run_northsea_mp.py](https://github.com/OceanParcels/Parcelsv2.0PaperNorthSeaScripts/blob/master/run_northsea_mp.py) from the GitHub repo for Delandmeter and van Sebille 2019." 
] }, { "cell_type": "code", "execution_count": 10, "id": "duplicate-chart", "metadata": {}, "outputs": [], "source": [ "def fieldset_from_nemo(daterange, coords, flat=True):\n", "    \"\"\"Generate a fieldset from hourly SalishSeaCast forcing fields\n", "    over daterange.\n", "    \"\"\"\n", "\n", "    # Generate sequential list of forcing file prefixes\n", "    prefixes = [\n", "        nc_tools.get_hindcast_prefix(daterange[0] + timedelta(days=d))\n", "        for d in range(np.diff(daterange)[0].days + 1)\n", "    ]\n", "\n", "    # Predefine fieldset argument dictionaries\n", "    filenames, variables, dimensions = {}, {}, {}\n", "\n", "    # Define dict fields for each variable\n", "    for var, name in zip(['U', 'V', 'W'], ['vozocrtx', 'vomecrty', 'vovecrtz']):\n", "\n", "        # Exclude vertical velocity if 2D\n", "        if flat and var == 'W':\n", "            break\n", "\n", "        # Dict of filenames containing the coordinate and forcing variables\n", "        datafiles = [prefix + f'_grid_{var}.nc' for prefix in prefixes]\n", "        filenames[var] = {'lon': coords, 'lat': coords, 'data': datafiles}\n", "\n", "        # NEMO variable name\n", "        variables[var] = name\n", "\n", "        # Dict of NEMO coordinate names (f-points)\n", "        dimensions[var] = {'lon': 'glamf', 'lat': 'gphif', 'time': 'time_counter'}\n", "\n", "        # Add depth fields if 3D (f-points are on W grid)\n", "        if not flat:\n", "            filenames[var]['depth'] = prefixes[0] + '_grid_W.nc'\n", "            dimensions[var]['depth'] = 'depthw'\n", "\n", "    # Load NEMO forcing into fieldset\n", "    field_set = FieldSet.from_nemo(filenames, variables, dimensions, allow_time_extrapolation=True)\n", "\n", "    return field_set\n", "\n", "\n", "def append_auxiliary_forcing(fieldset, model, daterange, coords_path):\n", "    \"\"\"Append hourly GEM or WW3 forcing fields to a fieldset over daterange.\n", "    \"\"\"\n", "\n", "    # Generate sequential list of forcing files\n", "    datafiles = [\n", "        getattr(nc_tools, f'get_{model}_path')(daterange[0] + timedelta(days=d))\n", "        for d in range(np.diff(daterange)[0].days + 1)\n", "    ]\n", "\n", "    # Assign variable suffix and dimensions definitions according to model\n", "    if model == 'GEM':\n", "        suffix = '_wind'\n", "        dimensions = {'lon': 'nav_lon', 'lat': 'nav_lat', 'time': 'time_counter'}\n", "    elif model == 'WW3':\n", "        suffix = 'uss'\n", "        dimensions = {'lon': 'longitude', 'lat': 'latitude', 'time': 'time'}\n", "    else:\n", "        raise ValueError(f'Unknown surface forcing model: {model}')\n", "\n", "    # Define coordinates file path and create if necessary\n", "    # - only method I've had success with for correcting lons\n", "    coords = os.path.join(coords_path, f'{model}_grid.nc')\n", "    if not os.path.exists(coords):\n", "        with xr.open_dataset(datafiles[0]) as ds:\n", "            data_vars = [dimensions['time']] + list(ds.data_vars)\n", "            if model == 'GEM':\n", "                for key in ['lon', 'lat']:\n", "                    data_vars.remove(dimensions[key])\n", "            ds = ds.drop_vars(data_vars)\n", "            ds.update({dimensions['lon']: ds[dimensions['lon']] - 360})\n", "            ds.to_netcdf(coords)\n", "\n", "    # Filenames dict\n", "    filenames = {'lon': coords, 'lat': coords, 'data': datafiles}\n", "\n", "    # Load u velocity\n", "    u = Field.from_netcdf(\n", "        filenames, f'u{suffix}', dimensions, fieldtype='U', allow_time_extrapolation=True,\n", "    )\n", "\n", "    # Load v velocity with u grid\n", "    v = Field.from_netcdf(\n", "        filenames, f'v{suffix}', dimensions, fieldtype='V', allow_time_extrapolation=True,\n", "        grid=u.grid, dataFiles=u.dataFiles,\n", "    )\n", "\n", "    # Add velocity fields to fieldset\n", "    fieldset.add_field(u)\n", "    fieldset.add_field(v)\n", "\n",
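 "    # v reuses u's grid object and dataFiles above because both components are\n", "    # co-located on these grids; fieldtype='U'/'V' tells Parcels to convert the\n", "    # sampled velocities from m/s to degrees/s on the spherical mesh\n", "\n",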
" # Make new vector field from u and v for use in kernels\n", " u, v = [getattr(fieldset, f'{var}{suffix}') for var in ('u', 'v')]\n", " fieldset.add_vector_field(VectorField(f\"UV_{model}\", u, v))" ] }, { "cell_type": "markdown", "id": "together-music", "metadata": {}, "source": [ "#### Kernel functions\n", "\n", "Kernel functions are used to prescribe particle behavior in OceanParcels.\n", "\n", "For more context, see the OceanParcels [Simple Tutorial](https://nbviewer.jupyter.org/github/OceanParcels/parcels/blob/master/parcels/examples/parcels_tutorial.ipynb#Adding-a-custom-behaviour-kernel) and [northsea_mp_kernels.py](https://github.com/OceanParcels/Parcelsv2.0PaperNorthSeaScripts/blob/master/northsea_mp_kernels.py) from the GitHub repo for Delandmeter and van Sebille 2019, *Geosci. Model Dev.*" ] }, { "cell_type": "code", "execution_count": 11, "id": "received-sitting", "metadata": {}, "outputs": [], "source": [ "def WindDrift(particle, fieldset, time):\n", " (uw, vw) = fieldset.UV_GEM[time, particle.depth, particle.lat, particle.lon]\n", " particle.lon += uw * 0.03 * particle.dt\n", " particle.lat += vw * 0.03 * particle.dt\n", "\n", "\n", "def StokesDrift(particle, fieldset, time):\n", " (us, vs) = fieldset.UV_WW3[time, particle.depth, particle.lat, particle.lon]\n", " particle.lon += us * particle.dt\n", " particle.lat += vs * particle.dt\n", "\n", "\n", "def DeleteParticle(particle, fieldset, time):\n", " print(f'Particle {particle.id} lost !! [{particle.lon}, {particle.lat}, {particle.depth}, {particle.time}]')\n", " particle.delete()" ] }, { "cell_type": "markdown", "id": "wrong-creation", "metadata": {}, "source": [ "***\n", "\n", "### Simulations\n", "\n", "Run parameters and definitions." ] }, { "cell_type": "code", "execution_count": 12, "id": "imported-disaster", "metadata": {}, "outputs": [], "source": [ "# Paths and filenames\n", "paths = {\n", " 'coords': '/data/bmoorema/MEOPAR/grid/coordinates_seagrid_SalishSea201702.nc',\n", " 'mask': '/data/bmoorema/MEOPAR/grid/mesh_mask201702.nc',\n", " 'results': '/data/bmoorema/results/parcels/sandbox',\n", "}\n", "\n", "# Load coords and mask files and extract grid variables\n", "coords, mask = [xr.open_dataset(paths[key], decode_times=False) for key in ('coords', 'mask')]\n", "gridlon, gridlat = [coords[key][0, ...].values for key in ('glamt', 'gphit')]\n", "tmask = mask.tmask[0, 0, ...].values\n", "\n", "# Define release parameters\n", "location = 'Sandheads'\n", "n = 100 # number of particles\n", "r = 50 # radius of particle cloud [m]\n", "\n", "# Start time, duration and timestep\n", "start = datetime(2019, 1, 1, 12, 30, 0)\n", "duration = timedelta(days=3)\n", "dt = timedelta(seconds=90)\n", "\n", "# Create Gaussian distribution around release point\n", "mean, cov = [0, 0], [[r**2, 0], [0, r**2]]\n", "x_offset, y_offset = np.random.multivariate_normal(mean, cov, n).T\n", "lon, lat = places.PLACES[location]['lon lat']\n", "lons = lon + x_offset / 111000 / np.cos(np.deg2rad(lat))\n", "lats = lat + y_offset / 111000\n", "\n", "# Forcing daterange (I add 1-day buffers)\n", "daterange = [start - timedelta(days=1), start + duration + timedelta(days=1)]\n", "\n", "# Output file prefix\n", "strings = [location] + [t.strftime('%Y%m%dT%H%M%S') for t in (start, start + duration)]\n", "prefix = os.path.join(paths['results'], '_'.join(strings))" ] }, { "cell_type": "markdown", "id": "absent-riverside", "metadata": {}, "source": [ "***\n", "\n", "#### Surface drift simulations (2-D)\n", "\n", "Build forcing fieldset" ] }, { "cell_type": 
"code", "execution_count": 13, "id": "flexible-craft", "metadata": {}, "outputs": [ { "ename": "PermissionError", "evalue": "[Errno 13] Permission denied: b'/data/bmoorema/results/parcels/sandbox/GEM_grid.nc'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/file_manager.py:209\u001b[0m, in \u001b[0;36mCachingFileManager._acquire_with_cache_info\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 208\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 209\u001b[0m file \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_cache[\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_key]\n\u001b[1;32m 210\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mKeyError\u001b[39;00m:\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/lru_cache.py:55\u001b[0m, in \u001b[0;36mLRUCache.__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 54\u001b[0m \u001b[39mwith\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_lock:\n\u001b[0;32m---> 55\u001b[0m value \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_cache[key]\n\u001b[1;32m 56\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_cache\u001b[39m.\u001b[39mmove_to_end(key)\n", "\u001b[0;31mKeyError\u001b[0m: [, ('/data/bmoorema/results/parcels/sandbox/GEM_grid.nc',), 'a', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), '69c58a63-25b9-48cb-9fc7-b8886971cbb8']", "\nDuring handling of the above exception, another exception occurred:\n", "\u001b[0;31mPermissionError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[13], line 6\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[39m# Append GEM and WW3 results into fieldset\u001b[39;00m\n\u001b[1;32m 5\u001b[0m \u001b[39mfor\u001b[39;00m model \u001b[39min\u001b[39;00m [\u001b[39m'\u001b[39m\u001b[39mGEM\u001b[39m\u001b[39m'\u001b[39m, \u001b[39m'\u001b[39m\u001b[39mWW3\u001b[39m\u001b[39m'\u001b[39m]:\n\u001b[0;32m----> 6\u001b[0m append_auxiliary_forcing(fieldset, model, daterange, paths[\u001b[39m'\u001b[39;49m\u001b[39mresults\u001b[39;49m\u001b[39m'\u001b[39;49m])\n", "Cell \u001b[0;32mIn[10], line 73\u001b[0m, in \u001b[0;36mappend_auxiliary_forcing\u001b[0;34m(fieldset, model, daterange, coords_path)\u001b[0m\n\u001b[1;32m 71\u001b[0m ds \u001b[39m=\u001b[39m ds\u001b[39m.\u001b[39mdrop_vars(data_vars)\n\u001b[1;32m 72\u001b[0m ds\u001b[39m.\u001b[39mupdate({dimensions[\u001b[39m'\u001b[39m\u001b[39mlon\u001b[39m\u001b[39m'\u001b[39m]: ds[dimensions[\u001b[39m'\u001b[39m\u001b[39mlon\u001b[39m\u001b[39m'\u001b[39m]] \u001b[39m-\u001b[39m \u001b[39m360\u001b[39m})\n\u001b[0;32m---> 73\u001b[0m ds\u001b[39m.\u001b[39;49mto_netcdf(coords)\n\u001b[1;32m 75\u001b[0m \u001b[39m# Filenames dict\u001b[39;00m\n\u001b[1;32m 76\u001b[0m filenames \u001b[39m=\u001b[39m {\u001b[39m'\u001b[39m\u001b[39mlon\u001b[39m\u001b[39m'\u001b[39m: coords, \u001b[39m'\u001b[39m\u001b[39mlat\u001b[39m\u001b[39m'\u001b[39m: coords, \u001b[39m'\u001b[39m\u001b[39mdata\u001b[39m\u001b[39m'\u001b[39m: datafiles}\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/core/dataset.py:1912\u001b[0m, in \u001b[0;36mDataset.to_netcdf\u001b[0;34m(self, path, mode, format, group, engine, encoding, unlimited_dims, 
compute, invalid_netcdf)\u001b[0m\n\u001b[1;32m 1909\u001b[0m encoding \u001b[39m=\u001b[39m {}\n\u001b[1;32m 1910\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mxarray\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mbackends\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mapi\u001b[39;00m \u001b[39mimport\u001b[39;00m to_netcdf\n\u001b[0;32m-> 1912\u001b[0m \u001b[39mreturn\u001b[39;00m to_netcdf( \u001b[39m# type: ignore # mypy cannot resolve the overloads:(\u001b[39;49;00m\n\u001b[1;32m 1913\u001b[0m \u001b[39mself\u001b[39;49m,\n\u001b[1;32m 1914\u001b[0m path,\n\u001b[1;32m 1915\u001b[0m mode\u001b[39m=\u001b[39;49mmode,\n\u001b[1;32m 1916\u001b[0m \u001b[39mformat\u001b[39;49m\u001b[39m=\u001b[39;49m\u001b[39mformat\u001b[39;49m,\n\u001b[1;32m 1917\u001b[0m group\u001b[39m=\u001b[39;49mgroup,\n\u001b[1;32m 1918\u001b[0m engine\u001b[39m=\u001b[39;49mengine,\n\u001b[1;32m 1919\u001b[0m encoding\u001b[39m=\u001b[39;49mencoding,\n\u001b[1;32m 1920\u001b[0m unlimited_dims\u001b[39m=\u001b[39;49munlimited_dims,\n\u001b[1;32m 1921\u001b[0m compute\u001b[39m=\u001b[39;49mcompute,\n\u001b[1;32m 1922\u001b[0m multifile\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[1;32m 1923\u001b[0m invalid_netcdf\u001b[39m=\u001b[39;49minvalid_netcdf,\n\u001b[1;32m 1924\u001b[0m )\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/api.py:1215\u001b[0m, in \u001b[0;36mto_netcdf\u001b[0;34m(dataset, path_or_file, mode, format, group, engine, encoding, unlimited_dims, compute, multifile, invalid_netcdf)\u001b[0m\n\u001b[1;32m 1211\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 1212\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mValueError\u001b[39;00m(\n\u001b[1;32m 1213\u001b[0m \u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39munrecognized option \u001b[39m\u001b[39m'\u001b[39m\u001b[39minvalid_netcdf\u001b[39m\u001b[39m'\u001b[39m\u001b[39m for engine \u001b[39m\u001b[39m{\u001b[39;00mengine\u001b[39m}\u001b[39;00m\u001b[39m\"\u001b[39m\n\u001b[1;32m 1214\u001b[0m )\n\u001b[0;32m-> 1215\u001b[0m store \u001b[39m=\u001b[39m store_open(target, mode, \u001b[39mformat\u001b[39;49m, group, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 1217\u001b[0m \u001b[39mif\u001b[39;00m unlimited_dims \u001b[39mis\u001b[39;00m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m 1218\u001b[0m unlimited_dims \u001b[39m=\u001b[39m dataset\u001b[39m.\u001b[39mencoding\u001b[39m.\u001b[39mget(\u001b[39m\"\u001b[39m\u001b[39munlimited_dims\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39mNone\u001b[39;00m)\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:382\u001b[0m, in \u001b[0;36mNetCDF4DataStore.open\u001b[0;34m(cls, filename, mode, format, group, clobber, diskless, persist, lock, lock_maker, autoclose)\u001b[0m\n\u001b[1;32m 376\u001b[0m kwargs \u001b[39m=\u001b[39m \u001b[39mdict\u001b[39m(\n\u001b[1;32m 377\u001b[0m clobber\u001b[39m=\u001b[39mclobber, diskless\u001b[39m=\u001b[39mdiskless, persist\u001b[39m=\u001b[39mpersist, \u001b[39mformat\u001b[39m\u001b[39m=\u001b[39m\u001b[39mformat\u001b[39m\n\u001b[1;32m 378\u001b[0m )\n\u001b[1;32m 379\u001b[0m manager \u001b[39m=\u001b[39m CachingFileManager(\n\u001b[1;32m 380\u001b[0m netCDF4\u001b[39m.\u001b[39mDataset, filename, mode\u001b[39m=\u001b[39mmode, kwargs\u001b[39m=\u001b[39mkwargs\n\u001b[1;32m 381\u001b[0m )\n\u001b[0;32m--> 382\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mcls\u001b[39;49m(manager, 
group\u001b[39m=\u001b[39;49mgroup, mode\u001b[39m=\u001b[39;49mmode, lock\u001b[39m=\u001b[39;49mlock, autoclose\u001b[39m=\u001b[39;49mautoclose)\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:329\u001b[0m, in \u001b[0;36mNetCDF4DataStore.__init__\u001b[0;34m(self, manager, group, mode, lock, autoclose)\u001b[0m\n\u001b[1;32m 327\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_group \u001b[39m=\u001b[39m group\n\u001b[1;32m 328\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_mode \u001b[39m=\u001b[39m mode\n\u001b[0;32m--> 329\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mformat \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mds\u001b[39m.\u001b[39mdata_model\n\u001b[1;32m 330\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_filename \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mds\u001b[39m.\u001b[39mfilepath()\n\u001b[1;32m 331\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mis_remote \u001b[39m=\u001b[39m is_remote_uri(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_filename)\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:391\u001b[0m, in \u001b[0;36mNetCDF4DataStore.ds\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 389\u001b[0m \u001b[39m@property\u001b[39m\n\u001b[1;32m 390\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mds\u001b[39m(\u001b[39mself\u001b[39m):\n\u001b[0;32m--> 391\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_acquire()\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:385\u001b[0m, in \u001b[0;36mNetCDF4DataStore._acquire\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 384\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_acquire\u001b[39m(\u001b[39mself\u001b[39m, needs_lock\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m):\n\u001b[0;32m--> 385\u001b[0m \u001b[39mwith\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_manager\u001b[39m.\u001b[39macquire_context(needs_lock) \u001b[39mas\u001b[39;00m root:\n\u001b[1;32m 386\u001b[0m ds \u001b[39m=\u001b[39m _nc4_require_group(root, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_group, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_mode)\n\u001b[1;32m 387\u001b[0m \u001b[39mreturn\u001b[39;00m ds\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/contextlib.py:135\u001b[0m, in \u001b[0;36m_GeneratorContextManager.__enter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 133\u001b[0m \u001b[39mdel\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39margs, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mkwds, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mfunc\n\u001b[1;32m 134\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 135\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mnext\u001b[39;49m(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mgen)\n\u001b[1;32m 136\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mStopIteration\u001b[39;00m:\n\u001b[1;32m 137\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mRuntimeError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mgenerator didn\u001b[39m\u001b[39m'\u001b[39m\u001b[39mt yield\u001b[39m\u001b[39m\"\u001b[39m) \u001b[39mfrom\u001b[39;00m \u001b[39mNone\u001b[39m\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/file_manager.py:197\u001b[0m, in \u001b[0;36mCachingFileManager.acquire_context\u001b[0;34m(self, 
needs_lock)\u001b[0m\n\u001b[1;32m 194\u001b[0m \u001b[39m@contextlib\u001b[39m\u001b[39m.\u001b[39mcontextmanager\n\u001b[1;32m 195\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39macquire_context\u001b[39m(\u001b[39mself\u001b[39m, needs_lock\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m):\n\u001b[1;32m 196\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"Context manager for acquiring a file.\"\"\"\u001b[39;00m\n\u001b[0;32m--> 197\u001b[0m file, cached \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_acquire_with_cache_info(needs_lock)\n\u001b[1;32m 198\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m 199\u001b[0m \u001b[39myield\u001b[39;00m file\n", "File \u001b[0;32m~/conda_envs/analysis-matt/lib/python3.10/site-packages/xarray/backends/file_manager.py:215\u001b[0m, in \u001b[0;36mCachingFileManager._acquire_with_cache_info\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 213\u001b[0m kwargs \u001b[39m=\u001b[39m kwargs\u001b[39m.\u001b[39mcopy()\n\u001b[1;32m 214\u001b[0m kwargs[\u001b[39m\"\u001b[39m\u001b[39mmode\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_mode\n\u001b[0;32m--> 215\u001b[0m file \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_opener(\u001b[39m*\u001b[39;49m\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_args, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 216\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_mode \u001b[39m==\u001b[39m \u001b[39m\"\u001b[39m\u001b[39mw\u001b[39m\u001b[39m\"\u001b[39m:\n\u001b[1;32m 217\u001b[0m \u001b[39m# ensure file doesn't get overridden when opened again\u001b[39;00m\n\u001b[1;32m 218\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_mode \u001b[39m=\u001b[39m \u001b[39m\"\u001b[39m\u001b[39ma\u001b[39m\u001b[39m\"\u001b[39m\n", "File \u001b[0;32msrc/netCDF4/_netCDF4.pyx:2463\u001b[0m, in \u001b[0;36mnetCDF4._netCDF4.Dataset.__init__\u001b[0;34m()\u001b[0m\n", "File \u001b[0;32msrc/netCDF4/_netCDF4.pyx:2026\u001b[0m, in \u001b[0;36mnetCDF4._netCDF4._ensure_nc_success\u001b[0;34m()\u001b[0m\n", "\u001b[0;31mPermissionError\u001b[0m: [Errno 13] Permission denied: b'/data/bmoorema/results/parcels/sandbox/GEM_grid.nc'" ] } ], "source": [ "# Load SalishSeaCast results into fieldset\n", "fieldset = fieldset_from_nemo(daterange, paths['coords'])\n", "\n", "# Append GEM and WW3 results into fieldset\n", "for model in ['GEM', 'WW3']:\n", " append_auxiliary_forcing(fieldset, model, daterange, paths['results'])" ] }, { "cell_type": "markdown", "id": "surrounded-bouquet", "metadata": {}, "source": [ "Run simulation with NEMO forcing only" ] }, { "cell_type": "code", "execution_count": 7, "id": "relative-booth", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "INFO: Compiled JITParticleAdvectionRK4 ==> /tmp/parcels-1896/33e0bbba1bc542d6e3b88a7d04aff565_0.so\n", "INFO: Temporary output files are stored in /data/bmoorema/results/parcels/sandbox/out-DMCPKVLI.\n", "INFO: You can use \"parcels_convert_npydir_to_netcdf /data/bmoorema/results/parcels/sandbox/out-DMCPKVLI\" to convert these to a NetCDF file during the run.\n", "100% |########################################################################|\n" ] } ], "source": [ "# Execute NEMO-only, 2D run\n", "pset = ParticleSet.from_list(fieldset, JITParticle, lon=lons, lat=lats, time=np.repeat(start, n))\n", "kernel = AdvectionRK4\n", "output_file = pset.ParticleFile(name=prefix + 
'_NEMO_2D.nc', outputdt=timedelta(hours=1))\n", "pset.execute(\n", " kernel, runtime=duration, dt=dt, output_file=output_file,\n", " recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle},\n", ")" ] }, { "cell_type": "markdown", "id": "helpful-montgomery", "metadata": {}, "source": [ "Run simulation with NEMO+GEM+WW3 forcing" ] }, { "cell_type": "code", "execution_count": 8, "id": "artistic-shift", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "INFO: Compiled JITParticleAdvectionRK4WindDriftStokesDrift ==> /tmp/parcels-1896/59971bb40696596c3f60fdc78dbd10d5_0.so\n", "INFO: Temporary output files are stored in /data/bmoorema/results/parcels/sandbox/out-DTKYIPOP.\n", "INFO: You can use \"parcels_convert_npydir_to_netcdf /data/bmoorema/results/parcels/sandbox/out-DTKYIPOP\" to convert these to a NetCDF file during the run.\n", "100% |########################################################################|\n" ] } ], "source": [ "# Execute NEMO+GEM+WW3, 2D run\n", "pset = ParticleSet.from_list(fieldset, JITParticle, lon=lons, lat=lats, time=np.repeat(start, n))\n", "kernel = AdvectionRK4 + pset.Kernel(WindDrift) + pset.Kernel(StokesDrift)\n", "output_file = pset.ParticleFile(name=prefix + '_NEMOGEMWW3_2D.nc', outputdt=timedelta(hours=1))\n", "pset.execute(\n", " kernel, runtime=duration, dt=dt, output_file=output_file,\n", " recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle},\n", ")" ] }, { "cell_type": "markdown", "id": "dressed-lindsay", "metadata": {}, "source": [ "Compare results" ] }, { "cell_type": "code", "execution_count": 10, "id": "informed-indication", "metadata": {}, "outputs": [], "source": [ "%%capture\n", "\n", "# Make initial figure\n", "fig, axs = plt.subplots(1, 2, figsize=(15, 8), gridspec_kw={'wspace': 0.1})\n", "l1, = axs[0].plot([], [], 'ko')\n", "l2, = axs[1].plot([], [], 'ko')\n", "t = axs[0].text(0.02, 0.02, '', transform=axs[0].transAxes)\n", "data = {}\n", "for ax, config in zip(axs, ['NEMO_2D', 'NEMOGEMWW3_2D']):\n", " data[config] = xr.open_dataset(prefix + f'_{config}.nc')\n", " ax.contourf(gridlon, gridlat, tmask, levels=[-0.01, 0.01], colors='gray')\n", " ax.contour(gridlon, gridlat, tmask, levels=[-0.01, 0.01], colors='k')\n", " ax.set_xlim([-124.3, -123])\n", " ax.set_ylim([48.7, 49.6])\n", " ax.set_title(config)\n", " ax.set_aspect(1/np.sin(np.deg2rad(49)))\n", "axs[1].yaxis.set_ticklabels('')\n", "\n", "# Init function\n", "def init():\n", " t.set_text('')\n", " for l in [l1, l2]:\n", " l.set_data([], [])\n", " return l1, l2, t,\n", "\n", "# Animate function\n", "def animate(hour):\n", " tstamp = data['NEMO_2D'].time[0, hour].values.astype('datetime64[s]').astype(datetime)\n", " t.set_text(tstamp.strftime('%Y-%b-%d %H:%M UTC'))\n", " for l, config in zip([l1, l2], ['NEMO_2D', 'NEMOGEMWW3_2D']):\n", " l.set_data(data[config].lon[:, hour], data[config].lat[:, hour])\n", " return l1, l2, t,\n", "\n", "# Build animation\n", "anim = animation.FuncAnimation(fig, animate, init_func=init, frames=73, interval=100, blit=True)" ] }, { "cell_type": "code", "execution_count": 14, "id": "relevant-liechtenstein", "metadata": {}, "outputs": [ { "data": { "text/html": [ "" ], "text/plain": [ "" ] }, "execution_count": 14, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Render animation\n", "HTML(anim.to_html5_video())" ] }, { "cell_type": "markdown", "id": "every-federation", "metadata": {}, "source": [ "***\n", "\n", "#### Particle simulations (3-D)\n", "\n", "Build forcing fieldset" ] }, { "cell_type": "code", 
"execution_count": 12, "id": "arabic-austin", "metadata": {}, "outputs": [], "source": [ "# Load SalishSeaCast results into fieldset\n", "fieldset = fieldset_from_nemo(daterange, paths['coords'], flat=False)" ] }, { "cell_type": "markdown", "id": "abroad-condition", "metadata": {}, "source": [ "Run simulation with NEMO forcing" ] }, { "cell_type": "code", "execution_count": 49, "id": "surprising-mustang", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "INFO: Compiled JITParticleAdvectionRK4_3D ==> /tmp/parcels-1896/64a6cf2f7a46cdc07fb84343d500440e_0.so\n", "INFO: Temporary output files are stored in /data/bmoorema/results/parcels/sandbox/out-RFUEEZNU.\n", "INFO: You can use \"parcels_convert_npydir_to_netcdf /data/bmoorema/results/parcels/sandbox/out-RFUEEZNU\" to convert these to a NetCDF file during the run.\n", "100% |########################################################################|\n" ] } ], "source": [ "# Execute NEMO-only, 3D run, release at 5m\n", "pset = ParticleSet.from_list(fieldset, JITParticle, lon=lons, lat=lats, depth=np.repeat(5, n), time=np.repeat(start, n))\n", "kernel = AdvectionRK4_3D\n", "output_file = pset.ParticleFile(name=prefix + '_NEMO_3D.nc', outputdt=timedelta(hours=1))\n", "pset.execute(\n", " kernel, runtime=duration, dt=dt, output_file=output_file,\n", " recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle},\n", ")" ] }, { "cell_type": "markdown", "id": "further-invention", "metadata": {}, "source": [ "Visualize results" ] }, { "cell_type": "code", "execution_count": 8, "id": "confident-growth", "metadata": {}, "outputs": [], "source": [ "%%capture\n", "\n", "# Make initial figure\n", "fig, ax = plt.subplots(figsize=(8, 8))\n", "cax = fig.add_axes([0.92, 0.15, 0.02, 0.7])\n", "l = ax.scatter([], [], s=50, c=[], vmin=0, vmax=10, edgecolor='k')\n", "t = ax.text(0.02, 0.02, '', transform=ax.transAxes)\n", "data = xr.open_dataset(prefix + '_NEMO_3D.nc')\n", "ax.contourf(gridlon, gridlat, tmask, levels=[-0.01, 0.01], colors='gray')\n", "ax.contour(gridlon, gridlat, tmask, levels=[-0.01, 0.01], colors='k')\n", "ax.set_xlim([-124.3, -123])\n", "ax.set_ylim([48.7, 49.6])\n", "ax.set_title('NEMO_3D')\n", "ax.set_aspect(1/np.sin(np.deg2rad(49)))\n", "fig.colorbar(l, cax=cax, label='Depth [m]')\n", "\n", "# Init function\n", "def init():\n", " t.set_text('')\n", " l.set_offsets(np.empty((0, 2)))\n", " l.set_array(np.empty(0))\n", " return l, t,\n", "\n", "# Animate function\n", "def animate(hour):\n", " tstamp = data.time[0, hour].values.astype('datetime64[s]').astype(datetime)\n", " t.set_text(tstamp.strftime('%Y-%b-%d %H:%M UTC'))\n", " l.set_offsets(np.vstack([data.lon[:, hour], data.lat[:, hour]]).T)\n", " l.set_array(data.z[:, hour])\n", " return l, t,\n", "\n", "# Build animation\n", "anim = animation.FuncAnimation(fig, animate, init_func=init, frames=73, interval=100, blit=True)" ] }, { "cell_type": "code", "execution_count": 9, "id": "worldwide-sally", "metadata": {}, "outputs": [ { "data": { "text/html": [ "" ], "text/plain": [ "" ] }, "execution_count": 9, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Render animation\n", "HTML(anim.to_html5_video())" ] }, { "cell_type": "code", "execution_count": null, "id": "fossil-nursing", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "analysis-matt", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": 
"text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.8" }, "vscode": { "interpreter": { "hash": "b0820408f94f9adab353dd622003a8ca360cea5172f3643d18b5213ac85f01b3" } } }, "nbformat": 4, "nbformat_minor": 5 }