{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "'''\n", "\n", "This code is part of the SIPN2 project focused on improving sub-seasonal to seasonal predictions of Arctic Sea Ice. \n", "If you use this code for a publication or presentation, please cite the reference in the README.md on the\n", "main page (https://github.com/NicWayand/ESIO). \n", "\n", "Questions or comments should be addressed to nicway@uw.edu\n", "\n", "Copyright (c) 2018 Nic Wayand\n", "\n", "GNU General Public License v3.0\n", "\n", "\n", "'''\n", "\n", "'''\n", "Plot forecast maps with all available models.\n", "'''\n", "\n", "%matplotlib inline\n", "%load_ext autoreload\n", "%autoreload\n", "import matplotlib\n", "matplotlib.use('Agg')\n", "import matplotlib.pyplot as plt\n", "from collections import OrderedDict\n", "import itertools\n", "import numpy as np\n", "import numpy.ma as ma\n", "import pandas as pd\n", "import struct\n", "import os\n", "import xarray as xr\n", "import glob\n", "import datetime\n", "import cartopy.crs as ccrs\n", "from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER\n", "import seaborn as sns\n", "np.seterr(divide='ignore', invalid='ignore')\n", "import warnings\n", "warnings.simplefilter(action='ignore', category=FutureWarning)\n", "import json\n", "from esio import EsioData as ed\n", "from esio import ice_plot\n", "from esio import import_data\n", "import subprocess\n", "import dask\n", "from dask.distributed import Client\n", "import timeit\n", "\n", "# General plotting settings\n", "sns.set_style('whitegrid')\n", "sns.set_context(\"talk\", font_scale=.8, rc={\"lines.linewidth\": 2.5})" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def remove_small_contours(p, thres=10):\n", " for level in p.collections:\n", " for kp,path in reversed(list(enumerate(level.get_paths()))):\n", " # go in reversed order due to deletions!\n", "\n", " # include test for \"smallness\" of your choice here:\n", " # I'm using a simple estimation for the diameter based on the\n", " # x and y diameter...\n", " verts = path.vertices # (N,2)-shape array of contour line coordinates\n", " diameter = np.max(verts.max(axis=0) - verts.min(axis=0))\n", "\n", " if diameter=Nmod, 'Need more subplots'\n", "\n", " for cvar in variables:\n", "\n", " # Define fig dir and make if doesn't exist\n", " fig_dir = os.path.join(E.fig_dir, 'model', 'all_model', cvar, 'maps_weekly')\n", " if not os.path.exists(fig_dir):\n", " os.makedirs(fig_dir)\n", "\n", " # Make requested dataArray as specified above\n", " ds_status = xr.DataArray(np.ones((init_slice.size, da_slices.size))*np.NaN, \n", " dims=('init_time','fore_time'), \n", " coords={'init_time':init_slice,'fore_time':da_slices}) \n", " ds_status.name = 'status'\n", " ds_status = ds_status.to_dataset()\n", "\n", " # Check what plots we already have\n", " if not updateAll:\n", " print(\"Removing figures we have already made\")\n", " ds_status = update_status(ds_status=ds_status, fig_dir=fig_dir, int_2_days_dict=int_2_days_dict)\n", " \n", " print(ds_status.status.values)\n", " # Drop IC/FT we have already plotted (orthoginal only)\n", " ds_status = ds_status.where(ds_status.status.sum(dim='fore_time')0:\n", " #if ((it + ft) in ds_81.time.values):\n", "\n", " if metric=='mean':\n", " da_obs_c = da_obs_c.mean(dim='time') #ds_81.sic.sel(time=(it + ft))\n", " elif metric=='SIP':\n", " da_obs_c = (da_obs_c >= 
0.15).mean(dim='time').astype('int').where(da_obs_c.isel(time=0).notnull())\n", "                    elif metric=='anomaly':\n", "                        da_obs_VT = da_obs_c.mean(dim='time')\n", "                        da_obs_mean = mean_1980_2010_sic.isel(time=slice(cdoy_start,cdoy_end)).mean(dim='time')\n", "                        da_obs_c = da_obs_VT - da_obs_mean\n", "                    else:\n", "                        raise ValueError('Not implemented')\n", "                    da_obs_c.plot.pcolormesh(ax=axes[ax_num], x='lon', y='lat', \n", "                                      transform=ccrs.PlateCarree(),\n", "                                      add_colorbar=False,\n", "                                      cmap=cmap_c,\n", "                                      vmin=c_vmin, vmax=c_vmax)\n", "                    axes[ax_num].set_title('Observed')\n", "                    # Overlay median ice edge\n", "                    #if metric=='mean':\n", "                        #po = median_ice_fill.isel(time=cdoy).plot.contour(ax=axes[ax_num], x='xm', y='ym', \n", "                                                                          #colors=('#bc0f60'),\n", "                                                                          #linewidths=[0.5],\n", "                                                                          #levels=[0.5])\n", "                        #remove_small_contours(po, thres=10)\n", "                else: # When we're in the future (or obs are missing)\n", "                    if metric=='anomaly': # Still get climatological mean for model difference\n", "                        da_obs_mean = mean_1980_2010_sic.isel(time=slice(cdoy_start,cdoy_end)).mean(dim='time')\n", "                    elif metric=='SIP': # Plot this historical mean SIP \n", "                        da_obs_c = mean_1980_2010_SIP.isel(time=slice(cdoy_start,cdoy_end)).mean(dim='time')\n", "                        da_obs_c.plot.pcolormesh(ax=axes[ax_num], x='lon', y='lat', \n", "                                          transform=ccrs.PlateCarree(),\n", "                                          add_colorbar=False,\n", "                                          cmap=cmap_c,\n", "                                          vmin=c_vmin, vmax=c_vmax)\n", "                        axes[ax_num].set_title('Hist. Obs.')\n", "\n", "                ############################################################################\n", "                #                        Plot climatology trend                            #\n", "                ############################################################################\n", "\n", "                i = 2\n", "                cmod = 'climatology'\n", "                axes[i].set_title('clim. trend')\n", "\n", "                # Check if we have any valid times in range of target dates\n", "                ds_model = obs_clim_model.where((obs_clim_model.time>=valid_start) & (obs_clim_model.time<=valid_end), drop=True) \n", "                if 'time' in ds_model.lat.dims:\n", "                    ds_model.coords['lat'] = ds_model.lat.isel(time=0).drop('time') # Drop time from lat/lon dims (not sure why?)\n", "\n", "                # If we have any time\n", "                if ds_model.time.size > 0:\n", "\n", "                    # Average over time\n", "                    ds_model = ds_model.mean(dim='time')\n", "\n", "                    if metric=='mean': # Calc ensemble mean\n", "                        ds_model = ds_model # climatology has no ensemble dimension to average\n", "                    elif metric=='SIP': # Calc probability\n", "                        # Issue of some ensemble members having missing data\n", "                        ocnmask = ds_model.notnull()\n", "                        ds_model = (ds_model>=0.15).where(ocnmask)\n", "                    elif metric=='anomaly': # Calc anomaly in reference to mean observed 1980-2010\n", "                        ds_model = ds_model - da_obs_mean\n", "                        # Add back lat/lon (dropped because of round-off differences)\n", "                        ds_model['lat'] = da_obs_mean.lat\n", "                        ds_model['lon'] = da_obs_mean.lon\n", "                    else:\n", "                        raise ValueError('metric not implemented') \n", "\n", "                    if 'doy' in ds_model.coords:\n", "                        ds_model = ds_model.drop(['doy'])\n", "                    if 'xm' in ds_model.coords:\n", "                        ds_model = ds_model.drop(['xm'])\n", "                    if 'ym' in ds_model.coords:\n", "                        ds_model = ds_model.drop(['ym'])\n", "\n", "                    # Build MME \n", "                    if cmod not in MME_NO: # Exclude some (bad) models from the MME\n", "                        ds_model.coords['model'] = cmod\n", "                        MME_list.append(ds_model)\n", "\n", "                    # Plot\n", "                    p = ds_model.plot.pcolormesh(ax=axes[i], x='lon', y='lat', \n", "                                      transform=ccrs.PlateCarree(),\n", "                                      add_colorbar=False,\n", "                                      cmap=cmap_c,\n", "                                      vmin=c_vmin, vmax=c_vmax)\n", "\n", "                    axes[i].set_title('clim. 
trend')\n", "\n", "                    # Clean up for current model\n", "                    ds_model = None \n", "\n", "                ###########################################################\n", "                #              Plot Models in SIPN format                 #\n", "                ###########################################################\n", "\n", "                # Plot all Models\n", "                p = None # initialize to know if we found any data\n", "                for (i, cmod) in enumerate(models_2_plot):\n", "                    print(cmod)\n", "                    i = i+3 # shift for obs, MME, and clim\n", "                    axes[i].set_title(E.model[cmod]['model_label'])\n", "\n", "                    # Load in Model\n", "                    # Find only files that have current year and month in filename (speeds up loading)\n", "                    all_files = os.path.join(E.model[cmod][runType]['sipn_nc'], '*'+it_yr+'*'+it_m+'*.nc') \n", "\n", "                    # Check we have files \n", "                    files = glob.glob(all_files)\n", "                    if not files:\n", "                        continue # Skip this model\n", "\n", "                    # Load in model \n", "                    ds_model = xr.open_mfdataset(sorted(files), \n", "                        chunks={'fore_time': 1, 'init_time': 1, 'nj': 304, 'ni': 448}, \n", "                        concat_dim='init_time', autoclose=True, parallel=True)\n", "                    ds_model = ds_model.rename({'nj':'x', 'ni':'y'})\n", "\n", "                    # Select init period and fore_time of interest\n", "                    ds_model = ds_model.sel(init_time=slice(it_start, it))\n", "                    # Check we found any init_times in range\n", "                    if ds_model.init_time.size==0:\n", "                        print('init_time not found.')\n", "                        continue\n", "\n", "                    # Select var of interest (if available)\n", "                    if cvar in ds_model.variables:\n", "                        # print('found ',cvar)\n", "                        ds_model = ds_model[cvar]\n", "                    else:\n", "                        print('cvar not found.')\n", "                        continue\n", "\n", "                    # Get Valid time\n", "                    ds_model = import_data.get_valid_time(ds_model)\n", "\n", "                    # Check if we have any valid times in range of target dates\n", "                    ds_model = ds_model.where((ds_model.valid_time>=valid_start) & (ds_model.valid_time<=valid_end), drop=True) \n", "                    if ds_model.fore_time.size == 0:\n", "                        print(\"no fore_time found for target period.\")\n", "                        continue\n", "\n", "                    # Average over fore_time and init_time\n", "                    ds_model = ds_model.mean(dim=['fore_time','init_time'])\n", "\n", "                    if metric=='mean': # Calc ensemble mean\n", "                        ds_model = ds_model.mean(dim='ensemble')\n", "                    elif metric=='SIP': # Calc probability\n", "                        # Issue of some ensemble members having missing data\n", "                        # ds_model = ds_model.where(ds_model>=0.15, other=0).mean(dim='ensemble')\n", "                        ok_ens = ((ds_model.notnull().sum(dim='x').sum(dim='y'))>0) # select ensemble members with any data\n", "                        ds_model = ((ds_model.where(ok_ens, drop=True)>=0.15) ).mean(dim='ensemble').where(ds_model.isel(ensemble=0).notnull())\n", "                    elif metric=='anomaly': # Calc anomaly in reference to mean observed 1980-2010\n", "                        ds_model = ds_model.mean(dim='ensemble') - da_obs_mean\n", "                        # Add back lat/lon (dropped because of round-off differences)\n", "                        ds_model['lat'] = da_obs_mean.lat\n", "                        ds_model['lon'] = da_obs_mean.lon\n", "                    else:\n", "                        raise ValueError('metric not implemented')\n", "                    #print(\"Calc metric took \", (timeit.default_timer() - start_time), \" seconds.\")\n", "\n", "                    # drop ensemble if still present\n", "                    if 'ensemble' in ds_model:\n", "                        ds_model = ds_model.drop('ensemble')\n", "\n", "                    # Build MME \n", "                    if cmod not in MME_NO: # Exclude some (bad) models from the MME\n", "                        ds_model.coords['model'] = cmod\n", "                        if 'xm' in ds_model:\n", "                            ds_model = ds_model.drop(['xm','ym']) # Drop coords we don't use\n", "                        MME_list.append(ds_model)\n", "\n", "                    # Plot\n", "                    #start_time = timeit.default_timer()\n", "                    p = ds_model.plot.pcolormesh(ax=axes[i], x='lon', y='lat', \n", "
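# model fields are on a curvilinear grid (2-D lat/lon coords); the PlateCarree transform below tells cartopy the x/y values are lon/lat in degrees\n", "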
transform=ccrs.PlateCarree(),\n", "                                      add_colorbar=False,\n", "                                      cmap=cmap_c,\n", "                                      vmin=c_vmin, vmax=c_vmax)\n", "                    #print(\"Plotting took \", (timeit.default_timer() - start_time), \" seconds.\")\n", "\n", "                    # Overlay median ice edge\n", "                    #if metric=='mean':\n", "                        #po = median_ice_fill.isel(time=cdoy).plot.contour(ax=axes[i], x='xm', y='ym', \n", "                                                                          #colors=('#bc0f60'),\n", "                                                                          #linewidths=[0.5],\n", "                                                                          #levels=[0.5]) #, label='Median ice edge 1981-2010')\n", "                        #remove_small_contours(po, thres=10)\n", "\n", "                    axes[i].set_title(E.model[cmod]['model_label'])\n", "\n", "                    # Clean up for current model\n", "                    ds_model = None\n", "\n", "\n", "\n", "                # MME\n", "                ax_num = 1\n", "\n", "                if MME_list: # If we had any models for this time\n", "                    # Concat over all models\n", "                    ds_MME = xr.concat(MME_list, dim='model')\n", "                    # Set lat/lon to the first model's lat/lon (round-off differences between models)\n", "                    if 'model' in ds_MME.lat.dims:\n", "                        ds_MME.coords['lat'] = ds_MME.lat.isel(model=0).drop('model')\n", "\n", "                    # Plot average\n", "                    # Don't include some models (i.e. climatology and dampedAnomaly)\n", "                    mod_to_avg = [m for m in ds_MME.model.values if m not in ['climatology','dampedAnomaly']]\n", "                    pmme = ds_MME.sel(model=mod_to_avg).mean(dim='model').plot.pcolormesh(ax=axes[ax_num], x='lon', y='lat', \n", "                                      transform=ccrs.PlateCarree(),\n", "                                      add_colorbar=False, \n", "                                      cmap=cmap_c,vmin=c_vmin, vmax=c_vmax)\n", "                    # Overlay median ice edge\n", "                    #if metric=='mean':\n", "                        #po = median_ice_fill.isel(time=cdoy).plot.contour(ax=axes[ax_num], x='xm', y='ym', \n", "                                                                          # colors=('#bc0f60'),\n", "                                                                          # linewidths=[0.5],\n", "                                                                          # levels=[0.5])\n", "                        #remove_small_contours(po, thres=10)\n", "\n", "                    # Save all models for given target valid_time\n", "                    out_metric_dir = os.path.join(E.model['MME'][runType]['sipn_nc'], metric)\n", "                    if not os.path.exists(out_metric_dir):\n", "                        os.makedirs(out_metric_dir) \n", "\n", "                    out_init_dir = os.path.join(out_metric_dir, pd.to_datetime(it).strftime('%Y-%m-%d'))\n", "                    if not os.path.exists(out_init_dir):\n", "                        os.makedirs(out_init_dir) \n", "\n", "                    out_nc_file = os.path.join(out_init_dir, pd.to_datetime(it+ft).strftime('%Y-%m-%d')+'.nc')\n", "\n", "                    # Add observations\n", "                    # Check if obs exist (i.e. we have any observations for this valid period)\n", "                    # TODO: require ALL observations to be present, otherwise the weekly average could come from a single day\n", "                    if ds_81.sic.sel(time=slice(valid_start,valid_end)).time.size > 0:\n", "                        da_obs_c.coords['model'] = 'Observed'\n", "\n", "                        # Drop coords we don't need\n", "                        da_obs_c = da_obs_c.drop(['hole_mask','xm','ym'])\n", "                        if 'time' in da_obs_c:\n", "                            da_obs_c = da_obs_c.drop('time')\n", "                        if 'xm' in ds_MME:\n", "                            ds_MME = ds_MME.drop(['xm','ym'])\n", "\n", "                        # Add obs\n", "                        ds_MME_out = xr.concat([ds_MME,da_obs_c], dim='model')\n", "                    else: \n", "                        ds_MME_out = ds_MME\n", "\n", "                    # Add init and valid times\n", "                    ds_MME_out.coords['init_start'] = it_start\n", "                    ds_MME_out.coords['init_end'] = it\n", "                    ds_MME_out.coords['valid_start'] = it_start+ft\n", "                    ds_MME_out.coords['valid_end'] = it+ft\n", "                    ds_MME_out.coords['fore_time'] = ft\n", "                    ds_MME_out.name = metric\n", "\n", "                    # Save\n", "                    ds_MME_out.to_netcdf(out_nc_file)\n", "\n", "                    axes[ax_num].set_title('MME')\n", "\n", "                # Make pretty\n", "                f.subplots_adjust(right=0.8)\n", "                cbar_ax = f.add_axes([0.85, 0.15, 0.05, 0.7])\n", "                if p:\n", "                    cbar = f.colorbar(p, cax=cbar_ax, label=c_label)\n", "                    if metric=='anomaly':\n", "                        cbar.set_ticks(np.arange(-1,1.1,0.2))\n", "                    else:\n", "                        cbar.set_ticks(np.arange(0,1.1,0.1))\n", "
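# NOTE: the anomaly colorbar spans [-1, 1] while the mean and SIP colorbars span [0, 1], hence the different tick spacing\n", "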
#cbar.set_ticklabels(np.arange(0,1,0.05))\n", "\n", " # Set title of all plots\n", " init_time_2 = pd.to_datetime(it).strftime('%Y-%m-%d')\n", " init_time_1 = pd.to_datetime(it_start).strftime('%Y-%m-%d')\n", " valid_time_2 = pd.to_datetime(it+ft).strftime('%Y-%m-%d')\n", " valid_time_1 = pd.to_datetime(it_start+ft).strftime('%Y-%m-%d')\n", " plt.suptitle('Initialization Time: '+init_time_1+' to '+init_time_2+'\\n Valid Time: '+valid_time_1+' to '+valid_time_2,\n", " fontsize=15) # +'\\n Week '+week_str\n", " plt.subplots_adjust(top=0.85)\n", "\n", " # Save to file\n", " f_out = os.path.join(fig_dir,'panArctic_'+metric+'_'+runType+'_'+init_time_2+'_'+cs_str+'.png')\n", " f.savefig(f_out,bbox_inches='tight', dpi=200)\n", " print(\"saved \", f_out)\n", " print(\"Figure took \", (timeit.default_timer() - start_time_plot)/60, \" minutes.\")\n", "\n", " # Mem clean up\n", " plt.close(f)\n", " p = None\n", " ds_MME= None\n", " da_obs_c = None\n", " da_obs_mean = None\n", "\n", " # Done with current it\n", " print(\"Took \", (timeit.default_timer() - start_time_cmod)/60, \" minutes.\")\n", "\n", " \n", " # Update json file\n", " json_format = get_figure_init_times(fig_dir)\n", " json_dict = [{\"date\":cd,\"label\":cd} for cd in json_format]\n", "\n", " json_f = os.path.join(fig_dir, 'plotdates_current.json')\n", " with open(json_f, 'w') as outfile:\n", " json.dump(json_dict, outfile)\n", "\n", " # Make into Gifs\n", " for cit in json_format:\n", " subprocess.call(str(\"/home/disk/sipn/nicway/python/ESIO/scripts/makeGif.sh \" + fig_dir + \" \" + cit), shell=True)\n", "\n", " print(\"Finished plotting panArctic Maps.\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "if __name__ == '__main__':\n", " # Start up Client\n", " client = Client()\n", " #dask.config.set(scheduler='threads') # overwrite default with threaded scheduler\n", "\n", " \n", " # Call function\n", " Update_PanArctic_Maps()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# # Run below in case we need to just update the json file and gifs\n", "\n", "\n", "# fig_dir = '/home/disk/sipn/nicway/public_html/sipn/figures/model/all_model/sic/maps_weekly'\n", "# json_format = get_figure_init_times(fig_dir)\n", "# json_dict = [{\"date\":cd,\"label\":cd} for cd in json_format]\n", "\n", "# json_f = os.path.join(fig_dir, 'plotdates_current.json')\n", "# with open(json_f, 'w') as outfile:\n", "# json.dump(json_dict, outfile)\n", "\n", "# # Make into Gifs\n", "# # TODO fig_dir hardcoded to current variable\n", "# for cit in json_format:\n", "# subprocess.call(str(\"/home/disk/sipn/nicway/python/ESIO/scripts/makeGif.sh \" + fig_dir + \" \" + cit), shell=True)\n", "\n", "# print(\"Finished plotting panArctic Maps.\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": 
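[], "source": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Illustrative sketch (added for clarity; it is not called by the pipeline above): the SIP metric used throughout this notebook is the fraction of ensemble members whose sea ice concentration meets the 0.15 ice-edge threshold. The names below (`sic_demo`, `sip_demo`) are hypothetical demo data, not part of the ESIO pipeline." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Minimal, self-contained sketch of the SIP calculation (hypothetical demo data)\n", "import numpy as np\n", "import xarray as xr\n", "\n", "rng = np.random.RandomState(0)\n", "# Fake ensemble of sea ice concentration fields: 5 members on a 10x10 grid\n", "sic_demo = xr.DataArray(rng.rand(5, 10, 10), dims=('ensemble', 'x', 'y'), name='sic')\n", "\n", "# SIP = fraction of members at or above the 15% concentration threshold,\n", "# masked to cells where the first member has data (mirrors the ocean-mask logic above)\n", "sip_demo = (sic_demo >= 0.15).mean(dim='ensemble').where(sic_demo.isel(ensemble=0).notnull())\n", "print(float(sip_demo.min()), float(sip_demo.max()))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": 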
[], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "3.6.4 esio", "language": "python", "name": "esio" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.4" } }, "nbformat": 4, "nbformat_minor": 2 }