{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "## Single Track Demo\n", "\n", "Process a single ATL03 granule using SlideRule's ATL06-SR algorithm and compare the results to the existing ATL06 data product.\n", "\n", "### What is demonstrated\n", "\n", "* The `icesat2.atl06` API is used to perform a SlideRule processing request of a single ATL03 granule\n", "* The `h5.h5p` API is used to read existing ATL06 datasets\n", "* The `matplotlib` package is used to plot the elevation profile of all three tracks in the granule (with the first track overlaid with the expected profile)\n", "* The `geopandas` package is used to produce a plot representing the geolocation of the elevations produced by SlideRule.\n", "\n", "### Points of interest\n", "\n", "Most use cases for SlideRule use the higher level `icesat2.atl06p` API which works on a region of interest; but this notebook shows some of the capabilities of SlideRule for working on individual granules." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "# Suppress Warnings\n", "import warnings\n", "warnings.filterwarnings(\"ignore\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "import re\n", "import posixpath\n", "import shapely.geometry\n", "import geopandas as gpd\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import matplotlib.dates as mdates\n", "from sliderule import icesat2, io, sliderule, earthdata, h5" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "# Configure Session\n", "icesat2.init(\"slideruleearth.io\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Find ATL03 Granules" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "# find granules for a spatial and temporal query\n", "box_lon = [-105, -105, -100, -100, -105]\n", "box_lat = [-75, -77.5, -77.5, -75, -75]\n", "poly = io.to_region(box_lon, box_lat)\n", "resources = earthdata.cmr(short_name='ATL03', polygon=poly, time_start='2018-10-19', time_end='2018-10-20') \n", "granule = resources[0]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Execute SlideRule Algorithm" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "%%time\n", "# regular expression operator for extracting information from files\n", "rx = re.compile(r'(ATL\\d{2})(-\\d{2})?_(\\d{4})(\\d{2})(\\d{2})(\\d{2})'\n", " r'(\\d{2})(\\d{2})_(\\d{4})(\\d{2})(\\d{2})_(\\d{3})_(\\d{2})(.*?).h5$')\n", "# extract parameters from ICESat-2 granule\n", "PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRN,RL,VRS,AUX=rx.findall(granule).pop()\n", "\n", "# Build ATL06 Request\n", "parms = {\n", " \"poly\":poly,\n", " \"cnf\": 4,\n", " \"ats\": 20.0,\n", " \"cnt\": 10,\n", " \"len\": 40.0,\n", " \"res\": 20.0\n", "}\n", "\n", "# Request ATL06 Data\n", "gdf = icesat2.atl06(parms, granule)\n", "\n", "# Return DataFrame\n", "print(\"Reference Ground Tracks: {} to {}\".format(min(gdf[\"rgt\"]), max(gdf[\"rgt\"])))\n", "print(\"Cycle: {} to {}\".format(min(gdf[\"cycle\"]), max(gdf[\"cycle\"])))\n", "print(\"Retrieved {} points from SlideRule\".format(len(gdf[\"h_mean\"])))" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "def s3_retrieve(granule, **kwargs):\n", " # set default keyword arguments\n", " kwargs.setdefault('lon_key','longitude')\n", " kwargs.setdefault('lat_key','latitude')\n", " kwargs.setdefault('index_key','time')\n", " kwargs.setdefault('polygon',None)\n", " # regular expression operator for extracting information from files\n", " rx = re.compile(r'(ATL\\d{2})(-\\d{2})?_(\\d{4})(\\d{2})(\\d{2})(\\d{2})'\n", " r'(\\d{2})(\\d{2})_(\\d{4})(\\d{2})(\\d{2})_(\\d{3})_(\\d{2})(.*?).h5$')\n", " # extract parameters from ICESat-2 granule\n", " PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRN,RL,VRS,AUX=rx.findall(granule).pop()\n", " # variables of interest\n", " if (PRD == 'ATL06'):\n", " segment_group = \"land_ice_segments\"\n", " segment_key = 'segment_id'\n", " vnames = ['segment_id','delta_time','latitude','longitude',\n", " 'h_li','h_li_sigma','atl06_quality_summary',\n", " 'fit_statistics/dh_fit_dx','fit_statistics/dh_fit_dy',\n", " 'fit_statistics/dh_fit_dx_sigma','fit_statistics/n_fit_photons',\n", " 'fit_statistics/h_expected_rms','fit_statistics/h_robust_sprd',\n", " 'fit_statistics/w_surface_window_final','fit_statistics/h_mean']\n", " elif (PRD == 'ATL08'):\n", " segment_group = \"land_segments\"\n", " segment_key = 'segment_id_beg'\n", " vnames = ['segment_id_beg','segment_id_end','delta_time',\n", " 'latitude','longitude','brightness_flag','layer_flag',\n", " 'msw_flag','night_flag','terrain_flg','urban_flag',\n", " 'segment_landcover','segment_snowcover','segment_watermask',\n", " 'terrain/h_te_best_fit','terrain/h_te_uncertainty',\n", " 'terrain/terrain_slope','terrain/n_te_photons',\n", " 'canopy/h_canopy','canopy/h_canopy_uncertainty',\n", " 'canopy/canopy_flag','canopy/n_ca_photons']\n", " # for each valid beam within the HDF5 file\n", " frames = []\n", " gt = dict(gt1l=10,gt1r=20,gt2l=30,gt2r=40,gt3l=50,gt3r=60)\n", " atlas_sdp_epoch = np.datetime64('2018-01-01T00:00:00')\n", " kwds = dict(startrow=0,numrows=-1)\n", " for gtx in ['gt1l','gt1r','gt2l','gt2r','gt3l','gt3r']:\n", " geodatasets = [dict(dataset=f'{gtx}/{segment_group}/{v}',**kwds) for v in vnames]\n", " try:\n", " # get datasets from s3\n", " hidatasets = h5.h5p(geodatasets, granule, \"icesat2\")\n", " # copy to new \"flattened\" dictionary\n", " data = {posixpath.basename(key):var for key,var in hidatasets.items()}\n", " # Generate Time Column\n", " delta_time = (data['delta_time']*1e9).astype('timedelta64[ns]')\n", " data['time'] = gpd.pd.to_datetime(atlas_sdp_epoch + delta_time)\n", " except:\n", " pass\n", " else:\n", " # copy filename parameters\n", " data['rgt'] = [int(TRK)]*len(data['delta_time'])\n", " data['cycle'] = [int(CYCL)]*len(data['delta_time'])\n", " data['gt'] = [gt[gtx]]*len(data['delta_time'])\n", " # pandas dataframe from compiled dictionary\n", " frames.append(gpd.pd.DataFrame.from_dict(data))\n", " # concatenate pandas dataframe\n", " try:\n", " df = gpd.pd.concat(frames)\n", " except:\n", " return sliderule.emptyframe()\n", " # convert to a GeoDataFrame\n", " lon_key,lat_key = (kwargs['lon_key'],kwargs['lat_key'])\n", " geometry = gpd.points_from_xy(df[lon_key], df[lat_key])\n", " gdf = gpd.GeoDataFrame(df.drop(columns=[lon_key,lat_key]),\n", " geometry=geometry,crs='EPSG:4326')\n", " # intersect with geometry in projected reference system\n", " if kwargs['polygon'] is not None:\n", " gdf = gpd.overlay(gdf.to_crs(kwargs['polygon'].crs),\n", " kwargs['polygon'], how='intersection')\n", " # sort values for reproducible output despite async processing\n", " gdf.set_index(kwargs['index_key'], inplace=True)\n", " gdf.sort_index(inplace=True)\n", " # remove duplicate points\n", " gdf = gdf[~gdf.index.duplicated()]\n", " # convert back to original coordinate reference system\n", " return gdf.to_crs('EPSG:4326')" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "# get standard ATL06 products\n", "atl06_granule = f'ATL06_{YY}{MM}{DD}{HH}{MN}{SS}_{TRK}{CYCL}{GRN}_{RL}_{VRS}{AUX}.h5'\n", "region_gs = gpd.GeoSeries(shapely.geometry.Polygon(np.c_[box_lon,box_lat]), crs='EPSG:4326')\n", "region_gdf = gpd.GeoDataFrame(geometry=region_gs).to_crs('EPSG:3857')\n", "atl06 = s3_retrieve(atl06_granule, polygon=region_gdf)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Compare Sliderule and ASAS Results" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "# Create Elevation Plot\n", "fig,ax = plt.subplots(num=1, ncols=6, sharey=True, figsize=(12, 6))\n", "locator = mdates.AutoDateLocator(minticks=3, maxticks=7)\n", "formatter = mdates.ConciseDateFormatter(locator)\n", "# Plot Elevations for each track\n", "tracks = dict(gt1l=10,gt1r=20,gt2l=30,gt2r=40,gt3l=50,gt3r=60)\n", "for s,gt in enumerate(tracks.keys()):\n", " sr = gdf[gdf[\"gt\"] == tracks[gt]]\n", " asas = atl06[(atl06[\"gt\"] == tracks[gt]) &\n", " (atl06[\"h_mean\"] < 1e38) &\n", " (atl06[\"segment_id\"] >= sr[\"segment_id\"][0]) &\n", " (atl06[\"segment_id\"] <= sr[\"segment_id\"][-1])]\n", " ax[s].set_title(gt)\n", " ax[s].plot(sr.index.values, sr[\"h_mean\"].values, zorder=1,\n", " linewidth=1.0, color='mediumseagreen', label='SlideRule')\n", " ax[s].plot(asas.index.values, asas[\"h_mean\"].values, zorder=0,\n", " linewidth=1.0, color='darkorchid', label='ASAS')\n", " ax[s].xaxis.set_major_locator(locator)\n", " ax[s].xaxis.set_major_formatter(formatter)\n", "# add labels and legends\n", "ax[0].set_ylabel('Height Above WGS84 Ellipsoid')\n", "lgd = ax[0].legend(loc=3,frameon=False)\n", "lgd.get_frame().set_alpha(1.0)\n", "for line in lgd.get_lines():\n", " line.set_linewidth(6)\n", "# Show Plot\n", "plt.show()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Map Plot" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [ "# Create PlateCarree Plot\n", "fig,ax1 = plt.subplots(num=None, figsize=(12, 6))\n", "################################\n", "# add global plot\n", "################################\n", "world = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))\n", "world.plot(ax=ax1, color='0.8', edgecolor='black')\n", "gdf.plot(ax=ax1, marker='o', color='red', markersize=2.5, zorder=3)\n", "ax1.set_title(\"SlideRule Global Reference\")\n", "\n", "# Plot locations of each track\n", "tracks = dict(gt1l=10,gt1r=20,gt2l=30,gt2r=40,gt3l=50,gt3r=60)\n", "for s,gt in enumerate(tracks.keys()):\n", " sr = gdf[gdf[\"gt\"] == tracks[gt]]\n", " sr.plot(ax=ax1, marker='o', color='red', markersize=2.5, zorder=3)\n", "\n", "# Plot Bounding Box\n", "ax1.plot(box_lon, box_lat, linewidth=1.5, color='blue', zorder=2)\n", "\n", "# x and y limits, axis = equal\n", "ax1.set_xlim(-180,180)\n", "ax1.set_ylim(-90,90)\n", "ax1.set_aspect('equal', adjustable='box')\n", "# show plot\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "interpreter": { "hash": "d7f94b8b1e41b02170d45ac71ce2d6b011e7cd56207b4c480f5292088bcfab93" }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.0" } }, "nbformat": 4, "nbformat_minor": 4 }