{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "import matplotlib.pyplot as plot\n", "import pandas as pd\n", "import datetime as dt\n", "from salishsea_tools import evaltools as et\n", "\n", "%matplotlib inline" ] }, { "cell_type": "code", "execution_count": 30, "metadata": {}, "outputs": [], "source": [ "# path to model files:\n", "PATH= '/results/SalishSea/nowcast-green.201812/'\n", "\n", "# start and end dates for analysis:\n", "start_date = dt.datetime(2015,1,1)\n", "end_date = dt.datetime(2015,3,1)\n", "\n", "# number of days per model file:\n", "flen=1\n", "\n", "# dictionary mapping desired model variables to the file types where they are found\n", "filemap={'diatoms':'ptrc_T','ciliates':'ptrc_T',\n", " 'flagellates':'ptrc_T','vosaline':'grid_T','votemper':'grid_T'}\n", "\n", "# dictionary mapping model file types to their time resolution in hours (1 is hourly files, 24 is daily)\n", "fdict={'ptrc_T':1,'grid_T':1}\n", "\n", "# results format\n", "# -- nowcast: files like 01jan15/SalishSea_1h_20150101_20150101_ptrc_T.nc\n", "# -- long: files like SalishSea_1h_20150206_20150804_ptrc_T_20150427-20150506.nc, all in one directory\n", "namfmt='nowcast'" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "5419\n" ] }, { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
YearMonthDayHourLatLonZSACTdtUTC
02015.02.011.05.29611148.613333-123.2438331.98309929.8496118.9427532015-02-11 05:17:46
12015.02.011.05.29611148.613333-123.2438332.97464129.8521778.9425112015-02-11 05:17:46
22015.02.011.05.29611148.613333-123.2438333.96617829.8690128.9411352015-02-11 05:17:46
32015.02.011.05.29611148.613333-123.2438334.95771129.9156908.9366612015-02-11 05:17:46
42015.02.011.05.29611148.613333-123.2438335.94923929.9139358.9364892015-02-11 05:17:46
\n", "
" ], "text/plain": [ " Year Month Day Hour Lat Lon Z SA \\\n", "0 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 1.983099 29.849611 \n", "1 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 2.974641 29.852177 \n", "2 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 3.966178 29.869012 \n", "3 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 4.957711 29.915690 \n", "4 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 5.949239 29.913935 \n", "\n", " CT dtUTC \n", "0 8.942753 2015-02-11 05:17:46 \n", "1 8.942511 2015-02-11 05:17:46 \n", "2 8.941135 2015-02-11 05:17:46 \n", "3 8.936661 2015-02-11 05:17:46 \n", "4 8.936489 2015-02-11 05:17:46 " ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df1=et.loadDFOCTD(datelims=(start_date,end_date))\n", "print(len(df1))\n", "df1.head()" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ " from sqlalchemy import create_engine, case\n", " from sqlalchemy.orm import create_session\n", " from sqlalchemy.ext.automap import automap_base\n", " from sqlalchemy.sql import and_, or_, not_, func" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "basedir='/ocean/shared/SalishSeaCastData/DFO/CTD/'\n", "dbname='DFO_CTD.sqlite'\n", "datelims=(start_date,end_date)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ " engine = create_engine('sqlite:///' + basedir + dbname, echo = False)\n", " Base = automap_base()\n", " # reflect the tables in salish.sqlite:\n", " Base.prepare(engine, reflect=True)\n", " # mapped classes have been created\n", " # existing tables:\n", " StationTBL=Base.classes.StationTBL\n", " ObsTBL=Base.classes.ObsTBL\n", " CalcsTBL=Base.classes.CalcsTBL\n", " session = create_session(bind = engine, autocommit = False, autoflush = True)\n", " SA=case([(CalcsTBL.Salinity_T0_C0_SA!=None, CalcsTBL.Salinity_T0_C0_SA)], else_=\n", " case([(CalcsTBL.Salinity_T1_C1_SA!=None, 
CalcsTBL.Salinity_T1_C1_SA)], else_=\n", "                        case([(CalcsTBL.Salinity_SA!=None, CalcsTBL.Salinity_SA)], else_= None)))\n", "    CT=case([(CalcsTBL.Temperature_Primary_CT!=None, CalcsTBL.Temperature_Primary_CT)], else_=\n", "           case([(CalcsTBL.Temperature_Secondary_CT!=None, CalcsTBL.Temperature_Secondary_CT)], else_=CalcsTBL.Temperature_CT))\n", "    ZD=case([(ObsTBL.Depth!=None,ObsTBL.Depth)], else_= CalcsTBL.Z)\n" ] }, { "cell_type": "code", "execution_count": 24, "metadata": {}, "outputs": [], "source": [ "# Prefer the Wetlabs fluorescence channel; fall back to Seapoint where Wetlabs is NULL\n", "Fl=case([(ObsTBL.Fluorescence_URU_Wetlabs!=None, ObsTBL.Fluorescence_URU_Wetlabs)], else_= ObsTBL.Fluorescence_URU_Seapoint)\n", "# Units must track the same channel choice as Fl; the else_ branch previously returned\n", "# Fluorescence_URU_Seapoint (the data column), so Fl_units duplicated Fl numerically\n", "Fl_units=case([(ObsTBL.Fluorescence_URU_Wetlabs!=None, ObsTBL.Fluorescence_URU_Wetlabs_units)], else_= ObsTBL.Fluorescence_URU_Seapoint_units)\n" ] }, { "cell_type": "code", "execution_count": 25, "metadata": {}, "outputs": [], "source": [ "# unpack the analysis date window into the scalars used by the query below\n", "start_y=datelims[0].year\n", "start_m=datelims[0].month\n", "start_d=datelims[0].day\n", "end_y=datelims[1].year\n", "end_m=datelims[1].month\n", "end_d=datelims[1].day" ] }, { "cell_type": "code", "execution_count": 28, "metadata": {}, "outputs": [], "source": [ "# Join station/obs/calcs tables; keep casts with non-negative fluorescence that fall\n", "# inside the [start,end) date window and inside the Salish Sea lat/lon wedge\n", "qry=session.query(StationTBL.StartYear.label('Year'),StationTBL.StartMonth.label('Month'),\n", "                  StationTBL.StartDay.label('Day'),StationTBL.StartHour.label('Hour'),\n", "                  StationTBL.Lat,StationTBL.Lon,ZD.label('Z'),SA.label('SA'),CT.label('CT'),\n", "                  Fl.label('Fl'),Fl_units.label('Fl_units')).\\\n", "        select_from(StationTBL).join(ObsTBL,ObsTBL.StationTBLID==StationTBL.ID).\\\n", "        join(CalcsTBL,CalcsTBL.ObsTBLID==ObsTBL.ID).filter(and_(Fl>=0,\n", "             or_(StationTBL.StartYear>start_y,\n", "                 and_(StationTBL.StartYear==start_y, StationTBL.StartMonth>start_m),\n", "                 and_(StationTBL.StartYear==start_y, StationTBL.StartMonth==start_m, StationTBL.StartDay>=start_d)),\n", "             or_(StationTBL.StartYear<end_y,\n", "                 and_(StationTBL.StartYear==end_y, StationTBL.StartMonth<end_m),\n", "                 and_(StationTBL.StartYear==end_y, StationTBL.StartMonth==end_m, StationTBL.StartDay<end_d)),\n", "             StationTBL.Lat>47-3/2.5*(StationTBL.Lon+123.5),\n", "             StationTBL.Lat<47-3/2.5*(StationTBL.Lon+121),\n", "             StationTBL.Include==True,ObsTBL.Include==True,CalcsTBL.Include==True))\n", 
"df1=pd.DataFrame(qry.all())\n", "df1['dtUTC']=[dt.datetime(int(y),int(m),int(d))+dt.timedelta(hours=h) for y,m,d,h in zip(df1['Year'],df1['Month'],df1['Day'],df1['Hour'])]" ] }, { "cell_type": "code", "execution_count": 29, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
YearMonthDayHourLatLonZSACTFlFl_unitsdtUTC
02015.02.011.05.29611148.613333-123.2438331.98309929.8496118.9427530.5040.5042015-02-11 05:17:46
12015.02.011.05.29611148.613333-123.2438332.97464129.8521778.9425110.5000.5002015-02-11 05:17:46
22015.02.011.05.29611148.613333-123.2438333.96617829.8690128.9411350.4910.4912015-02-11 05:17:46
32015.02.011.05.29611148.613333-123.2438334.95771129.9156908.9366610.4820.4822015-02-11 05:17:46
42015.02.011.05.29611148.613333-123.2438335.94923929.9139358.9364890.4900.4902015-02-11 05:17:46
.......................................
3892015.02.011.011.06861148.300833-124.000333173.45334633.2573588.6762070.1080.1082015-02-11 11:04:07
3902015.02.011.011.06861148.300833-124.000333174.44408633.2884298.6560750.1030.1032015-02-11 11:04:07
3912015.02.011.011.06861148.300833-124.000333175.43482133.3251348.6281470.1050.1052015-02-11 11:04:07
3922015.02.011.011.06861148.300833-124.000333176.42555133.3405368.6160820.1130.1132015-02-11 11:04:07
3932015.02.011.011.06861148.300833-124.000333177.41627633.3358408.6231580.1070.1072015-02-11 11:04:07
\n", "

394 rows × 12 columns

\n", "
" ], "text/plain": [ " Year Month Day Hour Lat Lon Z \\\n", "0 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 1.983099 \n", "1 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 2.974641 \n", "2 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 3.966178 \n", "3 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 4.957711 \n", "4 2015.0 2.0 11.0 5.296111 48.613333 -123.243833 5.949239 \n", ".. ... ... ... ... ... ... ... \n", "389 2015.0 2.0 11.0 11.068611 48.300833 -124.000333 173.453346 \n", "390 2015.0 2.0 11.0 11.068611 48.300833 -124.000333 174.444086 \n", "391 2015.0 2.0 11.0 11.068611 48.300833 -124.000333 175.434821 \n", "392 2015.0 2.0 11.0 11.068611 48.300833 -124.000333 176.425551 \n", "393 2015.0 2.0 11.0 11.068611 48.300833 -124.000333 177.416276 \n", "\n", " SA CT Fl Fl_units dtUTC \n", "0 29.849611 8.942753 0.504 0.504 2015-02-11 05:17:46 \n", "1 29.852177 8.942511 0.500 0.500 2015-02-11 05:17:46 \n", "2 29.869012 8.941135 0.491 0.491 2015-02-11 05:17:46 \n", "3 29.915690 8.936661 0.482 0.482 2015-02-11 05:17:46 \n", "4 29.913935 8.936489 0.490 0.490 2015-02-11 05:17:46 \n", ".. ... ... ... ... ... 
\n", "389 33.257358 8.676207 0.108 0.108 2015-02-11 11:04:07 \n", "390 33.288429 8.656075 0.103 0.103 2015-02-11 11:04:07 \n", "391 33.325134 8.628147 0.105 0.105 2015-02-11 11:04:07 \n", "392 33.340536 8.616082 0.113 0.113 2015-02-11 11:04:07 \n", "393 33.335840 8.623158 0.107 0.107 2015-02-11 11:04:07 \n", "\n", "[394 rows x 12 columns]" ] }, "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df1" ] }, { "cell_type": "code", "execution_count": 31, "metadata": {}, "outputs": [], "source": [ "data=et.matchData(data=df1,filemap=filemap, fdict=fdict, mod_start=start_date, mod_end=end_date, \n", " mod_nam_fmt=namfmt, mod_basedir=PATH, mod_flen=flen)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python (py39)", "language": "python", "name": "py39" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.1" } }, "nbformat": 4, "nbformat_minor": 2 }