From b7bba959e6976f1e513940d3ed3e46bfe97a4bdc Mon Sep 17 00:00:00 2001 From: David Huard Date: Tue, 13 Apr 2021 22:05:03 -0400 Subject: [PATCH 1/8] set rvt.grid_weights to a working default --- ravenpy/models/rv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ravenpy/models/rv.py b/ravenpy/models/rv.py index 47325295..497ef623 100644 --- a/ravenpy/models/rv.py +++ b/ravenpy/models/rv.py @@ -301,7 +301,7 @@ def __init__(self, **kwargs): self.gridded_forcings = () # For a distributed model these weights will be shared among all the StationForcing commands - self.grid_weights = None + self.grid_weights = GridWeightsCommand() self.var_cmds = {} # Dictionary of potential variable names, keyed by CF standard name. From 31992abf69dac4d7ad4a19d3eacf73223d7ec4fc Mon Sep 17 00:00:00 2001 From: David Huard Date: Wed, 14 Apr 2021 09:42:08 -0400 Subject: [PATCH 2/8] account for non-existent hru in most emulators --- ravenpy/models/importers.py | 28 ++++++++++++++++------------ ravenpy/models/rv.py | 2 +- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/ravenpy/models/importers.py b/ravenpy/models/importers.py index 9b27a531..0361c83b 100644 --- a/ravenpy/models/importers.py +++ b/ravenpy/models/importers.py @@ -819,7 +819,8 @@ def _extract_nc_attrs(self, fns): var_name_nc=name, dim_names_nc=v.dims, units=v.attrs.get("units"), - number_grid_cells=v.size / len(ds["time"]), + number_grid_cells=v.size + / len(ds["time"]), # Probably too simplistic. ) try: attrs["latitude"] = ds.cf["latitude"] @@ -841,6 +842,18 @@ def _create_command(self, var, attrs, rvh, rvt=None, nc_index=0): # Remove extra attributes number_grid_cells = attrs.pop("number_grid_cells") + + # Construct default grid weights applying equally to all HRUs + if hasattr(rvh, "hrus"): + data = [(hru.hru_id, nc_index, 1.0) for hru in rvh.hrus] + gw = GridWeightsCommand( + number_hrus=len(rvh.hrus), + number_grid_cells=number_grid_cells, + data=data, + ) + else: + gw = None + for k in coords: attrs.pop(k, None) @@ -872,18 +885,9 @@ def _create_command(self, var, attrs, rvh, rvt=None, nc_index=0): # TODO: implement a RedirectToFile mechanism to avoid inlining the grid weights # multiple times as we do here - # Construct default grid weights applying equally to all HRUs - data = [(hru.hru_id, nc_index, 1.0) for hru in rvh.hrus] - - gw = rvt.grid_weights or GridWeightsCommand( - number_hrus=len(rvh.hrus), - number_grid_cells=number_grid_cells, - data=data, - ) - - return StationForcingCommand(**attrs, grid_weights=gw) + return StationForcingCommand(**attrs, grid_weights=rvt.grid_weights or gw) - return GriddedForcingCommand(**attrs, grid_weights=rvt.grid_weights) + return GriddedForcingCommand(**attrs, grid_weights=rvt.grid_weights or gw) def extract(self, rvh, rvt=None, nc_index=0): out = {"var_cmds": {}} diff --git a/ravenpy/models/rv.py b/ravenpy/models/rv.py index 497ef623..47325295 100644 --- a/ravenpy/models/rv.py +++ b/ravenpy/models/rv.py @@ -301,7 +301,7 @@ def __init__(self, **kwargs): self.gridded_forcings = () # For a distributed model these weights will be shared among all the StationForcing commands - self.grid_weights = GridWeightsCommand() + self.grid_weights = None self.var_cmds = {} # Dictionary of potential variable names, keyed by CF standard name. From f7577ff5d1cebb600121c1f9042e9a1b94fb7971 Mon Sep 17 00:00:00 2001 From: David Huard Date: Wed, 14 Apr 2021 12:01:05 -0400 Subject: [PATCH 3/8] added test demonstrating issue with canopex data - see rvt file --- ... 
=> Running_Raven_with_CANOPEX_data.ipynb} | 308 +++++++++++------- docs/notebooks/index.rst | 2 +- tests/test_emulators.py | 32 +- 3 files changed, 219 insertions(+), 123 deletions(-) rename docs/notebooks/{Running_models_with_multiple_timeseries_files.ipynb => Running_Raven_with_CANOPEX_data.ipynb} (58%) diff --git a/docs/notebooks/Running_models_with_multiple_timeseries_files.ipynb b/docs/notebooks/Running_Raven_with_CANOPEX_data.ipynb similarity index 58% rename from docs/notebooks/Running_models_with_multiple_timeseries_files.ipynb rename to docs/notebooks/Running_Raven_with_CANOPEX_data.ipynb index ea9f40f3..2af3efe2 100644 --- a/docs/notebooks/Running_models_with_multiple_timeseries_files.ipynb +++ b/docs/notebooks/Running_Raven_with_CANOPEX_data.ipynb @@ -4,9 +4,15 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Running a hydrological model with multiple timeseries files\n", + "# Running Raven with Canopex data\n", "\n", - "In this notebook, we show how to provide the hydrological model with multiple timeseries files. For example, one file could contain meteorological data and the other contain streamflow data, or all variables could be separate (i.e. precip, temperature, streamflow, etc.) The following instructions should make it easier to understand how to do this. for this example, we actually start from a netcdf file containing all information, and from there divide it into multiple time series netcdf files. We then use the split files to drive the model. In most user cases, different files will be provided directly by the user so no need to pre-split your files!" + "The Canadian model parameter experiment (CANOPEX) database includes meteorological and hydrometric data as well as watershed boundaries for 698 basins. It is inspired by the US MOPEX project data and methods. \n", + "\n", + "Metadata about watersheds can be found here: http://canopex.etsmtl.net/ for CANOPEX version 1. The THREDDS server stores CANOPEX version 2, which has many more watersheds but no accompanying public metadata. The database will later be updated with [HYSETS](https://osf.io/rpc3w/), the latest version of this hydrological database. \n", + "\n", + "In this notebook, we show how to access CANOPEX data from the Ouranos THREDDS server through OPEnDAP streaming and run Raven with it. Meta data about the watersheds is stored in the `ravenpy/data/regionalization` folder. \n", + "\n", + "In the following, we're using information from the `Station metadata` spreadsheet. 
" ] }, { @@ -27,18 +33,11 @@ "import netCDF4 as nc\n", "import pandas as pd\n", "import xarray as xr\n", - "from birdy import WPSClient\n", - "\n", + "import ravenpy\n", "from ravenpy.utilities.testdata import get_local_testdata\n", "\n", - "\n", - "# Set environment variable WPS_URL to \"http://localhost:9099\" to run on the default local server\n", - "url = os.environ.get(\"WPS_URL\", \"https://pavics.ouranos.ca/twitcher/ows/proxy/raven/wps\")\n", - "wps = WPSClient(url)\n", - "\n", "# DATA MAIN SOURCE - DAP link to CANOPEX dataset\n", - "CANOPEX_DAP = 'https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/ets/Watersheds_5797_cfcompliant.nc'\n", - "CANOPEX_URL = 'https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/fileServer/birdhouse/ets/Watersheds_5797_cfcompliant.nc'" + "CANOPEX_DAP = 'https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/ets/Watersheds_5797_cfcompliant.nc'" ] }, { @@ -424,15 +423,15 @@ " summary: Hydrometeorological database for the PAVICS-Hydro platfor...\n", " institution: ETS (École de technologie supérieure)\n", " DODS.strlen: 72\n", - " DODS.dimName: string72
[HTML repr of the CANOPEX dataset: data variables drainage_area (km2), pr (kg m-2 s-1), tasmax (K), tasmin (K) and discharge (m3 s-1) along (watershed, time); global attributes list the title, contact, data sources, CF-1.6/ACDD-1.3 conventions and ODC-BY license.]
  • " ], "text/plain": [ "\n", @@ -473,16 +472,11 @@ ] }, { - "cell_type": "code", - "execution_count": null, + "attachments": {}, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# Setup some parameters to run the models. See the \"canopex.ipynb\" notebook for more detailed information\n", - "# on these parameters. The data we use comes from the extended CANOPEX database.\n", - "start = dt.datetime(1998, 1, 1)\n", - "stop = dt.datetime(2010, 12, 31)\n", - "watershedID = 5600" + "In this notebook, we run Raven on the Whitemouth river, identified by its watershed name `WHITEMOUTH RIVER NEAR WHITEMOUTH`. The first step is find the index for the Whitemouth data in the CANOPEX data. " ] }, { @@ -491,63 +485,134 @@ "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Basin name: b'WHITEMOUTH RIVER NEAR WHITEMOUTH'\n", - "Latitude: 49.51119663557124 °N\n", - "Area: 3650.476384548832 km^2\n" - ] + "data": { + "text/plain": [ + "b'WHITEMOUTH RIVER NEAR WHITEMOUTH'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ - "# With this info, we can gather some properties from the CANOPEX database:\n", - "tmp = pd.read_csv(get_local_testdata(\"regionalisation_data/gauged_catchment_properties.csv\"))\n", - "basin_area = tmp['area'][watershedID]\n", - "basin_latitude = tmp['latitude'][watershedID]\n", - "basin_longitude = tmp['longitude'][watershedID]\n", - "basin_elevation = tmp['elevation'][watershedID]\n", - "basin_name = ds.watershed[watershedID].data\n", - "\n", - "print(\"Basin name: \", basin_name)\n", - "print(\"Latitude: \", basin_latitude, \" °N\")\n", - "print(\"Area: \", basin_area, \" km^2\")" + "watersheds = pd.Series(ds.watershed)\n", + "name = b'WHITEMOUTH RIVER NEAR WHITEMOUTH'\n", + "i = watersheds[watersheds == name].index.values[0]\n", + "watersheds[i]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## SECTION TO SEPARATE DISCHARGE AND MET DATA TO RECOMBINE LATER" + "We can then retrieve metadata for this watershed from the database packaged with RavenPy. " ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], - "source": [ - "# Define the 2 new files, i.e. the meteorological data and the streamflow data\n", - "filepathMet = Path().cwd().joinpath(\"CANOPEX_Met.nc\")\n", - "filepathQobs = Path().cwd().joinpath(\"CANOPEX_Qobs.nc\")\n", - "\n", - "# Do the extraction for the selected catchment\n", - "newBasin = ds.isel(watershed=watershedID)\n", - "\n", - "# Generate the streamflow time-series netcdf\n", - "Qobsfile = newBasin['discharge']\n", - "Qobsfile.to_netcdf(filepathQobs)\n", - "\n", - "# Generate the meteorological time-series netcdf\n", - "newBasin = newBasin[['drainage_area','pr','tasmax','tasmin']]\n", - "newBasin.to_netcdf(filepathMet)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
    \n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
    Unnamed: 0IDStationIDarealatitudelongitudegraveliusperimeterRunSuccessShapeelevation...RunSuccessTerrainforestgrasswetlandwaterurbanshrubscropssnowIceRunSuccessLandUse
    560056005601.005PH0033650.47638549.511197-95.7218541.908497408762.6306351.0330.589206...1.00.4757520.0299610.3619670.0231680.0076920.063750.037710.01.0
    \n", + "

    1 rows × 22 columns

    \n", + "
    " + ], + "text/plain": [ + " Unnamed: 0 ID StationID area latitude longitude \\\n", + "5600 5600 5601.0 05PH003 3650.476385 49.511197 -95.721854 \n", + "\n", + " gravelius perimeter RunSuccessShape elevation ... \\\n", + "5600 1.908497 408762.630635 1.0 330.589206 ... \n", + "\n", + " RunSuccessTerrain forest grass wetland water urban \\\n", + "5600 1.0 0.475752 0.029961 0.361967 0.023168 0.007692 \n", + "\n", + " shrubs crops snowIce RunSuccessLandUse \n", + "5600 0.06375 0.03771 0.0 1.0 \n", + "\n", + "[1 rows x 22 columns]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "### Here is where we run the model with multiple input time series" + "fn = Path(ravenpy.__file__).parent / \"data\" / \"regionalisation\" / \"gauged_catchment_properties.csv\"\n", + "meta = pd.read_csv(fn)\n", + "meta.loc[[i]]" ] }, { @@ -556,53 +621,62 @@ "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "'observed data series,filename,DIAG_NASH_SUTCLIFFE,DIAG_RMSE,\\nHYDROGRAPH_ALL,/tmp/pywps_process_wru_pjah/input_r2qw5976.nc,-0.12679,30.5267,\\n'" - ] - }, - "metadata": {}, - "output_type": "display_data" + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + " **************************************************************\n", + " Path : /tmp/tmpvkbo5cyj/exec/model/p00\n", + " **************************************************************\n", + " ADVISORY : The required parameter RAIN_ICEPT_PCT for vegetation class VEG_ALL was autogenerated with value 0.120000\n", + "ADVISORY : The required parameter SNOW_ICEPT_PCT for vegetation class VEG_ALL was autogenerated with value 0.100000\n", + "ADVISORY : The required parameter RAIN_ICEPT_PCT for vegetation class VEG_WATER was autogenerated with value 0.120000\n", + "ADVISORY : The required parameter SNOW_ICEPT_PCT for vegetation class VEG_WATER was autogenerated with value 0.100000\n", + "ERROR : ParseTimeSeriesFile: :FileNameNC command: Cannot find gridded data file https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/ets/Watersheds_5797_cfcompliant.nc\n", + "ERROR : ParseTimeSeriesFile: :GriddedForcing and :StationForcing blocks requires valid :FileNameNC command\n", + "\n", + " \n" + ] + }, + { + "ename": "UserWarning", + "evalue": "No output files for Test_run*Hydrographs.nc in /tmp/tmpvkbo5cyj/exec.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mUserWarning\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 18\u001b[0m \u001b[0mparams\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m108.02\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2.8693\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m25.352\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1.3696\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1.2483\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m0.30679\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 19\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 20\u001b[0;31m \u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mts\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mCANOPEX_DAP\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mparams\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mparams\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0;34m**\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 21\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mq_sim\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/src/RavenPy/ravenpy/models/base.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, ts, overwrite, **kwds)\u001b[0m\n\u001b[1;32m 505\u001b[0m )\n\u001b[1;32m 506\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmsg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 507\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 508\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 509\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mresume\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msolution\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/src/RavenPy/ravenpy/models/base.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, ts, overwrite, **kwds)\u001b[0m\n\u001b[1;32m 489\u001b[0m \u001b[0;31m# print(line)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparse_results\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0merr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparse_errors\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 493\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"ERROR\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0merr\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/src/RavenPy/ravenpy/models/base.py\u001b[0m in \u001b[0;36mparse_results\u001b[0;34m(self, path, run_name)\u001b[0m\n\u001b[1;32m 541\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mUserWarning\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mexc\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 542\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;34m\"diagnostics\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 543\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mexc\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 544\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 545\u001b[0m \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/src/RavenPy/ravenpy/models/base.py\u001b[0m in \u001b[0;36mparse_results\u001b[0;34m(self, path, run_name)\u001b[0m\n\u001b[1;32m 538\u001b[0m \u001b[0;31m# There are no diagnostics if a streamflow time series is not provided.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 539\u001b[0m 
\u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 540\u001b[0;31m \u001b[0mfns\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_output\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpattern\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpath\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 541\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mUserWarning\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mexc\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 542\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;34m\"diagnostics\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/src/RavenPy/ravenpy/models/base.py\u001b[0m in \u001b[0;36m_get_output\u001b[0;34m(self, pattern, path)\u001b[0m\n\u001b[1;32m 604\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfiles\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 605\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrvi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mRVI\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrvi\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msuppress_output\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 606\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mUserWarning\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"No output files for {} in {}.\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpattern\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpath\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 607\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 608\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mabsolute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mf\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfiles\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mUserWarning\u001b[0m: No output files for Test_run*Hydrographs.nc in /tmp/tmpvkbo5cyj/exec." + ] } ], "source": [ - "# The model parameters. We are forcing values here just so the model runs, the models are probably very bad choices!\n", - "\n", - "params = '9.5019, 0.2774, 6.3942, 0.6884, 1.2875, 5.4134, 2.3641, 0.0973, 0.0464, 0.1998, 0.0222, -1.0919, ' \\\n", - " '2.6851, 0.3740, 1.0000, 0.4739, 0.0114, 0.0243, 0.0069, 310.7211, 916.1947'\n", + "model = ravenpy.models.GR4JCN()\n", "\n", - "# Model configuration parameters. Please see \"canopex.ipynb\" for more details.\n", - "# This remains unchanged with multiple timeseries!\n", + "# Setup some parameters to run the models. 
\n", "config = dict(\n", - " start_date=start,\n", - " end_date=stop,\n", - " area=basin_area,\n", - " elevation=basin_elevation,\n", - " latitude=basin_latitude,\n", - " longitud =basin_longitude,\n", - " run_name='test_hmets_NRCAN',\n", - " rain_snow_fractio ='RAINSNOW_DINGMAN',\n", - " nc_spec=json.dumps({'tasmax': {'linear_transform': (1.0, -273.15)},'tasmin': {'linear_transform': (1.0, -273.15)},'pr': {'linear_transform': (86400.0, 0.0)}},),\n", + " name=str(name),\n", + " start_date=dt.datetime(2010, 6, 1),\n", + " end_date=dt.datetime(2010, 10, 1),\n", + " nc_index=i,\n", + " run_name=\"Test_run\",\n", + " rain_snow_fraction ='RAINSNOW_DINGMAN',\n", + " tasmax={'offset': -273.15},\n", + " tasmin={'offset': -273.15},\n", + " pr={'scale': 86400.0},\n", + " hrus=[model.LandHRU(**meta[[\"area\", \"latitude\", \"longitude\", \"elevation\"]].loc[i])]\n", ")\n", "\n", + "# The model parameters. \n", + "params = [108.02, 2.8693, 25.352, 1.3696, 1.2483, 0.30679]\n", "\n", - "# Here is where we must tell the model that there are multiple input files. The idea is to combine them into a list of strings,\n", - "# with each string representing a path to a netcdf file. So we could do something like this:\n", - "ts_combined = [filepathMet.as_posix(), filepathQobs.as_posix()]\n", - "resp = wps.raven_hmets(ts=ts_combined, params=params, **config)\n", - "\n", - "# And get the response\n", - "# With `asobj` set to False, only the reference to the output is returned in the response.\n", - "# Setting `asobj` to True will retrieve the actual files and copy the locally.\n", - "[hydrograph, storage, solution, diagnostics, rv] = resp.get(asobj=True)\n", - "display(diagnostics)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### You can even invert the order of the netcdf files. Raven will detect which files contain which variables, so the order is not important!" + "model(ts=CANOPEX_DAP, params=params, **config)\n", + "model.q_sim.plot()" ] }, { @@ -613,31 +687,27 @@ { "data": { "text/plain": [ - "'observed data series,filename,DIAG_NASH_SUTCLIFFE,DIAG_RMSE,\\nHYDROGRAPH_ALL,/tmp/pywps_process_f5cjoe3b/input.nc,-0.12679,30.5267,\\n'" + "{'area': 3650.476384548832,\n", + " 'latitude': 49.51119663557124,\n", + " 'longitude': -95.72185406290754,\n", + " 'elevation': 330.58920561789404}" ] }, + "execution_count": null, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], "source": [ - "# Test with reversed timeseries files:\n", - "ts_combined=[filepathQobs.as_posix(), filepathMet.as_posix()]\n", - "resp = wps.raven_hmets(ts=ts_combined, params=params, **config)\n", - "\n", - "# And get the response\n", - "# With `asobj` set to False, only the reference to the output is returned in the response.\n", - "# Setting `asobj` to True will retrieve the actual files and copy the locally.\n", - "[hydrograph, storage, solution, diagnostics, rv] = resp.get(asobj=True)\n", - "display(diagnostics)" + "dict(meta[[\"area\", \"latitude\", \"longitude\", \"elevation\"]].loc[i])" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "metadata": {}, - "source": [ - "As you can see, the NSE values and RMSE values are identical. You can pass as many NetCDF files as you have variables in any order and it will still work." 
- ] + "outputs": [], + "source": [] } ], "metadata": { @@ -656,7 +726,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.8" } }, "nbformat": 4, diff --git a/docs/notebooks/index.rst b/docs/notebooks/index.rst index 1fa9fbc8..219baf6c 100644 --- a/docs/notebooks/index.rst +++ b/docs/notebooks/index.rst @@ -7,5 +7,5 @@ Examples :caption: Notebooks: RavenPy.ipynb - Running_models_with_multiple_timeseries_files.ipynb + Running_Raven_with_CANOPEX_data.ipynb hydro_routing_product_example.ipynb diff --git a/tests/test_emulators.py b/tests/test_emulators.py index eddea891..7def07eb 100644 --- a/tests/test_emulators.py +++ b/tests/test_emulators.py @@ -655,8 +655,8 @@ def test_dap(self): """Test Raven with DAP link instead of local netCDF file.""" model = GR4JCN() config = dict( - start_date=dt.datetime(2000, 1, 1), - end_date=dt.datetime(2002, 1, 1), + start_date=dt.datetime(2000, 6, 1), + end_date=dt.datetime(2000, 6, 10), run_name="test", name="Salmon", hrus=(GR4JCN.LandHRU(**salmon_land_hru_1),), @@ -668,6 +668,32 @@ def test_dap(self): ) model(ts, **config) + @pytest.mark.online + def test_canopex(self): + CANOPEX_DAP = ( + "https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/ets" + "/Watersheds_5797_cfcompliant.nc" + ) + model = GR4JCN() + config = dict( + name="WHITEMOUTH RIVER NEAR WHITEMOUTH", + start_date=dt.datetime(2010, 6, 1), + end_date=dt.datetime(2010, 6, 10), + nc_index=5600, + run_name="Test_run", + rain_snow_fraction="RAINSNOW_DINGMAN", + tasmax={"offset": -273.15}, + tasmin={"offset": -273.15}, + pr={"scale": 86400.0}, + hrus=[ + model.LandHRU( + area=3650.47, latitude=49.51, longitude=-95.72, elevation=330.59 + ) + ], + params=model.params(108.02, 2.8693, 25.352, 1.3696, 1.2483, 0.30679), + ) + model(ts=CANOPEX_DAP, **config) + class TestGR4JCN_OST: def test_simple(self): @@ -717,7 +743,7 @@ def test_simple(self): # # Random number seed: 123 # # Budget: 50 # # Algorithm: DDS - # # :StartDate 1954-01-01 00:00:00 + # # :StartDate 1954-01-01 00:00:00no # # :Duration 20819 # np.testing.assert_almost_equal( opt_para, [0.3243268,3.034247,407.2890,2.722774,12.18124,0.9468769], 4, # err_msg='calibrated parameter set is not matching expected value') From 8d22e85f62e429a6c8edce0cb90f5565f4cfa3eb Mon Sep 17 00:00:00 2001 From: David Huard Date: Wed, 14 Apr 2021 14:01:14 -0400 Subject: [PATCH 4/8] pass nc_index to StationForcing command --- docs/installation.rst | 8 ++++++++ ravenpy/models/importers.py | 2 +- setup.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/installation.rst b/docs/installation.rst index e7bd3cda..8caf7b35 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -132,3 +132,11 @@ If everything was properly installed the test suite should run successfully: (ravenpy-env) $ export RAVENPY_TESTDATA_PATH=/path/to/raven-testdata (ravenpy-env) $ pytest tests + +Or set the conda environment variable permanently: + +.. code-block:: console + + (ravenpy-env) $ conda env config vars set RAVENPY_TESTDATA_PATH=/path/to/raven-testdata + +then deactivate and reactivate the environment. 
diff --git a/ravenpy/models/importers.py b/ravenpy/models/importers.py index 0361c83b..5d38302f 100644 --- a/ravenpy/models/importers.py +++ b/ravenpy/models/importers.py @@ -881,7 +881,7 @@ def _create_command(self, var, attrs, rvh, rvt=None, nc_index=0): raise Exception( "Could not find an outlet subbasin for observation data" ) - return ObservationDataCommand(**attrs) + return ObservationDataCommand(**attrs, index=nc_index) # TODO: implement a RedirectToFile mechanism to avoid inlining the grid weights # multiple times as we do here diff --git a/setup.py b/setup.py index 9e6bddf0..89fa5ae8 100644 --- a/setup.py +++ b/setup.py @@ -170,7 +170,7 @@ def run(self): self.external_deps_path.mkdir(exist_ok=True) url = "http://www.civil.uwaterloo.ca/jmai/raven/" - self.install_binary_dep(url, "raven", "Raven-rev312", "Raven.exe") + self.install_binary_dep(url, "raven", "Raven-rev318", "Raven.exe") url = "https://github.com/usbr/ostrich/archive/refs/tags/" self.install_binary_dep( From 9f82f20a8a5ed7a476e26b82d287c603abdd2394 Mon Sep 17 00:00:00 2001 From: David Huard Date: Wed, 14 Apr 2021 14:31:44 -0400 Subject: [PATCH 5/8] make sure index starts at 1. --- ravenpy/models/commands.py | 2 +- ravenpy/models/importers.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ravenpy/models/commands.py b/ravenpy/models/commands.py index 039c313a..3750f811 100644 --- a/ravenpy/models/commands.py +++ b/ravenpy/models/commands.py @@ -293,7 +293,7 @@ def asdict(self): @dataclass class DataCommand(BaseDataCommand): - index: int = 1 + index: int = 1 # Indexing starts with 1. data_type: str = "" site: str = "" var: str = "" diff --git a/ravenpy/models/importers.py b/ravenpy/models/importers.py index 5d38302f..25c0a282 100644 --- a/ravenpy/models/importers.py +++ b/ravenpy/models/importers.py @@ -881,7 +881,7 @@ def _create_command(self, var, attrs, rvh, rvt=None, nc_index=0): raise Exception( "Could not find an outlet subbasin for observation data" ) - return ObservationDataCommand(**attrs, index=nc_index) + return ObservationDataCommand(**attrs, index=nc_index + 1) # TODO: implement a RedirectToFile mechanism to avoid inlining the grid weights # multiple times as we do here From 4714a41516b1c4a36e8847672981cec1ab0cf139 Mon Sep 17 00:00:00 2001 From: David Huard Date: Wed, 14 Apr 2021 16:07:55 -0400 Subject: [PATCH 6/8] force nc_index to 0 when None --- HISTORY.rst | 7 +++++++ ravenpy/models/importers.py | 2 ++ tests/test_emulators.py | 2 +- 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/HISTORY.rst b/HISTORY.rst index 889db5f7..6700b894 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -2,6 +2,13 @@ History ======= +0.4.2 +----- +* Update to RavenC revision 318 to fix OPENDAP access for StationForcing commands +* Fix grid_weights set to None by default +* Pass nc_index to ObservationData command +* Expose more cleanly RavenC errors and warnings + 0.4.1 ----- diff --git a/ravenpy/models/importers.py b/ravenpy/models/importers.py index 25c0a282..44fd9ed0 100644 --- a/ravenpy/models/importers.py +++ b/ravenpy/models/importers.py @@ -837,6 +837,8 @@ def _extract_nc_attrs(self, fns): self.attrs[var] = attrs def _create_command(self, var, attrs, rvh, rvt=None, nc_index=0): + if nc_index is None: + nc_index = 0 coords = {"latitude", "longitude", "elevation"} dims = attrs["dim_names_nc"] diff --git a/tests/test_emulators.py b/tests/test_emulators.py index e4757327..60c8f10c 100644 --- a/tests/test_emulators.py +++ b/tests/test_emulators.py @@ -756,7 +756,7 @@ def test_simple(self): # # 
Random number seed: 123 # # Budget: 50 # # Algorithm: DDS - # # :StartDate 1954-01-01 00:00:00no + # # :StartDate 1954-01-01 00:00:00 # # :Duration 20819 # np.testing.assert_almost_equal( opt_para, [0.3243268,3.034247,407.2890,2.722774,12.18124,0.9468769], 4, # err_msg='calibrated parameter set is not matching expected value') From 05abf0bb8aecf35e30b274ee03d731d09bf028d8 Mon Sep 17 00:00:00 2001 From: David Huard Date: Wed, 14 Apr 2021 16:09:05 -0400 Subject: [PATCH 7/8] bumpversion -> 0.4.2 --- .cruft.json | 2 +- ravenpy/__version__.py | 2 +- setup.cfg | 2 +- setup.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.cruft.json b/.cruft.json index 2ebc65f4..17e4cb16 100644 --- a/.cruft.json +++ b/.cruft.json @@ -10,7 +10,7 @@ "project_slug": "ravenpy", "project_short_description": "A Python wrapper to setup and run the hydrologic modelling framework Raven.", "pypi_username": "CSHS-CWRA", - "version": "0.4.1", + "version": "0.4.2", "use_pytest": "y", "use_pypi_deployment_with_travis": "y", "add_pyup_badge": "y", diff --git a/ravenpy/__version__.py b/ravenpy/__version__.py index a3d8f33b..f4ffec92 100644 --- a/ravenpy/__version__.py +++ b/ravenpy/__version__.py @@ -6,4 +6,4 @@ __author__ = """David Huard""" __email__ = "huard.david@ouranos.ca" -__version__ = "0.4.1" +__version__ = "0.4.2" diff --git a/setup.cfg b/setup.cfg index 055b213b..57ffcc47 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.4.1 +current_version = 0.4.2 commit = False tag = False diff --git a/setup.py b/setup.py index 89fa5ae8..b620d63e 100644 --- a/setup.py +++ b/setup.py @@ -235,7 +235,7 @@ def run(self): gis=gis_requirements, ), url="https://github.com/CSHS-CWRA/ravenpy", - version="0.4.1", + version="0.4.2", zip_safe=False, cmdclass={ "install": create_external_deps_install_class(install), From df9c8f6a92191d1b3443871858fcbf505d8225b5 Mon Sep 17 00:00:00 2001 From: David Huard Date: Wed, 14 Apr 2021 16:43:09 -0400 Subject: [PATCH 8/8] update notebooks --- .pre-commit-config.yaml | 7 -- docs/notebooks/RavenPy.ipynb | 92 +++++++-------- .../Running_Raven_with_CANOPEX_data.ipynb | 108 ++++++------------ .../climatological_ESP_forecasting.ipynb | 10 +- .../hydro_routing_product_example.ipynb | 26 ++--- ravenpy/utilities/nb_graphs.py | 5 +- 6 files changed, 98 insertions(+), 150 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2739aacd..a8551bbf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,13 +7,6 @@ repos: hooks: - id: pyupgrade language_version: python3 -- repo: https://github.com/kynan/nbstripout - rev: 0.3.9 - hooks: - - id: nbstripout - language_version: python3 - files: ".ipynb" - args: ["--keep-output"] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v3.4.0 hooks: diff --git a/docs/notebooks/RavenPy.ipynb b/docs/notebooks/RavenPy.ipynb index fc01fd7a..05011581 100644 --- a/docs/notebooks/RavenPy.ipynb +++ b/docs/notebooks/RavenPy.ipynb @@ -15,7 +15,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [ { @@ -62,7 +62,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -74,7 +74,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -446,28 +446,28 @@ " Conventions: CF-1.6\n", " featureType: timeSeries\n", " history: Created by Raven\n", - " description: Standard Output
  • " ], "text/plain": [ "\n", @@ -488,7 +488,7 @@ " description: Standard Output" ] }, - "execution_count": null, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -499,16 +499,16 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "[]" + "[]" ] }, - "execution_count": null, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" }, @@ -540,7 +540,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -569,7 +569,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -3599,12 +3599,12 @@ "\n", "\n", "\n", - "
    \n", + "
    \n", "\n", "