-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Added in base class for downloading flat files * Added in snowex and csas flat files * Added a test for checking valid links * Added in test datasets * Rearranged the base validation to avoid complexity flake issues. Converted from assertions to exceptions! * Broke out validation checks. Added associated tests
- Loading branch information
1 parent
43d578d
commit 58dc522
Showing
28 changed files
with
5,877 additions
and
22 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -111,3 +111,4 @@ ENV/ | |
# scratch dir | ||
scratch/ | ||
**/.ipynb_checkpoints/* | ||
**/cache/** |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,80 @@ | ||
""" | ||
Data reader for the Center for Snow and Avalanche Studies | ||
""" | ||
import os
from datetime import datetime, timedelta
from urllib.parse import urljoin

from metloom.pointdata import CSVPointData, StationInfo
from metloom.variables import CSASVariables
|
||
|
||
class InvalidDateRange(Exception):
    """
    Raised when a requested period falls outside the date range for
    which CSAS data is known to exist.
    """
|
||
|
||
class CSASStationInfo(StationInfo):
    # Each member is a tuple of:
    #   Name, id, lat, long, elevation, http path
    # The http path is relative to CSASMet.URL and points at the station's
    # hourly CSV. SASP/SBSP paths name the 2003-2009 file; the later-years
    # file is derived from it in CSASMet._file_urls.
    # NOTE(review): elevation appears to be in feet (12186 for Senator Beck
    # is ~3714 m, which matches the site) — confirm against CSAS docs.
    SENATOR_BECK = ("Senator Beck Study Plot", "SBSP", 37.90688, -107.72627, 12186,
                    "2023/11/SBSP_1hr_2003-2009.csv")
    SWAMP_ANGEL = ("Swamp Angel Study Plot", "SASP", 37.90691, -107.71132, 11060,
                   "2023/11/SASP_1hr_2003-2009.csv")
    PUTNEY = ("Putney Study Plot", "PTSP", 37.89233, -107.69577, 12323,
              "2023/11/PTSP_1hr.csv")
    SENATOR_BECK_STREAM_GAUGE = ("Senator Beck Stream Gauge", "SBSG", 37.90678,
                                 -107.70943, 11030, "2023/11/SBSG_1hr.csv")
|
||
|
||
class CSASMet(CSVPointData):
    """
    Point-data reader for the Center for Snow and Avalanche Studies (CSAS)
    hourly CSV files hosted on snowstudies.org.

    Data for the SASP and SBSP stations is split across two files
    (2003-2009 and 2010 through the last complete year); the other
    stations each have a single file.
    """
    ALLOWED_VARIABLES = CSASVariables
    ALLOWED_STATIONS = CSASStationInfo

    # Data is in Mountain time
    UTC_OFFSET_HOURS = -7

    URL = "https://snowstudies.org/wp-content/uploads/"
    DATASOURCE = "CSAS"
    DOI = ""

    def _file_urls(self, station_id, start, end):
        """
        Build the list of CSV urls needed to cover the requested period.

        Data for SASP and SBSP is stored in two csvs: 2003-2009 and
        2010 through the last complete year. Not sure what happens when
        the next year is made available. This function grabs the
        necessary urls depending on the requested data.

        Args:
            station_id: Station identifier, e.g. 'SASP'
            start: datetime marking the beginning of the requested period
            end: datetime marking the end of the requested period

        Returns:
            List of full urls to the station's csv file(s)

        Raises:
            InvalidDateRange: if the requested period falls outside the
                years with published data (SASP/SBSP only).
        """
        urls = []

        if station_id in ['SASP', 'SBSP']:
            # Assume the most recently published file runs through the
            # last complete calendar year.
            current_available_year = datetime.today().year - 1

            # Fail fast before building any urls.
            if start.year < 2003 or end.year > current_available_year:
                raise InvalidDateRange(f"CSAS data is only available from 2003-"
                                       f"{current_available_year}")

            if start.year <= 2009:
                # urljoin (not os.path.join) so the separator is always '/',
                # even on Windows.
                urls.append(urljoin(self.URL, self._station_info.path))

            # Account for later file use or even straddling the two files
            if start.year > 2009 or end.year > 2009:  # TODO: add to the info enum?
                partial = str(self._station_info.path).replace("2003", "2010")
                filename = partial.replace('2009', str(current_available_year))
                urls.append(urljoin(self.URL, filename))
        else:
            urls.append(urljoin(self.URL, self._station_info.path))

        return urls

    @staticmethod
    def _parse_datetime(row):
        """
        Build a datetime from a row's 'Year', 'DOY' and 'Hour' columns.

        'Hour' is an HHMM-style integer (e.g. 1300), hence the
        division by 100.
        """
        # Julian day is not zero based: Jan 1 == DOY 1
        dt = timedelta(days=int(row['DOY']) - 1, hours=int(row['Hour'] / 100))
        return datetime(int(row['Year']), 1, 1) + dt

    def _assign_datetime(self, resp_df):
        """
        Derive a 'datetime' column from the Year/DOY/Hour columns and set
        it as the dataframe index.
        """
        resp_df['datetime'] = resp_df.apply(lambda row: self._parse_datetime(row),
                                            axis=1)
        return resp_df.set_index('datetime')
Oops, something went wrong.