From dc3d2895f291692edb257e32ad2ea838530139e7 Mon Sep 17 00:00:00 2001
From: Nate Kean <14845347+garlic-os@users.noreply.github.com>
Date: Wed, 7 Aug 2024 12:22:54 -0500
Subject: [PATCH] Normalize import aliases

The following packages are now imported the same way across all files:
- import datetime as dt
- import xarray as xr
- import multiprocessing as mp
---
 tools/RAiDER/aria/calcGUNW.py           | 10 ++--
 tools/RAiDER/checkArgs.py               |  2 +-
 tools/RAiDER/cli/statsPlot.py           |  6 +-
 tools/RAiDER/delay.py                   | 32 ++++++-----
 tools/RAiDER/gnss/downloadGNSSDelays.py |  4 +-
 tools/RAiDER/gnss/processDelayFiles.py  | 27 +++++----
 tools/RAiDER/llreader.py                |  8 +--
 tools/RAiDER/losreader.py               | 30 +++++-----
 tools/RAiDER/models/hrrr.py             |  6 +-
 tools/RAiDER/s1_azimuth_timing.py       | 74 ++++++++++++-------------
 tools/RAiDER/s1_orbits.py               | 15 ++---
 tools/RAiDER/utilFcns.py                | 38 ++++++-------
 12 files changed, 127 insertions(+), 125 deletions(-)

diff --git a/tools/RAiDER/aria/calcGUNW.py b/tools/RAiDER/aria/calcGUNW.py
index abf0748f..2f6af058 100644
--- a/tools/RAiDER/aria/calcGUNW.py
+++ b/tools/RAiDER/aria/calcGUNW.py
@@ -2,8 +2,8 @@
 Calculate the interferometric phase from the 4 delays files of a GUNW and
 write it to disk.
 """
+import datetime as dt
 import os
-from datetime import datetime
 from pathlib import Path
 
 import h5py
@@ -41,9 +41,9 @@ def compute_delays_slc(cube_paths: list[Path], wavelength: float) -> xr.Dataset:
         Formatted dataset for GUNW
     """
     # parse date from filename
-    dct_delays: dict[datetime, Path] = {}
+    dct_delays: dict[dt.datetime, Path] = {}
     for path in cube_paths:
-        date = datetime.strptime(path.name.split('_')[2], '%Y%m%dT%H%M%S')
+        date = dt.datetime.strptime(path.name.split('_')[2], '%Y%m%dT%H%M%S')
         dct_delays[date] = path
 
     sec, ref = sorted(dct_delays.keys())
@@ -52,8 +52,8 @@ def compute_delays_slc(cube_paths: list[Path], wavelength: float) -> xr.Dataset:
     hyd_delays: list[xr.DataArray] = []
     attrs_lst: list[dict] = []
     phase2range = (-4 * np.pi) / float(wavelength)
-    for dt in [ref, sec]:
-        path = dct_delays[dt]
+    for datetime in [ref, sec]:
+        path = dct_delays[datetime]
         with xr.open_dataset(path) as ds:
             da_wet = ds['wet'] * phase2range
             da_hydro = ds['hydro'] * phase2range
diff --git a/tools/RAiDER/checkArgs.py b/tools/RAiDER/checkArgs.py
index 5397681f..0a9b0650 100644
--- a/tools/RAiDER/checkArgs.py
+++ b/tools/RAiDER/checkArgs.py
@@ -115,7 +115,7 @@ def makeDelayFileNames(date: Optional[dt.date], los: Optional[LOS], outformat: s
     return names for the wet and hydrostatic delays.
     # Examples:
-    >>> makeDelayFileNames(datetime(2020, 1, 1, 0, 0, 0), None, "h5", "model_name", "some_dir")
+    >>> makeDelayFileNames(dt.datetime(2020, 1, 1, 0, 0, 0), None, "h5", "model_name", "some_dir")
     ('some_dir/model_name_wet_00_00_00_ztd.h5', 'some_dir/model_name_hydro_00_00_00_ztd.h5')
     >>> makeDelayFileNames(None, None, "h5", "model_name", "some_dir")
     ('some_dir/model_name_wet_ztd.h5', 'some_dir/model_name_hydro_ztd.h5')
diff --git a/tools/RAiDER/cli/statsPlot.py b/tools/RAiDER/cli/statsPlot.py
index c647b7fb..bddde5cb 100755
--- a/tools/RAiDER/cli/statsPlot.py
+++ b/tools/RAiDER/cli/statsPlot.py
@@ -9,7 +9,7 @@
 import copy
 import datetime as dt
 import itertools
-import multiprocessing
+import multiprocessing as mp
 import os
 import warnings
 
@@ -861,7 +861,7 @@ def create_variograms(self):
             grid_subset = self.df[self.df['gridnode'] == i]
             args.append((i, grid_subset))
         # Parallelize iteration through all grid-cells and time slices
-        with multiprocessing.Pool(self.numCPUs) as multipool:
+        with mp.Pool(self.numCPUs) as multipool:
             for i, j, k, l in multipool.starmap(self._append_variogram, args):
                 self.TOT_good_slices.extend(i)
                 self.TOT_res_robust_arr.extend(j)
@@ -1819,7 +1819,7 @@ def create_DF(self) -> None:
                 )
             )
         # Parallelize iteration through all grid-cells and time slices
-        with multiprocessing.Pool(self.numCPUs) as multipool:
+        with mp.Pool(self.numCPUs) as multipool:
             for i, j, k, l, m, n, o in multipool.starmap(self._amplitude_and_phase, args):
                 self.ampfit.extend(i)
                 self.phsfit.extend(j)
diff --git a/tools/RAiDER/delay.py b/tools/RAiDER/delay.py
index 47408982..24a9538b 100755
--- a/tools/RAiDER/delay.py
+++ b/tools/RAiDER/delay.py
@@ -13,15 +13,15 @@
 "wet_total" and "hydro_total" fields specified.
 """
+import datetime as dt
 import os
-from datetime import datetime, timezone
 from typing import Optional, Union
 
 from RAiDER.types import CRSLike
 from RAiDER.utilFcns import parse_crs
 import numpy as np
 import pyproj
-import xarray
+import xarray as xr
 from pyproj import CRS, Transformer
 
 from RAiDER.constants import _ZREF
@@ -29,11 +29,13 @@
 from RAiDER.llreader import AOI, BoundingBox, Geocube
 from RAiDER.logger import logger
 from RAiDER.losreader import LOS, build_ray
+from RAiDER.types import CRSLike
+from RAiDER.utilFcns import parse_crs
 
 
 ###############################################################################
 def tropo_delay(
-    dt: datetime,
+    datetime: dt.datetime,
     weather_model_file: str,
     aoi: AOI,
     los: LOS,
@@ -49,7 +51,7 @@
     3. Slant delays integrated along the raypath (STD-raytracing)
 
     Args:
-        dt: Datetime - Datetime object for determining when to calculate delays
+        datetime: Datetime - Datetime object for determining when to calculate delays
         weather_model_File: string - Name of the NETCDF file containing a pre-processed weather model
         aoi: AOI object - AOI object
         los: LOS object - LOS object
@@ -63,7 +65,7 @@
     crs = CRS(out_proj)
 
     # Load CRS from weather model file
-    with xarray.load_dataset(weather_model_file) as ds:
+    with xr.load_dataset(weather_model_file) as ds:
         try:
             wm_proj = CRS.from_wkt(ds['proj'].attrs['crs_wkt'])
         except KeyError:
@@ -73,7 +75,7 @@
             wm_proj = CRS.from_epsg(4326)
 
     # get heights
-    with xarray.load_dataset(weather_model_file) as ds:
+    with xr.load_dataset(weather_model_file) as ds:
         wm_levels = ds.z.values
         toa = wm_levels.max() - 1
 
@@ -93,7 +95,7 @@
     )
 
     # TODO: expose this as library function
-    ds = _get_delays_on_cube(dt, weather_model_file, wm_proj, aoi, height_levels, los, crs, zref)
+    ds = _get_delays_on_cube(datetime, weather_model_file, wm_proj, aoi, height_levels, los, crs, zref)
 
     if isinstance(aoi, (BoundingBox, Geocube)):
         return ds, None
@@ -120,7 +122,7 @@
 
     # return the delays (ZTD or STD)
     if los.is_Projected():
-        los.setTime(dt)
+        los.setTime(datetime)
         los.setPoints(lats, lons, hgts)
         wetDelay = los(wetDelay)
         hydroDelay = los(hydroDelay)
@@ -128,14 +130,14 @@
     return wetDelay, hydroDelay
 
 
-def _get_delays_on_cube(dt, weather_model_file, wm_proj, aoi, heights, los, crs, zref, nproc=1):
+def _get_delays_on_cube(datetime: dt.datetime, weather_model_file, wm_proj, aoi, heights, los, crs, zref, nproc=1):
     """Raider cube generation function."""
     zpts = np.array(heights)
 
     try:
         aoi.xpts
     except AttributeError:
-        with xarray.load_dataset(weather_model_file) as ds:
+        with xr.load_dataset(weather_model_file) as ds:
             x_spacing = ds.x.diff(dim='x').values.mean()
             y_spacing = ds.y.diff(dim='y').values.mean()
             aoi.set_output_spacing(ll_res=np.min([x_spacing, y_spacing]))
@@ -186,7 +188,7 @@
             logger.critical('There are missing delay values. Check your inputs.')
 
     # Write output file
-    ds = writeResultsToXarray(dt, aoi.xpts, aoi.ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type)
+    ds = writeResultsToXarray(datetime, aoi.xpts, aoi.ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type)
     return ds
@@ -324,10 +326,10 @@
     return outputArrs
 
 
-def writeResultsToXarray(dt, xpts, ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type):
+def writeResultsToXarray(datetime: dt.datetime, xpts, ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type):
     """Write a 1-D array to a NETCDF5 file."""
     # Modify this as needed for NISAR / other projects
-    ds = xarray.Dataset(
+    ds = xr.Dataset(
         data_vars=dict(
             wet=(
                 ['z', 'y', 'x'],
@@ -359,9 +361,9 @@
             Conventions='CF-1.7',
             title='RAiDER geo cube',
             source=os.path.basename(weather_model_file),
-            history=str(datetime.now(tz=timezone.utc)) + ' RAiDER',
+            history=str(dt.datetime.now(tz=dt.timezone.utc)) + ' RAiDER',
             description=f'RAiDER geo cube - {out_type}',
-            reference_time=dt.strftime('%Y%m%dT%H:%M:%S'),
+            reference_time=datetime.strftime('%Y%m%dT%H:%M:%S'),
         ),
     )
diff --git a/tools/RAiDER/gnss/downloadGNSSDelays.py b/tools/RAiDER/gnss/downloadGNSSDelays.py
index db634ccb..67c97c9f 100755
--- a/tools/RAiDER/gnss/downloadGNSSDelays.py
+++ b/tools/RAiDER/gnss/downloadGNSSDelays.py
@@ -6,7 +6,7 @@
 #
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 import itertools
-import multiprocessing
+import multiprocessing as mp
 import os
 
 import pandas as pd
@@ -126,7 +126,7 @@ def download_tropo_delays(
 
     # Parallelize remote querying of station locations
     results = []
-    with multiprocessing.Pool(numCPUs) as multipool:
+    with mp.Pool(numCPUs) as multipool:
         # only record valid path
         if gps_repo == 'UNR':
             results = [fileurl for fileurl in multipool.starmap(download_UNR, stat_year_tup) if fileurl['path']]
diff --git a/tools/RAiDER/gnss/processDelayFiles.py b/tools/RAiDER/gnss/processDelayFiles.py
index 21d01a37..24eaa476 100644
--- a/tools/RAiDER/gnss/processDelayFiles.py
+++ b/tools/RAiDER/gnss/processDelayFiles.py
@@ -1,6 +1,6 @@
 import argparse
-import datetime
 import glob
+import datetime as dt
 import math
 import os
 import re
@@ -58,8 +58,7 @@ def addDateTimeToFiles(fileList, force=False, verbose=False) -> None:
             )
         else:
             try:
-                dt = getDateTime(f)
-                data['Datetime'] = dt
+                data['Datetime'] = getDateTime(path)
                 # drop all lines with nans
                 data.dropna(how='any', inplace=True)
                 # drop all duplicate lines
@@ -82,29 +81,29 @@
 def update_time(row, localTime_hrs):
     """Update with local origin time."""
     localTime_estimate = row['Datetime'].replace(hour=localTime_hrs, minute=0, second=0)
     # determine if you need to shift days
-    time_shift = datetime.timedelta(days=0)
+    time_shift = dt.timedelta(days=0)
     # round to nearest hour
     days_diff = (
-        row['Datetime'] - datetime.timedelta(seconds=math.floor(row['Localtime']) * 3600)
+        row['Datetime'] - dt.timedelta(seconds=math.floor(row['Localtime']) * 3600)
     ).day - localTime_estimate.day
     # if lon <0, check if you need to add day
     if row['Lon'] < 0:
         # add day
         if days_diff != 0:
-            time_shift = datetime.timedelta(days=1)
+            time_shift = dt.timedelta(days=1)
     # if lon >0, check if you need to subtract day
     if row['Lon'] > 0:
         # subtract day
         if days_diff != 0:
-            time_shift = -datetime.timedelta(days=1)
-    return localTime_estimate + datetime.timedelta(seconds=row['Localtime'] * 3600) + time_shift
+            time_shift = -dt.timedelta(days=1)
+    return localTime_estimate + dt.timedelta(seconds=row['Localtime'] * 3600) + time_shift
 
 
 def pass_common_obs(reference, target, localtime=None):
     """Pass only observations in target spatiotemporally common to reference."""
     if isinstance(target['Datetime'].iloc[0], str):
         target['Datetime'] = target['Datetime'].apply(
-            lambda x: datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
+            lambda x: dt.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
         )
     if localtime:
         return target[
@@ -172,8 +171,8 @@ def local_time_filter(raiderFile, ztdFile, dfr, dfz, localTime):
         dfz['Localtime'] = dfz.apply(lambda r: update_time(r, localTime_hrs), axis=1)
 
     # filter out data outside of --localtime hour threshold
-    dfr['Localtime_u'] = dfr['Localtime'] + datetime.timedelta(hours=localTime_hrthreshold)
-    dfr['Localtime_l'] = dfr['Localtime'] - datetime.timedelta(hours=localTime_hrthreshold)
+    dfr['Localtime_u'] = dfr['Localtime'] + dt.timedelta(hours=localTime_hrthreshold)
+    dfr['Localtime_l'] = dfr['Localtime'] - dt.timedelta(hours=localTime_hrthreshold)
     OG_total = dfr.shape[0]
     dfr = dfr[(dfr['Datetime'] >= dfr['Localtime_l']) & (dfr['Datetime'] <= dfr['Localtime_u'])]
 
@@ -182,8 +181,8 @@
         f'Total number of datapoints dropped in {raiderFile} for not being within {localTime.split(" ")[1]} hrs of '
         f'specified local-time {localTime.split(" ")[0]}: {dfr.shape[0]} out of {OG_total}'
     )
-    dfz['Localtime_u'] = dfz['Localtime'] + datetime.timedelta(hours=localTime_hrthreshold)
-    dfz['Localtime_l'] = dfz['Localtime'] - datetime.timedelta(hours=localTime_hrthreshold)
+    dfz['Localtime_u'] = dfz['Localtime'] + dt.timedelta(hours=localTime_hrthreshold)
+    dfz['Localtime_l'] = dfz['Localtime'] - dt.timedelta(hours=localTime_hrthreshold)
     OG_total = dfz.shape[0]
     dfz = dfz[(dfz['Datetime'] >= dfz['Localtime_l']) & (dfz['Datetime'] <= dfz['Localtime_u'])]
     # only keep observation closest to Localtime
@@ -209,7 +208,7 @@ def readZTDFile(filename, col_name='ZTD'):
     """Read and parse a GPS zenith delay file."""
     try:
         data = pd.read_csv(filename, parse_dates=['Date'])
-        times = data['times'].apply(lambda x: datetime.timedelta(seconds=x))
+        times = data['times'].apply(lambda x: dt.timedelta(seconds=x))
         data['Datetime'] = data['Date'] + times
     except (KeyError, ValueError):
         data = pd.read_csv(filename, parse_dates=['Datetime'])
diff --git a/tools/RAiDER/llreader.py b/tools/RAiDER/llreader.py
index 7056ad29..ca765f10 100644
--- a/tools/RAiDER/llreader.py
+++ b/tools/RAiDER/llreader.py
@@ -13,7 +13,7 @@ from RAiDER.types import BB, RIO
 import numpy as np
 import pyproj
-import xarray
+import xarray as xr
 
 try:
@@ -369,21 +369,21 @@ def __init__(self, path_cube, cube_spacing_in_m: Optional[float]=None) -> None:
         _, self._proj, self._geotransform = rio_stats(path_cube)
 
     def get_extent(self):
-        with xarray.open_dataset(self.path) as ds:
+        with xr.open_dataset(self.path) as ds:
             S, N = ds.latitude.min().item(), ds.latitude.max().item()
             W, E = ds.longitude.min().item(), ds.longitude.max().item()
 
         return [S, N, W, E]
 
     ## untested
     def readLL(self) -> tuple[np.ndarray, np.ndarray]:
-        with xarray.open_dataset(self.path) as ds:
+        with xr.open_dataset(self.path) as ds:
             lats = ds.latitutde.data()
             lons = ds.longitude.data()
         Lats, Lons = np.meshgrid(lats, lons)
         return Lats, Lons
 
     def readZ(self):
-        with xarray.open_dataset(self.path) as ds:
+        with xr.open_dataset(self.path) as ds:
             heights = ds.heights.data
         return heights
diff --git a/tools/RAiDER/losreader.py b/tools/RAiDER/losreader.py
index 096e4e86..05eb6e65 100644
--- a/tools/RAiDER/losreader.py
+++ b/tools/RAiDER/losreader.py
@@ -6,7 +6,7 @@
 #
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-import datetime
+import datetime as dt
 import os
 import shelve
 from abc import ABC
@@ -59,8 +59,8 @@ def setPoints(self, lats, lons=None, heights=None) -> None:
         self._lons = lons
         self._heights = heights
 
-    def setTime(self, dt) -> None:
-        self._time = dt
+    def setTime(self, datetime) -> None:
+        self._time = datetime
 
     def is_Zenith(self):
         return self._is_zenith
@@ -316,7 +316,7 @@ def getZenithLookVecs(lats, lons, heights):
     return np.stack([x, y, z], axis=-1)
 
 
-def get_sv(los_file: Union[str, list, PosixPath], ref_time: datetime.datetime, pad: int):
+def get_sv(los_file: Union[str, list, PosixPath], ref_time: dt.datetime, pad: int):
     """
     Read an LOS file and return orbital state vectors.
 
@@ -453,7 +453,7 @@ def read_txt_file(filename):
         for line in f:
             try:
                 parts = line.strip().split()
-                t_ = datetime.datetime.fromisoformat(parts[0])
+                t_ = dt.datetime.fromisoformat(parts[0])
                 x_, y_, z_, vx_, vy_, vz_ = (float(t) for t in parts[1:])
             except ValueError:
                 raise ValueError(
@@ -506,7 +506,7 @@ def read_ESA_Orbit_file(filename):
     vz = np.ones(numOSV)
 
     for i, st in enumerate(data_block[0]):
-        t.append(datetime.datetime.strptime(st[1].text, 'UTC=%Y-%m-%dT%H:%M:%S.%f'))
+        t.append(dt.datetime.strptime(st[1].text, 'UTC=%Y-%m-%dT%H:%M:%S.%f'))
 
         x[i] = float(st[4].text)
         y[i] = float(st[5].text)
@@ -518,13 +518,13 @@
     return [t, x, y, z, vx, vy, vz]
 
 
-def pick_ESA_orbit_file(list_files: list, ref_time: datetime.datetime):
+def pick_ESA_orbit_file(list_files: list, ref_time: dt.datetime):
     """From list of .EOF orbit files, pick the one that contains 'ref_time'."""
     orb_file = None
     for path in list_files:
         f = os.path.basename(path)
-        t0 = datetime.datetime.strptime(f.split('_')[6].lstrip('V'), '%Y%m%dT%H%M%S')
-        t1 = datetime.datetime.strptime(f.split('_')[7].rstrip('.EOF'), '%Y%m%dT%H%M%S')
+        t0 = dt.datetime.strptime(f.split('_')[6].lstrip('V'), '%Y%m%dT%H%M%S')
+        t1 = dt.datetime.strptime(f.split('_')[7].rstrip('.EOF'), '%Y%m%dT%H%M%S')
         if t0 < ref_time < t1:
             orb_file = path
             break
@@ -534,14 +534,14 @@
     return path
 
 
-def filter_ESA_orbit_file(orbit_xml: str, ref_time: datetime.datetime) -> bool:
+def filter_ESA_orbit_file(orbit_xml: str, ref_time: dt.datetime) -> bool:
     """Returns true or false depending on whether orbit file contains ref time.
 
     Parameters
     ----------
     orbit_xml : str
         ESA orbit xml
-    ref_time : datetime.datetime
+    ref_time : dt.datetime
 
     Returns:
     -------
     bool
         True if ref time is within orbit_xml
     """
     f = os.path.basename(orbit_xml)
-    t0 = datetime.datetime.strptime(f.split('_')[6].lstrip('V'), '%Y%m%dT%H%M%S')
-    t1 = datetime.datetime.strptime(f.split('_')[7].rstrip('.EOF'), '%Y%m%dT%H%M%S')
+    t0 = dt.datetime.strptime(f.split('_')[6].lstrip('V'), '%Y%m%dT%H%M%S')
+    t1 = dt.datetime.strptime(f.split('_')[7].rstrip('.EOF'), '%Y%m%dT%H%M%S')
 
     return t0 < ref_time < t1
 
@@ -576,7 +576,7 @@ def state_to_los(svs, llh_targets):
     >>> from RAiDER.utilFcns import rio_open
     >>> import RAiDER.losreader as losr
     >>> lats, lons, heights = np.array([-76.1]), np.array([36.83]), np.array([0])
-    >>> time = datetime.datetime(2018,11,12,23,0,0)
+    >>> time = dt.datetime(2018,11,12,23,0,0)
     >>> # download the orbit file beforehand
     >>> esa_orbit_file = 'S1A_OPER_AUX_POEORB_OPOD_20181203T120749_V20181112T225942_20181114T005942.EOF'
     >>> svs = losr.read_ESA_Orbit_file(esa_orbit_file)
@@ -733,7 +733,7 @@ def getTopOfAtmosphere(xyz, look_vecs, toaheight, factor=None):
     return pos
 
 
-def get_orbit(orbit_file: Union[list, str], ref_time: datetime.datetime, pad: int):
+def get_orbit(orbit_file: Union[list, str], ref_time: dt.datetime, pad: int):
     """
     Returns state vectors from an orbit file; state vectors are unique and ordered in terms of time
 
     orbit file (str | list): - user-passed file(s) containing statevectors
diff --git a/tools/RAiDER/models/hrrr.py b/tools/RAiDER/models/hrrr.py
index 4fcfa755..e379e6b9 100644
--- a/tools/RAiDER/models/hrrr.py
+++ b/tools/RAiDER/models/hrrr.py
@@ -1,4 +1,4 @@
-import datetime
+import datetime as dt
 import os
 from pathlib import Path
 
@@ -25,10 +25,10 @@
 AK_GEO = gpd.read_file(Path(__file__).parent / 'data' / 'alaska.geojson.zip').geometry.unary_union
 
 
-def check_hrrr_dataset_availability(dt: datetime) -> bool:
+def check_hrrr_dataset_availability(datetime: dt.datetime) -> bool:
     """Note a file could still be missing within the models valid range."""
     herbie = Herbie(
-        dt,
+        datetime,
         model='hrrr',
         product='nat',
         fxx=0,
diff --git a/tools/RAiDER/s1_azimuth_timing.py b/tools/RAiDER/s1_azimuth_timing.py
index 164fdefd..31530a67 100644
--- a/tools/RAiDER/s1_azimuth_timing.py
+++ b/tools/RAiDER/s1_azimuth_timing.py
@@ -1,4 +1,4 @@
-import datetime
+import datetime as dt
 import warnings
 
 import asf_search as asf
@@ -18,8 +18,8 @@
 
 def _asf_query(
     point: Point,
-    start: datetime.datetime,
-    end: datetime.datetime,
+    start: dt.datetime,
+    end: dt.datetime,
     buffer_degrees: float = 2
 ) -> list[str]:
     """
@@ -28,8 +28,8 @@
     Parameters
     ----------
     point : Point
-    start : datetime.datetime
-    end : datetime.datetime
+    start : dt.datetime
+    end : dt.datetime
     buffer_degrees : float, optional
 
     Returns:
@@ -50,7 +50,7 @@
 def get_slc_id_from_point_and_time(
     lon: float,
     lat: float,
-    dt: datetime.datetime,
+    datetime: dt.datetime,
     buffer_seconds: int = 600,
     buffer_deg: float = 2
 ) -> list:
@@ -62,7 +62,7 @@
     ----------
     lon : float
     lat : float
-    dt : datetime.datetime
+    datetime : dt.datetime
     buffer_seconds : int, optional
         Do not recommend adjusting this, by default 600, to ensure enough padding for multiple orbit files
 
@@ -72,9 +72,9 @@
         All slc_ids returned by asf_search
     """
     point = Point(lon, lat)
-    time_delta = datetime.timedelta(seconds=buffer_seconds)
-    start = dt - time_delta
-    end = dt + time_delta
+    time_delta = dt.timedelta(seconds=buffer_seconds)
+    start = datetime - time_delta
+    end = datetime + time_delta
 
     # Requires buffer of degrees to get several SLCs and ensure we get correct
     # orbit files
@@ -151,7 +151,7 @@ def get_s1_azimuth_time_grid(
     lon: np.ndarray,
     lat: np.ndarray,
     hgt: np.ndarray,
-    dt: datetime.datetime
+    datetime: dt.datetime
 ) -> np.ndarray:
     """Based on the lon, lat, hgt (3d cube) - obtains an associated s1 orbit
     file to calculate the azimuth timing across the cube. Requires datetime of acq
@@ -165,7 +165,7 @@
         1 dimensional coordinate array or 3d mesh of coordinates
     hgt : np.ndarray
         1 dimensional coordinate array or 3d mesh of coordinates
-    dt : datetime.datetime
+    datetime : dt.datetime
 
     Returns:
     -------
@@ -197,7 +197,7 @@
     try:
         lon_m = np.mean(lon)
         lat_m = np.mean(lat)
-        slc_ids = get_slc_id_from_point_and_time(lon_m, lat_m, dt)
+        slc_ids = get_slc_id_from_point_and_time(lon_m, lat_m, datetime)
     except ValueError:
         warnings.warn('No slc id found for the given datetime and grid; returning empty grid')
         m, n, p = hgt_mesh.shape
@@ -207,35 +207,35 @@
     orb_files = get_orbits_from_slc_ids_hyp3lib(slc_ids)
     orb_files = [str(of) for of in orb_files]
 
-    orb = get_isce_orbit(orb_files, dt, pad=600)
+    orb = get_isce_orbit(orb_files, datetime, pad=600)
     az_arr = get_azimuth_time_grid(lon_mesh, lat_mesh, hgt_mesh, orb)
 
     return az_arr
 
 
 def get_n_closest_datetimes(
-    ref_time: datetime.datetime,
+    ref_time: dt.datetime,
     n_target_times: int,
     time_step_hours: int
-) -> list[datetime.datetime]:
+) -> list[dt.datetime]:
     """
     Gets n closest times relative to the `round_to_hour_delta` and the
     `ref_time`. Specifically, if one is interetsted in getting 3 closest times
     to say 0, 6, 12, 18 UTC times of a ref time `dt`, then:
 
     ```
-    dt = datetime.datetime(2023, 1, 1, 11, 0, 0)
+    dt = dt.datetime(2023, 1, 1, 11, 0, 0)
     get_n_closest_datetimes(dt, 3, 6)
     ```
 
     gives the desired answer of
 
     ```
-    [datetime.datetime(2023, 1, 1, 12, 0, 0),
-     datetime.datetime(2023, 1, 1, 6, 0, 0),
-     datetime.datetime(2023, 1, 1, 18, 0, 0)]
+    [dt.datetime(2023, 1, 1, 12, 0, 0),
+     dt.datetime(2023, 1, 1, 6, 0, 0),
+     dt.datetime(2023, 1, 1, 18, 0, 0)]
     ```
 
     Parameters
     ----------
-    ref_time : datetime.datetime
+    ref_time : dt.datetime
         Time to round from
     n_times : int
         Number of times to get
@@ -246,7 +246,7 @@
 
     Returns:
     -------
-    list[datetime.datetime]
+    list[dt.datetime]
         List of closest dates ordered by absolute proximity. If two dates have same distance to ref_time, choose earlier one (more likely to be available)
     """
@@ -278,38 +278,38 @@
 def get_times_for_azimuth_interpolation(
-    ref_time: datetime.datetime,
+    ref_time: dt.datetime,
     time_step_hours: int,
     buffer_in_seconds: int = 300
-) -> list[datetime.datetime]:
+) -> list[dt.datetime]:
     """Obtains times needed for azimuth interpolation.
 
     Filters 3 closests dates from ref_time so that all returned dates are within `time_step_hours` + `buffer_in_seconds`.
 
     This ensures we request dates that are really needed.
     ```
-    dt = datetime.datetime(2023, 1, 1, 11, 1, 0)
+    dt = dt.datetime(2023, 1, 1, 11, 1, 0)
     get_times_for_azimuth_interpolation(dt, 1)
     ```
 
     yields
 
     ```
-    [datetime.datetime(2023, 1, 1, 11, 0, 0),
-     datetime.datetime(2023, 1, 1, 12, 0, 0),
-     datetime.datetime(2023, 1, 1, 10, 0, 0)]
+    [dt.datetime(2023, 1, 1, 11, 0, 0),
+     dt.datetime(2023, 1, 1, 12, 0, 0),
+     dt.datetime(2023, 1, 1, 10, 0, 0)]
     ```
 
     whereas
 
     ```
-    dt = datetime.datetime(2023, 1, 1, 11, 30, 0)
+    dt = dt.datetime(2023, 1, 1, 11, 30, 0)
     get_times_for_azimuth_interpolation(dt, 1)
     ```
 
     yields
 
     ```
-    [datetime.datetime(2023, 1, 1, 11, 0, 0),
-     datetime.datetime(2023, 1, 1, 12, 0, 0)]
+    [dt.datetime(2023, 1, 1, 11, 0, 0),
+     dt.datetime(2023, 1, 1, 12, 0, 0)]
     ```
 
     Parameters
     ----------
-    ref_time : datetime.datetime
+    ref_time : dt.datetime
         A time of acquisition
     time_step_hours : int
         Weather model time step, should evenly divide 24 hours
@@ -318,13 +318,13 @@
 
     Returns:
     -------
-    list[datetime.datetime]
+    list[dt.datetime]
         2 or 3 closest times within 1 time step (plust the buffer) and the reference time
     """
     # Get 3 closest times
     closest_times = get_n_closest_datetimes(ref_time, 3, time_step_hours)
 
-    def filter_time(time: datetime.datetime):
+    def filter_time(time: dt.datetime):
         absolute_time_difference_sec = abs((ref_time - time).total_seconds())
         upper_bound_seconds = time_step_hours * 60 * 60 + buffer_in_seconds
         return absolute_time_difference_sec < upper_bound_seconds
@@ -335,7 +335,7 @@
 def get_inverse_weights_for_dates(
     azimuth_time_array: np.ndarray,
-    dates: list[datetime.datetime],
+    dates: list[dt.datetime],
     inverse_regularizer: float = 1e-9,
     temporal_window_hours: float = None,
 ) -> list[np.ndarray]:
@@ -350,7 +350,7 @@
     ----------
     azimuth_time_array : np.ndarray
         Array of type `np.datetime64[ms]`
-    dates : list[datetime.datetime]
+    dates : list[dt.datetime]
         List of datetimes
     inverse_regularizer : float, optional
         If a `time` in the azimuth time arr equals one of the given dates, then the regularlizer ensures that the value
@@ -373,7 +373,7 @@
     if n_dates == 0:
         raise ValueError('No dates provided')
 
-    if not all([isinstance(date, datetime.datetime) for date in dates]):
+    if not all([isinstance(date, dt.datetime) for date in dates]):
         raise TypeError('dates must be all datetimes')
     if temporal_window_hours is None:
         temporal_window_seconds = min([abs((date - dates[0]).total_seconds()) for date in dates[1:]])
diff --git a/tools/RAiDER/s1_orbits.py b/tools/RAiDER/s1_orbits.py
index e88bc807..217a1191 100644
--- a/tools/RAiDER/s1_orbits.py
+++ b/tools/RAiDER/s1_orbits.py
@@ -1,3 +1,4 @@
+import datetime as dt
 import netrc
 import os
 import re
@@ -107,25 +108,25 @@
     return orbits
 
 
-def download_eofs(dts: list, missions: list, save_dir: str):
+def download_eofs(datetimes: list[dt.datetime], missions: list, save_dir: str):
     """Wrapper around sentineleof to first try downloading from ASF and fall back to CDSE."""
     _ = ensure_orbit_credentials()
 
     orb_files = []
-    for dt, mission in zip(dts, missions):
-        dt = dt if isinstance(dt, list) else [dt]
+    for datetime, mission in zip(datetimes, missions):
+        datetime = datetime if isinstance(datetime, list) else [datetime]
         mission = mission if isinstance(mission, list) else [mission]
 
         try:
-            orb_file = eof.download.download_eofs(dt, mission, save_dir=save_dir, force_asf=True)
+            orb_file = eof.download.download_eofs(datetime, mission, save_dir=save_dir, force_asf=True)
         except:
             logger.error('Could not download orbit from ASF, trying ESA...')
-            orb_file = eof.download.download_eofs(dt, mission, save_dir=save_dir, force_asf=False)
+            orb_file = eof.download.download_eofs(datetime, mission, save_dir=save_dir, force_asf=False)
 
         orb_file = orb_file[0] if isinstance(orb_file, list) else orb_file
         orb_files.append(orb_file)
 
-    if not len(orb_files) == len(dts):
-        raise Exception(f'Missing {len(dts) - len(orb_files)} orbit files! dts={dts}, orb_files={len(orb_files)}')
+    if not len(orb_files) == len(datetimes):
+        raise Exception(f'Missing {len(datetimes) - len(orb_files)} orbit files! dts={datetimes}, orb_files={len(orb_files)}')
 
     return orb_files
diff --git a/tools/RAiDER/utilFcns.py b/tools/RAiDER/utilFcns.py
index 58a653c3..0d1f1d3c 100644
--- a/tools/RAiDER/utilFcns.py
+++ b/tools/RAiDER/utilFcns.py
@@ -1,14 +1,14 @@
 """Geodesy-related utility functions."""
 
+import datetime as dt
 import pathlib
 import re
-from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Any, Optional, Union
 
 import numpy as np
 import rasterio
-import xarray
+import xarray as xr
 import yaml
 from numpy import ndarray
 from pyproj import CRS, Proj, Transformer
@@ -288,15 +288,15 @@
 def round_date(date, precision):
     # First try rounding up
     # Timedelta since the beginning of time
-    T0 = datetime.min
+    T0 = dt.datetime.min
 
     try:
         datedelta = T0 - date
     except TypeError:
-        T0 = T0.replace(tzinfo=timezone(offset=timedelta()))
+        T0 = T0.replace(tzinfo=dt.timezone(offset=dt.timedelta()))
         datedelta = T0 - date
 
-    # Round that timedelta to the specified precision
+    # Round that dt.timedelta to the specified precision
     rem = datedelta % precision
     # Add back to get date rounded up
     round_up = date + rem
@@ -305,7 +305,7 @@
     try:
         datedelta = date - T0
     except TypeError:
-        T0 = T0.replace(tzinfo=timezone(offset=timedelta()))
+        T0 = T0.replace(tzinfo=dt.timezone(offset=dt.timedelta()))
         datedelta = date - T0
 
     rem = datedelta % precision
@@ -403,16 +403,16 @@
     return np.concatenate((new_var[:, :, np.newaxis], invar), axis=2)
 
 
-def round_time(dt, roundTo=60):
+def round_time(datetime, roundTo=60):
     """
     Round a datetime object to any time lapse in seconds
-    dt: datetime.datetime object
+    datetime: dt.datetime object
     roundTo: Closest number of seconds to round to, default 1 minute.
     Source: https://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object/10854034#10854034
     """
-    seconds = (dt.replace(tzinfo=None) - dt.min).seconds
+    seconds = (datetime.replace(tzinfo=None) - datetime.min).seconds
     rounding = (seconds + roundTo / 2) // roundTo * roundTo
-    return dt + timedelta(0, rounding - seconds, -dt.microsecond)
+    return datetime + dt.timedelta(0, rounding - seconds, -datetime.microsecond)
 
 
 def writeDelays(
@@ -456,7 +456,7 @@ def getTimeFromFile(filename):
     fmt = '%Y_%m_%d_T%H_%M_%S'
     p = re.compile(r'\d{4}_\d{2}_\d{2}_T\d{2}_\d{2}_\d{2}')
     out = p.search(filename).group()
-    return datetime.strptime(out, fmt)
+    return dt.datetime.strptime(out, fmt)
 
 
 # Part of the following UTM and WGS84 converter is borrowed from https://gist.github.com/twpayne/4409500
@@ -614,10 +614,10 @@ def requests_retry_session(retries=10, session=None):
     return session
 
 
-def writeWeatherVarsXarray(lat, lon, h, q, p, t, dt, crs, outName=None, NoDataValue=-9999, chunk=(1, 91, 144)) -> None:
+def writeWeatherVarsXarray(lat, lon, h, q, p, t, datetime, crs, outName=None, NoDataValue=-9999, chunk=(1, 91, 144)) -> None:
     # I added datetime as an input to the function and just copied these two lines from merra2 for the attrs_dict
     attrs_dict = {
-        'datetime': dt.strftime('%Y_%m_%dT%H_%M_%S'),
+        'datetime': datetime.strftime('%Y_%m_%dT%H_%M_%S'),
         'date_created': datetime.now().strftime('%Y_%m_%dT%H_%M_%S'),
         'NoDataValue': NoDataValue,
         'chunksize': chunk,
@@ -636,7 +636,7 @@
         't': (('z', 'y', 'x'), t),
     }
 
-    ds = xarray.Dataset(
+    ds = xr.Dataset(
         data_vars=dataset_dict,
         coords=dimension_dict,
         attrs=attrs_dict,
@@ -825,16 +825,16 @@ def get_nearest_wmtimes(t0, time_delta):
     Example:
     >>> import datetime
     >>> from RAiDER.utilFcns import get_nearest_wmtimes
-    >>> t0 = datetime.datetime(2020,1,1,11,35,0)
+    >>> t0 = dt.datetime(2020,1,1,11,35,0)
     >>> get_nearest_wmtimes(t0, 3)
-    (datetime.datetime(2020, 1, 1, 9, 0), datetime.datetime(2020, 1, 1, 12, 0))
+    (dt.datetime(2020, 1, 1, 9, 0), dt.datetime(2020, 1, 1, 12, 0))
     """
     # get the closest time available
     tclose = round_time(t0, roundTo=time_delta * 60 * 60)
 
     # Just calculate both options and take the closest
-    t2_1 = tclose + timedelta(hours=time_delta)
-    t2_2 = tclose - timedelta(hours=time_delta)
+    t2_1 = tclose + dt.timedelta(hours=time_delta)
+    t2_2 = tclose - dt.timedelta(hours=time_delta)
     t2 = [t2_1 if get_dt(t2_1, t0) < get_dt(t2_2, t0) else t2_2][0]
 
     # If you're within 5 minutes just take the closest time
@@ -861,7 +861,7 @@ def get_dt(t1, t2):
     Examples:
     >>> import datetime
     >>> from RAiDER.utilFcns import get_dt
-    >>> get_dt(datetime.datetime(2020,1,1,5,0,0), datetime.datetime(2020,1,1,0,0,0))
+    >>> get_dt(dt.datetime(2020,1,1,5,0,0), dt.datetime(2020,1,1,0,0,0))
     18000.0
     """
     return np.abs((t1 - t2).total_seconds())
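
Notes (illustrative sketches follow; none of this is part of the patch itself):

The rename pattern above is two-sided: the module moves to the `dt` alias, which frees the name `datetime` for parameters and loop variables that previously had to be called `dt`. A minimal sketch of the convention, using only the standard library and a hypothetical function name:

```
import datetime as dt

def check_availability(datetime: dt.datetime) -> bool:
    # The parameter named `datetime` shadows nothing we need:
    # the module stays reachable through the `dt` alias.
    cutoff = dt.datetime(2016, 7, 15, tzinfo=dt.timezone.utc)
    return datetime >= cutoff

print(check_availability(dt.datetime(2020, 1, 1, tzinfo=dt.timezone.utc)))  # True
```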
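One subtlety in `writeWeatherVarsXarray`: the untouched context line `'date_created': datetime.now().strftime(...)` now resolves `now` through the renamed parameter rather than through the class. It still returns the current time, because `now` is a classmethod and classmethods are reachable through instances; a sketch of that behavior, with `stamp` standing in for the parameter:

```
import datetime as dt

stamp = dt.datetime(2020, 1, 1)  # stand-in for the `datetime` parameter
# `now` is a classmethod, so this is the current time, not a copy of `stamp`:
print(stamp.now())
print(dt.datetime.now())  # equivalent, and more explicit about intent
```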
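The three aliases named in the commit message are easy to enforce mechanically. A hypothetical checker (the names `EXPECTED` and `check_aliases` are invented for illustration) could walk the tree with `ast`:

```
import ast
import pathlib

EXPECTED = {'datetime': 'dt', 'xarray': 'xr', 'multiprocessing': 'mp'}

def check_aliases(root: str) -> list[str]:
    """Report `import X` statements that do not use the agreed alias."""
    problems = []
    for path in pathlib.Path(root).rglob('*.py'):
        tree = ast.parse(path.read_text(), filename=str(path))
        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                for alias in node.names:
                    want = EXPECTED.get(alias.name)
                    if want is not None and alias.asname != want:
                        problems.append(f'{path}:{node.lineno}: expected "import {alias.name} as {want}"')
    return problems

# e.g. print('\n'.join(check_aliases('tools/RAiDER')))
```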
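Unrelated to the renames but visible in the `download_eofs` hunk: the ASF-to-ESA fallback still relies on a pre-existing bare `except:` (a context line this patch does not touch). A narrower shape for a possible follow-up, sketched with a broad `Exception` because the precise error raised by `sentineleof` is not pinned down here:

```
import eof.download

def fetch_orbits(datetimes, missions, save_dir):
    """Try ASF first, then fall back to ESA/CDSE, mirroring download_eofs."""
    try:
        return eof.download.download_eofs(datetimes, missions, save_dir=save_dir, force_asf=True)
    except Exception:  # TODO: narrow to the exception sentineleof actually raises
        return eof.download.download_eofs(datetimes, missions, save_dir=save_dir, force_asf=False)
```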