Normalize import aliases
The following packages are now imported the same across all files:
- import datetime as dt
- import xarray as xr
- import multiprocessing as mp
garlic-os committed Aug 7, 2024
1 parent 173571d commit dc3d289
Showing 12 changed files with 127 additions and 125 deletions.
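For reference, the convention this commit enforces, as a minimal sketch (the usage lines below are illustrative, not code from the repository):

import datetime as dt
import multiprocessing as mp

import xarray as xr

timestamp = dt.datetime(2020, 1, 1)  # module access stays qualified through the alias
n_workers = mp.cpu_count()           # 'mp' is the usual community alias
empty = xr.Dataset()                 # 'xr' is the standard xarray alias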
10 changes: 5 additions & 5 deletions tools/RAiDER/aria/calcGUNW.py
@@ -2,8 +2,8 @@
 Calculate the interferometric phase from the 4 delays files of a GUNW and write it to disk.
 """
 
+import datetime as dt
 import os
-from datetime import datetime
 from pathlib import Path
 
 import h5py
@@ -41,9 +41,9 @@ def compute_delays_slc(cube_paths: list[Path], wavelength: float) -> xr.Dataset:
         Formatted dataset for GUNW
     """
     # parse date from filename
-    dct_delays: dict[datetime, Path] = {}
+    dct_delays: dict[dt.datetime, Path] = {}
     for path in cube_paths:
-        date = datetime.strptime(path.name.split('_')[2], '%Y%m%dT%H%M%S')
+        date = dt.datetime.strptime(path.name.split('_')[2], '%Y%m%dT%H%M%S')
         dct_delays[date] = path
 
     sec, ref = sorted(dct_delays.keys())
@@ -52,8 +52,8 @@ def compute_delays_slc(cube_paths: list[Path], wavelength: float) -> xr.Dataset:
     hyd_delays: list[xr.DataArray] = []
     attrs_lst: list[dict] = []
     phase2range = (-4 * np.pi) / float(wavelength)
-    for dt in [ref, sec]:
-        path = dct_delays[dt]
+    for datetime in [ref, sec]:
+        path = dct_delays[datetime]
         with xr.open_dataset(path) as ds:
             da_wet = ds['wet'] * phase2range
             da_hydro = ds['hydro'] * phase2range
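A note on the hunk above: once `dt` names the datetime module, it can no longer double as a loop variable, hence the rename to `datetime`. A minimal sketch of the shadowing bug the rename avoids (illustrative, not repository code):

import datetime as dt

dates = [dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 2)]
for dt in dates:  # rebinds 'dt': it is now a datetime instance, not the module
    pass
dt.datetime.now()  # AttributeError: the module alias has been shadowed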
2 changes: 1 addition & 1 deletion tools/RAiDER/checkArgs.py
@@ -115,7 +115,7 @@ def makeDelayFileNames(date: Optional[dt.date], los: Optional[LOS], outformat: s
     return names for the wet and hydrostatic delays.
 
     # Examples:
-    >>> makeDelayFileNames(datetime(2020, 1, 1, 0, 0, 0), None, "h5", "model_name", "some_dir")
+    >>> makeDelayFileNames(dt.datetime(2020, 1, 1, 0, 0, 0), None, "h5", "model_name", "some_dir")
     ('some_dir/model_name_wet_00_00_00_ztd.h5', 'some_dir/model_name_hydro_00_00_00_ztd.h5')
     >>> makeDelayFileNames(None, None, "h5", "model_name", "some_dir")
     ('some_dir/model_name_wet_ztd.h5', 'some_dir/model_name_hydro_ztd.h5')
6 changes: 3 additions & 3 deletions tools/RAiDER/cli/statsPlot.py
@@ -9,7 +9,7 @@
 import copy
 import datetime as dt
 import itertools
-import multiprocessing
+import multiprocessing as mp
 import os
 import warnings
 
@@ -861,7 +861,7 @@ def create_variograms(self):
             grid_subset = self.df[self.df['gridnode'] == i]
             args.append((i, grid_subset))
         # Parallelize iteration through all grid-cells and time slices
-        with multiprocessing.Pool(self.numCPUs) as multipool:
+        with mp.Pool(self.numCPUs) as multipool:
             for i, j, k, l in multipool.starmap(self._append_variogram, args):
                 self.TOT_good_slices.extend(i)
                 self.TOT_res_robust_arr.extend(j)
@@ -1819,7 +1819,7 @@ def create_DF(self) -> None:
                 )
             )
         # Parallelize iteration through all grid-cells and time slices
-        with multiprocessing.Pool(self.numCPUs) as multipool:
+        with mp.Pool(self.numCPUs) as multipool:
             for i, j, k, l, m, n, o in multipool.starmap(self._amplitude_and_phase, args):
                 self.ampfit.extend(i)
                 self.phsfit.extend(j)
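The two hunks above only rename the module, but the starmap pattern they touch is worth a minimal, self-contained sketch (the names are illustrative, not RAiDER's):

import multiprocessing as mp

def process_cell(cell_id, values):
    # Toy worker: starmap unpacks each argument tuple into the parameters
    return cell_id, sum(values)

if __name__ == '__main__':
    args = [(0, [1, 2]), (1, [3, 4]), (2, [5, 6])]
    with mp.Pool(2) as multipool:
        for cell_id, total in multipool.starmap(process_cell, args):
            print(cell_id, total)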
32 changes: 17 additions & 15 deletions tools/RAiDER/delay.py
@@ -13,27 +13,29 @@
 "wet_total" and "hydro_total" fields specified.
 """
 
+import datetime as dt
 import os
-from datetime import datetime, timezone
 from typing import Optional, Union
 
-from RAiDER.types import CRSLike
-from RAiDER.utilFcns import parse_crs
 import numpy as np
 import pyproj
-import xarray
+import xarray as xr
 from pyproj import CRS, Transformer
 
 from RAiDER.constants import _ZREF
 from RAiDER.delayFcns import getInterpolators
 from RAiDER.llreader import AOI, BoundingBox, Geocube
 from RAiDER.logger import logger
 from RAiDER.losreader import LOS, build_ray
+from RAiDER.types import CRSLike
+from RAiDER.utilFcns import parse_crs
 
 
 ###############################################################################
 def tropo_delay(
-    dt: datetime,
+    datetime: dt.datetime,
     weather_model_file: str,
     aoi: AOI,
     los: LOS,
@@ -49,7 +51,7 @@
     3. Slant delays integrated along the raypath (STD-raytracing)
 
     Args:
-        dt: Datetime - Datetime object for determining when to calculate delays
+        datetime: Datetime - Datetime object for determining when to calculate delays
         weather_model_File: string - Name of the NETCDF file containing a pre-processed weather model
         aoi: AOI object - AOI object
         los: LOS object - LOS object
@@ -63,7 +65,7 @@
     crs = CRS(out_proj)
 
     # Load CRS from weather model file
-    with xarray.load_dataset(weather_model_file) as ds:
+    with xr.load_dataset(weather_model_file) as ds:
         try:
             wm_proj = CRS.from_wkt(ds['proj'].attrs['crs_wkt'])
         except KeyError:
@@ -73,7 +75,7 @@
             wm_proj = CRS.from_epsg(4326)
 
     # get heights
-    with xarray.load_dataset(weather_model_file) as ds:
+    with xr.load_dataset(weather_model_file) as ds:
         wm_levels = ds.z.values
         toa = wm_levels.max() - 1
 
@@ -93,7 +95,7 @@
     )
 
     # TODO: expose this as library function
-    ds = _get_delays_on_cube(dt, weather_model_file, wm_proj, aoi, height_levels, los, crs, zref)
+    ds = _get_delays_on_cube(datetime, weather_model_file, wm_proj, aoi, height_levels, los, crs, zref)
 
     if isinstance(aoi, (BoundingBox, Geocube)):
         return ds, None
@@ -120,22 +122,22 @@
 
     # return the delays (ZTD or STD)
     if los.is_Projected():
-        los.setTime(dt)
+        los.setTime(datetime)
         los.setPoints(lats, lons, hgts)
         wetDelay = los(wetDelay)
         hydroDelay = los(hydroDelay)
 
     return wetDelay, hydroDelay
 
 
-def _get_delays_on_cube(dt, weather_model_file, wm_proj, aoi, heights, los, crs, zref, nproc=1):
+def _get_delays_on_cube(datetime: dt.datetime, weather_model_file, wm_proj, aoi, heights, los, crs, zref, nproc=1):
     """Raider cube generation function."""
     zpts = np.array(heights)
 
     try:
         aoi.xpts
     except AttributeError:
-        with xarray.load_dataset(weather_model_file) as ds:
+        with xr.load_dataset(weather_model_file) as ds:
             x_spacing = ds.x.diff(dim='x').values.mean()
             y_spacing = ds.y.diff(dim='y').values.mean()
             aoi.set_output_spacing(ll_res=np.min([x_spacing, y_spacing]))
@@ -186,7 +188,7 @@ def _get_delays_on_cube(dt, weather_model_file, wm_proj, aoi, heights, los, crs,
         logger.critical('There are missing delay values. Check your inputs.')
 
     # Write output file
-    ds = writeResultsToXarray(dt, aoi.xpts, aoi.ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type)
+    ds = writeResultsToXarray(datetime, aoi.xpts, aoi.ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type)
 
     return ds
 
@@ -324,10 +326,10 @@ def _build_cube_ray(
     return outputArrs
 
 
-def writeResultsToXarray(dt, xpts, ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type):
+def writeResultsToXarray(datetime: dt.datetime, xpts, ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type):
     """Write a 1-D array to a NETCDF5 file."""
     # Modify this as needed for NISAR / other projects
-    ds = xarray.Dataset(
+    ds = xr.Dataset(
         data_vars=dict(
             wet=(
                 ['z', 'y', 'x'],
@@ -359,9 +361,9 @@ def writeResultsToXarray(dt, xpts, ypts, zpts, crs, wetDelay, hydroDelay, weathe
             Conventions='CF-1.7',
             title='RAiDER geo cube',
             source=os.path.basename(weather_model_file),
-            history=str(datetime.now(tz=timezone.utc)) + ' RAiDER',
+            history=str(dt.datetime.now(tz=dt.timezone.utc)) + ' RAiDER',
             description=f'RAiDER geo cube - {out_type}',
-            reference_time=dt.strftime('%Y%m%dT%H:%M:%S'),
+            reference_time=datetime.strftime('%Y%m%dT%H:%M:%S'),
         ),
     )
 
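One subtlety in the last hunk: with the plain-module import, both `datetime` and `timezone` must be reached through the `dt` alias, while the parameter formerly named `dt` is now `datetime`. A minimal sketch of the equivalent calls (values are illustrative):

import datetime as dt

history = str(dt.datetime.now(tz=dt.timezone.utc)) + ' RAiDER'  # aware UTC timestamp
reference_time = dt.datetime(2020, 1, 1).strftime('%Y%m%dT%H:%M:%S')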
4 changes: 2 additions & 2 deletions tools/RAiDER/gnss/downloadGNSSDelays.py
@@ -6,7 +6,7 @@
 #
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 import itertools
-import multiprocessing
+import multiprocessing as mp
 import os
 
 import pandas as pd
@@ -126,7 +126,7 @@ def download_tropo_delays(
 
     # Parallelize remote querying of station locations
     results = []
-    with multiprocessing.Pool(numCPUs) as multipool:
+    with mp.Pool(numCPUs) as multipool:
         # only record valid path
         if gps_repo == 'UNR':
             results = [fileurl for fileurl in multipool.starmap(download_UNR, stat_year_tup) if fileurl['path']]
27 changes: 13 additions & 14 deletions tools/RAiDER/gnss/processDelayFiles.py
@@ -1,6 +1,6 @@
 import argparse
-import datetime
+import datetime as dt
 import glob
 import math
 import os
 import re
@@ -58,8 +58,7 @@ def addDateTimeToFiles(fileList, force=False, verbose=False) -> None:
             )
         else:
             try:
-                dt = getDateTime(f)
-                data['Datetime'] = dt
+                data['Datetime'] = getDateTime(path)
                 # drop all lines with nans
                 data.dropna(how='any', inplace=True)
                 # drop all duplicate lines
@@ -82,29 +81,29 @@ def update_time(row, localTime_hrs):
     """Update with local origin time."""
     localTime_estimate = row['Datetime'].replace(hour=localTime_hrs, minute=0, second=0)
     # determine if you need to shift days
-    time_shift = datetime.timedelta(days=0)
+    time_shift = dt.timedelta(days=0)
     # round to nearest hour
     days_diff = (
-        row['Datetime'] - datetime.timedelta(seconds=math.floor(row['Localtime']) * 3600)
+        row['Datetime'] - dt.timedelta(seconds=math.floor(row['Localtime']) * 3600)
     ).day - localTime_estimate.day
     # if lon <0, check if you need to add day
     if row['Lon'] < 0:
         # add day
         if days_diff != 0:
-            time_shift = datetime.timedelta(days=1)
+            time_shift = dt.timedelta(days=1)
     # if lon >0, check if you need to subtract day
     if row['Lon'] > 0:
         # subtract day
         if days_diff != 0:
-            time_shift = -datetime.timedelta(days=1)
-    return localTime_estimate + datetime.timedelta(seconds=row['Localtime'] * 3600) + time_shift
+            time_shift = -dt.timedelta(days=1)
+    return localTime_estimate + dt.timedelta(seconds=row['Localtime'] * 3600) + time_shift
 
 
 def pass_common_obs(reference, target, localtime=None):
     """Pass only observations in target spatiotemporally common to reference."""
     if isinstance(target['Datetime'].iloc[0], str):
         target['Datetime'] = target['Datetime'].apply(
-            lambda x: datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
+            lambda x: dt.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
         )
     if localtime:
         return target[
@@ -172,8 +171,8 @@ def local_time_filter(raiderFile, ztdFile, dfr, dfz, localTime):
     dfz['Localtime'] = dfz.apply(lambda r: update_time(r, localTime_hrs), axis=1)
 
     # filter out data outside of --localtime hour threshold
-    dfr['Localtime_u'] = dfr['Localtime'] + datetime.timedelta(hours=localTime_hrthreshold)
-    dfr['Localtime_l'] = dfr['Localtime'] - datetime.timedelta(hours=localTime_hrthreshold)
+    dfr['Localtime_u'] = dfr['Localtime'] + dt.timedelta(hours=localTime_hrthreshold)
+    dfr['Localtime_l'] = dfr['Localtime'] - dt.timedelta(hours=localTime_hrthreshold)
     OG_total = dfr.shape[0]
     dfr = dfr[(dfr['Datetime'] >= dfr['Localtime_l']) & (dfr['Datetime'] <= dfr['Localtime_u'])]
 
@@ -182,8 +181,8 @@ def local_time_filter(raiderFile, ztdFile, dfr, dfz, localTime):
         f'Total number of datapoints dropped in {raiderFile} for not being within {localTime.split(" ")[1]} hrs of '
         f'specified local-time {localTime.split(" ")[0]}: {dfr.shape[0]} out of {OG_total}'
     )
-    dfz['Localtime_u'] = dfz['Localtime'] + datetime.timedelta(hours=localTime_hrthreshold)
-    dfz['Localtime_l'] = dfz['Localtime'] - datetime.timedelta(hours=localTime_hrthreshold)
+    dfz['Localtime_u'] = dfz['Localtime'] + dt.timedelta(hours=localTime_hrthreshold)
+    dfz['Localtime_l'] = dfz['Localtime'] - dt.timedelta(hours=localTime_hrthreshold)
     OG_total = dfz.shape[0]
     dfz = dfz[(dfz['Datetime'] >= dfz['Localtime_l']) & (dfz['Datetime'] <= dfz['Localtime_u'])]
     # only keep observation closest to Localtime
@@ -209,7 +208,7 @@ def readZTDFile(filename, col_name='ZTD'):
     """Read and parse a GPS zenith delay file."""
     try:
         data = pd.read_csv(filename, parse_dates=['Date'])
-        times = data['times'].apply(lambda x: datetime.timedelta(seconds=x))
+        times = data['times'].apply(lambda x: dt.timedelta(seconds=x))
         data['Datetime'] = data['Date'] + times
     except (KeyError, ValueError):
         data = pd.read_csv(filename, parse_dates=['Datetime'])
8 changes: 4 additions & 4 deletions tools/RAiDER/llreader.py
@@ -13,7 +13,7 @@
 from RAiDER.types import BB, RIO
 import numpy as np
 import pyproj
-import xarray
+import xarray as xr
 
 
 try:
@@ -369,21 +369,21 @@ def __init__(self, path_cube, cube_spacing_in_m: Optional[float]=None) -> None:
         _, self._proj, self._geotransform = rio_stats(path_cube)
 
     def get_extent(self):
-        with xarray.open_dataset(self.path) as ds:
+        with xr.open_dataset(self.path) as ds:
             S, N = ds.latitude.min().item(), ds.latitude.max().item()
             W, E = ds.longitude.min().item(), ds.longitude.max().item()
         return [S, N, W, E]
 
     ## untested
     def readLL(self) -> tuple[np.ndarray, np.ndarray]:
-        with xarray.open_dataset(self.path) as ds:
+        with xr.open_dataset(self.path) as ds:
             lats = ds.latitutde.data()
             lons = ds.longitude.data()
         Lats, Lons = np.meshgrid(lats, lons)
         return Lats, Lons
 
     def readZ(self):
-        with xarray.open_dataset(self.path) as ds:
+        with xr.open_dataset(self.path) as ds:
             heights = ds.heights.data
         return heights
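The `xr` alias adopted above is the community standard for xarray. A minimal open-and-inspect sketch in the style of get_extent (the file name and coordinate names are hypothetical):

import xarray as xr

# Hypothetical NetCDF cube with 'latitude'/'longitude' coordinates
with xr.open_dataset('delays_cube.nc') as ds:
    S, N = ds.latitude.min().item(), ds.latitude.max().item()
    W, E = ds.longitude.min().item(), ds.longitude.max().item()
print([S, N, W, E])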
