From cb43fd018d8d77dba1792cf616baafaeb660e19a Mon Sep 17 00:00:00 2001
From: Tyler Sutterley
Date: Tue, 7 May 2024 14:18:47 -0700
Subject: [PATCH] feat: check if input filename is an open HDF5 file object
 (#89)

* feat: check if input filename is an open HDF5 file object
* refactor: use wrapper to importlib for optional dependencies
* Update environment.yml
---
 doc/environment.yml                           |  1 +
 doc/source/api_reference/utilities.rst        |  2 +
 icesat2_toolkit/convert.py                    | 19 ++----
 icesat2_toolkit/convert_delta_time.py         |  3 +-
 icesat2_toolkit/fit.py                        | 15 +++--
 icesat2_toolkit/io/ATL03.py                   | 20 ++++--
 icesat2_toolkit/io/ATL06.py                   | 16 +++--
 icesat2_toolkit/io/ATL07.py                   | 16 +++--
 icesat2_toolkit/io/ATL10.py                   | 16 +++--
 icesat2_toolkit/io/ATL11.py                   | 16 +++--
 icesat2_toolkit/io/ATL12.py                   | 17 ++++--
 icesat2_toolkit/spatial.py                    | 36 +++++------
 icesat2_toolkit/tools.py                      | 12 ++--
 icesat2_toolkit/utilities.py                  | 61 +++++++++++++++----
 scripts/MPI_DEM_ICESat2_ATL03.py              | 48 ++++++---------
 scripts/MPI_DEM_ICESat2_ATL06.py              | 48 ++++++---------
 scripts/MPI_DEM_ICESat2_ATL11.py              | 48 ++++++---------
 scripts/MPI_ICESat2_ATL03.py                  | 33 ++++------
 scripts/MPI_ICESat2_ATL03_histogram.py        | 33 ++++------
 scripts/MPI_reduce_ICESat2_ATL03_RGI.py       | 39 +++++------
 scripts/MPI_reduce_ICESat2_ATL06_RGI.py       | 39 +++++------
 scripts/MPI_reduce_ICESat2_ATL06_drainages.py | 39 +++++------
 scripts/MPI_reduce_ICESat2_ATL06_grounded.py  | 37 +++++------
 .../MPI_reduce_ICESat2_ATL06_ice_shelves.py   | 34 ++++------
 scripts/MPI_reduce_ICESat2_ATL11_RGI.py       | 38 +++++------
 scripts/MPI_reduce_ICESat2_ATL11_drainages.py | 36 ++++------
 scripts/MPI_reduce_ICESat2_ATL11_grounded.py  | 34 ++++------
 .../MPI_reduce_ICESat2_ATL11_ice_shelves.py   | 34 ++++------
 scripts/nsidc_icesat2_dragann.py              |  8 +--
 scripts/nsidc_icesat2_sync_s3.py              |  8 +--
 scripts/reduce_ICESat2_ATL06_raster.py        | 17 +++---
 scripts/reduce_ICESat2_ATL07_raster.py        | 17 +++---
 scripts/reduce_ICESat2_ATL10_raster.py        | 17 +++---
 scripts/reduce_ICESat2_ATL11_raster.py        | 17 +++---
 34 files changed, 393 insertions(+), 481 deletions(-)

diff --git a/doc/environment.yml b/doc/environment.yml
index d53e3a2..de1d67f 100644
--- a/doc/environment.yml
+++ b/doc/environment.yml
@@ -20,6 +20,7 @@ dependencies:
   - scp
   - sphinx
   - sphinx_rtd_theme
+  - timescale
   - pip:
     - sphinx-argparse>=0.4
     - ..
diff --git a/doc/source/api_reference/utilities.rst b/doc/source/api_reference/utilities.rst
index 47f9834..d3910e5 100644
--- a/doc/source/api_reference/utilities.rst
+++ b/doc/source/api_reference/utilities.rst
@@ -19,6 +19,8 @@ General Methods

 .. autofunction:: icesat2_toolkit.utilities.get_data_path

+.. autofunction:: icesat2_toolkit.utilities.import_dependency
+
 .. autofunction:: icesat2_toolkit.utilities.get_hash

 ..
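
For context, the refactor in this patch replaces the repeated try/except import guards with one call to the new importlib wrapper documented above. A minimal before/after sketch of the pattern (h5py is one example; the same change is applied to pandas, zarr, GDAL, and others in the hunks below):

    # before: guarded import emitting an ImportWarning (removed in this patch)
    import warnings
    try:
        import h5py
    except ModuleNotFoundError:
        warnings.warn("h5py not available", ImportWarning)

    # after: one-line wrapper around importlib (added in this patch)
    from icesat2_toolkit.utilities import import_dependency
    h5py = import_dependency('h5py')
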
autofunction:: icesat2_toolkit.utilities.get_git_revision_hash diff --git a/icesat2_toolkit/convert.py b/icesat2_toolkit/convert.py index 64a0ee2..1e88e3b 100644 --- a/icesat2_toolkit/convert.py +++ b/icesat2_toolkit/convert.py @@ -1,6 +1,6 @@ """ convert.py -Written by Tyler Sutterley (03/2024) +Written by Tyler Sutterley (05/2024) Utilities for converting ICESat-2 HDF5 files into different formats PYTHON DEPENDENCIES: @@ -17,6 +17,7 @@ https://pandas.pydata.org/ UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: place some imports behind try/except statements Updated 06/2022: place zarr and pandas imports behind try/except statements @@ -35,20 +36,12 @@ import itertools import posixpath import numpy as np +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - import pandas -except ModuleNotFoundError: - warnings.warn("pandas not available", ImportWarning) -try: - import zarr -except ModuleNotFoundError: - warnings.warn("zarr not available", ImportWarning) +h5py = import_dependency('h5py') +pandas = import_dependency('pandas') +zarr = import_dependency('zarr') class convert(): np.seterr(invalid='ignore') diff --git a/icesat2_toolkit/convert_delta_time.py b/icesat2_toolkit/convert_delta_time.py index 6056c58..4c9d2ff 100644 --- a/icesat2_toolkit/convert_delta_time.py +++ b/icesat2_toolkit/convert_delta_time.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -convert_delta_time.py (04/2022) +convert_delta_time.py (04/2024) Converts time from delta seconds into Julian and year-decimal INPUTS: @@ -21,6 +21,7 @@ https://pypi.org/project/timescale/ UPDATE HISTORY: + Updated 04/2024: use timescale for temporal operations Updated 04/2022: updated docstrings to numpy documentation format Updated 01/2021: time utilities for converting times from JD and to decimal Updated 08/2020: time utilities for counting leap seconds and JD conversion diff --git a/icesat2_toolkit/fit.py b/icesat2_toolkit/fit.py index 97418ce..1c3330b 100644 --- a/icesat2_toolkit/fit.py +++ b/icesat2_toolkit/fit.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" fit.py -Written by Tyler Sutterley (12/2021) +Written by Tyler Sutterley (05/2024) Utilities for calculating average fits from ATL03 Geolocated Photon Data PYTHON DEPENDENCIES: @@ -15,6 +15,7 @@ https://github.com/scikit-learn/scikit-learn UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 12/2022: place some imports behind try/except statements Updated 04/2022: updated docstrings to numpy documentation format Written 05/2021 @@ -26,12 +27,10 @@ import scipy.stats import scipy.signal import scipy.optimize +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import sklearn.neighbors -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("scikit-learn not available", ImportWarning) +neighbors = import_dependency('sklearn.neighbors') # PURPOSE: compress complete list of valid indices into a set of ranges def compress_list(i,n): @@ -527,7 +526,7 @@ def reduce_histogram_fit(x, y, z, ind, dt, FIT_TYPE='gaussian', # using kernel density functions from scikit-learn neighbors # gaussian kernels will reflect more accurate distributions of the data # with less sensitivity to sampling width than histograms (tophat kernels) - kde = 
sklearn.neighbors.KernelDensity(bandwidth=dz,kernel='gaussian') + kde = neighbors.KernelDensity(bandwidth=dz, kernel='gaussian') kde.fit(z[:,None]) # kde score_samples outputs are normalized log density functions hist = np.exp(kde.score_samples(z_full[:,None]) + np.log(n_max*dz)) @@ -655,7 +654,7 @@ def reduce_histogram_fit(x, y, z, ind, dt, FIT_TYPE='gaussian', # using kernel density functions from scikit-learn neighbors # gaussian kernels will reflect more accurate distributions of the data # with less sensitivity to sampling width than histograms (tophat kernels) - kde = sklearn.neighbors.KernelDensity(bandwidth=dz,kernel='gaussian') + kde = neighbors.KernelDensity(bandwidth=dz,kernel='gaussian') kde.fit(z_filt[:,None]) # kde score_samples outputs are normalized log density functions hist = np.exp(kde.score_samples(z_full[:,None]) + np.log(nz*dz)) @@ -990,7 +989,7 @@ def calc_first_photon_bias(temporal_residuals,n_pulses,n_pixels,dead_time,dt, # using kernel density functions from scikit-learn neighbors # gaussian kernels will reflect more accurate distributions of the data # with less sensitivity to sampling width than histograms (tophat kernels) - kde = sklearn.neighbors.KernelDensity(bandwidth=dt,kernel='gaussian') + kde = neighbors.KernelDensity(bandwidth=dt,kernel='gaussian') kde.fit(temporal_residuals[:,None]) # kde score_samples outputs are normalized log density functions hist = np.exp(kde.score_samples(t_full[:,None]) + np.log(cnt*dt)) diff --git a/icesat2_toolkit/io/ATL03.py b/icesat2_toolkit/io/ATL03.py index 59d6750..d34a252 100644 --- a/icesat2_toolkit/io/ATL03.py +++ b/icesat2_toolkit/io/ATL03.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -ATL03.py (03/2024) +ATL03.py (05/2024) Read ICESat-2 ATL03 and ATL09 data files to calculate average segment surfaces ATL03 datasets: Global Geolocated Photons ATL09 datasets: Atmospheric Characteristics @@ -15,6 +15,8 @@ https://www.h5py.org/ UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies + check if input filename is an open HDF5 file object Updated 03/2024: use pathlib to define and operate on paths Updated 11/2023: drop DIMENSION_LIST, CLASS and NAME attributes Updated 12/2022: place some imports behind try/except statements @@ -41,12 +43,10 @@ import warnings import numpy as np import scipy.interpolate +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = import_dependency('h5py') # PURPOSE: read ICESat-2 ATL03 HDF5 data files def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): @@ -72,6 +72,8 @@ def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -299,6 +301,8 @@ def interpolate_ATL09(FILENAME, pfl, dtime, ATTRIBUTES=True, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -368,6 +372,8 @@ def find_beams(FILENAME, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = 
FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -414,6 +420,8 @@ def read_main(FILENAME, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -586,6 +594,8 @@ def read_beam(FILENAME, gtx, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') diff --git a/icesat2_toolkit/io/ATL06.py b/icesat2_toolkit/io/ATL06.py index 4bd1bcc..4ff7309 100644 --- a/icesat2_toolkit/io/ATL06.py +++ b/icesat2_toolkit/io/ATL06.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -ATL06.py (03/2024) +ATL06.py (05/2024) Read ICESat-2 ATL06 (Land Ice Along-Track Height Product) data files OPTIONS: @@ -16,6 +16,8 @@ https://www.h5py.org/ UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies + check if input filename is an open HDF5 file object Updated 03/2024: use pathlib to define and operate on paths Updated 11/2023: drop DIMENSION_LIST, CLASS and NAME attributes Updated 05/2023: extract more ancillary data from ATL06 files @@ -39,12 +41,10 @@ import pathlib import warnings import numpy as np +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = import_dependency('h5py') # PURPOSE: read ICESat-2 ATL06 HDF5 data files def read_granule(FILENAME, ATTRIBUTES=False, HISTOGRAM=False, @@ -75,6 +75,8 @@ def read_granule(FILENAME, ATTRIBUTES=False, HISTOGRAM=False, # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -287,6 +289,8 @@ def find_beams(FILENAME, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -342,6 +346,8 @@ def read_beam(FILENAME, gtx, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') diff --git a/icesat2_toolkit/io/ATL07.py b/icesat2_toolkit/io/ATL07.py index cbe9fdb..9c805cb 100644 --- a/icesat2_toolkit/io/ATL07.py +++ b/icesat2_toolkit/io/ATL07.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -ATL07.py (03/2024) +ATL07.py (05/2024) Read ICESat-2 ATL07 (Sea Ice Height) data files PYTHON DEPENDENCIES: @@ -11,6 +11,8 @@ https://www.h5py.org/ UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies + check if input filename is an open HDF5 file object Updated 03/2024: use pathlib to define and operate on paths Updated 11/2023: drop DIMENSION_LIST, CLASS and NAME attributes Updated 05/2023: extract more ancillary data from ATL07 files @@ 
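
With the branch added to each reader above, FILENAME may be a path-like object, an open bytes buffer, or an already-open h5py.File. A usage sketch, assuming a local granule name and the (data, attributes, beams) return triple of the ATL06 reader:

    import io
    import h5py
    from icesat2_toolkit.io import ATL06

    granule = 'ATL06_example.h5'  # hypothetical local granule
    # 1) path-like input: opened internally with h5py.File
    mds, attrs, beams = ATL06.read_granule(granule, ATTRIBUTES=True)
    # 2) open bytes buffer (io.IOBase branch)
    with open(granule, 'rb') as fid:
        mds, attrs, beams = ATL06.read_granule(io.BytesIO(fid.read()))
    # 3) already-open h5py.File object (the new branch)
    fileID = h5py.File(granule, 'r')
    mds, attrs, beams = ATL06.read_granule(fileID)
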
-32,12 +34,10 @@ import pathlib import warnings import numpy as np +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = import_dependency('h5py') # PURPOSE: read ICESat-2 ATL07 HDF5 data files def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): @@ -63,6 +63,8 @@ def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -230,6 +232,8 @@ def find_beams(FILENAME, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -280,6 +284,8 @@ def read_beam(FILENAME, gtx, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') diff --git a/icesat2_toolkit/io/ATL10.py b/icesat2_toolkit/io/ATL10.py index 2ad9ed1..752254d 100644 --- a/icesat2_toolkit/io/ATL10.py +++ b/icesat2_toolkit/io/ATL10.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -ATL10.py (03/2024) +ATL10.py (05/2024) Read ICESat-2 ATL10 (Sea Ice Freeboard) data files PYTHON DEPENDENCIES: @@ -11,6 +11,8 @@ https://www.h5py.org/ UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies + check if input filename is an open HDF5 file object Updated 03/2024: use pathlib to define and operate on paths Updated 11/2023: drop DIMENSION_LIST, CLASS and NAME attributes Updated 05/2023: extract more ancillary data from ATL10 files @@ -27,12 +29,10 @@ import pathlib import warnings import numpy as np +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = import_dependency('h5py') # PURPOSE: read ICESat-2 ATL10 HDF5 data files def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): @@ -58,6 +58,8 @@ def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -228,6 +230,8 @@ def find_beams(FILENAME, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -279,6 +283,8 @@ def read_beam(FILENAME, gtx, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') diff --git a/icesat2_toolkit/io/ATL11.py b/icesat2_toolkit/io/ATL11.py index 41dc33e..e73b958 100644 --- 
a/icesat2_toolkit/io/ATL11.py +++ b/icesat2_toolkit/io/ATL11.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -ATL11.py (03/2024) +ATL11.py (05/2024) Read ICESat-2 ATL11 (Annual Land Ice Height) data files OPTIONS: @@ -18,6 +18,8 @@ https://www.h5py.org/ UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies + check if input filename is an open HDF5 file object Updated 03/2024: use pathlib to define and operate on paths Updated 11/2023: drop DIMENSION_LIST, CLASS and NAME attributes Updated 05/2023: extract more ancillary data from ATL11 files @@ -40,12 +42,10 @@ import pathlib import warnings import numpy as np +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = import_dependency('h5py') # PURPOSE: read ICESat-2 ATL11 HDF5 data files def read_granule(FILENAME, GROUPS=['cycle_stats'], ATTRIBUTES=False, @@ -80,6 +80,8 @@ def read_granule(FILENAME, GROUPS=['cycle_stats'], ATTRIBUTES=False, # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -243,6 +245,8 @@ def find_pairs(FILENAME, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -300,6 +304,8 @@ def read_pair(FILENAME, ptx, GROUPS=['cycle_stats'], # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') diff --git a/icesat2_toolkit/io/ATL12.py b/icesat2_toolkit/io/ATL12.py index a200b10..7e24db5 100644 --- a/icesat2_toolkit/io/ATL12.py +++ b/icesat2_toolkit/io/ATL12.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -ATL12.py (03/2024) +ATL12.py (05/2024) Read ICESat-2 ATL12 (Ocean Surface Height) data files PYTHON DEPENDENCIES: @@ -11,6 +11,8 @@ https://www.h5py.org/ UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies + check if input filename is an open HDF5 file object Updated 03/2024: use pathlib to define and operate on paths Updated 11/2023: drop DIMENSION_LIST, CLASS and NAME attributes Updated 05/2023: extract more ancillary data from ATL12 files @@ -31,13 +33,10 @@ import pathlib import warnings import numpy as np +from icesat2_toolkit.utilities import import_dependency # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) - +h5py = import_dependency('h5py') # PURPOSE: read ICESat-2 ATL12 HDF5 data files def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): """ @@ -62,6 +61,8 @@ def read_granule(FILENAME, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -224,6 +225,8 @@ def find_beams(FILENAME, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + 
elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') @@ -274,6 +277,8 @@ def read_beam(FILENAME, gtx, ATTRIBUTES=False, **kwargs): # Open the HDF5 file for reading if isinstance(FILENAME, io.IOBase): fileID = h5py.File(FILENAME, 'r') + elif isinstance(FILENAME, h5py.File): + fileID = FILENAME else: FILENAME = pathlib.Path(FILENAME).expanduser().absolute() fileID = h5py.File(FILENAME, 'r') diff --git a/icesat2_toolkit/spatial.py b/icesat2_toolkit/spatial.py index 5cb38f7..cec6bcf 100644 --- a/icesat2_toolkit/spatial.py +++ b/icesat2_toolkit/spatial.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" spatial.py -Written by Tyler Sutterley (03/2024) +Written by Tyler Sutterley (05/2024) Utilities for reading and operating on spatial data @@ -17,6 +17,7 @@ https://pypi.python.org/pypi/GDAL UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 03/2024: can calculate polar stereographic distortion for distances Updated 05/2023: using pathlib to define and expand paths Updated 04/2023: copy inputs in cartesian to not modify original arrays @@ -42,19 +43,14 @@ import pathlib import warnings import numpy as np +from icesat2_toolkit.utilities import import_dependency + # attempt imports -try: - import osgeo.gdal, osgeo.osr, osgeo.gdalconst -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("GDAL not available", ImportWarning) -try: - import h5py -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("h5py not available", ImportWarning) -try: - import netCDF4 -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("netCDF4 not available", ImportWarning) +gdal = import_dependency('osgeo.gdal') +osr = import_dependency('osgeo.osr') +gdalconst = import_dependency('osgeo.gdalconst') +h5py = import_dependency('h5py') +netCDF4 = import_dependency('netCDF4') def case_insensitive_filename(filename: str | pathlib.Path): """ @@ -193,7 +189,7 @@ def from_netCDF4(filename: str, **kwargs): fileID.variables[grid_mapping].getncattr(att_name) # get the spatial projection reference information from wkt # and overwrite the file-level projection attribute (if existing) - srs = osgeo.osr.SpatialReference() + srs = osr.SpatialReference() srs.ImportFromWkt(dinput['attributes']['crs']['crs_wkt']) dinput['attributes']['projection'] = srs.ExportToProj4() # convert to masked array if fill values @@ -298,7 +294,7 @@ def from_HDF5(filename: str | pathlib.Path, **kwargs): dinput['attributes']['crs'][att_name] = att_val # get the spatial projection reference information from wkt # and overwrite the file-level projection attribute (if existing) - srs = osgeo.osr.SpatialReference() + srs = osr.SpatialReference() srs.ImportFromWkt(dinput['attributes']['crs']['crs_wkt']) dinput['attributes']['projection'] = srs.ExportToProj4() # convert to masked array if fill values @@ -331,16 +327,16 @@ def from_geotiff(filename: str, **kwargs): if (kwargs['compression'] == 'gzip'): # read as GDAL gzip virtual geotiff dataset mmap_name = f"/vsigzip/{str(case_insensitive_filename(filename))}" - ds = osgeo.gdal.Open(mmap_name) + ds = gdal.Open(mmap_name) elif (kwargs['compression'] == 'bytes'): # read as GDAL memory-mapped (diskless) geotiff dataset mmap_name = f"/vsimem/{uuid.uuid4().hex}" - osgeo.gdal.FileFromMemBuffer(mmap_name, filename.read()) - ds = osgeo.gdal.Open(mmap_name) + gdal.FileFromMemBuffer(mmap_name, 
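
For reference, a standalone sketch of the GDAL virtual file system pattern that from_geotiff uses here for in-memory (bytes) input; the GeoTIFF file name is hypothetical:

    import uuid
    from osgeo import gdal

    # register raw GeoTIFF bytes under a unique /vsimem (diskless) path
    with open('example.tif', 'rb') as fid:  # hypothetical GeoTIFF
        mmap_name = f"/vsimem/{uuid.uuid4().hex}"
        gdal.FileFromMemBuffer(mmap_name, fid.read())
    # open the virtual dataset and read the first band
    ds = gdal.Open(mmap_name)
    im = ds.GetRasterBand(1).ReadAsArray()
    # close the dataset and free the virtual file
    ds = None
    gdal.Unlink(mmap_name)
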
filename.read()) + ds = gdal.Open(mmap_name) else: # read geotiff dataset - ds = osgeo.gdal.Open(str(case_insensitive_filename(filename)), - osgeo.gdalconst.GA_ReadOnly) + ds = gdal.Open(str(case_insensitive_filename(filename)), + gdalconst.GA_ReadOnly) # print geotiff file if verbose logging.info(str(filename)) # create python dictionary for output variables and attributes diff --git a/icesat2_toolkit/tools.py b/icesat2_toolkit/tools.py index cb84503..29741a0 100644 --- a/icesat2_toolkit/tools.py +++ b/icesat2_toolkit/tools.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" tools.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Plotting tools and utilities PYTHON DEPENDENCIES: @@ -13,6 +13,7 @@ https://github.com/matplotlib/matplotlib UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: add catch for existing colormaps Updated 03/2024: use pathlib to define and operate on paths Updated 04/2022: updated docstrings to numpy documentation format @@ -25,12 +26,11 @@ import colorsys import warnings import numpy as np +from icesat2_toolkit.utilities import import_dependency + # attempt imports -try: - import matplotlib.cm as cm - import matplotlib.colors as colors -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("matplotlib not available", ImportWarning) +cm = import_dependency('matplotlib.cm') +colors = import_dependency('matplotlib.colors') def from_cpt(filename, use_extremes=True, **kwargs): """ diff --git a/icesat2_toolkit/utilities.py b/icesat2_toolkit/utilities.py index 8562150..50ae505 100644 --- a/icesat2_toolkit/utilities.py +++ b/icesat2_toolkit/utilities.py @@ -1,14 +1,19 @@ #!/usr/bin/env python u""" utilities.py -Written by Tyler Sutterley (03/2024) +Written by Tyler Sutterley (05/2024) Download and management utilities for syncing time and auxiliary files PYTHON DEPENDENCIES: + boto3: Amazon Web Services (AWS) SDK for Python + https://boto3.amazonaws.com/v1/documentation/api/latest/index.html lxml: processing XML and HTML in Python https://pypi.python.org/pypi/lxml + s3fs: FUSE-based file system backed by Amazon S3 + https://s3fs.readthedocs.io/en/latest/ UPDATE HISTORY: + Updated 04/2024: add wrapper to importlib for optional dependencies Updated 03/2024: can use regions to filter sea ice products in cmr queries Updated 11/2023: updated ssl context to fix deprecation error Updated 07/2023: add function for S3 filesystem @@ -67,6 +72,7 @@ import datetime import dateutil import warnings +import importlib import posixpath import lxml.etree import subprocess @@ -80,16 +86,6 @@ from urllib.parse import urlencode import urllib.request as urllib2 -# attempt imports -try: - import boto3 -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("boto3 not available", ImportWarning) -try: - import s3fs -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("s3fs not available", ImportWarning) - # PURPOSE: get absolute path within a package from a relative path def get_data_path(relpath: list | str | pathlib.Path): """ @@ -109,6 +105,46 @@ def get_data_path(relpath: list | str | pathlib.Path): elif isinstance(relpath, (str, pathlib.Path)): return filepath.joinpath(relpath) +def import_dependency( + name: str, + extra: str = "", + raise_exception: bool = False + ): + """ + Import an optional dependency + + Adapted from ``pandas.compat._optional::import_optional_dependency`` + + Parameters + ---------- + name: str + Module name + 
extra: str, default "" + Additional text to include in the ``ImportError`` message + raise_exception: bool, default False + Raise an ``ImportError`` if the module is not found + + Returns + ------- + module: obj + Imported module + """ + # check if the module name is a string + msg = f"Invalid module name: '{name}'; must be a string" + assert isinstance(name, str), msg + # try to import the module + err = f"Missing optional dependency '{name}'. {extra}" + module = None + try: + module = importlib.import_module(name) + except (ImportError, ModuleNotFoundError) as exc: + if raise_exception: + raise ImportError(err) from exc + else: + logging.debug(err) + # return the module + return module + # PURPOSE: get the hash value of a file def get_hash( local: str | io.IOBase | pathlib.Path, @@ -390,6 +426,7 @@ def s3_client( response = urllib2.urlopen(request, timeout=timeout) cumulus = json.loads(response.read()) # get AWS client object + boto3 = import_dependency(name='boto3') client = boto3.client('s3', aws_access_key_id=cumulus['accessKeyId'], aws_secret_access_key=cumulus['secretAccessKey'], @@ -426,6 +463,7 @@ def s3_filesystem( response = urllib2.urlopen(request, timeout=timeout) cumulus = json.loads(response.read()) # get AWS file system session object + s3fs = import_dependency(name='s3fs') session = s3fs.S3FileSystem(anon=False, key=cumulus['accessKeyId'], secret=cumulus['secretAccessKey'], @@ -530,6 +568,7 @@ def generate_presigned_url( s3 presigned https url """ # generate a presigned URL for S3 object + boto3 = import_dependency(name='boto3') s3 = boto3.client('s3') try: response = s3.generate_presigned_url('get_object', diff --git a/scripts/MPI_DEM_ICESat2_ATL03.py b/scripts/MPI_DEM_ICESat2_ATL03.py index 2b78272..9062e55 100644 --- a/scripts/MPI_DEM_ICESat2_ATL03.py +++ b/scripts/MPI_DEM_ICESat2_ATL03.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_DEM_ICESat2_ATL03.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Determines which digital elevation model tiles to read for a given ATL03 file Reads 3x3 array of tiles for points within bounding box of central mosaic tile Interpolates digital elevation model to ICESat-2 ATL03 photon event locations @@ -60,6 +60,7 @@ https://nsidc.org/data/nsidc-0645/versions/1 UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -103,7 +104,6 @@ import os import re import uuid -import pyproj import logging import pathlib import tarfile @@ -113,29 +113,15 @@ import numpy as np import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import fiona -except ModuleNotFoundError: - warnings.warn("fiona not available") - warnings.warn("Some functions will throw an exception if called") -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import osgeo.gdal -except ModuleNotFoundError: - warnings.warn("GDAL not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +fiona = is2tk.utilities.import_dependency('fiona') +gdal = is2tk.utilities.import_dependency('osgeo.gdal') +h5py = 
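
As defined above, the wrapper returns the imported module when it is available; on a missing module it returns None and logs the message at debug level, or raises when raise_exception=True. A small demonstration with a deliberately non-existent module name:

    import logging
    from icesat2_toolkit.utilities import import_dependency

    logging.basicConfig(level=logging.DEBUG)
    # missing optional dependency: returns None and logs a debug message
    module = import_dependency('not_a_real_module')
    assert module is None
    # raise_exception=True converts the miss into an ImportError
    try:
        import_dependency('not_a_real_module',
            extra='required for this demo', raise_exception=True)
    except ImportError as exc:
        print(exc)
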
is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # digital elevation models elevation_dir = {} @@ -292,7 +278,7 @@ def read_DEM_index(index_file, DEM_MODEL): # extract Polar Stereographic coordinates for entity x = [ul[0],ur[0],lr[0],ll[0],ul2[0]] y = [ul[1],ur[1],lr[1],ll[1],ul2[1]] - poly_obj = Polygon(list(zip(x,y))) + poly_obj = geometry.Polygon(np.c_[x, y]) # Valid Polygon may not possess overlapping exterior or interior rings if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -310,7 +296,7 @@ def read_DEM_file(elevation_file, nd_value): member, = [m for m in tar.getmembers() if re.search(r'dem\.tif',m.name)] # use GDAL virtual file systems to read dem mmap_name = f"/vsitar/{elevation_file}/{member.name}" - ds = osgeo.gdal.Open(mmap_name) + ds = gdal.Open(mmap_name) # read data matrix im = ds.GetRasterBand(1).ReadAsArray() fill_value = ds.GetRasterBand(1).GetNoDataValue() @@ -334,7 +320,7 @@ def read_DEM_file(elevation_file, nd_value): ymin = ymax + (ysize-1)*info_geotiff[5] # close files ds = None - osgeo.gdal.Unlink(mmap_name) + gdal.Unlink(mmap_name) tar.close() # create image x and y arrays xi = np.arange(xmin,xmax+info_geotiff[1],info_geotiff[1]) @@ -350,7 +336,7 @@ def read_DEM_buffer(elevation_file, xlimits, ylimits, nd_value): member, = [m for m in tar.getmembers() if re.search(r'dem\.tif',m.name)] # use GDAL virtual file systems to read dem mmap_name = f"/vsitar/{elevation_file}/{member.name}" - ds = osgeo.gdal.Open(mmap_name) + ds = gdal.Open(mmap_name) # get geotiff info info_geotiff = ds.GetGeoTransform() # original image extents @@ -380,7 +366,7 @@ def read_DEM_buffer(elevation_file, xlimits, ylimits, nd_value): ymin_reduced = ymax + yoffset*info_geotiff[5] + (ycount-1)*info_geotiff[5] # close files ds = None - osgeo.gdal.Unlink(mmap_name) + gdal.Unlink(mmap_name) tar.close() # create image x and y arrays xi = np.arange(xmin_reduced,xmax_reduced+info_geotiff[1],info_geotiff[1]) @@ -508,7 +494,7 @@ def main(): X,Y = transformer.transform(longitude, latitude) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(list(zip(X[ind], Y[ind]))) + xy_point = geometry.MultiPoint(list(zip(X[ind], Y[ind]))) # create complete masks for each DEM tile associated_map = {} @@ -936,9 +922,9 @@ def HDF5_ATL03_dem_write(IS2_atl03_dem, IS2_atl03_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_DEM_ICESat2_ATL06.py b/scripts/MPI_DEM_ICESat2_ATL06.py index daa54cd..043aa71 100644 --- a/scripts/MPI_DEM_ICESat2_ATL06.py +++ b/scripts/MPI_DEM_ICESat2_ATL06.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_DEM_ICESat2_ATL06.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Determines which digital elevation model tiles to read for a given ATL06 file Reads 3x3 
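
The attribute-writer hunks above also fix a shadowing bug: binding the Timescale instance to a variable named timescale clobbered the imported timescale module, so the instance is now bound to ts. A standalone sketch of the conversion (tmn and tmx are placeholder ATLAS SDP delta times in seconds):

    import numpy as np
    import timescale.time

    tmn, tmx = 0.0, 86400.0  # hypothetical first and last delta times
    ts = timescale.time.Timescale().from_deltatime(np.array([tmn, tmx]),
        epoch=timescale.time._atlas_sdp_epoch, standard='GPS')
    dt = np.datetime_as_string(ts.to_datetime(), unit='s')
    print(dt[0], dt[1])  # time_coverage_start and time_coverage_end
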
array of tiles for points within bounding box of central mosaic tile Interpolates digital elevation model to locations of ICESat-2 ATL06 segments @@ -60,6 +60,7 @@ https://nsidc.org/data/nsidc-0645/versions/1 UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -103,7 +104,6 @@ import os import re import uuid -import pyproj import logging import pathlib import tarfile @@ -113,29 +113,15 @@ import numpy as np import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import fiona -except ModuleNotFoundError: - warnings.warn("fiona not available") - warnings.warn("Some functions will throw an exception if called") -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import osgeo.gdal -except ModuleNotFoundError: - warnings.warn("GDAL not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +fiona = is2tk.utilities.import_dependency('fiona') +gdal = is2tk.utilities.import_dependency('osgeo.gdal') +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # digital elevation models elevation_dir = {} @@ -292,7 +278,7 @@ def read_DEM_index(index_file, DEM_MODEL): # extract Polar Stereographic coordinates for entity x = [ul[0],ur[0],lr[0],ll[0],ul2[0]] y = [ul[1],ur[1],lr[1],ll[1],ul2[1]] - poly_obj = Polygon(list(zip(x,y))) + poly_obj = geometry.Polygon(np.c_[x, y]) # Valid Polygon may not possess overlapping exterior or interior rings if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -310,7 +296,7 @@ def read_DEM_file(elevation_file, nd_value): member, = [m for m in tar.getmembers() if re.search(r'dem\.tif',m.name)] # use GDAL virtual file systems to read dem mmap_name = f"/vsitar/{elevation_file}/{member.name}" - ds = osgeo.gdal.Open(mmap_name) + ds = gdal.Open(mmap_name) # read data matrix im = ds.GetRasterBand(1).ReadAsArray() fill_value = ds.GetRasterBand(1).GetNoDataValue() @@ -334,7 +320,7 @@ def read_DEM_file(elevation_file, nd_value): ymin = ymax + (ysize-1)*info_geotiff[5] # close files ds = None - osgeo.gdal.Unlink(mmap_name) + gdal.Unlink(mmap_name) tar.close() # create image x and y arrays xi = np.arange(xmin,xmax+info_geotiff[1],info_geotiff[1]) @@ -350,7 +336,7 @@ def read_DEM_buffer(elevation_file, xlimits, ylimits, nd_value): member, = [m for m in tar.getmembers() if re.search(r'dem\.tif',m.name)] # use GDAL virtual file systems to read dem mmap_name = f"/vsitar/{elevation_file}/{member.name}" - ds = osgeo.gdal.Open(mmap_name) + ds = gdal.Open(mmap_name) # get geotiff info info_geotiff = ds.GetGeoTransform() # original image extents @@ -380,7 +366,7 @@ def read_DEM_buffer(elevation_file, xlimits, ylimits, nd_value): ymin_reduced = ymax + yoffset*info_geotiff[5] + (ycount-1)*info_geotiff[5] # close files ds = None - osgeo.gdal.Unlink(mmap_name) + gdal.Unlink(mmap_name) tar.close() # create image x and y arrays xi = 
np.arange(xmin_reduced,xmax_reduced+info_geotiff[1],info_geotiff[1]) @@ -508,7 +494,7 @@ def main(): X,Y = transformer.transform(longitude, latitude) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(list(zip(X[ind], Y[ind]))) + xy_point = geometry.MultiPoint(np.c_[X[ind], Y[ind]]) # create complete masks for each DEM tile associated_map = {} @@ -991,9 +977,9 @@ def HDF5_ATL06_dem_write(IS2_atl06_dem, IS2_atl06_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_DEM_ICESat2_ATL11.py b/scripts/MPI_DEM_ICESat2_ATL11.py index 4d437a9..d09be6d 100644 --- a/scripts/MPI_DEM_ICESat2_ATL11.py +++ b/scripts/MPI_DEM_ICESat2_ATL11.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_DEM_ICESat2_ATL11.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Determines which digital elevation model tiles to read for a given ATL11 file Reads 3x3 array of tiles for points within bounding box of central mosaic tile Interpolates digital elevation model to locations of ICESat-2 ATL11 segments @@ -60,6 +60,7 @@ https://nsidc.org/data/nsidc-0645/versions/1 UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -81,7 +82,6 @@ import os import re import uuid -import pyproj import logging import pathlib import tarfile @@ -92,29 +92,15 @@ import numpy as np import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import fiona -except ModuleNotFoundError: - warnings.warn("fiona not available") - warnings.warn("Some functions will throw an exception if called") -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import osgeo.gdal -except ModuleNotFoundError: - warnings.warn("GDAL not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +fiona = is2tk.utilities.import_dependency('fiona') +gdal = is2tk.utilities.import_dependency('osgeo.gdal') +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # digital elevation models elevation_dir = {} @@ -272,7 +258,7 @@ def read_DEM_index(index_file, DEM_MODEL): # extract Polar Stereographic coordinates for entity x = [ul[0],ur[0],lr[0],ll[0],ul2[0]] y = [ul[1],ur[1],lr[1],ll[1],ul2[1]] - poly_obj = Polygon(list(zip(x,y))) + poly_obj = geometry.Polygon(np.c_[x, y]) # Valid Polygon may not possess overlapping exterior or 
interior rings if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -291,7 +277,7 @@ def read_DEM_file(elevation_file, nd_value): member, = [m for m in tar.getmembers() if re.search(r'dem\.tif',m.name)] # use GDAL virtual file systems to read dem mmap_name = f"/vsitar/{str(elevation_file)}/{member.name}" - ds = osgeo.gdal.Open(mmap_name) + ds = gdal.Open(mmap_name) # read data matrix im = ds.GetRasterBand(1).ReadAsArray() fill_value = ds.GetRasterBand(1).GetNoDataValue() @@ -315,7 +301,7 @@ def read_DEM_file(elevation_file, nd_value): ymin = ymax + (ysize-1)*info_geotiff[5] # close files ds = None - osgeo.gdal.Unlink(mmap_name) + gdal.Unlink(mmap_name) tar.close() # create image x and y arrays xi = np.arange(xmin,xmax+info_geotiff[1],info_geotiff[1]) @@ -332,7 +318,7 @@ def read_DEM_buffer(elevation_file, xlimits, ylimits, nd_value): member, = [m for m in tar.getmembers() if re.search(r'dem\.tif',m.name)] # use GDAL virtual file systems to read dem mmap_name = f"/vsitar/{str(elevation_file)}/{member.name}" - ds = osgeo.gdal.Open(mmap_name) + ds = gdal.Open(mmap_name) # get geotiff info info_geotiff = ds.GetGeoTransform() # original image extents @@ -362,7 +348,7 @@ def read_DEM_buffer(elevation_file, xlimits, ylimits, nd_value): ymin_reduced = ymax + yoffset*info_geotiff[5] + (ycount-1)*info_geotiff[5] # close files ds = None - osgeo.gdal.Unlink(mmap_name) + gdal.Unlink(mmap_name) tar.close() # create image x and y arrays xi = np.arange(xmin_reduced,xmax_reduced+info_geotiff[1],info_geotiff[1]) @@ -485,7 +471,7 @@ def main(): fileID[ptx]['latitude'][:]) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(list(zip(X[ind], Y[ind]))) + xy_point = geometry.MultiPoint(np.c_[X[ind], Y[ind]]) # create complete masks for each DEM tile associated_map = {} @@ -962,9 +948,9 @@ def HDF5_ATL11_dem_write(IS2_atl11_dem, IS2_atl11_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_ICESat2_ATL03.py b/scripts/MPI_ICESat2_ATL03.py index e69e21e..ff86838 100644 --- a/scripts/MPI_ICESat2_ATL03.py +++ b/scripts/MPI_ICESat2_ATL03.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -MPI_ICESat2_ATL03.py (04/2024) +MPI_ICESat2_ATL03.py (05/2024) Read ICESat-2 ATL03 and ATL09 data files to calculate average segment surfaces ATL03 datasets: Global Geolocated Photons ATL09 datasets: Atmospheric Characteristics @@ -42,6 +42,7 @@ classify_photons.py: Yet Another Photon Classifier for Geolocated Photon Data UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -87,25 +88,13 @@ import scipy.signal import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", 
ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import sklearn.neighbors - import sklearn.cluster -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("scikit-learn not available", ImportWarning) -try: - import yapc.classify_photons -except ModuleNotFoundError: - warnings.warn("pyYAPC not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +cluster = is2tk.utilities.import_dependency('sklearn.cluster') +yapc = is2tk.utilities.import_dependency('yapc') # PURPOSE: keep track of MPI threads def info(rank, size): @@ -589,7 +578,7 @@ def main(): # and that the spread of photons is greater than 20m if (pe_sig_low_count > 10) & (along_X_spread > 20): # use density-based spatial clustering in segment - db = sklearn.cluster.DBSCAN(eps=0.5).fit( + db = cluster.DBSCAN(eps=0.5).fit( np.c_[distance_along_X, segment_heights], sample_weight=photon_snr) labels = db.labels_ @@ -783,7 +772,7 @@ def main(): # and that the spread of photons is greater than 40m if (pe_sig_low_count > 10) & (along_X_spread > 40): # use density-based spatial clustering in segment - db = sklearn.cluster.DBSCAN(eps=0.5).fit( + db = cluster.DBSCAN(eps=0.5).fit( np.c_[distance_along_X, segment_heights], sample_weight=photon_snr) labels = db.labels_ @@ -2442,9 +2431,9 @@ def HDF5_ATL03_write(IS2_atl03_data, IS2_atl03_attrs, COMM=None, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_ICESat2_ATL03_histogram.py b/scripts/MPI_ICESat2_ATL03_histogram.py index 29d1b38..dde3845 100644 --- a/scripts/MPI_ICESat2_ATL03_histogram.py +++ b/scripts/MPI_ICESat2_ATL03_histogram.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -MPI_ICESat2_ATL03_histogram.py (04/2024) +MPI_ICESat2_ATL03_histogram.py (05/2024) Read ICESat-2 ATL03 and ATL09 data files to calculate average segment surfaces ATL03 datasets: Global Geolocated Photons ATL09 datasets: Atmospheric Characteristics @@ -75,6 +75,7 @@ Geophysical Journal International (1997) 131, 267-280 UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -126,25 +127,13 @@ import scipy.optimize import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import sklearn.neighbors - import sklearn.cluster -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("scikit-learn not available", ImportWarning) -try: - import 
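
The DBSCAN calls in this script change only in import style: sklearn.cluster is now bound as cluster through the wrapper. A self-contained sketch of the density-based clustering applied to each along-track segment, using synthetic photon coordinates and weights:

    import numpy as np
    from sklearn import cluster

    rng = np.random.default_rng(0)
    distance_along_X = rng.uniform(0.0, 40.0, 100)  # synthetic along-track distance (m)
    segment_heights = rng.normal(0.0, 0.5, 100)     # synthetic photon heights (m)
    photon_snr = np.ones((100))                     # synthetic photon weights
    # density-based spatial clustering within the segment
    db = cluster.DBSCAN(eps=0.5).fit(
        np.c_[distance_along_X, segment_heights],
        sample_weight=photon_snr)
    labels = db.labels_  # cluster labels; -1 marks noise photons
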
yapc.classify_photons -except ModuleNotFoundError: - warnings.warn("pyYAPC not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +cluster = is2tk.utilities.import_dependency('sklearn.cluster') +yapc = is2tk.utilities.import_dependency('yapc') # PURPOSE: keep track of MPI threads def info(rank, size): @@ -666,7 +655,7 @@ def main(): # and that the spread of photons is greater than 20m if (ice_sig_low_count > 10) & (along_X_spread > 20): # use density-based spatial clustering in segment - db = sklearn.cluster.DBSCAN(eps=0.5).fit( + db = cluster.DBSCAN(eps=0.5).fit( np.c_[distance_along_X, segment_heights], sample_weight=photon_snr) labels = db.labels_ @@ -875,7 +864,7 @@ def main(): # and that the spread of photons is greater than 40m if (pe_sig_low_count > 10) & (along_X_spread > 40): # use density-based spatial clustering in segment - db = sklearn.cluster.DBSCAN(eps=0.5).fit( + db = cluster.DBSCAN(eps=0.5).fit( np.c_[distance_along_X, segment_heights], sample_weight=photon_snr) labels = db.labels_ @@ -2712,9 +2701,9 @@ def HDF5_ATL03_write(IS2_atl03_data, IS2_atl03_attrs, COMM=None, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL03_RGI.py b/scripts/MPI_reduce_ICESat2_ATL03_RGI.py index 28712bb..02dd589 100644 --- a/scripts/MPI_reduce_ICESat2_ATL03_RGI.py +++ b/scripts/MPI_reduce_ICESat2_ATL03_RGI.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL03_RGI.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data to the Randolph Glacier Inventory https://www.glims.org/RGI/rgi60_dl.html @@ -57,6 +57,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -94,24 +95,13 @@ import warnings import numpy as np import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # PURPOSE: keep track of MPI threads def info(rank, size): @@ -200,10 +190,10 
@@ def load_glacier_inventory(RGI_DIRECTORY,RGI_REGION): # list object for coordinates (exterior and holes) poly_list = [] # add each part to list - for p1,p2 in zip(parts[:-1],parts[1:]): - poly_list.append(list(zip(points[p1:p2,0],points[p1:p2,1]))) + for p1,p2 in zip(parts[:-1], parts[1:]): + poly_list.append(np.c_[points[p1:p2,0], points[p1:p2,1]]) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0], poly_list[1:]) # Valid Polygon may not possess overlapping exterior or interior rings if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -239,7 +229,8 @@ def main(): # extract parameters from ICESat-2 ATLAS HDF5 file name rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX=rx.findall(args.file.name).pop() + SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX = \ + rx.findall(args.file.name).pop() # read data on rank 0 if (comm.rank == 0): @@ -312,7 +303,7 @@ def main(): latitude = fileID[gtx]['heights']['lat_ph'][:] # convert reduced lat/lon to shapely multipoint object - xy_point = MultiPoint(list(zip(longitude[ind],latitude[ind]))) + xy_point = geometry.MultiPoint(np.c_[longitude[ind], latitude[ind]]) # create distributed intersection map for calculation distributed_map = np.zeros((n_pe),dtype=bool) @@ -587,9 +578,9 @@ def HDF5_ATL03_mask_write(IS2_atl03_mask, IS2_atl03_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL06_RGI.py b/scripts/MPI_reduce_ICESat2_ATL06_RGI.py index c705420..fa57394 100644 --- a/scripts/MPI_reduce_ICESat2_ATL06_RGI.py +++ b/scripts/MPI_reduce_ICESat2_ATL06_RGI.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL06_RGI.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data to the Randolph Glacier Inventory https://www.glims.org/RGI/rgi60_dl.html @@ -57,6 +57,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -93,24 +94,13 @@ import warnings import numpy as np import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - 
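
load_glacier_inventory above assembles each RGI record from an exterior ring plus any interior rings (holes), now as numpy column stacks rather than zipped lists. A minimal sketch with synthetic closed rings:

    import numpy as np
    import shapely.geometry as geometry

    # exterior ring and one interior ring (hole), both closed
    exterior = np.c_[[0.0, 4.0, 4.0, 0.0, 0.0], [0.0, 0.0, 4.0, 4.0, 0.0]]
    hole = np.c_[[1.0, 2.0, 2.0, 1.0, 1.0], [1.0, 1.0, 2.0, 2.0, 1.0]]
    poly_list = [exterior, hole]
    # first ring is the shell; remaining rings are treated as holes
    poly_obj = geometry.Polygon(poly_list[0], poly_list[1:])
    # buffer(0) repairs polygons with self-intersecting rings
    if not poly_obj.is_valid:
        poly_obj = poly_obj.buffer(0)
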
warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # PURPOSE: keep track of MPI threads def info(rank, size): @@ -199,10 +189,10 @@ def load_glacier_inventory(RGI_DIRECTORY,RGI_REGION): # list object for coordinates (exterior and holes) poly_list = [] # add each part to list - for p1,p2 in zip(parts[:-1],parts[1:]): - poly_list.append(list(zip(points[p1:p2,0],points[p1:p2,1]))) + for p1,p2 in zip(parts[:-1], parts[1:]): + poly_list.append(np.c_[points[p1:p2,0], points[p1:p2,1]]) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0], poly_list[1:]) # Valid Polygon may not possess overlapping exterior or interior rings if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -238,7 +228,8 @@ def main(): # extract parameters from ICESat-2 ATLAS HDF5 file name rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX=rx.findall(args.file.name).pop() + SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX = \ + rx.findall(args.file.name).pop() # read data on rank 0 if (comm.rank == 0): @@ -313,7 +304,7 @@ def main(): latitude = fileID[gtx]['land_ice_segments']['latitude'][:].copy() # convert reduced lat/lon to shapely multipoint object - xy_point = MultiPoint(list(zip(longitude[ind],latitude[ind]))) + xy_point = geometry.MultiPoint(np.c_[longitude[ind], latitude[ind]]) # create distributed intersection map for calculation distributed_map = np.zeros((n_seg),dtype=bool) @@ -633,9 +624,9 @@ def HDF5_ATL06_mask_write(IS2_atl06_mask, IS2_atl06_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL06_drainages.py b/scripts/MPI_reduce_ICESat2_ATL06_drainages.py index cabcabc..9caa889 100644 --- a/scripts/MPI_reduce_ICESat2_ATL06_drainages.py +++ b/scripts/MPI_reduce_ICESat2_ATL06_drainages.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL06_drainages.py -Written by Tyler Sutterley (03/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data into IMBIE-2 drainage regions @@ -38,6 +38,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -65,7 +66,6 @@ import sys import os import re -import pyproj import logging import pathlib import datetime @@ -73,24 +73,14 @@ import warnings import numpy as np import icesat2_toolkit 
as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # IMBIE-2 Drainage basins IMBIE_basin_file = {} @@ -171,7 +161,7 @@ def load_IMBIE2_basins(basin_dir, HEM, EPSG): # extract Polar-Stereographic coordinates for record points = np.array(shape_entities[i].points) # shapely polygon object for region outline - poly_obj = Polygon(np.c_[points[:,0],points[:,1]]) + poly_obj = geometry.Polygon(np.c_[points[:,0],points[:,1]]) elif (HEM == 'N'): # no glaciers or ice caps i,=[i for i,a in enumerate(shape_attributes) if (a[0] == REGION)] @@ -187,7 +177,7 @@ def load_IMBIE2_basins(basin_dir, HEM, EPSG): X,Y = transformer.transform(points[p1:p2,0],points[p1:p2,1]) poly_list.append(np.c_[X,Y]) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0], poly_list[1:]) # check if polygon object is valid if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -221,7 +211,8 @@ def main(): # extract parameters from ICESat-2 ATLAS HDF5 file name rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX=rx.findall(args.file.name).pop() + SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX = \ + rx.findall(args.file.name).pop() # set the hemisphere flag based on ICESat-2 granule HEM = set_hemisphere(GRN) # pyproj transformer for converting lat/lon to polar stereographic @@ -308,7 +299,7 @@ def main(): # convert lat/lon to polar stereographic X,Y = transformer.transform(longitude[ind], latitude[ind]) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(np.c_[X, Y]) + xy_point = geometry.MultiPoint(np.c_[X, Y]) # calculate mask for each drainage basin in the dictionary associated_map = {} @@ -610,9 +601,9 @@ def HDF5_ATL06_mask_write(IS2_atl06_mask, IS2_atl06_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL06_grounded.py b/scripts/MPI_reduce_ICESat2_ATL06_grounded.py index a5af6dd..d775e11 100644 --- a/scripts/MPI_reduce_ICESat2_ATL06_grounded.py +++ b/scripts/MPI_reduce_ICESat2_ATL06_grounded.py @@ -1,7 +1,7 
@@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL06_grounded.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data into grounded ice regions @@ -40,6 +40,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -69,7 +70,6 @@ import sys import os import re -import pyproj import logging import pathlib import datetime @@ -77,24 +77,14 @@ import warnings import numpy as np import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # regional grounded ice files grounded_file = {} @@ -194,7 +184,7 @@ def load_grounded_ice(base_dir, BUFFER, HEM, AREA=0.0): for p1,p2 in zip(parts[:-1],parts[1:]): poly_list.append(list(zip(points[p1:p2,0],points[p1:p2,1]))) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0],poly_list[1:]) if (poly_obj.area < (AREA*1e6)): continue # buffer polygon object and add to total polygon dictionary object @@ -227,7 +217,8 @@ def main(): # extract parameters from ICESat-2 ATLAS HDF5 file name rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX=rx.findall(args.file.name).pop() + SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS,AUX = \ rx.findall(args.file.name).pop() # set the hemisphere flag based on ICESat-2 granule HEM = set_hemisphere(GRN) # pyproj transformer for converting lat/lon to polar stereographic @@ -307,7 +298,7 @@ def main(): # convert lat/lon to polar stereographic X,Y = transformer.transform(longitude[ind], latitude[ind]) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(np.c_[X, Y]) + xy_point = geometry.MultiPoint(np.c_[X, Y]) # calculate mask for each grounded region in the dictionary associated_map = {} @@ -625,9 +616,9 @@ def HDF5_ATL06_mask_write(IS2_atl06_mask, IS2_atl06_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration
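# The `ts` rename in the hunk above fixes a name-shadowing bug: assigning to
# the name `timescale` anywhere inside a function makes Python treat
# `timescale` as a local variable for the entire function body, so the
# right-hand call `timescale.time.Timescale()` raises UnboundLocalError
# before the module can even be reached. A minimal sketch of the failure
# mode (the function name here is hypothetical, not part of this patch):
import numpy as np
import timescale.time

def shadowed(tmn, tmx):
    # raises UnboundLocalError: the assignment below makes 'timescale'
    # local to this function, so the module is no longer visible on the
    # right-hand side of the assignment
    timescale = timescale.time.Timescale().from_deltatime(
        np.array([tmn, tmx]),
        epoch=timescale.time._atlas_sdp_epoch, standard='GPS')
    return np.datetime_as_string(timescale.to_datetime(), unit='s')
# Binding the result to a separate name such as `ts` leaves the module
# name reachable throughout the function.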
fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL06_ice_shelves.py b/scripts/MPI_reduce_ICESat2_ATL06_ice_shelves.py index 6881559..dced142 100644 --- a/scripts/MPI_reduce_ICESat2_ATL06_ice_shelves.py +++ b/scripts/MPI_reduce_ICESat2_ATL06_ice_shelves.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL06_ice_shelves.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data into regions of floating ice shelves @@ -39,6 +39,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -67,7 +68,6 @@ import sys import os import re -import pyproj import logging import pathlib import datetime @@ -75,24 +75,14 @@ import warnings import numpy as np import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # regional ice shelf files ice_shelf_file = {} @@ -186,7 +176,7 @@ def load_ice_shelves(base_dir, BUFFER, HEM): for p1,p2 in zip(parts[:-1],parts[1:]): poly_list.append(list(zip(points[p1:p2,0],points[p1:p2,1]))) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0], poly_list[1:]) # buffer polygon object and add to total polygon dictionary object poly_dict[shape_attributes[i][0]] = poly_obj.buffer(BUFFER*1e3) # return the polygon object and the input file name @@ -296,7 +286,7 @@ def main(): # convert lat/lon to polar stereographic X,Y = transformer.transform(longitude[ind], latitude[ind]) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(np.c_[X, Y]) + xy_point = geometry.MultiPoint(np.c_[X, Y]) # calculate mask for each ice shelf in the dictionary associated_map = {} @@ -614,9 +604,9 @@ def HDF5_ATL06_mask_write(IS2_atl06_mask, IS2_atl06_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git 
a/scripts/MPI_reduce_ICESat2_ATL11_RGI.py b/scripts/MPI_reduce_ICESat2_ATL11_RGI.py index 913575e..6e3dd2c 100644 --- a/scripts/MPI_reduce_ICESat2_ATL11_RGI.py +++ b/scripts/MPI_reduce_ICESat2_ATL11_RGI.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL11_RGI.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data to the Randolph Glacier Inventory https://www.glims.org/RGI/rgi60_dl.html @@ -15,7 +15,7 @@ 4: Arctic Canada South 5: Greenland Periphery 6: Iceland - 7: Svalbard + 7: Svalbard 8: Scandinavia 9: Russian Arctic 10: North Asia @@ -59,6 +59,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -86,24 +87,13 @@ import numpy as np import collections import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # PURPOSE: keep track of MPI threads def info(rank, size): @@ -192,10 +182,10 @@ def load_glacier_inventory(RGI_DIRECTORY,RGI_REGION): # list object for coordinates (exterior and holes) poly_list = [] # add each part to list - for p1,p2 in zip(parts[:-1],parts[1:]): - poly_list.append(list(zip(points[p1:p2,0],points[p1:p2,1]))) + for p1,p2 in zip(parts[:-1], parts[1:]): + poly_list.append(np.c_[points[p1:p2,0], points[p1:p2,1]]) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0], poly_list[1:]) # Valid Polygon may not possess overlapping exterior or interior rings if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -299,7 +289,7 @@ def main(): # convert reduced lat/lon to shapely multipoint object longitude = fileID[ptx]['longitude'][:].copy() latitude = fileID[ptx]['latitude'][:].copy() - xy_point = MultiPoint(list(zip(longitude[ind],latitude[ind]))) + xy_point = geometry.MultiPoint(np.c_[longitude[ind], latitude[ind]]) # create distributed intersection map for calculation distributed_map = np.zeros((n_points),dtype=bool) @@ -620,9 +610,9 @@ def HDF5_ATL11_mask_write(IS2_atl11_mask, IS2_atl11_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and
duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL11_drainages.py b/scripts/MPI_reduce_ICESat2_ATL11_drainages.py index b89c988..2b51409 100644 --- a/scripts/MPI_reduce_ICESat2_ATL11_drainages.py +++ b/scripts/MPI_reduce_ICESat2_ATL11_drainages.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL11_drainages.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data into IMBIE-2 drainage regions @@ -38,6 +38,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -56,7 +57,6 @@ import sys import os import re -import pyproj import logging import pathlib import datetime @@ -65,24 +65,14 @@ import numpy as np import collections import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # IMBIE-2 Drainage basins IMBIE_basin_file = {} @@ -163,7 +153,7 @@ def load_IMBIE2_basins(basin_dir, HEM, EPSG): # extract Polar-Stereographic coordinates for record points = np.array(shape_entities[i].points) # shapely polygon object for region outline - poly_obj = Polygon(np.c_[points[:,0],points[:,1]]) + poly_obj = geometry.Polygon(np.c_[points[:,0], points[:,1]]) elif (HEM == 'N'): # no glaciers or ice caps i,=[i for i,a in enumerate(shape_attributes) if (a[0] == REGION)] @@ -179,7 +169,7 @@ def load_IMBIE2_basins(basin_dir, HEM, EPSG): X,Y = transformer.transform(points[p1:p2,0],points[p1:p2,1]) poly_list.append(np.c_[X,Y]) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0],poly_list[1:]) # check if polygon object is valid if (not poly_obj.is_valid): poly_obj = poly_obj.buffer(0) @@ -294,7 +284,7 @@ def main(): X,Y = transformer.transform(fileID[ptx]['longitude'][:], fileID[ptx]['latitude'][:]) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(list(zip(X[ind], Y[ind]))) + xy_point = geometry.MultiPoint(np.c_[X[ind], Y[ind]]) # calculate mask for each drainage basin in the dictionary associated_map = {} @@ -593,9 +583,9 @@ def HDF5_ATL11_mask_write(IS2_atl11_mask, IS2_atl11_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), 
epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL11_grounded.py b/scripts/MPI_reduce_ICESat2_ATL11_grounded.py index 6d2fe5b..8c01444 100644 --- a/scripts/MPI_reduce_ICESat2_ATL11_grounded.py +++ b/scripts/MPI_reduce_ICESat2_ATL11_grounded.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL11_grounded.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data into grounded ice regions @@ -40,6 +40,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -60,7 +61,6 @@ import sys import os import re -import pyproj import logging import pathlib import datetime @@ -69,24 +69,14 @@ import numpy as np import collections import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # regional grounded ice files grounded_file = {} @@ -186,7 +176,7 @@ def load_grounded_ice(base_dir, BUFFER, HEM, AREA=0.0): for p1,p2 in zip(parts[:-1],parts[1:]): poly_list.append(list(zip(points[p1:p2,0],points[p1:p2,1]))) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0], poly_list[1:]) if (poly_obj.area < (AREA*1e6)): continue # buffer polygon object and add to total polygon dictionary object @@ -293,7 +283,7 @@ def main(): X,Y = transformer.transform(fileID[ptx]['longitude'][:], fileID[ptx]['latitude'][:]) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(list(zip(X[ind], Y[ind]))) + xy_point = geometry.MultiPoint(np.c_[X[ind], Y[ind]]) # calculate mask for each grounded ice region in the dictionary associated_map = {} @@ -608,9 +598,9 @@ def HDF5_ATL11_mask_write(IS2_atl11_mask, IS2_atl11_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add 
attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/MPI_reduce_ICESat2_ATL11_ice_shelves.py b/scripts/MPI_reduce_ICESat2_ATL11_ice_shelves.py index c505a23..2fb0197 100644 --- a/scripts/MPI_reduce_ICESat2_ATL11_ice_shelves.py +++ b/scripts/MPI_reduce_ICESat2_ATL11_ice_shelves.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" MPI_reduce_ICESat2_ATL11_ice_shelves.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 data into regions of floating ice shelves @@ -39,6 +39,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -58,7 +59,6 @@ import sys import os import re -import pyproj import logging import pathlib import datetime @@ -67,24 +67,14 @@ import numpy as np import collections import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) -try: - from mpi4py import MPI -except ModuleNotFoundError: - warnings.warn("mpi4py not available", ImportWarning) -try: - import shapefile -except ModuleNotFoundError: - warnings.warn("shapefile not available", ImportWarning) -try: - from shapely.geometry import MultiPoint, Polygon -except ModuleNotFoundError: - warnings.warn("shapely not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +MPI = is2tk.utilities.import_dependency('mpi4py.MPI') +pyproj = is2tk.utilities.import_dependency('pyproj') +shapefile = is2tk.utilities.import_dependency('shapefile') +geometry = is2tk.utilities.import_dependency('shapely.geometry') # regional ice shelf files ice_shelf_file = {} @@ -188,7 +178,7 @@ def load_ice_shelves(base_dir, BUFFER, HEM): for p1,p2 in zip(parts[:-1],parts[1:]): poly_list.append(list(zip(points[p1:p2,0],points[p1:p2,1]))) # convert poly_list into Polygon object with holes - poly_obj = Polygon(poly_list[0],poly_list[1:]) + poly_obj = geometry.Polygon(poly_list[0], poly_list[1:]) # buffer polygon object and add to total polygon dictionary object poly_dict[shape_attributes[i][0]] = poly_obj.buffer(BUFFER*1e3) # return the polygon object and the input file name @@ -308,7 +298,7 @@ def main(): X,Y = transformer.transform(fileID[ptx]['longitude'][:], fileID[ptx]['latitude'][:]) # convert reduced x and y to shapely multipoint object - xy_point = MultiPoint(list(zip(X[ind], Y[ind]))) + xy_point = geometry.MultiPoint(np.c_[X[ind], Y[ind]]) # calculate mask for each ice shelf in the dictionary associated_map = {} @@ -625,9 +615,9 @@ def HDF5_ATL11_mask_write(IS2_atl11_mask, IS2_atl11_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = 
str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/nsidc_icesat2_dragann.py b/scripts/nsidc_icesat2_dragann.py index 82e8a28..f3aedb3 100644 --- a/scripts/nsidc_icesat2_dragann.py +++ b/scripts/nsidc_icesat2_dragann.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" nsidc_icesat2_dragann.py -Written by Tyler Sutterley (03/2024) +Written by Tyler Sutterley (05/2024) Acquires the ATL03 geolocated photon height product and appends the ATL08 DRAGANN classifications from NSIDC @@ -57,6 +57,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 03/2024: use pathlib to define and operate on paths Updated 09/2023: generalized regular expressions for non-entered cases Updated 12/2022: single implicit import of altimetry tools @@ -89,10 +90,7 @@ import icesat2_toolkit as is2tk # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') # PURPOSE: sync ATL03 geolocated photon height products and appends the # ATL08 DRAGANN classifications from NSIDC diff --git a/scripts/nsidc_icesat2_sync_s3.py b/scripts/nsidc_icesat2_sync_s3.py index b27580a..b8b481e 100644 --- a/scripts/nsidc_icesat2_sync_s3.py +++ b/scripts/nsidc_icesat2_sync_s3.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" nsidc_icesat2_sync_s3.py -Written by Tyler Sutterley (03/2024) +Written by Tyler Sutterley (05/2024) Acquires ICESat-2 datafiles from the National Snow and Ice Data Center (NSIDC) and transfers to an AWS S3 bucket using a local machine as pass through @@ -72,6 +72,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 03/2024: use pathlib to define and operate on paths Updated 09/2023: generalized regular expressions for non-entered cases Updated 12/2022: single implicit import of altimetry tools @@ -120,10 +121,7 @@ import icesat2_toolkit as is2tk # attempt imports -try: - import boto3 -except (AttributeError, ImportError, ModuleNotFoundError) as exc: - warnings.warn("boto3 not available", ImportWarning) +boto3 = is2tk.utilities.import_dependency('boto3') # PURPOSE: sync the ICESat-2 elevation data from NSIDC def nsidc_icesat2_sync_s3(aws_access_key_id, aws_secret_access_key, diff --git a/scripts/reduce_ICESat2_ATL06_raster.py b/scripts/reduce_ICESat2_ATL06_raster.py index ab570b1..e9bee9b 100644 --- a/scripts/reduce_ICESat2_ATL06_raster.py +++ b/scripts/reduce_ICESat2_ATL06_raster.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" reduce_ICESat2_ATL06_raster.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 ATL06 data using raster imagery @@ -29,7 +29,7 @@ h5py: Python interface for Hierarchal Data Format 5 (HDF5) https://h5py.org netCDF4: Python interface to the netCDF C library - https://unidata.github.io/netcdf4-python/netCDF4/index.html + https://unidata.github.io/netcdf4-python/netCDF4/index.html gdal: Pythonic interface to the Geospatial Data Abstraction Library (GDAL) https://pypi.python.org/pypi/GDAL/ pyproj: Python interface to PROJ library @@ -43,6 +43,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate 
on paths Updated 12/2022: single implicit import of altimetry tools @@ -54,7 +55,6 @@ from __future__ import print_function import re -import pyproj import logging import pathlib import argparse @@ -65,12 +65,11 @@ import scipy.spatial import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +pyproj = is2tk.utilities.import_dependency('pyproj') # PURPOSE: try to get the projection information for the input file def get_projection(attributes, PROJECTION): @@ -543,9 +542,9 @@ def HDF5_ATL06_mask_write(IS2_atl06_mask, IS2_atl06_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/reduce_ICESat2_ATL07_raster.py b/scripts/reduce_ICESat2_ATL07_raster.py index 961ed93..e113cb0 100644 --- a/scripts/reduce_ICESat2_ATL07_raster.py +++ b/scripts/reduce_ICESat2_ATL07_raster.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" reduce_ICESat2_ATL07_raster.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 ATL07 data using raster imagery @@ -29,7 +29,7 @@ h5py: Python interface for Hierarchal Data Format 5 (HDF5) https://h5py.org netCDF4: Python interface to the netCDF C library - https://unidata.github.io/netcdf4-python/netCDF4/index.html + https://unidata.github.io/netcdf4-python/netCDF4/index.html gdal: Pythonic interface to the Geospatial Data Abstraction Library (GDAL) https://pypi.python.org/pypi/GDAL/ pyproj: Python interface to PROJ library @@ -43,6 +43,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -54,7 +55,6 @@ from __future__ import print_function import re -import pyproj import logging import pathlib import argparse @@ -65,12 +65,11 @@ import scipy.spatial import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +pyproj = is2tk.utilities.import_dependency('pyproj') # PURPOSE: try to get the projection information for the input file def get_projection(attributes, PROJECTION): @@ -577,9 +576,9 @@ def HDF5_ATL07_mask_write(IS2_atl07_mask, IS2_atl07_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), 
epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/reduce_ICESat2_ATL10_raster.py b/scripts/reduce_ICESat2_ATL10_raster.py index 284e181..bc40096 100644 --- a/scripts/reduce_ICESat2_ATL10_raster.py +++ b/scripts/reduce_ICESat2_ATL10_raster.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" reduce_ICESat2_ATL10_raster.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 ATL10 data using raster imagery @@ -29,7 +29,7 @@ h5py: Python interface for Hierarchal Data Format 5 (HDF5) https://h5py.org netCDF4: Python interface to the netCDF C library - https://unidata.github.io/netcdf4-python/netCDF4/index.html + https://unidata.github.io/netcdf4-python/netCDF4/index.html gdal: Pythonic interface to the Geospatial Data Abstraction Library (GDAL) https://pypi.python.org/pypi/GDAL/ pyproj: Python interface to PROJ library @@ -43,6 +43,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -54,7 +55,6 @@ from __future__ import print_function import re -import pyproj import logging import pathlib import argparse @@ -65,12 +65,11 @@ import scipy.spatial import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +pyproj = is2tk.utilities.import_dependency('pyproj') # PURPOSE: try to get the projection information for the input file def get_projection(attributes, PROJECTION): @@ -537,9 +536,9 @@ def HDF5_ATL10_mask_write(IS2_atl10_mask, IS2_atl10_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1]) diff --git a/scripts/reduce_ICESat2_ATL11_raster.py b/scripts/reduce_ICESat2_ATL11_raster.py index 1def452..f39b2ac 100644 --- a/scripts/reduce_ICESat2_ATL11_raster.py +++ b/scripts/reduce_ICESat2_ATL11_raster.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" reduce_ICESat2_ATL11_raster.py -Written by Tyler Sutterley (04/2024) +Written by Tyler Sutterley (05/2024) Create masks for reducing ICESat-2 ATL11 data using raster imagery @@ -29,7 +29,7 @@ h5py: Python interface for Hierarchal Data Format 5 (HDF5) https://h5py.org netCDF4: Python interface to the netCDF C library - https://unidata.github.io/netcdf4-python/netCDF4/index.html + https://unidata.github.io/netcdf4-python/netCDF4/index.html gdal: Pythonic interface to 
the Geospatial Data Abstraction Library (GDAL) https://pypi.python.org/pypi/GDAL/ pyproj: Python interface to PROJ library @@ -43,6 +43,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2024: use wrapper to importlib for optional dependencies Updated 04/2024: use timescale for temporal operations Updated 03/2024: use pathlib to define and operate on paths Updated 12/2022: single implicit import of altimetry tools @@ -54,7 +55,6 @@ from __future__ import print_function import re -import pyproj import logging import pathlib import argparse @@ -66,12 +66,11 @@ import scipy.spatial import scipy.interpolate import icesat2_toolkit as is2tk +import timescale.time # attempt imports -try: - import h5py -except ModuleNotFoundError: - warnings.warn("h5py not available", ImportWarning) +h5py = is2tk.utilities.import_dependency('h5py') +pyproj = is2tk.utilities.import_dependency('pyproj') # PURPOSE: try to get the projection information for the input file def get_projection(attributes, PROJECTION): @@ -547,9 +546,9 @@ def HDF5_ATL11_mask_write(IS2_atl11_mask, IS2_atl11_attrs, INPUT=None, fileID.attrs['date_type'] = 'UTC' fileID.attrs['time_type'] = 'CCSDS UTC-A' # convert start and end time from ATLAS SDP seconds into timescale - timescale = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), + ts = timescale.time.Timescale().from_deltatime(np.array([tmn,tmx]), epoch=timescale.time._atlas_sdp_epoch, standard='GPS') - dt = np.datetime_as_string(timescale.to_datetime(), unit='s') + dt = np.datetime_as_string(ts.to_datetime(), unit='s') # add attributes with measurement date start, end and duration fileID.attrs['time_coverage_start'] = str(dt[0]) fileID.attrs['time_coverage_end'] = str(dt[1])
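
Throughout this patch, the repeated try/except import guards are collapsed into single calls to is2tk.utilities.import_dependency. The utilities.py implementation itself is not reproduced in this excerpt; the sketch below shows how such an importlib wrapper can behave, under the assumption that it warns and hands back a placeholder when the package is missing (the signature and fallback are illustrative, not the toolkit's exact API):

    import importlib
    import types
    import warnings

    def import_dependency(name: str):
        """Sketch of an optional-dependency importer: return the module
        if available, otherwise warn and return an empty placeholder
        """
        try:
            # import_module resolves dotted submodules, which is why the
            # patch can request 'mpi4py.MPI' and 'shapely.geometry'
            return importlib.import_module(name)
        except (ImportError, ModuleNotFoundError):
            warnings.warn(f"{name} not available", ImportWarning)
            # an empty module object lets the module-level assignment
            # succeed; only attribute access on the missing package fails
            return types.ModuleType(name)

    # usage mirroring the patched scripts
    geometry = import_dependency('shapely.geometry')

This keeps the failures soft: scripts that never touch a missing package run normally, while scripts that do use it fail at the point of use rather than at import time.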