Skip to content

Commit

Permalink
Merge branch 'master' into fix-code-style
Browse files Browse the repository at this point in the history
# Conflicts:
#	dhdt/generic/handler_im.py
#	dhdt/generic/mapping_io.py
#	dhdt/generic/mapping_tools.py
#	dhdt/input/read_sentinel2.py
#	dhdt/preprocessing/atmospheric_geometry.py
#	dhdt/preprocessing/shadow_filters.py
#	dhdt/processing/matching_tools.py
#	dhdt/processing/network_tools.py
  • Loading branch information
fnattino committed Nov 19, 2023
2 parents 4e9d205 + 729e68c commit 7698032
Show file tree
Hide file tree
Showing 15 changed files with 245 additions and 114 deletions.
3 changes: 3 additions & 0 deletions dhdt/auxiliary/handler_era5.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,9 @@ def get_era5_atmos_profile(date, x, y, spatialRef, z=None, era5_dir=None):
        projection system description
z : numpy.ndarray, unit=meter
altitudes of interest
era5_dir : str
directory for ERA5 data
Returns
-------
Expand Down
6 changes: 2 additions & 4 deletions dhdt/generic/handler_im.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,10 +237,8 @@ def get_image_subset(Z, bbox):

if Z.ndim == 2:
sub_Z = Z[bbox[0]:bbox[1], bbox[2]:bbox[3]]
elif Z.ndim == 3:
sub_Z = Z[bbox[0]:bbox[1], bbox[2]:bbox[3], :]
elif Z.ndim == 4:
sub_Z = Z[bbox[0]:bbox[1], bbox[2]:bbox[3], :, :]
elif Z.ndim >= 3:
sub_Z = Z[bbox[0]:bbox[1], bbox[2]:bbox[3], ...]
return sub_Z


Expand Down
8 changes: 6 additions & 2 deletions dhdt/generic/mapping_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,11 +278,15 @@ def make_multispectral_vrt(df, fpath=None, fname='multispec.vrt'):
Examples
--------
>>> s2_df = list_central_wavelength_msi()
>>> s2_df = s2_df[s2_df['gsd']==10]
>>> s2_df, datastrip_id = get_s2_image_locations(IM_PATH, s2_df)
create file in directory where imagery is situated
>>> make_multispectral_vrt(s2_df, fname='multi_spec.vrt')
"""
assert isinstance(df, pd.DataFrame), ('please provide a dataframe')
assert 'filepath' in df, ('please first run "get_S2_image_locations"' +
assert 'filepath' in df, ('please first run "get_s2_image_locations"' +
' to find the proper file locations')

if fpath is None:
Expand Down
16 changes: 16 additions & 0 deletions dhdt/generic/mapping_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -987,6 +987,22 @@ def get_bbox(geoTransform, rows=None, cols=None):
return bbox


def get_geoTransform(bbox, dx, dy):
    """ construct an affine geotransform from a bounding box and pixel spacing

    Parameters
    ----------
    bbox : array-like, size=(4,)
        bounding box, presumably ordered as (x_min, x_max, y_min, y_max)
        — bbox[0] is used as the horizontal origin and bbox[3] as the
        vertical origin
    dx : float
        pixel spacing in the horizontal direction
    dy : float
        pixel spacing in the vertical direction

    Returns
    -------
    geoTransform : tuple, size=(8,)
        affine transformation coefficients (origin, spacings, zero
        rotation terms), extended with the raster shape (rows, cols)
    """
    spacing_x, spacing_y = float(dx), float(dy)
    # raster dimensions implied by the extent and the pixel spacing
    n_rows = int(np.floor((bbox[3] - bbox[2]) / spacing_y))
    n_cols = int(np.floor((bbox[1] - bbox[0]) / spacing_x))
    # north-up raster: y-spacing is negative, rotation terms are zero
    return (bbox[0], +spacing_x, 0., bbox[3], 0., -spacing_y, n_rows, n_cols)


def get_shape_extent(bbox, geoTransform):
""" given geographic meta data of array, calculate its shape
Expand Down
16 changes: 12 additions & 4 deletions dhdt/input/read_sentinel2.py
Original file line number Diff line number Diff line change
Expand Up @@ -413,11 +413,19 @@ def read_stack_s2(s2_df):
See Also
--------
list_central_wavelength_msi : creates a dataframe for the MSI instrument
get_S2_image_locations : provides dataframe with specific file locations
get_s2_image_locations : provides dataframe with specific file locations
read_band_s2 : reading a single Sentinel-2 band
Examples
--------
>>> s2_df = list_central_wavelength_msi()
>>> s2_df = s2_df[s2_df['gsd'] == 10]
>>> s2_df,_ = get_s2_image_locations(IM_PATH, s2_df)
>>> im_stack, spatialRef, geoTransform, targetprj = read_stack_s2(s2_df)
"""
assert isinstance(s2_df, pd.DataFrame), ('please provide a dataframe')
assert 'filepath' in s2_df, ('please first run "get_S2_image_locations"' +
assert 'filepath' in s2_df, ('please first run "get_s2_image_locations"' +
' to find the proper file locations')

roi = np.min(s2_df['gsd'].array) # resolution of interest
Expand Down Expand Up @@ -1407,7 +1415,7 @@ def read_sensing_time_s2(path, fname='MTD_TL.xml'):
>>> sname = 'S2A_MSIL1C_20200923T163311_N0209_R140_T15MXV_20200923T200821.SAFE'
>>> fname = 'MTD_MSIL1C.xml'
>>> s2_path = os.path.join(fpath, sname, fname)
>>> s2_df,_ = get_S2_image_locations(fname, s2_df)
>>> s2_df,_ = get_s2_image_locations(fname, s2_df)
>>> s2_df['filepath']
B02 'GRANULE/L1C_T15MXV_A027450_20200923T163313/IMG_DATA/T115MXV...'
B03 'GRANULE/L1C_T15MXV_A027450_20200923T163313/IMG_DATA/T15MXV...'
Expand Down Expand Up @@ -1828,7 +1836,7 @@ def get_flight_path_s2(ds_path, fname='MTD_DS.xml', s2_dict=None):
>>> fname = os.path.join(S2_dir, S2_name, 'MTD_MSIL1C.xml')
>>> s2_df = list_central_wavelength_s2()
>>> s2_df, datastrip_id = get_S2_image_locations(fname, s2_df)
>>> s2_df, datastrip_id = get_s2_image_locations(fname, s2_df)
>>> path_det = os.path.join(S2_dir, S2_name, 'DATASTRIP', datastrip_id[17:-7])
>>> sat_tim, sat_xyz, sat_err, sat_uvw = get_flight_path_s2(path_det)
Expand Down
13 changes: 11 additions & 2 deletions dhdt/preprocessing/atmospheric_geometry.py
Original file line number Diff line number Diff line change
Expand Up @@ -1038,7 +1038,8 @@ def get_refraction_angle(dh,
spatialRef,
central_wavelength,
h,
simple_refraction=True):
simple_refraction=True,
era5_dir=None):
""" estimate the refraction angle
Parameters
Expand All @@ -1063,6 +1064,8 @@ def get_refraction_angle(dh,
    simple_refraction : boolean, default=True
- True : estimate refraction in the visible range
- False : more precise in a broader spectral range
era5_dir : str
directory for ERA5 data
Returns
-------
Expand All @@ -1079,7 +1082,13 @@ def get_refraction_angle(dh,
])

lat, lon, z, Temp, Pres, fracHum, t_era = get_era5_atmos_profile(
dh['timestamp'].unique().to_numpy(), x_bar, y_bar, spatialRef, z=h)
dh['timestamp'].unique().to_numpy(),
x_bar,
y_bar,
spatialRef,
z=h,
era5_dir=era5_dir,
)

# calculate refraction for individual dates
for timestamp in dh['timestamp'].unique():
Expand Down
10 changes: 4 additions & 6 deletions dhdt/preprocessing/shadow_filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -279,7 +279,6 @@ def diffusion_strength_1(Z, K):
# admin
if np.iscomplexobj(Z): # support complex input
Z_abs = np.abs(Z)
g_1 = np.exp(-1 * np.divide(np.abs(Z), K)**2)
elif Z.ndim == 3: # support multispectral input
I_sum = np.sum(Z**2, axis=2)
Z_abs = np.sqrt(I_sum, out=np.zeros_like(I_sum), where=I_sum != 0)
Expand All @@ -292,8 +291,8 @@ def diffusion_strength_1(Z, K):


def diffusion_strength_2(Z, K):
""" second diffusion function, proposed by [1], when a complex array is
provided the absolute magnitude is used, following [2]
""" second diffusion function, proposed by [PM87]_, when a complex array is
    provided the absolute magnitude is used, following [Ge92]_.
Parameters
----------
Expand All @@ -311,13 +310,12 @@ def diffusion_strength_2(Z, K):
----------
.. [PM87] Perona & Malik "Scale space and edge detection using anisotropic
diffusion" Proceedings of the IEEE workshop on computer vision,
pp.16-22, 1987
pp.16-22, 1987.
.. [Ge92] Gerig et al. "Nonlinear anisotropic filtering of MRI data" IEEE
transactions on medical imaging, vol.11(2), pp.221-232, 1992
transactions on medical imaging, vol.11(2), pp.221-232, 1992.
"""
if np.iscomplexobj(Z):
Z_abs = np.abs(Z)
denom = (1 + np.divide(np.abs(Z), K)**2)
elif Z.ndim == 3: # support multispectral input
I_sum = np.sum(Z**2, axis=2)
Z_abs = np.sqrt(I_sum, out=np.zeros_like(I_sum), where=I_sum != 0)
Expand Down
24 changes: 9 additions & 15 deletions dhdt/processing/matching_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,9 +191,9 @@ def pad_images_and_filter_coord_list(M1,
affine transformation coefficients of array M1
geoTransform2 : tuple
affine transformation coefficients of array M2
X_grd : np.array, size=(k,l), type=float
X_grd : numpy.ndarray, size=(k,l), type=float
horizontal map coordinates of matching centers of array M1
Y_grd : np.array, size=(k,l), type=float
Y_grd : numpy.ndarray, size=(k,l), type=float
vertical map coordinates of matching centers of array M2
ds1 : TYPE
extra boundary to be added to the first data array
Expand All @@ -205,19 +205,19 @@ def pad_images_and_filter_coord_list(M1,
Returns
-------
M1_new : np.array, size=(m+2*ds1,n+2*ds1), ndim={2,3}
M1_new : numpy.ndarray, size=(m+2*ds1,n+2*ds1), ndim={2,3}
extended data array
M2_new : np.array, size=(m+2*ds2,n+2*ds2), ndim={2,3}
M2_new : numpy.ndarray, size=(m+2*ds2,n+2*ds2), ndim={2,3}
extended data array
i1 : np.array, size=(_,1), dtype=integer
i1 : numpy.ndarray, size=(_,1), dtype=integer
vertical image coordinates of the template centers of first image
j1 : np.array, size=(_,1), dtype=integer
j1 : numpy.ndarray, size=(_,1), dtype=integer
horizontal image coordinates of the template centers of first image
i2 : np.array, size=(_,1), dtype=integer
i2 : numpy.ndarray, size=(_,1), dtype=integer
vertical image coordinates of the template centers of second image
j2 : np.array, size=(_,1), dtype=integer
    j2 : numpy.ndarray, size=(_,1), dtype=integer
horizontal image coordinates of the template centers of second image
IN : np.array, size=(k,l), dtype=boolean
IN : numpy.ndarray, size=(k,l), dtype=boolean
classification of the grid, which are in and out
See Also
Expand All @@ -244,15 +244,9 @@ def pad_images_and_filter_coord_list(M1,

# map transformation to pixel domain
I1_grd, J1_grd = map2pix(geoTransform1, X1_grd, Y1_grd)
# I1_grd = np.round(I1_grd).astype(np.int64)
# J1_grd = np.round(J1_grd).astype(np.int64)

i1, j1 = I1_grd.flatten(), J1_grd.flatten()

I2_grd, J2_grd = map2pix(geoTransform2, X2_grd, Y2_grd)
# I2_grd = np.round(I2_grd).astype(np.int64)
# J2_grd = np.round(J2_grd).astype(np.int64)

i2, j2 = I2_grd.flatten(), J2_grd.flatten()

i1, j1, i2, j2, IN = remove_posts_pairs_outside_image(
Expand Down
7 changes: 5 additions & 2 deletions dhdt/processing/network_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def get_network_indices(n):
def get_network_indices_constrained(idx,
d,
d_max,
n_max,
n_max=None,
double_direction=True):
""" Generate a list with all matchable combinations within a certain range
Expand All @@ -64,7 +64,10 @@ def get_network_indices_constrained(idx,
get_network_indices : same version, but more generic
get_adjacency_matrix_from_netwrok : construct design matrix from edge list
"""
n_max = min(len(idx) - 1, n_max)
if n_max is None:
n_max = len(idx) - 1
else:
n_max = min(len(idx) - 1, n_max)

if d.ndim == 1:
d = d.reshape(-1, 1)
Expand Down
63 changes: 28 additions & 35 deletions examples/Brintnell-Bologna-icefield/01-download-sentinel-2-data.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,10 @@
create_stac_catalog, download_assets

MGRS_TILE = "09VWJ"
YOIS = [2016, 2022]
ROOT_DIR = os.getenv("ROOT_DIR", os.getcwd())
DATA_DIR = os.path.join(ROOT_DIR, "data")
STAC_L1C_PATH = os.path.join(DATA_DIR, "SEN2", "sentinel2-l1c-small")
STAC_L2A_PATH = os.path.join(DATA_DIR, "SEN2", "sentinel2-l2a-small")
STAC_L1C_PATH = os.path.join(DATA_DIR, "SEN2", "sentinel2-l1c")
STAC_L2A_PATH = os.path.join(DATA_DIR, "SEN2", "sentinel2-l2a")
STAC_DESCRIPTION = (
"Sentinel-2 data catalog containing MGRS tiles that include the ",
"Brintnell-Bologna icefield (Rugged Range, Canada) as retrieved ",
Expand Down Expand Up @@ -52,38 +51,31 @@ def _get_matching_L2A_granules(scenes_L1C, scenes_L2A):


def main():
granules_l1c = []
granules_l2a = []
for year in YOIS:
# create selection criteria such as timespan and other specifics
toi = (datetime.date(year, 10, 1), datetime.date(year+1, 4, 1))

api = SentinelAPI(
user=COPERNICUS_HUB_USERNAME,
password=COPERNICUS_HUB_PASSWORD,
api_url=COPERNICUS_API_URL,
show_progressbars=False,
)

scenes_L1C = api.query(
platformname="Sentinel-2",
producttype="S2MSI1C",
raw=f'filename:*_T{MGRS_TILE}_*',
date=toi,
cloudcoverpercentage=(0, 70),
)
scenes_L1C = api.to_geodataframe(scenes_L1C)

scenes_L2A = api.query(
platformname="Sentinel-2",
producttype="S2MSI2A",
raw=f'filename:*_T{MGRS_TILE}_*',
date=toi,
)
scenes_L2A = api.to_geodataframe(scenes_L2A)

granules_l1c.extend(scenes_L1C["filename"].to_list())
granules_l2a.extend(_get_matching_L2A_granules(scenes_L1C, scenes_L2A))

api = SentinelAPI(
user=COPERNICUS_HUB_USERNAME,
password=COPERNICUS_HUB_PASSWORD,
api_url=COPERNICUS_API_URL,
show_progressbars=False,
)

scenes_L1C = api.query(
platformname="Sentinel-2",
producttype="S2MSI1C",
raw=f'filename:*_T{MGRS_TILE}_*',
cloudcoverpercentage=(0, 70),
)
scenes_L1C = api.to_geodataframe(scenes_L1C)

scenes_L2A = api.query(
platformname="Sentinel-2",
producttype="S2MSI2A",
raw=f'filename:*_T{MGRS_TILE}_*',
)
scenes_L2A = api.to_geodataframe(scenes_L2A)

granules_l1c = scenes_L1C["filename"].to_list()
granules_l2a = _get_matching_L2A_granules(scenes_L1C, scenes_L2A)

# download from Google Cloud Storage
create_stac_catalog(STAC_L1C_PATH, STAC_DESCRIPTION, granules_l1c)
Expand All @@ -95,3 +87,4 @@ def main():

if __name__ == "__main__":
main()

Loading

0 comments on commit 7698032

Please sign in to comment.