Skip to content

Commit

Permalink
Update apps (#543)
Browse files Browse the repository at this point in the history
* Lint the changefilmstrips module

* Lint crophealth module

* Minor edits

* Lint the deacoastlines module

* Update forest monitoring app

* Update forest monitoring notebook

* Lint geomedian module

* Lint imaegexport module

* Lint wetlandinsighttool
  • Loading branch information
vikineema authored Sep 10, 2024
1 parent 7e46a03 commit d910853
Show file tree
Hide file tree
Showing 10 changed files with 1,124 additions and 885 deletions.
12 changes: 6 additions & 6 deletions Real_world_examples/Forest_monitoring.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -26,14 +26,14 @@
"source": [
"## Background\n",
"\n",
"Forests worldwide are in a state of flux, with accelerating losses in some regions and gains in others [(Hansen et al., 2013)](https://doi.org/10.1126/science.1244693). The Global Forest Change 2000-2021 dataset characterizes the global forest extent and change from 2000 to 2021. \n",
"Forests worldwide are in a state of flux, with accelerating losses in some regions and gains in others [(Hansen et al., 2013)](https://doi.org/10.1126/science.1244693). The Global Forest Change 2000-2023 dataset characterizes the global forest extent and change from 2000 to 2023. \n",
"\n",
"For this dataset: \n",
"- Forest loss is defined as stand-replacement disturbance, or a change from a forest to non-forest state\n",
"- Forest gain is defined as the inverse of loss, or a non-forest to forest change entirely within the study period\n",
"- Tree cover is defined as canopy closure for all vegetation taller than 5m in height.\n",
"\n",
"The Year of gross forest cover loss event (`lossyear`) layer shows the forest loss during the period 2000 to 2021. Forest loss is encoded as either 0 (no loss) or else a value in the range 1-20, representing loss detected primarily in the year 2001-2021, respectively.\n",
"The Year of gross forest cover loss event (`lossyear`) layer shows the forest loss during the period 2000 to 2023. Forest loss is encoded as either 0 (no loss) or else a value in the range 1-23, representing loss detected primarily in the year 2001-2023, respectively.\n",
"\n",
"\n",
"The Tree canopy cover for year 2000 (`treecover2000`) layer shows the tree cover in the year 2000. \n",
Expand Down Expand Up @@ -111,10 +111,10 @@
"\n",
"| Global Forest Change layers available | |\n",
"| --- | --- |\n",
"| **Year of gross forest cover loss event** | Shows forest loss during the period 2000–2021 | \n",
"| **Year of gross forest cover loss event** | Shows forest loss during the period 2000–2023 | \n",
"| **Global forest cover gain 2000–2012** | Shows forest gain during the period 2000-2012 |\n",
"| **Tree canopy cover for year 2000** | Show the tree canopy cover for the year 2000 |\n",
"| **All layers** | Show the forest loss during the period 2000–2021, forest gain during the period 2000-2012 and the tree canopy cover for the year 2000 |\n",
"| **All layers** | Show the forest loss during the period 2000–2023, forest gain during the period 2000-2012 and the tree canopy cover for the year 2000 |\n",
"\n",
"Use the `Forest Cover Loss Time Range` slider to set the time range for which to load the **Year of gross forest cover loss event** layer. Tick the `Override maximum size limit` box to override the app's default 500 square kilometres area limit. This can be used to load larger areas of imagery, but should be used with caution as it can lead to memory issues or crashes.\n",
"\n",
Expand All @@ -135,7 +135,7 @@
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "9e01e78f8dd84328a90629807a49409d",
"model_id": "d0223d060cdd4162b17fed65c8f9112c",
"version_major": 2,
"version_minor": 0
},
Expand Down Expand Up @@ -222,7 +222,7 @@
{
"data": {
"text/plain": [
"'2023-08-14'"
"'2024-09-05'"
]
},
"execution_count": 4,
Expand Down
2 changes: 1 addition & 1 deletion Tools/deafrica_tools/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
__locales__ = __path__[0] + "/locales"

__version__ = "2.4.8"
__version__ = "2.4.9"


def set_lang(lang=None):
Expand Down
129 changes: 61 additions & 68 deletions Tools/deafrica_tools/app/changefilmstrips.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,37 +3,31 @@
inside the Real_world_examples folder.
"""

# Load modules
import os
import dask
import datacube
import warnings

import datacube
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr
import matplotlib.pyplot as plt
from datacube.utils.geometry import CRS, assign_crs
from ipyleaflet import basemap_to_tiles, basemaps
from odc.algo import geomedian_with_mads
from odc.ui import select_on_a_map
from dask.utils import parse_bytes
from datacube.utils.geometry import CRS, assign_crs
from datacube.utils.rio import configure_s3_access
from datacube.utils.dask import start_local_dask
from ipyleaflet import basemaps, basemap_to_tiles

# Load utility functions
from deafrica_tools.datahandling import load_ard, mostcommon_crs
from deafrica_tools.dask import create_local_dask_cluster
from deafrica_tools.datahandling import load_ard, mostcommon_crs


def run_filmstrip_app(
output_name,
time_range,
time_step,
tide_range=(0.0, 1.0),
resolution=(-30, 30),
max_cloud=0.5,
ls7_slc_off=False,
size_limit=10000,
output_name: str,
time_range: tuple,
time_step: dict[str, int],
tide_range: tuple[float] = (0.0, 1.0),
resolution: tuple[int] = (-30, 30),
max_cloud: float = 0.5,
ls7_slc_off: bool = False,
size_limit: int = 10000,
):
"""
An interactive app that allows the user to select a region from a
Expand Down Expand Up @@ -91,7 +85,7 @@ def run_filmstrip_app(
size_limit : int, optional
An optional integer (in hectares) specifying the size limit
for the data query. Queries larger than this size will receive
a warning that he data query is too large (and may
a warning that the data query is too large (and may
therefore result in memory errors).
Expand All @@ -117,10 +111,7 @@ def run_filmstrip_app(

# Plot interactive map to select area
basemap = basemap_to_tiles(basemaps.Esri.WorldImagery)
geopolygon = select_on_a_map(height="600px",
layers=(basemap,),
center=centre_coords,
zoom=14)
geopolygon = select_on_a_map(height="600px", layers=(basemap,), center=centre_coords, zoom=14)

# Set centre coords based on most recent selection to re-focus
# subsequent data selections
Expand All @@ -129,12 +120,14 @@ def run_filmstrip_app(
# Test size of selected area
msq_per_hectare = 10000
area = geopolygon.to_crs(crs=CRS("epsg:6933")).area / msq_per_hectare
radius = np.round(np.sqrt(size_limit), 1)
radius = np.round(np.sqrt(size_limit), 1) # noqa F841
if area > size_limit:
print(f"Warning: Your selected area is {area:.00f} hectares. "
f"Please select an area of less than {size_limit} hectares."
f"\nTo select a smaller area, re-run the cell "
f"above and draw a new polygon.")
print(
f"Warning: Your selected area is {area:.00f} hectares. "
f"Please select an area of less than {size_limit} hectares."
f"\nTo select a smaller area, re-run the cell "
f"above and draw a new polygon."
)

else:

Expand All @@ -147,23 +140,17 @@ def run_filmstrip_app(
client = create_local_dask_cluster(return_client=True)

# Obtain native CRS
crs = mostcommon_crs(dc=dc,
product="ls8_sr",
query={
"time": "2014",
"geopolygon": geopolygon
})
crs = mostcommon_crs(
dc=dc, product="ls8_sr", query={"time": "2014", "geopolygon": geopolygon}
)

# Create query based on time range, area selected, custom params
query = {
"time": time_range,
"geopolygon": geopolygon,
"output_crs": crs,
"resolution": resolution,
"dask_chunks": {
"x": 3000,
"y": 3000
},
"dask_chunks": {"x": 3000, "y": 3000},
"align": (resolution[1] / 2.0, resolution[1] / 2.0),
}

Expand All @@ -179,39 +166,46 @@ def run_filmstrip_app(
)

# Optionally calculate tides for each timestep in the satellite
# dataset and drop any observations out side this range
# dataset and drop any observations outside this range
if tide_range != (0.0, 1.0):
from deafrica_tools.coastal import tidal_tag

ds = tidal_tag(ds=ds, tidepost_lat=None, tidepost_lon=None)
min_tide, max_tide = ds.tide_height.quantile(tide_range).values
ds = ds.sel(time=(ds.tide_height >= min_tide) &
(ds.tide_height <= max_tide))
ds = ds.sel(time=(ds.tide_height >= min_tide) & (ds.tide_height <= max_tide))
ds = ds.drop("tide_height")
print(f" Keeping {len(ds.time)} observations with tides "
f"between {min_tide:.2f} and {max_tide:.2f} m")
print(
f" Keeping {len(ds.time)} observations with tides "
f"between {min_tide:.2f} and {max_tide:.2f} m"
)

# Create time step ranges to generate filmstrips from
bins_dt = pd.date_range(start=time_range[0],
end=time_range[1],
freq=pd.DateOffset(**time_step))
bins_dt = pd.date_range(
start=time_range[0], end=time_range[1], freq=pd.DateOffset(**time_step)
)

# Bin all satellite observations by timestep. If some observations
# fall outside the upper bin, label these with the highest bin
labels = bins_dt.astype("str")
time_steps = (pd.cut(ds.time.values, bins_dt,
labels=labels[:-1]).add_categories(
labels[-1]).fillna(labels[-1]))
time_steps = (
pd.cut(ds.time.values, bins_dt, labels=labels[:-1])
.add_categories(labels[-1])
.fillna(labels[-1])
)

time_steps_var = xr.DataArray(time_steps, [("time", ds.time.values)],
name="timestep")
time_steps_var = xr.DataArray(time_steps, [("time", ds.time.values)], name="timestep")

# Resample data temporally into time steps, and compute geomedians
ds_geomedian = (ds.groupby(time_steps_var).apply(
ds_geomedian = ds.groupby(time_steps_var).apply(
lambda ds_subset: geomedian_with_mads(
ds_subset, compute_mads=False, compute_count=False)))
ds_subset, compute_mads=False, compute_count=False
)
)

print("\nGenerating geomedian composites and plotting "
"filmstrips... (click the Dashboard link above for status)")
print(
"\nGenerating geomedian composites and plotting "
"filmstrips... (click the Dashboard link above for status)"
)
ds_geomedian = ds_geomedian.compute()

# Reset CRS that is lost during geomedian compositing
Expand All @@ -230,16 +224,14 @@ def run_filmstrip_app(
# and aspect ratio
n_obs = output_array.sizes["timestep"]
ratio = output_array.sizes["x"] / output_array.sizes["y"]
fig, axes = plt.subplots(1,
n_obs + 1,
figsize=(5 * ratio * (n_obs + 1), 5))
fig, axes = plt.subplots(1, n_obs + 1, figsize=(5 * ratio * (n_obs + 1), 5))
fig.subplots_adjust(wspace=0.05, hspace=0.05)

# Add timesteps to the plot, set aspect to equal to preserve shape
for i, ax_i in enumerate(axes.flatten()[:n_obs]):
output_array.isel(timestep=i).plot.imshow(ax=ax_i,
vmin=percentiles[0],
vmax=percentiles[1])
output_array.isel(timestep=i).plot.imshow(
ax=ax_i, vmin=percentiles[0], vmax=percentiles[1]
)
ax_i.get_xaxis().set_visible(False)
ax_i.get_yaxis().set_visible(False)
ax_i.set_aspect("equal")
Expand All @@ -248,11 +240,12 @@ def run_filmstrip_app(
# by first taking the log of the array (so change in dark areas
# can be identified), then computing standard deviation between
# all timesteps
(np.log(output_array).std(dim=["timestep"]).mean(
dim="variable").plot.imshow(ax=axes.flatten()[-1],
robust=True,
cmap="magma",
add_colorbar=False))
(
np.log(output_array)
.std(dim=["timestep"])
.mean(dim="variable")
.plot.imshow(ax=axes.flatten()[-1], robust=True, cmap="magma", add_colorbar=False)
)
axes.flatten()[-1].get_xaxis().set_visible(False)
axes.flatten()[-1].get_yaxis().set_visible(False)
axes.flatten()[-1].set_aspect("equal")
Expand Down
Loading

0 comments on commit d910853

Please sign in to comment.