[pre-commit.ci] pre-commit autoupdate #640

Open · wants to merge 2 commits into base: develop
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.6
rev: v0.9.1
hooks:
- id: ruff
args: [--fix, --show-fixes, --output-format, grouped]
Expand Down
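The version bump above is what drives all the Python changes below: the newer ruff formatter (v0.9, by our reading of the diffs; the PR itself doesn't say) spaces operators inside f-string interpolations, normalises quotes nested in f-strings, and parenthesises overlong assert messages rather than the asserted condition. A toy sketch of the target style, not code from this repository:

```python
duration = 123

# Operator spacing now applies inside f-string interpolations:
message = f"took {duration * 10}ms"  # the old style kept {duration*10}

# Quotes nested in f-strings are normalised so the outer quotes stay double:
label = f"{'May'} 2019"  # rather than f'{"May"} 2019'

# Overlong asserts wrap the message, not the condition, in parentheses:
assert message == "took 1230ms", (
    f"unexpected formatting of duration string: {message!r}"
)
```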
2 changes: 1 addition & 1 deletion cubedash/_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ def get_products() -> List[ProductWithSummary]:
]
if products and not STORE.list_complete_products():
raise RuntimeError(
"No products are summarised. " "Run `cubedash-gen --all` to generate some."
"No products are summarised. Run `cubedash-gen --all` to generate some."
)

return products
Expand Down
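The hunk above merges two adjacent string literals into one. Collapsing implicit concatenation is worthwhile because the same pattern is a classic bug: a missing comma silently glues list elements together. A small illustration (hypothetical names, not from this PR):

```python
# A missing comma between adjacent literals concatenates them at compile time,
# producing one element instead of two:
products = [
    "ls8_nbar"  # <- no trailing comma
    "rainfall_chirps_daily",
]
assert products == ["ls8_nbarrainfall_chirps_daily"]  # one merged string
```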
6 changes: 3 additions & 3 deletions cubedash/_monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@ def time_end(response: flask.Response):
render_time = time.time() - flask.g.start_render
response.headers.add_header(
"Server-Timing",
f"app;dur={render_time*1000},"
f'odcquery;dur={flask.g.datacube_query_time*1000};desc="ODC query time",'
f"app;dur={render_time * 1000},"
f'odcquery;dur={flask.g.datacube_query_time * 1000};desc="ODC query time",'
f"odcquerycount_{flask.g.datacube_query_count};"
f'desc="{flask.g.datacube_query_count} ODC queries"',
)
Expand Down Expand Up @@ -79,7 +79,7 @@ def decorator(*args, **kwargs):
duration_secs = time.time() - start_time
print(
f"== Index Call == {style(function.__name__, bold=True)}: "
f"{duration_secs*1000}",
f"{duration_secs * 1000}",
file=sys.stderr,
flush=True,
)
Expand Down
2 changes: 1 addition & 1 deletion cubedash/_stac.py
Original file line number Diff line number Diff line change
Expand Up @@ -1273,7 +1273,7 @@ def collection_month(collection: str, year: int, month: int):
date = datetime(year, month, 1).date()
c = Catalog(
f"{collection}-{year}-{month}",
description=f'{collection} for {date.strftime("%B %Y")}',
description=f"{collection} for {date.strftime('%B %Y')}",
)

c.links.extend(
Expand Down
6 changes: 3 additions & 3 deletions cubedash/summary/_stores.py
Original file line number Diff line number Diff line change
Expand Up @@ -1718,9 +1718,9 @@ def _get_shape(geometry: WKBElement, crs) -> Optional[Geometry]:

if not shape.is_valid:
newshape = shape.buffer(0)
assert math.isclose(
shape.area, newshape.area, abs_tol=0.0001
), f"{shape.area} != {newshape.area}"
assert math.isclose(shape.area, newshape.area, abs_tol=0.0001), (
f"{shape.area} != {newshape.area}"
)
shape = newshape
return shape

Expand Down
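Aside: this hunk reformats an assertion guarding shapely's zero-distance-buffer repair trick. A minimal sketch of that trick, assuming shapely is installed (not code from this PR):

```python
from shapely.geometry import Polygon

# A self-intersecting "bowtie" ring is an invalid polygon.
bowtie = Polygon([(0, 0), (1, 1), (1, 0), (0, 1)])
assert not bowtie.is_valid

# buffer(0) rebuilds it as a valid geometry. The repair can change the
# reported area, which is why _get_shape asserts the areas stay close
# for its real-world footprints.
fixed = bowtie.buffer(0)
assert fixed.is_valid
print(bowtie.area, fixed.area)
```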
2 changes: 1 addition & 1 deletion cubedash/warmup.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ def _format_time(t: float):
if t > 1:
return f"{t:.1f}s"
else:
return f"{int(t*1000)}ms"
return f"{int(t * 1000)}ms"


if __name__ == "__main__":
Expand Down
58 changes: 29 additions & 29 deletions integration_tests/asserts.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@ def assert_shapes_mostly_equal(
# __tracebackhide__ = operator.methodcaller("errisinstance", AssertionError)

# Check area first, as it's a nicer error message when they're wildly different.
assert shape1.area == pytest.approx(
shape2.area, abs=threshold
), "Shapes have different areas"
assert shape1.area == pytest.approx(shape2.area, abs=threshold), (
"Shapes have different areas"
)

s1 = shape1.simplify(tolerance=threshold)
s2 = shape2.simplify(tolerance=threshold)
Expand Down Expand Up @@ -135,17 +135,17 @@ def check_dataset_count(html, count: int):
__tracebackhide__ = True
actual = html.find(".dataset-count", first=True).text
expected = f"{count:,d}"
assert (
f"{expected} dataset" in actual
), f"Incorrect dataset count: found {actual} instead of {expected}"
assert f"{expected} dataset" in actual, (
f"Incorrect dataset count: found {actual} instead of {expected}"
)


def check_datesets_page_datestring(html, datestring: str):
__tracebackhide__ = True
actual = html.find(".overview-day-link", first=True).text
assert (
datestring == actual
), f"Incorrect datestring: found {actual} instead of {datestring}"
assert datestring == actual, (
f"Incorrect datestring: found {actual} instead of {datestring}"
)


def expect_values(
Expand All @@ -168,22 +168,22 @@ def expect_values(
assert s.dataset_count == dataset_count, "wrong dataset count"
assert s.footprint_count == footprint_count, "wrong footprint count"
if s.footprint_count is not None and s.footprint_count > 0:
assert (
s.footprint_geometry is not None
), "No footprint, despite footprint count"
assert s.footprint_geometry is not None, (
"No footprint, despite footprint count"
)
assert s.footprint_geometry.area > 0, "Empty footprint"

assert s.time_range == time_range, "wrong dataset time range"
assert s.newest_dataset_creation_time == default_utc(
newest_creation_time
), "wrong newest dataset creation"
assert s.newest_dataset_creation_time == default_utc(newest_creation_time), (
"wrong newest dataset creation"
)
assert s.timeline_period == timeline_period, (
f"Should be a {timeline_period}, " f"not {s.timeline_period} timeline"
f"Should be a {timeline_period}, not {s.timeline_period} timeline"
)

assert (
s.summary_gen_time is not None
), "Missing summary_gen_time (there's a default)"
assert s.summary_gen_time is not None, (
"Missing summary_gen_time (there's a default)"
)

assert s.crses == crses, "Wrong dataset CRSes"

Expand All @@ -202,16 +202,16 @@ def expect_values(
f"Expected entry with {timeline_count} records."
)
else:
assert (
len(s.timeline_dataset_counts) == timeline_count
), "wrong timeline entry count"

assert (
sum(s.region_dataset_counts.values()) == s.dataset_count
), "region dataset count doesn't match total dataset count"
assert (
sum(s.timeline_dataset_counts.values()) == s.dataset_count
), "timeline count doesn't match dataset count"
assert len(s.timeline_dataset_counts) == timeline_count, (
"wrong timeline entry count"
)

assert sum(s.region_dataset_counts.values()) == s.dataset_count, (
"region dataset count doesn't match total dataset count"
)
assert sum(s.timeline_dataset_counts.values()) == s.dataset_count, (
"timeline count doesn't match dataset count"
)
was_timeline_error = False

if region_dataset_counts is not None:
Expand Down
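The first hunk above compares areas with pytest.approx and an absolute tolerance; for reference, a tiny usage sketch:

```python
import pytest

# With abs=<tolerance>, the comparison passes when the two values differ
# by at most that amount, regardless of magnitude.
assert 1.0 == pytest.approx(1.0004, abs=0.001)      # within tolerance
assert not (1.0 == pytest.approx(1.01, abs=0.001))  # outside tolerance
```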
6 changes: 3 additions & 3 deletions integration_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,9 @@ def _run_cli(cli_method, opts, catch_exceptions=False, expect_success=True):
opts += ("--env", env_name)
result = runner.invoke(cli_method, opts, catch_exceptions=catch_exceptions)
if expect_success:
assert (
0 == result.exit_code
), f"Error for {opts}. Out:\n{indent(result.output, ' ' * 4)}"
assert 0 == result.exit_code, (
f"Error for {opts}. Out:\n{indent(result.output, ' ' * 4)}"
)
return result

return _run_cli
Expand Down
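The `_run_cli` helper here wraps click's test runner. A self-contained sketch of the same pattern, with a toy command standing in for Explorer's real CLI entry points:

```python
from textwrap import indent

import click
from click.testing import CliRunner


@click.command()
@click.option("--name", default="world")
def hello(name):
    click.echo(f"hello {name}")


runner = CliRunner()
result = runner.invoke(hello, ["--name", "explorer"], catch_exceptions=False)
# Same assertion shape as the fixture above: exit code first, full output
# indented into the failure message for easier debugging.
assert 0 == result.exit_code, (
    f"Error for hello. Out:\n{indent(result.output, ' ' * 4)}"
)
```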
20 changes: 10 additions & 10 deletions integration_tests/test_center_datetime_logic.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,21 +43,21 @@ def test_datestring_on_dataset_page(client: FlaskClient):
def test_datestring_on_datasets_search_page(client: FlaskClient):
html = get_html(client, "/products/rainfall_chirps_daily/datasets")

assert (
"Time UTC: 2019-05-15 00:00:00"
in [
a.find("td", first=True).attrs["title"] for a in html.find(".search-result")
]
), "datestring does not match expected center_time recorded in dataset_spatial table"
assert "Time UTC: 2019-05-15 00:00:00" in [
a.find("td", first=True).attrs["title"] for a in html.find(".search-result")
], (
"datestring does not match expected center_time recorded in dataset_spatial table"
)


def test_datestring_on_regions_page(client: FlaskClient):
html = get_html(client, "/product/rainfall_chirps_daily/regions/x210y106")

assert (
"2019-05-15 00:00:00"
in [a.find("td", first=True).text.strip() for a in html.find(".search-result")]
), "datestring does not match expected center_time recorded in dataset_spatial table"
assert "2019-05-15 00:00:00" in [
a.find("td", first=True).text.strip() for a in html.find(".search-result")
], (
"datestring does not match expected center_time recorded in dataset_spatial table"
)


def test_summary_center_datetime(client: FlaskClient):
Expand Down
12 changes: 6 additions & 6 deletions integration_tests/test_eo3_support.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,9 +125,9 @@ def test_eo3_extents(eo3_index: Index):
),
}
assert footprint.is_valid, "Created footprint is not a valid geometry"
assert (
dataset_extent_row["footprint"].srid == 32650
), "Expected epsg:32650 within the footprint geometry"
assert dataset_extent_row["footprint"].srid == 32650, (
"Expected epsg:32650 within the footprint geometry"
)

assert dataset_extent_row["region_code"] == "113081"
assert dataset_extent_row["size_bytes"] is None
Expand Down Expand Up @@ -210,9 +210,9 @@ def test_undo_eo3_doc_compatibility(eo3_index: Index):
with TEST_EO3_DATASET_ARD.open("r") as f:
raw_doc = YAML(typ="safe", pure=True).load(f)

assert (
indexed_doc == raw_doc
), "Document does not match original after undoing compatibility fields."
assert indexed_doc == raw_doc, (
"Document does not match original after undoing compatibility fields."
)


def test_undo_eo3_compatibility_del_handling():
Expand Down
54 changes: 27 additions & 27 deletions integration_tests/test_page_loads.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,9 +154,9 @@ def test_all_products_are_shown(client: FlaskClient):
a.text.strip() for a in html.find(".product-selection-header .option-menu-link")
)
indexed_product_names = sorted(p.name for p in _model.STORE.all_products())
assert (
found_product_names == indexed_product_names
), "Product shown in menu don't match the indexed products"
assert found_product_names == indexed_product_names, (
"Product shown in menu don't match the indexed products"
)


def test_get_overview_product_links(client: FlaskClient):
Expand Down Expand Up @@ -407,36 +407,36 @@ def test_api_returns_high_tide_comp_datasets(client: FlaskClient):
These are slightly fun to handle as they are a small number with a huge time range.
"""
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p")
assert (
len(geojson["features"]) == 306
), "Not all high tide datasets returned as geojson"
assert len(geojson["features"]) == 306, (
"Not all high tide datasets returned as geojson"
)

# Search and time summary is only based on center time.
# These searches are within the dataset time range, but not the center_time.
# Dataset range: '2000-01-01T00:00:00' to '2016-10-31T00:00:00'
# year
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008")
assert (
len(geojson["features"]) == 306
), "Expected high tide datasets within whole dataset range"
assert len(geojson["features"]) == 306, (
"Expected high tide datasets within whole dataset range"
)
# month
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008/6")
assert (
len(geojson["features"]) == 306
), "Expected high tide datasets within whole dataset range"
assert len(geojson["features"]) == 306, (
"Expected high tide datasets within whole dataset range"
)
# day
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008/6/1")
assert (
len(geojson["features"]) == 306
), "Expected high tide datasets within whole dataset range"
assert len(geojson["features"]) == 306, (
"Expected high tide datasets within whole dataset range"
)

# Out of the test dataset time range. No results.

# Completely outside of range
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2018")
assert (
len(geojson["features"]) == 0
), "Expected no high tide datasets in in this year"
assert len(geojson["features"]) == 0, (
"Expected no high tide datasets in in this year"
)
# One day before/after (is time zone handling correct?)
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008/6/2")
assert len(geojson["features"]) == 0, "Expected no result one-day-after center time"
Expand Down Expand Up @@ -469,9 +469,9 @@ def test_api_returns_high_tide_comp_regions(client: FlaskClient):
"""

rv: Response = client.get("/api/regions/high_tide_comp_20p")
assert (
rv.status_code == 404
), "High tide comp does not support regions: it should return not-exist code."
assert rv.status_code == 404, (
"High tide comp does not support regions: it should return not-exist code."
)


def test_api_returns_scene_regions(client: FlaskClient):
Expand Down Expand Up @@ -908,9 +908,9 @@ def test_with_timings(client: FlaskClient):
for f in rv.headers["Server-Timing"].split(",")
if f.startswith("odcquerycount_")
]
assert (
count_header
), f"No query count server timing header found in {rv.headers['Server-Timing']}"
assert count_header, (
f"No query count server timing header found in {rv.headers['Server-Timing']}"
)

# Example header:
# app;dur=1034.12,odcquery;dur=103.03;desc="ODC query time",odcquerycount_6;desc="6 ODC queries"
Expand Down Expand Up @@ -969,9 +969,9 @@ def test_get_robots(client: FlaskClient):
num_lines = len(text.split("\n"))
assert num_lines > 1, "robots.txt should have multiple lines"

assert (
rv.headers["Content-Type"] == "text/plain"
), "robots.txt content-type should be text/plain"
assert rv.headers["Content-Type"] == "text/plain", (
"robots.txt content-type should be text/plain"
)


def test_all_give_404s(client: FlaskClient):
Expand Down
6 changes: 3 additions & 3 deletions integration_tests/test_pages_render.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,9 +89,9 @@ def test_allows_null_product_fixed_fields(
"""

# WHEN we have some products summarised
assert (
summary_store.list_complete_products()
), "There's no summarised products to test"
assert summary_store.list_complete_products(), (
"There's no summarised products to test"
)

# AND there's some with null fixed_metadata (ie. pre-Explorer0-EO3-update)
with odc_test_db.index._active_connection() as conn:
Expand Down