Commit

Merge branch 'integrate_1.9' of https://github.com/opendatacube/datacube-explorer into integrate_1.9
Ariana Barzinpour committed Dec 23, 2024
2 parents 4ed52f9 + 587bae5 commit 8c68300
Showing 61 changed files with 6,922 additions and 2,477 deletions.
2 changes: 1 addition & 1 deletion .docker/create_db.sh
@@ -1,3 +1,3 @@
#!/usr/bin/env bash

- PGPASSWORD=${DB_PASSWORD} psql -h ${DB_HOSTNAME} -U ${DB_USERNAME} -c 'create database opendatacube_test'
+ PGPASSWORD=${POSTGRES_PASSWORD} psql -h ${POSTGRES_HOSTNAME} -U ${POSTGRES_USER} -c 'create database opendatacube_test'
2 changes: 1 addition & 1 deletion .github/dependabot.yml
@@ -4,7 +4,7 @@ updates:
directory: "/"
schedule:
interval: "daily"
- target-branch: "develop"
+ target-branch: "develop" # should this be removed? changed? duplicated?
- package-ecosystem: docker
directory: "/"
schedule:
1 change: 1 addition & 0 deletions .github/workflows/deployment_test.yaml
@@ -9,6 +9,7 @@ on: # yamllint disable-line rule:truthy
push:
branches:
- develop
+ - integrate_1.9
paths:
- '**'

1 change: 1 addition & 0 deletions .github/workflows/docker.yml
@@ -7,6 +7,7 @@ on:
push:
branches:
- develop
+ - integrate_1.9
paths:
- "**"

2 changes: 1 addition & 1 deletion .github/workflows/publish-pypi.yml
@@ -17,7 +17,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
- python-version: 3.8
+ python-version: 3.10

- name: Install Twine
run: |
1 change: 1 addition & 0 deletions .github/workflows/test.yml
@@ -9,6 +9,7 @@ on:
push:
branches:
- develop
+ - integrate_1.9
paths:
- '**'

10 changes: 4 additions & 6 deletions README.md
@@ -81,8 +81,6 @@ with [pyflakes](https://github.com/PyCQA/pyflakes).

They are included when installing the test dependencies:

- pip install --upgrade --no-deps --extra-index-url https://packages.dea.ga.gov.au/ 'datacube' 'digitalearthau'
-
pip install -e .[test]

Run `make lint` to check your changes, and `make format` to format your code
@@ -118,11 +116,11 @@ version uses virtualenvs which are incompatible with Conda's environments)

Set ODC's environment variable before running the server:

- export DATACUBE_ENVIRONMENT=staging
+ export ODC_ENVIRONMENT=staging

You can always see which environment/settings will be used by running `datacube system check`.

- See the ODC documentation for config and [datacube environments](https://datacube-core.readthedocs.io/en/latest/user/config.html#runtime-config)
+ See the ODC documentation for [datacube configuration and environments](https://opendatacube.readthedocs.io/en/latest/installation/database/configuration.html)

### How can I set different timezone

@@ -218,8 +216,8 @@ Three roles are created:
- **explorer-generator**: Suitable for generating and updating summaries (ie. Running `cubedash-gen`)
- **explorer-owner**: For creating and updating the schema. (ie. Running `cubedash-gen --init`)

- Note that these roles extend the built-in datacube role `agdc_user`. If you
- created your datacube without permissions, a stand-alone creator of the `agdc_user`
+ Note that these roles extend the built-in datacube role `agdc_user` (using postgres) or `odc_user` (using postgis).
+ If you created your datacube without permissions, a stand-alone creator of the appropriate
role is available as a prerequisite in the same [roles](cubedash/summary/roles)
directory.
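For context, a minimal sketch (not part of this commit) of granting one of these roles to a database account with psycopg2, assuming the role SQL above has already been applied; the connection details and user names are illustrative:

```python
import psycopg2

# Illustrative connection details; connect as a superuser or the role owner.
conn = psycopg2.connect(host="localhost", dbname="datacube", user="postgres")
conn.autocommit = True

with conn.cursor() as cur:
    # Give the account that runs cubedash-gen the summary-generation role.
    cur.execute('GRANT "explorer-generator" TO summary_gen_user')

conn.close()
```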

2 changes: 1 addition & 1 deletion cubedash/_audit.py
@@ -69,7 +69,7 @@ def legacy_product_audit_page():
@bp.route("/audit/datasets-metadata")
def datasets_metadata_page():
store = _model.STORE
- all_products = {p.name for p in store.index.products.get_all()}
+ all_products = {p.name for p in store.all_products()}
summarised_products = set(store.list_complete_products())
unsummarised_product_names = all_products - summarised_products
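For context, this is part of a wider move in this commit from raw index calls (`store.index.products.get_all()`) to accessors on the SummaryStore itself. A minimal standalone sketch (not part of this commit) of the same check, assuming a configured ODC environment; the application name is illustrative:

```python
from datacube import Datacube
from cubedash.summary import SummaryStore

store = SummaryStore.create(index=Datacube(app="audit-example").index)

all_products = {p.name for p in store.all_products()}  # new-style accessor
summarised = set(store.list_complete_products())
print(sorted(all_products - summarised))  # products with no summary yet
```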

14 changes: 7 additions & 7 deletions cubedash/_dataset.py
@@ -37,8 +37,8 @@ def dataset_page(id_):
def dataset_full_page(product_name: str, id_: UUID):
derived_dataset_overflow = source_dataset_overflow = 0

- index = _model.STORE.index
- dataset = index.datasets.get(id_, include_sources=False)
+ store = _model.STORE
+ dataset = store.index.datasets.get(id_, include_sources=False)

if dataset is None:
abort(404, f"No dataset found with id {id_}")
@@ -59,19 +59,19 @@ def dataset_full_page(product_name: str, id_: UUID):
provenance_display_limit = current_app.config.get(
"CUBEDASH_PROVENANCE_DISPLAY_LIMIT", PROVENANCE_DISPLAY_LIMIT
)
- source_datasets, source_dataset_overflow = utils.get_dataset_sources(
- index, id_, limit=provenance_display_limit
+ source_datasets, source_dataset_overflow = store.e_index.get_dataset_sources(
+ id_, limit=provenance_display_limit
)

dataset.metadata.sources = {}
ordered_metadata = utils.prepare_dataset_formatting(dataset)

- derived_datasets, derived_dataset_overflow = utils.get_datasets_derived(
- index, id_, limit=provenance_display_limit
+ derived_datasets, derived_dataset_overflow = store.e_index.get_datasets_derived(
+ id_, limit=provenance_display_limit
)
derived_datasets.sort(key=utils.dataset_label)

- footprint, region_code = _model.STORE.get_dataset_footprint_region(id_)
+ footprint, region_code = store.get_dataset_footprint_region(id_)
# We only have a footprint in the spatial table above if summarisation has been
# run for the product (...and done so after the dataset was added).
#
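For context, a minimal sketch (not part of this commit) of the provenance helpers as they are now called, moved from `utils` onto the store's `e_index`; each returns the datasets plus a count of how many more exist beyond the limit. The dataset id and limit are placeholders:

```python
from uuid import UUID

from cubedash import _model

store = _model.STORE
dataset_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder

# Source (parent) datasets, capped at `limit`.
sources, source_overflow = store.e_index.get_dataset_sources(dataset_id, limit=20)

# Datasets derived from it, same return shape.
derived, derived_overflow = store.e_index.get_datasets_derived(dataset_id, limit=20)
```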
4 changes: 2 additions & 2 deletions cubedash/_filters.py
@@ -44,7 +44,7 @@ def _format_datetime(date):

@bp.app_template_filter("metadata_center_time")
def _get_metadata_center_time(dataset):
- return utils.center_time_from_metadata(dataset)
+ return utils.datetime_from_metadata(dataset)


@bp.app_template_filter("localised_metadata_center_time")
@@ -141,7 +141,7 @@ def _all_values_none(d: Mapping):

@bp.app_template_filter("dataset_day_link")
def _dataset_day_link(dataset: Dataset, timezone=None):
- t = utils.center_time_from_metadata(dataset)
+ t = utils.datetime_from_metadata(dataset)
if t is None:
return "(unknown time)"
if timezone:
2 changes: 1 addition & 1 deletion cubedash/_model.py
@@ -69,7 +69,7 @@
# As long as we don't run queries (ie. open db connections) before forking
# (hence validate=False).
STORE: SummaryStore = SummaryStore.create(
- index_connect(application_name=NAME, validate_connection=False),
+ index=index_connect(application_name=NAME, validate_connection=False),
grouping_time_zone=DEFAULT_TIMEZONE,
)

9 changes: 3 additions & 6 deletions cubedash/_monitoring.py
@@ -7,7 +7,6 @@
from sqlalchemy import event

from . import _model
- from ._utils import alchemy_engine

_INITIALISED = False

@@ -41,13 +40,13 @@ def time_end(response: flask.Response):

_INITIALISED = True

- @event.listens_for(alchemy_engine(_model.STORE.index), "before_cursor_execute")
+ @event.listens_for(_model.STORE.e_index.engine, "before_cursor_execute")
def before_cursor_execute(
conn, cursor, statement, parameters, context, executemany
):
conn.info.setdefault("query_start_time", []).append(time.time())

- @event.listens_for(alchemy_engine(_model.STORE.index), "after_cursor_execute")
+ @event.listens_for(_model.STORE.e_index.engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
if flask.has_app_context() and hasattr(flask.g, "datacube_query_time"):
flask.g.datacube_query_time += time.time() - conn.info[
@@ -89,8 +88,6 @@ def decorator(*args, **kwargs):
return decorator

# Print call time for all db layer calls.
- import datacube.drivers.postgres._api as api
-
- decorate_all_methods(api.PostgresDbAPI, with_timings)
+ decorate_all_methods(_model.STORE.e_index.db_api, with_timings)

print_datacube_query_times()
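For context, a self-contained sketch (not part of this commit) of the same SQLAlchemy cursor-timing pattern the hooks above use, attached to an in-memory SQLite engine so it runs anywhere; in Explorer the engine would be `_model.STORE.e_index.engine`:

```python
import time

from sqlalchemy import create_engine, event, text

engine = create_engine("sqlite://")  # stand-in for the store's engine

@event.listens_for(engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    conn.info.setdefault("query_start_time", []).append(time.time())

@event.listens_for(engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    elapsed = time.time() - conn.info["query_start_time"].pop(-1)
    print(f"{elapsed * 1000:.2f} ms: {statement}")

with engine.connect() as conn:
    conn.execute(text("select 1"))
```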
11 changes: 6 additions & 5 deletions cubedash/_pages.py
@@ -213,9 +213,10 @@ def search_page(
hard_search_limit = current_app.config.get(
"CUBEDASH_HARD_SEARCH_LIMIT", _HARD_SEARCH_LIMIT
)
+ index = _model.STORE.index
try:
datasets = sorted(
- _model.STORE.index.datasets.search(**query, limit=hard_search_limit + 1),
+ index.datasets.search(**query, limit=hard_search_limit + 1),
key=lambda d: default_utc(d.center_time),
)
except DataError:
@@ -228,7 +229,7 @@

if request_wants_json():
return utils.as_rich_json(
- dict(datasets=[build_dataset_info(_model.STORE.index, d) for d in datasets])
+ dict(datasets=[build_dataset_info(index, d) for d in datasets])
)

# For display on the page (and future searches).
@@ -323,7 +324,7 @@ def region_page(
limit = current_app.config.get("CUBEDASH_HARD_SEARCH_LIMIT", _HARD_SEARCH_LIMIT)
datasets = list(
_model.STORE.find_datasets_for_region(
- product_name, region_code, year, month, day, limit=limit + 1, offset=offset
+ product, region_code, year, month, day, limit=limit + 1, offset=offset
)
)

@@ -478,11 +479,11 @@ def inject_globals():
# Only the known, summarised products in groups.
grouped_products=_get_grouped_products(),
# All products in the datacube, summarised or not.
- datacube_products=list(_model.STORE.index.products.get_all()),
+ datacube_products=list(_model.STORE.all_products()),
hidden_product_list=current_app.config.get(
"CUBEDASH_HIDE_PRODUCTS_BY_NAME_LIST", []
),
- datacube_metadata_types=list(_model.STORE.index.metadata_types.get_all()),
+ datacube_metadata_types=list(_model.STORE.all_metadata_types()),
current_time=datetime.utcnow(),
datacube_version=datacube.__version__,
app_version=cubedash.__version__,
21 changes: 10 additions & 11 deletions cubedash/_product.py
@@ -109,8 +109,9 @@ def legacy_raw_product_doc(name):

@bp.route("/products/<name>.odc-product.yaml")
def raw_product_doc(name):
- product = _model.STORE.index.products.get_by_name(name)
- if not product:
+ try:
+ product = _model.STORE.get_product(name)
+ except KeyError:
abort(404, f"Unknown product {name!r}")

ordered_metadata = utils.prepare_document_formatting(
@@ -126,17 +127,14 @@ def legacy_metadata_type_page(name):

@bp.route("/metadata-types/<name>")
def metadata_type_page(name):
- metadata_type = _model.STORE.index.metadata_types.get_by_name(name)
- if not metadata_type:
+ try:
+ metadata_type = _model.STORE.get_metadata_type(name)
+ except KeyError:
abort(404, f"Unknown metadata type {name!r}")
ordered_metadata = utils.prepare_document_formatting(metadata_type.definition)

products_using_it = sorted(
(
p
for p in _model.STORE.index.products.get_all()
if p.metadata_type.name == name
),
(p for p in _model.STORE.all_products() if p.metadata_type.name == name),
key=lambda p: p.name,
)
return utils.render(
Expand All @@ -154,8 +152,9 @@ def legacy_metadata_type_doc(name):

@bp.route("/metadata-types/<name>.odc-type.yaml")
def raw_metadata_type_doc(name):
- metadata_type = _model.STORE.index.metadata_types.get_by_name(name)
- if not metadata_type:
+ try:
+ metadata_type = _model.STORE.get_metadata_type(name)
+ except KeyError:
abort(404, f"Unknown metadata type {name!r}")
ordered_metadata = utils.prepare_document_formatting(
metadata_type.definition, "Metadata Type", include_source_url=True
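For context, a minimal sketch (not part of this commit) of the new lookup behaviour used throughout this file: `SummaryStore.get_product()` and `get_metadata_type()` raise `KeyError` for unknown names rather than returning `None`. The helper and product name below are purely illustrative:

```python
from cubedash import _model

def describe_product(name: str) -> str:
    # Illustrative helper, not part of Explorer.
    try:
        product = _model.STORE.get_product(name)
    except KeyError:
        return f"Unknown product {name!r}"
    return f"{product.name}: {product.definition.get('description', '')}"

print(describe_product("ga_ls8c_ard_3"))  # illustrative product name
```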
