diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 46d5db2c2f..c76171573f 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -24,3 +24,6 @@ b90e205c5a04e138a451d670a5d2c360c274ac19 # html style 55a16be2620387e360849a98ecee1e837740311c + +# prettier html and scss +7fa4afd6938aacfc0221da6872a56680a12ef45f diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..b97677bc16 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +docs/images/*.png filter=lfs diff=lfs merge=lfs -text diff --git a/.github/workflows/eval.yml b/.github/workflows/eval.yml index 0089b4061d..060cf8546b 100644 --- a/.github/workflows/eval.yml +++ b/.github/workflows/eval.yml @@ -9,16 +9,16 @@ jobs: strategy: fail-fast: false matrix: - debian-version: ["11", "12"] + debian-version: ['11', '12'] include: - - debian-version: "11" - python-version: "3.9" - postgres-version: "13" - postgis-version: "3.2" - - debian-version: "12" - python-version: "3.11" - postgres-version: "15" - postgis-version: "3.3" + - debian-version: '11' + python-version: '3.9' + postgres-version: '13' + postgis-version: '3.2' + - debian-version: '12' + python-version: '3.11' + postgres-version: '15' + postgis-version: '3.3' name: Debian ${{ matrix.debian-version }} @@ -50,7 +50,7 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: "pip" + cache: 'pip' - name: Install GDAL run: | sudo apt update diff --git a/.github/workflows/eval_perf.yml b/.github/workflows/eval_perf.yml new file mode 100644 index 0000000000..c6fbe44030 --- /dev/null +++ b/.github/workflows/eval_perf.yml @@ -0,0 +1,137 @@ +name: Performance Evaluation +on: + workflow_dispatch: + +jobs: + build: + runs-on: self-hosted + + strategy: + fail-fast: false + matrix: + debian-version: ["11", "12"] + include: + - debian-version: "11" + python-version: "3.9" + postgres-version: "13" + postgis-version: "3.2" + - debian-version: "12" + python-version: "3.11" + postgres-version: "15" + postgis-version: "3.3" + + name: Debian ${{ matrix.debian-version }} + + services: + postgres: + image: postgis/postgis:${{ matrix.postgres-version }}-${{ matrix.postgis-version }} + env: + POSTGRES_DB: geonature2db + POSTGRES_PASSWORD: geonatpasswd + POSTGRES_USER: geonatadmin + ports: + - 5432:5432 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - uses: actions/checkout@v4 + with: + submodules: true + - name: Add database extensions + run: | + psql -h localhost -U geonatadmin -d geonature2db -f install/assets/db/add_pg_extensions.sql + env: + PGPASSWORD: geonatpasswd + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - name: Install GDAL + run: | + sudo apt update + sudo apt install -y libgdal-dev + - name: Install dependencies + if: github.base_ref == 'master' + run: | + echo 'Installing production requirements' + python -m pip install --upgrade pip + python -m pip install \ + -e ..[tests] \ + -r requirements.txt + working-directory: ./backend + - name: Install dependencies + if: github.base_ref != 'master' + run: | + echo 'Installing development requirements' + python -m pip install --upgrade pip + python -m pip install \ + -e ..[tests] \ + -r requirements-dev.in + working-directory: ./backend + - name: Show database branches and dependencies + run: | + geonature db status --dependencies + env: + GEONATURE_CONFIG_FILE:
config/test_config.toml + # - name: Restore database + # run: | + # # wget https://www.dropbox.com/scl/fi/17gsthsftfg59mxwmbbre/export_geonature_10000.zip?rlkey=33choleag4xw60wadm802c3oh&dl=1 -O 10kDump.zip + # # unzip 10kDump.zip + # wget https://www.dropbox.com/scl/fi/jjkxyg120bxc0dp8uy8kq/300KDump.sql?rlkey=tyuk2svitcb9nyshn7r09yo7b&dl=1 -O 300KDump.sql + # ls + # psql -h localhost -U geonatadmin -d geonature2db -f 300KDump.sql + # env: + # PGPASSWORD: geonatpasswd + - name: Install database + run: | + install/03b_populate_db.sh + env: + GEONATURE_CONFIG_FILE: config/test_config.toml + srid_local: 2154 + install_bdc_statuts: true + add_sample_data: true + install_sig_layers: true + install_grid_layer_5: true + install_grid_layer_10: true + install_ref_sensitivity: true + - name: Show database status + run: | + geonature db status + env: + GEONATURE_CONFIG_FILE: config/test_config.toml + + - name: Install core modules backend + run: | + pip install -e contrib/occtax + pip install -e contrib/gn_module_occhab + pip install -e contrib/gn_module_validation + - name: Show database status + run: | + geonature db status + env: + GEONATURE_CONFIG_FILE: config/test_config.toml + - name: Install core modules database + run: | + geonature upgrade-modules-db + env: + GEONATURE_CONFIG_FILE: config/test_config.toml + - name: Show database status + run: | + geonature db status --dependencies + env: + GEONATURE_CONFIG_FILE: config/test_config.toml + - name: Load benchmark stable data + run: | + wget https://geonature.fr/data/benchmark_history/benchmark_stable.json -O benchmark_stable.json + + - name: Compare performance to stable data + run: | + pytest --benchmark-only --benchmark-compare-fail="mean:0.1" --benchmark-compare=benchmark_stable.json + env: + GEONATURE_CONFIG_FILE: config/test_config.toml + # https://stackoverflow.com/a/64126737 For posting results on GitHub Pull Requests diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c9de492237..32c6b218c4 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -19,16 +19,16 @@ jobs: strategy: fail-fast: false matrix: - debian-version: ["11", "12"] + debian-version: ['11', '12'] include: - - debian-version: "11" - python-version: "3.9" - postgres-version: "13" - postgis-version: "3.2" - - debian-version: "12" - python-version: "3.11" - postgres-version: "15" - postgis-version: "3.3" + - debian-version: '11' + python-version: '3.9' + postgres-version: '13' + postgis-version: '3.2' + - debian-version: '12' + python-version: '3.11' + postgres-version: '15' + postgis-version: '3.3' name: Debian ${{ matrix.debian-version }} @@ -60,7 +60,7 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: "pip" + cache: 'pip' - name: Install GDAL run: | sudo apt update @@ -122,7 +122,7 @@ jobs: GEONATURE_CONFIG_FILE: config/test_config.toml - name: Test with pytest run: | - pytest -v --cov --cov-report xml + pytest -v --cov --cov-report xml --benchmark-skip env: GEONATURE_CONFIG_FILE: config/test_config.toml - name: Upload coverage to Codecov diff --git a/.gitmodules b/.gitmodules index 2c73b4b9d4..9b1fa3a48f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,32 +1,24 @@ [submodule "backend/dependencies/UsersHub-authentification-module"] path = backend/dependencies/UsersHub-authentification-module - url = https://github.com/PnX-SI/UsersHub-authentification-module - branch = develop + url = ../UsersHub-authentification-module.git [submodule 
"backend/dependencies/Nomenclature-api-module"] path = backend/dependencies/Nomenclature-api-module - url = https://github.com/PnX-SI/Nomenclature-api-module - branch = develop + url = ../Nomenclature-api-module.git [submodule "backend/dependencies/Habref-api-module"] path = backend/dependencies/Habref-api-module - url = https://github.com/PnX-SI/Habref-api-module - branch = develop + url = ../Habref-api-module.git [submodule "backend/dependencies/Utils-Flask-SQLAlchemy"] path = backend/dependencies/Utils-Flask-SQLAlchemy - url = https://github.com/PnX-SI/Utils-Flask-SQLAlchemy - branch = develop + url = ../Utils-Flask-SQLAlchemy.git [submodule "backend/dependencies/TaxHub"] path = backend/dependencies/TaxHub - url = https://github.com/PnX-SI/TaxHub - branch = develop + url = ../TaxHub.git [submodule "backend/dependencies/Utils-Flask-SQLAlchemy-Geo"] path = backend/dependencies/Utils-Flask-SQLAlchemy-Geo - url = https://github.com/PnX-SI/Utils-Flask-SQLAlchemy-Geo - branch = develop + url = ../Utils-Flask-SQLAlchemy-Geo.git [submodule "backend/dependencies/RefGeo"] path = backend/dependencies/RefGeo - url = https://github.com/PnX-SI/RefGeo.git - branch = develop + url = ../RefGeo.git [submodule "backend/dependencies/UsersHub"] path = backend/dependencies/UsersHub - url = https://github.com/PnX-SI/UsersHub - branch = develop + url = ../UsersHub.git diff --git a/VERSION b/VERSION index edcfe40d19..b70ae75a88 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.14.0 +2.14.1 diff --git a/backend/dependencies/Nomenclature-api-module b/backend/dependencies/Nomenclature-api-module index 3128c4ff25..672de79d43 160000 --- a/backend/dependencies/Nomenclature-api-module +++ b/backend/dependencies/Nomenclature-api-module @@ -1 +1 @@ -Subproject commit 3128c4ff257c42e7e384b4a794692693faa1d65b +Subproject commit 672de79d435274978e524b3dc7d6e336887db38d diff --git a/backend/dependencies/RefGeo b/backend/dependencies/RefGeo index 01ae4f86ae..d69c379c76 160000 --- a/backend/dependencies/RefGeo +++ b/backend/dependencies/RefGeo @@ -1 +1 @@ -Subproject commit 01ae4f86ae2967941318cf067af366a3662419be +Subproject commit d69c379c7681972b7d72864ce2824b7644e3432f diff --git a/backend/dependencies/TaxHub b/backend/dependencies/TaxHub index 366dd86b97..ca38000d89 160000 --- a/backend/dependencies/TaxHub +++ b/backend/dependencies/TaxHub @@ -1 +1 @@ -Subproject commit 366dd86b97acb468d8ec29f2c8c52792d9b81eb1 +Subproject commit ca38000d89c074cfba9c80965ab11d9ef679c3ce diff --git a/backend/dependencies/UsersHub b/backend/dependencies/UsersHub index 2dadf77258..910061126a 160000 --- a/backend/dependencies/UsersHub +++ b/backend/dependencies/UsersHub @@ -1 +1 @@ -Subproject commit 2dadf772586517e37831e2245b2a21aa6b263514 +Subproject commit 910061126a12a07f06770430f97b2bcab57e1c05 diff --git a/backend/dependencies/UsersHub-authentification-module b/backend/dependencies/UsersHub-authentification-module index 975a3ebfd7..63c8e2a649 160000 --- a/backend/dependencies/UsersHub-authentification-module +++ b/backend/dependencies/UsersHub-authentification-module @@ -1 +1 @@ -Subproject commit 975a3ebfd7ea1951db0ca8be8e128247d839b042 +Subproject commit 63c8e2a6498315e66fa5f90bdd64ab6018e20d24 diff --git a/backend/geonature/app.py b/backend/geonature/app.py index 5b7906cb53..7bda3c6824 100755 --- a/backend/geonature/app.py +++ b/backend/geonature/app.py @@ -156,28 +156,28 @@ def on_before_models_committed(sender, changes): if change == "delete" and hasattr(obj, "__before_commit_delete__"): obj.__before_commit_delete__() - # setting 
g.current_user on each request @app.before_request def load_current_user(): g._permissions_by_user = {} g._permissions = {} - # FIXME : order of @app.before_request is random thus g.current_user can be empty... - # if config.get("SENTRY_DSN"): - # from sentry_sdk import set_tag, set_user - - # @app.before_request - # def set_sentry_context(): - # if "FLASK_REQUEST_ID" in request.environ: - # set_tag("request.id", request.environ["FLASK_REQUEST_ID"]) - # if g.current_user: - # set_user( - # { - # "id": g.current_user.id_role, - # "username": g.current_user.identifiant, - # "email": g.current_user.email, - # } - # ) + if config.get("SENTRY_DSN"): + from sentry_sdk import set_tag, set_user + + @app.before_request + def set_sentry_context(): + from flask_login import current_user + + if "FLASK_REQUEST_ID" in request.environ: + set_tag("request.id", request.environ["FLASK_REQUEST_ID"]) + if current_user.is_authenticated: + set_user( + { + "id": current_user.id_role, + "username": current_user.identifiant, + "email": current_user.email, + } + ) admin.init_app(app) diff --git a/backend/geonature/core/auth/routes.py b/backend/geonature/core/auth/routes.py index 8f4944e5b4..c3bfc78566 100644 --- a/backend/geonature/core/auth/routes.py +++ b/backend/geonature/core/auth/routes.py @@ -76,7 +76,7 @@ def loginCas(): "Error with the inpn authentification service", status_code=500 ) info_user = response.json() - data = insert_user_and_org(info_user) + data = insert_user_and_org(info_user, update_user_organism=False) db.session.commit() # creation de la Response @@ -157,24 +157,24 @@ def get_user_from_id_inpn_ws(id_user): log.error("Error with the inpn authentification service") -def insert_user_and_org(info_user): +def insert_user_and_org(info_user, update_user_organism: bool = True): organism_id = info_user["codeOrganisme"] - if info_user["libelleLongOrganisme"] is not None: - organism_name = info_user["libelleLongOrganisme"] - else: - organism_name = "Autre" - + organism_name = info_user.get("libelleLongOrganisme") or "Autre" user_login = info_user["login"] user_id = info_user["id"] + + try: assert user_id is not None and user_login is not None except AssertionError: log.error("'CAS ERROR: no ID or LOGIN provided'") raise CasAuthentificationError("CAS ERROR: no ID or LOGIN provided", status_code=500) + # Reconcile with the GeoNature database if organism_id: organism = {"id_organisme": organism_id, "nom_organisme": organism_name} insert_or_update_organism(organism) + + # Retrieve user information from `info_user` user_info = { "id_role": user_id, "identifiant": user_login, "nom_role": info_user["nom"], "prenom_role": info_user["prenom"], @@ -184,15 +184,25 @@ "id_organisme": organism_id, "email": info_user["email"], "active": True, } + + # If not updating the user organism and the user already exists, keep the existing user organism information rather than the information from `info_user` + existing_user = User.query.get(user_id) + if not update_user_organism and existing_user: + user_info["id_organisme"] = existing_user.id_organisme + + # Insert or update user user_info = insert_or_update_role(user_info) - user = db.session.get(User, user_id) + + # Associate the user with a default group if the user is not associated with any group + user = existing_user or db.session.get(User, user_id) if not user.groups: - if not current_app.config["CAS"]["USERS_CAN_SEE_ORGANISM_DATA"] or organism_id is None: + if current_app.config["CAS"]["USERS_CAN_SEE_ORGANISM_DATA"] and organism_id: + # group socle 2 - for a user associated with an organism, if users can see data from their organism + group_id = 
current_app.config["BDD"]["ID_USER_SOCLE_2"] + else: # group socle 1 group_id = current_app.config["BDD"]["ID_USER_SOCLE_1"] - else: - # group socle 2 - group_id = current_app.config["BDD"]["ID_USER_SOCLE_2"] group = db.session.get(User, group_id) user.groups.append(group) + return user_info diff --git a/backend/geonature/core/gn_commons/routes.py b/backend/geonature/core/gn_commons/routes.py index 5fa7492e11..1552a63921 100644 --- a/backend/geonature/core/gn_commons/routes.py +++ b/backend/geonature/core/gn_commons/routes.py @@ -88,9 +88,7 @@ def list_modules(): if any(module_dict["cruved"].values()): module_allowed = True if module.active_frontend: - module_dict["module_url"] = "{}/#/{}".format( - current_app.config["URL_APPLICATION"], module.module_path - ) + module_dict["module_url"] = module.module_path else: module_dict["module_url"] = module.module_external_url module_dict["module_objects"] = {} diff --git a/backend/geonature/core/gn_meta/mtd/__init__.py b/backend/geonature/core/gn_meta/mtd/__init__.py index d2e00631b3..d5ac59460e 100644 --- a/backend/geonature/core/gn_meta/mtd/__init__.py +++ b/backend/geonature/core/gn_meta/mtd/__init__.py @@ -1,14 +1,19 @@ import logging import time -from datetime import * from urllib.parse import urljoin +from lxml import etree import requests + from geonature.core.auth.routes import insert_user_and_org -from geonature.core.gn_meta.models import CorAcquisitionFrameworkActor, CorDatasetActor +from geonature.core.gn_meta.models import ( + CorAcquisitionFrameworkActor, + CorDatasetActor, + TAcquisitionFramework, +) from geonature.utils.config import config from geonature.utils.env import db -from lxml import etree + from pypnnomenclature.models import TNomenclatures from pypnusershub.db.models import User from sqlalchemy import func, select @@ -32,7 +37,8 @@ class MTDInstanceApi: af_path = "/mtd/cadre/export/xml/GetRecordsByInstanceId?id={ID_INSTANCE}" ds_path = "/mtd/cadre/jdd/export/xml/GetRecordsByInstanceId?id={ID_INSTANCE}" ds_user_path = "/mtd/cadre/jdd/export/xml/GetRecordsByUserId?id={ID_ROLE}" - single_af_path = "/mtd/cadre/export/xml/GetRecordById?id={ID_AF}" + af_user_path = "/mtd/cadre/export/xml/GetRecordsByUserId?id={ID_ROLE}" + single_af_path = "/mtd/cadre/export/xml/GetRecordById?id={ID_AF}" # NOTE: `ID_AF` is actually an UUID and not an ID from the point of view of geonature database. # https://inpn.mnhn.fr/mtd/cadre/jdd/export/xml/GetRecordsByUserId?id=41542" def __init__(self, api_endpoint, instance_id, id_role=None): @@ -56,7 +62,8 @@ def _get_af_xml(self): def get_af_list(self): xml = self._get_af_xml() - root = etree.fromstring(xml) + _xml_parser = etree.XMLParser(ns_clean=True, recover=True, encoding="utf-8") + root = etree.fromstring(xml, parser=_xml_parser) af_iter = root.iterfind(".//{http://inpn.mnhn.fr/mtd}CadreAcquisition") af_list = [] for af in af_iter: @@ -71,12 +78,67 @@ def get_ds_list(self): return parse_jdd_xml(xml) def get_ds_user_list(self): + """ + Retrieve the list of of datasets (ds) for the user. + + Returns + ------- + list + A list of datasets (ds) for the user. 
+ """ url = urljoin(self.api_endpoint, self.ds_user_path) url = url.format(ID_ROLE=self.id_role) - xml = self._get_xml_by_url(url) - return parse_jdd_xml(xml) + try: + xml = self._get_xml_by_url(url) + except requests.HttpError as http_error: + error_code = http_error.response.status_code + warning_message = f"""[HttpError : {error_code}] for URL "{url}".""" + if error_code == 404: + warning_message = f"""{warning_message} > Probably no dataset found for the user with ID '{self.id_role}'""" + logger.warning(warning_message) + return [] + ds_list = parse_jdd_xml(xml) + return ds_list + + def get_list_af_for_user(self): + """ + Retrieve a list of acquisition frameworks (af) for the user. + + Returns + ------- + list + A list of acquisition frameworks for the user. + """ + url = urljoin(self.api_endpoint, self.af_user_path).format(ID_ROLE=self.id_role) + try: + xml = self._get_xml_by_url(url) + except requests.HttpError as http_error: + error_code = http_error.response.status_code + warning_message = f"""[HttpError : {error_code}] for URL "{url}".""" + if error_code == 404: + warning_message = f"""{warning_message} > Probably no acquisition framework found for the user with ID '{self.id_role}'""" + logger.warning(warning_message) + return [] + _xml_parser = etree.XMLParser(ns_clean=True, recover=True, encoding="utf-8") + root = etree.fromstring(xml, parser=_xml_parser) + af_iter = root.findall(".//{http://inpn.mnhn.fr/mtd}CadreAcquisition") + af_list = [parse_acquisition_framework(af) for af in af_iter] + return af_list + + def get_single_af(self, af_uuid): + """ + Return a single acquistion framework based on its uuid. - def get_user_af_list(self, af_uuid): + Parameters + ---------- + af_uuid : str + uuid of the acquisition framework + + Returns + ------- + dict + acquisition framework data + """ url = urljoin(self.api_endpoint, self.single_af_path) url = url.format(ID_AF=af_uuid) xml = self._get_xml_by_url(url) @@ -108,9 +170,9 @@ def add_unexisting_digitizer(id_digitizer): :param id_digitizer: as id role from meta info """ if ( - not db.session.scalars( - select(func.count("*").select_from(User).filter_by(id_role=id_digitizer).limit(1)) - ).scalar_one() + not db.session.scalar( + select(func.count("*")).select_from(User).filter_by(id_role=id_digitizer) + ) > 0 ): # not fast - need perf optimization on user call @@ -132,20 +194,16 @@ def process_af_and_ds(af_list, ds_list, id_role=None): """ cas_api = INPNCAS() # read nomenclatures from DB to avoid errors if GN nomenclature is not the same - list_cd_nomenclature = [ - record[0] - for record in db.session.scalars(select(TNomenclatures.cd_nomenclature).distinct()).all() - ] + list_cd_nomenclature = db.session.scalars( + select(TNomenclatures.cd_nomenclature).distinct() + ).all() user_add_total_time = 0 logger.debug("MTD - PROCESS AF LIST") for af in af_list: actors = af.pop("actors") with db.session.begin_nested(): start_add_user_time = time.time() - if not id_role: - add_unexisting_digitizer(af["id_digitizer"]) - else: - add_unexisting_digitizer(id_role) + add_unexisting_digitizer(af["id_digitizer"] if not id_role else id_role) user_add_total_time += time.time() - start_add_user_time af = sync_af(af) associate_actors( @@ -191,29 +249,53 @@ def sync_af_and_ds(): logger.info("MTD - SYNC GLOBAL : FINISH") -def sync_af_and_ds_by_user(id_role): +def sync_af_and_ds_by_user(id_role, id_af=None): """ - Method to trigger MTD sync on user authent. + Method to trigger MTD sync on user authentication. 
+ + Args: + id_role (int): The ID of the role (group or user). + id_af (str, optional): The ID of the AF (Acquisition Framework). Defaults to None. """ logger.info("MTD - SYNC USER : START") + # Create an instance of MTDInstanceApi mtd_api = MTDInstanceApi( config["MTD_API_ENDPOINT"], config["MTD"]["ID_INSTANCE_FILTER"], id_role ) + # Get the list of datasets (ds) for the user + # NOTE: `mtd_api.get_ds_user_list()` tested and timed to about 7 seconds on the PROD instance 'GINCO Occtax' with id_role = 13829 > a user with a lot of metadata to be retrieved from 'INPN Métadonnées' to 'GINCO Occtax' ds_list = mtd_api.get_ds_user_list() - user_af_uuids = [ds["uuid_acquisition_framework"] for ds in ds_list] - - # TODO - voir avec INPN pourquoi les AF par user ne sont pas dans l'appel global des AF - # Ce code ne fonctionne pas pour cette raison -> AF manquants - # af_list = mtd_api.get_af_list() - # af_list = [af for af in af_list if af["unique_acquisition_framework_id"] in user_af_uuids] - - # call INPN API for each AF to retrieve info - af_list = [mtd_api.get_user_af_list(af_uuid) for af_uuid in user_af_uuids] - # start AF and DS lists + if not id_af: + # Get the unique UUIDs of the acquisition frameworks for the user + set_user_af_uuids = {ds["uuid_acquisition_framework"] for ds in ds_list} + user_af_uuids = list(set_user_af_uuids) + + # TODO - check with INPN why the per-user AFs are not included in the global AF call + # The commented code below does not work for that reason -> missing AFs + # af_list = mtd_api.get_af_list() + # af_list = [af for af in af_list if af["unique_acquisition_framework_id"] in user_af_uuids] + + # Get the list of acquisition frameworks for the user + # call INPN API for each AF to retrieve info + af_list = mtd_api.get_list_af_for_user() + else: + # TODO: handle the case where the AF corresponding to the provided `id_af` does not exist yet in the database + # this case should not happen from a user action, because the only case where `id_af` is provided is when the user clicks to unroll an AF in the Metadata module, in which case the AF already exists in the database.
+ # It would still be better to handle the case where the AF does not exist in the database, by first retrieving the AF from 'INPN Métadonnées' in this case + uuid_af = TAcquisitionFramework.query.get(id_af).unique_acquisition_framework_id + uuid_af = str(uuid_af).upper() + + # Get the acquisition framework for the specified UUID, thus a list of one element + af_list = [mtd_api.get_single_af(uuid_af)] + + # Filter the datasets based on the specified UUID + ds_list = [ds for ds in ds_list if ds["uuid_acquisition_framework"] == uuid_af] + + # Process the acquisition frameworks and datasets process_af_and_ds(af_list, ds_list, id_role) logger.info("MTD - SYNC USER : FINISH") diff --git a/backend/geonature/core/gn_meta/mtd/mtd_utils.py b/backend/geonature/core/gn_meta/mtd/mtd_utils.py index d1b0666376..ed18813c7b 100644 --- a/backend/geonature/core/gn_meta/mtd/mtd_utils.py +++ b/backend/geonature/core/gn_meta/mtd/mtd_utils.py @@ -17,7 +17,7 @@ CorAcquisitionFrameworkActor, ) from geonature.core.gn_commons.models import TModules -from pypnusershub.db.models import Organisme as BibOrganismes +from pypnusershub.db.models import Organisme as BibOrganismes, User from geonature.core.users import routes as users from geonature.core.auth.routes import insert_user_and_org, get_user_from_id_inpn_ws @@ -25,10 +25,10 @@ from .mtd_webservice import get_jdd_by_user_id, get_acquisition_framework, get_jdd_by_uuid NOMENCLATURE_MAPPING = { - "id_nomenclature_data_type": "DATA_TYP", - "id_nomenclature_dataset_objectif": "JDD_OBJECTIFS", - "id_nomenclature_data_origin": "DS_PUBLIQUE", - "id_nomenclature_source_status": "STATUT_SOURCE", + "cd_nomenclature_data_type": "DATA_TYP", + "cd_nomenclature_dataset_objectif": "JDD_OBJECTIFS", + "cd_nomenclature_data_origin": "DS_PUBLIQUE", + "cd_nomenclature_source_status": "STATUT_SOURCE", } # get the root logger @@ -43,51 +43,63 @@ def sync_ds(ds, cd_nomenclatures): :param ds: DS infos :param cd_nomenclatures: cd_nomenclature from ref_nomenclatures.t_nomenclatures """ - if ds["id_nomenclature_data_origin"] not in cd_nomenclatures: + if not ds["cd_nomenclature_data_origin"]: + ds["cd_nomenclature_data_origin"] = "NSP" + + # FIXME: the following temporary fix was added due to possible differences in nomenclature referential values between INPN and GeoNature + # should be fixed by ensuring that the two referentials are identical, at least for instances that integrate with INPN and thus rely on MTD synchronization from INPN Métadonnées: the GINCO and DEPOBIO instances.
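+ # Illustration (code values assumed for the example): a DS arriving with cd_nomenclature_data_origin=None is defaulted to "NSP" above, and each cd_nomenclature_* code kept here is converted below to its id_nomenclature_* counterpart via ref_nomenclatures.get_id_nomenclature(), e.g. get_id_nomenclature('DS_PUBLIQUE', 'NSP').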
+ if ds["cd_nomenclature_data_origin"] not in cd_nomenclatures: return # CONTROL AF af_uuid = ds.pop("uuid_acquisition_framework") - af = DB.session.scalar( - select(TAcquisitionFramework).filter_by(unique_acquisition_framework_id=af_uuid).limit(1) - ).first() + af = ( + DB.session.execute( + select(TAcquisitionFramework).filter_by(unique_acquisition_framework_id=af_uuid) + ) + .unique() + .scalar_one_or_none() + ) if af is None: + log.warning(f"AF with UUID '{af_uuid}' not found in database.") return ds["id_acquisition_framework"] = af.id_acquisition_framework ds = { - k: ( - func.ref_nomenclatures.get_id_nomenclature(NOMENCLATURE_MAPPING[k], v) - if k.startswith("id_nomenclature") - else v + field.replace("cd_nomenclature", "id_nomenclature"): ( + func.ref_nomenclatures.get_id_nomenclature(NOMENCLATURE_MAPPING[field], value) + if field.startswith("cd_nomenclature") + else value ) - for k, v in ds.items() - if v is not None + for field, value in ds.items() + if value is not None } ds_exists = DB.session.scalar( - select( - exists().where( - TDatasets.unique_dataset_id == ds["unique_dataset_id"], - ) + exists() + .where( + TDatasets.unique_dataset_id == ds["unique_dataset_id"], ) + .select() ) + statement = ( + pg_insert(TDatasets) + .values(**ds) + .on_conflict_do_nothing(index_elements=["unique_dataset_id"]) + ) if ds_exists: statement = ( update(TDatasets) .where(TDatasets.unique_dataset_id == ds["unique_dataset_id"]) .values(**ds) ) - else: - statement = ( - pg_insert(TDatasets) - .values(**ds) - .on_conflict_do_nothing(index_elements=["unique_dataset_id"]) - ) DB.session.execute(statement) - dataset = DB.session.scalars(ds_query).first() + + dataset = DB.session.scalars( + select(TDatasets).filter_by(unique_dataset_id=ds["unique_dataset_id"]) + ).first() # Associate dataset to the modules if new dataset if not ds_exists: @@ -97,37 +109,39 @@ def sync_ds(ds, cd_nomenclatures): def sync_af(af): - """ - Will create or update a given AF according to UUID. + """Will update a given AF (Acquisition Framework) if already exists in database according to UUID, else update the AF. - :param af: dict AF infos + Parameters + ---------- + af : dict + AF infos. + + Returns + ------- + TAcquisitionFramework + The updated or inserted acquisition framework. 
""" af_uuid = af["unique_acquisition_framework_id"] - count_af = DB.session.execute( - select(func.count("*")) - .select_from(TAcquisitionFramework) - .filter_by(unique_acquisition_framework_id=af_uuid) - ).scalar_one() - - if count_af > 0: - # this avoid useless nextval sequence - statement = ( - update(TAcquisitionFramework) - .where(TAcquisitionFramework.unique_acquisition_framework_id == af_uuid) - .values(af) - .returning(TAcquisitionFramework.id_acquisition_framework) - ) - else: + af_exists = DB.session.scalar( + exists().where(TAcquisitionFramework.unique_acquisition_framework_id == af_uuid).select() + ) + + # Update statement if AF already exists in DB else insert statement + statement = ( + update(TAcquisitionFramework) + .where(TAcquisitionFramework.unique_acquisition_framework_id == af_uuid) + .values(af) + .returning(TAcquisitionFramework) + ) + if not af_exists: statement = ( pg_insert(TAcquisitionFramework) .values(**af) .on_conflict_do_nothing(index_elements=["unique_acquisition_framework_id"]) - .returning(TAcquisitionFramework.id_acquisition_framework) + .returning(TAcquisitionFramework) ) - af_id = DB.session.execute(statement).scalar() - af = DB.session.get(TAcquisitionFramework, af_id) - return af + return DB.session.scalar(statement) def add_or_update_organism(uuid, nom, email): @@ -139,9 +153,7 @@ def add_or_update_organism(uuid, nom, email): :param email: org email """ # Test if actor already exists to avoid nextVal increase - org_exist = DB.session.execute( - select(exists().select_from(BibOrganismes).filter_by(uuid_organisme=uuid)) - ).scalar_one() + org_exist = DB.session.scalar(exists().where(BibOrganismes.uuid_organisme == uuid).select()) if org_exist: statement = ( @@ -185,26 +197,32 @@ def associate_actors(actors, CorActor, pk_name, pk_value): pk value """ for actor in actors: - if not actor["uuid_organism"]: - continue - # test if actor already exists - with DB.session.begin_nested(): - # create or update organisme - id_organism = add_or_update_organism( - uuid=actor["uuid_organism"], - nom=actor["organism"] or "", - email=actor["email"], + id_organism = None + uuid_organism = actor["uuid_organism"] + if uuid_organism: + with DB.session.begin_nested(): + # create or update organisme + # FIXME: prevent update of organism email from actor email ! Several actors may be associated to the same organism and still have different mails ! 
+ id_organism = add_or_update_organism( + uuid=uuid_organism, + nom=actor["organism"] if actor["organism"] else "", + email=actor["email"], + ) + values = dict( + id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature( + "ROLE_ACTEUR", actor["actor_role"] + ), + **{pk_name: pk_value}, + ) + if not id_organism: + values["id_role"] = DB.session.scalar( + select(User.id_role).filter_by(email=actor["email"]) ) - # Test if actor already exists to avoid nextVal increase + else: + values["id_organism"] = id_organism statement = ( pg_insert(CorActor) - .values( - id_organism=id_organism, - id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature( - "ROLE_ACTEUR", actor["actor_role"] - ), - **{pk_name: pk_value}, - ) + .values(**values) .on_conflict_do_nothing( index_elements=[pk_name, "id_organism", "id_nomenclature_actor_role"], ) diff --git a/backend/geonature/core/gn_meta/mtd/xml_parser.py b/backend/geonature/core/gn_meta/mtd/xml_parser.py index 98ced2625d..efcf98d6be 100644 --- a/backend/geonature/core/gn_meta/mtd/xml_parser.py +++ b/backend/geonature/core/gn_meta/mtd/xml_parser.py @@ -182,15 +182,19 @@ def parse_jdd_xml(xml): current_jdd = { "unique_dataset_id": jdd_uuid, "uuid_acquisition_framework": ca_uuid, - "dataset_name": dataset_name, + "dataset_name": dataset_name if len(dataset_name) < 256 else f"{dataset_name[:253]}...", "dataset_shortname": dataset_shortname, - "dataset_desc": dataset_desc, + "dataset_desc": ( + dataset_desc + if len(dataset_name) < 256 + else f"Nom complet du jeu de données dans MTD : {dataset_name}\n {dataset_desc}" + ), "keywords": keywords, "terrestrial_domain": json.loads(terrestrial_domain), "marine_domain": json.loads(marine_domain), - "id_nomenclature_data_type": data_type, + "cd_nomenclature_data_type": data_type, "id_digitizer": id_digitizer, - "id_nomenclature_data_origin": code_statut_donnees_source, + "cd_nomenclature_data_origin": code_statut_donnees_source, "actors": all_actors, "meta_create_date": create_date, "meta_update_date": update_date, diff --git a/backend/geonature/core/gn_meta/routes.py b/backend/geonature/core/gn_meta/routes.py index b79e3dc5d1..5bb91bbee0 100644 --- a/backend/geonature/core/gn_meta/routes.py +++ b/backend/geonature/core/gn_meta/routes.py @@ -80,10 +80,16 @@ @routes.before_request def synchronize_mtd(): if request.endpoint in ["gn_meta.get_datasets", "gn_meta.get_acquisition_frameworks_list"]: - try: - sync_af_and_ds_by_user(id_role=g.current_user.id_role) - except Exception as e: - log.exception("Error while get JDD via MTD") + from flask_login import current_user + + if current_user.is_authenticated: + params = request.json if request.is_json else request.args + try: + list_id_af = params.get("id_acquisition_frameworks", []) + for id_af in list_id_af: + sync_af_and_ds_by_user(id_role=current_user.id_role, id_af=id_af) + except Exception as e: + log.exception(f"Error while getting JDD via MTD: {e}") @routes.route("/datasets", methods=["GET", "POST"]) @@ -1061,14 +1067,21 @@ def publish_acquisition_framework(af_id): @routes.cli.command() -@click.argument("id_role", nargs=1, required=False, default=None) -def mtd_sync(id_role): +@click.option("--id-role", nargs=1, required=False, default=None, help="ID of a user") +@click.option( + "--id-af", nargs=1, required=False, default=None, help="ID of an acquisition framework" +) +def mtd_sync(id_role, id_af): """ - Trigger global sync or a sync for a given user only. 
+ \b + Triggers: + - a global sync for the instance + - a sync for a given user only (if id_role is provided) + - a sync for a given AF (Acquisition Framework) only (if id_af is provided). NOTE: in this case the AF should already exist in the database, and only datasets associated with this AF will be retrieved - :param id_role: user id + NOTE: if both id_role and id_af are provided, only the datasets possibly associated with both the AF and the user will be retrieved. """ if id_role: - return sync_af_and_ds_by_user(id_role) + return sync_af_and_ds_by_user(id_role, id_af) else: + return mtd_sync_af_and_ds() diff --git a/backend/geonature/core/gn_synthese/models.py b/backend/geonature/core/gn_synthese/models.py index 4fa2ebd5da..76619542e3 100644 --- a/backend/geonature/core/gn_synthese/models.py +++ b/backend/geonature/core/gn_synthese/models.py @@ -586,7 +586,7 @@ class VSyntheseForWebApp(DB.Model): id_module = DB.Column(DB.Integer) entity_source_pk_value = DB.Column(DB.Integer) id_dataset = DB.Column(DB.Integer) - dataset_name = DB.Column(DB.Integer) + dataset_name = DB.Column(DB.String) id_acquisition_framework = DB.Column(DB.Integer) count_min = DB.Column(DB.Integer) count_max = DB.Column(DB.Integer) diff --git a/backend/geonature/core/gn_synthese/routes.py b/backend/geonature/core/gn_synthese/routes.py index 213b041b56..abe0c51276 100644 --- a/backend/geonature/core/gn_synthese/routes.py +++ b/backend/geonature/core/gn_synthese/routes.py @@ -16,6 +16,7 @@ g, ) from geonature.core.gn_synthese.schemas import SyntheseSchema +from geonature.core.gn_synthese.synthese_config import MANDATORY_COLUMNS from pypnusershub.db.models import User from pypnnomenclature.models import BibNomenclaturesTypes, TNomenclatures from werkzeug.exceptions import Forbidden, NotFound, BadRequest, Conflict @@ -148,15 +149,14 @@ def get_observations_for_web(permissions): # Get Column Frontend parameter to return only the needed columns param_column_list = { - col["prop"] for col in current_app.config["SYNTHESE"]["LIST_COLUMNS_FRONTEND"] + col["prop"] + for col in current_app.config["SYNTHESE"]["LIST_COLUMNS_FRONTEND"] + + current_app.config["SYNTHESE"]["ADDITIONAL_COLUMNS_FRONTEND"] } # Init with compulsory columns - columns = [ - "id", - VSyntheseForWebApp.id_synthese, - "url_source", - VSyntheseForWebApp.url_source, - ] + columns = [] + for col in MANDATORY_COLUMNS: + columns.extend([col, getattr(VSyntheseForWebApp, col)]) if "count_min_max" in param_column_list: count_min_max = case( @@ -531,15 +531,39 @@ def export_observations_web(permissions): POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view + :query str view_name: name of the view to export (schema.view_name), which must be declared in the configuration :query str export_format: str<'csv', 'geojson', 'shapefiles', 'gpkg'> """ params = request.args # set default to csv export_format = params.get("export_format", "csv") + view_name_param = params.get("view_name", "gn_synthese.v_synthese_for_export") # Test export_format - if not export_format in current_app.config["SYNTHESE"]["EXPORT_FORMAT"]: + if export_format not in current_app.config["SYNTHESE"]["EXPORT_FORMAT"]: raise BadRequest("Unsupported format") + config_view = { + "view_name": "gn_synthese.v_synthese_for_web_app", + "geojson_4326_field": "geojson_4326", + "geojson_local_field": "geojson_local", + } + # Check that the export view name is declared in the config params, for security reasons + if view_name_param != "gn_synthese.v_synthese_for_export": + try: + config_view = next( + _view + for _view in 
current_app.config["SYNTHESE"]["EXPORT_OBSERVATIONS_CUSTOM_VIEWS"] + if _view["view_name"] == view_name_param + ) + except StopIteration: + raise Forbidden("This view is not available for export") + + geojson_4326_field = config_view["geojson_4326_field"] + geojson_local_field = config_view["geojson_local_field"] + try: + schema_name, view_name = view_name_param.split(".") + except ValueError: + raise BadRequest("view_name parameter must be a string with schema dot view_name") # get list of id synthese from POST id_list = request.get_json() @@ -555,12 +579,18 @@ def export_observations_web(permissions): # Useful to have geom column so that they can be replaced by blurred geoms # (only if the user has sensitive permissions) export_view = GenericTableGeo( - tableName="v_synthese_for_export", - schemaName="gn_synthese", + tableName=view_name, + schemaName=schema_name, engine=DB.engine, geometry_field=None, srid=local_srid, ) + mandatory_columns = {"id_synthese", geojson_4326_field, geojson_local_field} + if not mandatory_columns.issubset(set(map(lambda col: col.name, export_view.db_cols))): + print(set(map(lambda col: col.name, export_view.db_cols))) + raise BadRequest( + f"The view {view_name} miss one of required columns {str(mandatory_columns)}" + ) # If there is no sensitive permissions => same path as before blurring implementation if not blurring_permissions: @@ -591,8 +621,6 @@ def export_observations_web(permissions): ) # Overwrite geometry columns to compute the blurred geometry from the blurring cte - geojson_4326_col = current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"] - geojson_local_col = current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"] columns_with_geom_excluded = [ col for col in export_view.tableDef.columns @@ -601,8 +629,8 @@ def export_observations_web(permissions): "geometrie_wkt_4326", # FIXME: hardcoded column names? 
"x_centroid_4326", "y_centroid_4326", - geojson_4326_col, - geojson_local_col, + geojson_4326_field, + geojson_local_field, ] ] # Recomputed the blurred geometries @@ -610,9 +638,9 @@ def export_observations_web(permissions): func.st_astext(cte_synthese_filtered.c.geom).label("geometrie_wkt_4326"), func.st_x(func.st_centroid(cte_synthese_filtered.c.geom)).label("x_centroid_4326"), func.st_y(func.st_centroid(cte_synthese_filtered.c.geom)).label("y_centroid_4326"), - func.st_asgeojson(cte_synthese_filtered.c.geom).label(geojson_4326_col), + func.st_asgeojson(cte_synthese_filtered.c.geom).label(geojson_4326_field), func.st_asgeojson(func.st_transform(cte_synthese_filtered.c.geom, local_srid)).label( - geojson_local_col + geojson_local_field ), ] @@ -625,14 +653,10 @@ def export_observations_web(permissions): .select_from( export_view.tableDef.join( cte_synthese_filtered, - cte_synthese_filtered.c.id_synthese == export_view.tableDef.c.id_synthese, + cte_synthese_filtered.c.id_synthese == export_view.tableDef.columns["id_synthese"], ) ) - .where( - export_view.tableDef.columns[ - current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"] - ].in_(id_list) - ) + .where(export_view.tableDef.columns["id_synthese"].in_(id_list)) ) # Get the results for export @@ -642,11 +666,17 @@ def export_observations_web(permissions): db_cols_for_shape = [] columns_to_serialize = [] - # loop over synthese config to get the columns for export + # loop over synthese config to exclude columns if its default export for db_col in export_view.db_cols: - if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]: - db_cols_for_shape.append(db_col) - columns_to_serialize.append(db_col.key) + if view_name_param == "gn_synthese.v_synthese_for_export": + if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]: + db_cols_for_shape.append(db_col) + columns_to_serialize.append(db_col.key) + else: + # remove geojson fields of serialization + if db_col.key not in [geojson_4326_field, geojson_local_field]: + db_cols_for_shape.append(db_col) + columns_to_serialize.append(db_col.key) file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S") file_name = filemanager.removeDisallowedFilenameChars(file_name) @@ -657,9 +687,7 @@ def export_observations_web(permissions): elif export_format == "geojson": features = [] for r in results: - geometry = json.loads( - getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"]) - ) + geometry = json.loads(getattr(r, geojson_4326_field)) feature = Feature( geometry=geometry, properties=export_view.as_dict(r, fields=columns_to_serialize), @@ -673,7 +701,7 @@ def export_observations_web(permissions): export_format=export_format, export_view=export_view, db_cols=db_cols_for_shape, - geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"], + geojson_col=geojson_local_field, data=results, file_name=file_name, ) diff --git a/backend/geonature/core/gn_synthese/schemas.py b/backend/geonature/core/gn_synthese/schemas.py index 2a317bf938..deea848cfa 100644 --- a/backend/geonature/core/gn_synthese/schemas.py +++ b/backend/geonature/core/gn_synthese/schemas.py @@ -1,7 +1,15 @@ from geonature.utils.env import db, ma +from geonature.utils.config import config from geonature.core.gn_commons.schemas import ModuleSchema, MediaSchema, TValidationSchema -from geonature.core.gn_synthese.models import BibReportsTypes, TReport, TSources, Synthese +from geonature.core.gn_synthese.models import ( + BibReportsTypes, + TReport, + TSources, + Synthese, + VSyntheseForWebApp, 
+) +from geonature.core.gn_synthese.synthese_config import MANDATORY_COLUMNS from pypn_habref_api.schemas import HabrefSchema from pypnusershub.schemas import UserSchema diff --git a/backend/geonature/core/gn_synthese/synthese_config.py b/backend/geonature/core/gn_synthese/synthese_config.py index 84caecfcc6..a3129d8d1b 100644 --- a/backend/geonature/core/gn_synthese/synthese_config.py +++ b/backend/geonature/core/gn_synthese/synthese_config.py @@ -91,7 +91,7 @@ ] # Mandatory columns for the frontend in Synthese API -MANDATORY_COLUMNS = ["entity_source_pk_value", "url_source", "cd_nom"] +MANDATORY_COLUMNS = ["id_synthese", "entity_source_pk_value", "url_source", "cd_nom"] # CONFIG MAP-LIST DEFAULT_LIST_COLUMN = [ diff --git a/backend/geonature/core/gn_synthese/utils/query_select_sqla.py b/backend/geonature/core/gn_synthese/utils/query_select_sqla.py index 7f970845a2..b5aa956728 100644 --- a/backend/geonature/core/gn_synthese/utils/query_select_sqla.py +++ b/backend/geonature/core/gn_synthese/utils/query_select_sqla.py @@ -219,7 +219,7 @@ def filter_query_with_cruved(self, user, scope): self.model_id_syn_col.in_(subquery_observers), self.model_id_digitiser_column == user.id_role, ] - datasets = DB.session.scalars(TDatasets.filter_by_scope(scope)).all() + datasets = DB.session.scalars(TDatasets.filter_by_scope(scope)).unique().all() allowed_datasets = [dataset.id_dataset for dataset in datasets] ors_filters.append(self.model_id_dataset_column.in_(allowed_datasets)) diff --git a/backend/geonature/tests/benchmarks/__init__.py b/backend/geonature/tests/benchmarks/__init__.py new file mode 100644 index 0000000000..86fd72efad --- /dev/null +++ b/backend/geonature/tests/benchmarks/__init__.py @@ -0,0 +1,5 @@ +# Import required for CLater class when using eval() +from flask import url_for +from .benchmark_data import * +from geonature.core.gn_synthese.models import Synthese +from sqlalchemy import select diff --git a/backend/geonature/tests/benchmarks/benchmark_data.py b/backend/geonature/tests/benchmarks/benchmark_data.py new file mode 100644 index 0000000000..75abbc17b7 --- /dev/null +++ b/backend/geonature/tests/benchmarks/benchmark_data.py @@ -0,0 +1,599 @@ +from geonature.utils.env import db +import pytest +from ref_geo.models import BibAreasTypes, LAreas +from sqlalchemy import select + +benchmark_synthese_intersection_data_test_bbox = { + "modif_since_validation": False, + "geoIntersection": { + "type": "Feature", + "properties": {}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [5.828785, 44.632571], + [5.828785, 45.06773], + [6.625493, 45.06773], + [6.625493, 44.632571], + [5.828785, 44.632571], + ] + ], + }, + }, +} + +benchmark_synthese_intersection_data_test_complex_polygon = { + "modif_since_validation": False, + "geoIntersection": { + "type": "Feature", + "properties": {}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [6.063715, 44.968659], + [5.974429, 44.981293], + [5.955198, 44.934631], + [5.98256, 44.890826], + [6.040252, 44.867462], + [6.085582, 44.860646], + [6.125418, 44.884012], + [6.141901, 44.876225], + [6.135038, 44.837262], + [6.099322, 44.820697], + [6.064979, 44.804128], + [6.070473, 44.750493], + [6.133664, 44.731947], + [6.176247, 44.756341], + [6.189983, 44.777799], + [6.19273, 44.731947], + [6.261412, 44.737803], + [6.273775, 44.760243], + [6.319105, 44.741706], + [6.279269, 44.688002], + [6.231192, 44.656741], + [6.483941, 44.672375], + [6.396028, 44.628395], + [6.486688, 44.596121], + [6.538887, 44.655764], + [6.533392, 44.688981], + 
[6.500425, 44.73877], + [6.533392, 44.748528], + [6.514161, 44.790467], + [6.41938, 44.812887], + [6.488062, 44.854779], + [6.536139, 44.856727], + [6.597938, 44.894757], + [6.518262, 44.934648], + [6.437214, 44.932702], + [6.391881, 44.883076], + [6.356164, 44.86068], + [6.251768, 44.874311], + [6.284735, 44.921996], + [6.317703, 44.952143], + [6.385011, 44.964781], + [6.364406, 44.972557], + [6.449572, 44.976444], + [6.479792, 44.984218], + [6.468803, 45.027928], + [6.413857, 45.043461], + [6.336928, 45.055017], + [6.194069, 45.048223], + [6.170716, 44.952049], + [6.161099, 44.949131], + [6.063715, 44.968659], + ] + ], + }, + }, +} + + +def benchmark_synthese_intersection_data_test_commune(): + return { + "modif_since_validation": False, + "area_COM": [ + db.session.scalars( + select(LAreas).join(BibAreasTypes).where(BibAreasTypes.type_code == "COM").limit(1) + ) + .one() + .id_area + ], + } + + +def benchmark_synthese_intersection_data_test_departement(): + return { + "modif_since_validation": False, + "area_DEP": [ + db.session.scalars( + select(LAreas.id_area) + .join(BibAreasTypes) + .where(BibAreasTypes.type_code == "DEP") + .limit(1) + ).first() + ], + } + + +def benchmark_synthese_intersection_data_test_region(): + return { + "modif_since_validation": False, + "area_REG": [ + db.session.scalars( + select(LAreas.id_area) + .join(BibAreasTypes) + .where(BibAreasTypes.type_code == "REG") + .limit(1) + ).first() + ], + } + + +benchmark_synthese_with_tree_taxon = { + "modif_since_validation": False, + "cd_ref_parent": [ + 183787, + 183767, + 183839, + 183840, + 183843, + 183874, + 184358, + 184357, + 184355, + 184354, + 184353, + 184367, + 184376, + 927956, + 200726, + 184350, + 184351, + 184366, + 184363, + 184362, + 184360, + 184359, + 184377, + 184379, + 184374, + 184371, + 184369, + 184368, + 184381, + 184393, + 184391, + 184390, + 184385, + 184387, + 184388, + 184378, + 184438, + 184449, + 184450, + 184493, + 1007942, + 184501, + 184486, + 184488, + 184491, + 184499, + 184492, + 184494, + 200765, + 184737, + 184740, + 184743, + 184752, + 184756, + 184762, + 184766, + 184767, + 184768, + 184771, + 184773, + 184775, + 184776, + 184772, + 184615, + 184616, + 184618, + 184630, + 184643, + 188023, + 200773, + 793340, + 184644, + 184646, + 184649, + 184650, + 184653, + 184658, + 184660, + 184662, + 184666, + 184667, + 184668, + 184670, + 184671, + 184672, + 184674, + 184676, + 184680, + 184682, + 184684, + 184685, + 184689, + 184694, + 184696, + 184699, + 184701, + 184702, + 184703, + 184706, + 184708, + 184709, + 184711, + 184712, + 184714, + 184715, + 184718, + 184720, + 184728, + 184732, + 184734, + 184735, + 184781, + 184791, + 184802, + 184801, + 184890, + 184808, + 184810, + 184815, + 184831, + 184836, + 184838, + 184839, + 184843, + 184847, + 184848, + 184852, + 184853, + 184855, + 184862, + 184865, + 184867, + 184870, + 184872, + 184874, + 184877, + 184879, + 184880, + 184882, + 184883, + 184885, + 184888, + 184889, + 184891, + 184892, + 184894, + 184896, + 184900, + 184907, + 184927, + 184944, + 184947, + 184948, + 184949, + 184950, + 184952, + 839211, + 184961, + 184976, + 185027, + 185028, + 185030, + 185033, + 185043, + 185046, + 185051, + 185064, + 185075, + 185076, + 185077, + 185078, + 185023, + 185081, + 185082, + 185084, + 185085, + 185088, + 185090, + 185124, + 185080, + 185025, + 645275, + 184994, + 834425, + 839203, + 938486, + 184992, + 184988, + 184984, + 184983, + 938483, + 938487, + 185012, + 185204, + 185211, + 188009, + 827922, + 728177, + 187977, + 827924, + 
827925, + 714463, + 185129, + 939246, + 185130, + 185131, + 185133, + 185150, + 185157, + 185162, + 185171, + 185188, + 185189, + 185190, + 185193, + 185194, + 185197, + 714275, + 648561, + 610362, + 602182, + 200681, + 185216, + 185220, + 185223, + 185224, + 185226, + 185230, + 185232, + 185234, + 185240, + 185242, + 185244, + 185245, + 185247, + 185249, + 185251, + 185253, + 185254, + 185257, + 185259, + 185263, + 185266, + 185268, + 185270, + 185273, + 185274, + 185276, + 185278, + 185280, + 185285, + 185286, + 185287, + 185290, + 185291, + 185292, + 185297, + 185302, + 185307, + 185308, + 185309, + 185313, + 185322, + 185323, + 185324, + 185327, + 185330, + 714314, + 185334, + 185352, + 185346, + 185347, + 185351, + 185369, + 185372, + 185371, + 185365, + 185364, + 185362, + 185360, + 185359, + 185374, + 185378, + 185388, + 185389, + 185399, + 437022, + 185383, + 185385, + 185468, + 185467, + 185471, + 185472, + 185473, + 185517, + 185566, + 185554, + 185568, + 185557, + 185582, + 185580, + 185578, + 528744, + 185401, + 185590, + 185593, + 185599, + 185596, + 185594, + 185602, + 185604, + 185621, + 185635, + 185652, + 185651, + 185649, + 185669, + 185674, + 186286, + 699191, + 618462, + 186293, + 186292, + 186283, + 185770, + 185781, + 185830, + 185832, + 185896, + 185900, + 185760, + 905695, + 904968, + 827284, + 825345, + 185967, + 185979, + 186008, + 186010, + 186012, + 186000, + 186006, + 185998, + 186058, + 186067, + 186062, + 186076, + 186091, + 186084, + 186094, + 186101, + 885360, + 885396, + 885400, + 885430, + 186121, + 186117, + 186115, + 186110, + 186135, + 186107, + 186108, + 186137, + 186139, + 186141, + 186145, + 186146, + 186148, + 186152, + 186154, + 186158, + 186160, + 186130, + 186129, + 186124, + 186122, + 828783, + 186168, + 186178, + 186176, + 186027, + 186047, + 186021, + 186019, + 186038, + 186025, + 351923, + 186173, + 185985, + 186055, + 186053, + 185990, + 186221, + 186214, + 186223, + 186215, + 186238, + 186237, + 186239, + 699095, + 186245, + 186257, + 186259, + 655502, + 199825, + 443494, + 655482, + 186210, + 186209, + 186211, + 186241, + 186243, + 186242, + 199959, + 444434, + 185954, + 185951, + 185946, + 185960, + 186330, + 933748, + 186337, + 186332, + 186374, + 186376, + 186365, + 186377, + 186346, + 186409, + 186380, + 186383, + 186386, + 186388, + 186390, + 186391, + 186392, + 186393, + 186396, + 186399, + 186400, + 186401, + 186404, + 186406, + 186407, + 186415, + 186418, + 186422, + 186423, + 186425, + 186426, + 186430, + 187541, + 851431, + 851437, + 851464, + 186356, + 810391, + 699677, + 186353, + 186369, + 186997, + ], +} diff --git a/backend/geonature/tests/benchmarks/benchmark_generator.py b/backend/geonature/tests/benchmarks/benchmark_generator.py new file mode 100644 index 0000000000..4f80e2f483 --- /dev/null +++ b/backend/geonature/tests/benchmarks/benchmark_generator.py @@ -0,0 +1,96 @@ +from typing import Any +from geonature.tests.utils import set_logged_user +from geonature.tests.fixtures import users + +import importlib +from geonature.tests.benchmarks import * + + +class CLater: + def __init__(self, value) -> None: + self.value = value + + +class BenchmarkTest: + """ + Class that allows to define a benchmark test and generate the pytest function to run the benchmark. 
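+ + Calling an instance returns a pytest-compatible test function that logs in the requested user profile (if any), applies the requested fixtures, and runs the wrapped function under the `benchmark` fixture.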
+ + Example, in a pytest file: + ```python + import pytest + bench = BenchmarkTest(print, ["Hello", "World"], {}) + @pytest.mark.usefixtures("client_class", "temporary_transaction") + class TestBenchie: + pass + TestBenchie.test_print = bench() + ``` + + If a function or its arguments depend on the pytest function context, use the CLater class. For example, to use + the `url_for()` function, replace `url_for(...)` with `CLater("url_for(...)")`. + + If the benchmark requires a user to be logged in, add a "user_profile" key to `function_kwargs`; its value must be a key + available in the dictionary returned by the `users` fixture. + + + """ + + def __init__(self, function, function_args=[], function_kwargs={}) -> None: + """ + Constructor of BenchmarkTest + + Parameters + ---------- + function : Callable | CLater + function that will be benchmarked + function_args : Sequence[Any | CLater] + args for the function + function_kwargs : Dict[str,Any] + kwargs for the function + """ + self.function = function + self.function_args = function_args + self.function_kwargs = function_kwargs + + def __call__(self, *args: Any, **kwds: Any) -> Any: + return self.generate_func_test() + + def generate_func_test(self): + """ + Return the pytest function to run the benchmark on the indicated function. + + Returns + ------- + Callable + test function + + Raises + ------ + KeyError + if the given user_profile does not exist + """ + + fixtures = self.function_kwargs.pop("fixtures", []) + user_profile = self.function_kwargs.pop("user_profile", None) + + func, args, kwargs = self.function, self.function_args, self.function_kwargs + + def function_to_include_fixture(*fixture): + + def final_test_function(self, benchmark, users): + + if user_profile: + if user_profile not in users: + raise KeyError(f"{user_profile} can't be found in the users fixture !") + set_logged_user(self.client, users[user_profile]) + benchmark( + eval(func.value) if isinstance(func, CLater) else func, + *[eval(arg.value) if isinstance(arg, CLater) else arg for arg in args], + **{ + key: eval(value.value) if isinstance(value, CLater) else value + for key, value in kwargs.items() + }, + ) + + return final_test_function + + return function_to_include_fixture(*fixtures) diff --git a/backend/geonature/tests/benchmarks/test_benchmark_gn_meta.py b/backend/geonature/tests/benchmarks/test_benchmark_gn_meta.py new file mode 100644 index 0000000000..aadf43f64c --- /dev/null +++ b/backend/geonature/tests/benchmarks/test_benchmark_gn_meta.py @@ -0,0 +1,29 @@ +import logging +import pytest +from geonature.tests.benchmarks import * +from geonature.tests.test_pr_occhab import stations + +from .benchmark_generator import BenchmarkTest, CLater +from .utils import activate_profiling_sql + +logging.basicConfig() +logger = logging.getLogger("logger-name") +logger.setLevel(logging.DEBUG) + +from .utils import CLIENT_GET, CLIENT_POST + + +@pytest.mark.benchmark(group="gn_meta") +@pytest.mark.usefixtures("client_class", "temporary_transaction", "activate_profiling_sql") +class TestBenchmarkGnMeta: + + test_list_acquisition_frameworks = BenchmarkTest( + CLIENT_GET, + [CLater("""url_for("gn_meta.get_acquisition_frameworks_list")""")], + dict(user_profile="admin_user", fixtures=[]), + )() + test_list_datasets = BenchmarkTest( + CLIENT_GET, + [CLater("""url_for("gn_meta.get_datasets")""")], + dict(user_profile="admin_user", fixtures=[]), + )() diff --git a/backend/geonature/tests/benchmarks/test_benchmark_occhab.py 
b/backend/geonature/tests/benchmarks/test_benchmark_occhab.py new file mode 100644 index 0000000000..0cee7edbd3 --- /dev/null +++ b/backend/geonature/tests/benchmarks/test_benchmark_occhab.py @@ -0,0 +1,48 @@ +import logging +import pytest +from geonature.tests.benchmarks import * +from geonature.tests.test_pr_occhab import stations + +from .benchmark_generator import BenchmarkTest, CLater +from .utils import activate_profiling_sql + +logging.basicConfig() +logger = logging.getLogger("logger-name") +logger.setLevel(logging.DEBUG) + +from .utils import CLIENT_GET, CLIENT_POST + + +@pytest.mark.benchmark(group="occhab") +@pytest.mark.usefixtures("client_class", "temporary_transaction", "activate_profiling_sql") +class TestBenchmarkOcchab: + + test_get_station = BenchmarkTest( + CLIENT_GET, + [CLater("""url_for("occhab.get_station", id_station=8)""")], + dict(user_profile="user", fixtures=[stations]), + )() + + test_list_stations = BenchmarkTest( + CLIENT_GET, + [CLater("""url_for("occhab.list_stations")""")], + dict(user_profile="admin_user", fixtures=[]), + )() + + test_list_stations_restricted = BenchmarkTest( + CLIENT_GET, + [CLater("""url_for("occhab.list_stations")""")], + dict(user_profile="user_restricted_occhab", fixtures=[]), + )() + + +for format_ in "csv geojson shapefile".split(): + setattr( + TestBenchmarkOcchab, + f"test_export_all_habitats_{format_}", + BenchmarkTest( + CLIENT_POST, + [CLater(f"""url_for("occhab.export_all_habitats", export_format="{format_}")""")], + dict(user_profile="admin_user", fixtures=[]), + )(), + ) diff --git a/backend/geonature/tests/benchmarks/test_benchmark_occtax.py b/backend/geonature/tests/benchmarks/test_benchmark_occtax.py new file mode 100644 index 0000000000..092604b3ec --- /dev/null +++ b/backend/geonature/tests/benchmarks/test_benchmark_occtax.py @@ -0,0 +1,30 @@ +import logging +import pytest +from geonature.tests.benchmarks import * +from geonature.tests.test_pr_occhab import stations + +from .benchmark_generator import BenchmarkTest, CLater + +from .utils import activate_profiling_sql + +logging.basicConfig() +logger = logging.getLogger("logger-name") +logger.setLevel(logging.DEBUG) + +from .utils import CLIENT_GET, CLIENT_POST + + +@pytest.mark.benchmark(group="occtax") +@pytest.mark.usefixtures("client_class", "temporary_transaction", "activate_profiling_sql") +class TestBenchmarkOcctax: + + test_list_releves_restricted = BenchmarkTest( + CLIENT_GET, + [CLater("""url_for("pr_occtax.getReleves")""")], + dict(user_profile="user_restricted_occhab", fixtures=[]), + )() + test_list_releves_unrestricted = BenchmarkTest( + CLIENT_GET, + [CLater("""url_for("pr_occtax.getReleves")""")], + dict(user_profile="admin_user", fixtures=[]), + )() diff --git a/backend/geonature/tests/benchmarks/test_benchmark_ref_geo.py b/backend/geonature/tests/benchmarks/test_benchmark_ref_geo.py new file mode 100644 index 0000000000..d1ce75d6c9 --- /dev/null +++ b/backend/geonature/tests/benchmarks/test_benchmark_ref_geo.py @@ -0,0 +1,38 @@ +import logging +import pytest +from geonature.tests.benchmarks import * + +from .benchmark_generator import BenchmarkTest, CLater + +from .utils import activate_profiling_sql + +logging.basicConfig() +logger = logging.getLogger("logger-name") +logger.setLevel(logging.DEBUG) + +from .utils import CLIENT_GET + + +@pytest.mark.benchmark(group="ref_geo") +@pytest.mark.usefixtures("client_class", "temporary_transaction", "activate_profiling_sql") +class TestBenchmarkRefGeo: + + test_get_areas_with_geom = BenchmarkTest( + CLIENT_GET, + [ 
+    test_get_areas_with_geom = BenchmarkTest(
+        CLIENT_GET,
+        [
+            CLater(
+                """url_for("ref_geo.get_areas", without_geom="false", type_code=["REG", "DEP", "COM"])"""
+            )
+        ],
+        dict(user_profile="admin_user", fixtures=[]),
+    )()
+
+    test_get_areas_without_geom = BenchmarkTest(
+        CLIENT_GET,
+        [
+            CLater(
+                """url_for("ref_geo.get_areas", without_geom="true", type_code=["REG", "DEP", "COM"])"""
+            )
+        ],
+        dict(user_profile="admin_user", fixtures=[]),
+    )()
diff --git a/backend/geonature/tests/benchmarks/test_benchmark_synthese.py b/backend/geonature/tests/benchmarks/test_benchmark_synthese.py
new file mode 100644
index 0000000000..c757dbff0c
--- /dev/null
+++ b/backend/geonature/tests/benchmarks/test_benchmark_synthese.py
@@ -0,0 +1,108 @@
+import logging
+
+import pytest
+from geonature.tests.benchmarks import *
+from geonature.tests.test_pr_occhab import stations
+from geonature.core.gn_synthese.models import Synthese
+from .utils import activate_profiling_sql
+
+from .benchmark_generator import BenchmarkTest, CLater
+
+
+logging.basicConfig()
+logger = logging.getLogger("logger-name")
+logger.setLevel(logging.DEBUG)
+
+from .utils import CLIENT_GET, CLIENT_POST, add_bluring_to_benchmark_test_class
+
+SYNTHESE_GET_OBS_URL = """url_for("gn_synthese.get_observations_for_web")"""
+SYNTHESE_EXPORT_OBS_URL = """url_for("gn_synthese.export_observations_web")"""
+SYNTHESE_EXPORT_STATUS_URL = """url_for("gn_synthese.export_status")"""
+SYNTHESE_EXPORT_TAXON_WEB_URL = """url_for("gn_synthese.export_taxon_web")"""
+
+
+@pytest.mark.benchmark(group="synthese")
+@pytest.mark.usefixtures("client_class", "temporary_transaction", "activate_profiling_sql")
+class TestBenchmarkSynthese:
+    # GET NOMENCLATURE
+    test_get_default_nomenclatures = BenchmarkTest(
+        CLIENT_GET,
+        [CLater("""url_for("gn_synthese.getDefaultsNomenclatures")""")],
+        dict(user_profile="self_user"),
+    )
+
+    test_with_geometry_bbox = BenchmarkTest(
+        CLIENT_POST,
+        [CLater(SYNTHESE_GET_OBS_URL)],
+        dict(
+            user_profile="admin_user",
+            json=benchmark_synthese_intersection_data_test_bbox,
+        ),
+    )
+
+    test_with_geometry_complex_poly = BenchmarkTest(
+        CLIENT_POST,
+        [CLater(SYNTHESE_GET_OBS_URL)],
+        dict(
+            user_profile="admin_user",
+            json=benchmark_synthese_intersection_data_test_complex_polygon,
+        ),
+    )
+    test_with_commune = BenchmarkTest(
+        CLIENT_POST,
+        [CLater(SYNTHESE_GET_OBS_URL)],
+        dict(
+            user_profile="admin_user",
+            json=CLater("benchmark_data.benchmark_synthese_intersection_data_test_commune()"),
+        ),
+    )
+
+    test_with_departement = BenchmarkTest(
+        CLIENT_POST,
+        [CLater(SYNTHESE_GET_OBS_URL)],
+        dict(
+            user_profile="admin_user",
+            json=CLater("benchmark_data.benchmark_synthese_intersection_data_test_departement()"),
+        ),
+    )
+    test_with_region = BenchmarkTest(
+        CLIENT_POST,
+        [CLater(SYNTHESE_GET_OBS_URL)],
+        dict(
+            user_profile="admin_user",
+            json=CLater("benchmark_data.benchmark_synthese_intersection_data_test_region()"),
+        ),
+    )
+    test_with_up_tree_taxon = BenchmarkTest(
+        CLIENT_POST,
+        [CLater(SYNTHESE_GET_OBS_URL)],
+        dict(
+            user_profile="admin_user",
+            json=benchmark_synthese_with_tree_taxon,
+        ),
+    )
+
+
+# EXPORT TESTING
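+# One benchmark per export endpoint and per payload size. The tests are
+# attached to TestBenchmarkSynthese via setattr, and each JSON payload is a
+# CLater expression that selects the first `n_obs` synthese ids at test time.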
f"db.session.execute(select(Synthese.id_synthese).limit({n_obs})).all()" + ), + ), + ), + ) + +add_bluring_to_benchmark_test_class(TestBenchmarkSynthese) diff --git a/backend/geonature/tests/benchmarks/utils.py b/backend/geonature/tests/benchmarks/utils.py new file mode 100644 index 0000000000..eeb049b54e --- /dev/null +++ b/backend/geonature/tests/benchmarks/utils.py @@ -0,0 +1,105 @@ +import time +import logging +import os + +import pytest +import pandas +from sqlalchemy import event + +from geonature.utils.env import db +from .benchmark_generator import CLater, BenchmarkTest +from geonature.tests.test_synthese import blur_sensitive_observations +import traceback +from geonature.tests.fixtures import app + +logging.basicConfig() +logger = logging.getLogger("logger-name") +logger.setLevel(logging.DEBUG) + + +@pytest.fixture(scope="function") +def activate_profiling_sql(sqllogfilename: pytest.FixtureDef, app: pytest.FixtureDef): + """ + Fixture to activate profiling for SQL queries and store query's statements and execution times in a CSV file. + + This fixture takes a `sqllogfilename` parameter, which is the path to a CSV file where the query statements and + execution times will be stored. If no `sqllogfilename` is provided, the SQL profiling will not be activated. + + Parameters + ---------- + - sqllogfilename: pytest.FixtureDef + The path to the CSV file where the query statements and execution times will be stored. + + """ + columns = ["Endpoint", "Query", "Total Time [s.]"] + + if not sqllogfilename: + logger.debug("No SQL Log file provided. SQL Profiling will not be activated.") + return + + directory = os.path.dirname(sqllogfilename) + if directory and not os.path.exists(directory): + raise FileNotFoundError(f"Directory {directory} does not exists ! ") + + if not os.path.exists(sqllogfilename): + df = pandas.DataFrame([], columns=columns) + df.to_csv(sqllogfilename, header=True, index=None, sep=";") + + def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): + conn.info.setdefault("query_start_time", []).append(time.time()) + logger.debug("Start Query: %s" % statement) + + # @event.listens_for(Engine, "after_cursor_execute") + def after_cursor_execute(conn, cursor, statement, parameters, context, executemany): + total = time.time() - conn.info["query_start_time"].pop(-1) + logger.debug("Query Complete!") + logger.debug("Total Time: %f" % total) + if statement.startswith("SELECT"): + df = pandas.DataFrame([[pytest.endpoint, statement, total]], columns=columns) + df.to_csv(sqllogfilename, mode="a", header=False, index=None, sep=";") + + event.listen(db.engine, "before_cursor_execute", before_cursor_execute) + event.listen(db.engine, "after_cursor_execute", after_cursor_execute) + + +def add_bluring_to_benchmark_test_class(benchmark_cls: type): + """ + Add the blurring enabling fixture to all benchmark tests declared in the given class. 
+
+    Parameters
+    ----------
+    benchmark_cls : type
+        benchmark test class
+    """
+    for attr in dir(benchmark_cls):
+        if attr.startswith("test_"):
+            b_test = getattr(benchmark_cls, attr)
+
+            # If the attribute does not correspond to a BenchmarkTest, skip it
+            if not isinstance(b_test, BenchmarkTest):
+                continue
+
+            # Include the blurring fixture (on a copy of the kwargs, so that the
+            # original BenchmarkTest keeps its own fixtures and user_profile)
+            kwargs = dict(b_test.function_kwargs)
+            kwargs["fixtures"] = kwargs.get("fixtures", []) + [blur_sensitive_observations]
+            # Recreate a BenchmarkTest object including the blurring enabling fixture
+            setattr(
+                benchmark_cls,
+                f"{attr}_with_blurring",
+                BenchmarkTest(
+                    b_test.function, b_test.function_args, kwargs
+                )(),  # Run the test function generation while we're at it
+            )
+            # Generate the test function from the original `BenchmarkTest`s
+            setattr(
+                benchmark_cls,
+                attr,
+                b_test(),
+            )
+
+
+CLIENT_GET, CLIENT_POST = CLater("self.client.get"), CLater("self.client.post")
diff --git a/backend/geonature/tests/conftest.py b/backend/geonature/tests/conftest.py
index ca9ffcb72b..1b05115836 100644
--- a/backend/geonature/tests/conftest.py
+++ b/backend/geonature/tests/conftest.py
@@ -1,3 +1,15 @@
 # force discovery of some fixtures
 from .fixtures import app, users, _session
 from pypnusershub.tests.fixtures import teardown_logout_user
+import pytest
+
+pytest.endpoint = ""
+
+
+def pytest_addoption(parser):
+    parser.addoption("--sql-log-filename", action="store", default=None)
+
+
+@pytest.fixture(scope="session")
+def sqllogfilename(request):
+    return request.config.getoption("--sql-log-filename")
diff --git a/backend/geonature/tests/fixtures.py b/backend/geonature/tests/fixtures.py
index a4785b2f63..3b0bf8fdd9 100644
--- a/backend/geonature/tests/fixtures.py
+++ b/backend/geonature/tests/fixtures.py
@@ -7,7 +7,7 @@
 import pytest
 import sqlalchemy as sa
-from flask import current_app, testing, url_for
+from flask import current_app, testing, url_for, request
 from geoalchemy2.shape import from_shape
 from PIL import Image
 from shapely.geometry import Point
@@ -35,7 +35,11 @@
     TReport,
     TSources,
 )
-from geonature.core.sensitivity.models import SensitivityRule, cor_sensitivity_area
+from geonature.core.sensitivity.models import (
+    CorSensitivityCriteria,
+    SensitivityRule,
+    cor_sensitivity_area,
+)
 from geonature.utils.env import db
 from pypnnomenclature.models import BibNomenclaturesTypes, TNomenclatures
 from pypnusershub.db.models import Application, Organisme
@@ -97,6 +101,10 @@ def app():
     app.test_client_class = GeoNatureClient
     app.config["SERVER_NAME"] = "test.geonature.fr"  # required by url_for
 
+    @app.before_request
+    def get_endpoint():
+        pytest.endpoint = request.endpoint
+
     with app.app_context():
         """
         Note: This may seem redundant with 'temporary_transaction' fixture.
@@ -334,10 +342,18 @@ def create_user(
                 2,
                 False,
                 [],
-                {"C": 2, "OCCHAB": {"R": 2, "U": 1, "E": 2, "D": 1}},
+                {
+                    "C": 2,
+                    "OCCHAB": {"R": 2, "U": 1, "E": 2, "D": 1},
+                    "OCCTAX": {"R": 2, "U": 1, "E": 2, "D": 1},
+                },
             ),
             {},
         ),
+        (
+            ("user_with_blurring", organisme, 1, True, [], {}),
+            {},
+        ),
     ]
 
     for (username, *args), kwargs in users_to_create:
@@ -580,6 +596,9 @@ def synthese_sensitive_data(app, users, datasets, source):
     sensitivity_rule = db.session.execute(
         sa.select(SensitivityRule)
         .join(cor_sensitivity_area, SensitivityRule.id == cor_sensitivity_area.c.id_sensitivity)
+        .join(
+            CorSensitivityCriteria, SensitivityRule.id == CorSensitivityCriteria.id_sensitivity_rule
+        )
         .join(LAreas, cor_sensitivity_area.c.id_area == LAreas.id_area)
         .where(SensitivityRule.active == True)
         .limit(1)
diff --git a/backend/geonature/tests/test_pr_occtax.py b/backend/geonature/tests/test_pr_occtax.py
index 71ecd35bc7..a856b1095b 100644
--- a/backend/geonature/tests/test_pr_occtax.py
+++ b/backend/geonature/tests/test_pr_occtax.py
@@ -124,6 +124,7 @@ def releve_data(client: Any, datasets: dict[Any, TDatasets]):
             "observers": [1],
             "observers_txt": "tatatato",
             "id_nomenclature_grp_typ": id_nomenclature_grp_typ,
+            "additional_fields": {"releve_addi_field": "Releve"},
         },
     }
 
@@ -154,7 +155,7 @@ def occurrence_data(client: Any, releve_occtax: Any):
         "digital_proof": None,
         "non_digital_proof": None,
         "comment": "blah",
-        "additional_fields": {},
+        "additional_fields": {"occurrence_addi_field": "occ"},
         "cor_counting_occtax": [
             {
                 "id_nomenclature_life_stage": dict_nomenclatures["STADE_VIE"],
@@ -166,7 +167,6 @@
                 "count_min": 2,
                 "count_max": 2,
                 "medias": [],
-                "additional_fields": {},
             },
             {
                 "id_nomenclature_life_stage": dict_nomenclatures["STADE_VIE"],
@@ -178,7 +178,6 @@
                 "count_min": 1,
                 "count_max": 1,
                 "medias": [],
-                "additional_fields": {},
             },
         ],
     }
@@ -407,15 +406,26 @@ def test_post_occurrence(self, users: dict, occurrence_data: dict[str, Any]):
         assert response.status_code == 200
         json_resp = response.json
         assert len(json_resp["cor_counting_occtax"]) == 2
-
-        occurrence_data["additional_fields"] = None
-        response = self.client.post(
-            url_for("pr_occtax.createOccurrence", id_releve=occurrence_data["id_releve_occtax"]),
-            json=occurrence_data,
+        # Test trigger to synthese
+        occurrence = (
+            db.session.execute(
+                select(TOccurrencesOccurrence).filter_by(
+                    id_occurrence_occtax=json_resp["id_occurrence_occtax"]
+                )
+            )
+            .unique()
+            .scalar_one()
         )
-        assert response.status_code == BadRequest.code
-
-        # TODO : test dans la synthese qu'il y a bien 2 ligne pour l'UUID couting
+        synthese_data = db.session.scalars(
+            select(Synthese).filter_by(unique_id_sinp_grp=occurrence.releve.unique_id_sinp_grp)
+        ).all()
+        assert len(synthese_data) >= 2
+        synthese_record = synthese_data[0]
+        # test additional fields concatenation
+        assert synthese_record.additional_data == {
+            "occurrence_addi_field": "occ",
+            "releve_addi_field": "Releve",
+        }
 
     def test_update_occurrence(self, users: dict, occurrence: Any):
         set_logged_user(self.client, users["user"])
diff --git a/backend/geonature/tests/test_synthese.py b/backend/geonature/tests/test_synthese.py
index 5595d73cb3..a4b157cb6b 100644
--- a/backend/geonature/tests/test_synthese.py
+++ b/backend/geonature/tests/test_synthese.py
@@ -14,15 +14,21 @@
 from geoalchemy2.shape import to_shape, from_shape
 from shapely.testing import assert_geometries_equal
 from shapely.geometry import Point
-from marshmallow import EXCLUDE +from marshmallow import EXCLUDE, fields, Schema +from marshmallow_geojson import FeatureSchema, GeoJSONSchema + from geonature.utils.env import db +from geonature.utils.config import config from geonature.core.gn_permissions.tools import get_permissions from geonature.core.gn_synthese.utils.blurring import split_blurring_precise_permissions +from geonature.core.gn_synthese.schemas import SyntheseSchema from geonature.core.gn_synthese.utils.query_select_sqla import remove_accents from geonature.core.sensitivity.models import cor_sensitivity_area_type from geonature.core.gn_meta.models import TDatasets from geonature.core.gn_synthese.models import Synthese, TSources, VSyntheseForWebApp +from geonature.core.gn_synthese.synthese_config import MANDATORY_COLUMNS + from geonature.core.gn_synthese.schemas import SyntheseSchema from geonature.core.gn_permissions.models import PermAction, Permission from geonature.core.gn_commons.models.base import TModules @@ -32,9 +38,10 @@ from apptax.tests.fixtures import noms_example, attribut_example from pypnusershub.tests.utils import logged_user_headers, set_logged_user +from utils_flask_sqla_geo.schema import GeoModelConverter, GeoAlchemyAutoSchema + from .fixtures import * from .fixtures import create_synthese, create_module, synthese_with_protected_status -from .utils import jsonschema_definitions @pytest.fixture() @@ -106,59 +113,85 @@ def synthese_for_observers(source, datasets): ) -synthese_properties = { - "type": "object", - "properties": { - "observations": { - "type": "array", - "items": { - "type": "object", - "properties": { - "id": {"type": "number"}, - "cd_nom": {"type": "number"}, - "count_min_max": {"type": "string"}, - "dataset_name": {"type": "string"}, - "date_min": {"type": "string"}, - "entity_source_pk_value": { - "oneOf": [ - {"type": "null"}, - {"type": "string"}, - ], - }, - "lb_nom": {"type": "string"}, - "nom_vern_or_lb_nom": {"type": "string"}, - "unique_id_sinp": { - "type": "string", - "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", - }, - "observers": { - "oneOf": [ - {"type": "null"}, - {"type": "string"}, - ], - }, - "url_source": { - "oneOf": [ - {"type": "null"}, - {"type": "string"}, - ], - }, - }, - "required": [ # obligatoire pour le fonctionement du front - "id", - "cd_nom", - "url_source", - "entity_source_pk_value", - ], - # "additionalProperties": False, - }, - }, - }, -} +# TODO : move and use those schemas in routes one day ! 
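+# The marshmallow schemas below rebuild the response validation that the removed
+# jsonschema definitions (formerly in tests/utils.py) used to provide: each GeoJSON
+# feature returned by get_observations_for_web is loaded through them, and
+# CustomRequiredConverter marks the configured frontend columns as required.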
+class CustomRequiredConverter(GeoModelConverter):
+    """Custom converter adding the `required` kwarg for mandatory and requested fields of the get_observations_for_web view.
+    Used to validate responses in tests."""
+
+    def _add_column_kwargs(self, kwargs, column):
+        super()._add_column_kwargs(kwargs, column)
+        default_cols = map(
+            lambda col: col["prop"],
+            config["SYNTHESE"]["LIST_COLUMNS_FRONTEND"]
+            + config["SYNTHESE"]["ADDITIONAL_COLUMNS_FRONTEND"],
+        )
+        required_cols = list(default_cols) + MANDATORY_COLUMNS
+        kwargs["required"] = column.name in required_cols
+
+
+class VSyntheseForWebAppSchema(GeoAlchemyAutoSchema):
+    """
+    Schema for serialization/deserialization of the VSyntheseForWebApp class
+    """
+
+    count_min_max = fields.Str()
+    nom_vern_or_lb_nom = fields.Str()
+
+    class Meta:
+        model = VSyntheseForWebApp
+        model_converter = CustomRequiredConverter
+
+
+# utility classes for VSyntheseForWebAppSchema validation
+class UngroupedFeatureSchema(FeatureSchema):
+    properties = fields.Nested(
+        VSyntheseForWebAppSchema,
+        required=True,
+    )
+
+
+class GroupedFeatureSchema(FeatureSchema):
+    class NestedObs(Schema):
+        observations = fields.List(
+            fields.Nested(VSyntheseForWebAppSchema, required=True), required=True
+        )
+
+    properties = fields.Nested(NestedObs, required=True)
+
+
+class UngroupedGeoJSONSchema(GeoJSONSchema):
+    feature_schema = UngroupedFeatureSchema
+
+
+class GroupedGeoJSONSchema(GeoJSONSchema):
+    feature_schema = GroupedFeatureSchema
 
 
 @pytest.mark.usefixtures("client_class", "temporary_transaction")
 class TestSynthese:
+    def test_required_fields_and_format(self, app, users):
+        # Test required fields based on VSyntheseForWebAppSchema and its custom
+        # converter (CustomRequiredConverter); also test GeoJSON serialization
+        # (grouped by geometry and not)
+        app.config["SYNTHESE"]["LIST_COLUMNS_FRONTEND"] += [
+            {"prop": "altitude_min", "name": "Altitude min"},
+            {"prop": "count_min_max", "name": "Dénombrement"},
+            {"prop": "nom_vern_or_lb_nom", "name": "Taxon"},
+        ]
+
+        app.config["SYNTHESE"]["ADDITIONAL_COLUMNS_FRONTEND"] += [
+            {"prop": "lb_nom", "name": "Nom scientifique"}
+        ]
+        url_ungrouped = url_for("gn_synthese.get_observations_for_web")
+        set_logged_user(self.client, users["admin_user"])
+        resp = self.client.get(url_ungrouped)
+        for f in resp.json["features"]:
+            UngroupedGeoJSONSchema().load(f)
+
+        url_grouped = url_for("gn_synthese.get_observations_for_web", format="grouped_geom")
+        resp = self.client.get(url_grouped)
+        for f in resp.json["features"]:
+            GroupedGeoJSONSchema().load(f)
+
     def test_synthese_scope_filtering(self, app, users, synthese_data):
         all_ids = {s.id_synthese for s in synthese_data.values()}
         sq = (
@@ -184,12 +217,6 @@ def test_get_defaut_nomenclatures(self, users):
 
     def test_get_observations_for_web(self, app, users, synthese_data, taxon_attribut):
         url = url_for("gn_synthese.get_observations_for_web")
-        schema = {
-            "definitions": jsonschema_definitions,
-            "$ref": "#/definitions/featurecollection",
-            "$defs": {"props": synthese_properties},
-        }
-
         r = self.client.get(url)
         assert r.status_code == Unauthorized.code
 
@@ -197,7 +224,9 @@
 
         r = self.client.get(url)
         assert r.status_code == 200
-        validate_json(instance=r.json, schema=schema)
+
+        r = self.client.get(url)
+        assert r.status_code == 200
 
         # Add cd_nom column
         app.config["SYNTHESE"]["LIST_COLUMNS_FRONTEND"] += [
             {
                 "prop": "cd_nom",
                 "name": "Cdnom",
             }
         ]
+
# schema["properties"]["observations"]["items"]["required"] = # test on synonymy and taxref attrs filters = { "cd_ref": [taxon_attribut.bib_nom.cd_ref], @@ -215,7 +245,6 @@ def test_get_observations_for_web(self, app, users, synthese_data, taxon_attribu } r = self.client.post(url, json=filters) assert r.status_code == 200 - validate_json(instance=r.json, schema=schema) assert len(r.json["features"]) > 0 for feature in r.json["features"]: assert feature["properties"]["cd_nom"] == taxon_attribut.bib_nom.cd_nom @@ -241,12 +270,11 @@ def test_get_observations_for_web(self, app, users, synthese_data, taxon_attribu } r = self.client.post(url, json=filters) assert r.status_code == 200 - validate_json(instance=r.json, schema=schema) assert {synthese_data[k].id_synthese for k in ["p1_af1", "p1_af2"]}.issubset( - {f["properties"]["id"] for f in r.json["features"]} + {f["properties"]["id_synthese"] for f in r.json["features"]} ) assert {synthese_data[k].id_synthese for k in ["p2_af1", "p2_af2"]}.isdisjoint( - {f["properties"]["id"] for f in r.json["features"]} + {f["properties"]["id_synthese"] for f in r.json["features"]} ) # test geometry filter with circle radius @@ -264,12 +292,11 @@ def test_get_observations_for_web(self, app, users, synthese_data, taxon_attribu } r = self.client.post(url, json=filters) assert r.status_code == 200 - validate_json(instance=r.json, schema=schema) assert {synthese_data[k].id_synthese for k in ["p1_af1", "p1_af2"]}.issubset( - {f["properties"]["id"] for f in r.json["features"]} + {f["properties"]["id_synthese"] for f in r.json["features"]} ) assert {synthese_data[k].id_synthese for k in ["p2_af1", "p2_af2"]}.isdisjoint( - {f["properties"]["id"] for f in r.json["features"]} + {f["properties"]["id_synthese"] for f in r.json["features"]} ) # test ref geo area filter @@ -280,12 +307,11 @@ def test_get_observations_for_web(self, app, users, synthese_data, taxon_attribu filters = {f"area_{com_type.id_type}": [chambery.id_area]} r = self.client.post(url, json=filters) assert r.status_code == 200 - validate_json(instance=r.json, schema=schema) assert {synthese_data[k].id_synthese for k in ["p1_af1", "p1_af2"]}.issubset( - {f["properties"]["id"] for f in r.json["features"]} + {f["properties"]["id_synthese"] for f in r.json["features"]} ) assert {synthese_data[k].id_synthese for k in ["p2_af1", "p2_af2"]}.isdisjoint( - {f["properties"]["id"] for f in r.json["features"]} + {f["properties"]["id_synthese"] for f in r.json["features"]} ) # test organism @@ -294,7 +320,6 @@ def test_get_observations_for_web(self, app, users, synthese_data, taxon_attribu } r = self.client.post(url, json=filters) assert r.status_code == 200 - validate_json(instance=r.json, schema=schema) assert len(r.json["features"]) >= 2 # FIXME # test status lr @@ -343,7 +368,9 @@ def test_get_observations_for_web_filter_comment(self, users, synthese_data, tax filters = {"has_comment": True} r = self.client.get(url, json=filters) - assert id_synthese in (feature["properties"]["id"] for feature in r.json["features"]) + assert id_synthese in ( + feature["properties"]["id_synthese"] for feature in r.json["features"] + ) def test_get_observations_for_web_filter_id_source(self, users, synthese_data, source): set_logged_user(self.client, users["self_user"]) @@ -358,7 +385,7 @@ def test_get_observations_for_web_filter_id_source(self, users, synthese_data, s for synthese in synthese_data.values() if synthese.id_source == id_source } - response_data = {feature["properties"]["id"] for feature in r.json["features"]} + 
response_data = {feature["properties"]["id_synthese"] for feature in r.json["features"]} assert expected_data.issubset(response_data) @pytest.mark.parametrize( @@ -391,7 +418,7 @@ def test_get_observations_for_web_filter_source_by_id_module( for synthese in synthese_data.values() if synthese.id_module in id_modules_selected } - response_data = {feature["properties"]["id"] for feature in r.json["features"]} + response_data = {feature["properties"]["id_synthese"] for feature in r.json["features"]} assert expected_data.issubset(response_data) assert len(response_data) == expected_length @@ -429,7 +456,9 @@ def test_get_synthese_data_cruved(self, app, users, synthese_data, datasets): assert len(features) > 0 for feat in features: - assert feat["properties"]["id"] in [synt.id_synthese for synt in synthese_data.values()] + assert feat["properties"]["id_synthese"] in [ + synt.id_synthese for synt in synthese_data.values() + ] assert response.status_code == 200 def test_get_synthese_data_aggregate(self, users, datasets, synthese_data): @@ -484,35 +513,6 @@ def test_filter_cor_observers(self, users, synthese_data): # le requete doit etre OK marlgré la geom NULL assert response.status_code == 200 - @pytest.mark.parametrize( - "additionnal_column", - [("altitude_min"), ("count_min_max"), ("nom_vern_or_lb_nom")], - ) - def test_get_observations_for_web_param_column_frontend( - self, app, users, synthese_data, additionnal_column - ): - """ - Test de renseigner le paramètre LIST_COLUMNS_FRONTEND pour renvoyer uniquement - les colonnes souhaitées - """ - app.config["SYNTHESE"]["LIST_COLUMNS_FRONTEND"] = [ - { - "prop": additionnal_column, - "name": "My label", - } - ] - - set_logged_user(self.client, users["self_user"]) - - response = self.client.get(url_for("gn_synthese.get_observations_for_web")) - data = response.get_json() - - expected_columns = {"id", "url_source", additionnal_column} - - assert all( - set(feature["properties"].keys()) == expected_columns for feature in data["features"] - ) - @pytest.mark.parametrize( "group_inpn", [ @@ -535,7 +535,7 @@ def test_get_observations_for_web_filter_group_inpn(self, users, synthese_data, ) response_json = response.json assert obs.id_synthese in { - synthese["properties"]["id"] for synthese in response_json["features"] + synthese["properties"]["id_synthese"] for synthese in response_json["features"] } def test_export(self, users): @@ -564,6 +564,36 @@ def test_export(self, users): ) assert response.status_code == 200 + @pytest.mark.parametrize( + "view_name,response_status_code", + [ + ("gn_synthese.v_synthese_for_web_app", 200), + ("gn_synthese.not_in_config", 403), + ("v_synthese_for_web_app", 400), # miss schema name + ("gn_synthese.v_metadata_for_export", 400), # miss required columns + ], + ) + def test_export_observations_custom_view(self, users, app, view_name, response_status_code): + set_logged_user(self.client, users["self_user"]) + if view_name != "gn_synthese.not_in_config": + app.config["SYNTHESE"]["EXPORT_OBSERVATIONS_CUSTOM_VIEWS"] = [ + { + "label": "Test export custom", + "view_name": view_name, + "geojson_4326_field": "st_asgeojson", + "geojson_local_field": "st_asgeojson", + } + ] + response = self.client.post( + url_for("gn_synthese.export_observations_web"), + json=[1, 2, 3], + query_string={ + "export_format": "geojson", + "view_name": view_name, + }, + ) + assert response.status_code == response_status_code + def test_export_observations(self, users, synthese_data, synthese_sensitive_data, modules): data_synthese = 
synthese_data.values() data_synthese_sensitive = synthese_sensitive_data.values() @@ -1403,7 +1433,9 @@ def blur_sensitive_observations(monkeypatch): def get_one_synthese_reponse_from_id(response: dict, id_synthese: int): return [ - synthese for synthese in response["features"] if synthese["properties"]["id"] == id_synthese + synthese + for synthese in response["features"] + if synthese["properties"]["id_synthese"] == id_synthese ][0] @@ -1531,7 +1563,9 @@ def test_get_observations_for_web_blurring_excluded( ] ) - json_synthese_ids = (feature["properties"]["id"] for feature in response_json["features"]) + json_synthese_ids = ( + feature["properties"]["id_synthese"] for feature in response_json["features"] + ) assert all(synthese_id not in json_synthese_ids for synthese_id in sensitive_synthese_ids) def test_get_observations_for_web_blurring_grouped_geom( @@ -1578,7 +1612,7 @@ def test_get_observations_for_web_blurring_grouped_geom( feature["geometry"] is None for feature in json_resp["features"] if all( - observation["id"] in sensitive_synthese_ids + observation["id_synthese"] in sensitive_synthese_ids for observation in feature["properties"]["observations"] ) ) diff --git a/backend/geonature/tests/utils.py b/backend/geonature/tests/utils.py index 1ffb7dda79..8bdc0adf6b 100644 --- a/backend/geonature/tests/utils.py +++ b/backend/geonature/tests/utils.py @@ -29,79 +29,3 @@ def get_id_nomenclature(nomenclature_type_mnemonique, cd_nomenclature): ) ) ) - - -jsonschema_definitions = { - "geometries": { - "BoundingBox": { - "type": "array", - "minItems": 4, - "items": {"type": "number"}, - }, - "PointCoordinates": {"type": "array", "minItems": 2, "items": {"type": "number"}}, - "Point": { - "title": "GeoJSON Point", - "type": "object", - "required": ["type", "coordinates"], - "properties": { - "type": {"type": "string", "enum": ["Point"]}, - "coordinates": { - "$ref": "#/definitions/geometries/PointCoordinates", - }, - "bbox": { - "$ref": "#/definitions/geometries/BoundingBox", - }, - }, - }, - }, - "feature": { - "title": "GeoJSON Feature", - "type": "object", - "required": ["type", "properties", "geometry"], - "properties": { - "type": {"type": "string", "enum": ["Feature"]}, - "id": {"oneOf": [{"type": "number"}, {"type": "string"}]}, - "properties": { - "oneOf": [ - {"type": "null"}, - {"$ref": "#/$defs/props"}, - ], - }, - "geometry": { - "oneOf": [ - {"type": "null"}, - {"$ref": "#/definitions/geometries/Point"}, - # {"$ref": "#/definitions/geometries/LineString"}, - # {"$ref": "#/definitions/geometries/Polygon"}, - # {"$ref": "#/definitions/geometries/MultiPoint"}, - # {"$ref": "#/definitions/geometries/MultiLineString"}, - # {"$ref": "#/definitions/geometries/MultiPolygon"}, - # {"$ref": "#/definitions/geometries/GeometryCollection"}, - ], - }, - "bbox": { - "$ref": "#/definitions/geometries/BoundingBox", - }, - }, - }, - "featurecollection": { - "title": "GeoJSON FeatureCollection", - "type": "object", - "required": ["type", "features"], - "properties": { - "type": { - "type": "string", - "enum": ["FeatureCollection"], - }, - "features": { - "type": "array", - "items": { - "$ref": "#/definitions/feature", - }, - }, - "bbox": { - "$ref": "#/definitions/geometries/BoundingBox", - }, - }, - }, -} diff --git a/backend/geonature/utils/config_schema.py b/backend/geonature/utils/config_schema.py index fd44323df4..f066dee806 100644 --- a/backend/geonature/utils/config_schema.py +++ b/backend/geonature/utils/config_schema.py @@ -4,19 +4,14 @@ import os -from marshmallow import ( - Schema, - 
fields, - validates_schema, - ValidationError, - post_load, -) +from warnings import warn + +from marshmallow import Schema, fields, validates_schema, ValidationError, post_load, pre_load from marshmallow.validate import OneOf, Regexp, Email, Length from geonature.core.gn_synthese.synthese_config import ( DEFAULT_EXPORT_COLUMNS, DEFAULT_LIST_COLUMN, - DEFAULT_COLUMNS_API_SYNTHESE, ) from geonature.utils.env import GEONATURE_VERSION, BACKEND_DIR, ROOT_DIR from geonature.utils.module import iter_modules_dist, get_module_config @@ -192,8 +187,8 @@ class GnPySchemaConf(Schema): SQLALCHEMY_DATABASE_URI = fields.String( required=True, validate=Regexp( - "^postgresql:\/\/.*:.*@[^:]+:\w+\/\w+", - error="Database uri is invalid ex: postgresql://monuser:monpass@server:port/db_name", + r"^(postgres(?:ql)?)((\+psycopg2)?):\/\/(?:([^@\s]+)@)?([^\/\s]+)(?:\/(\w+))?(?:\?(.+))?", + error="PostgreSQL database URL is invalid. Check for authorized URL here : https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING-URIS", ), ) SQLALCHEMY_TRACK_MODIFICATIONS = fields.Boolean(load_default=True) @@ -281,6 +276,13 @@ class GnFrontEndConf(Schema): DISPLAY_EMAIL_DISPLAY_INFO = fields.List(fields.String(), load_default=["NOM_VERN"]) +class ExportObservationSchema(Schema): + label = fields.String(required=True) + view_name = fields.String(required=True) + geojson_4326_field = fields.String(load_default="geojson_4326") + geojson_local_field = fields.String(load_default="geojson_local") + + class Synthese(Schema): # -------------------------------------------------------------------- # SYNTHESE - SEARCH FORM @@ -355,19 +357,18 @@ class Synthese(Schema): # -------------------------------------------------------------------- # SYNTHESE - OBSERVATIONS LIST - # Listes des champs renvoyés par l'API synthese '/synthese' - # Si on veut afficher des champs personnalisés dans le frontend (paramètre LIST_COLUMNS_FRONTEND) il faut - # d'abbord s'assurer que ces champs sont bien renvoyé par l'API ! + # Colonnes affichées par défaut sur la liste des résultats de la synthese # Champs disponibles: tous ceux de la vue 'v_synthese_for_web_app - COLUMNS_API_SYNTHESE_WEB_APP = fields.List( - fields.String, load_default=DEFAULT_COLUMNS_API_SYNTHESE - ) - # Colonnes affichées sur la liste des résultats de la sytnthese LIST_COLUMNS_FRONTEND = fields.List(fields.Dict, load_default=DEFAULT_LIST_COLUMN) + # Colonnes affichables sur la liste des résultats de la synthese via la modale de selection des colonnes + ADDITIONAL_COLUMNS_FRONTEND = fields.List(fields.Dict, load_default=[]) # -------------------------------------------------------------------- # SYNTHESE - DOWNLOADS (AKA EXPORTS) EXPORT_COLUMNS = fields.List(fields.String(), load_default=DEFAULT_EXPORT_COLUMNS) + EXPORT_OBSERVATIONS_CUSTOM_VIEWS = fields.List( + fields.Nested(ExportObservationSchema), load_default=[] + ) # Certaines colonnes sont obligatoires pour effectuer les filtres CRUVED EXPORT_ID_SYNTHESE_COL = fields.String(load_default="id_synthese") EXPORT_ID_DATASET_COL = fields.String(load_default="jdd_id") @@ -436,6 +437,22 @@ class Synthese(Schema): # Activate the blurring of sensitive observations. 
Otherwise, exclude them BLUR_SENSITIVE_OBSERVATIONS = fields.Boolean(load_default=True) + @pre_load + def warn_deprecated(self, data, **kwargs): + deprecated = { + "EXPORT_ID_SYNTHESE_COL", + "EXPORT_ID_DIGITISER_COL", + "EXPORT_OBSERVERS_COL", + "EXPORT_GEOJSON_4326_COL", + "EXPORT_GEOJSON_LOCAL_COL", + } + for deprecated_field in deprecated & set(data.keys()): + warn( + f"{deprecated_field} is deprecated - " + "Please use `EXPORT_OBSERVATIONS_CUSTOM_VIEWS` parameter to customize your synthese exports " + ) + return data + # Map configuration BASEMAP = [ diff --git a/backend/requirements-dependencies.in b/backend/requirements-dependencies.in index ffcac7117f..0b9d8f24c2 100644 --- a/backend/requirements-dependencies.in +++ b/backend/requirements-dependencies.in @@ -1,7 +1,7 @@ -pypnusershub>=2.1.3,<3 -pypnnomenclature>=1.6.1,<2 +pypnusershub>=2.1.4,<3 +pypnnomenclature>=1.6.2,<2 pypn_habref_api>=0.4.1,<1 utils-flask-sqlalchemy-geo>=0.3.1,<1 utils-flask-sqlalchemy>=0.4.1,<1 -taxhub>=1.13.3,<2 -pypn-ref-geo>=1.4.1,<2 +taxhub>=1.14,<2 +pypn-ref-geo>=1.5.2,<2 diff --git a/backend/requirements.txt b/backend/requirements.txt index 47db9d0248..2a68e351c9 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -233,13 +233,13 @@ pyphen==0.14.0 # via weasyprint pypn-habref-api==0.4.1 # via -r requirements-dependencies.in -pypn-ref-geo==1.5.1 +pypn-ref-geo==1.5.2 # via # -r requirements-dependencies.in # taxhub -pypnnomenclature==1.6.1 +pypnnomenclature==1.6.2 # via -r requirements-dependencies.in -pypnusershub==2.1.3 +pypnusershub==2.1.4 # via # -r requirements-dependencies.in # pypnnomenclature @@ -288,7 +288,7 @@ sqlalchemy==1.4.51 # utils-flask-sqlalchemy # utils-flask-sqlalchemy-geo # wtforms-sqlalchemy -taxhub==1.13.3 +taxhub==1.14.0 # via # -r requirements-dependencies.in # pypnnomenclature diff --git a/config/default_config.toml.example b/config/default_config.toml.example index 68cf538b6c..f9042684b9 100644 --- a/config/default_config.toml.example +++ b/config/default_config.toml.example @@ -269,8 +269,9 @@ MEDIA_CLEAN_CRONTAB = "0 1 * * *" # Colonne à afficher par défaut sur la liste des résultats de la synthese # Choisir le champ 'prop' parmis les colonnes suivantes : - # id (=id_synthese), date_min, cd_nom, lb_nom, nom_vern_or_lb_nom, + # id_synthese, date_min, cd_nom, lb_nom, nom_vern_or_lb_nom, # observers, dataset_name, url_source, count_min_max + # La liste des colonnes affichables est celle de la vue `gn_synthese.v_synthese_for_export`+ `nom_vern_or_lb_nom` et `count_min_max` LIST_COLUMNS_FRONTEND = [ { prop = "nom_vern_or_lb_nom", name = "Taxon" }, { prop = "date_min", name = "Date début" }, @@ -278,6 +279,13 @@ MEDIA_CLEAN_CRONTAB = "0 1 * * *" { prop = "dataset_name", name = "Jeu de données" } ] + # Colonnes affichables dans la liste des résulats, mais masquées par default + # Possibilité de les ajouter en cliquant sur la route crantée en haut de la liste + # La liste des colonnes affichables est celle de la vue `gn_synthese.v_synthese_for_export` + ADDITIONAL_COLUMNS_FRONTEND = [ + { prop = "lb_nom", name = "Nom scientifique" }, + ] + # Nombre de résultats à afficher pour la recherche autocompletée de taxon TAXON_RESULT_NUMBER = 20 @@ -317,17 +325,16 @@ MEDIA_CLEAN_CRONTAB = "0 1 * * *" # Nombre max d'observations dans les exports NB_MAX_OBS_EXPORT = 50000 - # Noms des colonnes obligatoires de la vue ``gn_synthese.v_synthese_for_export`` - EXPORT_ID_SYNTHESE_COL = "id_synthese" - EXPORT_ID_DATASET_COL = "jdd_id" - EXPORT_ID_DIGITISER_COL = "id_digitiser" - 
EXPORT_OBSERVERS_COL = "observateurs" - EXPORT_GEOJSON_4326_COL = "geojson_4326" - EXPORT_GEOJSON_LOCAL_COL = "geojson_local" - # Formats d'export disponibles ["csv", "geojson", "shapefile", "gpkg"] EXPORT_FORMAT = ["csv", "geojson", "shapefile"] + # Vues d'export personnalisées + EXPORT_OBSERVATIONS_CUSTOM_VIEWS = [ + { + label = "format personnalisé", + view_name = "schema_name.view_name" + } + ] # Noms des colonnes obligatoires de la vue ``gn_synthese.v_metadata_for_export`` EXPORT_METADATA_ID_DATASET_COL = "jdd_id" EXPORT_METADATA_ACTOR_COL = "acteurs" diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py b/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py index fc252224a7..3b4590601f 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py @@ -52,20 +52,36 @@ blueprint = Blueprint("occhab", __name__) +def get_joinedload_when_scope(scope): + joinedload_when_scope = [] + if scope != 0: + # Required when a scope is defined. + # The following enable the restricted user to access one (or more) stations' datasets information + joinedload_when_scope = [ + joinedload(TDatasets.cor_dataset_actor).options( + joinedload(CorDatasetActor.role), joinedload(CorDatasetActor.organism) + ), + joinedload(TDatasets.acquisition_framework).options( + joinedload(TAcquisitionFramework.cor_af_actor).options( + joinedload(CorAcquisitionFrameworkActor.role) + ) + ), + ] + return joinedload_when_scope + + @blueprint.route("/stations/", methods=["GET"]) @permissions.check_cruved_scope("R", module_code="OCCHAB", get_scope=True) def list_stations(scope): + joinedload_when_scope = get_joinedload_when_scope(scope) stations = Station.filter_by_params(request.args) stations = Station.filter_by_scope(scope=scope, query=stations) stations = stations.order_by(Station.date_min.desc()).options( raiseload("*"), joinedload(Station.observers), - joinedload(Station.dataset), + joinedload(Station.dataset).options(*joinedload_when_scope), ) - only = [ - "observers", - "dataset", - ] + only = ["observers", "dataset", "+cruved"] if request.args.get("habitats", default=False, type=int): only.extend( [ @@ -111,18 +127,7 @@ def get_station(id_station, scope): :rtype dict """ - joinedload_when_scope = [] - if scope != 0: - # Required when a scope is defined. - # The following enable the restricted user to access one (or more) stations' datasets information - joinedload_when_scope = [ - joinedload(TDatasets.cor_dataset_actor).options(joinedload(CorDatasetActor.role)), - joinedload(TDatasets.acquisition_framework).options( - joinedload(TAcquisitionFramework.cor_af_actor).options( - joinedload(CorAcquisitionFrameworkActor.role) - ) - ), - ] + joinedload_when_scope = get_joinedload_when_scope(scope) station = ( db.session.scalars( select(Station) diff --git a/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.html b/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.html index fa02c7ad45..73e876b1ad 100644 --- a/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.html +++ b/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.html @@ -12,13 +12,15 @@
- Station n° {{station.properties.id_station}} + Station n° {{ station.properties.id_station }} -
-
Jeu de donnée :
-

{{station?.properties.dataset?.dataset_name}}

+

{{ station?.properties.dataset?.dataset_name }}

Date :

- {{station?.properties.date_min | date:'dd/MM/yyyy'}} - - {{station?.properties.date_max | date:'dd/MM/yyyy'}} + {{ station?.properties.date_min | date: 'dd/MM/yyyy' }} - + {{ station?.properties.date_max | date: 'dd/MM/yyyy' }}

@@ -61,46 +58,55 @@
Observateur(s) :
-

- {{obs.nom_role}} {{obs.prenom_role}} - , +

+ {{ obs.nom_role }} {{ obs.prenom_role }} + ,

-

{{station?.properties.observers_txt}}

+

{{ station?.properties.observers_txt }}

- -
-
Altitude (en m) :
-

{{station?.properties.altitude_min}} - {{station?.properties.altitude_max}}

+
Altitude (en m) :
+

+ {{ station?.properties.altitude_min }} - + {{ station?.properties.altitude_max }} +

-
Surface (en m²) :
-

{{station?.properties.area}} ( - {{station?.properties.nomenclature_area_surface_calculation?.label_default}} )

+
Surface (en m²) :
+

+ {{ station?.properties.area }} + + ( + {{ station?.properties.nomenclature_area_surface_calculation?.label_default }} + ) + +

-
Type d'information géographique :
+
+ Type d'information géographique : +

- {{station.properties.nomenclature_geographic_object.label_default}}

+ {{ station.properties.nomenclature_geographic_object.label_default }} +

-
-
Habitats de la station :
+
Habitats de la station :
- +
{{hab.nom_cite}}{{ hab.nom_cite }} Habitats de la station :
-
-
Commentaire :
-

{{station?.properties.comment}}

+
+
Commentaire :
+

{{ station?.properties.comment }}

-
- -
-
+
-
{{currentHab?.nom_cite}}
+
+ {{ currentHab?.nom_cite }} +
- Technique de collecte : - {{currentHab?.nomenclature_collection_technique?.label_default}}
- Méthode de détermination : - {{currentHab?.nomenclature_determination_type?.label_default}}
- Pourcentage de recouvrement : - {{currentHab?.nomenclature_recovery_percentage}}
- Abondance : {{currentHab?.nomenclature_abundance?.label_default}}
+ Technique de collecte : + {{ currentHab?.nomenclature_collection_technique?.label_default }} +
+ Méthode de détermination : + {{ currentHab?.nomenclature_determination_type?.label_default }} +
+ Pourcentage de recouvrement : + {{ currentHab?.nomenclature_recovery_percentage }} +
+ Abondance : + {{ currentHab?.nomenclature_abundance?.label_default }} +
-
Information sur l'habitat (Habref) :
+
+ Information sur l'habitat (Habref) : +

- Nom : {{currentHab?.habref?.lb_code}} - - {{currentHab?.habref?.lb_hab_fr}} + Nom : + {{ currentHab?.habref?.lb_code }} - + {{ currentHab?.habref?.lb_hab_fr }} -

- Typologie : {{habInfo?.typo.lb_nom_typo}} + Typologie : + {{ habInfo?.typo.lb_nom_typo }} -
-
+
+
-
Infos
- - Infos let-c="close" > diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html index 88afdfc32c..bd02787501 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html @@ -1,7 +1,14 @@ -
+
-
+
@@ -22,7 +29,9 @@ " > - + @@ -51,28 +66,28 @@ - + - + > - + > - + >
@@ -84,9 +99,15 @@ (click)="formIsDisable()" >
-
+
- + Station
@@ -96,8 +117,7 @@
- + >
[idMenu]="config.OCCHAB.OBSERVER_LIST_ID" label="{{ 'Releve.Observers' | translate }}" [bindAllItem]="true" - > - + >
@@ -117,9 +136,11 @@
else elseblocDataset " > - {{ 'MetaData.Datasets' | translate }}: + {{ 'MetaData.Datasets' | translate }}: - {{ currentEditingStation?.properties?.dataset?.dataset_name }} + + {{ currentEditingStation?.properties?.dataset?.dataset_name }} +
@@ -129,8 +150,7 @@
moduleCode="OCCHAB" creatableInModule="OCCHAB" data-qa="pnx-occhab-form-dataset" - > - + >
@@ -145,9 +165,10 @@
[defaultToday]="true" label="{{ 'Releve.MinDate' | translate }}" [parentFormControl]="occHabForm.stationForm.get('date_min')" - (onSelectOrKeyIn)="globalFormService.synchronizeMax(occHabForm.stationForm, 'date_min', 'date_max')" - > - + (onSelectOrKeyIn)=" + globalFormService.synchronizeMax(occHabForm.stationForm, 'date_min', 'date_max') + " + >
[defaultToday]="true" label="{{ 'Releve.MaxDate' | translate }}" [parentFormControl]="occHabForm.stationForm.get('date_max')" - (onSelectOrKeyIn)="globalFormService.synchronizeMin(occHabForm.stationForm, 'date_min', 'date_max')" - > - + (onSelectOrKeyIn)=" + globalFormService.synchronizeMin(occHabForm.stationForm, 'date_min', 'date_max') + " + >
class="col-xs-3 col-sm-3 col-md-3 col-lg-3 padding-sm" *ngIf="config.OCCHAB.formConfig['altitude_min']" > - Altitude min + Altitude min class="col-xs-3 col-sm-3 col-md-3 col-lg-3 padding-sm" *ngIf="config.OCCHAB.formConfig['altitude_max']" > - Altitude max + Altitude max -
+
- Profondeur min + Profondeur min class="col-xs-3 col-sm-3 col-md-3 col-lg-3 padding-sm" *ngIf="config.OCCHAB.formConfig['depth_max']" > - Profondeur max + Profondeur max comparedKey="id_nomenclature" titleKey="definition_fr" displayedKey="label_fr" - > - + >
- Surface (en m²) + Surface (en m²) comparedKey="id_nomenclature" titleKey="definition_fr" displayedKey="label_fr" - > - + >
@@ -302,8 +334,7 @@
titleKey="definition_fr" displayedKey="label_fr" data-qa="pnx-occhab-form-geographic" - > - + >
@@ -338,8 +369,14 @@
-
- +
+ Habitat
-
-
+
+
- Sélectionner une typologie d'habitat + Sélectionner une typologie d'habitat @@ -380,7 +433,7 @@
.controls[occHabForm.currentEditingHabForm].get('habref') " queryParamSearch="search_name" - [othersGetParams]="{cd_type: occHabForm.typoHabControl.value, id_list: config.OCCHAB.ID_LIST_HABITAT}" + [othersGetParams]="{ + cd_type: occHabForm.typoHabControl.value, + id_list: config.OCCHAB.ID_LIST_HABITAT + }" [formatter]="formatter" keyValue="search_name" label="Habitat" placeholder="Tapez les premières lettres..." (onChange)="occHabForm.patchNomCite($event)" + > +
- -
@@ -424,8 +485,7 @@
titleKey="definition_fr" displayedKey="label_fr" [displayNullValue]="true" - > - + >
@@ -434,7 +494,7 @@
class="col" *ngIf="config.OCCHAB.formConfig['determiner']" > - Déterminateur + Déterminateur comparedKey="id_nomenclature" titleKey="definition_fr" displayedKey="label_fr" - > - + >
@@ -483,8 +542,7 @@
titleKey="definition_fr" displayedKey="label_fr" data-qa="pnx-occhab-form-technique-collect" - > - + >
class="form-group" *ngIf="config.OCCHAB.formConfig['recovery_percentage']" > - Pourcentage de recouvrement + Pourcentage de recouvrement titleKey="definition_fr" displayedKey="label_fr" [displayNullValue]="true" - > - + >
@@ -559,9 +616,7 @@
class="uppercase" (click)="validateHabitat()" [disabled]=" - occHabForm.stationForm.get('habitats').controls[ - occHabForm.currentEditingHabForm - ] && + occHabForm.stationForm.get('habitats').controls[occHabForm.currentEditingHabForm] && occHabForm.stationForm.get('habitats').controls[occHabForm.currentEditingHabForm] .invalid " @@ -574,6 +629,5 @@
-
diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.scss b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.scss index 0af5758a39..fb6dd4663c 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.scss +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.scss @@ -37,7 +37,8 @@ tbody { position: absolute; right: 50px; bottom: -10px; - box-shadow: 3px 5px 10px 2px rgba(0, 0, 0, 0.51), + box-shadow: + 3px 5px 10px 2px rgba(0, 0, 0, 0.51), 0 6px 20px 0 rgba(0, 0, 0, 0.19); } diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts index c6b36350c9..ee13118fff 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts @@ -76,6 +76,13 @@ export class OccHabFormComponent implements OnInit, OnDestroy { this.showHabForm = false; this.showTabHab = true; this._occHabDataService.getStation(params['id_station']).subscribe((station: any) => { + if (!station.properties.cruved.U) { + this._commonService.regularToaster( + 'error', + "Vous n'avez pas les permissions requises pour modifier cette station !" + ); + this._router.navigate(['occhab']); + } this.currentEditingStation = station; if (station.geometry.type == 'Point') { // set the input for the marker component diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html index c1276dd4b3..15c19e3ffe 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html @@ -2,34 +2,35 @@ + > + Téléchargement +
-
Télécharger les habitats des stations
-
+
Télécharger les habitats des stations
+

- Votre recherche comporte {{storeService.idsStation.length}} station(s) d'habitat + Votre recherche comporte {{ storeService.idsStation.length }} station(s) d'habitat

- NB: Le limite du nombre d'habitat exportables est de {{config.OCCHAB.NB_MAX_EXPORT}} + NB: + Le limite du nombre d'habitat exportables est de + {{ config.OCCHAB.NB_MAX_EXPORT }}

- - - - -
diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html index 9e070445e3..2700f09d37 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html @@ -1,4 +1,7 @@ -
+
-
+
- +
- +
- - + + - - + + - - + + - {{ row.habitats?.length }} + {{ row.habitats?.length }} {{ displayHabTooltip(row) }} @@ -158,15 +221,22 @@ - - - {{ row.date_min | date : 'dd-MM-yyyy' }} + + + {{ row.date_min | date: 'dd-MM-yyyy' }} - + {{ row.dataset.dataset_name }} @@ -176,12 +246,15 @@
- + - + > diff --git a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html index 561b83907d..624ef684ad 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html +++ b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html @@ -4,32 +4,39 @@ id="showDef" class="btn btn-sm btn-light box-shadow" (click)="getDefinitions()" ->Masquer les définitions des statuts +> + Masquer les définitions des statuts + +> + Afficher les définitions des statuts +

{{ occHabForm.stationForm.value.habitats.length - i }} + {{ occHabForm.stationForm.value.habitats.length - i }} + {{ hab?.nom_cite }} {{ hab?.id_nomenclature_abundance?.label_default }} @@ -32,7 +41,10 @@ class="btn btn-primary btn-sm" (click)="editHab(i)" > - + - +
- - - + + - - + + -
Statut Définition StatutDéfinition
- {{def.mnemonique }} {{def.definition_default}} + {{ def.mnemonique }} + {{ def.definition_default }}
diff --git a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.scss b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.scss index 33225fb725..b7dbedb4e6 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.scss +++ b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.scss @@ -7,13 +7,15 @@ th.table_status { width: 30%; } -td,th { +td, +th { border: 1px solid black; text-align: left; padding: 8px; } -button#showDef, button#hideDef{ +button#showDef, +button#hideDef { width: 100% !important; } diff --git a/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html b/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html index 866bf086f9..4d3086f59c 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html +++ b/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html @@ -1,8 +1,7 @@ - Comentaire : + Commentaire : + >
-
@@ -128,5 +130,4 @@
- - \ No newline at end of file + diff --git a/contrib/gn_module_validation/frontend/app/components/validation-synthese-carte/validation-synthese-carte.component.html b/contrib/gn_module_validation/frontend/app/components/validation-synthese-carte/validation-synthese-carte.component.html index 1032893f03..3cd78396b9 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-synthese-carte/validation-synthese-carte.component.html +++ b/contrib/gn_module_validation/frontend/app/components/validation-synthese-carte/validation-synthese-carte.component.html @@ -11,6 +11,5 @@ [zoomLevel]="1" (layerDrawed)="bindGeojsonForm($event)" (layerDeleted)="deleteControlValue()" - > - + > diff --git a/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.html b/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.html index d611e32f39..b7bdc971dd 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.html +++ b/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.html @@ -16,13 +16,10 @@ (select)="mapListService.onRowSelect($event)" (activate)="onActivate($event)" rowHeight="40" - > - - + /> - @@ -52,14 +48,18 @@ ngx-datatable-cell-template >
- + warning {{modif_text}} + > + {{ modif_text }} +
@@ -82,12 +82,11 @@ class="btn btn-info fa fa-info-circle" title="details" [disabled]="row.unique_id_sinp == null" - > - + >
- {{row.nomenclature_valid_status?.mnemonique}} - (automatique) + {{ row.nomenclature_valid_status?.mnemonique }} + (automatique)
- - {{row[col.column_name] | date:'dd-MM-yyyy'}} + {{ row[col.column_name] | date: 'dd-MM-yyyy' }} computer + > + computer +
- {{row?.nomenclature_valid_status?.mnemonique}} - (automatique) + {{ row?.nomenclature_valid_status?.mnemonique }} + (automatique)
-
@@ -204,8 +219,9 @@ container="body" placement="left" height="50px" - style="color:white" - > {{row.profile.score}}/3 + style="color: white" + > + {{ row.profile.score }}/3 {{row.profile.score}}/3 + style="color: white !important" + > + {{ row.profile.score }}/3 {{row.profile.score}}/3 + > + {{ row.profile.score }}/3
@@ -235,48 +252,47 @@ class="chip-disabled" container="body" placement="left" - > – + > + –
- Cohérence par rapport au profil + Cohérence par rapport au profil - + - + - +
Aire de répartition Aire de répartition - check + check - close + close
Phénologie Phénologie - check + check - close + close
AltitudesAltitudes - check + check - close + close
- - - - {{selectedCount}} selected / {{rowCount}} total + {{ selectedCount }} selected / {{ rowCount }} total > - - - - \ No newline at end of file +> diff --git a/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.scss b/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.scss index 6321ef95f6..f978482361 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.scss +++ b/contrib/gn_module_validation/frontend/app/components/validation-synthese-list/validation-synthese-list.component.scss @@ -4,10 +4,12 @@ height: 20px; padding: 0px; //background: #00ff00; - color: "black"; + color: 'black'; border: 1px solid black; text-align: center; - font: 15px Arial, sans-serif; + font: + 15px Arial, + sans-serif; position: absolute; left: 0%; transform: translateX(0%); @@ -110,7 +112,7 @@ margin: 0px !important; padding: 0px !important; } - .datatable-checkbox input[type="checkbox"]:before { + .datatable-checkbox input[type='checkbox']:before { border-color: #bfbdbd !important; } } @@ -176,21 +178,17 @@ button:active { } .chip-alert { - background-color: red!important; + background-color: red !important; } .chip-warning { - background-color: rgb(255, 123, 0)!important; + background-color: rgb(255, 123, 0) !important; } - - .chip-success { - background-color: green!important; + background-color: green !important; } - - .mat-chip { width: 70%; display: inline-block; diff --git a/contrib/gn_module_validation/frontend/app/components/validation.component.html b/contrib/gn_module_validation/frontend/app/components/validation.component.html index eca0a04f5b..59f7197cb6 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation.component.html +++ b/contrib/gn_module_validation/frontend/app/components/validation.component.html @@ -1,14 +1,20 @@ -
+
-
- + [regne]="(occtaxFormOccurrenceService.taxref | async)?.regne" + [group2Inpn]="(occtaxFormOccurrenceService.taxref | async)?.group2_inpn" + [group3Inpn]="(occtaxFormOccurrenceService.taxref | async)?.group3_inpn" + >
-
- {{'Counting.Errors.CountingObjectRequiredError' |translate}} + {{ 'Counting.Errors.CountingObjectRequiredError' | translate }}
-
- {{ 'Counting.NumberMin' | translate }} + {{ 'Counting.NumberMin' | translate }} + />
-
- {{ 'Counting.NumberMax' | translate }} + {{ 'Counting.NumberMax' | translate }} + />
{{ 'Counting.Errors.CountError' | translate }} @@ -84,16 +86,19 @@ label="{{ 'Counting.LifeStage' | translate }}" codeNomenclatureType="STADE_VIE" [parentFormControl]="form.get('id_nomenclature_life_stage')" - [regne]="(occtaxFormOccurrenceService.taxref|async)?.regne" - [group2Inpn]="(occtaxFormOccurrenceService.taxref|async)?.group2_inpn" - [group3Inpn]="(occtaxFormOccurrenceService.taxref|async)?.group3_inpn"> - + [regne]="(occtaxFormOccurrenceService.taxref | async)?.regne" + [group2Inpn]="(occtaxFormOccurrenceService.taxref | async)?.group2_inpn" + [group3Inpn]="(occtaxFormOccurrenceService.taxref | async)?.group3_inpn" + > - {{'Counting.Errors.LifeStageRequiredError' |translate}} + {{ 'Counting.Errors.LifeStageRequiredError' | translate }}
- + [regne]="(occtaxFormOccurrenceService.taxref | async)?.regne" + [group2Inpn]="(occtaxFormOccurrenceService.taxref | async)?.group2_inpn" + [group3Inpn]="(occtaxFormOccurrenceService.taxref | async)?.group3_inpn" + > - {{'Counting.Errors.SexRequiredError' |translate}} + {{ 'Counting.Errors.SexRequiredError' | translate }}
- - + >
-
- -
+
- {{'Media.Medias' |translate}} + {{ 'Media.Medias' | translate }}
+ @@ -12,22 +22,26 @@
@@ -208,14 +217,14 @@
@@ -225,12 +234,12 @@ +
+ - - - {{'Taxon.Errors.NomCiteRequiredError' |translate}} + {{ 'Taxon.Errors.NomCiteRequiredError' | translate }} - {{'Taxon.Errors.CdNomRequiredError' |translate}} + {{ 'Taxon.Errors.CdNomRequiredError' | translate }}
-
- TAXREF - Nom valide - TAXREF - Nom valide + - + >
-
-
+
- - + > - {{'Taxon.Errors.TechObsRequiredError' |translate}} + {{ 'Taxon.Errors.TechObsRequiredError' | translate }}
-
- - + > - {{'Taxon.Errors.BiologicalConditionRequiredError' |translate}} + {{ 'Taxon.Errors.BiologicalConditionRequiredError' | translate }}
-
- -
-
+
-
- {{ 'Taxon.Determiner' | translate }} - {{ 'Taxon.Determiner' | translate }} + - + - {{'Taxon.Errors.DeterminerRequiredError' |translate}} + {{ 'Taxon.Errors.DeterminerRequiredError' | translate }}
-
+ + - - - - {{'Taxon.Errors.DeterminationMethodRequiredError' |translate}} - -
+ {{ 'Taxon.Errors.DeterminationMethodRequiredError' | translate }} + +
-
- + - - - {{'Taxon.Errors.BiologicalConditionRequiredError' |translate}} + {{ 'Taxon.Errors.BiologicalConditionRequiredError' | translate }}
-
- - + >
@@ -199,130 +219,142 @@
Ajout d'un taxon
codeNomenclatureType="STATUT_BIO" [regne]="taxref?.regne" [group2Inpn]="taxref?.group2_inpn" - [group3Inpn]="taxref?.group3_inpn"> - + [group3Inpn]="taxref?.group3_inpn" + >
-
- - + >
-
- - + >
-
- - + >
-
- - + >
-
- {{ 'Taxon.DigitalProof' | translate }} + {{ 'Taxon.DigitalProof' | translate }} + />
- - {{'Taxon.Errors.ExistProofValidation' |translate}} + {{ 'Taxon.Errors.ExistProofValidation' | translate }}
-
- {{ 'Taxon.NonDigitalProof' | translate }} + {{ 'Taxon.NonDigitalProof' | translate }} + />
-
- {{'Taxon.Errors.NoExistProofError' |translate}} + {{ 'Taxon.Errors.NoExistProofError' | translate }}
- {{'Taxon.Errors.ExistproofError' |translate}} + {{ 'Taxon.Errors.ExistproofError' | translate }}
-
- + >
@@ -330,59 +362,55 @@
Ajout d'un taxon
[autoGenerated]="true" [myFormGroup]="occurrenceForm.get('additional_fields')" [formsDefinition]="additionalFieldsForm" -> - - -
+> + +
- {{'Counting.Counting' |translate}}s + {{ 'Counting.Counting' | translate }} + s
-
- {{'Taxon.Errors.CountingRequiredError' |translate}} + {{ 'Taxon.Errors.CountingRequiredError' | translate }}
- - +
- - - - Dénombrement #{{ i + 1 }} - - - - clear - - - - - - + + + Dénombrement #{{ i + 1 }} + + clear + + - + + - -
- -
-
+
- - - -
-
+
- + > - + > -
-
- - -
- {{ 'MetaData.Datasets' | translate }} - - {{ dataset?.dataset_name}} - -
-
- - - - +
+ + +
+ + {{ 'MetaData.Datasets' | translate }} + + + {{ dataset?.dataset_name }} + +
+
+ + +
-
@@ -96,8 +97,7 @@ label="{{ 'Releve.MinDate' | translate }}" [parentFormControl]="propertiesForm.get('date_min')" (onSelectOrKeyIn)="globalFormService.synchronizeMax(propertiesForm, 'date_min', 'date_max')" - > - + >
- + >
-
- {{'Releve.HourMin' | translate }} + {{ 'Releve.HourMin' | translate }} + />
@@ -169,40 +170,42 @@ *ngIf="config.OCCTAX.form_fields.hour_max" class="form-group" > - {{'Releve.HourMax' | translate }} + {{ 'Releve.HourMax' | translate }} + />
- {{'Releve.Errors.HourMaxError' |translate}} + {{ 'Releve.Errors.HourMaxError' | translate }}
-
- {{ 'Releve.MinAltitude' | translate }} + {{ 'Releve.MinAltitude' | translate }} + />
@@ -210,13 +213,13 @@ *ngIf="config.OCCTAX.form_fields.altitude_max" class="col-4" > - {{ 'Releve.MaxAltitude' | translate }} + {{ 'Releve.MaxAltitude' | translate }} + />
- + >
-
- {{'Releve.Errors.AltitudeError' |translate}} + {{ 'Releve.Errors.AltitudeError' | translate }}
- {{ 'Releve.MinDepth' | translate }} + {{ 'Releve.MinDepth' | translate }} + />
- {{ 'Releve.MaxDepth' | translate }} + {{ 'Releve.MaxDepth' | translate }} + />
-
- {{'Releve.Errors.DepthError' |translate}} + {{ 'Releve.Errors.DepthError' | translate }}
@@ -280,20 +280,18 @@ label="{{ 'Releve.GroupType' | translate }}" [parentFormControl]="propertiesForm.get('id_nomenclature_grp_typ')" codeNomenclatureType="TYP_GRP" - > - + >
- - {{'Releve.GroupMethod' | translate}} + {{ 'Releve.GroupMethod' | translate }} + />
- + >
- - -
@@ -317,52 +311,48 @@ label="{{ 'Releve.TechCollectCampanule' | translate }}" [parentFormControl]="propertiesForm.get('id_nomenclature_tech_collect_campanule')" codeNomenclatureType="TECHNIQUE_OBS" - > - + >
-
- + >
-
- {{ 'Releve.Comment' | translate }} + {{ 'Releve.Comment' | translate }} + >
- +>
- @@ -399,4 +388,4 @@ Modifier le relevé --> -
\ No newline at end of file + diff --git a/contrib/occtax/frontend/app/occtax-form/releve/releve.component.scss b/contrib/occtax/frontend/app/occtax-form/releve/releve.component.scss index e3e02ae6e9..ea11f7f147 100644 --- a/contrib/occtax/frontend/app/occtax-form/releve/releve.component.scss +++ b/contrib/occtax/frontend/app/occtax-form/releve/releve.component.scss @@ -47,12 +47,14 @@ button .mat-spinner { background-clip: padding-box; border: 1px solid #ced4da; border-radius: 0.2rem; - transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out; + transition: + border-color 0.15s ease-in-out, + box-shadow 0.15s ease-in-out; background-color: #ddd; cursor: not-allowed; } -:host ::ng-deep ngb-typeahead-window { +:host ::ng-deep ngb-typeahead-window { width: 100%; overflow: auto; max-height: 300px; @@ -60,4 +62,4 @@ button .mat-spinner { ::ng-deep .dropdown-item { white-space: inherit; -} \ No newline at end of file +} diff --git a/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.html b/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.html index 13d492f14f..934435abf9 100644 --- a/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.html +++ b/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.html @@ -1,86 +1,85 @@
-
- - + > warning
Erreur : cliquez pour reprendre ce taxon
-
- - - + - + - + [attr.data-qa]="'pnx-occtax-taxon-form-taxa-head-' + i" + > - -
+
ID occurrence :
{{ occurrence.id_occurrence_occtax }}
@@ -88,315 +87,375 @@
{{ 'Taxon.NomCite' | translate }} :
-
{{ removeHtml(occurrence.nom_cite) }}
+
+ {{ removeHtml(occurrence.nom_cite) }} +
Taxref (nom complet) :
-
{{occurrence?.taxref?.nom_complet}}
+
{{ occurrence?.taxref?.nom_complet }}
{{ 'Taxon.Determiner' | translate }} :
-
{{ occurrence?.determiner || "-" }}
+
{{ occurrence?.determiner || '-' }}
{{ 'Taxon.DeterminationMethod' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_determination_method) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_determination_method + ) || '-' + }}
{{ 'Taxon.ObservationStatus' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_observation_status) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_observation_status + ) || '-' + }}
{{ 'Taxon.ObsTech' | translate }} :
-
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_obs_technique) || "-" }} +
+ {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_obs_technique + ) || '-' + }}
{{ 'Taxon.BiologicalCondition' | translate }} :
-
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_bio_condition) || "-" }} +
+ {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_bio_condition + ) || '-' + }}
{{ 'Taxon.OccBehaviour' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_behaviour) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_behaviour) || + '-' + }}
{{ 'Taxon.Naturalness' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_naturalness) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_naturalness + ) || '-' + }}
{{ 'Taxon.BiologicalStatus' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_bio_status) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_bio_status + ) || '-' + }}
{{ 'Taxon.StatusSource' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_source_status) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_source_status + ) || '-' + }}
{{ 'Taxon.Blurring' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_blurring) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_blurring) || + '-' + }}
{{ 'Taxon.ExistProof' | translate }} :
- {{ occtaxTaxaListService.getLibelleByID(occurrence?.id_nomenclature_exist_proof) || "-" }} + {{ + occtaxTaxaListService.getLibelleByID( + occurrence?.id_nomenclature_exist_proof + ) || '-' + }}
{{ 'Taxon.DigitalProof' | translate }} :
-
{{ occurrence?.digital_proof || "-" }}
+
+ {{ occurrence?.digital_proof || '-' }} +
{{ 'Taxon.NonDigitalProof' | translate }} :
-
{{ occurrence?.non_digital_proof || "-" }}
+
+ {{ occurrence?.non_digital_proof || '-' }} +
{{ 'Taxon.Comment' | translate }} :
-
{{ occurrence?.comment || "-" }}
+
{{ occurrence?.comment || '-' }}
-
{{item.key}} :
-
{{item.value}}
+
{{ item.key }} :
+
{{ item.value }}
- -
- -
-
-
Version Taxref :
-
{{ occurrence?.meta_v_taxref }}
-
+ +
+
+
Version Taxref :
+
{{ occurrence?.meta_v_taxref }}
+
-
-
Groupe 1 INPN :
-
{{ occurrence.taxref.group1_inpn }}
-
-
-
Groupe 2 INPN :
-
{{ occurrence.taxref.group2_inpn }}
-
-
-
Règne :
-
{{ occurrence.taxref.regne }}
-
-
-
Phylum :
-
{{ occurrence.taxref.phylum }}
-
-
-
Classe :
-
{{ occurrence.taxref.classe }}
-
-
-
Ordre :
-
{{ occurrence.taxref.ordre }}
-
-
-
Famille :
-
{{ occurrence.taxref.famille }}
-
-
-
Sous-Famille :
-
{{ occurrence.taxref.sous_famille }}
-
-
-
Tribu :
-
{{ occurrence.taxref.tribu }}
+
+
Groupe 1 INPN :
+
{{ occurrence.taxref.group1_inpn }}
+
+
+
Groupe 2 INPN :
+
{{ occurrence.taxref.group2_inpn }}
+
+
+
Règne :
+
{{ occurrence.taxref.regne }}
+
+
+
Phylum :
+
{{ occurrence.taxref.phylum }}
+
+
+
Classe :
+
{{ occurrence.taxref.classe }}
+
+
+
Ordre :
+
{{ occurrence.taxref.ordre }}
+
+
+
Famille :
+
{{ occurrence.taxref.famille }}
+
+
+
Sous-Famille :
+
{{ occurrence.taxref.sous_famille }}
+
+
+
Tribu :
+
{{ occurrence.taxref.tribu }}
+
+
+
CD_NOM :
+
{{ occurrence.taxref.cd_nom }}
+
+
+
Nom complet :
+
{{ occurrence.taxref.nom_complet }}
+
+
+
CD_REF :
+
{{ occurrence.taxref.cd_ref }}
+
+
+
Nom valide :
+
{{ occurrence.taxref.nom_valide }}
+
+
+
Rang :
+
{{ occurrence.taxref.id_rang }}
+
+
+
Nom vern :
+
{{ occurrence.taxref.nom_vern }}
+
+
+
Nom vern eng :
+
{{ occurrence.taxref.nom_vern_eng }}
+
+ +
+ + + <!-- TAB pour afficher les infos de dénombrements --> + + + + Dénombrement + s + + + + +
{{ 'Counting.Counting' | translate }} #{{ i + 1 }}
+
-
CD_NOM :
-
{{ occurrence.taxref.cd_nom }}
+
ID {{ 'Counting.Counting' | translate }} :
+
{{ counting.id_counting_occtax }}
-
Nom complet :
-
{{ occurrence.taxref.nom_complet }}
+
UUID SINP :
+
+ {{ counting.unique_id_sinp_occtax }} +
-
-
CD_REF :
-
{{ occurrence.taxref.cd_ref }}
+
+
{{ 'Counting.LifeStage' | translate }} :
+
+ {{ + occtaxTaxaListService.getLibelleByID(counting.id_nomenclature_life_stage) || + '-' + }} +
-
-
Nom valide :
-
{{ occurrence.taxref.nom_valide }}
+
+
{{ 'Counting.Sex' | translate }} :
+
+ {{ + occtaxTaxaListService.getLibelleByID(counting.id_nomenclature_sex) || '-' + }} +
-
-
Rang :
-
{{ occurrence.taxref.id_rang }}
+
+
{{ 'Counting.NumberMin' | translate }} :
+
{{ counting.count_min || '-' }}
-
-
Nom vern :
-
{{ occurrence.taxref.nom_vern }}
+
+
{{ 'Counting.NumberMax' | translate }} :
+
{{ counting.count_max || '-' }}
-
-
Nom vern eng :
-
{{ occurrence.taxref.nom_vern_eng }}
+
+
{{ 'Counting.CountingObject' | translate }} :
+
+ {{ + occtaxTaxaListService.getLibelleByID(counting.id_nomenclature_obj_count) || + '-' + }} +
-
- - - <-- - TAB - pour - afficher - les - infos - de - dénombrements - -- - > - - - - Dénombrements - - - - - - -
{{ 'Counting.Counting' | translate }} #{{ i + 1 }} -
-
+
+
{{ item.key }} :
+
{{ item.value }}
+
+ + + +
-
UUID SINP :
-
{{ counting.unique_id_sinp_occtax }}
-
-
-
{{ 'Counting.LifeStage' | translate }} :
-
- {{ occtaxTaxaListService.getLibelleByID(counting.id_nomenclature_life_stage) || "-" }} -
-
-
-
{{ 'Counting.Sex' | translate }} :
-
- {{ occtaxTaxaListService.getLibelleByID(counting.id_nomenclature_sex) || "-" }} -
-
-
-
{{ 'Counting.NumberMin' | translate }} :
-
{{ counting.count_min || "-" }}
+
-
-
{{ 'Counting.NumberMax' | translate }} :
-
{{ counting.count_max || "-" }}
-
-
-
{{ 'Counting.CountingObject' | translate }} :
-
- {{ occtaxTaxaListService.getLibelleByID(counting.id_nomenclature_obj_count) || "-" }} -
-
-
-
{{ 'Counting.CountingType' | translate }} :
-
- {{ occtaxTaxaListService.getLibelleByID(counting.id_nomenclature_type_count) || "-" }} -
-
-
-
{{item.key}} :
-
{{item.value}}
-
- - - - -
{{'Media.Media' | translate}} ({{ i+1 }}/{{counting.medias.length}}) - : - {{ media.title_fr }} - - ({{ ms.typeMedia(media) }}, {{ media.author }}) - - {{media.description_fr}} -
-
- -
-
-
-
+ - - +
+ + -
-
- Aucun taxon pour ce relevé -
+
Aucun taxon pour ce relevé
-
diff --git a/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.scss b/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.scss index 1aba3038a5..58474297fa 100644 --- a/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.scss +++ b/contrib/occtax/frontend/app/occtax-form/taxa-list/taxa-list.component.scss @@ -9,13 +9,13 @@ height: 90%; overflow-y: auto; background-color: #95949833; - box-shadow: 2px 3px 4px 0px rgba(0, 0, 0, 0.2), - -2px 3px 4px 0px rgba(0, 0, 0, 0.2); - + box-shadow: + 2px 3px 4px 0px rgba(0, 0, 0, 0.2), + -2px 3px 4px 0px rgba(0, 0, 0, 0.2); } #zero-taxon-container { - padding: 10px + padding: 10px; } .mat-expansion-panel-body { @@ -23,22 +23,21 @@ } .right-aligned-header > .mat-content { - justify-content: space-between!important; + justify-content: space-between !important; } -.mat-content > mat-panel-title, .mat-content > mat-panel-description { +.mat-content > mat-panel-title, +.mat-content > mat-panel-description { flex: 0 0 auto !important; } -.mat-accordion .mat-expansion-panel{ - - border-radius: 0px!important; +.mat-accordion .mat-expansion-panel { + border-radius: 0px !important; } .btn-edit { color: #5343da; margin-right: 5px; - } .btn-clear { @@ -53,7 +52,7 @@ .list-values > div { padding: 1px 7px; - font-size:12px; + font-size: 12px; } .list-values > div:nth-child(odd) { @@ -80,12 +79,18 @@ } .in-progress-panel { - box-shadow: 0 3px 1px -2px rgba(0, 0, 0, 0.2), 0 2px 2px 0 rgba(0, 0, 0, 0.14), + box-shadow: + 0 3px 1px -2px rgba(0, 0, 0, 0.2), + 0 2px 2px 0 rgba(0, 0, 0, 0.14), + 0 1px 5px 0 rgba(0, 0, 0, 0.12); + -moz-box-shadow: + 0 3px 1px -2px rgba(0, 0, 0, 0.2), + 0 2px 2px 0 rgba(0, 0, 0, 0.14), + 0 1px 5px 0 rgba(0, 0, 0, 0.12); + -webkit-box-shadow: + 0 3px 1px -2px rgba(0, 0, 0, 0.2), + 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12); - -moz-box-shadow: 0 3px 1px -2px rgba(0, 0, 0, 0.2), - 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12); - -webkit-box-shadow: 0 3px 1px -2px rgba(0, 0, 0, 0.2), - 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12); border-radius: 4px; overflow: hidden; padding: 0 24px; diff --git a/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.html b/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.html index 661507de8b..3b6e29e08c 100644 --- a/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.html +++ b/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.html @@ -1,303 +1,423 @@
-
-
-
- - -
-
-
- +
+
+
+ +
+
+
+ + + + + Relevé n°{{ id }} - - - - Relevé n°{{id}} - - - - + - - - {{releve.dataset?.dataset_name}} + edit + + + + + {{ releve.dataset?.dataset_name }} +
-
Observateur(s) :
+
Observateur(s) :
- {{releve.observers_txt}} + {{ releve.observers_txt }} - , {{ observer.nom_complet }} + , + {{ observer.nom_complet }}
-
Entre le :
-
{{ releve.date_min | date:'dd/MM/yyyy' }} {{releve.hour_min}}
-
  et le :
-
{{ releve.date_max | date:'dd/MM/yyyy' }} {{releve.hour_max}}
+
Entre le :
+
+ {{ releve.date_min | date: 'dd/MM/yyyy' }} + {{ releve.hour_min }} +
+
  et le :
+
+ {{ releve.date_max | date: 'dd/MM/yyyy' }} + {{ releve.hour_max }} +
-
{{ 'Releve.PlaceName' | translate }} :
-
{{ releve.place_name || "-"}}
+
{{ 'Releve.PlaceName' | translate }} :
+
{{ releve.place_name || '-' }}
-
{{ 'Releve.Precision' | translate }} :
-
{{ releve.precision || "-"}}
+
{{ 'Releve.Precision' | translate }} :
+
{{ releve.precision || '-' }}
-
Altitude min :
-
{{ releve.altitude_min || "-"}}
+
Altitude min :
+
{{ releve.altitude_min || '-' }}
-
Altitude max :
-
{{ releve.altitude_max || "-"}}
+
Altitude max :
+
{{ releve.altitude_max || '-' }}
- +
-
{{dynamiqueField.attribut_label}} :
-
- {{releve?.additional_fields[dynamiqueField.attribut_name]}} +
{{ dynamiqueField.attribut_label }} :
+
+ {{ releve?.additional_fields[dynamiqueField.attribut_name] }}

- - +

-
{{ 'Releve.MinDepth' | translate }} :
-
{{ releve.depth_min || "-"}}
-
+
{{ 'Releve.MinDepth' | translate }} :
+
{{ releve.depth_min || '-' }}
+
-
{{ 'Releve.MaxDepth' | translate }} :
-
{{ releve.depth_max || "-"}}
+
{{ 'Releve.MaxDepth' | translate }} :
+
{{ releve.depth_max || '-' }}
- +
-
{{ 'Releve.TechCollectCampanule' | translate }} :
-
{{getLibelleByID(releve.id_nomenclature_tech_collect_campanule)}}
+
{{ 'Releve.TechCollectCampanule' | translate }} :
+
+ {{ getLibelleByID(releve.id_nomenclature_tech_collect_campanule) }} +
-
-
{{ 'Releve.GroupType' | translate }} :
-
{{getLibelleByID(releve.id_nomenclature_grp_typ)}}
+
+
{{ 'Releve.GroupType' | translate }} :
+
+ {{ getLibelleByID(releve.id_nomenclature_grp_typ) }} +
-
{{'Releve.GroupMethod' | translate}} :
-
{{releve.grp_method || "-" }}
+
{{ 'Releve.GroupMethod' | translate }} :
+
{{ releve.grp_method || '-' }}
-
UUID regroupement:
-
{{releve.unique_id_sinp_grp}}
-
+
UUID regroupement :
+
{{ releve.unique_id_sinp_grp }}
+
-
{{ 'Releve.GeoObjectNature' | translate }} :
-
{{getLibelleByID(releve.id_nomenclature_geo_object_nature)}}
+
{{ 'Releve.GeoObjectNature' | translate }} :
+
+ {{ getLibelleByID(releve.id_nomenclature_geo_object_nature) }} +
-
Habitat associé :
-
{{releve.habitat?.lb_hab_fr}} - {{releve.habitat?.lb_code}}
+
Habitat associé :
+
+ {{ releve.habitat?.lb_hab_fr }} - + {{ releve.habitat?.lb_code }} +
-
Commentaire :
-
{{releve.comment || "-"}}
-
- +
Commentaire :
+
{{ releve.comment || '-' }}
+
- + + + + {{ 'Taxon.ObservedTaxon' | translate }} + + + + visibility + {{ occ.taxref?.nom_vern !== null ? occ.taxref?.nom_vern + ' - ' : '' + }}{{ occ.taxref?.nom_complet }} + + - - {{ 'Taxon.ObservedTaxon' | translate }} - - - visibility - {{(occ.taxref?.nom_vern!==null) ? occ.taxref?.nom_vern+' - ' : ''}}{{occ.taxref?.nom_complet}} - - - - -
+
- - - Détails - {{(displayOccurrence|async).taxref.nom_valide }} + + Détails - + {{ (displayOccurrence | async).taxref.nom_valide }} +
-
{{ 'Taxon.ObservationStatus' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_observation_status)}}
+
{{ 'Taxon.ObservationStatus' | translate }} :
+
+ {{ + getLibelleByID( + (displayOccurrence | async).id_nomenclature_observation_status + ) + }} +
-
{{ 'Taxon.ObsTech' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_obs_technique)}}
+
{{ 'Taxon.ObsTech' | translate }} :
+
+ {{ + getLibelleByID( + (displayOccurrence | async).id_nomenclature_obs_technique + ) + }} +
-
{{ 'Taxon.BiologicalCondition' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_bio_condition)}}
+
{{ 'Taxon.BiologicalCondition' | translate }} :
+
+ {{ + getLibelleByID( + (displayOccurrence | async).id_nomenclature_bio_condition + ) + }} +
-
{{ 'Taxon.OccBehaviour' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_behaviour)}}
+
{{ 'Taxon.OccBehaviour' | translate }} :
+
+ {{ + getLibelleByID((displayOccurrence | async).id_nomenclature_behaviour) + }} +
-
{{ 'Taxon.BiologicalStatus' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_bio_status)}}
+
{{ 'Taxon.BiologicalStatus' | translate }} :
+
+ {{ + getLibelleByID((displayOccurrence | async).id_nomenclature_bio_status) + }} +
-
{{ 'Taxon.DeterminationMethod' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_determination_method)}}
+
{{ 'Taxon.DeterminationMethod' | translate }} :
+
+ {{ + getLibelleByID( + (displayOccurrence | async).id_nomenclature_determination_method + ) + }} +
-
{{ 'Taxon.Determiner' | translate }} :
-
{{(displayOccurrence|async).determiner || ""}}
+
{{ 'Taxon.Determiner' | translate }} :
+
+ {{ (displayOccurrence | async).determiner || '' }} +
-
Nom cité:
-
{{(displayOccurrence|async).nom_cite || ""}}
+
Nom cité :
+
+ {{ (displayOccurrence | async).nom_cite || '' }} +
-
{{ 'Taxon.ExistProof' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_exist_proof)}}
+
{{ 'Taxon.ExistProof' | translate }} :
+
+ {{ + getLibelleByID((displayOccurrence | async).id_nomenclature_exist_proof) + }} +
-
-
{{ 'Taxon.DigitalProof' | translate }} :
-
{{(displayOccurrence|async).digital_proof || "-" }}
+
+
{{ 'Taxon.DigitalProof' | translate }} :
+
+ {{ (displayOccurrence | async).digital_proof || '-' }} +
-
-
{{ 'Taxon.NonDigitalProof' | translate }} :
-
{{(displayOccurrence|async).non_digital_proof || "-"}}
+
+
{{ 'Taxon.NonDigitalProof' | translate }} :
+
+ {{ (displayOccurrence | async).non_digital_proof || '-' }} +
-
{{ 'Taxon.Blurring' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_blurring)}}
+
{{ 'Taxon.Blurring' | translate }} :
+
+ {{ getLibelleByID((displayOccurrence | async).id_nomenclature_blurring) }} +
-
{{ 'Taxon.StatusSource' | translate }} :
-
{{getLibelleByID((displayOccurrence|async).id_nomenclature_source_status)}}
+
{{ 'Taxon.StatusSource' | translate }} :
+
+ {{ + getLibelleByID( + (displayOccurrence | async).id_nomenclature_source_status + ) + }} +
-
{{ 'Taxon.Comment' | translate }} :
-
{{(displayOccurrence|async).comment || ""}}
+
{{ 'Taxon.Comment' | translate }} :
+
+ {{ (displayOccurrence | async).comment || '' }} +
- + - +
-
{{dynamiqueField.attribut_label}} :
-
- {{(displayOccurrence|async)?.additional_fields[dynamiqueField.attribut_name]}} +
{{ dynamiqueField.attribut_label }} :
+
+ {{ + (displayOccurrence | async)?.additional_fields[ + dynamiqueField.attribut_name + ] + }}
- - -
+ +
-
{{ 'Counting.LifeStage' | translate }} :
-
{{getLibelleByID(counting.id_nomenclature_life_stage)}}
+
{{ 'Counting.LifeStage' | translate }} :
+
+ {{ getLibelleByID(counting.id_nomenclature_life_stage) }} +
- +
-
{{ 'Counting.Sex' | translate }} :
-
{{getLibelleByID(counting.id_nomenclature_sex)}}
+
{{ 'Counting.Sex' | translate }} :
+
+ {{ getLibelleByID(counting.id_nomenclature_sex) }} +
- +
-
{{ 'Counting.CountingObject' | translate }} :
-
{{getLibelleByID(counting.id_nomenclature_obj_count)}}
+
{{ 'Counting.CountingObject' | translate }} :
+
+ {{ getLibelleByID(counting.id_nomenclature_obj_count) }} +
- +
-
{{ 'Counting.CountingType' | translate }} :
-
{{getLibelleByID(counting.id_nomenclature_type_count)}}
+
{{ 'Counting.CountingType' | translate }} :
+
+ {{ getLibelleByID(counting.id_nomenclature_type_count) }} +
- +
-
{{ 'Counting.NumberMin' | translate }} :
-
{{counting.count_min || "-"}}
+
{{ 'Counting.NumberMin' | translate }} :
+
+ {{ counting.count_min || '-' }} +
- +
-
{{ 'Counting.NumberMax' | translate }} :
-
{{counting.count_max || "-"}}
+
{{ 'Counting.NumberMax' | translate }} :
+
+ {{ counting.count_max || '-' }} +
- +
-
{{ 'Counting.ValidationStatus' | translate }} :
-
{{counting?.validation_status?.mnemonique || "-"}}
+
{{ 'Counting.ValidationStatus' | translate }} :
+
+ {{ counting?.validation_status?.mnemonique || '-' }} +
-
UUID SINP :
-
{{counting?.unique_id_sinp_occtax}}
+
UUID SINP :
+
+ {{ counting?.unique_id_sinp_occtax }} +
- + -
-
{{dynamiqueField.attribut_label}} :
-
- {{counting?.additional_fields[dynamiqueField.attribut_name]}} +
+
{{ dynamiqueField.attribut_label }} :
+
+ {{ counting?.additional_fields[dynamiqueField.attribut_name] }}
-
{{ 'Media.Media' | translate }} {{ i + 1 }}/{{ counting.medias.length}} :
-
{{ media.title_fr }} - - ({{ ms.typeMedia(media) }}, {{ media.author }}) - - {{media.description_fr}} +
+ {{ 'Media.Media' | translate }} {{ i + 1 }}/{{ + counting.medias.length + }} + : +
+
+ + {{ media.title_fr }} + + ({{ ms.typeMedia(media) }}, {{ media.author }}) + {{ media.description_fr }}
- @@ -308,56 +428,53 @@ - - -
- -
+ +
+ +
Chargement des données en cours +
- - -
+ + + + diff --git a/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.scss b/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.scss index 78355a4536..3134949f70 100644 --- a/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.scss +++ b/contrib/occtax/frontend/app/occtax-map-info/occtax-map-info.component.scss @@ -1,4 +1,3 @@ - .selected-taxon { cursor: pointer; list-style-type: none; @@ -17,14 +16,15 @@ margin-top: 10px; } - .form-container { overflow-y: auto; } .button-rounded { border-radius: 30px; - box-shadow: 4px 5px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19); + box-shadow: + 4px 5px 8px 0 rgba(0, 0, 0, 0.2), + 0 6px 20px 0 rgba(0, 0, 0, 0.19); padding-right: 10px; padding-left: 10px; margin-left: 15px; @@ -42,7 +42,6 @@ color: #656565ba; } - #info-geo { margin-top: 5px; } @@ -54,8 +53,10 @@ .occtax-info { background-color: white; padding: 10px 15px; - box-shadow: 0 2px 1px -1px rgba(0, 0, 0, 0.2), 0 1px 1px 0 rgba(0, 0, 0, 0.14), - 0 1px 3px 0 rgba(0, 0, 0, 0.12); + box-shadow: + 0 2px 1px -1px rgba(0, 0, 0, 0.2), + 0 1px 1px 0 rgba(0, 0, 0, 0.14), + 0 1px 3px 0 rgba(0, 0, 0, 0.12); height: 100%; } diff --git a/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.html b/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.html index 9dc6eb130a..3d64436b14 100644 --- a/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.html +++ b/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.html @@ -1,85 +1,97 @@ -
+
- - + [idList]="config.OCCTAX.id_taxon_list" + [charNumber]="3" + [listLength]="20" + >
- - +
- - + [idMenu]="config.OCCTAX.id_observers_list" + >
- - + [moduleCode]="moduleService.currentModule?.module_code" + >
- - + [parentFormControl]="occtaxMapListService.dynamicFormGroup.controls.date_low" + >
- - + [parentFormControl]="occtaxMapListService.dynamicFormGroup.controls.date_up" + >
- - - - + cx="5" + cy="30" + r="3" + stroke="red" + stroke-width="0" + fill="red" + /> -
-
- + - + [formsDefinition]="formsDefinition" + >
diff --git a/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.scss b/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.scss index 105757dec0..bb7d42bd95 100644 --- a/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.scss +++ b/contrib/occtax/frontend/app/occtax-map-list/filter/occtax-map-list-filter.component.scss @@ -21,7 +21,7 @@ #refresh-filter { color: red; } -#collapse-settings{ +#collapse-settings { padding: 5px 15px; } @@ -30,5 +30,5 @@ } ::ng-deep .dynamic-form { - display: block!important; + display: block !important; } diff --git a/contrib/occtax/frontend/app/occtax-map-list/occtax-map-list.component.html b/contrib/occtax/frontend/app/occtax-map-list/occtax-map-list.component.html index 62c3fac234..5fd9d88f11 100644 --- a/contrib/occtax/frontend/app/occtax-map-list/occtax-map-list.component.html +++ b/contrib/occtax/frontend/app/occtax-map-list/occtax-map-list.component.html @@ -1,4 +1,7 @@ -
+
@@ -28,7 +31,10 @@ data-qa="pnx-occtax-filter" > Filtrer - +
@@ -42,7 +48,10 @@ [ngStyle]="{ 'min-height.px': cardContentHeight }" > - +
- - -
ID relevé : {{ row.id_releve_occtax }}
+ + +
+ ID relevé : + {{ row.id_releve_occtax }} +
-
Commentaire : {{ row.comment }}
+
+ Commentaire : + {{ row.comment }} +
- Altitude : {{ + Altitude : + {{ row.altitude_min == row.altitude_max ? row.altitude_min : row.altitude_min + ' - ' + row.altitude_max }}
- Saisi par : {{ row?.digitiser?.nom_complet || '-'}} + Saisi par : + {{ row?.digitiser?.nom_complet || '-' }}
-
Observé par:
+
Observé par :
{{ obs }}
-
Taxon(s):
+
Taxon(s) :
Aucun taxon
- {{ taxon.taxName }}
+ {{ taxon.taxName }} +
@@ -130,24 +154,29 @@ aria-expanded="false" matTooltip="{{ 'List.Columns' | translate }}" (click)="openModalCol($event, modalCol)" - > - + > - + - + > - + - - +