diff --git a/CHANGELOG.rst b/CHANGELOG.rst index ea855a513..3a43116e3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,6 +6,14 @@ dcicutils Change Log ---------- +8.10.0 +====== + +* Added merge capabilities to structured_data. +* Added Question class to command_utils (factored out of smaht-submitr). +* Refactored out some identifying property related code from portal_object_utils to portal_utils. +* Internalized lookup_strategy related code to structured_data/portal_object_utils/portal_utils. + 8.9.0 ===== diff --git a/dcicutils/command_utils.py b/dcicutils/command_utils.py index ba09d57a1..8b230520a 100644 --- a/dcicutils/command_utils.py +++ b/dcicutils/command_utils.py @@ -1,3 +1,4 @@ +from __future__ import annotations import contextlib import functools import glob @@ -7,7 +8,7 @@ import requests import subprocess -from typing import Optional +from typing import Callable, Optional from .exceptions import InvalidParameterError from .lang_utils import there_are from .misc_utils import INPUT, PRINT, environ_bool, print_error_message, decorator @@ -384,3 +385,70 @@ def fail(*message): message = str(e) # Note: We ignore the type, which isn't intended to be shown. PRINT(message) exit(1) + + +class Question: + """ + Supports asking the user (via stdin) a yes/no question, possibly repeatedly; and after + some maximum number times of the same answer in a row (consecutively), then asks them + if they want to automatically give that same answer to any/all subsequent questions. + Supports static/global list of such Question instances, hashed (only) by the question text. 
+ """ + _static_instances = {} + + @staticmethod + def instance(question: Optional[str] = None, + max: Optional[int] = None, printf: Optional[Callable] = None) -> Question: + question = question if isinstance(question, str) else "" + if not (instance := Question._static_instances.get(question)): + Question._static_instances[question] = (instance := Question(question, max=max, printf=printf)) + return instance + + @staticmethod + def yes(question: Optional[str] = None, + max: Optional[int] = None, printf: Optional[Callable] = None) -> bool: + return Question.instance(question, max=max, printf=printf).ask() + + def __init__(self, question: Optional[str] = None, + max: Optional[int] = None, printf: Optional[Callable] = None) -> None: + self._question = question if isinstance(question, str) else "" + self._max = max if isinstance(max, int) and max > 0 else None + self._print = printf if callable(printf) else print + self._yes_consecutive_count = 0 + self._no_consecutive_count = 0 + self._yes_automatic = False + self._no_automatic = False + + def ask(self, question: Optional[str] = None) -> bool: + + def question_automatic(value: str) -> bool: + nonlocal self + RARROW = "▶" + LARROW = "◀" + if yes_or_no(f"{RARROW}{RARROW}{RARROW}" + f" Do you want to answer {value} to all such questions?" + f" {LARROW}{LARROW}{LARROW}"): + return True + self._yes_consecutive_count = 0 + self._no_consecutive_count = 0 + + if self._yes_automatic: + return True + elif self._no_automatic: + return False + elif yes_or_no((question if isinstance(question, str) else "") or self._question or "Undefined question"): + self._yes_consecutive_count += 1 + self._no_consecutive_count = 0 + if (self._no_consecutive_count == 0) and self._max and (self._yes_consecutive_count >= self._max): + # Have reached the maximum number of consecutive YES answers; ask if YES to all subsequent. 
+ if question_automatic("YES"): + self._yes_automatic = True + return True + else: + self._no_consecutive_count += 1 + self._yes_consecutive_count = 0 + if (self._yes_consecutive_count == 0) and self._max and (self._no_consecutive_count >= self._max): + # Have reached the maximum number of consecutive NO answers; ask if NO to all subsequent. + if question_automatic("NO"): + self._no_automatic = True + return False diff --git a/dcicutils/misc_utils.py b/dcicutils/misc_utils.py index 0d719c421..e830e55e5 100644 --- a/dcicutils/misc_utils.py +++ b/dcicutils/misc_utils.py @@ -4,6 +4,7 @@ from collections import namedtuple import appdirs +from copy import deepcopy import contextlib import datetime import functools @@ -2199,28 +2200,58 @@ def merge_key_value_dict_lists(x, y): return [key_value_dict(k, v) for k, v in merged.items()] -def merge_objects(target: Union[dict, List[Any]], source: Union[dict, List[Any]], full: bool = False) -> dict: +def merge_objects(target: Union[dict, List[Any]], source: Union[dict, List[Any]], + full: bool = False, # deprecated + expand_lists: Optional[bool] = None, + primitive_lists: bool = False, + copy: bool = False, _recursing: bool = False) -> Union[dict, List[Any]]: """ - Merges the given source dictionary or list into the target dictionary or list. - This MAY well change the given target (dictionary or list) IN PLACE. - The the full argument is True then any target lists longer than the - source be will be filled out with the last element(s) of the source. + Merges the given source dictionary or list into the target dictionary or list and returns the + result. This MAY well change the given target (dictionary or list) IN PLACE ... UNLESS the copy + argument is True, then the given target will not change as a local copy is made (and returned). 
+ + If the expand_lists argument is True then any target lists longer than the + source will be filled out with the last element(s) of the source; the full + argument (is deprecated and) is a synonym for this. The default is False. + + If the primitive_lists argument is True then lists of primitives (i.e. lists in which + NONE of its elements are dictionaries, lists, or tuples) will themselves be treated + like primitives, meaning the whole of a source list will replace the corresponding + target; otherwise they will be merged normally, meaning each element of a source list + will be merged, recursively, into the corresponding target list. The default is False. """ + def is_primitive_list(value: Any) -> bool: # noqa + if not isinstance(value, list): + return False + for item in value: + if isinstance(item, (dict, list, tuple)): + return False + return True + if target is None: return source + if expand_lists not in (True, False): + expand_lists = full is True + if (copy is True) and (_recursing is not True): + target = deepcopy(target) if isinstance(target, dict) and isinstance(source, dict) and source: for key, value in source.items(): - target[key] = merge_objects(target[key], value, full) if key in target else value + if ((primitive_lists is True) and + (key in target) and is_primitive_list(target[key]) and is_primitive_list(value)): # noqa + target[key] = value + else: + target[key] = merge_objects(target[key], value, + expand_lists=expand_lists, _recursing=True) if key in target else value elif isinstance(target, list) and isinstance(source, list) and source: for i in range(max(len(source), len(target))): if i < len(target): if i < len(source): - target[i] = merge_objects(target[i], source[i], full) - elif full: - target[i] = merge_objects(target[i], source[len(source) - 1], full) + target[i] = merge_objects(target[i], source[i], expand_lists=expand_lists, _recursing=True) + elif expand_lists is True: + target[i] = merge_objects(target[i], 
source[len(source) - 1], expand_lists=expand_lists) else: target.append(source[i]) - elif source: + elif source not in (None, {}, []): target = source return target diff --git a/dcicutils/portal_object_utils.py b/dcicutils/portal_object_utils.py index 200ea698d..9a64cb2bd 100644 --- a/dcicutils/portal_object_utils.py +++ b/dcicutils/portal_object_utils.py @@ -1,6 +1,5 @@ from copy import deepcopy from functools import lru_cache -import re from typing import Any, Callable, List, Optional, Tuple, Type, Union from dcicutils.data_readers import RowReader from dcicutils.misc_utils import create_readonly_object @@ -14,11 +13,9 @@ class PortalObject: _PROPERTY_DELETION_SENTINEL = RowReader.CELL_DELETION_SENTINEL - def __init__(self, data: dict, portal: Portal = None, - schema: Optional[Union[dict, Schema]] = None, type: Optional[str] = None) -> None: + def __init__(self, data: dict, portal: Optional[Portal] = None, type: Optional[str] = None) -> None: self._data = data if isinstance(data, dict) else {} self._portal = portal if isinstance(portal, Portal) else None - self._schema = schema if isinstance(schema, dict) else (schema.data if isinstance(schema, Schema) else None) self._type = type if isinstance(type, str) else "" @property @@ -32,7 +29,7 @@ def portal(self) -> Optional[Portal]: @property @lru_cache(maxsize=1) def type(self) -> str: - return self._type or Portal.get_schema_type(self._data) or (Schema(self._schema).type if self._schema else "") + return self._type or Portal.get_schema_type(self._data) or "" @property @lru_cache(maxsize=1) @@ -47,7 +44,7 @@ def uuid(self) -> Optional[str]: @property @lru_cache(maxsize=1) def schema(self) -> Optional[dict]: - return self._schema if self._schema else (self._portal.get_schema(self.type) if self._portal else None) + return self._portal.get_schema(self.type) if self._portal else None def copy(self) -> PortalObject: return PortalObject(deepcopy(self.data), portal=self.portal, type=self.type) @@ -59,39 +56,29 @@ def 
identifying_properties(self) -> Optional[List[str]]: Returns the list of all identifying property names of this Portal object which actually have values. Implicitly include "uuid" and "identifier" properties as identifying properties if they are actually properties in the object schema, and favor these (first); defavor "aliases"; no other ordering defined. + Changed (2024-05-26) to use portal_utils.get_identifying_property_names; migrating some intricate stuff there. """ - if not (schema := self.schema) or not (schema_identifying_properties := schema.get("identifyingProperties")): - return None - identifying_properties = [] - for identifying_property in schema_identifying_properties: - if identifying_property not in ["uuid", "identifier", "aliases"]: - if self._data.get(identifying_property): - identifying_properties.append(identifying_property) - if self._data.get("identifier"): - identifying_properties.insert(0, "identifier") - if self._data.get("uuid"): - identifying_properties.insert(0, "uuid") - if "aliases" in schema_identifying_properties and self._data.get("aliases"): - identifying_properties.append("aliases") - return identifying_properties or None + # Migrating to and unifying this in portal_utils.Portal.get_identifying_paths (2024-05-26). 
+ return self._portal.get_identifying_property_names(self.type, portal_object=self._data) if self._portal else [] @lru_cache(maxsize=8192) def lookup(self, raw: bool = False, ref_lookup_strategy: Optional[Callable] = None) -> Tuple[Optional[PortalObject], Optional[str], int]: + if not (identifying_paths := self._get_identifying_paths(ref_lookup_strategy=ref_lookup_strategy)): + return None, None, 0 nlookups = 0 first_identifying_path = None try: - if identifying_paths := self._get_identifying_paths(ref_lookup_strategy=ref_lookup_strategy): - for identifying_path in identifying_paths: - if not first_identifying_path: - first_identifying_path = identifying_path - nlookups += 1 - if (value := self._portal.get(identifying_path, raw=raw)) and (value.status_code == 200): - return ( - PortalObject(value.json(), portal=self._portal, type=self.type if raw else None), - identifying_path, - nlookups - ) + for identifying_path in identifying_paths: + if not first_identifying_path: + first_identifying_path = identifying_path + nlookups += 1 + if self._portal and (item := self._portal.get(identifying_path, raw=raw)) and (item.status_code == 200): + return ( + PortalObject(item.json(), portal=self._portal, type=self.type if raw else None), + identifying_path, + nlookups + ) except Exception: pass return None, first_identifying_path, nlookups @@ -159,64 +146,12 @@ def diff_deleting(value: Any) -> object: # noqa @lru_cache(maxsize=1) def _get_identifying_paths(self, ref_lookup_strategy: Optional[Callable] = None) -> Optional[List[str]]: - """ - Returns a list of the possible Portal URL paths identifying this Portal object. 
- """ - identifying_paths = [] - if not (identifying_properties := self.identifying_properties): - if self.uuid: - if self.type: - identifying_paths.append(f"/{self.type}/{self.uuid}") - identifying_paths.append(f"/{self.uuid}") - return identifying_paths - for identifying_property in identifying_properties: - if identifying_value := self._data.get(identifying_property): - if identifying_property == "uuid": - if self.type: - identifying_paths.append(f"/{self.type}/{identifying_value}") - identifying_paths.append(f"/{identifying_value}") - # For now at least we include the path both with and without the schema type component, - # as for some identifying values, it works (only) with, and some, it works (only) without. - # For example: If we have FileSet with "accession", an identifying property, with value - # SMAFSFXF1RO4 then /SMAFSFXF1RO4 works but /FileSet/SMAFSFXF1RO4 does not; and - # conversely using "submitted_id", also an identifying property, with value - # UW_FILE-SET_COLO-829BL_HI-C_1 then /UW_FILE-SET_COLO-829BL_HI-C_1 does - # not work but /FileSet/UW_FILE-SET_COLO-829BL_HI-C_1 does work. - elif isinstance(identifying_value, list): - for identifying_value_item in identifying_value: - if self.type: - identifying_paths.append(f"/{self.type}/{identifying_value_item}") - identifying_paths.append(f"/{identifying_value_item}") - else: - # TODO: Import from somewhere ... - lookup_options = 0 - if schema := self.schema: - # TODO: Hook into the ref_lookup_strategy thing in structured_data to make - # sure we check accession format (since it does not have a pattern). 
- if callable(ref_lookup_strategy): - lookup_options, ref_validator = ref_lookup_strategy( - self._portal, self.type, schema, identifying_value) - if callable(ref_validator): - if ref_validator(schema, identifying_property, identifying_value) is False: - continue - if pattern := schema.get("properties", {}).get(identifying_property, {}).get("pattern"): - if not re.match(pattern, identifying_value): - # If this identifying value is for a (identifying) property which has a - # pattern, and the value does NOT match the pattern, then do NOT include - # this value as an identifying path, since it cannot possibly be found. - continue - if not lookup_options: - lookup_options = Portal.LOOKUP_DEFAULT - if Portal.is_lookup_root_first(lookup_options): - identifying_paths.append(f"/{identifying_value}") - if Portal.is_lookup_specified_type(lookup_options) and self.type: - identifying_paths.append(f"/{self.type}/{identifying_value}") - if Portal.is_lookup_root(lookup_options) and not Portal.is_lookup_root_first(lookup_options): - identifying_paths.append(f"/{identifying_value}") - if Portal.is_lookup_subtypes(lookup_options): - for subtype_name in self._portal.get_schema_subtype_names(self.type): - identifying_paths.append(f"/{subtype_name}/{identifying_value}") - return identifying_paths or None + if not self._portal and (uuid := self.uuid): + return [f"/{uuid}"] + # Migrating to and unifying this in portal_utils.Portal.get_identifying_paths (2024-05-26). 
+ return self._portal.get_identifying_paths(self._data, + portal_type=self.schema, + lookup_strategy=ref_lookup_strategy) if self._portal else None def _normalized_refs(self, refs: List[dict]) -> Tuple[PortalObject, int]: """ diff --git a/dcicutils/portal_utils.py b/dcicutils/portal_utils.py index b6bc16684..0f0bba5e8 100644 --- a/dcicutils/portal_utils.py +++ b/dcicutils/portal_utils.py @@ -1,5 +1,6 @@ from collections import deque from functools import lru_cache +from dcicutils.function_cache_decorator import function_cache import io import json from pyramid.config import Configurator as PyramidConfigurator @@ -18,6 +19,7 @@ from dcicutils.common import APP_SMAHT, OrchestratedApp, ORCHESTRATED_APPS from dcicutils.ff_utils import get_metadata, get_schema, patch_metadata, post_metadata from dcicutils.misc_utils import to_camel_case, VirtualApp +from dcicutils.schema_utils import get_identifying_properties from dcicutils.tmpfile_utils import temporary_file Portal = Type["Portal"] # Forward type reference for type hints. @@ -48,15 +50,16 @@ class Portal: FILE_TYPE_SCHEMA_NAME = "File" # Object lookup strategies; on a per-reference (type/value) basis, used currently ONLY by - # structured_data.py; controlled by an optional ref_lookup_strategy callable; default is + # structured_data.py; controlled by an optional lookup_strategy callable; default is # lookup at root path but after the specified type path lookup, and then lookup all subtypes; # can choose to lookup root path first, or not lookup root path at all, or not lookup - # subtypes at all; the ref_lookup_strategy callable if specified should take a type_name + # subtypes at all; the lookup_strategy callable if specified should take a type_name # and value (string) arguements and return an integer of any of the below ORed together. 
# The main purpose of this is optimization; to minimize portal lookups; since for example, # currently at least, /{type}/{accession} does not work but /{accession} does; so we # currently (smaht-portal/.../ingestion_processors) use LOOKUP_ROOT_FIRST for this. # And current usage NEVER has LOOKUP_SUBTYPES turned OFF; but support just in case. + LOOKUP_UNDEFINED = 0 LOOKUP_SPECIFIED_TYPE = 0x0001 LOOKUP_ROOT = 0x0002 LOOKUP_ROOT_FIRST = 0x0004 | LOOKUP_ROOT @@ -205,23 +208,6 @@ def app(self) -> Optional[str]: def vapp(self) -> Optional[TestApp]: return self._vapp - @staticmethod - def is_lookup_specified_type(lookup_options: int) -> bool: - return (lookup_options & - Portal.LOOKUP_SPECIFIED_TYPE) == Portal.LOOKUP_SPECIFIED_TYPE - - @staticmethod - def is_lookup_root(lookup_options: int) -> bool: - return (lookup_options & Portal.LOOKUP_ROOT) == Portal.LOOKUP_ROOT - - @staticmethod - def is_lookup_root_first(lookup_options: int) -> bool: - return (lookup_options & Portal.LOOKUP_ROOT_FIRST) == Portal.LOOKUP_ROOT_FIRST - - @staticmethod - def is_lookup_subtypes(lookup_options: int) -> bool: - return (lookup_options & Portal.LOOKUP_SUBTYPES) == Portal.LOOKUP_SUBTYPES - def get(self, url: str, follow: bool = True, raw: bool = False, database: bool = False, raise_for_status: bool = False, **kwargs) -> OptionalResponse: url = self.url(url, raw, database) @@ -305,7 +291,10 @@ def ping(self) -> bool: @lru_cache(maxsize=100) def get_schema(self, schema_name: str) -> Optional[dict]: - return get_schema(self.schema_name(schema_name), portal_vapp=self.vapp, key=self.key) + try: + return get_schema(self.schema_name(schema_name), portal_vapp=self.vapp, key=self.key) + except Exception: + return None @lru_cache(maxsize=1) def get_schemas(self) -> dict: @@ -416,6 +405,218 @@ def get_schema_subtype_names(self, type_name: str) -> List[str]: return [] return schemas_super_type_map.get(type_name, []) + @function_cache(maxsize=100, serialize_key=True) + def get_identifying_paths(self, 
portal_object: dict, portal_type: Optional[Union[str, dict]] = None, + first_only: bool = False, + lookup_strategy: Optional[Union[Callable, bool]] = None) -> List[str]: + """ + Returns the list of the identifying Portal (URL) paths for the given Portal object. Favors any uuid + and identifier based paths and defavors aliases based paths (ala self.get_identifying_property_names); + no other ordering defined. Returns an empty list if no identifying properties or otherwise not found. + Note that this is a newer version of what was in portal_object_utils and just uses the ref_lookup_strategy + module directly, as it no longer needs to be exposed (to smaht-portal/ingester and smaht-submitr) and so + this is a first step toward internalizing it to structured_data/portal_utils/portal_object_utils usages. + """ + def is_lookup_specified_type(lookup_options: int) -> bool: + return (lookup_options & Portal.LOOKUP_SPECIFIED_TYPE) == Portal.LOOKUP_SPECIFIED_TYPE + def is_lookup_root(lookup_options: int) -> bool: # noqa + return (lookup_options & Portal.LOOKUP_ROOT) == Portal.LOOKUP_ROOT + def is_lookup_root_first(lookup_options: int) -> bool: # noqa + return (lookup_options & Portal.LOOKUP_ROOT_FIRST) == Portal.LOOKUP_ROOT_FIRST + def is_lookup_subtypes(lookup_options: int) -> bool: # noqa + return (lookup_options & Portal.LOOKUP_SUBTYPES) == Portal.LOOKUP_SUBTYPES + + results = [] + if not isinstance(portal_object, dict): + return results + if not (isinstance(portal_type, str) and portal_type): + if isinstance(portal_type, dict): + # It appears that the given portal_type is an actual schema dictionary. 
+ portal_type = self.schema_name(portal_type.get("title")) + if not (isinstance(portal_type, str) and portal_type): + if not (portal_type := self.get_schema_type(portal_object)): + return results + if not callable(lookup_strategy): + lookup_strategy = None if lookup_strategy is False else Portal._lookup_strategy + for identifying_property in self.get_identifying_property_names(portal_type): + if not (identifying_value := portal_object.get(identifying_property)): + continue + # The get_identifying_property_names call above ensures uuid is first if it is in the object. + # And also note that ALL schemas do in fact have identifyingProperties which do in fact have + # uuid, except for a couple "Test" ones, and (for some reason) SubmittedItem; otherwise we + # might have a special case to check the Portal object explicitly for uuid, but no need. + if identifying_property == "uuid": + # + # Note this idiosyncrasy with Portal paths: the only way we do NOT get a (HTTP 301) redirect + # is if we use the lower-case-dashed-plural based version of the path, e.g. all of these: + # + # - /d13d06c1-218e-4f61-aaf0-91f226248b3c + # - /d13d06c1-218e-4f61-aaf0-91f226248b3c/ + # - /FileFormat/d13d06c1-218e-4f61-aaf0-91f226248b3c + # - /FileFormat/d13d06c1-218e-4f61-aaf0-91f226248b3c/ + # - /files-formats/d13d06c1-218e-4f61-aaf0-91f226248b3c + # + # Will result in a (HTTP 301) redirect to: + # + # - /files-formats/d13d06c1-218e-4f61-aaf0-91f226248b3c/ + # + # Unfortunately, this code here has no reasonable way of getting that lower-case-dashed-plural + # based name (e.g. file-formats) from the schema/portal type name (e.g. FileFormat); as the + # information is contained, for this example, in the snovault.collection decorator for the + # endpoint definition in smaht-portal/.../types/file_format.py. Unfortunately merely because + # behind-the-scenes an extra round-trip HTTP request will occur, but happens automatically. 
+ # And note the disction of just using /{uuid} here rather than /{type}/{uuid} as in the else + # statement below is not really necessary; just here for emphasis that this is all that's needed. + # + # TODO + # Consider (from PR-308) writing a portal API for retrieving possible path formats. + # + if first_only is True: + results.append(f"/{portal_type}/{identifying_value}") + else: + results.append(f"/{identifying_value}") + elif isinstance(identifying_value, list): + for identifying_value_item in identifying_value: + if identifying_value_item: + results.append(f"/{portal_type}/{identifying_value_item}") + else: + lookup_options = Portal.LOOKUP_UNDEFINED + if schema := self.get_schema(portal_type): + if callable(lookup_strategy): + lookup_options, validator = lookup_strategy(self, portal_type, schema, identifying_value) + if callable(validator): + if validator(schema, identifying_property, identifying_value) is False: + continue + if pattern := schema.get("properties", {}).get(identifying_property, {}).get("pattern"): + if not re.match(pattern, identifying_value): + # If this identifying value is for a (identifying) property which has a + # pattern, and the value does NOT match the pattern, then do NOT include + # this value as an identifying path, since it cannot possibly be found. 
+ continue + if lookup_options == Portal.LOOKUP_UNDEFINED: + lookup_options = Portal.LOOKUP_DEFAULT + if is_lookup_root_first(lookup_options): + results.append(f"/{identifying_value}") + if is_lookup_specified_type(lookup_options) and portal_type: + results.append(f"/{portal_type}/{identifying_value}") + if is_lookup_root(lookup_options) and not is_lookup_root_first(lookup_options): + results.append(f"/{identifying_value}") + if is_lookup_subtypes(lookup_options): + for subtype_name in self.get_schema_subtype_names(portal_type): + results.append(f"/{subtype_name}/{identifying_value}") + if (first_only is True) and results: + return results + return results + + @function_cache(maxsize=100, serialize_key=True) + def get_identifying_path(self, portal_object: dict, portal_type: Optional[Union[str, dict]] = None, + lookup_strategy: Optional[Union[Callable, bool]] = None) -> Optional[str]: + if identifying_paths := self.get_identifying_paths(portal_object, portal_type, first_only=True, + lookup_strategy=lookup_strategy): + return identifying_paths[0] + return None + + @function_cache(maxsize=100, serialize_key=True) + def get_identifying_property_names(self, schema: Union[str, dict], + portal_object: Optional[dict] = None) -> List[str]: + """ + Returns the list of identifying property names for the given Portal schema, which may be + either a schema name or a schema object. If a Portal object is also given then restricts this + set of identifying properties to those which actually have values within this Portal object. + Favors the uuid and identifier property names and defavors the aliases property name; no other + ordering imposed. Returns empty list if no identifying properties or otherwise not found. 
+ """ + results = [] + if isinstance(schema, str): + if not (schema := self.get_schema(schema)): + return results + elif not isinstance(schema, dict): + return results + if not (identifying_properties := get_identifying_properties(schema)): + return results + identifying_properties = list(set(identifying_properties)) # paranoid dedup + identifying_properties = [*identifying_properties] # copy so as not to change schema if given + favored_identifying_properties = ["uuid", "identifier"] + defavored_identifying_properties = ["aliases"] + for favored_identifying_property in reversed(favored_identifying_properties): + if favored_identifying_property in identifying_properties: + identifying_properties.remove(favored_identifying_property) + identifying_properties.insert(0, favored_identifying_property) + for defavored_identifying_property in defavored_identifying_properties: + if defavored_identifying_property in identifying_properties: + identifying_properties.remove(defavored_identifying_property) + identifying_properties.append(defavored_identifying_property) + if isinstance(portal_object, dict): + for identifying_property in [*identifying_properties]: + if portal_object.get(identifying_property) is None: + identifying_properties.remove(identifying_property) + return identifying_properties + + @staticmethod + def _lookup_strategy(portal: Portal, type_name: str, schema: dict, value: str) -> (int, Optional[str]): + # + # Note this slightly odd situation WRT object lookups by submitted_id and accession: + # -----------------------------+-----------------------------------------------+---------------+ + # PATH | EXAMPLE | LOOKUP RESULT | + # -----------------------------+-----------------------------------------------+---------------+ + # /submitted_id | //UW_FILE-SET_COLO-829BL_HI-C_1 | NOT FOUND | + # /UnalignedReads/submitted_id | /UnalignedReads/UW_FILE-SET_COLO-829BL_HI-C_1 | FOUND | + # /SubmittedFile/submitted_id | /SubmittedFile/UW_FILE-SET_COLO-829BL_HI-C_1 | 
FOUND | + # /File/submitted_id | /File/UW_FILE-SET_COLO-829BL_HI-C_1 | NOT FOUND | + # -----------------------------+-----------------------------------------------+---------------+ + # /accession | /SMAFSFXF1RO4 | FOUND | + # /UnalignedReads/accession | /UnalignedReads/SMAFSFXF1RO4 | NOT FOUND | + # /SubmittedFile/accession | /SubmittedFile/SMAFSFXF1RO4 | NOT FOUND | + # /File/accession | /File/SMAFSFXF1RO4 | FOUND | + # -----------------------------+-----------------------------------------------+---------------+ + # + def ref_validator(schema: Optional[dict], + property_name: Optional[str], property_value: Optional[str]) -> Optional[bool]: + """ + Returns False iff objects of type represented by the given schema, CANNOT be referenced with + a Portal path using the given property name and its given property value, otherwise returns None. + + For example, if the schema is for UnalignedReads and the property name is accession, then we will + return False iff the given property value is NOT a properly formatted accession ID; otherwise, we + will return None, which indicates that the caller (e.g. dcicutils.structured_data.Portal.ref_exists) + will continue executing its default behavior, which is to check other ways in which the given type + CANNOT be referenced by the given value, i.e. it checks other identifying properties for the type + and makes sure any patterns (e.g. for submitted_id or uuid) are adhered to. + + The goal (in structured_data) being to detect if a type is being referenced in such a way that + CANNOT possibly be allowed, i.e. because none of its identifying types are in the required form, + if indeed there are any requirements. It is assumed/guaranteed the given property name is indeed an + identifying property for the given type. 
+ """ + if property_format := schema.get("properties", {}).get(property_name, {}).get("format"): + if (property_format == "accession") and (property_name == "accession"): + if not Portal._is_accession_id(property_value): + return False + return None + + DEFAULT_RESULT = (Portal.LOOKUP_DEFAULT, ref_validator) + if not value: + return DEFAULT_RESULT + if not schema: + if not isinstance(portal, Portal) or not (schema := portal.get_schema(type_name)): + return DEFAULT_RESULT + if schema_properties := schema.get("properties"): + if schema_properties.get("accession") and Portal._is_accession_id(value): + # Case: lookup by accession (only by root). + return (Portal.LOOKUP_ROOT, ref_validator) + elif schema_property_info_submitted_id := schema_properties.get("submitted_id"): + if schema_property_pattern_submitted_id := schema_property_info_submitted_id.get("pattern"): + if re.match(schema_property_pattern_submitted_id, value): + # Case: lookup by submitted_id (only by specified type). + return (Portal.LOOKUP_SPECIFIED_TYPE, ref_validator) + return DEFAULT_RESULT + + @staticmethod + def _is_accession_id(value: str) -> bool: + # This is here for now because of problems with circular dependencies. + # See: smaht-portal/.../schema_formats.py/is_accession(instance) ... 
+ return isinstance(value, str) and re.match(r"^SMA[1-9A-Z]{9}$", value) is not None + def url(self, url: str, raw: bool = False, database: bool = False) -> str: if not isinstance(url, str) or not url: return "/" @@ -516,6 +717,22 @@ def raise_for_status(self): # noqa response = TestResponseWrapper(response) return response + @staticmethod + def _create_vapp(arg: Union[TestApp, VirtualApp, PyramidRouter, str] = None) -> TestApp: + if isinstance(arg, TestApp): + return arg + elif isinstance(arg, VirtualApp): + if not isinstance(arg.wrapped_app, TestApp): + raise Exception("Portal._create_vapp VirtualApp argument error.") + return arg.wrapped_app + if isinstance(arg, PyramidRouter): + router = arg + elif isinstance(arg, str) or not arg: + router = pyramid_get_app(arg or "development.ini", "app") + else: + raise Exception("Portal._create_vapp argument error.") + return TestApp(router, {"HTTP_ACCEPT": Portal.MIME_TYPE_JSON, "REMOTE_USER": "TEST"}) + @staticmethod def create_for_testing(arg: Optional[Union[str, bool, List[dict], dict, Callable]] = None) -> Portal: if isinstance(arg, list) or isinstance(arg, dict) or isinstance(arg, Callable): @@ -547,22 +764,6 @@ def create_for_testing(arg: Optional[Union[str, bool, List[dict], dict, Callable with temporary_file(content=minimal_ini_for_testing, suffix=".ini") as ini_file: return Portal(ini_file) - @staticmethod - def _create_vapp(arg: Union[TestApp, VirtualApp, PyramidRouter, str] = None) -> TestApp: - if isinstance(arg, TestApp): - return arg - elif isinstance(arg, VirtualApp): - if not isinstance(arg.wrapped_app, TestApp): - raise Exception("Portal._create_vapp VirtualApp argument error.") - return arg.wrapped_app - if isinstance(arg, PyramidRouter): - router = arg - elif isinstance(arg, str) or not arg: - router = pyramid_get_app(arg or "development.ini", "app") - else: - raise Exception("Portal._create_vapp argument error.") - return TestApp(router, {"HTTP_ACCEPT": Portal.MIME_TYPE_JSON, "REMOTE_USER": "TEST"}) - 
@staticmethod def _create_router_for_testing(endpoints: Optional[List[Dict[str, Union[str, Callable]]]] = None) -> PyramidRouter: if isinstance(endpoints, dict): diff --git a/dcicutils/structured_data.py b/dcicutils/structured_data.py index 6d489ea17..c9b895849 100644 --- a/dcicutils/structured_data.py +++ b/dcicutils/structured_data.py @@ -11,7 +11,6 @@ from dcicutils.common import OrchestratedApp from dcicutils.data_readers import CsvReader, Excel, RowReader from dcicutils.datetime_utils import normalize_date_string, normalize_datetime_string -from dcicutils.file_utils import search_for_file from dcicutils.misc_utils import (create_dict, create_readonly_object, is_uuid, load_json_if, merge_objects, remove_empty_properties, right_trim, split_string, to_boolean, to_enum, to_float, to_integer, VirtualApp) @@ -56,7 +55,7 @@ def __init__(self, file: Optional[str] = None, portal: Optional[Union[VirtualApp remove_empty_objects_from_lists: bool = True, ref_lookup_strategy: Optional[Callable] = None, ref_lookup_nocache: bool = False, - norefs: bool = False, + norefs: bool = False, merge: bool = False, progress: Optional[Callable] = None, debug_sleep: Optional[str] = None) -> None: self._progress = progress if callable(progress) else None @@ -75,6 +74,7 @@ def __init__(self, file: Optional[str] = None, portal: Optional[Union[VirtualApp self._nrows = 0 self._autoadd_properties = autoadd if isinstance(autoadd, dict) and autoadd else None self._norefs = True if norefs is True else False + self._merge = True if merge is True else False # New merge functionality (2024-05-25) self._debug_sleep = None if debug_sleep: try: @@ -98,13 +98,13 @@ def load(file: str, portal: Optional[Union[VirtualApp, TestApp, Portal]] = None, remove_empty_objects_from_lists: bool = True, ref_lookup_strategy: Optional[Callable] = None, ref_lookup_nocache: bool = False, - norefs: bool = False, + norefs: bool = False, merge: bool = False, progress: Optional[Callable] = None, debug_sleep: Optional[str] = 
None) -> StructuredDataSet: return StructuredDataSet(file=file, portal=portal, schemas=schemas, autoadd=autoadd, order=order, prune=prune, remove_empty_objects_from_lists=remove_empty_objects_from_lists, ref_lookup_strategy=ref_lookup_strategy, ref_lookup_nocache=ref_lookup_nocache, - norefs=norefs, progress=progress, debug_sleep=debug_sleep) + norefs=norefs, merge=merge, progress=progress, debug_sleep=debug_sleep) def validate(self, force: bool = False) -> None: def data_without_deleted_properties(data: dict) -> dict: @@ -208,14 +208,6 @@ def upload_files(self) -> List[str]: result.append({"type": type_name, "file": file_name}) return result - def upload_files_located(self, - location: Union[str, Optional[List[str]]] = None, recursive: bool = False) -> List[str]: - upload_files = copy.deepcopy(self.upload_files) - for upload_file in upload_files: - if file_path := search_for_file(upload_file["file"], location, recursive=recursive, single=True): - upload_file["path"] = file_path - return upload_files - @property def nrows(self) -> int: return self._nrows @@ -350,18 +342,23 @@ def get_counts() -> Tuple[int, int]: def _load_json_file(self, file: str) -> None: with open(file) as f: - file_json = json.load(f) - schema_inferred_from_file_name = Schema.type_name(file) - if self._portal.get_schema(schema_inferred_from_file_name) is not None: + data = json.load(f) + if ((schema_name_inferred_from_file_name := Schema.type_name(file)) and + (self._portal.get_schema(schema_name_inferred_from_file_name) is not None)): # noqa # If the JSON file name looks like a schema name then assume it # contains an object or an array of object of that schema type. 
- self._add(Schema.type_name(file), file_json) - elif isinstance(file_json, dict): + if self._merge: # New merge functionality (2024-05-25) + data = self._merge_with_existing_portal_object(data, schema_name_inferred_from_file_name) + self._add(Schema.type_name(file), data) + elif isinstance(data, dict): # Otherwise if the JSON file name does not look like a schema name then # assume it a dictionary where each property is the name of a schema, and # which (each property) contains a list of object of that schema type. - for schema_name in file_json: - self._add(schema_name, file_json[schema_name]) + for schema_name in data: + item = data[schema_name] + if self._merge: # New merge functionality (2024-05-25) + item = self._merge_with_existing_portal_object(item, schema_name) + self._add(schema_name, item) def _load_reader(self, reader: RowReader, type_name: str) -> None: schema = None @@ -383,11 +380,13 @@ def _load_reader(self, reader: RowReader, type_name: str) -> None: structured_row_template.set_value(structured_row, column_name, value, reader.file, reader.row_number) if self._autoadd_properties: self._add_properties(structured_row, self._autoadd_properties, schema) + if self._merge: # New merge functionality (2024-05-25) + structured_row = self._merge_with_existing_portal_object(structured_row, schema_name) if (prune_error := self._prune_structured_row(structured_row)) is not None: self._note_error({"src": create_dict(type=schema_name, row=reader.row_number), "error": prune_error}, "validation") else: - self._add(type_name, structured_row) + self._add(type_name, structured_row) # TODO: why type_name and not schema_name? 
if self._progress: self._progress({ PROGRESS.LOAD_ITEM: self._nrows, @@ -428,6 +427,18 @@ def _add_properties(self, structured_row: dict, properties: dict, schema: Option if name not in structured_row and (not schema or schema.data.get("properties", {}).get(name)): structured_row[name] = properties[name] + def _merge_with_existing_portal_object(self, portal_object: dict, portal_type: str) -> dict: + """ + Given a Portal object (presumably/in-practice from the given metadata), if there is + an existing Portal item, identified by the identifying properties for the given object, + then merges the given object into the existing one and returns the result; otherwise + just returns the given object. Note that the given object may be CHANGED in place. + """ + for identifying_path in self._portal.get_identifying_paths(portal_object, portal_type): + if existing_portal_object := self._portal.get_metadata(identifying_path, raw=True, raise_exception=False): + return merge_objects(existing_portal_object, portal_object, primitive_lists=True) + return portal_object + def _is_ref_lookup_specified_type(ref_lookup_flags: int) -> bool: return (ref_lookup_flags & Portal.LOOKUP_SPECIFIED_TYPE) == Portal.LOOKUP_SPECIFIED_TYPE diff --git a/dcicutils/submitr/ref_lookup_strategy.py b/dcicutils/submitr/ref_lookup_strategy.py index 55c4d2827..b0dc69261 100644 --- a/dcicutils/submitr/ref_lookup_strategy.py +++ b/dcicutils/submitr/ref_lookup_strategy.py @@ -2,39 +2,45 @@ from typing import Optional from dcicutils.structured_data import Portal +# This function is exposed (to smaht-portal/ingester and smaht-submitr) only because previously, +# before it was fully developed, we had differing behaviors; but this has been unified; so this +# could now be internalized to structured_data, and portal_object_utils (TODO). + def ref_lookup_strategy(portal: Portal, type_name: str, schema: dict, value: str) -> (int, Optional[str]): # - # FYI: Note this situation WRT object lookups ... 
- # - # /{submitted_id} # NOT FOUND - # /UnalignedReads/{submitted_id} # OK - # /SubmittedFile/{submitted_id} # OK - # /File/{submitted_id} # NOT FOUND - # - # /{accession} # OK - # /UnalignedReads/{accession} # NOT FOUND - # /SubmittedFile/{accession} # NOT FOUND - # /File/{accession} # OK + # Note this slight odd situation WRT object lookups by submitted_id and accession: + # -----------------------------+-----------------------------------------------+---------------+ + # PATH | EXAMPLE | LOOKUP RESULT | + # -----------------------------+-----------------------------------------------+---------------+ + # /submitted_id | //UW_FILE-SET_COLO-829BL_HI-C_1 | NOT FOUND | + # /UnalignedReads/submitted_id | /UnalignedReads/UW_FILE-SET_COLO-829BL_HI-C_1 | FOUND | + # /SubmittedFile/submitted_id | /SubmittedFile/UW_FILE-SET_COLO-829BL_HI-C_1 | FOUND | + # /File/submitted_id | /File/UW_FILE-SET_COLO-829BL_HI-C_1 | NOT FOUND | + # -----------------------------+-----------------------------------------------+---------------+ + # /accession | /SMAFSFXF1RO4 | FOUND | + # /UnalignedReads/accession | /UnalignedReads/SMAFSFXF1RO4 | NOT FOUND | + # /SubmittedFile/accession | /SubmittedFile/SMAFSFXF1RO4 | NOT FOUND | + # /File/accession | /File/SMAFSFXF1RO4 | FOUND | + # -----------------------------+-----------------------------------------------+---------------+ # def ref_validator(schema: Optional[dict], property_name: Optional[str], property_value: Optional[str]) -> Optional[bool]: """ - Returns False iff the type represented by the given schema, can NOT be referenced by - the given property name with the given property value, otherwise returns None. + Returns False iff objects of type represented by the given schema, CANNOT be referenced with + a Portal path using the given property name and its given property value, otherwise returns None. 
- For example, if the schema is for the UnalignedReads type and the property name - is accession, then we will return False iff the given property value is NOT a properly - formatted accession ID. Otherwise, we will return None, which indicates that the - caller (in dcicutils.structured_data.Portal.ref_exists) will continue executing - its default behavior, which is to check other ways in which the given type can NOT - be referenced by the given value, i.e. it checks other identifying properties for - the type and makes sure any patterns (e.g. for submitted_id or uuid) are ahered to. + For example, if the schema is for UnalignedReads and the property name is accession, then we will + return False iff the given property value is NOT a properly formatted accession ID; otherwise, we + will return None, which indicates that the caller (e.g. dcicutils.structured_data.Portal.ref_exists) + will continue executing its default behavior, which is to check other ways in which the given type + CANNOT be referenced by the given value, i.e. it checks other identifying properties for the type + and makes sure any patterns (e.g. for submitted_id or uuid) are adhered to. - The goal (in structured_data) being to detect if a type is being referenced in such - a way that cannot possibly be allowed, i.e. because none of its identifying types - are in the required form (if indeed there any requirements). Note that it is guaranteed - that the given property name is indeed an identifying property for the given type. + The goal (in structured_data) being to detect if a type is being referenced in such a way that + CANNOT possibly be allowed, i.e. because none of its identifying types are in the required form, + if indeed there are any requirements. It is assumed/guaranteed the given property name is indeed an + identifying property for the given type. 
""" if property_format := schema.get("properties", {}).get(property_name, {}).get("format"): if (property_format == "accession") and (property_name == "accession"): @@ -62,6 +68,6 @@ def ref_validator(schema: Optional[dict], # This is here for now because of problems with circular dependencies. -# See: smaht-portal/.../schema_formats.py +# See: smaht-portal/.../schema_formats.py/is_accession(instance) ... def _is_accession_id(value: str) -> bool: return isinstance(value, str) and re.match(r"^SMA[1-9A-Z]{9}$", value) is not None diff --git a/pyproject.toml b/pyproject.toml index c7f564026..4ea9c75b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "dcicutils" -version = "8.9.0" +version = "8.10.0" description = "Utility package for interacting with the 4DN Data Portal and other 4DN resources" authors = ["4DN-DCIC Team "] license = "MIT" diff --git a/test/test_misc_utils.py b/test/test_misc_utils.py index 574f758f5..0f2af3c70 100644 --- a/test/test_misc_utils.py +++ b/test/test_misc_utils.py @@ -3684,6 +3684,22 @@ def test_merge_objects_8(): assert target == expected +def test_merge_objects_9(): + target = {"abc": [1, 2, 3]} + source = {"abc": [4, 5]} + expected = {"abc": [4, 5]} + result = merge_objects(target, source, primitive_lists=True, copy=True) + assert result == expected + assert id(target) != id(result) + + target = {"abc": [1, 2, 3]} + source = {"abc": [4, 5]} + expected = {"abc": [4, 5, 3]} + result = merge_objects(target, source, primitive_lists=False, copy=False) + assert result == expected + assert id(target) == id(result) + + def test_to_integer(): assert to_integer("17") == 17 assert to_integer("17.0") == 17 diff --git a/test/test_portal_object_utils.py b/test/test_portal_object_utils.py index 18e632620..9ed1868f3 100644 --- a/test/test_portal_object_utils.py +++ b/test/test_portal_object_utils.py @@ -600,8 +600,7 @@ def test_compare(): assert portal_object.types == ["IngestionSubmission", "Item"] assert not 
portal_object.schema assert not portal_object.identifying_properties - assert portal_object._get_identifying_paths() == [f"/{TEST_OBJECT_DATABASE_JSON['@type'][0]}/{TEST_OBJECT_UUID}", - f"/{TEST_OBJECT_UUID}"] + assert portal_object._get_identifying_paths() == [f"/{TEST_OBJECT_UUID}"] assert portal_object.compare(TEST_OBJECT_DATABASE_JSON) == ({}, 0) portal_object_copy = portal_object.copy() @@ -628,10 +627,9 @@ def test_compare(): assert portal_object_found.schema == TEST_OBJECT_SCHEMA_JSON assert portal_object_found.identifying_properties == ["uuid", "aliases"] assert portal_object_found._get_identifying_paths() == ( - [f"/{TEST_OBJECT_DATABASE_JSON['@type'][0]}/{TEST_OBJECT_UUID}", - f"/{TEST_OBJECT_UUID}", - "/IngestionSubmission/foo", "/foo", - "/IngestionSubmission/bar", "/bar"]) + [f"/{TEST_OBJECT_UUID}", + "/IngestionSubmission/foo", + "/IngestionSubmission/bar"]) portal_object_copy = portal_object.copy() portal_object_copy.data["xyzzy"] = 123