From d9c0b773f0c7b07f2a05c29095289b569f8ddaaa Mon Sep 17 00:00:00 2001
From: Eva
Date: Fri, 18 Oct 2024 12:17:26 +0100
Subject: [PATCH] made ruff compliant

---
 docs/conf.py                                  |   3 +-
 ...02-switched-to-using-a-python-skeleton.rst |   2 +-
 docs/images/document-generation-timeline.svg  |   2 +-
 pyproject.toml                                |   9 +-
 src/event_model/__init__.py                   | 145 ++++++++++--------
 .../documents/generate/__main__.py            |   3 +-
 .../documents/generate/typeddict_to_schema.py |   4 +-
 src/event_model/schemas/datum.json            |   2 +-
 src/event_model/schemas/datum_page.json       |   2 +-
 src/event_model/schemas/event.json            |   2 +-
 src/event_model/schemas/event_descriptor.json |   2 +-
 src/event_model/schemas/event_page.json       |   2 +-
 src/event_model/schemas/resource.json         |   2 +-
 src/event_model/schemas/run_start.json        |   2 +-
 src/event_model/schemas/run_stop.json         |   2 +-
 src/event_model/schemas/stream_datum.json     |   2 +-
 src/event_model/schemas/stream_resource.json  |   2 +-
 src/event_model/tests/test_run_router.py      |   2 +-
 .../tests/test_schema_generation.py           |   4 +-
 19 files changed, 103 insertions(+), 91 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 0da4ca5b..88d3a5f6 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -51,7 +51,7 @@
     "sphinx_copybutton",
     # For the card element
     "sphinx_design",
-    "sphinx_design",
+    "sphinx_design",
     "sphinx.ext.autosummary",
     "sphinx.ext.mathjax",
     "sphinx.ext.githubpages",
@@ -59,7 +59,6 @@
     "sphinx_copybutton",
     "IPython.sphinxext.ipython_directive",
     "IPython.sphinxext.ipython_console_highlighting",
-    # So we can write markdown files
    "myst_parser",
 ]
 
diff --git a/docs/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst b/docs/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst
index 996eb13f..9ed49e9b 100644
--- a/docs/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst
+++ b/docs/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst
@@ -29,4 +29,4 @@ We will use the
 Consequences
 ------------
 
-CI workflows will change, even jobs providing the same feature as the older checks.
\ No newline at end of file
+CI workflows will change, even jobs providing the same feature as the older checks.

diff --git a/docs/images/document-generation-timeline.svg b/docs/images/document-generation-timeline.svg
index fb06851c..edfdb81e 100755
--- a/docs/images/document-generation-timeline.svg
+++ b/docs/images/document-generation-timeline.svg
@@ -1 +1 @@
[Single-line SVG markup elided; the only change is the addition of a trailing
newline. The figure's text covers three timelines, "Example 1: Simplest
Possible Run", "Example 2: A Simple Scan", and "Example 3: Asynchronously
Monitor During a Scan", annotated with the Run Start, Event Descriptor,
Event, and Run Stop documents.]

diff --git a/pyproject.toml b/pyproject.toml
index 775960d6..4b3c5e23 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -73,7 +73,7 @@ ignore_missing_imports = true # Ignore missing stubs in imported modules
 # Run pytest with all our checkers, and don't spam us with massive tracebacks on error
 addopts = """
     --tb=native -vv
-    --cov=event_model --cov-report term --cov-report xml:cov.xml
+    --cov=src/event_model --cov-report term --cov-report xml:cov.xml
     """
 # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings
 filterwarnings = "error"
@@ -106,8 +106,8 @@ allowlist_externals =
     sphinx-autobuild
 commands =
     pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs}
-    type-checking: mypy event_model {posargs}
-    tests: pytest --cov=event_model --cov-report term --cov-report xml:cov.xml {posargs}
+    type-checking: mypy src/event_model {posargs}
+    tests: pytest --cov=src/event_model --cov-report term --cov-report xml:cov.xml {posargs}
     docs: sphinx-{posargs:build -E --keep-going} -T docs build/html
 """
 
@@ -128,5 +128,4 @@ lint.select = [
 [tool.ruff.lint.per-file-ignores]
 # By default, private member access is allowed in tests
 # See https://github.com/DiamondLightSource/python-copier-template/issues/154
-# Remove this line to forbid private member access in tests
-"tests/**/*" = ["SLF001"]
+"src/event_model/tests/**" = ["SLF001"]

diff --git a/src/event_model/__init__.py b/src/event_model/__init__.py
index 7392cc88..3a255c7e 100644
--- a/src/event_model/__init__.py
+++ b/src/event_model/__init__.py
@@ -148,10 +148,10 @@ def __init__(self, *, emit: Optional[Callable] = None) -> None:
         try:
             # Does this function accept two positional arguments?
             sig.bind(None, None)
-        except TypeError:
+        except TypeError as error:
             raise ValueError(
                 "emit must accept two positional arguments, name and doc"
-            )
+            ) from error
         # Stash a weak reference to `emit`.
         if inspect.ismethod(emit):
             self._emit_ref = weakref.WeakMethod(emit)
@@ -295,7 +295,8 @@ def bulk_events(self, doc: dict) -> None:
         # Do not modify this in a subclass. Use event_page.
         warnings.warn(
             "The document type 'bulk_events' has been deprecated in favor of "
-            "'event_page', whose structure is a transpose of 'bulk_events'."
+            "'event_page', whose structure is a transpose of 'bulk_events'.",
+            stacklevel=2,
         )
         for page in bulk_events_to_event_pages(doc):
             self.event_page(page)
+ "'datum_page', whose structure is a transpose of 'bulk_datum'.", + stacklevel=2, ) self.datum_page(bulk_datum_to_datum_page(doc)) @@ -317,7 +319,7 @@ class SingleRunDocumentRouter(DocumentRouter): def __init__(self) -> None: super().__init__() self._start_doc: Optional[dict] = None - self._descriptors: dict = dict() + self._descriptors: dict = {} def __call__( self, name: str, doc: dict, validate: bool = False @@ -653,26 +655,29 @@ def __init__( stream_resource_cache: Optional[dict] = None, stream_datum_cache: Optional[dict] = None, inplace: Optional[bool] = None, - retry_intervals: List = [ - 0.001, - 0.002, - 0.004, - 0.008, - 0.016, - 0.032, - 0.064, - 0.128, - 0.256, - 0.512, - 1.024, - ], + retry_intervals: Optional[List[float]] = None, ) -> None: + if retry_intervals is None: + retry_intervals = [ + 0.001, + 0.002, + 0.004, + 0.008, + 0.016, + 0.032, + 0.064, + 0.128, + 0.256, + 0.512, + 1.024, + ] if inplace is None: self._inplace = True warnings.warn( "'inplace' argument not specified. It is recommended to " "specify True or False. In future releases, 'inplace' " - "will default to False." + "will default to False.", + stacklevel=2, ) else: self._inplace = inplace @@ -685,11 +690,11 @@ def __init__( ) try: self._coercion_func = _coercion_registry[coerce] - except KeyError: + except KeyError as error: raise EventModelKeyError( f"The option coerce={coerce!r} was given to event_model.Filler. " f"The valid options are {set(_coercion_registry)}." - ) + ) from error self._coerce = coerce # See comments on coerision functions above for the use of @@ -706,6 +711,7 @@ def __init__( "In a future release of event-model, the argument `include` " "will be removed from Filler.", DeprecationWarning, + stacklevel=2, ) self.include = include if exclude is not None: @@ -713,6 +719,7 @@ def __init__( "In a future release of event-model, the argument `exclude` " "will be removed from Filler.", DeprecationWarning, + stacklevel=2, ) self.exclude = exclude self.root_map = root_map or {} @@ -757,21 +764,21 @@ def __eq__(self, other: Any) -> bool: ) def __getstate__(self) -> dict: - return dict( - inplace=self._inplace, - coercion_func=self._coerce, - handler_registry=self._unpatched_handler_registry, - include=self.include, - exclude=self.exclude, - root_map=self.root_map, - handler_cache=self._handler_cache, - resource_cache=self._resource_cache, - datum_cache=self._datum_cache, - descriptor_cache=self._descriptor_cache, - stream_resource_cache=self._stream_resource_cache, - stream_datum_cache=self._stream_datum_cache, - retry_intervals=self.retry_intervals, - ) + return { + "inplace": self._inplace, + "coercion_func": self._coerce, + "handler_registry": self._unpatched_handler_registry, + "include": self.include, + "exclude": self.exclude, + "root_map": self.root_map, + "handler_cache": self._handler_cache, + "resource_cache": self._resource_cache, + "datum_cache": self._datum_cache, + "descriptor_cache": self._descriptor_cache, + "stream_resource_cache": self._stream_resource_cache, + "stream_datum_cache": self._stream_datum_cache, + "retry_intervals": self.retry_intervals, + } def __setstate__(self, d: dict) -> None: self._inplace = d["inplace"] @@ -1513,7 +1520,7 @@ def __init__( # Map RunStart UID to RunStart document. This is used to send # RunStart documents to subfactory callbacks. - self._start_to_start_doc: dict = dict() + self._start_to_start_doc: dict = {} # Map RunStart UID to the list EventDescriptor. This is used to # facilitate efficient cleanup of the caches above. 
@@ -1573,7 +1580,8 @@ def start(self, start_doc: RunStart) -> None:
                 warnings.warn(
                     DOCS_PASSED_IN_1_14_0_WARNING.format(
                         callback=callback, name="start", err=err
-                    )
+                    ),
+                    stacklevel=2,
                 )
                 raise err
         self._factory_cbs_by_start[uid].extend(callbacks)
@@ -1609,7 +1617,8 @@ def descriptor(self, descriptor_doc: EventDescriptor) -> None:
                 warnings.warn(
                     DOCS_PASSED_IN_1_14_0_WARNING.format(
                         callback=callback, name="start", err=err
-                    )
+                    ),
+                    stacklevel=2,
                 )
                 raise err
             try:
@@ -1618,7 +1627,8 @@
                 warnings.warn(
                     DOCS_PASSED_IN_1_14_0_WARNING.format(
                         callback=callback, name="descriptor", err=err
-                    )
+                    ),
+                    stacklevel=2,
                 )
                 raise err
 
@@ -1639,12 +1649,12 @@ def datum_page(self, doc: DatumPage) -> None:
         resource_uid = doc["resource"]
         try:
             start_uid = self._resources[resource_uid]
-        except KeyError:
+        except KeyError as error:
             if resource_uid not in self._unlabeled_resources:
                 raise UnresolvableForeignKeyError(
                     resource_uid,
                     f"DatumPage refers to unknown Resource uid {resource_uid}",
-                )
+                ) from error
             # Old Resources do not have a reference to a RunStart document,
             # so in turn we cannot immediately tell which run these datum
             # documents belong to.
@@ -1855,7 +1865,7 @@ def __call__(self, datum_kwargs: Dict[str, Any], validate: bool = True) -> Datum
         doc = Datum(
             resource=resource_uid,
             datum_kwargs=datum_kwargs,
-            datum_id="{}/{}".format(resource_uid, next(self.counter)),
+            datum_id=f"{resource_uid}/{next(self.counter)}",
         )
         if validate:
             schema_validators[DocumentNames.datum].validate(doc)
@@ -1887,9 +1897,7 @@ def __call__(self, datum_kwargs: dict, validate: bool = True) -> DatumPage:
         doc = DatumPage(
             resource=resource_uid,
             datum_kwargs=datum_kwargs,
-            datum_id=[
-                "{}/{}".format(resource_uid, next(self.counter)) for _ in range(N)
-            ],
+            datum_id=[f"{resource_uid}/{next(self.counter)}" for _ in range(N)],
         )
         if validate:
             schema_validators[DocumentNames.datum_page].validate(doc)
@@ -2045,6 +2053,7 @@ def compose_stream_datum(
     warnings.warn(
         "compose_stream_datum() will be removed in the minor version.",
         DeprecationWarning,
+        stacklevel=2,
     )
     return ComposeStreamDatum(stream_resource, counter)(
         seq_nums,
@@ -2146,8 +2155,9 @@ def __call__(
     ) -> RunStop:
         if self.poison_pill:
             raise EventModelError(
-                "Already composed a RunStop document for run "
-                "{!r}.".format(self.start["uid"])
+                "Already composed a RunStop document for run " "{!r}.".format(
+                    self.start["uid"]
+                )
             )
         self.poison_pill.append(object())
         if uid is None:
@@ -2190,7 +2200,7 @@ def compose_stop(
 
 def length_of_value(dictionary: Dict[str, List], error_msg: str) -> Optional[int]:
     length = None
-    for k, v in dictionary.items():
+    for _, v in dictionary.items():
         v_len = len(v)
         if length is not None:
             if v_len != length:
@@ -2276,8 +2286,9 @@ def __call__(
             )
         if set(filled) - set(data):
             raise EventModelValidationError(
-                "Keys in event['filled'] {} must be a subset of those in "
-                "event['data'] {}".format(filled.keys(), data.keys())
+                f"Keys in event['filled'] {filled.keys()} "
+                "must be a subset of those in "
+                f"event['data'] {data.keys()}"
             )
         self.event_counters[self.descriptor["name"]] += len(seq_num)
         return doc
@@ -2378,8 +2389,9 @@ def __call__(
             )
         if set(filled) - set(data):
             raise EventModelValidationError(
-                "Keys in event['filled'] {} must be a subset of those in "
-                "event['data'] {}".format(filled.keys(), data.keys())
+                f"Keys in event['filled'] {filled.keys()} "
+                "must be a subset of those in "
+                f"event['data'] {data.keys()}"
             )
         self.event_counters[self.descriptor["name"]] = seq_num + 1
         return doc
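
Note (illustrative sketch, not part of the patch): the hunks above apply
three more mechanical rewrites: `dict(key=value)` calls become dict literals
(flake8-comprehensions C408), `str.format()` calls become f-strings
(pyupgrade UP032), and the unused loop variable `k` is renamed to `_`
(flake8-bugbear B007). Sketched with hypothetical values:

    import itertools

    resource_uid, counter = "ab12cd", itertools.count()

    # C408: a dict literal avoids a function call and reads more directly.
    doc = {"resource": resource_uid, "datum_kwargs": {}}

    # UP032: f-strings replace "{}/{}".format(resource_uid, next(counter)).
    datum_id = f"{resource_uid}/{next(counter)}"

    # B007: underscore-name a loop variable the body never uses.
    pages = {"time": [1.0, 2.0], "uid": ["a", "b"]}
    lengths = [len(v) for _, v in pages.items()]

Iterating `pages.values()` would avoid the placeholder entirely; the patch
keeps `.items()`, which B007 also accepts once the unused name is `_`.
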
@@ -2468,10 +2480,10 @@ def __call__(
         if validate:
             if name in self.streams and self.streams[name] != set(data_keys):
                 raise EventModelValidationError(
-                    "A descriptor with the name {} has already been composed with "
-                    "data_keys {}. The requested data_keys were {}. All "
-                    "descriptors in a given stream must have the same "
-                    "data_keys.".format(name, self.streams[name], set(data_keys))
+                    f"A descriptor with the name {name} has already been composed with "
+                    f"data_keys {self.streams[name]}. The requested data_keys were "
+                    f"{set(data_keys)}. All descriptors in a given stream must have "
+                    "the same data_keys."
                 )
             schema_validators[DocumentNames.descriptor].validate(doc)
 
@@ -2826,20 +2838,20 @@ def merge_event_pages(event_pages: Iterable[EventPage]) -> EventPage:
     if len(pages) == 1:
         return pages[0]
 
-    doc = dict(
-        descriptor=pages[0]["descriptor"],
-        seq_num=list(
+    doc = {
+        "descriptor": pages[0]["descriptor"],
+        "seq_num": list(
             itertools.chain.from_iterable([page["seq_num"] for page in pages])
         ),
-        time=list(itertools.chain.from_iterable([page["time"] for page in pages])),
-        uid=list(itertools.chain.from_iterable([page["uid"] for page in pages])),
-        data={
+        "time": list(itertools.chain.from_iterable([page["time"] for page in pages])),
+        "uid": list(itertools.chain.from_iterable([page["uid"] for page in pages])),
+        "data": {
             key: list(
                 itertools.chain.from_iterable([page["data"][key] for page in pages])
             )
             for key in pages[0]["data"].keys()
         },
-        timestamps={
+        "timestamps": {
             key: list(
                 itertools.chain.from_iterable(
                     [page["timestamps"][key] for page in pages]
@@ -2847,13 +2859,13 @@
                 )
             )
             for key in pages[0]["data"].keys()
         },
-        filled={
+        "filled": {
             key: list(
                 itertools.chain.from_iterable([page["filled"][key] for page in pages])
             )
             for key in pages[0]["filled"].keys()
         },
-    )
+    }
     return cast(EventPage, doc)
 
@@ -3001,6 +3013,7 @@ def bulk_datum_to_datum_page(bulk_datum: dict) -> DatumPage:
     Note: There is only one known usage of BulkDatum "in the wild", and the
     BulkDatum layout has been deprecated in favor of DatumPage.
     """
+
     datum_page = DatumPage(
         datum_id=bulk_datum["datum_ids"],
         resource=bulk_datum["resource"],
""" + datum_page = DatumPage( datum_id=bulk_datum["datum_ids"], resource=bulk_datum["resource"], diff --git a/src/event_model/documents/generate/__main__.py b/src/event_model/documents/generate/__main__.py index b3915d58..f457bec2 100644 --- a/src/event_model/documents/generate/__main__.py +++ b/src/event_model/documents/generate/__main__.py @@ -3,6 +3,7 @@ from event_model.documents import ALL_DOCUMENTS from event_model.documents.generate.typeddict_to_schema import typeddict_to_schema + def main(): schema_dir = Path(__file__).parent.parent.parent / "schemas" for document in ALL_DOCUMENTS: @@ -10,4 +11,4 @@ def main(): if __name__ == "__main__": - main() + main() diff --git a/src/event_model/documents/generate/typeddict_to_schema.py b/src/event_model/documents/generate/typeddict_to_schema.py index c153bc3c..45bf11c4 100644 --- a/src/event_model/documents/generate/typeddict_to_schema.py +++ b/src/event_model/documents/generate/typeddict_to_schema.py @@ -15,7 +15,7 @@ def sort_alphabetically(schema: Dict) -> Dict: """Sorts the schema alphabetically by key name, exchanging the properties dicts for OrderedDicts""" - schema = OrderedDict(sorted(list(schema.items()), key=lambda x: x[0])) + schema = OrderedDict(sorted(schema.items(), key=lambda x: x[0])) return schema @@ -36,7 +36,7 @@ def sort_schema(document_schema: Dict) -> Dict: assert isinstance(document_schema, dict) document_schema = OrderedDict( sorted( - list(document_schema.items()), + document_schema.items(), key=lambda x: SortOrder.get(x[0], len(SortOrder)), ) ) diff --git a/src/event_model/schemas/datum.json b/src/event_model/schemas/datum.json index ac0a9999..a85c6cf8 100644 --- a/src/event_model/schemas/datum.json +++ b/src/event_model/schemas/datum.json @@ -25,4 +25,4 @@ "resource" ], "additionalProperties": false -} \ No newline at end of file +} diff --git a/src/event_model/schemas/datum_page.json b/src/event_model/schemas/datum_page.json index 7d34f988..768adb26 100644 --- a/src/event_model/schemas/datum_page.json +++ b/src/event_model/schemas/datum_page.json @@ -37,4 +37,4 @@ "resource" ], "additionalProperties": false -} \ No newline at end of file +} diff --git a/src/event_model/schemas/event.json b/src/event_model/schemas/event.json index bc175b93..af2f6eff 100644 --- a/src/event_model/schemas/event.json +++ b/src/event_model/schemas/event.json @@ -58,4 +58,4 @@ "uid" ], "additionalProperties": false -} \ No newline at end of file +} diff --git a/src/event_model/schemas/event_descriptor.json b/src/event_model/schemas/event_descriptor.json index 91968ef1..e453536a 100644 --- a/src/event_model/schemas/event_descriptor.json +++ b/src/event_model/schemas/event_descriptor.json @@ -278,4 +278,4 @@ } }, "additionalProperties": false -} \ No newline at end of file +} diff --git a/src/event_model/schemas/event_page.json b/src/event_model/schemas/event_page.json index 7c987427..0469c607 100644 --- a/src/event_model/schemas/event_page.json +++ b/src/event_model/schemas/event_page.json @@ -81,4 +81,4 @@ "uid" ], "additionalProperties": false -} \ No newline at end of file +} diff --git a/src/event_model/schemas/resource.json b/src/event_model/schemas/resource.json index ccbb4924..1a3b6d9d 100644 --- a/src/event_model/schemas/resource.json +++ b/src/event_model/schemas/resource.json @@ -51,4 +51,4 @@ "uid" ], "additionalProperties": false -} \ No newline at end of file +} diff --git a/src/event_model/schemas/run_start.json b/src/event_model/schemas/run_start.json index 852e44b1..d698d5cc 100644 --- 
diff --git a/src/event_model/schemas/run_start.json b/src/event_model/schemas/run_start.json
index 852e44b1..d698d5cc 100644
--- a/src/event_model/schemas/run_start.json
+++ b/src/event_model/schemas/run_start.json
@@ -357,4 +357,4 @@
         }
     },
     "additionalProperties": false
-}
\ No newline at end of file
+}

diff --git a/src/event_model/schemas/run_stop.json b/src/event_model/schemas/run_stop.json
index b0d64458..f401bcea 100644
--- a/src/event_model/schemas/run_stop.json
+++ b/src/event_model/schemas/run_stop.json
@@ -63,4 +63,4 @@
         }
     },
     "additionalProperties": false
-}
\ No newline at end of file
+}

diff --git a/src/event_model/schemas/stream_datum.json b/src/event_model/schemas/stream_datum.json
index 75b53ed7..35204f6a 100644
--- a/src/event_model/schemas/stream_datum.json
+++ b/src/event_model/schemas/stream_datum.json
@@ -58,4 +58,4 @@
         "uid"
     ],
     "additionalProperties": false
-}
\ No newline at end of file
+}

diff --git a/src/event_model/schemas/stream_resource.json b/src/event_model/schemas/stream_resource.json
index 3eb93b23..440768c3 100644
--- a/src/event_model/schemas/stream_resource.json
+++ b/src/event_model/schemas/stream_resource.json
@@ -42,4 +42,4 @@
         "uri"
     ],
     "additionalProperties": false
-}
\ No newline at end of file
+}

diff --git a/src/event_model/tests/test_run_router.py b/src/event_model/tests/test_run_router.py
index b25b6391..29a269bb 100644
--- a/src/event_model/tests/test_run_router.py
+++ b/src/event_model/tests/test_run_router.py
@@ -340,7 +340,7 @@ def exception_callback_factory(start_doc_name, start_doc):
     rr("start", start_doc)
 
     descriptor_doc = {"run_start": "abcdef", "uid": "ghijkl"}
-    with pytest.raises(Exception):
+    with pytest.raises(UserWarning):
         rr("descriptor", descriptor_doc)
     assert rr._start_to_descriptors["abcdef"] == ["ghijkl"]

diff --git a/src/event_model/tests/test_schema_generation.py b/src/event_model/tests/test_schema_generation.py
index 19a1fb8e..45f6a766 100644
--- a/src/event_model/tests/test_schema_generation.py
+++ b/src/event_model/tests/test_schema_generation.py
@@ -23,10 +23,10 @@ def test_generated_json_matches_typed_dict(typed_dict_class, tmpdir):
     with open(generated_file_path) as generated_file, open(old_file_path) as old_file:
         try:
             assert json.load(generated_file) == json.load(old_file)
-        except AssertionError:
+        except AssertionError as error:
             raise Exception(
                 f"`{typed_dict_class.__name__}` can generate a json schema, but "
                 f"it doesn't match the schema in `{SCHEMA_PATH}`. Did you forget "
                 "to run `python event_model/documents/generate` after changes "
                 f"to `{typed_dict_class.__name__}`?"
-            )
+            ) from error
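
Note (illustrative sketch, not part of the patch): the test change from
`pytest.raises(Exception)` to `pytest.raises(UserWarning)` is flake8-bugbear
B017: asserting that any exception at all is raised will also pass when an
unrelated bug throws, so the narrowest expected type should be named.
Catching `UserWarning` with `pytest.raises` works here because the project
sets `filterwarnings = "error"` in pyproject.toml (see the hunk above), which
turns warnings into raised exceptions. A self-contained sketch with a
hypothetical function:

    import warnings

    import pytest

    def emit_orphan_descriptor_warning():
        warnings.warn("descriptor received before its run_start document")

    def test_expects_the_narrow_type():
        with warnings.catch_warnings():
            # Local stand-in for the project-wide filterwarnings = "error".
            warnings.simplefilter("error")
            with pytest.raises(UserWarning):  # B017: not pytest.raises(Exception)
                emit_orphan_descriptor_warning()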