Commit d9c0b77

made ruff compliant

evalott100 committed Oct 18, 2024
1 parent 89fd82d commit d9c0b77
Showing 19 changed files with 103 additions and 91 deletions.
3 changes: 1 addition & 2 deletions docs/conf.py
@@ -51,15 +51,14 @@
"sphinx_copybutton",
# For the card element
"sphinx_design",
"sphinx_design",
"sphinx_design",
"sphinx.ext.autosummary",
"sphinx.ext.mathjax",
"sphinx.ext.githubpages",
"matplotlib.sphinxext.plot_directive",
"sphinx_copybutton",
"IPython.sphinxext.ipython_directive",
"IPython.sphinxext.ipython_console_highlighting",

# So we can write markdown files
"myst_parser",
]
@@ -29,4 +29,4 @@ We will use the
 Consequences
 ------------

-CI workflows will change, even jobs providing the same feature as the older checks.
\ No newline at end of file
+CI workflows will change, even jobs providing the same feature as the older checks.
2 changes: 1 addition & 1 deletion docs/images/document-generation-timeline.svg
9 changes: 4 additions & 5 deletions pyproject.toml
@@ -73,7 +73,7 @@ ignore_missing_imports = true  # Ignore missing stubs in imported modules
 # Run pytest with all our checkers, and don't spam us with massive tracebacks on error
 addopts = """
     --tb=native -vv
-    --cov=event_model --cov-report term --cov-report xml:cov.xml
+    --cov=src/event_model --cov-report term --cov-report xml:cov.xml
     """
 # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings
 filterwarnings = "error"
@@ -106,8 +106,8 @@ allowlist_externals =
     sphinx-autobuild
 commands =
     pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs}
-    type-checking: mypy event_model {posargs}
-    tests: pytest --cov=event_model --cov-report term --cov-report xml:cov.xml {posargs}
+    type-checking: mypy src/event_model {posargs}
+    tests: pytest --cov=src/event_model --cov-report term --cov-report xml:cov.xml {posargs}
     docs: sphinx-{posargs:build -E --keep-going} -T docs build/html
 """

@@ -128,5 +128,4 @@ lint.select = [
 [tool.ruff.lint.per-file-ignores]
 # By default, private member access is allowed in tests
 # See https://github.com/DiamondLightSource/python-copier-template/issues/154
-# Remove this line to forbid private member access in tests
-"tests/**/*" = ["SLF001"]
+"src/event_model/tests/**" = ["SLF001"]
145 changes: 79 additions & 66 deletions src/event_model/__init__.py
@@ -148,10 +148,10 @@ def __init__(self, *, emit: Optional[Callable] = None) -> None:
         try:
             # Does this function accept two positional arguments?
             sig.bind(None, None)
-        except TypeError:
+        except TypeError as error:
             raise ValueError(
                 "emit must accept two positional arguments, name and doc"
-            )
+            ) from error
         # Stash a weak reference to `emit`.
         if inspect.ismethod(emit):
             self._emit_ref = weakref.WeakMethod(emit)
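The "except ... as error" / "from error" changes above look like fixes for ruff's B904 rule (raise inside an except block without exception chaining). A minimal sketch of what the chaining buys, with illustrative names not taken from event-model:

def lookup(table: dict, key: str) -> int:
    try:
        return table[key]
    except KeyError as error:
        # "from error" records the KeyError as __cause__, so the traceback
        # shows the true origin instead of the misleading "During handling of
        # the above exception, another exception occurred" message.
        raise ValueError(f"unknown key: {key!r}") from error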
@@ -295,7 +295,8 @@ def bulk_events(self, doc: dict) -> None:
         # Do not modify this in a subclass. Use event_page.
         warnings.warn(
             "The document type 'bulk_events' has been deprecated in favor of "
-            "'event_page', whose structure is a transpose of 'bulk_events'."
+            "'event_page', whose structure is a transpose of 'bulk_events'.",
+            stacklevel=2,
         )
         for page in bulk_events_to_event_pages(doc):
             self.event_page(page)
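The stacklevel=2 additions throughout this file appear to address ruff's B028 rule (warnings.warn without an explicit stacklevel). A small sketch, assuming the deprecated function is called one frame up; the function name is hypothetical:

import warnings

def bulk_style_api() -> None:
    # stacklevel=2 attributes the warning to the caller of this function,
    # so the filename:lineno printed is the user's call site, not this line.
    warnings.warn("use the page-style API instead", stacklevel=2)

bulk_style_api()  # the emitted warning points at this line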
@@ -304,7 +305,8 @@ def bulk_datum(self, doc: dict) -> None:
         # Do not modify this in a subclass. Use event_page.
         warnings.warn(
             "The document type 'bulk_datum' has been deprecated in favor of "
-            "'datum_page', whose structure is a transpose of 'bulk_datum'."
+            "'datum_page', whose structure is a transpose of 'bulk_datum'.",
+            stacklevel=2,
         )
         self.datum_page(bulk_datum_to_datum_page(doc))

@@ -317,7 +319,7 @@ class SingleRunDocumentRouter(DocumentRouter):
     def __init__(self) -> None:
         super().__init__()
         self._start_doc: Optional[dict] = None
-        self._descriptors: dict = dict()
+        self._descriptors: dict = {}
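Swapping dict() for {} matches ruff's C408 rule (unnecessary dict call), applied in several spots below. A tiny sketch of the equivalence; names are illustrative:

empty = {}                    # instead of dict(); no name lookup, no call
point = {"x": 1.0, "y": 2.0}  # instead of dict(x=1.0, y=2.0)

# dict() remains the natural spelling when consuming an iterable of pairs:
mapping = dict(zip(["x", "y"], [1.0, 2.0]))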

     def __call__(
         self, name: str, doc: dict, validate: bool = False
@@ -653,26 +655,29 @@ def __init__(
         stream_resource_cache: Optional[dict] = None,
         stream_datum_cache: Optional[dict] = None,
         inplace: Optional[bool] = None,
-        retry_intervals: List = [
-            0.001,
-            0.002,
-            0.004,
-            0.008,
-            0.016,
-            0.032,
-            0.064,
-            0.128,
-            0.256,
-            0.512,
-            1.024,
-        ],
+        retry_intervals: Optional[List[float]] = None,
     ) -> None:
+        if retry_intervals is None:
+            retry_intervals = [
+                0.001,
+                0.002,
+                0.004,
+                0.008,
+                0.016,
+                0.032,
+                0.064,
+                0.128,
+                0.256,
+                0.512,
+                1.024,
+            ]
         if inplace is None:
             self._inplace = True
             warnings.warn(
                 "'inplace' argument not specified. It is recommended to "
                 "specify True or False. In future releases, 'inplace' "
-                "will default to False."
+                "will default to False.",
+                stacklevel=2,
             )
         else:
             self._inplace = inplace
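Moving the retry_intervals default from a list literal to None is ruff's B006 fix (mutable argument default). A minimal sketch of the bug the original pattern invites; function names are illustrative:

from typing import List, Optional

def bad(history: List[float] = []) -> List[float]:
    # The [] is created once, at definition time, and shared by every call.
    history.append(1.0)
    return history

def good(history: Optional[List[float]] = None) -> List[float]:
    if history is None:
        history = []  # a fresh list per call
    history.append(1.0)
    return history

assert bad() == [1.0]
assert bad() == [1.0, 1.0]  # state leaked between unrelated calls
assert good() == [1.0]
assert good() == [1.0]      # independent calls stay independent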
@@ -685,11 +690,11 @@ def __init__(
         )
         try:
             self._coercion_func = _coercion_registry[coerce]
-        except KeyError:
+        except KeyError as error:
             raise EventModelKeyError(
                 f"The option coerce={coerce!r} was given to event_model.Filler. "
                 f"The valid options are {set(_coercion_registry)}."
-            )
+            ) from error
         self._coerce = coerce

         # See comments on coerision functions above for the use of
@@ -706,13 +711,15 @@ def __init__(
"In a future release of event-model, the argument `include` "
"will be removed from Filler.",
DeprecationWarning,
stacklevel=2,
)
self.include = include
if exclude is not None:
warnings.warn(
"In a future release of event-model, the argument `exclude` "
"will be removed from Filler.",
DeprecationWarning,
stacklevel=2,
)
self.exclude = exclude
self.root_map = root_map or {}
@@ -757,21 +764,21 @@ def __eq__(self, other: Any) -> bool:
         )

     def __getstate__(self) -> dict:
-        return dict(
-            inplace=self._inplace,
-            coercion_func=self._coerce,
-            handler_registry=self._unpatched_handler_registry,
-            include=self.include,
-            exclude=self.exclude,
-            root_map=self.root_map,
-            handler_cache=self._handler_cache,
-            resource_cache=self._resource_cache,
-            datum_cache=self._datum_cache,
-            descriptor_cache=self._descriptor_cache,
-            stream_resource_cache=self._stream_resource_cache,
-            stream_datum_cache=self._stream_datum_cache,
-            retry_intervals=self.retry_intervals,
-        )
+        return {
+            "inplace": self._inplace,
+            "coercion_func": self._coerce,
+            "handler_registry": self._unpatched_handler_registry,
+            "include": self.include,
+            "exclude": self.exclude,
+            "root_map": self.root_map,
+            "handler_cache": self._handler_cache,
+            "resource_cache": self._resource_cache,
+            "datum_cache": self._datum_cache,
+            "descriptor_cache": self._descriptor_cache,
+            "stream_resource_cache": self._stream_resource_cache,
+            "stream_datum_cache": self._stream_datum_cache,
+            "retry_intervals": self.retry_intervals,
+        }

     def __setstate__(self, d: dict) -> None:
         self._inplace = d["inplace"]
@@ -1513,7 +1520,7 @@ def __init__(

         # Map RunStart UID to RunStart document. This is used to send
         # RunStart documents to subfactory callbacks.
-        self._start_to_start_doc: dict = dict()
+        self._start_to_start_doc: dict = {}

         # Map RunStart UID to the list EventDescriptor. This is used to
         # facilitate efficient cleanup of the caches above.
@@ -1573,7 +1580,8 @@ def start(self, start_doc: RunStart) -> None:
                 warnings.warn(
                     DOCS_PASSED_IN_1_14_0_WARNING.format(
                         callback=callback, name="start", err=err
-                    )
+                    ),
+                    stacklevel=2,
                 )
                 raise err
         self._factory_cbs_by_start[uid].extend(callbacks)
@@ -1609,7 +1617,8 @@ def descriptor(self, descriptor_doc: EventDescriptor) -> None:
                 warnings.warn(
                     DOCS_PASSED_IN_1_14_0_WARNING.format(
                         callback=callback, name="start", err=err
-                    )
+                    ),
+                    stacklevel=2,
                 )
                 raise err
             try:
@@ -1618,7 +1627,8 @@ def descriptor(self, descriptor_doc: EventDescriptor) -> None:
                 warnings.warn(
                     DOCS_PASSED_IN_1_14_0_WARNING.format(
                         callback=callback, name="descriptor", err=err
-                    )
+                    ),
+                    stacklevel=2,
                 )
                 raise err

@@ -1639,12 +1649,12 @@ def datum_page(self, doc: DatumPage) -> None:
         resource_uid = doc["resource"]
         try:
             start_uid = self._resources[resource_uid]
-        except KeyError:
+        except KeyError as error:
             if resource_uid not in self._unlabeled_resources:
                 raise UnresolvableForeignKeyError(
                     resource_uid,
                     f"DatumPage refers to unknown Resource uid {resource_uid}",
-                )
+                ) from error
             # Old Resources do not have a reference to a RunStart document,
             # so in turn we cannot immediately tell which run these datum
             # documents belong to.
@@ -1855,7 +1865,7 @@ def __call__(self, datum_kwargs: Dict[str, Any], validate: bool = True) -> Datum
         doc = Datum(
             resource=resource_uid,
             datum_kwargs=datum_kwargs,
-            datum_id="{}/{}".format(resource_uid, next(self.counter)),
+            datum_id=f"{resource_uid}/{next(self.counter)}",
         )
         if validate:
             schema_validators[DocumentNames.datum].validate(doc)
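This is ruff's UP032 rewrite (str.format replaced by an f-string). A short sketch showing that arbitrary expressions such as next(counter) can be embedded directly; the values are illustrative:

import itertools

counter = itertools.count()
resource_uid = "abc123"

old_style = "{}/{}".format(resource_uid, next(counter))
new_style = f"{resource_uid}/{next(counter)}"

assert old_style == "abc123/0"
assert new_style == "abc123/1"  # expressions evaluate inline, left to right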
@@ -1887,9 +1897,7 @@ def __call__(self, datum_kwargs: dict, validate: bool = True) -> DatumPage:
         doc = DatumPage(
             resource=resource_uid,
             datum_kwargs=datum_kwargs,
-            datum_id=[
-                "{}/{}".format(resource_uid, next(self.counter)) for _ in range(N)
-            ],
+            datum_id=[f"{resource_uid}/{next(self.counter)}" for _ in range(N)],
         )
         if validate:
             schema_validators[DocumentNames.datum_page].validate(doc)
@@ -2045,6 +2053,7 @@ def compose_stream_datum(
     warnings.warn(
         "compose_stream_datum() will be removed in the minor version.",
         DeprecationWarning,
+        stacklevel=2,
     )
     return ComposeStreamDatum(stream_resource, counter)(
         seq_nums,
@@ -2146,8 +2155,9 @@ def __call__(
     ) -> RunStop:
         if self.poison_pill:
             raise EventModelError(
-                "Already composed a RunStop document for run "
-                "{!r}.".format(self.start["uid"])
+                "Already composed a RunStop document for run " "{!r}.".format(
+                    self.start["uid"]
+                )
             )
         self.poison_pill.append(object())
         if uid is None:
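The reformatted message above can look as if .format() applies only to the second literal; it does not. Adjacent string literals are concatenated at parse time, so the call formats the whole message. A quick check with an illustrative uid:

uid = "abc123"
msg = "Already composed a RunStop document for run " "{!r}.".format(uid)
assert msg == "Already composed a RunStop document for run 'abc123'."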
@@ -2190,7 +2200,7 @@ def compose_stop(

 def length_of_value(dictionary: Dict[str, List], error_msg: str) -> Optional[int]:
     length = None
-    for k, v in dictionary.items():
+    for _, v in dictionary.items():
         v_len = len(v)
         if length is not None:
             if v_len != length:
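Renaming the unused loop key to _ satisfies ruff's B007 rule (unused loop control variable). A sketch; since the key is never needed, iterating .values() would be the more direct spelling, though the commit keeps .items():

lengths = {"a": [1, 2], "b": [3, 4]}

# B007-compliant: the underscore marks the key as intentionally unused.
for _, values in lengths.items():
    total = len(values)

# Equivalent and arguably clearer when only the values matter:
for values in lengths.values():
    total = len(values)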
@@ -2276,8 +2286,9 @@ def __call__(
             )
         if set(filled) - set(data):
             raise EventModelValidationError(
-                "Keys in event['filled'] {} must be a subset of those in "
-                "event['data'] {}".format(filled.keys(), data.keys())
+                f"Keys in event['filled'] {filled.keys()} "
+                "must be a subset of those in "
+                f"event['data'] {data.keys()}"
             )
         self.event_counters[self.descriptor["name"]] += len(seq_num)
         return doc
@@ -2378,8 +2389,9 @@ def __call__(
             )
         if set(filled) - set(data):
             raise EventModelValidationError(
-                "Keys in event['filled'] {} must be a subset of those in "
-                "event['data'] {}".format(filled.keys(), data.keys())
+                f"Keys in event['filled'] {filled.keys()} "
+                "must be a subset of those in "
+                f"event['data'] {data.keys()}"
             )
         self.event_counters[self.descriptor["name"]] = seq_num + 1
         return doc
@@ -2468,10 +2480,10 @@ def __call__(
         if validate:
             if name in self.streams and self.streams[name] != set(data_keys):
                 raise EventModelValidationError(
-                    "A descriptor with the name {} has already been composed with "
-                    "data_keys {}. The requested data_keys were {}. All "
-                    "descriptors in a given stream must have the same "
-                    "data_keys.".format(name, self.streams[name], set(data_keys))
+                    f"A descriptor with the name {name} has already been composed with "
+                    f"data_keys {self.streams[name]}. The requested data_keys were "
+                    f"{set(data_keys)}. All descriptors in a given stream must have "
+                    "the same data_keys."
                 )
             schema_validators[DocumentNames.descriptor].validate(doc)

@@ -2826,34 +2838,34 @@ def merge_event_pages(event_pages: Iterable[EventPage]) -> EventPage:
     if len(pages) == 1:
         return pages[0]

-    doc = dict(
-        descriptor=pages[0]["descriptor"],
-        seq_num=list(
+    doc = {
+        "descriptor": pages[0]["descriptor"],
+        "seq_num": list(
             itertools.chain.from_iterable([page["seq_num"] for page in pages])
         ),
-        time=list(itertools.chain.from_iterable([page["time"] for page in pages])),
-        uid=list(itertools.chain.from_iterable([page["uid"] for page in pages])),
-        data={
+        "time": list(itertools.chain.from_iterable([page["time"] for page in pages])),
+        "uid": list(itertools.chain.from_iterable([page["uid"] for page in pages])),
+        "data": {
             key: list(
                 itertools.chain.from_iterable([page["data"][key] for page in pages])
             )
             for key in pages[0]["data"].keys()
         },
-        timestamps={
+        "timestamps": {
             key: list(
                 itertools.chain.from_iterable(
                     [page["timestamps"][key] for page in pages]
                 )
             )
             for key in pages[0]["data"].keys()
         },
-        filled={
+        "filled": {
             key: list(
                 itertools.chain.from_iterable([page["filled"][key] for page in pages])
             )
             for key in pages[0]["filled"].keys()
         },
-    )
+    }
     return cast(EventPage, doc)


@@ -3001,6 +3013,7 @@ def bulk_datum_to_datum_page(bulk_datum: dict) -> DatumPage:
     Note: There is only one known usage of BulkDatum "in the wild", and the
     BulkDatum layout has been deprecated in favor of DatumPage.
     """
+
     datum_page = DatumPage(
         datum_id=bulk_datum["datum_ids"],
         resource=bulk_datum["resource"],