Skip to content

Commit

Permalink
fixed unit tests and formatter
Browse files Browse the repository at this point in the history
Still have to compare the generated schemas thoroughly;
also tested against bluesky.
  • Loading branch information
evalott100 committed Feb 1, 2024
1 parent 6178015 commit a43a1f6
Show file tree
Hide file tree
Showing 9 changed files with 252 additions and 213 deletions.
25 changes: 13 additions & 12 deletions event_model/documents/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from typing import Union, Tuple, Type

# flake8: noqa
from event_model.documents.datum import Datum
from event_model.documents.datum_page import DatumPage
Expand All @@ -9,22 +11,21 @@
from event_model.documents.run_stop import RunStop
from event_model.documents.stream_datum import StreamDatum
from event_model.documents.stream_resource import StreamResource
from typing import Union

DocumentType = Union[
Datum,
DatumPage,
Event,
EventDescriptor,
EventPage,
Resource,
RunStart,
RunStop,
StreamDatum,
StreamResource,
Type[Datum],
Type[DatumPage],
Type[Event],
Type[EventDescriptor],
Type[EventPage],
Type[Resource],
Type[RunStart],
Type[RunStop],
Type[StreamDatum],
Type[StreamResource],
]

ALL_DOCUMENTS = (
ALL_DOCUMENTS: Tuple[DocumentType, ...] = (
Datum,
DatumPage,
Event,
Expand Down
9 changes: 9 additions & 0 deletions event_model/documents/generate/__main__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from pathlib import Path

from event_model.documents import ALL_DOCUMENTS
from event_model.documents.generate.typeddict_to_schema import typeddict_to_schema

# Script entry point: regenerate a JSON schema file for every document type.
if __name__ == "__main__":
    # Resolve the schemas directory relative to this module
    # (three levels up from generate/__main__.py, then into "schemas").
    schema_dir = Path(__file__).parent.parent.parent / "schemas"
    for document in ALL_DOCUMENTS:
        # typeddict_to_schema writes <snake_case_name>.json into schema_dir
        # for each document TypedDict — presumably skipping unchanged files;
        # confirm against typeddict_to_schema's implementation.
        typeddict_to_schema(document, schema_dir)
99 changes: 60 additions & 39 deletions event_model/documents/generate/type_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,65 +2,86 @@
A wrapper used to patch out schema generation utilities.
"""

from typing import TYPE_CHECKING, Any, Dict, List, Union

pydantic_version = None
try:
from typing import Any, Dict, List, Union

import pydantic

pydantic_version = pydantic.__version__
def Field(*args, **kwargs): # type: ignore
...

Field = pydantic.Field
FieldInfo = pydantic.fields.FieldInfo
BaseModel = pydantic.BaseModel
TypeAdapter = pydantic.TypeAdapter
create_model = pydantic.create_model

# Root models for root definitions
RootModel = pydantic.RootModel
class BaseModel: # type: ignore
def __init__(self, *args, **kwargs): ...

class DataFrameForEventPage(RootModel):
root: Dict[str, List[Any]] = Field(alias="Dataframe")

class DataFrameForDatumPage(RootModel):
root: List[str] = Field(alias="Dataframe")
class TypeAdapter: # type: ignore
def __init__(self, *args, **kwargs):
self.by_alias = True
...

class DataFrameForFilled(RootModel):
root: Dict[str, List[Union[bool, str]]] = Field(alias="DataframeForFilled")
def json_schema(self, *args, **kwargs): ...

class DataType(RootModel):
root: Any = Field(alias="DataType")

except ModuleNotFoundError:
DataFrameForDatumPage = List[str]

def Field(*args, **kwargs): # type: ignore
...
DataFrameForEventPage = Dict[str, List]

class FieldInfo: # type: ignore
...
DataFrameForFilled = Dict[str, List[Union[bool, str]]]

class BaseModel: # type: ignore
...

class TypeAdapter: # type: ignore
...
class DataType: # type: ignore
...

def create_model(*args, **kwargs): # type: ignore
...

class DataFrame: ...
def add_extra_schema(*args, **kwargs): # type: ignore
def inner(cls):
return cls

class DataFrameForFilled: ...
return inner

class DataType: ...

extra_schema: Dict = {}

extra_schema = {}
if not TYPE_CHECKING:
try:
import pydantic

pydantic_version = pydantic.__version__

def add_extra_schema(schema: dict):
def inner(cls):
extra_schema[cls] = schema
return cls
Field = pydantic.Field
BaseModel = pydantic.BaseModel
TypeAdapter = pydantic.TypeAdapter

return inner
# Root models for root definitions:
# we want some types to reference definitions in the
# schema
RootModel = pydantic.RootModel

class DataFrameForDatumPage(RootModel):
root: List[str] = Field(alias="Dataframe")

class DataFrameForEventPage(RootModel):
root: Dict[str, List] = Field(alias="Dataframe")

class DataFrameForFilled(RootModel):
root: Dict[str, List[Union[bool, str]]] = Field(alias="DataframeForFilled")

class DataType(RootModel):
root: Any = Field(alias="DataType")

# Dictionary for patching in schema post generation
extra_schema = {}

def add_extra_schema(schema: Dict) -> Dict:
def inner(cls):
extra_schema[cls] = schema
return cls

return inner

# If pydantic is not installed (e.g the install isn't [dev]),
# we expect to be able to run event-model, just not the schema
# generation code
except ModuleNotFoundError:
assert pydantic_version is None
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,11 @@
from pathlib import Path
from typing import Dict, Optional

from event_model.documents import (
ALL_DOCUMENTS,
DocumentType,
)
from event_model.documents import ALL_DOCUMENTS, DocumentType
from event_model.documents.generate.type_wrapper import TypeAdapter, extra_schema


def to_snake(string_to_be_aliased: str):
def to_snake(string_to_be_aliased: str) -> str:
    """Convert a CamelCase name to its snake_case alias.

    An underscore is inserted before every uppercase letter except a
    leading one, then the whole string is lowercased.
    """
    with_underscores = re.sub(r"(?<!^)(?=[A-Z])", "_", string_to_be_aliased)
    return with_underscores.lower()

Expand Down Expand Up @@ -56,7 +53,7 @@ def sort_schema(schema: Dict) -> Dict:


# Used to add user written schema to autogenerated schema.
def merge_dicts(dict1: dict, dict2: dict) -> dict:
def merge_dicts(dict1: Dict, dict2: Dict) -> Dict:
"""
Takes two dictionaries with subdirectories and returns a new dictionary of
the two merged:
Expand Down Expand Up @@ -102,28 +99,42 @@ def merge_dicts(dict1: dict, dict2: dict) -> dict:
return return_dict


def document_to_schema(
document: DocumentType, schema_dir: Optional[Path] = None, sort=True
):
def dump_json(dictionary: Dict, file_path: Path, mode="w"):
    """Serialize *dictionary* to *file_path* as JSON with 4-space indentation.

    *mode* is passed straight through to ``open`` (defaults to overwrite).
    """
    with open(file_path, mode) as json_file:
        json.dump(dictionary, json_file, indent=4)


def typeddict_to_schema(
document: DocumentType, schema_dir: Optional[Path] = None, sort: bool = True
) -> Dict:
assert document in ALL_DOCUMENTS
type_adapter = TypeAdapter(document)
type_adapter.by_alias = True
schema_json = type_adapter.json_schema()
json_schema_dict = type_adapter.json_schema()

# Add the manually defined extra stuff
if document in extra_schema:
print(document)
schema_json = merge_dicts(extra_schema[document], schema_json)
json_schema_dict = merge_dicts(extra_schema[document], json_schema_dict)

if sort:
schema_json = sort_schema(schema_json)
json_schema_dict = sort_schema(json_schema_dict)

if schema_dir:
with open(schema_dir / f"{to_snake(document.__name__)}.json", "w") as f:
json.dump(schema_json, f, indent=4)


if __name__ == "__main__":
schema_dir = Path(__file__).parent.parent.parent / "schemas"
for document in ALL_DOCUMENTS:
document_to_schema(document, schema_dir)
file_path = schema_dir / f"{to_snake(json_schema_dict['title'])}.json"

# Check if the file has been updated
if not file_path.exists():
print(f"{str(file_path)} does not exist yet, writing")
dump_json(json_schema_dict, file_path)
else:
with open(file_path) as json_file:
skip_writing = json.load(json_file) == json_schema_dict

if skip_writing:
print(f"{json_schema_dict['title']} is unchanged")
else:
print(
f"{json_schema_dict['title']} has been changed, writing new schema"
)

return json_schema_dict
2 changes: 1 addition & 1 deletion event_model/documents/run_start.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ class Projection(TypedDict):


RUN_START_EXTRA_SCHEMA = {
"definitions": {
"$defs": {
"DataType": {
"patternProperties": {"^([^./]+)$": {"$ref": "#/$defs/DataType"}},
"additionalProperties": False,
Expand Down
2 changes: 1 addition & 1 deletion event_model/documents/run_stop.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Any, Dict
from typing import Dict

from typing_extensions import Annotated, Literal, NotRequired, TypedDict

Expand Down
Loading

0 comments on commit a43a1f6

Please sign in to comment.