chore: bump deps
Signed-off-by: Henry Schreiner <henryschreineriii@gmail.com>
henryiii committed Oct 25, 2024
1 parent 8e6e394 commit b6887be
Showing 6 changed files with 49 additions and 30 deletions.
17 changes: 9 additions & 8 deletions .pre-commit-config.yaml
@@ -7,7 +7,7 @@ exclude: ^src/scikit_build_core/_vendor

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
     hooks:
       - id: check-added-large-files
       - id: check-case-conflict
@@ -25,7 +25,7 @@ repos:
         exclude: "^tests"

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.9
+    rev: v0.7.1
     hooks:
       - id: ruff
         args: ["--fix", "--show-fixes"]
@@ -39,7 +39,7 @@ repos:
       - id: rst-inline-touching-normal

   - repo: https://github.com/adamchainz/blacken-docs
-    rev: 1.18.0
+    rev: 1.19.1
     hooks:
       - id: blacken-docs
         additional_dependencies: [black==24.*]
@@ -59,7 +59,7 @@ repos:
         exclude: "^tests|src/scikit_build_core/resources/scikit-build.schema.json|^docs/projects.md"

   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.11.2
+    rev: v1.13.0
     hooks:
       - id: mypy
         exclude: |
@@ -81,16 +81,17 @@ repos:
           - markdown-it-py<3 # Python 3.7 compat needed for mypy check
           - ninja
           - nox
+          - orjson
           - packaging
-          - pytest<8
+          - pytest
           - pytest-subprocess
           - rich
           - setuptools-scm
           - tomli
           - types-setuptools>=70.1

   - repo: https://github.com/henryiii/check-sdist
-    rev: "v1.0.0"
+    rev: "v1.2.0"
     hooks:
       - id: check-sdist
         args: [--inject-junk]
@@ -129,12 +130,12 @@ repos:
         additional_dependencies: [cogapp]

   - repo: https://github.com/henryiii/validate-pyproject-schema-store
-    rev: 2024.09.23
+    rev: 2024.10.21
     hooks:
       - id: validate-pyproject

   - repo: https://github.com/python-jsonschema/check-jsonschema
-    rev: 0.29.3
+    rev: 0.29.4
     hooks:
       - id: check-dependabot
       - id: check-github-workflows
16 changes: 9 additions & 7 deletions pyproject.toml
@@ -42,9 +42,8 @@ dependencies = [
   "tomli >=1.2.2; python_version<'3.11'",
   "typing-extensions >=3.10.0; python_version<'3.9'",
 ]
-# Note: for building wheels and sdists, there are also additional dependencies
-# in the pyproject extra. And cmake and possibly ninja if those are not already
-# present (user controllable)
+# Note: cmake and possibly ninja are also required if those are not already
+# present (user controllable) - but a system version is fine.

 [project.optional-dependencies]
 pyproject = [
@@ -82,7 +81,7 @@ test-schema = [
   "validate-pyproject",
 ]
 cov = [
-  "pytest-cov[toml]",
+  "pytest-cov",
 ]
 wheels = [
   "cmake",
@@ -126,8 +125,11 @@ version.source = "vcs"
 build.hooks.vcs.version-file = "src/scikit_build_core/_version.py"


-[tool.uv.pip]
-reinstall-package = ["scikit-build-core"]
+[tool.uv]
+dev-dependencies = ["scikit-build-core[test,test-hatchling,test-meta,test-numpy,test-schema,cov,dev]"]
+environments = ["python_version >= '3.11'"]
+pip.reinstall-package = ["scikit-build-core"]
+workspace.members = ["tmp/hello/hello"]


 [tool.pytest.ini_options]
@@ -222,7 +224,7 @@ ignore = ["W002"] # Triggers on __init__.py's


 [tool.ruff]
-exclude = ["src/scikit_build_core/_vendor/*"] # Required due to "build" module
+exclude = ["src/scikit_build_core/_vendor/*"]

 [tool.ruff.lint]
 extend-select = [
8 changes: 5 additions & 3 deletions src/scikit_build_core/file_api/reply.py
@@ -56,7 +56,7 @@ def make_class(self, data: InputDict, target: Type[T]) -> T:
         ):
             return self._load_from_json(Path(data["jsonFile"]), target)

-        input_dict = {}
+        input_dict: Dict[str, Type[Any]] = {}
         exceptions: List[Exception] = []

         # We don't have DataclassInstance exposed in typing yet
@@ -65,12 +65,14 @@ def make_class(self, data: InputDict, target: Type[T]) -> T:
                 "cmakefiles", "cmakeFiles"
             )
             if json_field in data:
+                field_type = field.type
+                assert isinstance(field_type, type)
                 try:
                     input_dict[field.name] = self._convert_any(
-                        data[json_field], field.type
+                        data[json_field], field_type
                     )
                 except TypeError as err:
-                    msg = f"Failed to convert field {field.name!r} of type {field.type}"
+                    msg = f"Failed to convert field {field.name!r} of type {field_type}"
                     if sys.version_info < (3, 11):
                         err.__notes__ = [*getattr(err, "__notes__", []), msg] # type: ignore[attr-defined]
                     else:
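
The recurring pattern in this commit - copy field.type into a local and assert that it is a real class - is presumably needed because newer mypy/typeshed treat dataclasses.Field.type as possibly being a string (postponed annotations store the raw annotation text). A minimal illustrative sketch, not taken from this repository, of why that narrowing matters:

# Illustrative only. With postponed annotations, dataclasses keeps the raw
# annotation string, so Field.type may be a str rather than a class.
from __future__ import annotations

import dataclasses


@dataclasses.dataclass
class Point:
    x: int
    y: int


field = dataclasses.fields(Point)[0]
print(repr(field.type))              # 'int' -- a string here, not the class int
print(isinstance(field.type, type))  # False, which is what the added asserts guard against
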
7 changes: 5 additions & 2 deletions src/scikit_build_core/settings/documentation.py
@@ -69,12 +69,15 @@ def mk_docs(dc: type[object], prefix: str = "") -> Generator[DCDoc, None, None]:
     docs = pull_docs(dc)

     for field in dataclasses.fields(dc):
-        if dataclasses.is_dataclass(field.type):
-            yield from mk_docs(field.type, prefix=f"{prefix}{field.name}.")
+        field_type = field.type
+        assert isinstance(field_type, type)
+        if dataclasses.is_dataclass(field_type):
+            yield from mk_docs(field_type, prefix=f"{prefix}{field.name}.")
             continue

         if get_origin(field.type) is list:
             field_type = get_args(field.type)[0]
+            assert isinstance(field_type, type)
             if dataclasses.is_dataclass(field_type):
                 yield from mk_docs(field_type, prefix=f"{prefix}{field.name}[].")
                 continue
6 changes: 4 additions & 2 deletions src/scikit_build_core/settings/json_schema.py
@@ -29,9 +29,11 @@ def to_json_schema(dclass: type[Any], *, normalize_keys: bool) -> dict[str, Any]
     errs = []
     required = []
     for field in dataclasses.fields(dclass):
+        field_type = field.type
+        assert isinstance(field_type, type)
         if dataclasses.is_dataclass(field.type):
             props[field.name] = to_json_schema(
-                field.type, normalize_keys=normalize_keys
+                field_type, normalize_keys=normalize_keys
             )
             continue

@@ -110,7 +112,7 @@ def to_json_schema(dclass: type[Any], *, normalize_keys: bool) -> dict[str, Any]


 def convert_type(t: Any, *, normalize_keys: bool) -> dict[str, Any]:
-    if dataclasses.is_dataclass(t):
+    if isinstance(t, type) and dataclasses.is_dataclass(t):
         return to_json_schema(t, normalize_keys=normalize_keys)
     if t is str or t is Path or t is Version or t is SpecifierSet:
         return {"type": "string"}
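
The convert_type change just above adds an isinstance guard in front of dataclasses.is_dataclass. A small sketch (helper and class names invented for illustration): is_dataclass returns True for both dataclass types and dataclass instances, so code that goes on to call dataclasses.fields and recurse needs the extra check to guarantee it is holding a class:

# Illustrative only. is_dataclass() is true for a dataclass type *and* its
# instances; the isinstance(t, type) guard keeps only the class case.
import dataclasses


@dataclasses.dataclass
class Settings:
    name: str = "demo"


def is_dataclass_type(t: object) -> bool:
    return isinstance(t, type) and dataclasses.is_dataclass(t)


print(dataclasses.is_dataclass(Settings))    # True (the class)
print(dataclasses.is_dataclass(Settings()))  # True (an instance, too)
print(is_dataclass_type(Settings))           # True
print(is_dataclass_type(Settings()))         # False
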
25 changes: 17 additions & 8 deletions src/scikit_build_core/settings/sources.py
@@ -94,6 +94,12 @@ def _dig_fields(__opt: Any, *names: str) -> Any:
     return __opt


+def _field_type(__field: dataclasses.Field[Any]) -> type[Any]:
+    field_type = __field.type
+    assert isinstance(field_type, type)
+    return field_type
+
+
 def _process_union(target: type[Any]) -> Any:
     """
     Filters None out of Unions. If a Union only has one item, return that item.
@@ -158,7 +164,9 @@ def _nested_dataclass_to_names(__target: type[Any], *inner: str) -> Iterator[lis

     if dataclasses.is_dataclass(__target):
         for field in dataclasses.fields(__target):
-            yield from _nested_dataclass_to_names(field.type, *inner, field.name)
+            yield from _nested_dataclass_to_names(
+                _field_type(field), *inner, field.name
+            )
     else:
         yield list(inner)

@@ -321,7 +329,7 @@ def _unrecognized_dict(
             yield ".".join((*above, keystr))
             continue
         (inner_option_field,) = matches
-        inner_option = inner_option_field.type
+        inner_option = _field_type(inner_option_field)
         if dataclasses.is_dataclass(inner_option):
             yield from _unrecognized_dict(
                 settings[keystr], inner_option, (*above, keystr)
@@ -490,12 +498,12 @@ def convert(cls, item: Any, target: type[Any]) -> object:
         """
         target, annotations = _process_annotated(target)
         raw_target = _get_target_raw_type(target)
-        if dataclasses.is_dataclass(raw_target):
+        if isinstance(raw_target, type) and dataclasses.is_dataclass(raw_target):
             fields = dataclasses.fields(raw_target)
             values = ((k.replace("-", "_"), v) for k, v in item.items())
             return raw_target(
                 **{
-                    k: cls.convert(v, *[f.type for f in fields if f.name == k])
+                    k: cls.convert(v, *[_field_type(f) for f in fields if f.name == k])
                     for k, v in values
                 }
             )
@@ -579,24 +587,25 @@ def convert_target(self, target: type[T], *prefixes: str) -> T:
         errors = []
         prep: dict[str, Any] = {}
         for field in dataclasses.fields(target): # type: ignore[arg-type]
-            if dataclasses.is_dataclass(field.type):
+            field_type = _field_type(field)
+            if dataclasses.is_dataclass(field_type):
                 try:
                     prep[field.name] = self.convert_target(
-                        field.type, *prefixes, field.name
+                        field_type, *prefixes, field.name
                     )
                 except Exception as e:
                     name = ".".join([*self.prefixes, *prefixes, field.name])
                     e.__notes__ = [*getattr(e, "__notes__", []), f"Field: {name}"] # type: ignore[attr-defined]
                     errors.append(e)
                 continue

-            is_dict = _get_target_raw_type(field.type) is dict
+            is_dict = _get_target_raw_type(field_type) is dict

             for source in self.sources:
                 if source.has_item(*prefixes, field.name, is_dict=is_dict):
                     simple = source.get_item(*prefixes, field.name, is_dict=is_dict)
                     try:
-                        tmp = source.convert(simple, field.type)
+                        tmp = source.convert(simple, field_type)
                     except Exception as e:
                         name = ".".join([*self.prefixes, *prefixes, field.name])
                         e.__notes__ = [*getattr(e, "__notes__", []), f"Field {name}"] # type: ignore[attr-defined]
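
Both except blocks above attach context to the exception before collecting it. A minimal sketch of that note-attaching pattern on its own (helper name and message invented for illustration): Python 3.11+ has BaseException.add_note, and older versions can emulate it by appending to __notes__, which is what the type: ignore comments cover:

# Illustrative only. Attach extra context to an exception in a way that works
# on Python 3.8-3.10 (no add_note) as well as on 3.11+.
import sys


def annotate(exc: BaseException, msg: str) -> None:
    if sys.version_info < (3, 11):
        exc.__notes__ = [*getattr(exc, "__notes__", []), msg]  # type: ignore[attr-defined]
    else:
        exc.add_note(msg)


try:
    int("not a number")
except ValueError as err:
    annotate(err, "Field: tool.example.option")  # hypothetical field name
    print(getattr(err, "__notes__", []))         # ['Field: tool.example.option']
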
