Skip to content

Commit

Permalink
Fix some pyright 'basic' checks.
Browse files Browse the repository at this point in the history
  • Loading branch information
mar10 committed Sep 28, 2024
1 parent 616fed1 commit da5e178
Show file tree
Hide file tree
Showing 18 changed files with 547 additions and 328 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@

## 0.9.1 (unreleased)

- Passes pyright 'basic' checks.
- tree.to_rdf() is now available for Tree (not only TypedTree)

## 0.9.0 (2024-09-12)

- Add `Tree.build_random_tree()` (experimental).
Expand Down
2 changes: 1 addition & 1 deletion docs/sphinx/ug_randomize.rst
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ Example::
tree = TypedTree.build_random_tree(structure_def)
assert type(tree) is TypedTree
assert isinstance(tree, TypedTree)
assert tree.calc_height() == 3
tree.print()
Expand Down
70 changes: 55 additions & 15 deletions nutree/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,19 @@
from contextlib import contextmanager
from enum import Enum
from pathlib import Path
from typing import IO, TYPE_CHECKING, Any, Callable, Dict, List, Type, TypeVar, Union
from typing import (
IO,
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterator,
List,
Literal,
Type,
TypeVar,
Union,
)

if TYPE_CHECKING: # Imported by type checkers, but prevent circular includes
from .node import Node
Expand All @@ -23,28 +35,38 @@
TTree = TypeVar("TTree", bound=Tree)

#: Used as ID for the system root node
ROOT_ID: str = "__root__"
ROOT_DATA_ID: str = "__root__"
ROOT_NODE_ID: int = 0

#: File format version used by `tree.save()` as `meta.$format_version`
FILE_FORMAT_VERSION: str = "1.0"

#: Currently used Python version as string
PYTHON_VERSION = ".".join([str(s) for s in sys.version_info[:3]])

#: Type of ``Node.data_id``
DataIdType = Union[str, int]

#: Type of ``Tree(..., calc_data_id)``
CalcIdCallbackType = Callable[["Tree", Any], DataIdType]

#: Type of ``format(..., repr=)``
ReprArgType = Union[str, Callable[["Node"], str]]

#: Type of ``Tree(..., factory)``
NodeFactoryType = Type["Node"]

#: A dict of scalar values
FlatJsonDictType = Dict[str, Union[str, int, float, bool, None]]

#: Type of ``tree.save(..., key_map)``
KeyMapType = Dict[str, str]

#: Type of ``tree.save(..., value_map)``
#: E.g. `{'t': ['person', 'dept']}`
ValueMapType = Dict[str, List[str]]

#: Currently used Python version as string
PYTHON_VERSION = ".".join([str(s) for s in sys.version_info[:3]])
#: E.g. `{'t': {'person': 0, 'dept': 1}}`
ValueDictMapType = Dict[str, Dict[str, int]]


class TreeError(RuntimeError):
Expand Down Expand Up @@ -115,18 +137,34 @@ def __init__(self, value=None):
self.value = value


#: Generic callback for `tree.filter()`, `tree.copy()`, ...
PredicateCallbackType = Callable[["Node"], Union[None, bool, IterationControl]]
#: Generic callback for `tree.to_dot()`, ...
MapperCallbackType = Callable[["Node", dict], Union[None, Any]]

#: Callback for `tree.save()`
SerializeMapperType = Callable[["Node", dict], Union[None, dict]]

#: Callback for `tree.load()`
DeserializeMapperType = Callable[["Node", dict], Union[str, object]]
# MatchCallbackType = Callable[["Node"], bool]

#: Generic callback for `tree.filter()`, `tree.copy()`, ...
PredicateCallbackType = Callable[["Node"], Union[None, bool, IterationControl]]

#:
TraversalCallbackType = Callable[
["Node", Any], Union[None, bool, "StopTraversal", "SkipBranch"]
["Node", Any],
Union[
None,
bool,
"SkipBranch",
"StopTraversal",
Type[SkipBranch],
Type[StopTraversal],
Type[StopIteration],
],
]
#: Callback for `tree.sort(key=...)`
SortKeyType = Callable[["Node"], Any]
# SortKeyType = Callable[[Node], SupportsLess]

#: Node connector prefixes, for use with ``format(style=...)`` argument.
CONNECTORS = {
Expand Down Expand Up @@ -185,10 +223,10 @@ def __init__(self, dict_inst: dict | None = None, **values) -> None:
if dict_inst is not None:
# A dictionary was passed: store a reference to that instance
if not isinstance(dict_inst, dict):
self._dict = None
self._dict = None # type: ignore
raise TypeError("dict_inst must be a dictionary or None")
if values:
self._dict = None
self._dict = None # type: ignore
raise ValueError("Cannot pass both dict_inst and **values")
self._dict: dict = dict_inst
else:
Expand Down Expand Up @@ -249,7 +287,7 @@ def get_version() -> str:
return __version__


def check_python_version(min_version: tuple[str]) -> bool:
def check_python_version(min_version: tuple[Union[str, int], Union[str, int]]) -> bool:
"""Check for deprecated Python version."""
if sys.version_info < min_version:
min_ver = ".".join([str(s) for s in min_version[:3]])
Expand Down Expand Up @@ -296,7 +334,9 @@ def call_predicate(fn: Callable, node: Node) -> IterationControl | None | Any:
return res


def call_traversal_cb(fn: Callable, node: Node, memo: Any) -> False | None:
def call_traversal_cb(
fn: TraversalCallbackType, node: Node, memo: Any
) -> Literal[False] | None:
"""Call the function and handle result and exceptions.
This method calls `fn(node, memo)` and converts all returned or raised
Expand Down Expand Up @@ -350,7 +390,7 @@ def open_as_uncompressed_input_stream(
*,
encoding: str = "utf8",
auto_uncompress: bool = True,
) -> IO[str]: # type: ignore
) -> Iterator[IO[str]]:
"""Open a file for reading, decompressing if necessary.
Decompression is done by checking for the magic header (independent of the
Expand Down Expand Up @@ -383,7 +423,7 @@ def open_as_compressed_output_stream(
*,
compression: bool | int = True,
encoding: str = "utf8",
) -> IO[str]: # type: ignore
) -> Iterator[IO[str]]:
"""Open a file for writing, ZIP-compressing if requested.
Example::
Expand Down
6 changes: 3 additions & 3 deletions nutree/diff.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def _copy_children(source: Node, dest: Node, add_set: set, meta: tuple) -> None:
n_dest.set_meta(*meta)
if n._children:
# meta is only set on top node
_copy_children(n, n_dest, add_set, meta=None)
_copy_children(n, n_dest, add_set, meta=None) # type: ignore
return


Expand All @@ -64,7 +64,7 @@ def diff_node_formatter(node):
flags.append("Moved here") # ←
elif dc == DC.MOVED_TO:
flags.append("Moved away") # ×➡
elif type(dc) is tuple: # == DC.SHIFTED:
elif isinstance(dc, tuple): # == DC.SHIFTED:
ofs = dc[1] - dc[0]
flags.append(f"Order {ofs:+d}") # ⇳ ⇵
# flags.append("Shifted") # ⇳ ⇵
Expand Down Expand Up @@ -94,7 +94,7 @@ def compare(p0: Node, p1: Node, p2: Node):
# `p0.children` always returns an (empty) array
for i0, c0 in enumerate(p0.children):
p0_data_ids.add(c0._data_id)
i1, c1 = _find_child(p1._children, c0)
i1, c1 = _find_child(p1.children, c0)

c2 = p2.add(c0)
if i0 == i1:
Expand Down
1 change: 1 addition & 0 deletions nutree/dot.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ def _attr_str(attr_def: dict, mapper=None, node=None):
if mapper:
if attr_def is None:
attr_def = {}
assert node, "node required for mapper"
call_mapper(mapper, node, attr_def)
if not attr_def:
return ""
Expand Down
32 changes: 22 additions & 10 deletions nutree/fs.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,29 @@

from operator import attrgetter, itemgetter
from pathlib import Path
from typing import Union
from typing import Optional, Union

from nutree.tree import Node, Tree


class FileSystemEntry:
def __init__(self, name, is_dir, size, mdate):
def __init__(
self,
name: str,
*,
is_dir: bool = False,
size: Optional[int] = None,
mdate: Optional[float] = None,
):
self.name = name
self.is_dir = is_dir
if is_dir:
assert size is None
size = 0
else:
assert size is not None
self.size = int(size)
self.mdate = float(mdate)
self.mdate = float(mdate) if mdate is not None else None

def __repr__(self):
if self.is_dir:
Expand All @@ -39,8 +51,8 @@ def deserialize_mapper(parent: Node, data: dict):
"""Callback for use with :meth:`~nutree.tree.Tree.load`."""
v = data["v"]
if "d" in v:
return FileSystemEntry(v["n"], True, 0)
return FileSystemEntry(v["n"], False, v["s"])
return FileSystemEntry(v["n"], is_dir=True, size=0)
return FileSystemEntry(v["n"], is_dir=False, size=v["s"])


def load_tree_from_fs(path: Union[str, Path], *, sort: bool = True) -> Tree:
Expand All @@ -51,19 +63,19 @@ def load_tree_from_fs(path: Union[str, Path], *, sort: bool = True) -> Tree:
Especially useful when comparing unit test fixtures.
"""
path = Path(path)
tree = FileSystemTree(path)
tree = FileSystemTree(str(path))

def visit(node: Node, pth: Path):
if sort:
dirs = []
files = []
for c in pth.iterdir():
if c.is_dir():
o = FileSystemEntry(f"{c.name}", True, 0, 0)
o = FileSystemEntry(f"{c.name}", is_dir=True)
dirs.append((c, o))
elif c.is_file():
stat = c.stat()
o = FileSystemEntry(c.name, False, stat.st_size, stat.st_mtime)
o = FileSystemEntry(c.name, size=stat.st_size, mdate=stat.st_mtime)
files.append(o)
# Files first, sorted by name
for o in sorted(files, key=attrgetter("name")):
Expand All @@ -76,12 +88,12 @@ def visit(node: Node, pth: Path):

for c in pth.iterdir():
if c.is_dir():
o = FileSystemEntry(f"{c.name}", True, 0, 0)
o = FileSystemEntry(f"{c.name}", is_dir=True)
pn = node.add(o)
visit(pn, c)
elif c.is_file():
stat = c.stat()
o = FileSystemEntry(c.name, False, stat.st_size, stat.st_mtime)
o = FileSystemEntry(c.name, size=stat.st_size, mdate=stat.st_mtime)
node.add(o)

visit(tree._root, path)
Expand Down
4 changes: 2 additions & 2 deletions nutree/mermaid.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@ def _node_to_mermaid_flowchart_iter(
add_root: bool = True,
unique_nodes: bool = True,
headers: Iterable[str] | None = None,
node_mapper: MermaidNodeMapperCallbackType | None | str = None,
edge_mapper: MermaidEdgeMapperCallbackType | None | str = None,
node_mapper: MermaidNodeMapperCallbackType | str | None = None,
edge_mapper: MermaidEdgeMapperCallbackType | str | None = None, # pyright: ignore[reportRedeclaration]
) -> Iterator[str]:
"""Generate Mermaid formatted output line-by-line.
Expand Down
Loading

0 comments on commit da5e178

Please sign in to comment.