diff --git a/kornia/geometry/transform/affwarp.py b/kornia/geometry/transform/affwarp.py
index a2c0e2a00b..42789d607f 100644
--- a/kornia/geometry/transform/affwarp.py
+++ b/kornia/geometry/transform/affwarp.py
@@ -1,10 +1,9 @@
 from typing import Optional, Tuple, Union
 
 import torch
-from torch import nn
 
-from kornia.core import ones, ones_like, zeros
 from kornia.core import ImageModule as Module
+from kornia.core import ones, ones_like, zeros
 from kornia.filters import gaussian_blur2d
 from kornia.utils import _extract_device_dtype
 from kornia.utils.image import perform_keep_shape_image
diff --git a/kornia/geometry/transform/flips.py b/kornia/geometry/transform/flips.py
index 87df3bd0d1..cd0030347d 100644
--- a/kornia/geometry/transform/flips.py
+++ b/kornia/geometry/transform/flips.py
@@ -1,7 +1,7 @@
 import torch
 
-from kornia.core import Tensor
 from kornia.core import ImageModule as Module
+from kornia.core import Tensor
 
 __all__ = ["Vflip", "Hflip", "Rot180", "rot180", "hflip", "vflip"]
 
diff --git a/kornia/onnx/sequential.py b/kornia/onnx/sequential.py
index 4d30fea5dd..535639086c 100644
--- a/kornia/onnx/sequential.py
+++ b/kornia/onnx/sequential.py
@@ -23,12 +23,13 @@ class ONNXSequential:
             only one input and output node for each graph.
             If not None, `io_maps[0]` shall represent the `io_map` for combining the first and second ONNX models.
     """
+
     def __init__(
         self,
         *args: Union[onnx.ModelProto, str],  # type:ignore
         providers: Optional[list[str]] = None,
         session_options: Optional[ort.SessionOptions] = None,  # type:ignore
-        io_maps: Optional[list[tuple[str, str]]] = None
+        io_maps: Optional[list[tuple[str, str]]] = None,
     ) -> None:
         self.operators = args
         self._combined_op = self._combine(io_maps)
@@ -48,8 +49,8 @@ def _load_op(self, arg: Union[onnx.ModelProto, str]) -> onnx.ModelProto:  # type
         return arg
 
     def _combine(self, io_maps: Optional[list[tuple[str, str]]] = None) -> onnx.ModelProto:  # type:ignore
-        """ Combine the provided ONNX models into a single ONNX graph. Optionally, map inputs and outputs
-        between operators using the `io_map`.
+        """Combine the provided ONNX models into a single ONNX graph. Optionally, map inputs and outputs between
+        operators using the `io_map`.
 
         Args:
             io_maps:
@@ -58,7 +59,7 @@ def _combine(self, io_maps: Optional[list[tuple[str, str]]] = None) -> onnx.Mode
 
         Returns:
             onnx.ModelProto: The combined ONNX model as a single ONNX graph.
-
+
         Raises:
             ValueError: If no operators are provided for combination.
         """
@@ -88,12 +89,10 @@ def export(self, file_path: str) -> None:
         onnx.save(self._combined_op, file_path)
 
     def create_session(
-        self,
-        providers: Optional[list[str]] = None,
-        session_options: Optional[ort.SessionOptions] = None
+        self, providers: Optional[list[str]] = None, session_options: Optional[ort.SessionOptions] = None
     ) -> ort.InferenceSession:  # type:ignore
         """Create an optimized ONNXRuntime InferenceSession for the combined model.
-
+
         Args:
             providers:
                 Execution providers for ONNXRuntime (e.g., ['CUDAExecutionProvider', 'CPUExecutionProvider']).
@@ -112,7 +111,7 @@ def create_session(
         session = ort.InferenceSession(
             self._combined_op.SerializeToString(),
             sess_options=sess_options,
-            providers=providers or ['CPUExecutionProvider']
+            providers=providers or ["CPUExecutionProvider"],
         )
         return session
 
@@ -148,5 +147,5 @@ def __call__(self, *inputs: np.ndarray) -> list[np.ndarray]:  # type:ignore
 
         ort_input_values = {ort_inputs[i].name: inputs[i] for i in range(len(ort_inputs))}
         outputs = self._session.run(None, ort_input_values)
-
+
         return outputs
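
As a rough usage sketch of the ONNXSequential interface this patch reformats: the snippet below chains two already-exported ONNX graphs and runs inference through the resulting ONNXRuntime session, using only the constructor, export(), and __call__() signatures visible in the diff. The file names "op_a.onnx" and "op_b.onnx", the input shape, and the assumption that ONNXSequential is re-exported from kornia.onnx are placeholders, not something this patch establishes.

import numpy as np

from kornia.onnx import ONNXSequential

# Combine two exported ONNX graphs into a single model. An explicit io_maps
# list can be passed when the output/input node names of consecutive graphs
# do not line up automatically.
onnx_seq = ONNXSequential("op_a.onnx", "op_b.onnx")

# Optionally persist the combined graph to disk.
onnx_seq.export("combined_op.onnx")

# Run inference through the underlying ONNXRuntime session
# (CPUExecutionProvider is the fallback provider, per create_session above).
# The (1, 3, 256, 256) float32 input is a hypothetical shape for illustration.
dummy_input = np.random.rand(1, 3, 256, 256).astype(np.float32)
outputs = onnx_seq(dummy_input)
print([o.shape for o in outputs])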