Merge branch '4138-catalog-protocol' into 3995-data-catalog-2.0

ElenaKhaustova committed Sep 13, 2024
2 parents 0020095 + 6bf912c commit c7699ec

Showing 5 changed files with 18 additions and 18 deletions.
12 changes: 6 additions & 6 deletions kedro/framework/hooks/specs.py
@@ -63,7 +63,7 @@ def before_node_run(
Args:
node: The ``Node`` to run.
- catalog: A catalog containing the node's inputs and outputs.
+ catalog: An implemented instance of ``CatalogProtocol`` containing the node's inputs and outputs.
inputs: The dictionary of inputs dataset.
The keys are dataset names and the values are the actual loaded input data,
not the dataset instance.
@@ -93,7 +93,7 @@ def after_node_run( # noqa: PLR0913
Args:
node: The ``Node`` that ran.
- catalog: A catalog containing the node's inputs and outputs.
+ catalog: An implemented instance of ``CatalogProtocol`` containing the node's inputs and outputs.
inputs: The dictionary of inputs dataset.
The keys are dataset names and the values are the actual loaded input data,
not the dataset instance.
@@ -122,7 +122,7 @@ def on_node_error( # noqa: PLR0913
Args:
error: The uncaught exception thrown during the node run.
node: The ``Node`` to run.
- catalog: A catalog containing the node's inputs and outputs.
+ catalog: An implemented instance of ``CatalogProtocol`` containing the node's inputs and outputs.
inputs: The dictionary of inputs dataset.
The keys are dataset names and the values are the actual loaded input data,
not the dataset instance.
@@ -164,7 +164,7 @@ def before_pipeline_run(
}
pipeline: The ``Pipeline`` that will be run.
- catalog: The catalog to be used during the run.
+ catalog: An implemented instance of ``CatalogProtocol`` to be used during the run.
"""
pass

@@ -202,7 +202,7 @@ def after_pipeline_run(
run_result: The output of ``Pipeline`` run.
pipeline: The ``Pipeline`` that was run.
- catalog: The catalog used during the run.
+ catalog: An implemented instance of ``CatalogProtocol`` used during the run.
"""
pass

@@ -242,7 +242,7 @@ def on_pipeline_error(
}
pipeline: The ``Pipeline`` that will was run.
- catalog: The catalog used during the run.
+ catalog: An implemented instance of ``CatalogProtocol`` used during the run.
"""
pass

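For context (not part of the diff): with the reworded specs, a project hook can annotate its catalog argument with ``CatalogProtocol`` rather than a concrete ``DataCatalog``. Below is a minimal sketch; it assumes ``CatalogProtocol`` is importable from ``kedro.io`` on this branch, and the class name, dataset listing, and print statement are illustrative only.

```python
from typing import Any

from kedro.framework.hooks import hook_impl
from kedro.io import CatalogProtocol  # assumed import path on this branch
from kedro.pipeline.node import Node


class CatalogLoggingHooks:
    """Illustrative hook that relies only on the catalog protocol."""

    @hook_impl
    def before_node_run(
        self, node: Node, catalog: CatalogProtocol, inputs: dict[str, Any]
    ) -> None:
        # Any object satisfying the protocol exposes methods such as
        # ``list``/``load``/``save``, so this hook also works with custom
        # catalog implementations, not just DataCatalog.
        print(f"About to run {node.name}; datasets available: {catalog.list()}")
```

In a project, such a class would be registered the usual way, e.g. ``HOOKS = (CatalogLoggingHooks(),)`` in ``settings.py``.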
4 changes: 2 additions & 2 deletions kedro/runner/parallel_runner.py
@@ -73,7 +73,7 @@ def _run_node_synchronization( # noqa: PLR0913
Args:
node: The ``Node`` to run.
- catalog: A catalog containing the node's inputs and outputs.
+ catalog: An implemented instance of ``CatalogProtocol`` containing the node's inputs and outputs.
is_async: If True, the node inputs and outputs are loaded and saved
asynchronously with threads. Defaults to False.
session_id: The session id of the pipeline run.
@@ -250,7 +250,7 @@ def _run(
Args:
pipeline: The ``Pipeline`` to run.
- catalog: The `catalog from which to fetch data.
+ catalog: An implemented instance of ``CatalogProtocol`` from which to fetch data.
hook_manager: The ``PluginManager`` to activate hooks.
session_id: The id of the session.
16 changes: 8 additions & 8 deletions kedro/runner/runner.py
@@ -68,7 +68,7 @@ def run(
Args:
pipeline: The ``Pipeline`` to run.
- catalog: The catalog from which to fetch data.
+ catalog: An implemented instance of ``CatalogProtocol`` from which to fetch data.
hook_manager: The ``PluginManager`` to activate hooks.
session_id: The id of the session.
@@ -132,7 +132,7 @@ def run_only_missing(
Args:
pipeline: The ``Pipeline`` to run.
- catalog: The catalog from which to fetch data.
+ catalog: An implemented instance of ``CatalogProtocol`` from which to fetch data.
hook_manager: The ``PluginManager`` to activate hooks.
Raises:
ValueError: Raised when ``Pipeline`` inputs cannot be
@@ -173,7 +173,7 @@ def _run(
Args:
pipeline: The ``Pipeline`` to run.
- catalog: The `catalog from which to fetch data.
+ catalog: An implemented instance of ``CatalogProtocol`` from which to fetch data.
hook_manager: The ``PluginManager`` to activate hooks.
session_id: The id of the session.
@@ -194,7 +194,7 @@ def _suggest_resume_scenario(
Args:
pipeline: the ``Pipeline`` of the run.
done_nodes: the ``Node``s that executed successfully.
- catalog: the catalog of the run.
+ catalog: an implemented instance of ``CatalogProtocol`` of the run.
"""
remaining_nodes = set(pipeline.nodes) - set(done_nodes)
@@ -233,7 +233,7 @@ def _find_nodes_to_resume_from(
Args:
pipeline: the ``Pipeline`` to find starting nodes for.
unfinished_nodes: collection of ``Node``s that have not finished yet
- catalog: the catalog of the run.
+ catalog: an implemented instance of ``CatalogProtocol`` of the run.
Returns:
Set of node names to pass to pipeline.from_nodes() to continue
@@ -261,7 +261,7 @@ def _find_all_nodes_for_resumed_pipeline(
Args:
pipeline: the ``Pipeline`` to analyze.
unfinished_nodes: the iterable of ``Node``s which have not finished yet.
- catalog: the catalog of the run.
+ catalog: an implemented instance of ``CatalogProtocol`` of the run.
Returns:
A set containing all input unfinished ``Node``s and all remaining
@@ -314,7 +314,7 @@ def _enumerate_non_persistent_inputs(node: Node, catalog: CatalogProtocol) -> se
Args:
node: the ``Node`` to check the inputs of.
- catalog: the catalog of the run.
+ catalog: an implemented instance of ``CatalogProtocol`` of the run.
Returns:
Set of names of non-persistent inputs of given ``Node``.
@@ -388,7 +388,7 @@ def run_node(
Args:
node: The ``Node`` to run.
- catalog: A catalog containing the node's inputs and outputs.
+ catalog: An implemented instance of ``CatalogProtocol`` containing the node's inputs and outputs.
hook_manager: The ``PluginManager`` to activate hooks.
is_async: If True, the node inputs and outputs are loaded and saved
asynchronously with threads. Defaults to False.
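For context (not part of the diff): the runner docstrings now describe the catalog argument as any implemented instance of ``CatalogProtocol``. The built-in ``DataCatalog`` satisfies the protocol, so existing calls keep working, and a custom catalog exposing the same interface could be passed instead. A minimal sketch follows; dataset and pipeline names are illustrative, and it assumes ``hook_manager`` may be omitted from ``run()``, in which case a no-op manager is used.

```python
from kedro.io import DataCatalog, MemoryDataset
from kedro.pipeline import node, pipeline
from kedro.runner import SequentialRunner


def double_values(numbers: list[int]) -> list[int]:
    return [2 * n for n in numbers]


# DataCatalog is one implementation of CatalogProtocol; any object with the
# same interface would be accepted by the runners after this change.
catalog = DataCatalog({"numbers": MemoryDataset([1, 2, 3])})
pipe = pipeline([node(double_values, inputs="numbers", outputs="doubled")])

result = SequentialRunner().run(pipe, catalog)
print(result)  # the run's free outputs, e.g. {"doubled": [2, 4, 6]}
```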
2 changes: 1 addition & 1 deletion kedro/runner/sequential_runner.py
@@ -56,7 +56,7 @@ def _run(
Args:
pipeline: The ``Pipeline`` to run.
- catalog: The catalog from which to fetch data.
+ catalog: An implemented instance of ``CatalogProtocol`` from which to fetch data.
hook_manager: The ``PluginManager`` to activate hooks.
session_id: The id of the session.
2 changes: 1 addition & 1 deletion kedro/runner/thread_runner.py
@@ -95,7 +95,7 @@ def _run(
Args:
pipeline: The ``Pipeline`` to run.
- catalog: The catalog from which to fetch data.
+ catalog: An implemented instance of ``CatalogProtocol`` from which to fetch data.
hook_manager: The ``PluginManager`` to activate hooks.
session_id: The id of the session.
