From 2f526b64e52140433a4d273a838c554ea9cc9423 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juan=20Luis=20Cano=20Rodr=C3=ADguez?=
Date: Thu, 5 Oct 2023 12:18:05 +0200
Subject: [PATCH] Merge template `pyproject.toml` into one (#2926)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Turn default template into src-layout Python package

Signed-off-by: Juan Luis Cano Rodríguez

* Fix tests after metadata and tests moved

Signed-off-by: Juan Luis Cano Rodríguez

* Remove leftover mention of environment.yml

Signed-off-by: Juan Luis Cano Rodríguez

* Move src/requirements.txt one level up

Signed-off-by: Juan Luis Cano Rodríguez

* Make `kedro package` backwards compatible

Signed-off-by: Juan Luis Cano Rodríguez

* Add template changes to release notes

Signed-off-by: Juan Luis Cano Rodríguez

* Add test for coverage

Signed-off-by: SajidAlamQB <90610031+SajidAlamQB@users.noreply.github.com>

* lint

Signed-off-by: SajidAlamQB <90610031+SajidAlamQB@users.noreply.github.com>

* Fix outdated doc link

Signed-off-by: SajidAlamQB <90610031+SajidAlamQB@users.noreply.github.com>

---------

Signed-off-by: Juan Luis Cano Rodríguez
Signed-off-by: SajidAlamQB <90610031+SajidAlamQB@users.noreply.github.com>
Co-authored-by: SajidAlamQB <90610031+SajidAlamQB@users.noreply.github.com>
---
 RELEASE.md                                    |  2 +
 docs/source/deployment/airflow_astronomer.md  |  2 +-
 docs/source/deployment/aws_step_functions.md  |  2 +-
 .../databricks_ide_development_workflow.md    |  2 +-
 ...tabricks_notebooks_development_workflow.md |  2 +-
 docs/source/deployment/distributed.md         |  2 +-
 docs/source/deployment/single_machine.md      |  2 +-
 docs/source/development/commands_reference.md |  4 +-
 docs/source/development/linting.md            |  6 +--
 docs/source/experiment_tracking/index.md      |  6 +--
 docs/source/get_started/new_project.md        |  4 +-
 .../kedro_project_setup/dependencies.md       | 14 +++---
 .../nodes_and_pipelines/micro_packaging.md    |  2 +-
 .../tutorial/spaceflights_tutorial_faqs.md    |  2 +-
 docs/source/tutorial/tutorial_template.md     |  6 +--
 .../visualisation/kedro-viz_visualisation.md  |  2 +-
 .../visualise_charts_with_plotly.md           |  4 +-
 features/environment.py                       |  4 +-
 features/steps/cli_steps.py                   | 10 ++---
 .../{{ cookiecutter.repo_name }}/README.md    |  6 +--
 .../pyproject.toml                            | 37 +++++++++++++++-
 .../{src => }/requirements.txt                |  0
 .../src/pyproject.toml                        | 30 -------------
 .../{src => }/tests/__init__.py               |  0
 .../{src => }/tests/pipelines/__init__.py     |  0
 .../{src => }/tests/test_run.py               |  0
 kedro/framework/cli/micropkg.py               |  6 +--
 kedro/framework/cli/project.py                | 18 ++++++--
 kedro/framework/cli/starters.py               |  2 +-
 kedro/framework/cli/utils.py                  |  2 +-
 .../{{ cookiecutter.repo_name }}/README.md    |  8 ++--
 .../pyproject.toml                            | 35 ++++++++++++++++
 .../{src => }/requirements.txt                |  0
 .../src/pyproject.toml                        | 32 --------------
 .../{src => }/tests/__init__.py               |  0
 .../{src => }/tests/pipelines/__init__.py     |  0
 .../{src => }/tests/test_run.py               |  0
 tests/framework/cli/micropkg/conftest.py      |  2 +-
 .../micropkg/test_micropkg_requirements.py    | 14 +++----
 tests/framework/cli/pipeline/conftest.py      |  2 +-
 tests/framework/cli/test_jupyter.py           |  6 +--
 tests/framework/cli/test_project.py           | 42 +++++++++++++++++--
 tests/framework/cli/test_starters.py          |  6 +--
 43 files changed, 188 insertions(+), 138 deletions(-)
 rename features/steps/test_starter/{{ cookiecutter.repo_name }}/{src => }/requirements.txt (100%)
 delete mode 100644 features/steps/test_starter/{{ cookiecutter.repo_name }}/src/pyproject.toml
 rename features/steps/test_starter/{{ cookiecutter.repo_name }}/{src =>
}/tests/__init__.py (100%) rename features/steps/test_starter/{{ cookiecutter.repo_name }}/{src => }/tests/pipelines/__init__.py (100%) rename features/steps/test_starter/{{ cookiecutter.repo_name }}/{src => }/tests/test_run.py (100%) rename kedro/templates/project/{{ cookiecutter.repo_name }}/{src => }/requirements.txt (100%) delete mode 100644 kedro/templates/project/{{ cookiecutter.repo_name }}/src/pyproject.toml rename kedro/templates/project/{{ cookiecutter.repo_name }}/{src => }/tests/__init__.py (100%) rename kedro/templates/project/{{ cookiecutter.repo_name }}/{src => }/tests/pipelines/__init__.py (100%) rename kedro/templates/project/{{ cookiecutter.repo_name }}/{src => }/tests/test_run.py (100%) diff --git a/RELEASE.md b/RELEASE.md index 091f6a0790..5aba523fda 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -21,6 +21,8 @@ * Removed deprecated `kedro.extras.ColorHandler`. * The Kedro IPython extension is no longer available as `%load_ext kedro.extras.extensions.ipython`; use `%load_ext kedro.ipython` instead. * Anonymous nodes are given default names of the form `([in1;in2;...]) -> [out1;out2;...]`, with the names of inputs and outputs separated by semicolons. +* The default project template now has one `pyproject.toml` at the root of the project (containing both the packaging metadata and the Kedro build config). +* The `requirements.txt` in the default project template moved to the root of the project as well (hence dependencies are now installed with `pip install -r requirements.txt` instead of `pip install -r src/requirements.txt`). ## Migration guide from Kedro 0.18.* to 0.19.* ### DataSets diff --git a/docs/source/deployment/airflow_astronomer.md b/docs/source/deployment/airflow_astronomer.md index b21408156b..d99897c44e 100644 --- a/docs/source/deployment/airflow_astronomer.md +++ b/docs/source/deployment/airflow_astronomer.md @@ -84,7 +84,7 @@ To follow this tutorial, ensure you have the following: pip install kedro-airflow~=0.4 ``` -5. Run `pip install -r src/requirements.txt` to install all dependencies. +5. Run `pip install -r requirements.txt` to install all dependencies. ### Deployment process diff --git a/docs/source/deployment/aws_step_functions.md b/docs/source/deployment/aws_step_functions.md index 7a08ba3416..b932ad2250 100644 --- a/docs/source/deployment/aws_step_functions.md +++ b/docs/source/deployment/aws_step_functions.md @@ -55,7 +55,7 @@ The rest of the tutorial will explain each step in the deployment process above * Create a `conf/aws` directory in your Kedro project * Put a `catalog.yml` file in this directory with the following content -* Ensure that you have `s3fs>=0.3.0,<0.5` defined in your `src/requirements.txt` so the data can be read from S3. +* Ensure that you have `s3fs>=0.3.0,<0.5` defined in your `requirements.txt` so the data can be read from S3.
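The release note above describes the merged root `pyproject.toml`, which now carries both the packaging metadata and the Kedro build config. As a rough illustration (not Kedro's own bootstrapping code, which may load this differently), the `[tool.kedro]` table introduced by this template can be read from the project root with the standard library, assuming Python 3.11+ for `tomllib`:

```python
# Illustrative sketch only: read the Kedro build config from the merged
# root pyproject.toml. Requires Python 3.11+ for tomllib; Kedro's own
# project bootstrapping may read this file differently.
import tomllib
from pathlib import Path


def read_kedro_config(project_path: Path) -> dict:
    """Return the [tool.kedro] table from the project's root pyproject.toml."""
    with (project_path / "pyproject.toml").open("rb") as f:
        config = tomllib.load(f)
    # Keys added by the new template: package_name, project_name, kedro_init_version.
    return config["tool"]["kedro"]
```

The keys shown (`package_name`, `project_name`, `kedro_init_version`) match the `[tool.kedro]` table added to the template's `pyproject.toml` later in this patch.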
Click to expand diff --git a/docs/source/deployment/databricks/databricks_ide_development_workflow.md b/docs/source/deployment/databricks/databricks_ide_development_workflow.md index 0f32ee540a..46f7b6c00e 100644 --- a/docs/source/deployment/databricks/databricks_ide_development_workflow.md +++ b/docs/source/deployment/databricks/databricks_ide_development_workflow.md @@ -184,7 +184,7 @@ Open your newly-created notebook and create **four new cells** inside it. You wi 1. Before you import and run your Python code, you'll need to install your project's dependencies on the cluster attached to your notebook. Your project has a `requirements.txt` file for this purpose. Add the following code to the first new cell to install the dependencies: ```ipython -%pip install -r "/Workspace/Repos//iris-databricks/src/requirements.txt" +%pip install -r "/Workspace/Repos//iris-databricks/requirements.txt" ``` 2. To run your project in your notebook, you must load the Kedro IPython extension. Add the following code to the second new cell to load the IPython extension: diff --git a/docs/source/deployment/databricks/databricks_notebooks_development_workflow.md b/docs/source/deployment/databricks/databricks_notebooks_development_workflow.md index b3136d9a9d..5d023b5798 100644 --- a/docs/source/deployment/databricks/databricks_notebooks_development_workflow.md +++ b/docs/source/deployment/databricks/databricks_notebooks_development_workflow.md @@ -213,7 +213,7 @@ Create **four new cells** inside your notebook. You will fill these cells with c 1. Before you import and run your Python code, you'll need to install your project's dependencies on the cluster attached to your notebook. Your project has a `requirements.txt` file for this purpose. Add the following code to the first new cell to install the dependencies: ```ipython -%pip install -r "/Workspace/Repos//iris-databricks/src/requirements.txt" +%pip install -r "/Workspace/Repos//iris-databricks/requirements.txt" ``` 2. To run your project in your notebook, you must load the Kedro IPython extension. Add the following code to the second new cell to load the IPython extension: diff --git a/docs/source/deployment/distributed.md b/docs/source/deployment/distributed.md index 2b005afe42..af49ae5fec 100644 --- a/docs/source/deployment/distributed.md +++ b/docs/source/deployment/distributed.md @@ -14,7 +14,7 @@ For better dependency management, we encourage you to containerise the entire pi Firstly make sure your [project requirements are up-to-date](../kedro_project_setup/dependencies.md) by running: ```bash -pip-compile --output-file=/src/requirements.txt --input-file=/src/requirements.txt +pip-compile --output-file=/requirements.txt --input-file=/requirements.txt ``` We then recommend the [`Kedro-Docker`](https://github.com/kedro-org/kedro-plugins/tree/main/kedro-docker) plugin to streamline the process of building the image. [Instructions for using this are in the plugin's README.md](https://github.com/kedro-org/kedro-plugins/blob/main/README.md). 
diff --git a/docs/source/deployment/single_machine.md b/docs/source/deployment/single_machine.md index 0964a6a968..b70ddfcf60 100644 --- a/docs/source/deployment/single_machine.md +++ b/docs/source/deployment/single_machine.md @@ -114,7 +114,7 @@ conda install -c conda-forge kedro Install the project’s dependencies, by running the following in the project's root directory: ```console -pip install -r src/requirements.txt +pip install -r requirements.txt ``` After having installed your project on the remote server you can run the Kedro project as follows from the root of the project: diff --git a/docs/source/development/commands_reference.md b/docs/source/development/commands_reference.md index 4dffb241b6..2481aab85b 100644 --- a/docs/source/development/commands_reference.md +++ b/docs/source/development/commands_reference.md @@ -273,10 +273,10 @@ def run( #### Install all package dependencies -The following runs [`pip`](https://github.com/pypa/pip) to install all package dependencies specified in `src/requirements.txt`: +The following runs [`pip`](https://github.com/pypa/pip) to install all package dependencies specified in `requirements.txt`: ```bash -pip install -r src/requirements.txt +pip install -r requirements.txt ``` For further information, see the [documentation on installing project-specific dependencies](../kedro_project_setup/dependencies.md#install-project-specific-dependencies). diff --git a/docs/source/development/linting.md b/docs/source/development/linting.md index c4d2631848..8beec39d1a 100644 --- a/docs/source/development/linting.md +++ b/docs/source/development/linting.md @@ -20,7 +20,7 @@ type. [You can read more in the `isort` documentation](https://pycqa.github.io/i ### Install the tools -Install `black` and `ruff` by adding the following lines to your project's `src/requirements.txt` +Install `black` and `ruff` by adding the following lines to your project's `requirements.txt` file: ```text black # Used for formatting code @@ -30,7 +30,7 @@ ruff # Used for linting, formatting and sorting module imports To install all the project-specific dependencies, including the linting tools, navigate to the root directory of the project and run: ```bash -pip install -r src/requirements.txt +pip install -r requirements.txt ``` Alternatively, you can individually install the linting tools using the following shell commands: ```bash @@ -72,7 +72,7 @@ These hooks are run before committing your code to your repositories to automati making code reviews easier and less time-consuming. 
### Install `pre-commit` -You can install `pre-commit` along with other dependencies by including it in the `src/requirements.txt` file of your +You can install `pre-commit` along with other dependencies by including it in the `requirements.txt` file of your Kedro project by adding the following line: ```text pre-commit diff --git a/docs/source/experiment_tracking/index.md b/docs/source/experiment_tracking/index.md index 85c943e05c..15aeeefe6d 100644 --- a/docs/source/experiment_tracking/index.md +++ b/docs/source/experiment_tracking/index.md @@ -72,7 +72,7 @@ cd spaceflights Install the project's dependencies: ```bash -pip install -r src/requirements.txt +pip install -r requirements.txt ``` ## Set up the session store @@ -268,7 +268,7 @@ In this section, we illustrate how to compare Matplotlib plots across experiment ### Update the dependencies -Update the `src/requirements.txt` file in your Kedro project by adding the following dataset to enable Matplotlib for your project: +Update the `requirements.txt` file in your Kedro project by adding the following dataset to enable Matplotlib for your project: ```text kedro-datasets[matplotlib.MatplotlibWriter]~=1.1 @@ -278,7 +278,7 @@ seaborn~=0.12.1 And install the requirements with: ```bash -pip install -r src/requirements.txt +pip install -r requirements.txt ``` ### Add a plotting node diff --git a/docs/source/get_started/new_project.md b/docs/source/get_started/new_project.md index 1048c49e17..6bdc3cbd86 100644 --- a/docs/source/get_started/new_project.md +++ b/docs/source/get_started/new_project.md @@ -11,7 +11,7 @@ There are a few ways to create a new project once you have [set up Kedro](instal Once you've created a project: -* You need to **navigate to its project folder** and **install its dependencies**: `pip install -r src/requirements.txt` +* You need to **navigate to its project folder** and **install its dependencies**: `pip install -r requirements.txt` * **To run the project**: `kedro run` * **To visualise the project**: `kedro viz` @@ -79,7 +79,7 @@ kedro new --starter=pandas-iris However you create a Kedro project, once `kedro new` has completed, the next step is to navigate to the project folder (`cd `) and install dependencies with `pip` as follows: ```bash -pip install -r src/requirements.txt +pip install -r requirements.txt ``` Now run the project: diff --git a/docs/source/kedro_project_setup/dependencies.md b/docs/source/kedro_project_setup/dependencies.md index 6827bf10be..598b186592 100644 --- a/docs/source/kedro_project_setup/dependencies.md +++ b/docs/source/kedro_project_setup/dependencies.md @@ -5,30 +5,30 @@ Both `pip install kedro` and `conda install -c conda-forge kedro` install the co When you create a project, you then introduce additional dependencies for the tasks it performs. ## Project-specific dependencies -You can specify a project's exact dependencies in the `src/requirements.txt` file to make it easier for you and others to run your project in the future, +You can specify a project's exact dependencies in the `requirements.txt` file to make it easier for you and others to run your project in the future, and to avoid version conflicts downstream. This can be achieved with the help of [`pip-tools`](https://pypi.org/project/pip-tools/). 
To install `pip-tools` in your virtual environment, run the following command: ```bash pip install pip-tools ``` -To add or remove dependencies to a project, edit the `src/requirements.txt` file, then run the following: +To add or remove dependencies to a project, edit the `requirements.txt` file, then run the following: ```bash -pip-compile /src/requirements.txt --output-file /src/requirements.lock +pip-compile /requirements.txt --output-file /requirements.lock ``` This will [pip compile](https://github.com/jazzband/pip-tools#example-usage-for-pip-compile) the requirements listed in -the `src/requirements.txt` file into a `src/requirements.lock` that specifies a list of pinned project dependencies +the `requirements.txt` file into a `requirements.lock` that specifies a list of pinned project dependencies (those with a strict version). You can also use this command with additional CLI arguments such as `--generate-hashes` to use `pip`'s Hash Checking Mode or `--upgrade-package` to update specific packages to the latest or specific versions. [Check out the `pip-tools` documentation](https://pypi.org/project/pip-tools/) for more information. ```{note} -The `src/requirements.txt` file contains "source" requirements, while `src/requirements.lock` contains the compiled version of those and requires no manual updates. +The `requirements.txt` file contains "source" requirements, while `src/requirements.lock` contains the compiled version of those and requires no manual updates. ``` -To further update the project requirements, modify the `src/requirements.txt` file (not `src/requirements.lock`) and re-run the `pip-compile` command above. +To further update the project requirements, modify the `requirements.txt` file (not `src/requirements.lock`) and re-run the `pip-compile` command above. 
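To automate the `pip-compile` step documented above, a small wrapper can shell out to `pip-tools` against the root-level `requirements.txt`; a minimal sketch, assuming `pip-tools` is installed and the project follows the new template layout:

```python
# Minimal sketch: pin the root-level requirements.txt into requirements.lock,
# mirroring the pip-compile command documented above. Assumes pip-tools is
# installed in the active environment; paths follow the new template layout.
import subprocess
from pathlib import Path

project_root = Path(".")  # hypothetical: path to the Kedro project root

subprocess.run(
    [
        "pip-compile",
        str(project_root / "requirements.txt"),
        "--output-file",
        str(project_root / "requirements.lock"),
    ],
    check=True,
)
```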
## Install project-specific dependencies @@ -36,7 +36,7 @@ To further update the project requirements, modify the `src/requirements.txt` fi To install the project-specific dependencies, navigate to the root directory of the project and run: ```bash -pip install -r src/requirements.txt +pip install -r requirements.txt ``` ## Workflow dependencies diff --git a/docs/source/nodes_and_pipelines/micro_packaging.md b/docs/source/nodes_and_pipelines/micro_packaging.md index 3ad1ddcc69..79c882b390 100644 --- a/docs/source/nodes_and_pipelines/micro_packaging.md +++ b/docs/source/nodes_and_pipelines/micro_packaging.md @@ -10,7 +10,7 @@ You can package a micro-package by executing: `kedro micropkg package =5.3, <7.0", + "Jinja2<3.1.0", + "myst-parser~=0.17.2", +] + +[tool.setuptools.dynamic] +dependencies = {file = "requirements.txt"} +version = {attr = "{{ cookiecutter.python_package }}.__version__"} + +[tool.setuptools.packages.find] +where = ["src"] +namespaces = false + [tool.kedro] -project_name = "{{ cookiecutter.project_name }}" -project_version = "{{ cookiecutter.kedro_version }}" package_name = "{{ cookiecutter.python_package }}" +project_name = "{{ cookiecutter.project_name }}" +kedro_init_version = "{{ cookiecutter.kedro_version }}" [tool.pytest.ini_options] addopts = """ diff --git a/features/steps/test_starter/{{ cookiecutter.repo_name }}/src/requirements.txt b/features/steps/test_starter/{{ cookiecutter.repo_name }}/requirements.txt similarity index 100% rename from features/steps/test_starter/{{ cookiecutter.repo_name }}/src/requirements.txt rename to features/steps/test_starter/{{ cookiecutter.repo_name }}/requirements.txt diff --git a/features/steps/test_starter/{{ cookiecutter.repo_name }}/src/pyproject.toml b/features/steps/test_starter/{{ cookiecutter.repo_name }}/src/pyproject.toml deleted file mode 100644 index 73b6242480..0000000000 --- a/features/steps/test_starter/{{ cookiecutter.repo_name }}/src/pyproject.toml +++ /dev/null @@ -1,30 +0,0 @@ -[build-system] -requires = ["setuptools"] -build-backend = "setuptools.build_meta" - -[project] -name = "{{ cookiecutter.python_package }}" -version = "0.1" -dynamic = ["dependencies"] - -[tool.setuptools.packages.find] -exclude = ["tests"] - -[project.scripts] -{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main" - -[project.optional-dependencies] -docs = [ - "docutils<0.18.0", - "sphinx~=3.4.3", - "sphinx_rtd_theme==0.5.1", - "nbsphinx==0.8.1", - "sphinx-autodoc-typehints==1.11.1", - "sphinx_copybutton==0.3.1", - "ipykernel>=5.3, <7.0", - "Jinja2<3.1.0", - "myst-parser~=0.17.2", -] - -[tool.setuptools.dynamic] -dependencies = {file = "requirements.txt"} diff --git a/features/steps/test_starter/{{ cookiecutter.repo_name }}/src/tests/__init__.py b/features/steps/test_starter/{{ cookiecutter.repo_name }}/tests/__init__.py similarity index 100% rename from features/steps/test_starter/{{ cookiecutter.repo_name }}/src/tests/__init__.py rename to features/steps/test_starter/{{ cookiecutter.repo_name }}/tests/__init__.py diff --git a/features/steps/test_starter/{{ cookiecutter.repo_name }}/src/tests/pipelines/__init__.py b/features/steps/test_starter/{{ cookiecutter.repo_name }}/tests/pipelines/__init__.py similarity index 100% rename from features/steps/test_starter/{{ cookiecutter.repo_name }}/src/tests/pipelines/__init__.py rename to features/steps/test_starter/{{ cookiecutter.repo_name }}/tests/pipelines/__init__.py diff --git a/features/steps/test_starter/{{ cookiecutter.repo_name }}/src/tests/test_run.py 
b/features/steps/test_starter/{{ cookiecutter.repo_name }}/tests/test_run.py similarity index 100% rename from features/steps/test_starter/{{ cookiecutter.repo_name }}/src/tests/test_run.py rename to features/steps/test_starter/{{ cookiecutter.repo_name }}/tests/test_run.py diff --git a/kedro/framework/cli/micropkg.py b/kedro/framework/cli/micropkg.py index d063659833..9fabcba31e 100644 --- a/kedro/framework/cli/micropkg.py +++ b/kedro/framework/cli/micropkg.py @@ -243,7 +243,7 @@ def _pull_package( # noqa: too-many-arguments package_reqs = _get_all_library_reqs(library_meta) if package_reqs: - requirements_txt = metadata.source_dir / "requirements.txt" + requirements_txt = metadata.project_path / "requirements.txt" _append_package_reqs(requirements_txt, package_reqs, package_name) _clean_pycache(temp_dir_path) @@ -962,8 +962,8 @@ def _append_package_reqs( file.write(sep.join(sorted_reqs)) click.secho( - "Use 'pip-compile src/requirements.txt --output-file src/requirements.lock' to compile " - "and 'pip install -r src/requirements.lock' to install the updated list of requirements." + "Use 'pip-compile requirements.txt --output-file requirements.lock' to compile " + "and 'pip install -r requirements.lock' to install the updated list of requirements." ) diff --git a/kedro/framework/cli/project.py b/kedro/framework/cli/project.py index e9286e71ad..315296b8e1 100644 --- a/kedro/framework/cli/project.py +++ b/kedro/framework/cli/project.py @@ -26,7 +26,7 @@ from kedro.utils import load_obj NO_DEPENDENCY_MESSAGE = """{module} is not installed. Please make sure {module} is in -{src}/requirements.txt and run 'pip install -r src/requirements.txt'.""" +requirements.txt and run 'pip install -r requirements.txt'.""" LINT_CHECK_ONLY_HELP = """Check the files for style guide violations, unsorted / unformatted imports, and unblackened Python code without modifying the files.""" OPEN_ARG_HELP = """Open the documentation in your default browser after building.""" @@ -84,7 +84,17 @@ def ipython(metadata: ProjectMetadata, env, args, **kwargs): # noqa: unused-arg @click.pass_obj # this will pass the metadata as first argument def package(metadata: ProjectMetadata): """Package the project as a Python wheel.""" - source_path = metadata.source_dir + # Even if the user decides for the older setup.py on purpose, + # pyproject.toml is needed for Kedro metadata + if (metadata.project_path / "pyproject.toml").is_file(): + metadata_dir = metadata.project_path + destination_dir = "dist" + else: + # Assume it's an old Kedro project, packaging metadata was under src + # (could be pyproject.toml or setup.py, it's not important) + metadata_dir = metadata.source_dir + destination_dir = "../dist" + call( [ sys.executable, @@ -92,9 +102,9 @@ def package(metadata: ProjectMetadata): "build", "--wheel", "--outdir", - "../dist", + destination_dir, ], - cwd=str(source_path), + cwd=str(metadata_dir), ) directory = ( diff --git a/kedro/framework/cli/starters.py b/kedro/framework/cli/starters.py index 25e68f3699..a3a2a80541 100644 --- a/kedro/framework/cli/starters.py +++ b/kedro/framework/cli/starters.py @@ -371,7 +371,7 @@ def _create_project(template_path: str, cookiecutter_args: dict[str, Any]): ) click.secho( "\nA best-practice setup includes initialising git and creating " - "a virtual environment before running 'pip install -r src/requirements.txt' to install " + "a virtual environment before running 'pip install -r requirements.txt' to install " "project-specific dependencies. 
Refer to the Kedro documentation: " "https://kedro.readthedocs.io/" ) diff --git a/kedro/framework/cli/utils.py b/kedro/framework/cli/utils.py index 3240c3c4ab..cc56be5d94 100644 --- a/kedro/framework/cli/utils.py +++ b/kedro/framework/cli/utils.py @@ -354,7 +354,7 @@ def _check_module_importable(module_name: str) -> None: except ImportError as exc: raise KedroCliError( f"Module '{module_name}' not found. Make sure to install required project " - f"dependencies by running the 'pip install -r src/requirements.txt' command first." + f"dependencies by running the 'pip install -r requirements.txt' command first." ) from exc diff --git a/kedro/templates/project/{{ cookiecutter.repo_name }}/README.md b/kedro/templates/project/{{ cookiecutter.repo_name }}/README.md index 19d9afc130..9abe04f279 100644 --- a/kedro/templates/project/{{ cookiecutter.repo_name }}/README.md +++ b/kedro/templates/project/{{ cookiecutter.repo_name }}/README.md @@ -17,12 +17,12 @@ In order to get the best out of the template: ## How to install dependencies -Declare any dependencies in `src/requirements.txt` for `pip` installation and `src/environment.yml` for `conda` installation. +Declare any dependencies in `requirements.txt` for `pip` installation. To install them, run: ``` -pip install -r src/requirements.txt +pip install -r requirements.txt ``` ## How to run your Kedro pipeline @@ -46,7 +46,7 @@ To configure the coverage threshold, look at the `.coveragerc` file. ## Project dependencies -To see and update the dependency requirements for your project use `src/requirements.txt`. You can install the project requirements with `pip install -r src/requirements.txt`. +To see and update the dependency requirements for your project use `requirements.txt`. You can install the project requirements with `pip install -r requirements.txt`. [Further information about project dependencies](https://docs.kedro.org/en/stable/kedro_project_setup/dependencies.html#project-specific-dependencies) @@ -54,7 +54,7 @@ To see and update the dependency requirements for your project use `src/requirem > Note: Using `kedro jupyter` or `kedro ipython` to run your notebook provides these variables in scope: `context`, `catalog`, and `startup_error`. > -> Jupyter, JupyterLab, and IPython are already included in the project requirements by default, so once you have run `pip install -r src/requirements.txt` you will not need to take any extra steps before you use them. +> Jupyter, JupyterLab, and IPython are already included in the project requirements by default, so once you have run `pip install -r requirements.txt` you will not need to take any extra steps before you use them. 
### Jupyter To use Jupyter notebooks in your Kedro project, you need to install Jupyter: diff --git a/kedro/templates/project/{{ cookiecutter.repo_name }}/pyproject.toml b/kedro/templates/project/{{ cookiecutter.repo_name }}/pyproject.toml index 48be962dc7..dc3a98bbb9 100644 --- a/kedro/templates/project/{{ cookiecutter.repo_name }}/pyproject.toml +++ b/kedro/templates/project/{{ cookiecutter.repo_name }}/pyproject.toml @@ -1,3 +1,38 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "{{ cookiecutter.python_package }}" +readme = "README.md" +dynamic = ["dependencies", "version"] + +[project.scripts] +{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main" + +[project.entry-points."kedro.hooks"] + +[project.optional-dependencies] +docs = [ + "docutils<0.18.0", + "sphinx~=3.4.3", + "sphinx_rtd_theme==0.5.1", + "nbsphinx==0.8.1", + "sphinx-autodoc-typehints==1.11.1", + "sphinx_copybutton==0.3.1", + "ipykernel>=5.3, <7.0", + "Jinja2<3.1.0", + "myst-parser~=0.17.2", +] + +[tool.setuptools.dynamic] +dependencies = {file = "requirements.txt"} +version = {attr = "{{ cookiecutter.python_package }}.__version__"} + +[tool.setuptools.packages.find] +where = ["src"] +namespaces = false + [tool.kedro] package_name = "{{ cookiecutter.python_package }}" project_name = "{{ cookiecutter.project_name }}" diff --git a/kedro/templates/project/{{ cookiecutter.repo_name }}/src/requirements.txt b/kedro/templates/project/{{ cookiecutter.repo_name }}/requirements.txt similarity index 100% rename from kedro/templates/project/{{ cookiecutter.repo_name }}/src/requirements.txt rename to kedro/templates/project/{{ cookiecutter.repo_name }}/requirements.txt diff --git a/kedro/templates/project/{{ cookiecutter.repo_name }}/src/pyproject.toml b/kedro/templates/project/{{ cookiecutter.repo_name }}/src/pyproject.toml deleted file mode 100644 index 5157ec7b3b..0000000000 --- a/kedro/templates/project/{{ cookiecutter.repo_name }}/src/pyproject.toml +++ /dev/null @@ -1,32 +0,0 @@ -[build-system] -requires = ["setuptools"] -build-backend = "setuptools.build_meta" - -[project] -name = "{{ cookiecutter.python_package }}" -version = "0.1" -dynamic = ["dependencies"] - -[tool.setuptools.packages.find] -exclude = ["tests"] - -[project.scripts] -{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main" - -[project.optional-dependencies] -docs = [ - "docutils<0.18.0", - "sphinx~=3.4.3", - "sphinx_rtd_theme==0.5.1", - "nbsphinx==0.8.1", - "sphinx-autodoc-typehints==1.11.1", - "sphinx_copybutton==0.3.1", - "ipykernel>=5.3, <7.0", - "Jinja2<3.1.0", - "myst-parser~=0.17.2", -] - -[project.entry-points."kedro.hooks"] - -[tool.setuptools.dynamic] -dependencies = {file = "requirements.txt"} diff --git a/kedro/templates/project/{{ cookiecutter.repo_name }}/src/tests/__init__.py b/kedro/templates/project/{{ cookiecutter.repo_name }}/tests/__init__.py similarity index 100% rename from kedro/templates/project/{{ cookiecutter.repo_name }}/src/tests/__init__.py rename to kedro/templates/project/{{ cookiecutter.repo_name }}/tests/__init__.py diff --git a/kedro/templates/project/{{ cookiecutter.repo_name }}/src/tests/pipelines/__init__.py b/kedro/templates/project/{{ cookiecutter.repo_name }}/tests/pipelines/__init__.py similarity index 100% rename from kedro/templates/project/{{ cookiecutter.repo_name }}/src/tests/pipelines/__init__.py rename to kedro/templates/project/{{ cookiecutter.repo_name }}/tests/pipelines/__init__.py diff --git 
a/kedro/templates/project/{{ cookiecutter.repo_name }}/src/tests/test_run.py b/kedro/templates/project/{{ cookiecutter.repo_name }}/tests/test_run.py similarity index 100% rename from kedro/templates/project/{{ cookiecutter.repo_name }}/src/tests/test_run.py rename to kedro/templates/project/{{ cookiecutter.repo_name }}/tests/test_run.py diff --git a/tests/framework/cli/micropkg/conftest.py b/tests/framework/cli/micropkg/conftest.py index ff8348b755..5d241f9488 100644 --- a/tests/framework/cli/micropkg/conftest.py +++ b/tests/framework/cli/micropkg/conftest.py @@ -35,7 +35,7 @@ def cleanup_micropackages(fake_repo_path, fake_package_path): def cleanup_pipelines(fake_repo_path, fake_package_path): pipes_path = fake_package_path / "pipelines" old_pipelines = {p.name for p in pipes_path.iterdir() if p.is_dir()} - requirements_txt = fake_repo_path / "src" / "requirements.txt" + requirements_txt = fake_repo_path / "requirements.txt" requirements = requirements_txt.read_text() yield diff --git a/tests/framework/cli/micropkg/test_micropkg_requirements.py b/tests/framework/cli/micropkg/test_micropkg_requirements.py index 4b424073b7..e7f41e0b23 100644 --- a/tests/framework/cli/micropkg/test_micropkg_requirements.py +++ b/tests/framework/cli/micropkg/test_micropkg_requirements.py @@ -81,7 +81,7 @@ def test_existing_complex_project_requirements_txt( self, fake_project_cli, fake_metadata, fake_package_path, fake_repo_path ): """Pipeline requirements.txt and project requirements.txt.""" - project_requirements_txt = fake_repo_path / "src" / "requirements.txt" + project_requirements_txt = fake_repo_path / "requirements.txt" with open(project_requirements_txt, "a", encoding="utf-8") as file: file.write(COMPLEX_REQUIREMENTS) existing_requirements = _safe_parse_requirements( @@ -112,7 +112,7 @@ def test_existing_project_requirements_txt( self, fake_project_cli, fake_metadata, fake_package_path, fake_repo_path ): """Pipeline requirements.txt and project requirements.txt.""" - project_requirements_txt = fake_repo_path / "src" / "requirements.txt" + project_requirements_txt = fake_repo_path / "requirements.txt" existing_requirements = _safe_parse_requirements( project_requirements_txt.read_text() ) @@ -146,7 +146,7 @@ def test_missing_project_requirements_txt( project level.""" # Remove project requirements.txt - project_requirements_txt = fake_repo_path / "src" / "requirements.txt" + project_requirements_txt = fake_repo_path / "requirements.txt" project_requirements_txt.unlink() self.call_pipeline_create(fake_project_cli, fake_metadata) @@ -176,7 +176,7 @@ def test_no_requirements( """No pipeline requirements.txt, and also no requirements.txt at project level.""" # Remove project requirements.txt - project_requirements_txt = fake_repo_path / "src" / "requirements.txt" + project_requirements_txt = fake_repo_path / "requirements.txt" project_requirements_txt.unlink() self.call_pipeline_create(fake_project_cli, fake_metadata) @@ -195,7 +195,7 @@ def test_all_requirements_already_covered( pipeline_requirements_txt = ( fake_package_path / "pipelines" / PIPELINE_NAME / "requirements.txt" ) - project_requirements_txt = fake_repo_path / "src" / "requirements.txt" + project_requirements_txt = fake_repo_path / "requirements.txt" pipeline_requirements_txt.write_text(SIMPLE_REQUIREMENTS) project_requirements_txt.write_text(SIMPLE_REQUIREMENTS) @@ -214,7 +214,7 @@ def test_no_pipeline_requirements_txt( create project requirements.txt.""" # Remove project requirements.txt - project_requirements_txt = fake_repo_path / "src" 
/ "requirements.txt" + project_requirements_txt = fake_repo_path / "requirements.txt" project_requirements_txt.unlink() self.call_pipeline_create(fake_project_cli, fake_metadata) @@ -231,7 +231,7 @@ def test_empty_pipeline_requirements_txt( create project requirements.txt.""" # Remove project requirements.txt - project_requirements_txt = fake_repo_path / "src" / "requirements.txt" + project_requirements_txt = fake_repo_path / "requirements.txt" project_requirements_txt.unlink() self.call_pipeline_create(fake_project_cli, fake_metadata) diff --git a/tests/framework/cli/pipeline/conftest.py b/tests/framework/cli/pipeline/conftest.py index 672fee3eb6..f192ae7e3b 100644 --- a/tests/framework/cli/pipeline/conftest.py +++ b/tests/framework/cli/pipeline/conftest.py @@ -49,7 +49,7 @@ def cleanup_micropackages(fake_repo_path, fake_package_path): def cleanup_pipelines(fake_repo_path, fake_package_path): pipes_path = fake_package_path / "pipelines" old_pipelines = {p.name for p in pipes_path.iterdir() if p.is_dir()} - requirements_txt = fake_repo_path / "src" / "requirements.txt" + requirements_txt = fake_repo_path / "requirements.txt" requirements = requirements_txt.read_text() yield diff --git a/tests/framework/cli/test_jupyter.py b/tests/framework/cli/test_jupyter.py index 20a5dc9ad0..e51892e720 100644 --- a/tests/framework/cli/test_jupyter.py +++ b/tests/framework/cli/test_jupyter.py @@ -45,7 +45,7 @@ def test_fail_no_jupyter(self, fake_project_cli, mocker): assert result.exit_code error = ( "Module 'notebook' not found. Make sure to install required project " - "dependencies by running the 'pip install -r src/requirements.txt' command first." + "dependencies by running the 'pip install -r requirements.txt' command first." ) assert error in result.output @@ -95,7 +95,7 @@ def test_fail_no_jupyter(self, fake_project_cli, mocker): assert result.exit_code error = ( "Module 'notebook' not found. Make sure to install required project " - "dependencies by running the 'pip install -r src/requirements.txt' command first." + "dependencies by running the 'pip install -r requirements.txt' command first." ) assert error in result.output @@ -145,7 +145,7 @@ def test_fail_no_jupyter(self, fake_project_cli, mocker): assert result.exit_code error = ( "Module 'jupyterlab' not found. Make sure to install required project " - "dependencies by running the 'pip install -r src/requirements.txt' command first." + "dependencies by running the 'pip install -r requirements.txt' command first." ) assert error in result.output diff --git a/tests/framework/cli/test_project.py b/tests/framework/cli/test_project.py index 071bc6640d..52bee9ddb6 100644 --- a/tests/framework/cli/test_project.py +++ b/tests/framework/cli/test_project.py @@ -61,7 +61,7 @@ def test_fail_no_ipython(self, fake_project_cli, mocker): assert result.exit_code error = ( "Module 'IPython' not found. Make sure to install required project " - "dependencies by running the 'pip install -r src/requirements.txt' command first." + "dependencies by running the 'pip install -r requirements.txt' command first." 
) assert error in result.output @@ -82,9 +82,9 @@ def test_happy_path( "build", "--wheel", "--outdir", - "../dist", + "dist", ], - cwd=str(fake_repo_path / "src"), + cwd=str(fake_repo_path), ), mocker.call( [ @@ -98,3 +98,39 @@ def test_happy_path( ), ] ) + + def test_no_pyproject_toml( + self, call_mock, fake_project_cli, mocker, fake_repo_path, fake_metadata + ): + # Assume no pyproject.toml + (fake_metadata.project_path / "pyproject.toml").unlink(missing_ok=True) + + result = CliRunner().invoke(fake_project_cli, ["package"], obj=fake_metadata) + assert not result.exit_code, result.stdout + + # destination_dir will be different since pyproject.toml doesn't exist + call_mock.assert_has_calls( + [ + mocker.call( + [ + sys.executable, + "-m", + "build", + "--wheel", + "--outdir", + "../dist", + ], + cwd=str(fake_metadata.source_dir), + ), + mocker.call( + [ + "tar", + "--exclude=local/*.yml", + "-czf", + f"dist/conf-{fake_metadata.package_name}.tar.gz", + f"--directory={fake_metadata.project_path}", + "conf", + ] + ), + ] + ) diff --git a/tests/framework/cli/test_starters.py b/tests/framework/cli/test_starters.py index b8119c3268..7dde0b231d 100644 --- a/tests/framework/cli/test_starters.py +++ b/tests/framework/cli/test_starters.py @@ -17,7 +17,7 @@ KedroStarterSpec, ) -FILES_IN_TEMPLATE = 28 +FILES_IN_TEMPLATE = 27 @pytest.fixture @@ -70,9 +70,7 @@ def _assert_template_ok( assert (full_path / ".gitignore").is_file() assert project_name in (full_path / "README.md").read_text(encoding="utf-8") assert "KEDRO" in (full_path / ".gitignore").read_text(encoding="utf-8") - assert kedro_version in (full_path / "src" / "requirements.txt").read_text( - encoding="utf-8" - ) + assert kedro_version in (full_path / "requirements.txt").read_text(encoding="utf-8") assert (full_path / "src" / python_package / "__init__.py").is_file()
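Taken together, the `kedro package` change in `kedro/framework/cli/project.py` and the tests above implement a root-first build with a `src/` fallback. A condensed sketch of that decision follows (illustrative only, not the exact Kedro implementation; `project_path` and `source_dir` stand in for the corresponding `ProjectMetadata` attributes, and the `build` package is assumed to be installed):

```python
# Condensed sketch of the backwards-compatible packaging logic added to
# kedro/framework/cli/project.py in this patch (illustrative, not the exact code).
import subprocess
import sys
from pathlib import Path


def package_project(project_path: Path, source_dir: Path) -> None:
    """Build a wheel from the new root pyproject.toml, falling back to src/."""
    if (project_path / "pyproject.toml").is_file():
        # New-style project: packaging metadata lives at the project root.
        build_dir, outdir = project_path, "dist"
    else:
        # Old-style project: packaging metadata (pyproject.toml or setup.py)
        # still lives under src/, so build from there into ../dist.
        build_dir, outdir = source_dir, "../dist"

    subprocess.run(
        [sys.executable, "-m", "build", "--wheel", "--outdir", outdir],
        cwd=str(build_dir),
        check=True,
    )
```

With a new-style project the wheel lands in `dist/` at the project root, as asserted in `test_happy_path`; for an old-style project the fallback preserves the previous `../dist` behaviour checked in `test_no_pyproject_toml`.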