diff --git a/.circleci/config.yml b/.circleci/config.yml
index 057d9c83..e7fd31fe 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -10,11 +10,12 @@ jobs:
         environment:
           POSTGRES_USER: root
     environment:
-      POSTGRES_TEST_HOST: localhost
-      POSTGRES_TEST_USER: root
-      POSTGRES_TEST_PASS: ''
-      POSTGRES_TEST_PORT: 5432
-      POSTGRES_TEST_DBNAME: circle_test
+      POSTGRES_HOST: localhost
+      POSTGRES_USER: root
+      DBT_ENV_SECRET_POSTGRES_PASS: ''
+      POSTGRES_PORT: 5432
+      POSTGRES_DATABASE: circle_test
+      POSTGRES_SCHEMA: dbt_utils_integration_tests_postgres
 
     steps:
       - checkout
@@ -71,8 +72,8 @@ jobs:
       - checkout
       - run: pip install --pre dbt-bigquery -r dev-requirements.txt
       - run:
-          name: "Set up credentials"
-          command: echo $BIGQUERY_SERVICE_ACCOUNT_JSON > ${HOME}/bigquery-service-key.json
+          name: Setup Environment Variables
+          command: echo 'export BIGQUERY_KEYFILE_JSON="$BIGQUERY_SERVICE_ACCOUNT_JSON"' >> "$BASH_ENV"
       - run:
           name: "Run OG Tests - BigQuery"
           command: ./run_test.sh bigquery
@@ -87,7 +88,8 @@ workflows:
   version: 2
   test-all:
     jobs:
-      - integration-postgres
+      - integration-postgres:
+          context: profile-postgres
       - integration-redshift:
           context: profile-redshift
           requires:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 00000000..fb93d720
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,139 @@
+# **what?**
+# Run tests for dbt-utils against supported adapters
+
+# **why?**
+# To ensure that dbt-utils works as expected with all supported adapters
+
+# **when?**
+# On every PR, on every push to main, and when manually triggered
+
+name: Package Integration Tests
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+  workflow_dispatch:
+    inputs:
+      adapter:
+        description: The adapter to test against. Defaults to all supported adapters when blank.
+        type: string
+        required: false
+
+env:
+  PYTHON_VERSION: "3.11"
+
+jobs:
+  determine-supported-adapters:
+    runs-on: ubuntu-latest
+    outputs:
+      adapters: ${{ steps.supported-adapters.outputs.adapters }}
+    steps:
+      - name: "Checkout ${{ github.event.repository }}"
+        uses: actions/checkout@v4
+
+      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+
+      - name: "Install tox"
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox
+
+      - name: "Get list of supported adapters or use input adapter only"
+        id: list-adapters
+        run: |
+          if [ -z "${{ inputs.adapter }}" ]; then
+            # github adds a pip freeze and a new line we need to strip out
+            source supported_adapters.env
+            echo $SUPPORTED_ADAPTERS
+            echo "test_adapters=$SUPPORTED_ADAPTERS" >> $GITHUB_OUTPUT
+          else
+            echo "test_adapters=${{ inputs.adapter }}" >> $GITHUB_OUTPUT
+          fi
+
+      - name: "Format adapter list for use as the matrix"
+        id: supported-adapters
+        run: |
+          # Convert to JSON array and output
+          supported_adapters=$(echo "${{ steps.list-adapters.outputs.test_adapters }}" | jq -Rc 'split(",")')
+          echo $supported_adapters
+          echo "adapters=$supported_adapters" >> $GITHUB_OUTPUT
+
+      - name: "[ANNOTATION] ${{ github.event.repository.name }} - Testing ${{ steps.supported-adapters.outputs.adapters }}"
+        run: |
+          title="${{ github.event.repository.name }} - adapters to test"
+          message="The workflow will run tests for the following adapters: ${{ steps.supported-adapters.outputs.adapters }}"
+          echo "::notice $title::$message"
+
+  run-tests:
+    runs-on: ubuntu-latest
+    needs: [determine-supported-adapters]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_USER: ${{ vars.POSTGRES_USER }}
+          POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASS }}
+          POSTGRES_DB: ${{ vars.POSTGRES_DATABASE }}
+          POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+    strategy:
+      fail-fast: false
+      matrix:
+        adapter: ${{fromJson(needs.determine-supported-adapters.outputs.adapters)}}
+
+    steps:
+      - name: "Checkout ${{ github.event.repository }} "
+        uses: actions/checkout@v4
+
+      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+
+      - name: "Install ${{ matrix.adapter }}"
+        run: |
+          python -m pip install --upgrade pip
+          pip install dbt-${{ matrix.adapter }}
+
+      - name: "Install tox"
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox
+
+      - name: "Run integration tests with tox on ${{ matrix.adapter }}"
+        run: |
+          tox -e dbt_integration_${{ matrix.adapter }}
+        env:
+          POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
+          POSTGRES_USER: ${{ vars.POSTGRES_USER }}
+          DBT_ENV_SECRET_POSTGRES_PASS: ${{ secrets.POSTGRES_PASS }}
+          POSTGRES_PORT: 5432
+          POSTGRES_DATABASE: ${{ vars.POSTGRES_DATABASE }}
+          POSTGRES_SCHEMA: "dbt_utils_integration_tests_postgres_${{ github.run_number }}"
+          SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
+          SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
+          DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_PASS }}
+          SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
+          SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
+          SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
+          SNOWFLAKE_SCHEMA: "dbt_utils_integration_tests_snowflake_${{ github.run_number }}"
+          REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
+          REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
+          DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_PASS }}
+          REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
+          REDSHIFT_SCHEMA: "dbt_utils_integration_tests_redshift_${{ github.run_number }}"
+          REDSHIFT_PORT: 5439
+          BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
+          BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
+          BIGQUERY_SCHEMA: "dbt_utils_integration_tests_bigquery_${{ github.run_number }}"
diff --git a/.gitignore b/.gitignore
index d155937f..69c484a8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,17 @@ dbt_modules/
 dbt_packages/
 logs/
 venv/
-env/
+__pycache__
+.tox/
+/.pytest_cache/
+
+
+# Ignore all directories that start with 'env' and can have any name after
+env*/
+
+# Do not ignore .env files in any directory and do not ignore .env directories
+!.env
+!*/.env/
+
+# But explicitly ignore test.env files
 test.env
-__pycache__
\ No newline at end of file
diff --git a/Makefile b/Makefile
index ead2355b..7481cf6d 100644
--- a/Makefile
+++ b/Makefile
@@ -2,7 +2,8 @@
 
 .PHONY: test
 test: ## Run the integration tests.
-	@./run_test.sh $(target)
+	@\
+	tox -e dbt_integration_$(target)
 
 .PHONY: dev
 dev: ## Installs dbt-* packages in develop mode along with development dependencies.
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 6ddd9858..4fc278a6 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -7,3 +7,4 @@ dbt-redshift@git+https://github.com/dbt-labs/dbt-redshift.git
 dbt-snowflake@git+https://github.com/dbt-labs/dbt-snowflake.git
 dbt-bigquery@git+https://github.com/dbt-labs/dbt-bigquery.git
 pytest-xdist
+tox>=3.13
diff --git a/integration_tests/.env/bigquery.env b/integration_tests/.env/bigquery.env
index 9d8c297d..e94ebe11 100644
--- a/integration_tests/.env/bigquery.env
+++ b/integration_tests/.env/bigquery.env
@@ -1,2 +1,3 @@
-BIGQUERY_SERVICE_KEY_PATH=
-BIGQUERY_TEST_DATABASE=
\ No newline at end of file
+BIGQUERY_KEYFILE_JSON=
+BIGQUERY_PROJECT=
+BIGQUERY_SCHEMA=dbt_utils_integration_tests_bigquery
diff --git a/integration_tests/.env/postgres.env b/integration_tests/.env/postgres.env
index c3f7dd88..a4af5473 100644
--- a/integration_tests/.env/postgres.env
+++ b/integration_tests/.env/postgres.env
@@ -1,5 +1,6 @@
-POSTGRES_TEST_HOST=localhost
-POSTGRES_TEST_USER=root
-POSTGRES_TEST_PASS=''
-POSTGRES_TEST_PORT=5432
-POSTGRES_TEST_DBNAME=circle_test
+POSTGRES_HOST=localhost
+POSTGRES_USER=root
+DBT_ENV_SECRET_POSTGRES_PASS=password
+POSTGRES_PORT=5432
+POSTGRES_DATABASE=dbt_utils_test
+POSTGRES_SCHEMA=dbt_utils_integration_tests_postgres
diff --git a/integration_tests/.env/redshift.env b/integration_tests/.env/redshift.env
index 77378d52..d242f59b 100644
--- a/integration_tests/.env/redshift.env
+++ b/integration_tests/.env/redshift.env
@@ -1,5 +1,6 @@
-REDSHIFT_TEST_HOST=
-REDSHIFT_TEST_USER=
-REDSHIFT_TEST_PASS=
-REDSHIFT_TEST_DBNAME=
-REDSHIFT_TEST_PORT=
\ No newline at end of file
+REDSHIFT_HOST=
+REDSHIFT_USER=
+DBT_ENV_SECRET_REDSHIFT_PASS=
+REDSHIFT_DATABASE=
+REDSHIFT_PORT=
+REDSHIFT_SCHEMA=dbt_utils_integration_tests_redshift
diff --git a/integration_tests/.env/snowflake.env b/integration_tests/.env/snowflake.env
index 134cc8d1..d438bc25 100644
--- a/integration_tests/.env/snowflake.env
+++ b/integration_tests/.env/snowflake.env
@@ -1,6 +1,7 @@
-SNOWFLAKE_TEST_ACCOUNT=
-SNOWFLAKE_TEST_USER=
-SNOWFLAKE_TEST_PASSWORD=
-SNOWFLAKE_TEST_ROLE=
-SNOWFLAKE_TEST_DATABASE=
-SNOWFLAKE_TEST_WAREHOUSE=
\ No newline at end of file
+SNOWFLAKE_ACCOUNT=
+SNOWFLAKE_USER=
+DBT_ENV_SECRET_SNOWFLAKE_PASS=
+SNOWFLAKE_ROLE=
+SNOWFLAKE_DATABASE=
+SNOWFLAKE_WAREHOUSE=
+SNOWFLAKE_SCHEMA=dbt_utils_integration_tests_snowflake
diff --git a/integration_tests/README.md b/integration_tests/README.md
index 7f74ec35..db685a57 100644
--- a/integration_tests/README.md
+++ b/integration_tests/README.md
@@ -13,7 +13,7 @@
 - Docker
 
 ### Configure credentials
-Edit the env file for your TARGET in `integration_tests/.env/[TARGET].env`.
+Edit the env file for your TARGET in `integration_tests/.env/[TARGET].env`. These will be used for your profiles.yml.
 
 Load the environment variables:
 ```shell
@@ -91,16 +91,6 @@ Where possible, targets are being run in docker containers (this works for Postg
 
 ### Creating a new integration test
 
-#### Set up profiles
-Do either one of the following:
-1. Use `DBT_PROFILES_DIR`
-    ```shell
-    cp integration_tests/ci/sample.profiles.yml integration_tests/profiles.yml
-    export DBT_PROFILES_DIR=$(cd integration_tests && pwd)
-    ```
-2. Use `~/.dbt/profiles.yml`
-    - Copy contents from `integration_tests/ci/sample.profiles.yml` into `~/.dbt/profiles.yml`.
-
 #### Add your integration test
 
 This directory contains an example dbt project which tests the macros in the `dbt-utils` package. An integration test typically involves making 1) a new seed file 2) a new model file 3) a generic test to assert anticipated behaviour.
diff --git a/integration_tests/ci/sample.profiles.yml b/integration_tests/ci/sample.profiles.yml
deleted file mode 100644
index 71b63983..00000000
--- a/integration_tests/ci/sample.profiles.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-
-# HEY! This file is used in the dbt-utils integrations tests with CircleCI.
-# You should __NEVER__ check credentials into version control. Thanks for reading :)
-
-integration_tests:
-  target: postgres
-  outputs:
-    postgres:
-      type: postgres
-      host: "{{ env_var('POSTGRES_TEST_HOST') }}"
-      user: "{{ env_var('POSTGRES_TEST_USER') }}"
-      pass: "{{ env_var('POSTGRES_TEST_PASS') }}"
-      port: "{{ env_var('POSTGRES_TEST_PORT') | as_number }}"
-      dbname: "{{ env_var('POSTGRES_TEST_DBNAME') }}"
-      schema: dbt_utils_integration_tests_postgres
-      threads: 5
-
-    redshift:
-      type: redshift
-      host: "{{ env_var('REDSHIFT_TEST_HOST') }}"
-      user: "{{ env_var('REDSHIFT_TEST_USER') }}"
-      pass: "{{ env_var('REDSHIFT_TEST_PASS') }}"
-      dbname: "{{ env_var('REDSHIFT_TEST_DBNAME') }}"
-      port: "{{ env_var('REDSHIFT_TEST_PORT') | as_number }}"
-      schema: dbt_utils_integration_tests_redshift
-      threads: 5
-
-    bigquery:
-      type: bigquery
-      method: service-account
-      keyfile: "{{ env_var('BIGQUERY_SERVICE_KEY_PATH') }}"
-      project: "{{ env_var('BIGQUERY_TEST_DATABASE') }}"
-      schema: dbt_utils_integration_tests_bigquery
-      threads: 10
-
-    snowflake:
-      type: snowflake
-      account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
-      user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
-      password: "{{ env_var('SNOWFLAKE_TEST_PASSWORD') }}"
-      role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
-      database: "{{ env_var('SNOWFLAKE_TEST_DATABASE') }}"
-      warehouse: "{{ env_var('SNOWFLAKE_TEST_WAREHOUSE') }}"
-      schema: dbt_utils_integration_tests_snowflake
-      threads: 10
diff --git a/integration_tests/profiles.yml b/integration_tests/profiles.yml
new file mode 100644
index 00000000..affc9cee
--- /dev/null
+++ b/integration_tests/profiles.yml
@@ -0,0 +1,48 @@
+
+# HEY! This file is used in the dbt-utils integration tests with CircleCI.
+# You should __NEVER__ check credentials into version control. Thanks for reading :)
+
+
+integration_tests:
+  target: postgres
+  outputs:
+    postgres:
+      type: "postgres"
+      host: "{{ env_var('POSTGRES_HOST') }}"
+      user: "{{ env_var('POSTGRES_USER') }}"
+      pass: "{{ env_var('DBT_ENV_SECRET_POSTGRES_PASS') }}"
+      port: "{{ env_var('POSTGRES_PORT') | as_number }}"
+      dbname: "{{ env_var('POSTGRES_DATABASE') }}"
+      schema: "{{ env_var('POSTGRES_SCHEMA') }}"
+      threads: 5
+
+    redshift:
+      type: "redshift"
+      host: "{{ env_var('REDSHIFT_HOST') }}"
+      user: "{{ env_var('REDSHIFT_USER') }}"
+      pass: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_PASS') }}"
+      dbname: "{{ env_var('REDSHIFT_DATABASE') }}"
+      port: "{{ env_var('REDSHIFT_PORT') | as_number }}"
+      schema: "{{ env_var('REDSHIFT_SCHEMA') }}"
+      threads: 5
+
+    bigquery:
+      type: "bigquery"
+      method: "service-account-json"
+      project: "{{ env_var('BIGQUERY_PROJECT') }}"
+      dataset: "{{ env_var('BIGQUERY_SCHEMA') }}"
+      threads: 10
+      keyfile_json:
+        "{{ env_var('BIGQUERY_KEYFILE_JSON') | as_native }}"
+      job_retries: 3
+
+    snowflake:
+      type: "snowflake"
+      account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
+      user: "{{ env_var('SNOWFLAKE_USER') }}"
+      password: "{{ env_var('DBT_ENV_SECRET_SNOWFLAKE_PASS') }}"
+      role: "{{ env_var('SNOWFLAKE_ROLE') }}"
+      database: "{{ env_var('SNOWFLAKE_DATABASE') }}"
+      warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}"
+      schema: "{{ env_var('SNOWFLAKE_SCHEMA') }}"
+      threads: 10
diff --git a/run_test.sh b/run_test.sh
index b6251b6d..95ef9a30 100755
--- a/run_test.sh
+++ b/run_test.sh
@@ -8,7 +8,6 @@ dbt --version
 
 # Set the profile
 cd integration_tests
-cp ci/sample.profiles.yml profiles.yml
 export DBT_PROFILES_DIR=.
 
 # Show the location of the profiles directory and test the connection
diff --git a/supported_adapters.env b/supported_adapters.env
new file mode 100755
index 00000000..3acc8a8b
--- /dev/null
+++ b/supported_adapters.env
@@ -0,0 +1 @@
+SUPPORTED_ADAPTERS=postgres,snowflake,redshift,bigquery
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/tests/conftest.py b/tests/conftest.py
deleted file mode 100644
index e7d9d198..00000000
--- a/tests/conftest.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import pytest
-import os
-
-pytest_plugins = ["dbt.tests.fixtures.project"]
-
-
-def pytest_addoption(parser):
-    parser.addoption("--profile", action="store", default="postgres", type=str)
-
-
-# Using @pytest.mark.skip_profile('postgres') uses the 'skip_by_profile_type'
-# autouse fixture below
-def pytest_configure(config):
-    config.addinivalue_line(
-        "markers",
-        "skip_profile(profile): skip test for the given profile",
-    )
-    config.addinivalue_line(
-        "markers",
-        "only_profile(profile): only test the given profile",
-    )
-
-
-@pytest.fixture(scope="session")
-def dbt_profile_target(request):
-    profile_type = request.config.getoption("--profile")
-    if profile_type == "postgres":
-        target = postgres_target()
-    elif profile_type == "redshift":
-        target = redshift_target()
-    elif profile_type == "snowflake":
-        target = snowflake_target()
-    elif profile_type == "bigquery":
-        target = bigquery_target()
-    else:
-        raise ValueError(f"Invalid profile type '{profile_type}'")
-    return target
-
-
-def postgres_target():
-    return {
-        "type": "postgres",
-        "host": os.getenv('POSTGRES_TEST_HOST'),
-        "user": os.getenv('POSTGRES_TEST_USER'),
-        "pass": os.getenv('POSTGRES_TEST_PASS'),
-        "port": int(os.getenv('POSTGRES_TEST_PORT')),
-        "dbname": os.getenv('POSTGRES_TEST_DBNAME'),
-    }
-
-
-def redshift_target():
-    return {
-        "type": "redshift",
-        "host": os.getenv('REDSHIFT_TEST_HOST'),
-        "user": os.getenv('REDSHIFT_TEST_USER'),
-        "pass": os.getenv('REDSHIFT_TEST_PASS'),
-        "port": int(os.getenv('REDSHIFT_TEST_PORT')),
-        "dbname": os.getenv('REDSHIFT_TEST_DBNAME'),
-    }
-
-
-def bigquery_target():
-    return {
-        "type": "bigquery",
-        "method": "service-account",
-        "keyfile": os.getenv('BIGQUERY_SERVICE_KEY_PATH'),
-        "project": os.getenv('BIGQUERY_TEST_DATABASE'),
-    }
-
-
-def snowflake_target():
-    return {
-        "type": "snowflake",
-        "account": os.getenv('SNOWFLAKE_TEST_ACCOUNT'),
-        "user": os.getenv('SNOWFLAKE_TEST_USER'),
-        "password": os.getenv('SNOWFLAKE_TEST_PASSWORD'),
-        "role": os.getenv('SNOWFLAKE_TEST_ROLE'),
-        "database": os.getenv('SNOWFLAKE_TEST_DATABASE'),
-        "warehouse": os.getenv('SNOWFLAKE_TEST_WAREHOUSE'),
-    }
-
-
-@pytest.fixture(autouse=True)
-def skip_by_profile_type(request):
-    profile_type = request.config.getoption("--profile")
-    if request.node.get_closest_marker("skip_profile"):
-        for skip_profile_type in request.node.get_closest_marker("skip_profile").args:
-            if skip_profile_type == profile_type:
-                pytest.skip("skipped on '{profile_type}' profile")
-
-
-@pytest.fixture(autouse=True)
-def only_profile_type(request):
-    profile_type = request.config.getoption("--profile")
-    if request.node.get_closest_marker("only_profile"):
-        for only_profile_type in request.node.get_closest_marker("only_profile").args:
-            if only_profile_type != profile_type:
-                pytest.skip("skipped on '{profile_type}' profile")
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 00000000..7dfa61e0
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,85 @@
+[tox]
+skipsdist = True
+envlist = lint_all, testenv
+
+[testenv]
+passenv =
+    # postgres env vars
+    POSTGRES_HOST
+    POSTGRES_USER
+    DBT_ENV_SECRET_POSTGRES_PASS
+    POSTGRES_PORT
+    POSTGRES_DATABASE
+    POSTGRES_SCHEMA
+    # snowflake env vars
+    SNOWFLAKE_ACCOUNT
+    SNOWFLAKE_USER
+    DBT_ENV_SECRET_SNOWFLAKE_PASS
+    SNOWFLAKE_ROLE
+    SNOWFLAKE_DATABASE
+    SNOWFLAKE_WAREHOUSE
+    SNOWFLAKE_SCHEMA
+    # redshift
+    REDSHIFT_HOST
+    REDSHIFT_USER
+    DBT_ENV_SECRET_REDSHIFT_PASS
+    REDSHIFT_DATABASE
+    REDSHIFT_SCHEMA
+    REDSHIFT_PORT
+    # bigquery
+    BIGQUERY_PROJECT
+    BIGQUERY_KEYFILE_JSON
+    BIGQUERY_SCHEMA
+
+# Snowflake integration tests for centralized dbt testing
+# run dbt commands directly, assumes dbt is already installed in environment
+[testenv:dbt_integration_snowflake]
+changedir = integration_tests
+allowlist_externals =
+    dbt
+skip_install = true
+commands =
+    dbt --version
+    dbt debug --target snowflake
+    dbt deps --target snowflake
+    dbt build --target snowflake --full-refresh
+
+
+# Postgres integration tests for centralized dbt testing
+# run dbt commands directly, assumes dbt is already installed in environment
+[testenv:dbt_integration_postgres]
+changedir = integration_tests
+allowlist_externals =
+    dbt
+skip_install = true
+commands =
+    dbt --version
+    dbt debug --target postgres
+    dbt deps --target postgres
+    dbt build --target postgres --full-refresh
+
+# BigQuery integration tests for centralized dbt testing
+# run dbt commands directly, assumes dbt is already installed in environment
+[testenv:dbt_integration_bigquery]
+changedir = integration_tests
+allowlist_externals =
+    dbt
+skip_install = true
+commands =
+    dbt --version
+    dbt debug --target bigquery
+    dbt deps --target bigquery
+    dbt build --target bigquery --full-refresh
+
+# redshift integration tests for centralized dbt testing
+# run dbt commands directly, assumes dbt is already installed in environment
+[testenv:dbt_integration_redshift]
+changedir = integration_tests
+allowlist_externals =
+    dbt
+skip_install = true
+commands =
+    dbt --version
+    dbt debug --target redshift
+    dbt deps --target redshift
+    dbt build --target redshift --full-refresh
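
For local runs, the Makefile and tox.ini changes above give an entry point equivalent to the GitHub Actions job. A minimal sketch for the Postgres target, assuming `tox` and `dbt-postgres` are installed and the variables in `integration_tests/.env/postgres.env` point at a reachable database:

```shell
# install the adapter under test plus tox
pip install dbt-postgres tox

# export the credentials that integration_tests/profiles.yml reads via env_var()
set -a
source integration_tests/.env/postgres.env
set +a

# run the Postgres environment defined in tox.ini
# (equivalent to `make test target=postgres`)
tox -e dbt_integration_postgres
```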