fix workflow after some debugging
emmyoop committed Jun 27, 2024
1 parent e2dad0d commit 4fe5f8c
Showing 1 changed file with 114 additions and 137 deletions: .github/workflows/ci.yml
@@ -1,150 +1,127 @@
# **what?**
# Run tests for dbt-utils against supported adapters

# **why?**
# To ensure that dbt-utils works as expected with all supported adapters

# **when?**
# On every PR, every push to main, and when manually triggered

name: Integration Tests
name: Package Integration Tests

on:
  push:
    branches:
      - main
  pull_request_target:
  workflow_dispatch:
    inputs:
      adapter:
        description: The adapter to test against. Defaults to all supported adapters.
        type: string
        required: false
      adapter-version:
        description: The dbt version to test against. Also accepts a dbt-core ref when main selected.
        type: choice
        required: true
        options:
          - main
          - 1.8.0-latest
          - 1.7.0-latest
          - 1.6.0-latest
      adapter-ref:
        description: The adapter ref to test against. Only used when main selected for adapter version.
        type: string
        required: false

on:
  push:
    branches:
      - main
  pull_request_target:
  workflow_dispatch:
    inputs:
      adapter:
        description: The adapter to test against. Defaults to all supported adapters when blank.
        type: string
        required: false
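
With the simplified trigger in place, a manual run can be kicked off with the GitHub CLI. A short sketch, where the adapter value is purely illustrative and the input can be omitted to test every supported adapter:

# Trigger the workflow_dispatch event for a single adapter (illustrative value).
gh workflow run "Package Integration Tests" -f adapter=postgres
# Or omit the input and let the workflow discover all supported adapters.
gh workflow run "Package Integration Tests"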

env:
  PYTHON_VERSION: "3.11"

jobs:
  determine-refs:
    runs-on: ubuntu-latest
    outputs:
      adapter-ref: ${{ steps.adapter.outputs.adapter-ref }}
      use-pip-adapter: ${{ steps.adapter.outputs.use-pip-adapter }}
    steps:

      - name: "Set adapter version / Commit"
        id: adapter
        run: |
          if [ ${{ inputs.adapter-version }} == "main" ]; then
            echo "use-pip-adapter=false" >> $GITHUB_OUTPUT
            if [ ${{ inputs.adapter-ref }} == "" ]; then
              echo "dbt-adapter-ref=main" >> $GITHUB_OUTPUT
            else
              echo "dbt-adapter-ref=${{ inputs.adapter-ref }}" >> $GITHUB_OUTPUT
            fi
            echo "use-pip-adapter=false" >> $GITHUB_OUTPUT
          else
            echo "use-pip-adapter=true" >> $GITHUB_OUTPUT
          fi

  determine-supported-adapters:
    runs-on: ubuntu-latest
    outputs:
      adapters: ${{ steps.supported-adapters.outputs.adapters }}
    steps:
      - name: "Checkout ${{ github.repository }}"
        uses: actions/checkout@v4

      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: "Install tox"
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: "Get list of supported adapters or use input adapter only"
        id: list-adapters
        run: |
          [ if ${{ inputs.adapter }} == "" ]; then
            tox_adapters=$(tox -e list_supported_adapters | awk -F'> ' '{print $2}' | sed 's/echo //')
            echo $tox_adapters
            echo "tox_adapters=$tox_adapters" >> $GITHUB_OUTPUT
          else
            echo "tox_adapters=${{ inputs.adapter }}" >> $GITHUB_OUTPUT
          fi

      - name: "Format for use as the matrix"
        id: supported-adapters
        run: |
          # Convert to JSON array and output
          supported_adapters=$(echo ${{ steps.list-adapters.outputs.tox_adapters }} | jq -R 'split(",")')
          echo "adapters=$supported_adapters" >> $GITHUB_OUTPUT

  run-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        adapter: ${{fromJson(needs.determine-supported-adapters.outputs.adapters)}}

    steps:
      - name: "Checkout ${{ github.repository }}"
        uses: actions/checkout@v4

      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: "Install ${{ matrix.adapter }}"
        run: |
          python -m pip install --upgrade pip
          pip install dbt-${{ matrix.adapter}}

      - name: "Install tox"
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: "Run integration tests with tox on ${{ matrix.adapter }}"
        run: |
          tox -e dbt_integration_${{ matrix.adapter }}
        env:
          # TODO: set all the env vars!
          POSTGRES_HOST: "postgress_db"
          POSTGRES_USER: "postgres"
          DBT_ENV_SECRET_POSTGRES_PASS: ${{ secrets.POSTGRES_PASS }}
          POSTGRES_PORT: 5432
          POSTGRES_DATABASE: "postgres"
          POSTGRES_SCHEMA: "dbt_utils_integration_tests_postgres"
          # snowflake env vars
          SNOWFLAKE_ACCOUNT: ${{ vars.SNOWFLAKE_ACCOUNT }}
          SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
          DBT_ENV_SECRET_SNOWFLAKE_PASS:
          SNOWFLAKE_ROLE: "TESTER"
          SNOWFLAKE_DATABASE: "dbt_utils_testing"
          SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
          SNOWFLAKE_SCHEMA: "dbt_utils_integration_tests_snowflake"
          # redshift
          REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
          REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
          DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_PASSWORD }}
          REDSHIFT_DATABASE: "dbt_utils_testing"
          REDSHIFT_SCHEMA: "dbt_utils_integration_tests_redshift"
          REDSHIFT_PORT:
          # bigquery
          BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
          DBT_ENV_SECRET_BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_SERVICE_ACCOUNT_JSON }}
          BIGQUERY_SCHEMA: "dbt_utils_integration_tests_bigquery"
  determine-supported-adapters:
    runs-on: ubuntu-latest
    outputs:
      adapters: ${{ steps.supported-adapters.outputs.adapters }}
    steps:
      - name: "Checkout ${{ github.event.repository }}"
        uses: actions/checkout@v4

      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: "Install tox"
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: "Get list of supported adapters or use input adapter only"
        id: list-adapters
        run: |
          if [ -z "${{ inputs.adapter }}" ]; then
            # github adds a pip freeze and a new line we need to strip out
            tox_adapters=$(tox -e list_supported_adapters | grep -v "python -m pip freeze --all" | awk -F'> ' '{print $2}' | sed 's/echo //' | tr -d '\n')
            echo $tox_adapters
            echo "test_adapters=$tox_adapters" >> $GITHUB_OUTPUT
          else
            echo "test_adapters=${{ inputs.adapter }}" >> $GITHUB_OUTPUT
          fi

      - name: "Format adapter list for use as the matrix"
        id: supported-adapters
        run: |
          # Convert to JSON array and output
          supported_adapters=$(echo "${{ steps.list-adapters.outputs.test_adapters }}" | jq -Rc 'split(",")')
          echo $supported_adapters
          echo "adapters=$supported_adapters" >> $GITHUB_OUTPUT

      - name: "[ANNOTATION] ${{ github.event.repository.name }} - adapters to test"
        run: |
          title="${{ github.event.repository.name }} - adapters to test"
          message="The workflow will run tests for the following adapters: ${{ steps.supported-adapters.outputs.adapters }}"
          echo "::notice $title::$message"
  run-tests:
    runs-on: ubuntu-latest
    needs: [determine-supported-adapters]
    strategy:
      fail-fast: false
      matrix:
        adapter: ${{fromJson(needs.determine-supported-adapters.outputs.adapters)}}

    steps:
      - name: "Checkout ${{ github.event.repository }}"
        uses: actions/checkout@v4

      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: "Install ${{ matrix.adapter }}"
        run: |
          python -m pip install --upgrade pip
          pip install dbt-${{ matrix.adapter }}

      - name: "Install tox"
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: "Run integration tests with tox on ${{ matrix.adapter }}"
        run: |
          tox -e dbt_integration_${{ matrix.adapter }}
        env:
          POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
          POSTGRES_USER: ${{ vars.POSTGRES_USER }}
          DBT_ENV_SECRET_POSTGRES_PASS: ${{ secrets.POSTGRES_PASS }}
          POSTGRES_PORT: 5432
          POSTGRES_DATABASE: ${{ vars.POSTGRES_DATABASE }}
          POSTGRES_SCHEMA: "dbt_utils_integration_tests_postgres"
          # snowflake env vars
          SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
          SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
          DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_PASS }}
          SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
          SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
          SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
          SNOWFLAKE_SCHEMA: "dbt_utils_integration_tests_snowflake"
          # redshift
          REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
          REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
          DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_PASS }}
          REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
          REDSHIFT_SCHEMA: "dbt_utils_integration_tests_redshift"
          REDSHIFT_PORT: 5439
          # bigquery
          BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
          DBT_ENV_SECRET_BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_SERVICE_ACCOUNT_JSON }}
          BIGQUERY_SCHEMA: "dbt_utils_integration_tests_bigquery"
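
For debugging a single matrix cell outside of CI, the same steps can be replayed by hand. A rough sketch, with postgres as a purely illustrative adapter and placeholder connection values standing in for the repository's vars and secrets:

# Rough local equivalent of one run-tests matrix cell (adapter=postgres is only an example).
python -m pip install --upgrade pip
pip install dbt-postgres tox

# The workflow supplies these from repository vars/secrets; locally they must be
# exported by hand (the values below are placeholders, not the project's settings).
export POSTGRES_HOST=localhost
export POSTGRES_USER=postgres
export DBT_ENV_SECRET_POSTGRES_PASS=postgres
export POSTGRES_PORT=5432
export POSTGRES_DATABASE=postgres
export POSTGRES_SCHEMA=dbt_utils_integration_tests_postgres

# Run the same tox environment the workflow invokes for this adapter.
tox -e dbt_integration_postgres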
