DRAFT: Feature/support dbt core1.8 #824
Workflow file for this run
name: CLI integration tests
on:
  pull_request:
    types: [assigned, opened, synchronize, reopened]
    paths:
      - "projects/adapter/**"
      - ".github/actions/setup-local-fal/**"
      - ".github/workflows/test_integration_cli.yml"
  push:
    branches: [main]
    paths:
      - "projects/adapter/**"
  schedule:
    # every monday
    - cron: "0 0 * * 1"
  workflow_dispatch:
    inputs:
      adapter:
        description: dbt adapter to test with
        required: false
        default: "<ALL>"
        type: choice
        options:
          - "<ALL>"
          - postgres
          - bigquery
          - snowflake
          - duckdb
          - fal
      python:
        description: Python version to test with
        required: false
        default: "3.8"
        type: choice
        options:
          - "<ALL>"
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
      dbt:
        description: dbt version to test with
        required: false
        default: "latest"
        type: choice
        options:
          - "<ALL>"
          - "latest"
          - "1.7.*"
jobs:
  matrix-adapter:
    runs-on: ubuntu-latest
    outputs:
      list: ${{ steps.matrix-step.outputs.list }}
    steps:
      - id: matrix-step
        shell: python
        run: |
          OPTIONS = [
              'postgres',
              'bigquery',
              'snowflake',
              'fal',
          ]
          EXTRA_OPTIONS = [
              'duckdb',
          ]
          OUTPUT = OPTIONS
          if '${{ github.event_name }}' == 'pull_request':
              import re
              PR_TITLE = '${{ github.event.pull_request.title }}'.lower()
              PR_BRANCH = '${{ github.head_ref }}'.lower()
              PR_DESCRIPTION = '''${{ github.event.pull_request.body }}'''.lower()
              PR_DESCRIPTION = re.sub("<!--.*?-->", "", PR_DESCRIPTION, flags=re.DOTALL)
              # Only test adapters mentioned in the pull request title, branch or description.
              # postgres and the fal adapter are always tested as a sanity check.
              OUTPUT = [
                  a for a in OPTIONS + EXTRA_OPTIONS
                  if a == 'postgres' or a == 'fal' or
                  a in PR_TITLE or
                  a in PR_BRANCH or
                  a in PR_DESCRIPTION
              ]
          elif '${{ github.event_name }}' == 'push':
              OUTPUT = ['postgres']
          elif '${{ github.event_name }}' == 'workflow_dispatch':
              INPUT_CHOICE = '${{ github.event.inputs.adapter }}'
              if INPUT_CHOICE == '<ALL>':
                  OUTPUT = OPTIONS + EXTRA_OPTIONS
              else:
                  OUTPUT = [INPUT_CHOICE]
          import json
          print("::set-output name=list::" + json.dumps(OUTPUT))
  matrix-python:
    runs-on: ubuntu-latest
    outputs:
      list: ${{ steps.matrix-step.outputs.list }}
    steps:
      - id: matrix-step
        shell: python
        run: |
          OPTIONS = [
              "3.8",
              "3.9",
              "3.10",
              "3.11",
          ]
          OUTPUT = ["3.8"]
          if '${{ github.event_name }}' == 'pull_request':
              import re
              PR_TITLE = '${{ github.event.pull_request.title }}'.lower()
              PR_BRANCH = '${{ github.head_ref }}'.lower()
              PR_DESCRIPTION = '''${{ github.event.pull_request.body }}'''.lower()
              PR_DESCRIPTION = re.sub("<!--.*?-->", "", PR_DESCRIPTION, flags=re.DOTALL)
              # Test the Python versions mentioned in the pull request title, branch or description.
              OUTPUT = [
                  v for v in OPTIONS
                  if v in PR_TITLE or
                  v in PR_BRANCH or
                  v in PR_DESCRIPTION
              ]
              if not OUTPUT:
                  # Fall back to the default version if none were mentioned in the PR.
                  OUTPUT = ["3.8"]
          elif '${{ github.event_name }}' in ('schedule', 'push'):
              OUTPUT = OPTIONS
          elif '${{ github.event_name }}' == 'workflow_dispatch':
              INPUT_CHOICE = '${{ github.event.inputs.python }}'
              if INPUT_CHOICE == '<ALL>':
                  OUTPUT = OPTIONS
              else:
                  OUTPUT = [INPUT_CHOICE]
          import json
          print("::set-output name=list::" + json.dumps(OUTPUT))
  matrix-dbt:
    runs-on: ubuntu-latest
    outputs:
      list: ${{ steps.matrix-step.outputs.list }}
    steps:
      - id: matrix-step
        shell: python
        run: |
          OPTIONS = [
              "1.7.*",
          ]
          OUTPUT = OPTIONS
          if '${{ github.event_name }}' == 'workflow_dispatch':
              INPUT_CHOICE = '${{ github.event.inputs.dbt }}'
              if INPUT_CHOICE == '<ALL>':
                  # Unlike matrix-adapter, there are no extra dbt versions beyond OPTIONS.
                  OUTPUT = OPTIONS
              else:
                  OUTPUT = [INPUT_CHOICE]
          import json
          print("::set-output name=list::" + json.dumps(OUTPUT))
  run:
    needs:
      - matrix-adapter
      - matrix-dbt
      - matrix-python
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        profile: ${{ fromJSON(needs.matrix-adapter.outputs.list) }}
        dbt: ${{ fromJSON(needs.matrix-dbt.outputs.list) }}
        python: ${{ fromJSON(needs.matrix-python.outputs.list) }}
    # Run only the latest commit pushed to PR
    concurrency:
      group: "${{ github.head_ref || github.run_id }}-${{ github.workflow }}-${{ matrix.profile }}-${{ matrix.dbt }}-${{ matrix.python }}"
      cancel-in-progress: true
    steps:
      - uses: actions/checkout@v2
      - name: Setup local fal
        uses: ./.github/actions/setup-local-fal
        with:
          python: ${{ matrix.python }}
          dbt: ${{ matrix.dbt }}
          adapter: ${{ matrix.profile }}
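      # The docker-compose setup under projects/adapter/cli_tests presumably provides
      # the local Postgres database used by the postgres and fal profiles; the cloud
      # adapters connect with the credentials passed via env in the test step below.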
      - name: Start Docker database
        working-directory: projects/adapter/cli_tests
        if: contains(fromJSON('["postgres", "fal"]'), matrix.profile)
        run: docker-compose up -d
      - name: Install conda
        uses: s-weigand/setup-conda@v1
        with:
          activate-conda: false
          python-version: ${{ matrix.python }}
          # PyJokes is available on conda-forge
          conda-channels: anaconda, conda-forge
      - name: Setup behave
        working-directory: projects/adapter/cli_tests
        run: pip install behave ipython
      - name: Run tests
        id: test_run
        working-directory: projects/adapter/cli_tests
        env:
          FAL_STATS_ENABLED: false
          # BigQuery
          KEYFILE: ${{ secrets.GCP_SA_KEY }}
          GCLOUD_PROJECT: ${{ secrets.GCP_PROJECT_ID }}
          BQ_DATASET: ${{ secrets.BQ_DATASET }}
          # Snowflake
          SF_ACCOUNT: ${{ secrets.SF_ACCOUNT }}
          SF_USER: ${{ secrets.SF_USER }}
          SF_PASSWORD: ${{ secrets.SF_PASSWORD }}
          SF_ROLE: ${{ secrets.SF_ROLE }}
          SF_DATABASE: ${{ secrets.SF_DATABASE }}
          SF_WAREHOUSE: ${{ secrets.SF_WAREHOUSE }}
          SF_SCHEMA: ${{ secrets.SF_SCHEMA }}
          # DuckDB
          DB_PATH: ${{ github.workspace }}/duck.db
        run: |
          # Database and schema setup for sources
          if [[ '${{ matrix.profile }}' == "bigquery" ]]
          then
            export DBT_DATABASE="$GCLOUD_PROJECT" DBT_SCHEMA="$BQ_DATASET"
          fi
          if [[ '${{ matrix.profile }}' == "snowflake" ]]
          then
            export DBT_DATABASE="$SF_DATABASE" DBT_SCHEMA="$SF_SCHEMA"
          fi
          if [[ '${{ matrix.profile }}' == "duckdb" ]]
          then
            # TODO: which to use for sources? Example:
            # database: "{{ env_var('DBT_DATABASE', 'test') }}"
            # schema: "{{ env_var('DBT_SCHEMA', 'dbt_fal') }}"
            export DBT_DATABASE="" DBT_SCHEMA=""
          fi
          if [[ '${{ matrix.profile }}' == "bigquery" ]]
          then
            echo "$KEYFILE" > "$HOME/keyfile.json"
            ls -la "$HOME/keyfile.json"
            export KEYFILE_DIR=$HOME
            echo 'keyfile is ready'
          fi
          # Could not get the real job_id easily from context
          UUID=$(uuidgen | head -c8)
          export DB_NAMESPACE="${{ github.run_id }}_${UUID}"
          BEHAVE_TAGS="--tags=-TODO-${{ matrix.profile }}"
          if [[ '${{ matrix.profile }}' != 'postgres' ]] && [[ '${{ matrix.profile }}' != 'fal' ]]
          then
            # 'broken_profile' tests only work for postgres and postgres+fal right now
            BEHAVE_TAGS="$BEHAVE_TAGS --tags=-broken_profile"
          fi
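          # behave's "-" tag prefix excludes matching scenarios, so anything tagged
          # TODO-<profile> (and broken_profile on the non-local adapters) is skipped.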
          behave $BEHAVE_TAGS -fplain -D profile=${{ matrix.profile }}
      - name: Send custom JSON data to Slack workflow
        if: false
        # if: (failure() || cancelled()) && github.event_name == 'schedule'
        id: slack
        uses: slackapi/slack-github-action@v1.18.0
        with:
          # For posting a rich message using Block Kit
          payload: |
            {
              "text": "Integration tests failed for dbt-${{ matrix.profile }}@${{ matrix.dbt }} (Python ${{ matrix.python }})\nhttps://github.com/fal-ai/fal/actions/runs/${{ github.run_id }}"
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK