# azure-pipelines-docs-integration.yml
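# Azure DevOps pipeline for the docs integration tests.
# Builds trigger only on branches matching pre_pr_docs-*.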
trigger:
  branches:
    include:
      - pre_pr_docs-*
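
# Database service containers available to the test jobs.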
resources:
  containers:
    - container: postgres
      image: postgres:11
      ports:
        - 5432:5432
      env:
        POSTGRES_DB: "test_ci"
        POSTGRES_HOST_AUTH_METHOD: "trust"
    - container: mysql
      image: mysql:8.0.20
      ports:
        - 3306:3306
      env:
        MYSQL_ALLOW_EMPTY_PASSWORD: "yes"
        MYSQL_DATABASE: test_ci
    - container: mssql
      image: mcr.microsoft.com/mssql/server:2019-latest
      env:
        ACCEPT_EULA: Y
        MSSQL_SA_PASSWORD: ReallyStrongPwd1234%^&*
        MSSQL_DB: test_ci
        MSSQL_PID: Developer
      ports:
        - 1433:1433
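
# Branch flags used in conditions, plus the QA usage-statistics endpoint used during test runs.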
variables:
  isMain: $[eq(variables['Build.SourceBranch'], 'refs/heads/main')]
  isDevelop: $[eq(variables['Build.SourceBranch'], 'refs/heads/develop')]
  GE_USAGE_STATISTICS_URL: "https://qa.stats.greatexpectations.io/great_expectations/v1/usage_statistics"
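
# Stage 1: detect whether docs-related files changed in this build.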
stages:
  - stage: scope_check
    pool:
      vmImage: 'ubuntu-20.04'
    jobs:
      - job: changes
        steps:
          - task: ChangedFiles@1
            name: CheckChanges
            inputs:
              verbose: true
              rules: |
                [DocsChanged]
                docs/**
                tests/integration/docusaurus/**
                tests/integration/fixtures/**
                tests/test_sets/**
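
  # Stage 2: run the docs integration tests against the Postgres and MySQL service containers.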
  - stage: docusaurus_tests
    pool:
      vmImage: 'ubuntu-latest'
    jobs:
      - job: test_docs
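        # Runs when the scope check reports GEChanged or DocsChanged, or when building main.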
        condition: or(eq(stageDependencies.scope_check.changes.outputs['CheckChanges.GEChanged'], true), eq(stageDependencies.scope_check.changes.outputs['CheckChanges.DocsChanged'], true), eq(variables.isMain, true))
        variables:
          python.version: '3.8'
        services:
          postgres: postgres
          mysql: mysql
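        # Install Great Expectations from the working tree, then run the docs-marked integration tests.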
        steps:
          - task: UsePythonVersion@0
            inputs:
              versionSpec: '$(python.version)'
            displayName: 'Use Python $(python.version)'

          - bash: python -m pip install --upgrade pip==20.2.4
            displayName: 'Update pip'

          - script: |
              pip install -r requirements-dev.txt
              pip install .
            displayName: 'Install dependencies'

          - script: |
              pip install pytest pytest-azurepipelines
              # TODO enable spark tests
              # TODO enable sqlalchemy once cloud resources are working again
              pytest -v --docs-tests -m docs --no-spark --mysql tests/integration/test_script_runner.py
            displayName: 'pytest'
            env:
              # snowflake credentials
              SNOWFLAKE_ACCOUNT: $(SNOWFLAKE_ACCOUNT)
              SNOWFLAKE_USER: $(SNOWFLAKE_USER)
              SNOWFLAKE_PW: $(SNOWFLAKE_PW)
              SNOWFLAKE_DATABASE: $(SNOWFLAKE_DATABASE)
              SNOWFLAKE_SCHEMA: $(SNOWFLAKE_SCHEMA)
              SNOWFLAKE_WAREHOUSE: $(SNOWFLAKE_WAREHOUSE)
              # redshift credentials
              REDSHIFT_USERNAME: $(REDSHIFT_USERNAME)
              REDSHIFT_PASSWORD: $(REDSHIFT_PASSWORD)
              REDSHIFT_HOST: $(REDSHIFT_HOST)
              REDSHIFT_PORT: $(REDSHIFT_PORT)
              REDSHIFT_DATABASE: $(REDSHIFT_DATABASE)
              REDSHIFT_SSLMODE: $(REDSHIFT_SSLMODE)
              # AWS credentials
              # AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
              # AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
              # AWS_DEFAULT_REGION: $(AWS_DEFAULT_REGION)
              # todo: add credentials for cloud resources here