-
Notifications
You must be signed in to change notification settings - Fork 67
75 lines (66 loc) · 2.65 KB
/
rumble.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
---
# Nightly job that exports the last 30 days of grype CVE scan results
# from BigQuery to a public CSV object in Google Cloud Storage.
name: Rumble CSV Generation

on:
  schedule:
    # Daily at 05:00 UTC.
    - cron: "0 5 * * *"
  # Allow manual runs from the Actions tab.
  workflow_dispatch:
  # Also run on pushes to the automation branch, for testing changes.
  push:
    branches: [auto-rumble]

# GCP project / auth configuration, all sourced from repository secrets.
env:
  PROJECT_ID: "${{ secrets.PROJECT_ID }}"
  # NOTE(review): STORAGE_BUCKET is defined here but the upload steps below
  # hardcode gs://chainguard-academy — confirm which is authoritative.
  STORAGE_BUCKET: "${{ secrets.STORAGE_BUCKET }}"
  WORKLOAD_IDENTITY_PROVIDER: "${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}"
  SERVICE_ACCOUNT: "${{ secrets.GH_ACTION_SERVICE_ACCOUNT }}"
  BIGQUERY_TABLE: "${{ secrets.BIGQUERY_TABLE }}"
  GH_TOKEN: "${{ github.token }}"
jobs:
  generate-csv:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      # Required for OIDC workload-identity federation to Google Cloud.
      id-token: write
    steps:
      - name: Authenticate to Google Cloud
        id: auth
        uses: google-github-actions/auth@ceee102ec2387dd9e844e01b530ccd4ec87ce955 # v0
        with:
          token_format: 'access_token'
          project_id: "${{ env.PROJECT_ID }}"
          workload_identity_provider: "${{ env.WORKLOAD_IDENTITY_PROVIDER }}"
          service_account: "${{ env.SERVICE_ACCOUNT }}"

      - name: set up bigqueryrc
        shell: bash
        run: |
          gcloud config set auth/impersonate_service_account "${{ env.SERVICE_ACCOUNT }}"
          # Throwaway query: its only purpose is to make bq emit its one-time
          # ~/.bigqueryrc init banner now, so the later query produces clean CSV.
          # stdout (banner + unused result) is discarded; stderr is deliberately
          # left attached to the job log so real failures remain visible.
          # (Was "2>&1 > /dev/null", which duplicated stderr onto the log and
          # only discarded stdout — same effect, but misleading to read.)
          bq query --use_legacy_sql=false --format=csv --max_rows=1 'SELECT COUNT(*) FROM base-image-rumble.rumble.scheduled;' > /dev/null

      - name: rumble query
        shell: bash
        # A literal block (run: |) with an explicit, properly spaced bash line
        # continuation replaces the previous folded scalar, whose
        # "--max_rows=100000\" (no space before the backslash) risked gluing
        # the flag onto the start of the query text after folding.
        run: |
          # NOTE(review): DATE(time) truncates to midnight, so the %H:%M:%S
          # part of the formatted "time" column is always 00:00:00 — confirm
          # whether DATETIME(time) was intended.
          bq query --use_legacy_sql=false --format=csv --max_rows=100000 \
            'SELECT
              ROW_NUMBER() OVER (ORDER BY time),
              image,
              scanner,
              scanner_version,
              scanner_db_version,
              FORMAT_DATETIME("%Y-%m-%d %H:%M:%S", DATE(time)) as time,
              low_cve_count as low_cve_cnt,
              med_cve_count as med_cve_cnt,
              high_cve_count as high_cve_cnt,
              crit_cve_count as crit_cve_cnt,
              unknown_cve_count as unknown_cve_cnt,
              low_cve_count + med_cve_count + high_cve_count + crit_cve_count + unknown_cve_count AS tot_cve_cnt,
              digest
            FROM ${{ env.BIGQUERY_TABLE }}
            WHERE DATE(time) BETWEEN DATE_SUB(CURRENT_DATE(), INTERVAL 30 DAY) AND CURRENT_DATE()
            AND scanner = "grype";' > data.csv

      - name: upload csv
        shell: bash
        run: |
          # NOTE(review): bucket is hardcoded rather than using
          # env.STORAGE_BUCKET, which is defined but unused — confirm intent.
          gcloud storage cp data.csv gs://chainguard-academy/cve-data/data.csv

      - name: update permissions
        shell: bash
        run: |
          # Make the uploaded object world-readable and served as text/csv.
          gcloud storage objects update gs://chainguard-academy/cve-data/data.csv \
            --add-acl-grant=entity=AllUsers,role=READER \
            --content-type text/csv