diff --git a/pipelines/app/Jenkinsfile b/pipelines/app/Jenkinsfile new file mode 100644 index 0000000..a4d509f --- /dev/null +++ b/pipelines/app/Jenkinsfile @@ -0,0 +1,536 @@ +@Library('polaris') +import ca.bc.gov.nrids.polaris.Podman +import ca.bc.gov.nrids.polaris.BrokerIntention +import ca.bc.gov.nrids.polaris.Vault + +def podman + +pipeline { + agent none + stages { + stage('Checkout app and download package') { + agent { + label Podman.AGENT_LABEL_APP + } + environment { + GIT_REPO = "${params.gitRepo}" + GIT_BRANCH = "${params.configBranch != null && params.configBranch != "" ? params.configBranch : params.gitTag != null && params.gitTag != "" ? params.gitTag : params.gitBranch}" + GITHUB_PRIVATE_REPO = "${params.githubToken != null && params.githubToken != "" ? 'true' : ''}" + DOWNLOAD_URL = "${params.downloadUrl}" + DOWNLOAD_TYPE =" ${params.downloadType}" + ARTIFACT_SHA256 = "${params.artifactSha256}" + } + steps { + script { + sh 'rm -rf app' + sh 'rm -rf downloads ; mkdir downloads' + if (env.GITHUB_PRIVATE_REPO) { + env.GITHUB_USER = "${params.githubUser}" + env.GITHUB_TOKEN = "${params.githubToken}" + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GITHUB_TOKEN, password: GITHUB_TOKEN]]]) { + sh 'git clone --branch ${GIT_BRANCH} https://${GITHUB_USER}:${GITHUB_TOKEN}@${GIT_REPO} app' + if (DOWNLOAD_TYPE.toUpperCase().trim() == 'GITHUB') { + sh 'cd downloads ; curl -LO "${DOWNLOAD_URL}" -H "Authorization: token ${GITHUB_TOKEN}"' + } + } + } else { + sh 'git clone --branch ${GIT_BRANCH} https://${GIT_REPO} app' + if (DOWNLOAD_TYPE.toUpperCase().trim() == 'GITHUB') { + sh 'cd downloads ; curl -LO ${DOWNLOAD_URL}' + } + } + if (DOWNLOAD_TYPE.toUpperCase().trim() == 'GITHUB') { + def artifact_download_sha256 = sh( + returnStdout: true, + script: ''' + ARTIFACT_NAME=$(echo ${DOWNLOAD_URL##*/}) + cd downloads + sha256=($(sha256sum "${ARTIFACT_NAME}")) + echo "${sha256}" + ''' + ) + } + } + } + } + stage('Checkout INFRA dev-all-in-one') { + agent { + label Podman.AGENT_LABEL_APP + } + steps { + checkout([ + $class: 'GitSCM', + branches: [[name: "release/1.0.0"]], + doGenerateSubmoduleConfigurations: false, + extensions: [ + [$class: 'RelativeTargetDirectory', relativeTargetDir: "infra"] + ], + submoduleCfg: [], + userRemoteConfigs: [ + [ + credentialsId: 'ci-user', + url: "https://bwa.nrs.gov.bc.ca/int/stash/scm/infra/dev-all-in-one.git" + ] + ] + ]) + } + } + stage('Deploy to development') { + agent { + label Podman.AGENT_LABEL_APP + } + environment { + PROJECT = "${params.project}" + COMPONENT = "${params.component}" + GIT_REPO = "${params.gitRepo}" + GIT_BRANCH = "${params.gitBranch}" + APP_ROLE_ID = credentials("knox-${params.project}-${params.component}-dev-role-id") + EVENT_PROVIDER = "${params.eventProvider}" + VAULT_ADDR = "https://knox.io.nrs.gov.bc.ca" + BROKER_URL = "https://broker.io.nrs.gov.bc.ca" + HTTP_PROXY = "http://test-forwardproxy.nrs.bcgov:23128" + PODMAN_ANSIBLE_INVENTORY_PATH = "${params.devInventoryPath}" + ANSIBLE_COLLECTION_VERSION = "${params.ansibleCollectionBranchTag != null && params.ansibleCollectionBranchTag != "" ? 
",${params.ansibleCollectionBranchTag}" : ",v2.0.5"}" + PODMAN_ANSIBLE_COLLECTION_URL = "github.com/bcgov/nr-polaris-collection.git#polaris/deploy${env.ANSIBLE_COLLECTION_VERSION}" + PODMAN_ANSIBLE_COLLECTION_PATH = "./collections/ansible_collections" + GIT_CREDS = credentials('ci-user') + GIT_CREDS_USR = GIT_CREDS_USR.replaceFirst('@', '%40') + CONFIG_ROLE_ID = credentials('knox-jenkins-jenkins-apps-prod-role-id') + NR_BROKER_JWT = credentials('nr-broker-jwt') + AUTHFILE = "auth.json" + CAUSE_USER_ID = "${params.userId != null && params.userId != "" ? params.userId : "github@internal"}" + INTENTION_ID = "${params.intentionId}" + DOWNLOAD_URL = "${params.downloadUrl}" + DOWNLOAD_TYPE =" ${params.downloadType}" + PODMAN_ARTIFACT_SHA256 = "${params.artifactSha256}" + PODMAN_PROJECT_VERSION = "${params.projectVersion}" + } + steps { + script { + try { + echo 'Do development deployment' + intention = new BrokerIntention(readJSON(file: "app/.jenkins/deployment-intention.json")) + intention.setEventDetails( + userName: env.CAUSE_USER_ID, + url: env.BUILD_URL, + provider: env.EVENT_PROVIDER, + environment: 'development', + packageInstallationVersion: env.PODMAN_PROJECT_VERSION, + packageInstallationSourceIntention: env.INTENTION_ID + ) + intention.open(NR_BROKER_JWT) + intention.startAction("login") + def vaultToken = intention.provisionToken("login", CONFIG_ROLE_ID) + def vault = new Vault(vaultToken) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/jdk_cacerts_pass", env) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/artifactory", env) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/cdua", env) + env.PODMAN_BUILD_NUMBER = env.BUILD_NUMBER + env.PODMAN_BUILD_URL = env.BUILD_URL + if (DOWNLOAD_TYPE.toUpperCase().trim() == 'ARTIFACTORY') { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.REGISTRY_USERNAME, password: REGISTRY_USERNAME],[var: env.REGISTRY_PASSWORD, password: REGISTRY_PASSWORD]]]) { + sh 'cd downloads ; curl -u "${REGISTRY_USERNAME}:${REGISTRY_PASSWORD}" -LO "${DOWNLOAD_URL}" ; ls -l *; cd ..' + } + } + env.PODMAN_ARTIFACT_NAME = sh( + returnStdout: true, + script: ''' + ARTIFACT_NAME=$(echo ${DOWNLOAD_URL##*/}) + echo -n "${ARTIFACT_NAME}" + ''' + ) + def downloaded_artifact_sha256 = sh( + returnStdout: true, + script: ''' + cd downloads + sha256=($(sha256sum "${PODMAN_ARTIFACT_NAME}")) + echo "${sha256}" + ''' + ) + if (downloaded_artifact_sha256.trim() == PODMAN_ARTIFACT_SHA256.trim()) { + echo "SHA256 checksum matches!" 
+ } else { + echo "SHA256 checksum failed" + currentBuild.result = 'ABORTED' + error('Quitting') + } + intention.startAction("configure") + def vaultAppToken = intention.provisionToken("configure", APP_ROLE_ID) + def vaultApp = new Vault(vaultAppToken) + vaultApp.readToObject("apps/data/dev/${env.PROJECT}/${env.COMPONENT}/development", env, keyTransform: { key -> 'PODMAN_' + key }) + vaultApp.revokeToken() + intention.endAction("configure") + intention.startAction("install") + intention.openResponse.actions.each { key, value -> + env."ACTION_TOKEN_${key.toUpperCase()}" = "${value.token}" + } + podman = new Podman(this) + podman.login(authfile: "${env.AUTHFILE}", options: "-u ${env.REGISTRY_USERNAME} -p ${env.REGISTRY_PASSWORD}") + podman.run("willhallonline/ansible:2.12-alpine-3.16", + authfile: "${env.AUTHFILE}", + options: "-v \$(pwd):/ansible -e PODMAN_*", + command: 'ansible-galaxy collection install git+https://${PODMAN_ANSIBLE_COLLECTION_URL} -p ${PODMAN_ANSIBLE_COLLECTION_PATH}') + podman.run("willhallonline/ansible:2.12-alpine-3.16", + authfile: "${env.AUTHFILE}", + options: "-v \$(pwd)/collections:/ansible/app/playbooks/collections \ + -v \$(pwd)/app:/ansible/app \ + -v \$(pwd)/infra/inventory:/ansible/inventory \ + -v \$(pwd)/files/ansible/ansible.cfg:/etc/ansible/ansible.cfg \ + -v \$(pwd)/downloads:/ansible/downloads \ + -e PODMAN_* -e ACTION_TOKEN_* -e COMPONENT*", + command: 'ansible-playbook -i inventory/${PODMAN_ANSIBLE_INVENTORY_PATH} \ + app/playbooks/playbook.yaml --extra-vars "env_vars=dev"') + podman.logout(authfile: "${env.AUTHFILE}") + intention.endAction("install") + vault.revokeToken() + intention.endAction("login") + println intention.close(true) + intention = null + } catch (IllegalStateException ex) { + echo "IllegalStateException occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } catch (IllegalArgumentException ex) { + echo "IllegalArgumentException occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } catch (Exception ex) { + echo "Other exception occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } + } + } + } + stage('Approve test deployment') { + steps { + script { + echo "Deployment to test pending approval" + def approver = input(message: 'Is this build approved for test?', + ok: 'Yes, this build is approved.', + submitterParameter: 'approver' + ) + env.TEST_DEPLOYMENT_APPROVER = "${approver}@azureidir".toLowerCase() + echo "Deployment approved for test by ${env.TEST_DEPLOYMENT_APPROVER}" + } + } + } + stage('Deploy to test') { + agent { + label Podman.AGENT_LABEL_APP + } + environment { + PROJECT = "${params.project}" + COMPONENT = "${params.component}" + GIT_REPO = "${params.gitRepo}" + GIT_BRANCH = "${params.gitBranch}" + APP_ROLE_ID = credentials("knox-${params.project}-${params.component}-test-role-id") + EVENT_PROVIDER = "${params.eventProvider}" + VAULT_ADDR = "https://vault-iit.apps.silver.devops.gov.bc.ca" + BROKER_URL = "https://nr-broker.apps.silver.devops.gov.bc.ca" + HTTP_PROXY = "http://test-forwardproxy.nrs.bcgov:23128" + PODMAN_ANSIBLE_INVENTORY_PATH = "${params.testInventoryPath}" + ANSIBLE_COLLECTION_VERSION = "${params.ansibleCollectionBranchTag != null && params.ansibleCollectionBranchTag != "" ? 
",${params.ansibleCollectionBranchTag}" : ",v2.0.5"}" + PODMAN_ANSIBLE_COLLECTION_URL = "github.com/bcgov/nr-polaris-collection.git#polaris/deploy${env.ANSIBLE_COLLECTION_VERSION}" + PODMAN_ANSIBLE_COLLECTION_PATH = "./collections/ansible_collections" + GIT_CREDS = credentials('ci-user') + GIT_CREDS_USR = GIT_CREDS_USR.replaceFirst('@', '%40') + CONFIG_ROLE_ID = credentials('knox-jenkins-jenkins-apps-prod-role-id') + NR_BROKER_JWT = credentials('nr-broker-jwt') + AUTHFILE = "auth.json" + CAUSE_USER_ID = "${params.USER_ID}" + INTENTION_ID = "${params.intentionId}" + DOWNLOAD_URL = "${params.downloadUrl}" + DOWNLOAD_TYPE =" ${params.downloadType}" + PODMAN_ARTIFACT_SHA256 = "${params.artifactSha256}" + PODMAN_PROJECT_VERSION = "${params.projectVersion}" + } + steps { + script { + try { + echo 'Do test deployment' + intention = new BrokerIntention(readJSON(file: "app/.jenkins/deployment-intention.json")) + intention.setEventDetails( + userName: "${env.TEST_DEPLOYMENT_APPROVER}", + url: env.BUILD_URL, + provider: env.EVENT_PROVIDER, + environment: 'test', + packageInstallationVersion: env.PODMAN_PROJECT_VERSION, + packageInstallationSourceIntention: env.INTENTION_ID + ) + intention.open(NR_BROKER_JWT) + intention.startAction("login") + def vaultToken = intention.provisionToken("login", CONFIG_ROLE_ID) + def vault = new Vault(vaultToken) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/jdk_cacerts_pass", env) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/artifactory", env) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/cdua", env) + env.PODMAN_BUILD_NUMBER = env.BUILD_NUMBER + env.PODMAN_BUILD_URL = env.BUILD_URL + if (DOWNLOAD_TYPE.toUpperCase().trim() == 'ARTIFACTORY') { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.REGISTRY_USERNAME, password: REGISTRY_USERNAME],[var: env.REGISTRY_PASSWORD, password: REGISTRY_PASSWORD]]]) { + sh 'cd downloads ; curl -u "${REGISTRY_USERNAME}:${REGISTRY_PASSWORD}" -LO "${DOWNLOAD_URL}" ; ls -l *; cd ..' + } + } + env.PODMAN_ARTIFACT_NAME = sh( + returnStdout: true, + script: ''' + ARTIFACT_NAME=$(echo ${DOWNLOAD_URL##*/}) + echo -n "${ARTIFACT_NAME}" + ''' + ) + def downloaded_artifact_sha256 = sh( + returnStdout: true, + script: ''' + cd downloads + sha256=($(sha256sum "${PODMAN_ARTIFACT_NAME}")) + echo "${sha256}" + ''' + ) + if (downloaded_artifact_sha256.trim() == PODMAN_ARTIFACT_SHA256.trim()) { + echo "SHA256 checksum matches!" 
+ } else { + echo "SHA256 checksum failed" + currentBuild.result = 'ABORTED' + error('Quitting') + } + intention.startAction("configure") + def vaultAppToken = intention.provisionToken("configure", APP_ROLE_ID) + def vaultApp = new Vault(vaultAppToken) + vaultApp.readToObject("apps/data/test/${env.PROJECT}/${env.COMPONENT}/test", env, keyTransform: { key -> 'PODMAN_' + key }) + vaultApp.revokeToken() + intention.endAction("configure") + intention.startAction("install") + intention.openResponse.actions.each { key, value -> + env."ACTION_TOKEN_${key.toUpperCase()}" = "${value.token}" + } + podman = new Podman(this) + podman.login(authfile: "${env.AUTHFILE}", options: "-u ${env.REGISTRY_USERNAME} -p ${env.REGISTRY_PASSWORD}") + podman.run("willhallonline/ansible:2.12-alpine-3.16", + authfile: "${env.AUTHFILE}", + options: "-v \$(pwd):/ansible -e PODMAN_*", + command: 'ansible-galaxy collection install git+https://${PODMAN_ANSIBLE_COLLECTION_URL} -p ${PODMAN_ANSIBLE_COLLECTION_PATH}') + podman.run("willhallonline/ansible:2.12-alpine-3.16", + authfile: "${env.AUTHFILE}", + options: "-v \$(pwd)/collections:/ansible/app/playbooks/collections \ + -v \$(pwd)/app:/ansible/app \ + -v \$(pwd)/infra/inventory:/ansible/inventory \ + -v \$(pwd)/files/ansible/ansible.cfg:/etc/ansible/ansible.cfg \ + -v \$(pwd)/downloads:/ansible/downloads \ + -e PODMAN_* -e ACTION_TOKEN_* -e COMPONENT*", + command: 'ansible-playbook -i inventory/${PODMAN_ANSIBLE_INVENTORY_PATH} \ + app/playbooks/playbook.yaml --extra-vars "env_vars=test"') + podman.logout(authfile: "${env.AUTHFILE}") + intention.endAction("install") + vault.revokeToken() + intention.endAction("login") + println intention.close(true) + intention = null + } catch (IllegalStateException ex) { + echo "IllegalStateException occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } catch (IllegalArgumentException ex) { + echo "IllegalArgumentException occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } catch (Exception ex) { + echo "Other exception occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } + } + } + } + stage('Pre-deployment checks for production') { + agent { + label Podman.AGENT_LABEL_APP + } + steps { + script { + // TODO: Exit pipeline for SNAPSHOT builds, which should NOT proceed beyond the test environment + echo 'Do pre-deployment checks for production' + } + } + } + stage('Approve production deployment') { + steps { + script { + echo "Deployment to production pending approval" + def approver = input(message: 'Is this build approved for production?', + ok: 'Yes, this build is approved.', + submitterParameter: 'approver' + ) + env.PRODUCTION_DEPLOYMENT_APPROVER = "${approver}@azureidir".toLowerCase() + echo "Deployment approved for production by ${env.PRODUCTION_DEPLOYMENT_APPROVER}" + } + } + } + stage('Deploy to production') { + agent { + label Podman.AGENT_LABEL_APP + } + environment { + PROJECT = "${params.project}" + COMPONENT = "${params.component}" + GIT_REPO = "${params.gitRepo}" + GIT_BRANCH = "${params.gitBranch}" + APP_ROLE_ID = credentials("knox-${params.project}-${params.component}-prod-role-id") + EVENT_PROVIDER = "${params.eventProvider}" + VAULT_ADDR = "https://vault-iit.apps.silver.devops.gov.bc.ca" + BROKER_URL = "https://nr-broker.apps.silver.devops.gov.bc.ca" + HTTP_PROXY = "http://test-forwardproxy.nrs.bcgov:23128" + PODMAN_ANSIBLE_INVENTORY_PATH = "${params.prodInventoryPath}" + ANSIBLE_COLLECTION_VERSION = "${params.ansibleCollectionBranchTag != null && 
params.ansibleCollectionBranchTag != "" ? ",${params.ansibleCollectionBranchTag}" : ",v2.0.5"}" + PODMAN_ANSIBLE_COLLECTION_URL = "github.com/bcgov/nr-polaris-collection.git#polaris/deploy${env.ANSIBLE_COLLECTION_VERSION}" + PODMAN_ANSIBLE_COLLECTION_PATH = "./collections/ansible_collections" + GIT_CREDS = credentials('ci-user') + GIT_CREDS_USR = GIT_CREDS_USR.replaceFirst('@', '%40') + CONFIG_ROLE_ID = credentials('knox-jenkins-jenkins-apps-prod-role-id') + NR_BROKER_JWT = credentials('nr-broker-jwt') + AUTHFILE = "auth.json" + CAUSE_USER_ID = "${params.USER_ID}" + INTENTION_ID = "${params.intentionId}" + DOWNLOAD_URL = "${params.downloadUrl}" + DOWNLOAD_TYPE =" ${params.downloadType}" + PODMAN_ARTIFACT_SHA256 = "${params.artifactSha256}" + PODMAN_PROJECT_VERSION = "${params.projectVersion}" + } + steps { + script { + try { + echo 'Do production deployment' + intention = new BrokerIntention(readJSON(file: "app/.jenkins/deployment-intention.json")) + intention.setEventDetails( + userName: "${env.PRODUCTION_DEPLOYMENT_APPROVER}", + url: env.BUILD_URL, + provider: env.EVENT_PROVIDER, + environment: 'production', + packageInstallationVersion: env.PODMAN_PROJECT_VERSION, + packageInstallationSourceIntention: env.INTENTION_ID + ) + intention.open(NR_BROKER_JWT) + intention.startAction("login") + def vaultToken = intention.provisionToken("login", CONFIG_ROLE_ID) + def vault = new Vault(vaultToken) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/jdk_cacerts_pass", env) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/artifactory", env) + vault.readToObject("apps/data/prod/jenkins/jenkins-apps/cdua", env) + env.PODMAN_BUILD_NUMBER = env.BUILD_NUMBER + env.PODMAN_BUILD_URL = env.BUILD_URL + if (DOWNLOAD_TYPE.toUpperCase().trim() == 'ARTIFACTORY') { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.REGISTRY_USERNAME, password: REGISTRY_USERNAME],[var: env.REGISTRY_PASSWORD, password: REGISTRY_PASSWORD]]]) { + sh 'cd downloads ; curl -u "${REGISTRY_USERNAME}:${REGISTRY_PASSWORD}" -LO "${DOWNLOAD_URL}" ; ls -l *; cd ..' + } + } + env.PODMAN_ARTIFACT_NAME = sh( + returnStdout: true, + script: ''' + ARTIFACT_NAME=$(echo ${DOWNLOAD_URL##*/}) + echo -n "${ARTIFACT_NAME}" + ''' + ) + def downloaded_artifact_sha256 = sh( + returnStdout: true, + script: ''' + cd downloads + sha256=($(sha256sum "${PODMAN_ARTIFACT_NAME}")) + echo "${sha256}" + ''' + ) + if (downloaded_artifact_sha256.trim() == PODMAN_ARTIFACT_SHA256.trim()) { + echo "SHA256 checksum matches!" 
+ } else { + echo "SHA256 checksum failed" + currentBuild.result = 'ABORTED' + error('Quitting') + } + intention.startAction("configure") + def vaultAppToken = intention.provisionToken("configure", APP_ROLE_ID) + def vaultApp = new Vault(vaultAppToken) + vaultApp.readToObject("apps/data/prod/${env.PROJECT}/${env.COMPONENT}/production", env, keyTransform: { key -> 'PODMAN_' + key}) + vaultApp.revokeToken() + intention.endAction("configure") + intention.startAction("install") + intention.openResponse.actions.each { key, value -> + env."ACTION_TOKEN_${key.toUpperCase()}" = "${value.token}" + } + podman = new Podman(this) + podman.login(authfile: "${env.AUTHFILE}", options: "-u ${env.REGISTRY_USERNAME} -p ${env.REGISTRY_PASSWORD}") + podman.run("willhallonline/ansible:2.12-alpine-3.16", + authfile: "${env.AUTHFILE}", + options: "-v \$(pwd):/ansible -e PODMAN_*", + command: 'ansible-galaxy collection install git+https://${PODMAN_ANSIBLE_COLLECTION_URL} -p ${PODMAN_ANSIBLE_COLLECTION_PATH}') + podman.run("willhallonline/ansible:2.12-alpine-3.16", + authfile: "${env.AUTHFILE}", + options: "-v \$(pwd)/collections:/ansible/app/playbooks/collections \ + -v \$(pwd)/app:/ansible/app \ + -v \$(pwd)/infra/inventory:/ansible/inventory \ + -v \$(pwd)/files/ansible/ansible.cfg:/etc/ansible/ansible.cfg \ + -v \$(pwd)/downloads:/ansible/downloads \ + -e PODMAN_* -e ACTION_TOKEN_* -e COMPONENT*", + command: 'ansible-playbook -i inventory/${PODMAN_ANSIBLE_INVENTORY_PATH} \ + app/playbooks/playbook.yaml --extra-vars "env_vars=prod"') + podman.logout(authfile: "${env.AUTHFILE}") + intention.endAction("install") + vault.revokeToken() + intention.endAction("login") + println intention.close(true) + } catch (IllegalStateException ex) { + echo "IllegalStateException occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } catch (IllegalArgumentException ex) { + echo "IllegalArgumentException occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } catch (Exception ex) { + echo "Other exception occurred: $ex" + currentBuild.result = 'ABORTED' + error('Quitting') + } + } + } + } + } + post { + unstable { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(false) + } + } + } + } + failure { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(false) + } + } + } + } + aborted { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(true) + } + } + } + } + always { + node(Podman.AGENT_LABEL_APP) { + cleanWs( + cleanWhenAborted: true, + cleanWhenFailure: false, + cleanWhenSuccess: true, + cleanWhenUnstable: false, + deleteDirs: true + ) + } + } + } +} diff --git a/pipelines/database-datafix/Jenkinsfile b/pipelines/database-datafix/Jenkinsfile new file mode 100644 index 0000000..89b0c5b --- /dev/null +++ b/pipelines/database-datafix/Jenkinsfile @@ -0,0 +1,279 @@ +@Library('polaris') +import ca.bc.gov.nrids.polaris.BrokerIntention +import ca.bc.gov.nrids.polaris.JenkinsUtil +import ca.bc.gov.nrids.polaris.Podman +import ca.bc.gov.nrids.polaris.Vault + +def intention + +pipeline { + agent { + label Podman.AGENT_LABEL_APP + } + environment { + PATH = "/sw_ux/bin:$PATH" + VAULT_ADDR = "https://knox.io.nrs.gov.bc.ca" + BROKER_URL = "https://broker.io.nrs.gov.bc.ca" + TARGET_ENV = "production" + GH_REPO = "${params.githubRepo}" + OWNER = GH_REPO.substring(0, GH_REPO.lastIndexOf("/")) + REPO = GH_REPO.substring(GH_REPO.lastIndexOf("/") + 1) + GIT_BRANCH = "${params.gitBranch}" + SEM_VERSION = 
"${params.semanticVersion}" + TAG_VERSION = "v${SEM_VERSION}" + SERVICE_PROJECT = "${params.serviceProject}" + SERVICE_NAME = "${params.serviceName}" + DATAFIX_SERVICE_NAME = "${params.datafixServiceName}" + DB_SERVICE_NAME = "${params.datacheck ? DATAFIX_SERVICE_NAME : SERVICE_NAME}" + SCHEDULED_DATAFIX_USER_ID = "${params.scheduledDatafixUserId}" + PODMAN_WORKDIR = "/liquibase/changelog" + TMP_VOLUME = "liquibase.${UUID.randomUUID().toString()[0..7]}" + TMP_OUTPUT_FILE = "liquibase.stderr.${UUID.randomUUID().toString()[0..7]}" + ONFAIL_GREP_PATTERN = "^WARNING" + NOTIFICATION_RECIPIENTS = "${params.notificationRecipients}" + EVENT_PROVIDER = "${params.eventProvider}" + PODMAN_REGISTRY = "docker.io" + CONTAINER_IMAGE_CONSUL_TEMPLATE = "hashicorp/consul-template" + CONTAINER_IMAGE_LIQUBASE = "liquibase/liquibase" + HOST = "freight.bcgov" + PODMAN_USER = "wwwadm" + NR_DATABASE_MIGRATION_ROLE_ID = credentials('knox-nr-database-migration-prod-role-id') + NR_BROKER_TOKEN = credentials('nr-broker-jwt') + DB_ROLE_ID = credentials("knox-${params.serviceProject}-${params.datacheck ? params.datafixServiceName : params.serviceName}-prod-role-id") + AUTHFILE = "auth.json" + } + stages { + stage('Setup') { + steps { + script { + env.CAUSE_USER_ID = """${SCHEDULED_DATAFIX_USER_ID ? SCHEDULED_DATAFIX_USER_ID : + JenkinsUtil.getCauseUserId(currentBuild)}""" + env.TARGET_ENV_SHORT = JenkinsUtil.convertLongEnvToShort("${env.TARGET_ENV}") + } + } + } + stage('Get github token') { + steps { + script { + // open intention to get github app creds + intention = new BrokerIntention(readJSON(file: 'scripts/intention-github.json')) + intention.setEventDetails( + userName: env.CAUSE_USER_ID, + provider: env.EVENT_PROVIDER, + url: env.BUILD_URL, + serviceName: env.SERVICE_NAME, + serviceProject: env.SERVICE_PROJECT, + environment: "tools" + ) + intention.open(NR_BROKER_TOKEN) + intention.startAction("login") + def vaultGhToken = intention.provisionToken("login", NR_DATABASE_MIGRATION_ROLE_ID) + def vaultGhApp = new Vault(vaultGhToken) + def ghAppCreds = vaultGhApp.read("apps/data/prod/db-pipeline/nr-database-migration/github_app") + env.APP_ID = ghAppCreds['gh_app_id'] + env.INSTALLATION_ID = ghAppCreds['gh_installation_id'] + env.PRIVATE_KEY = ghAppCreds['gh_private_key'] + // generate github app jwt + env.GENERATED_JWT = sh( + returnStdout: true, + script: 'set +x; scripts/generate_jwt.sh' + ) + env.GH_TOKEN = sh( + returnStdout: true, + script: 'set +x; scripts/get_installation_token.sh' + ) + intention.endAction("login") + intention.close(true) + } + } + } + stage('Check prod release') { + when { expression { return params.dryRun == false } } + steps { + script { + def rc = sh( + returnStatus: true, + script: "set +x; GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/releases/tags/${TAG_VERSION}" + ) + if (OWNER != null && OWNER != "" && REPO != null && REPO != "" && rc != 0) { + currentBuild.result = 'ABORTED' + error('Release check error') + } + } + } + } + stage('Checkout for deployment to production') { + when { environment name: 'TARGET_ENV', value: 'production' } + steps { + script { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GH_TOKEN, password: GH_TOKEN]]]) { + sh "GH_TOKEN=${GH_TOKEN} gh repo clone ${OWNER}/${REPO} ${TMP_VOLUME} -- --branch=${TAG_VERSION}" + } + } + } + } + stage('Generate Liquibase properties') { + steps { + script { + // open intention to get registry and db creds + intention = new BrokerIntention(readJSON(file: 'scripts/intention-db.json')) + 
intention.setEventDetails( + userName: env.CAUSE_USER_ID, + provider: env.EVENT_PROVIDER, + url: env.BUILD_URL, + serviceName: env.DB_SERVICE_NAME, + serviceProject: env.SERVICE_PROJECT, + environment: env.TARGET_ENV + ) + intention.open(NR_BROKER_TOKEN) + intention.startAction("login") + def vaultToken = intention.provisionToken("login", NR_DATABASE_MIGRATION_ROLE_ID) + def vault = new Vault(vaultToken) + def registryCreds = vault.read('apps/data/prod/db-pipeline/nr-database-migration/artifactory') + env.REGISTRY_USERNAME = registryCreds['sa_username'] + env.REGISTRY_PASSWORD = registryCreds['sa_password'] + env.APP_VAULT_TOKEN = intention.provisionToken("database", DB_ROLE_ID) + podman = new Podman(this, null) + podman.login(authfile: "${TMP_VOLUME}/${AUTHFILE}", options: "-u ${env.REGISTRY_USERNAME} -p ${env.REGISTRY_PASSWORD}") + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.APP_VAULT_TOKEN, password: APP_VAULT_TOKEN]]]) { + podman.run("${CONTAINER_IMAGE_CONSUL_TEMPLATE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + -v \$(pwd)/scripts/config.hcl:${PODMAN_WORKDIR}/config.hcl \ + -e TARGET_ENV_SHORT=${env.TARGET_ENV_SHORT} \ + -e PODMAN_WORKDIR=${PODMAN_WORKDIR} \ + -e VAULT_TOKEN=${APP_VAULT_TOKEN}", + command: "-config '/liquibase/changelog/config.hcl' \ + -template '/liquibase/changelog/liquibase.properties.tpl:${PODMAN_WORKDIR}/liquibase.properties' \ + -once", + returnStatus: true + ) + } + intention.endAction("login") + } + } + } + stage('Run Liquibase datafix select') { + when { + anyOf { + expression { return params.datafix == true } + expression { return params.datacheck == true } + } + } + steps { + script { + intention.startAction("database") + podman.run("${CONTAINER_IMAGE_LIQUBASE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + --workdir ${PODMAN_WORKDIR}", + command: "--defaultsFile=liquibase.properties --sql-file=scripts/datafix_select.sql execute-sql" + ) + } + } + } + stage('Run Liquibase dry run') { + when { expression { return params.dryRun == true } } + steps { + script { + podman.run("${CONTAINER_IMAGE_LIQUBASE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + --workdir ${PODMAN_WORKDIR}", + command: "--defaultsFile=liquibase.properties update-sql" + ) + } + } + } + stage('Run Liquibase') { + when { expression { return params.dryRun == false } } + steps { + script { + podman.run("${CONTAINER_IMAGE_LIQUBASE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + --workdir ${PODMAN_WORKDIR}", + command: "--defaultsFile=liquibase.properties update 2> ${TMP_OUTPUT_FILE}" + ) + // extract message and send notification + sh """ + ONFAIL_WARNING_COUNT="\$(grep '${ONFAIL_GREP_PATTERN}' ${TMP_OUTPUT_FILE} | wc -l)" + if [ \$ONFAIL_WARNING_COUNT -gt 0 ] && [ "$TARGET_ENV" = "production" ]; then + ONFAIL_MESSAGE="\$(sed -n '/${ONFAIL_GREP_PATTERN}/{N;p}' ${TMP_OUTPUT_FILE})" + printf "${BUILD_URL}\n\n\${ONFAIL_MESSAGE}" | mailx -s "Data quality issue detected" "${NOTIFICATION_RECIPIENTS}" + fi + """ + intention.endAction("database") + podman.logout(authfile: "${TMP_VOLUME}/${AUTHFILE}") + } + } + } 
+ stage('Test') { + when { expression { return params.dryRun == false } } + steps { + sh 'echo Test changes' + } + } + } + post { + success { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(true) + } + } + } + } + unstable { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(false) + } + } + } + } + failure { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(false) + } + } + } + } + aborted { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(true) + } + } + } + } + always { + node(Podman.AGENT_LABEL_APP) { + cleanWs( + cleanWhenAborted: true, + cleanWhenFailure: false, + cleanWhenSuccess: true, + cleanWhenUnstable: false, + deleteDirs: true + ) + } + } + } +} diff --git a/pipelines/database-migration/Jenkinsfile b/pipelines/database-migration/Jenkinsfile new file mode 100644 index 0000000..fedb82d --- /dev/null +++ b/pipelines/database-migration/Jenkinsfile @@ -0,0 +1,358 @@ +@Library('polaris') +import ca.bc.gov.nrids.polaris.BrokerIntention +import ca.bc.gov.nrids.polaris.JenkinsUtil +import ca.bc.gov.nrids.polaris.Podman +import ca.bc.gov.nrids.polaris.Vault + +def intention +def env_short = JenkinsUtil.convertLongEnvToShort("${params.environment}") + +pipeline { + agent { + label Podman.AGENT_LABEL_APP + } + environment { + PATH = "/sw_ux/bin:$PATH" + VAULT_ADDR = "https://knox.io.nrs.gov.bc.ca" + BROKER_URL = "https://broker.io.nrs.gov.bc.ca" + TARGET_ENV = "${params.environment}" + GH_REPO = "${params.githubRepo}" + OWNER = GH_REPO.substring(0, GH_REPO.lastIndexOf("/")) + REPO = GH_REPO.substring(GH_REPO.lastIndexOf("/") + 1) + GIT_BRANCH = "${params.gitBranch}" + SEM_VERSION = "${params.semanticVersion}" + TAG_VERSION = "v${SEM_VERSION}" + SERVICE_PROJECT = "${params.serviceProject}" + SERVICE_NAME = "${params.serviceName}" + PODMAN_WORKDIR = "/liquibase/changelog" + TMP_VOLUME = "liquibase.${UUID.randomUUID().toString()[0..7]}" + TMP_OUTPUT_FILE = "liquibase.stderr.${UUID.randomUUID().toString()[0..7]}" + ONFAIL_GREP_PATTERN = "^WARNING" + NOTIFICATION_RECIPIENTS = "${params.notificationRecipients}" + EVENT_PROVIDER = "${params.eventProvider}" + PODMAN_REGISTRY = "docker.io" + CONTAINER_IMAGE_CONSUL_TEMPLATE = "hashicorp/consul-template" + CONTAINER_IMAGE_LIQUBASE = "liquibase/liquibase" + HOST = "freight.bcgov" + PODMAN_USER = "wwwadm" + NR_DATABASE_MIGRATION_ROLE_ID = credentials('knox-nr-database-migration-prod-role-id') + NR_BROKER_TOKEN = credentials('nr-broker-jwt') + DB_ROLE_ID = credentials("knox-${params.serviceProject}-${params.serviceName}-${env_short}-role-id") + AUTHFILE = "auth.json" + } + stages { + stage('Setup') { + steps { + script { + env.CAUSE_USER_ID = JenkinsUtil.getCauseUserId(currentBuild) + } + } + } + stage('Get github token') { + steps { + script { + // open intention to get github app creds + intention = new BrokerIntention(readJSON(file: 'scripts/intention-github.json')) + intention.setEventDetails( + userName: env.CAUSE_USER_ID, + provider: env.EVENT_PROVIDER, + url: env.BUILD_URL, + serviceName: env.SERVICE_NAME, + serviceProject: env.SERVICE_PROJECT, + environment: "tools" + ) + intention.open(NR_BROKER_TOKEN) + intention.startAction("login") + def vaultGhToken = intention.provisionToken("login", NR_DATABASE_MIGRATION_ROLE_ID) + def vaultGhApp = new Vault(vaultGhToken) + def ghAppCreds = vaultGhApp.read("apps/data/prod/db-pipeline/nr-database-migration/github_app") + env.APP_ID = 
ghAppCreds['gh_app_id'] + env.INSTALLATION_ID = ghAppCreds['gh_installation_id'] + env.PRIVATE_KEY = ghAppCreds['gh_private_key'] + // generate github app jwt + env.GENERATED_JWT = sh( + returnStdout: true, + script: 'set +x; scripts/generate_jwt.sh' + ) + env.GH_TOKEN = sh( + returnStdout: true, + script: 'set +x; scripts/get_installation_token.sh' + ) + intention.endAction("login") + intention.close(true) + } + } + } + stage('Check prod release') { + when { expression { return params.dryRun == false } } + steps { + script { + def rc = sh( + returnStatus: true, + script: "set +x; GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/releases/tags/${TAG_VERSION}" + ) + if (OWNER != null && OWNER != "" && REPO != null && REPO != "" && rc == 0) { + currentBuild.result = 'ABORTED' + error('Release check error') + } + } + } + } + stage('Checkout for deployment to development') { + when { environment name: 'TARGET_ENV', value: 'development' } + steps { + script { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GH_TOKEN, password: GH_TOKEN]]]) { + sh "GH_TOKEN=${GH_TOKEN} gh repo clone ${OWNER}/${REPO} ${TMP_VOLUME} -- --branch=${GIT_BRANCH}" + } + } + } + } + stage('Checkout for deployment to test') { + when { environment name: 'TARGET_ENV', value: 'test' } + steps { + script { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GH_TOKEN, password: GH_TOKEN]]]) { + sh "GH_TOKEN=${GH_TOKEN} gh repo clone ${OWNER}/${REPO} ${TMP_VOLUME} -- --branch=${TAG_VERSION}-development" + } + } + } + } + stage('Checkout for deployment to production') { + when { environment name: 'TARGET_ENV', value: 'production' } + steps { + script { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GH_TOKEN, password: GH_TOKEN]]]) { + sh "GH_TOKEN=${GH_TOKEN} gh repo clone ${OWNER}/${REPO} ${TMP_VOLUME} -- --branch=${TAG_VERSION}-test" + } + } + } + } + stage('Generate Liquibase properties') { + steps { + script { + // open intention to get registry and db creds + intention = new BrokerIntention(readJSON(file: 'scripts/intention-db.json')) + intention.setEventDetails( + userName: env.CAUSE_USER_ID, + provider: env.EVENT_PROVIDER, + url: env.BUILD_URL, + serviceName: env.SERVICE_NAME, + serviceProject: env.SERVICE_PROJECT, + environment: env.TARGET_ENV + ) + intention.open(NR_BROKER_TOKEN) + intention.startAction("login") + def vaultToken = intention.provisionToken("login", NR_DATABASE_MIGRATION_ROLE_ID) + def vault = new Vault(vaultToken) + def registryCreds = vault.read('apps/data/prod/db-pipeline/nr-database-migration/artifactory') + env.REGISTRY_USERNAME = registryCreds['sa_username'] + env.REGISTRY_PASSWORD = registryCreds['sa_password'] + env.APP_VAULT_TOKEN = intention.provisionToken("database", DB_ROLE_ID) + podman = new Podman(this, null) + podman.login(authfile: "${TMP_VOLUME}/${AUTHFILE}", options: "-u ${env.REGISTRY_USERNAME} -p ${env.REGISTRY_PASSWORD}") + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.APP_VAULT_TOKEN, password: APP_VAULT_TOKEN]]]) { + podman.run("${CONTAINER_IMAGE_CONSUL_TEMPLATE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + -v \$(pwd)/scripts/config.hcl:${PODMAN_WORKDIR}/config.hcl \ + -e TARGET_ENV_SHORT=${env_short} \ + -e PODMAN_WORKDIR=${PODMAN_WORKDIR} \ + -e VAULT_TOKEN=${APP_VAULT_TOKEN}", + command: "-config '/liquibase/changelog/config.hcl' \ + -template 
'/liquibase/changelog/liquibase.properties.tpl:${PODMAN_WORKDIR}/liquibase.properties' \ + -once", + returnStatus: true + ) + } + intention.endAction("login") + } + } + } + stage('Run Liquibase datafix select') { + when { + anyOf { + expression { return params.datafix == true } + expression { return params.datacheck == true } + } + } + steps { + script { + intention.startAction("database") + podman.run("${CONTAINER_IMAGE_LIQUBASE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + --workdir ${PODMAN_WORKDIR}", + command: "--defaultsFile=liquibase.properties --sql-file=scripts/datafix_select.sql execute-sql" + ) + } + } + } + stage('Run Liquibase dry run') { + when { expression { return params.dryRun == true } } + steps { + script { + podman.run("${CONTAINER_IMAGE_LIQUBASE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + --workdir ${PODMAN_WORKDIR}", + command: "--defaultsFile=liquibase.properties update-sql" + ) + } + } + } + stage('Run Liquibase') { + when { expression { return params.dryRun == false } } + steps { + script { + podman.run("${CONTAINER_IMAGE_LIQUBASE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + --workdir ${PODMAN_WORKDIR}", + command: "--defaultsFile=liquibase.properties update 2> ${TMP_OUTPUT_FILE}" + ) + // extract message and send notification + sh """ + ONFAIL_WARNING_COUNT="\$(grep '${ONFAIL_GREP_PATTERN}' ${TMP_OUTPUT_FILE} | wc -l)" + if [ \$ONFAIL_WARNING_COUNT -gt 0 ] && [ "$TARGET_ENV" = "production" ]; then + ONFAIL_MESSAGE="\$(sed -n '/${ONFAIL_GREP_PATTERN}/{N;p}' ${TMP_OUTPUT_FILE})" + printf "${BUILD_URL}\n\n\${ONFAIL_MESSAGE}" | mailx -s "Data quality issue detected" "${NOTIFICATION_RECIPIENTS}" + fi + """ + // tag version in liquibase + podman.run("${CONTAINER_IMAGE_LIQUBASE}", + authfile: "${TMP_VOLUME}/${AUTHFILE}", + options: "--rm \ + --security-opt label=disable \ + --userns keep-id \ + -v \$(pwd)/${TMP_VOLUME}:${PODMAN_WORKDIR} \ + --workdir ${PODMAN_WORKDIR}", + command: "--defaultsFile=liquibase.properties tag ${TAG_VERSION}" + ) + intention.endAction("database") + podman.logout(authfile: "${TMP_VOLUME}/${AUTHFILE}") + } + } + } + stage('Test') { + when { expression { return params.dryRun == false } } + steps { + sh 'echo Test changes' + } + } + stage('Create tag') { + when { expression { return params.dryRun == false } } + steps { + script { + // set tag name and get commit id + if (TARGET_ENV == 'development') { + env.TAG_NAME = "${TAG_VERSION}-development" + env.COMMIT_ID = sh( + returnStdout: true, + script: "set +x; GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/commits/heads/main --jq '.sha'" + ).trim() + } + if (TARGET_ENV == 'test') { + env.TAG_NAME = "${TAG_VERSION}-test" + env.COMMIT_ID = sh( + returnStdout: true, + script: "set +x; GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/git/refs/tags/${TAG_VERSION}-development --jq '.object.sha'" + ).trim() + } + if (TARGET_ENV == 'production') { + env.TAG_NAME = "${TAG_VERSION}" + env.COMMIT_ID = sh( + returnStdout: true, + script: "set +x; GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/git/refs/tags/${TAG_VERSION}-test --jq '.object.sha'" + ).trim() + } + // delete non-production tags + def rc = sh( + returnStatus: true, + script: "set 
+x; GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/git/refs/tags/${TAG_NAME} --silent" + ) + if ((TARGET_ENV == 'development' || TARGET_ENV == 'test') && rc == 0) { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GH_TOKEN, password: GH_TOKEN]]]) { + sh "GH_TOKEN=${GH_TOKEN} gh api --method DELETE repos/${OWNER}/${REPO}/git/refs/tags/${TAG_NAME}" + } + } + // create new non-production tag + if (TARGET_ENV == 'development' || TARGET_ENV == 'test') { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GH_TOKEN, password: GH_TOKEN]]]) { + sh "GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/git/refs -f 'ref=refs/tags/${TAG_NAME}' -f 'sha=${COMMIT_ID}'" + } + } + // create production release + if (TARGET_ENV == 'production') { + wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [[var: env.GH_TOKEN, password: GH_TOKEN]]]) { + sh "GH_TOKEN=${GH_TOKEN} gh api repos/${OWNER}/${REPO}/releases -f 'tag_name=${TAG_NAME}' -F 'generate_release_notes=true'" + sh "GH_TOKEN=${GH_TOKEN} gh api --method DELETE repos/${OWNER}/${REPO}/git/refs/tags/${TAG_NAME}-development" + sh "GH_TOKEN=${GH_TOKEN} gh api --method DELETE repos/${OWNER}/${REPO}/git/refs/tags/${TAG_NAME}-test" + } + } + } + } + } + } + post { + success { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(true) + } + } + } + } + unstable { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(false) + } + } + } + } + failure { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(false) + } + } + } + } + aborted { + node(Podman.AGENT_LABEL_APP) { + script { + if (intention) { + println intention.close(false) + } + } + } + } + always { + node(Podman.AGENT_LABEL_APP) { + cleanWs( + cleanWhenAborted: true, + cleanWhenFailure: false, + cleanWhenSuccess: true, + cleanWhenUnstable: false, + deleteDirs: true + ) + } + } + } +} diff --git a/scripts/config.hcl b/scripts/config.hcl new file mode 100644 index 0000000..0e40c8a --- /dev/null +++ b/scripts/config.hcl @@ -0,0 +1,78 @@ +vault { + # This is the address of the Vault leader. The protocol (http(s)) portion + # of the address is required. + address = "https://knox.io.nrs.gov.bc.ca" + + # This is a Vault Enterprise namespace to use for reading/writing secrets. + # + # This value can also be specified via the environment variable VAULT_NAMESPACE. + # namespace = "" + + # This is the token to use when communicating with the Vault server. + # Like other tools that integrate with Vault, Consul Template makes the + # assumption that you provide it with a Vault token; it does not have the + # incorporated logic to generate tokens via Vault's auth methods. + # + # This value can also be specified via the environment variable VAULT_TOKEN. + # It is highly recommended that you do not put your token in plain-text in a + # configuration file. + # + # When using a token from Vault Agent, the vault_agent_token_file setting + # should be used instead, as that will take precedence over this field. + # token = "" + + # This tells Consul Template to load the Vault token from the contents of a file. + # If this field is specified: + # - by default Consul Template will not try to renew the Vault token, if you want it + # to renew you will need to specify renew_token = true as below. + # - Consul Template will periodically stat the file and update the token if it has + # changed. 
+  # vault_agent_token_file = "/tmp/vault/agent/token"
+
+  # This tells Consul Template that the provided token is actually a wrapped
+  # token that should be unwrapped using Vault's cubbyhole response wrapping
+  # before being used. Please see Vault's cubbyhole response wrapping
+  # documentation for more information.
+  unwrap_token = false
+
+  # The default lease duration Consul Template will use on a Vault secret that
+  # does not have a lease duration. This is used to calculate the sleep duration
+  # for rechecking a Vault secret value. This field is optional and will default to
+  # 5 minutes.
+  # default_lease_duration = "60s"
+
+  # The fraction of the lease duration of a non-renewable secret Consul
+  # Template will wait for. This is used to calculate the sleep duration for
+  # rechecking a Vault secret value. This field is optional and will default to
+  # 90% of the lease time.
+  # lease_renewal_threshold = 0.90
+
+  # This option tells Consul Template to automatically renew the Vault token
+  # given. If you are unfamiliar with Vault's architecture, Vault requires
+  # tokens be renewed at some regular interval or they will be revoked. Consul
+  # Template will automatically renew the token at half the lease duration of
+  # the token. The default value is true, but this option can be disabled if
+  # you want to renew the Vault token using an out-of-band process.
+  #
+  # Note that secrets specified in a template (using {{secret}} for example)
+  # are always renewed, even if this option is set to false. This option only
+  # applies to the top-level Vault token itself.
+  # renew_token = true
+
+  # This section details the retry options for connecting to Vault. Please see
+  # the retry options in the Consul section for more information (they are the
+  # same).
+  retry {
+    enabled = true
+    attempts = 6
+    backoff = "250ms"
+    max_backoff = "1m"
+  }
+
+  # This section details the SSL options for connecting to the Vault server.
+  # Please see the SSL options in the Consul section for more information (they
+  # are the same).
+  ssl {
+    # ...
+  }
+}
diff --git a/scripts/generate_jwt.sh b/scripts/generate_jwt.sh
new file mode 100755
index 0000000..4e66a3e
--- /dev/null
+++ b/scripts/generate_jwt.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+# https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/generating-a-json-web-token-jwt-for-a-github-app
+set -o pipefail
+
+export client_id="${APP_ID}"
+export private_key="${PRIVATE_KEY}"
+pem="${private_key}"
+
+now=$(date +%s)
+iat=$((${now} - 60)) # Issues 60 seconds in the past
+exp=$((${now} + 600)) # Expires 10 minutes in the future
+
+b64enc() { openssl base64 | tr -d '=' | tr '/+' '_-' | tr -d '\n'; }
+
+header_json='{
+    "typ":"JWT",
+    "alg":"RS256"
+}'
+# Header encode
+header=$( echo -n "${header_json}" | b64enc )
+
+payload_json='{
+    "iat":'"${iat}"',
+    "exp":'"${exp}"',
+    "iss":'"${client_id}"'
+}'
+# Payload encode
+payload=$( echo -n "${payload_json}" | b64enc )
+
+# Signature
+header_payload="${header}"."${payload}"
+signature=$(
+    openssl dgst -sha256 -sign <(echo -n "${pem}") \
+    <(echo -n "${header_payload}") | b64enc
+)
+
+# Create JWT
+JWT="${header_payload}"."${signature}"
+printf "${JWT}"
diff --git a/scripts/get_installation_token.sh b/scripts/get_installation_token.sh
new file mode 100755
index 0000000..338d540
--- /dev/null
+++ b/scripts/get_installation_token.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+export GENERATED_JWT="${GENERATED_JWT}"
+export INSTALLATION_ID="${INSTALLATION_ID}"
+github_api_url="https://api.github.com/app/installations/${INSTALLATION_ID}/access_tokens"
+tokens=$(curl -s -X POST \
+  -H "Authorization: Bearer ${GENERATED_JWT}" \
+  -H "Accept: application/vnd.github.v3+json" \
+  "${github_api_url}" )
+
+# extract the token, more information about expiry for example is present as well:
+# this token can be used to call the API's or used in a Git clone call using https
+token=$(echo "$tokens" | jq -r '.token')
+echo "${token}"
diff --git a/scripts/intention-db.json b/scripts/intention-db.json
new file mode 100644
index 0000000..4d81be0
--- /dev/null
+++ b/scripts/intention-db.json
@@ -0,0 +1,37 @@
+{
+  "event": {
+    "provider": "",
+    "reason": "Job triggered",
+    "url": ""
+  },
+  "actions": [
+    {
+      "action": "server-access",
+      "id": "login",
+      "provision": ["token/self"],
+      "service": {
+        "name": "",
+        "project": "",
+        "environment": "",
+        "target": {
+          "name": "nr-database-migration",
+          "project": "db-pipeline",
+          "environment": "production"
+        }
+      }
+    },
+    {
+      "action": "database-access",
+      "id": "database",
+      "provision": ["token/self"],
+      "service": {
+        "name": "",
+        "project": "",
+        "environment": ""
+      }
+    }
+  ],
+  "user": {
+    "name": ""
+  }
+}
\ No newline at end of file
diff --git a/scripts/intention-github.json b/scripts/intention-github.json
new file mode 100644
index 0000000..0e97abe
--- /dev/null
+++ b/scripts/intention-github.json
@@ -0,0 +1,27 @@
+{
+  "event": {
+    "provider": "",
+    "reason": "Job triggered",
+    "url": ""
+  },
+  "actions": [
+    {
+      "action": "server-access",
+      "id": "login",
+      "provision": ["token/self"],
+      "service": {
+        "name": "",
+        "project": "",
+        "environment": "",
+        "target": {
+          "name": "nr-database-migration",
+          "project": "db-pipeline",
+          "environment": "production"
+        }
+      }
+    }
+  ],
+  "user": {
+    "name": ""
+  }
+}
\ No newline at end of file
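
Note on the Liquibase properties rendering: the 'Generate Liquibase properties' stages mount the cloned changelog repository and use consul-template (driven by scripts/config.hcl above, with VAULT_TOKEN set to the intention-provisioned token) to render /liquibase/changelog/liquibase.properties.tpl into the liquibase.properties consumed by the later Liquibase runs. The template itself lives in each service's changelog repository and is not part of this change. A minimal sketch of what such a template could look like follows; the Vault KV path and the jdbc_url/username/password key names are illustrative assumptions, as is the Oracle driver — a real template would mirror whatever secret layout the service actually uses.

  # liquibase.properties.tpl (hypothetical example, rendered by consul-template)
  # The KV v2 path and field names below are placeholders, not the real layout.
  {{ with secret "apps/data/prod/my-project/my-component/database" -}}
  url={{ .Data.data.jdbc_url }}
  username={{ .Data.data.username }}
  password={{ .Data.data.password }}
  {{- end }}
  changeLogFile=changelog.xml
  driver=oracle.jdbc.OracleDriver

Rendering at job time keeps credentials out of the repository: they are read with the short-lived Vault token provisioned by the broker intention, and the generated liquibase.properties exists only inside the temporary working copy (TMP_VOLUME), which the post-stage cleanWs step removes.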