Deploy to AWS #1547
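
# Deployment pipeline: init-and-plan -> apply -> build_and_push_docker_image ->
# create_kafka_topic -> stop_logstash -> restore_database -> update_services ->
# post_deploy -> clear_opensearch -> start_logstash.
# Every job after init-and-plan is skipped when the "Plan only" input is set.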
name: 'Deploy to AWS'

on:
  push:
    branches:
      - main
      - 'releases/**'
  workflow_dispatch:
    inputs:
      deploy-env:
        description: 'Environment to deploy'
        required: true
        type: choice
        options:
          - Development
          - Test
          - Pre-prod
          - Staging
          - Production
        default: Development
      deploy-plan-only:
        description: 'Plan only'
        required: false
        type: boolean
        default: false
      restore-db:
        description: 'Restore the database to its original state (reset the database for Development; restore the anonymized dump for Test and Pre-prod)'
        required: false
        type: boolean
        default: false
      clear-opensearch:
        description: 'Clear the custom OpenSearch indexes and templates'
        required: false
        type: boolean
        default: false
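# Every job resolves its GitHub environment the same way: the deploy-env input wins on a
# manual run; otherwise it is derived from the branch (main -> Development, releases/** ->
# Pre-prod, production-* -> Production, sandbox-* -> Staging, anything else -> None).
# A manual run can also be triggered from the CLI; a sketch, assuming the workflow file is
# named deploy-to-aws.yml (adjust to the actual filename):
#   gh workflow run deploy-to-aws.yml -f deploy-env=Test -f deploy-plan-only=true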
jobs:
  init-and-plan:
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Checkout config repository
        uses: actions/checkout@v4
        with:
          repository: 'opensupplyhub/ci-deployment'
          path: 'terraform-config'
          token: ${{ secrets.PAT }}
      - name: Copy tfvars for ${{ vars.ENV_NAME }}
        run: |
          cat "terraform-config/environments/${{ env.TFVAR_NAME }}" "deployment/environments/${{ env.TFVAR_NAME }}" > "deployment/terraform/${{ env.SETTINGS_BUCKET }}.tfvars"
        env:
          SETTINGS_BUCKET: oshub-settings-${{ steps.get_env_name.outputs.lowercase }}
          TFVAR_NAME: terraform-${{ steps.get_env_name.outputs.lowercase }}.tfvars
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_version: 1.4.0
          terraform_wrapper: false
      - name: Terraform Plan for ${{ vars.ENV_NAME }}
        run: ./deployment/infra plan
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
          SETTINGS_BUCKET: oshub-settings-${{ steps.get_env_name.outputs.lowercase }}
      - name: Copy planfile to S3 bucket for ${{ vars.ENV_NAME }}
        run: aws s3 cp "deployment/terraform/${{ env.SETTINGS_BUCKET }}.tfplan" "s3://${{ env.SETTINGS_BUCKET }}/terraform/${{ env.SETTINGS_BUCKET }}.tfplan"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          SETTINGS_BUCKET: oshub-settings-${{ steps.get_env_name.outputs.lowercase }}
          AWS_DEFAULT_REGION: "eu-west-1"
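  # apply: downloads the planfile produced by init-and-plan from the settings bucket and
  # applies it with the same ./deployment/infra wrapper. Skipped on plan-only runs.
  # (The wrapper presumably behaves the same locally if SETTINGS_BUCKET and AWS credentials
  # are exported in the environment, as they are here.)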
  apply:
    needs: init-and-plan
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_version: 1.4.0
          terraform_wrapper: false
      - name: Checkout
        uses: actions/checkout@v4
      - name: Get planfile from S3 bucket for ${{ vars.ENV_NAME }}
        run: aws s3 cp "s3://${{ env.SETTINGS_BUCKET }}/terraform/${{ env.SETTINGS_BUCKET }}.tfplan" "deployment/terraform/${{ env.SETTINGS_BUCKET }}.tfplan"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          SETTINGS_BUCKET: oshub-settings-${{ steps.get_env_name.outputs.lowercase }}
          AWS_DEFAULT_REGION: "eu-west-1"
      - name: Terraform Apply for ${{ vars.ENV_NAME }}
        run: |
          ./deployment/infra apply
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          SETTINGS_BUCKET: oshub-settings-${{ steps.get_env_name.outputs.lowercase }}
          AWS_DEFAULT_REGION: "eu-west-1"
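  # build_and_push_docker_image: builds the React static assets, bundles them into the Django
  # image, and pushes all service images to ECR tagged with the short commit SHA. The database
  # anonymizer images are only built for the production and test environments (see the if: guards).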
  build_and_push_docker_image:
    needs: apply
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Cache dependencies
        uses: actions/cache@v2
        with:
          path: |
            src/react/node_modules
          key: ${{ runner.os }}-node-${{ hashFiles('**/yarn.lock') }}
      - name: Install dependencies
        working-directory: src/react
        run: yarn install
      - name: Build static assets
        working-directory: src/react
        run: yarn run build
      - name: Move static
        run: mv src/react/build src/django/static
      - name: Configure AWS credentials for ${{ vars.ENV_NAME }}
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: "eu-west-1"
      - name: Get GIT_COMMIT
        run: |
          export SHORT_SHA="$(git rev-parse --short HEAD)"
          export GIT_COMMIT_CI="${SHORT_SHA:0:7}"
          echo "GIT_COMMIT=$GIT_COMMIT_CI" >> $GITHUB_ENV
      - name: Login to Amazon ECR
        uses: aws-actions/amazon-ecr-login@v1
      - name: Build and push Kafka Tools Docker image to ECR for ${{ vars.ENV_NAME }}
        uses: docker/build-push-action@v2
        with:
          context: src/kafka-tools/
          file: src/kafka-tools/Dockerfile
          push: true
          tags: ${{ vars.ECR_REGISTRY }}/${{ vars.IMAGE_NAME }}-kafka-${{ steps.get_env_name.outputs.lowercase }}:${{ env.GIT_COMMIT }}
      - name: Build and push Django Docker image to ECR for ${{ vars.ENV_NAME }}
        uses: docker/build-push-action@v2
        with:
          context: src/django
          file: src/django/Dockerfile
          push: true
          tags: ${{ vars.ECR_REGISTRY }}/${{ vars.IMAGE_NAME }}-${{ steps.get_env_name.outputs.lowercase }}:${{ env.GIT_COMMIT }}
      - name: Build and push Batch Docker image to ECR for ${{ vars.ENV_NAME }}
        uses: docker/build-push-action@v2
        with:
          context: src/batch
          file: src/batch/Dockerfile
          push: true
          tags: ${{ vars.ECR_REGISTRY }}/${{ vars.IMAGE_NAME }}-batch-${{ steps.get_env_name.outputs.lowercase }}:${{ env.GIT_COMMIT }}
          build-args: |
            GIT_COMMIT=${{ env.GIT_COMMIT }}
            DOCKER_IMAGE=${{ vars.DOCKER_IMAGE }}
            ENVIRONMENT=${{ steps.get_env_name.outputs.lowercase }}
      - name: Build and push Dedupe Hub Docker image to ECR for ${{ vars.ENV_NAME }}
        uses: docker/build-push-action@v2
        with:
          context: src/dedupe-hub/api
          file: src/dedupe-hub/api/Dockerfile
          push: true
          tags: ${{ vars.ECR_REGISTRY }}/${{ vars.IMAGE_NAME }}-deduplicate-${{ steps.get_env_name.outputs.lowercase }}:${{ env.GIT_COMMIT }}
      - name: Build and push Logstash Docker image to ECR for ${{ vars.ENV_NAME }}
        uses: docker/build-push-action@v2
        with:
          context: src/logstash
          file: src/logstash/Dockerfile
          push: true
          tags: ${{ vars.ECR_REGISTRY }}/${{ vars.IMAGE_NAME }}-logstash-${{ steps.get_env_name.outputs.lowercase }}:${{ env.GIT_COMMIT }}
      - name: Build and push Database Anonymizer Docker image to ECR for ${{ vars.ENV_NAME }}
        uses: docker/build-push-action@v2
        if: ${{ steps.get_env_name.outputs.lowercase == 'production' }}
        with:
          context: deployment/terraform/database_anonymizer_sheduled_task/docker
          file: deployment/terraform/database_anonymizer_sheduled_task/docker/Dockerfile
          push: true
          tags: ${{ vars.ECR_REGISTRY }}/${{ vars.IMAGE_NAME }}-database-anonymizer-${{ steps.get_env_name.outputs.lowercase }}:${{ env.GIT_COMMIT }}
      - name: Build and push Anonymize Database Dump Docker image to ECR for ${{ vars.ENV_NAME }}
        uses: docker/build-push-action@v2
        if: ${{ steps.get_env_name.outputs.lowercase == 'test' }}
        with:
          context: deployment/terraform/anonymized_database_dump_scheduled_task/docker
          file: deployment/terraform/anonymized_database_dump_scheduled_task/docker/Dockerfile
          push: true
          tags: ${{ vars.ECR_REGISTRY }}/${{ vars.IMAGE_NAME }}-anonymized-database-dump-${{ steps.get_env_name.outputs.lowercase }}:${{ env.GIT_COMMIT }}
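  # create_kafka_topic: runs ./deployment/run_kafka_task to create or update the Kafka topics
  # for the target environment.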
  create_kafka_topic:
    needs: build_and_push_docker_image
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Create or update Kafka topics for ${{ vars.ENV_NAME }}
        run: |
          ./deployment/run_kafka_task ${{ vars.ENV_NAME }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
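  # stop_logstash: scales the Logstash ECS service down to zero, but only when a database
  # restore or an OpenSearch clean-up was requested; start_logstash scales it back up at the end.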
  stop_logstash:
    needs: create_kafka_topic
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Stop Logstash for ${{ vars.ENV_NAME }}
        if: ${{ inputs.restore-db == true || inputs.clear-opensearch == true }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
        run: |
          aws \
            ecs update-service --desired-count 0 --cluster=ecsOpenSupplyHub${{ vars.ENV_NAME }}Cluster \
            --service=OpenSupplyHub${{ vars.ENV_NAME }}AppLogstash
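  # restore_database: runs on a self-hosted runner. For Test and Pre-prod it restores the
  # anonymized dump via the anon-tools restore image; for Development it resets the database
  # from fixtures instead. Both steps are gated on the restore-db input.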
  restore_database:
    needs: stop_logstash
    runs-on: self-hosted
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Restore database for ${{ vars.ENV_NAME }}
        if: ${{ (vars.ENV_NAME != 'Production' && vars.ENV_NAME != 'Staging' && vars.ENV_NAME != 'Development') && inputs.restore-db == true }}
        run: |
          cd ./src/anon-tools
          mkdir -p ./keys
          echo "${{ secrets.KEY_FILE }}" > ./keys/key
          docker build -t restore -f Dockerfile.restore .
          docker run -v ./keys/key:/keys/key --shm-size=2gb --rm \
            -e AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \
            -e AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \
            -e AWS_DEFAULT_REGION=eu-west-1 \
            -e ENVIRONMENT=${{ vars.ENV_NAME }} \
            -e DATABASE_NAME=opensupplyhub \
            -e DATABASE_USERNAME=opensupplyhub \
            -e DATABASE_PASSWORD=${{ secrets.DATABASE_PASSWORD }} \
            restore
      - name: Reset database for ${{ vars.ENV_NAME }}
        if: ${{ vars.ENV_NAME == 'Development' && inputs.restore-db == true }}
        run: |
          echo "Creating an S3 folder with production location lists to reset the DB if it doesn't exist."
          aws s3api put-object --bucket ${{ env.LIST_S3_BUCKET }} --key ${{ env.RESET_LISTS_FOLDER }}
          echo "Copying all production location files from the repo to S3 to ensure consistency between local and environment resets."
          aws s3 cp ./src/django/${{ env.RESET_LISTS_FOLDER }} s3://${{ env.LIST_S3_BUCKET }}/${{ env.RESET_LISTS_FOLDER }} --recursive
          echo "Triggering reset commands."
          ./deployment/run_cli_task ${{ vars.ENV_NAME }} "reset_database"
          ./deployment/run_cli_task ${{ vars.ENV_NAME }} "matchfixtures"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
          LIST_S3_BUCKET: opensupplyhub-${{ steps.get_env_name.outputs.lowercase }}-files-eu-west-1
          RESET_LISTS_FOLDER: "api/fixtures/list_files/"
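  # update_services: forces new deployments of the Django and Dedupe Hub ECS services so the
  # running tasks pick up the freshly pushed images.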
  update_services:
    needs: restore_database
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Update ECS Django Service with new Image for ${{ vars.ENV_NAME }}
        run: |
          aws ecs update-service --cluster ${{ vars.CLUSTER }} --service ${{ vars.SERVICE_NAME }} --force-new-deployment --region ${{ env.AWS_DEFAULT_REGION }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
      - name: Update ECS Dedupe Hub Service with new Image for ${{ vars.ENV_NAME }}
        run: |
          aws ecs update-service --cluster ${{ vars.CLUSTER }} --service ${{ vars.SERVICE_NAME }}DD --force-new-deployment --region ${{ env.AWS_DEFAULT_REGION }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
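  # post_deploy: runs migrations and other post-deployment tasks through ./deployment/run_cli_task.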
  post_deploy:
    needs: update_services
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Run migrations and other post-deployment tasks for ${{ vars.ENV_NAME }}
        run: |
          ./deployment/run_cli_task ${{ vars.ENV_NAME }} "post_deployment"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
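  # clear_opensearch: looks up the OpenSearch domain endpoint plus the EFS filesystem and access
  # point IDs, then runs the clear_opensearch script against the cluster over SSH through the
  # bastion host. Only runs when restore-db or clear-opensearch was requested.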
  clear_opensearch:
    needs: post_deploy
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Get OpenSearch domain, filesystem and access point IDs for ${{ vars.ENV_NAME }}
        if: ${{ inputs.restore-db == true || inputs.clear-opensearch == true }}
        id: export_variables
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
        run: |
          OS_DOMAIN_NAME=$(echo "${{ vars.ENV_NAME }}-os-domain" | tr '[:upper:]' '[:lower:]')
          OPENSEARCH_DOMAIN=$(aws \
            es describe-elasticsearch-domains --domain-names $OS_DOMAIN_NAME \
            --query "DomainStatusList[].Endpoints.vpc" --output text)
          EFS_ID=$(aws \
            efs describe-file-systems \
            --query "FileSystems[?Tags[?Key=='Environment' && Value=='${{ vars.ENV_NAME }}']].FileSystemId" \
            --output text)
          EFS_AP_ID=$(aws \
            efs describe-access-points \
            --query "AccessPoints[?FileSystemId=='$EFS_ID'].AccessPointId" \
            --output text)
          echo "EFS_ID=$EFS_ID" >> $GITHUB_OUTPUT
          echo "EFS_AP_ID=$EFS_AP_ID" >> $GITHUB_OUTPUT
          echo "OPENSEARCH_DOMAIN=$OPENSEARCH_DOMAIN" >> $GITHUB_OUTPUT
      - name: Clear the custom OpenSearch indexes and templates for ${{ vars.ENV_NAME }}
        if: ${{ inputs.restore-db == true || inputs.clear-opensearch == true }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          OPENSEARCH_DOMAIN: ${{ steps.export_variables.outputs.OPENSEARCH_DOMAIN }}
          EFS_AP_ID: ${{ steps.export_variables.outputs.EFS_AP_ID }}
          EFS_ID: ${{ steps.export_variables.outputs.EFS_ID }}
          BASTION_IP: ${{ vars.BASTION_IP }}
        run: |
          cd ./deployment/clear_opensearch
          mkdir -p script
          mkdir -p ssh
          echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ssh/config
          printf "%s\n" "${{ secrets.SSH_PRIVATE_KEY }}" > ssh/id_rsa
          echo "" >> ssh/id_rsa
          echo -n ${{ vars.BASTION_IP }} > script/.env
          envsubst < clear_opensearch.sh.tpl > script/clear_opensearch.sh
          envsubst < run.sh.tpl > script/run.sh
          docker run --rm \
            -v ./script:/script \
            -v ./ssh:/root/.ssh \
            kroniak/ssh-client bash /script/run.sh
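  # start_logstash: scales the Logstash ECS service back up to one task once the database and
  # OpenSearch work is finished.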
  start_logstash:
    needs: clear_opensearch
    runs-on: ubuntu-latest
    environment: ${{ inputs.deploy-env || (github.ref_name == 'main' && 'Development') || (startsWith(github.ref_name, 'releases/') && 'Pre-prod') || (startsWith(github.ref_name, 'production-') && 'Production') || (startsWith(github.ref_name, 'sandbox-') && 'Staging') || 'None' }}
    if: ${{ inputs.deploy-plan-only == false }}
    steps:
      - name: Get Environment Name for ${{ vars.ENV_NAME }}
        id: get_env_name
        uses: Entepotenz/change-string-case-action-min-dependencies@v1
        with:
          string: ${{ vars.ENV_NAME }}
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Start Logstash for ${{ vars.ENV_NAME }}
        if: ${{ inputs.restore-db == true || inputs.clear-opensearch == true }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: "eu-west-1"
        run: |
          aws \
            ecs update-service --desired-count 1 --cluster=ecsOpenSupplyHub${{ vars.ENV_NAME }}Cluster \
            --service=OpenSupplyHub${{ vars.ENV_NAME }}AppLogstash