Merge pull request #536 from PAWECOGmbH/development
Added new backup strategy
ptruessel authored Oct 22, 2024
2 parents e10686d + 797aa94 commit ca48b74
Showing 5 changed files with 170 additions and 89 deletions.
55 changes: 35 additions & 20 deletions config/backup/backup.sh
@@ -1,33 +1,48 @@
#!/bin/bash

# Activates the automatic export of variables
# Enable automatic export of variables
set -a

# Get the directory of the current script
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Set the project root (assuming the script is two levels below the project root)
PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"

# Load the .env file from the project root
source "$PROJECT_ROOT/.env"
source "$(dirname "$0")/../../.env"

# Deactivates the automatic export of variables
set +a
# Dynamically generate volume names based on the project
DB_VOLUME="${COMPOSE_PROJECT_NAME}_db_volume"
USERDATA_VOLUME="${COMPOSE_PROJECT_NAME}_userdata_volume"

# Checks whether the /backup folder exists and creates it if required
# Date format for versioning (e.g., 20241022_2300)
TIMESTAMP=$(date +"%Y%m%d_%H%M")

# Check if the /backup folder exists and create it if necessary
if [ ! -d "/backup" ]; then
mkdir -p /backup
fi

# Backup database
docker compose -f compose-backup.yml run db_backup
scp -i ${SSH_KEY_PATH} /backup/database.tar.gz ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}
# Create remote directories if they don't exist
ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "mkdir -p ${REMOTE_BACKUP_PATH}/db ${REMOTE_BACKUP_PATH}/userdata ${REMOTE_BACKUP_PATH}/lucee"

# Backup database volume and store in the remote db directory
docker run --rm -v ${DB_VOLUME}:/volume -v /backup:/backup alpine sh -c "tar -czf /backup/database_${TIMESTAMP}.tar.gz -C /volume ."
scp -i ${SSH_KEY_PATH} /backup/database_${TIMESTAMP}.tar.gz ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/db/

# Backup userdata volume and store in the remote userdata directory
docker run --rm -v ${USERDATA_VOLUME}:/volume -v /backup:/backup alpine sh -c "tar -czf /backup/userdata_${TIMESTAMP}.tar.gz -C /volume ."
scp -i ${SSH_KEY_PATH} /backup/userdata_${TIMESTAMP}.tar.gz ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/userdata/

# Backup userdata
docker compose -f compose-backup.yml run userdata_backup
scp -i ${SSH_KEY_PATH} /backup/userdata.tar.gz ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}
# Backup Lucee image and store in the remote lucee directory
docker save -o /backup/image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}_${TIMESTAMP}.tar ${LUCEE_IMAGE}:${LUCEE_IMAGE_VERSION}
scp -i ${SSH_KEY_PATH} /backup/image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}_${TIMESTAMP}.tar ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/lucee/

# Backup Lucee image
docker compose -f compose-backup.yml run lucee_image_backup
scp -i ${SSH_KEY_PATH} /backup/image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}.tar ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}
# Rotate backups: Keep only the latest 30 backups per type

# For database backups
ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "cd ${REMOTE_BACKUP_PATH}/db && ls -tp | grep -v '/$' | tail -n +31 | xargs -I {} rm -- {}"

# For userdata backups
ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "cd ${REMOTE_BACKUP_PATH}/userdata && ls -tp | grep -v '/$' | tail -n +31 | xargs -I {} rm -- {}"

# For Lucee image backups
ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "cd ${REMOTE_BACKUP_PATH}/lucee && ls -tp | grep -v '/$' | tail -n +31 | xargs -I {} rm -- {}"

# Disable automatic export of variables
set +a
20 changes: 0 additions & 20 deletions config/backup/compose-backup.yml

This file was deleted.

80 changes: 54 additions & 26 deletions config/backup/readme.md
@@ -1,46 +1,74 @@
***Backup for the Production Environment***

# Backup for the Production and Staging Environments

**Purpose**

This directory contains the configurations and scripts for backing up and restoring the database, user data, and the Lucee image. These backups are intended only for the production environment.
This directory contains the necessary configurations and scripts for backing up and restoring the **database**, **user data**, and the **Lucee image** in the **production** and **staging** environments. The backup and restore processes are automated using Docker and shell scripts to ensure consistency, reliability, and minimal manual intervention.

By utilizing Docker Compose and shell scripts, the backup process is automated to ensure consistent and reliable data backups with minimal manual intervention.
Note: These scripts are not intended for use in the **development** environment.

**Structure**

- compose-backup.yml: This file defines the container services needed to create backups. Each service performs a backup for the database, user data, or the Lucee image.
## **Structure**

- backup.sh: A shell script that automates the process of backing up the database, user data, and Lucee image. It uses Docker Compose and secure copying (SCP) to transfer backups to the designated backup server.
- **backup.sh**: A shell script that automates the process of creating backups for the **database volume**, **user data volume**, and **Lucee image**. Each backup is timestamped to ensure that multiple versions can be maintained. The script also uses `scp` to transfer the backups securely to a remote server.

- restore.sh: A shell script that automates the process of restoring the database, user data, and Lucee image. It retrieves backups from the backup server and restores them to the appropriate volumes.
- **restore.sh**: A shell script that automates the process of restoring backups from the remote server. It retrieves the backups for the **database**, **user data**, and **Lucee image**, and restores them to the appropriate Docker volumes.

**Usage**
- **.env**: Contains environment variables required for the backup and restore processes, such as volume names, SSH key, server IP, and remote paths. The backup does not have its own `.env` file; instead, it uses the `.env` file from the main project.
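
For reference, a minimal sketch of the variables the backup and restore scripts read from that `.env` file. The variable names are taken from `backup.sh` and `restore.sh`; every value below is a placeholder and must be adapted to your environment.

```bash
# Placeholder values - adapt before running the scripts.
COMPOSE_PROJECT_NAME=myproject        # used to derive <project>_db_volume and <project>_userdata_volume
LUCEE_IMAGE=lucee                     # image saved with `docker save` and loaded again on restore
LUCEE_IMAGE_VERSION=6.0               # tag of the Lucee image
MYSQL_CONTAINER_NAME=myproject-db     # container restarted after a database restore
LUCEE_CONTAINER_NAME=myproject-lucee  # container restarted after a userdata restore
SSH_KEY_PATH=/root/.ssh/id_backup     # private key used by ssh/scp to reach the backup server
SERVER_USER=backup
SERVER_IP=203.0.113.10
REMOTE_BACKUP_PATH=/backups           # base path on the remote server (holds db/, userdata/, lucee/)
```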

*Backup*
## **Usage**

To create a backup, run the following command from this directory:
### **Backup**

sh backup.sh
To create a backup of the **database**, **user data**, and **Lucee image**, follow these steps:

This will:
1. Backup the database volume.
2. Backup the user data volume.
3. Backup the Lucee image.
4. Securely transfer all backups to the remote backup server.
1. Ensure that the `.env` file is correctly configured with your **production** or **staging** environment settings (e.g., volume names, remote server path, SSH keys, and server IP).

*Restore*
To restore from a backup, run the following command:
2. Navigate to the `config/backup/` directory:
`cd config/backup/`

sh restore.sh
3. Run the backup script:
`bash backup.sh`

This will:

1. Retrieve the latest backups from the remote backup server.
2. Restore the database volume.
3. Restore the user data volume.
4. Load the Lucee image into Docker.
- Backup the **database volume**.
- Backup the **user data volume**.
- Backup the **Lucee image**.
- Securely transfer all backups to the remote backup server.

Each backup will be **timestamped** in the format `YYYYMMDD_HHMM`, ensuring you can differentiate between multiple backup versions.
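
With the remote directories that `backup.sh` creates, the backup location on the remote server therefore ends up looking roughly like this (the timestamps are examples; the image file name follows the `image_<LUCEE_IMAGE>_<LUCEE_IMAGE_VERSION>_<TIMESTAMP>.tar` pattern from the script):

```
${REMOTE_BACKUP_PATH}/
├── db/
│   ├── database_20241021_0000.tar.gz
│   └── database_20241022_0000.tar.gz
├── userdata/
│   ├── userdata_20241021_0000.tar.gz
│   └── userdata_20241022_0000.tar.gz
└── lucee/
    └── image_<LUCEE_IMAGE>_<LUCEE_IMAGE_VERSION>_20241022_0000.tar
```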

### **Restore**

To restore from a backup, first navigate to the `config/backup/` directory:
`cd config/backup/`

Then, if you run the restore script without any parameters:
`bash restore.sh`

It will display a list of available options, such as:
`Usage: restore.sh [--db [TIMESTAMP]] [--userdata [TIMESTAMP]] [--lucee-image [TIMESTAMP]] [--list]`

To perform a restore, you need to specify which backup you want to restore by using one of the following options:
- To restore the **latest** backup for the **database**:
`bash restore.sh --db`

- To restore a **specific** database backup by **timestamp**:
`bash restore.sh --db 20241019_2300`

- To list all available backups on the remote server:
`bash restore.sh --list`


## **Automating Backups**

To automate the backup process, you can set up a **cron job** to run the backup script at regular intervals (e.g., daily). For example, to run the backup every night at midnight, add the following entry to your crontab:
`0 0 * * * /path/to/your/project/config/backup/backup.sh`
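
Because cron jobs run with a minimal environment, it can be safer to invoke the script through `bash` explicitly and to capture its output in a log file. The entry below is only a sketch; the project path and log location are placeholders:

```
# Example crontab entry: run nightly at midnight and append output to a log (placeholder paths).
0 0 * * * /bin/bash /path/to/your/project/config/backup/backup.sh >> /var/log/backup.log 2>&1
```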


**Notes**
- The backups created by this process are only for the **production environment**. Please ensure that the environment variables in the `.env` file are configured correctly before running any backups or restores.
## **Notes**

- Make sure to update the `.env` file with the correct values (such as volume names, SSH keys, and server IP) specific to the production setup.
- These backups are intended for the **production** and **staging** environments. Ensure that the environment variables in the `.env` file are correctly configured before running any backups or restores.
- The backup script automatically **rotates** backups, keeping only the **latest 30 backups** per backup type (database, user data, Lucee image) by removing older backups on the remote server; the removal command is annotated in the sketch after this list.
- Always ensure that your **SSH keys** and **server information** are secure, as they are used for transferring backups between the production or staging environment and the remote server.
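
For reference, the rotation that `backup.sh` executes on the remote host breaks down as follows; this sketch shows the `db/` directory, and the `userdata/` and `lucee/` directories are rotated with the same pattern:

```bash
# Executed on the remote server via ssh; shown here for the db/ directory.
cd "${REMOTE_BACKUP_PATH}/db" &&
  ls -tp |                # list entries newest-first, appending '/' to directory names
  grep -v '/$' |          # keep plain files only (drop directories)
  tail -n +31 |           # skip the 30 newest files, i.e. select the older ones
  xargs -I {} rm -- {}    # delete each of the selected older backups
```

Since `ls -t` sorts by modification time, the 30 most recently uploaded files of each type survive every run.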
102 changes: 80 additions & 22 deletions config/backup/restore.sh
@@ -1,35 +1,93 @@
#!/bin/bash

# Activates the automatic export of variables
# Enable automatic export of variables
set -a

# Get the directory of the current script
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Set the project root (assuming the script is two levels below the project root)
PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"

# Load the .env file from the project root
source "$PROJECT_ROOT/.env"
source "$(dirname "$0")/../../.env"

# Deactivates the automatic export of variables
set +a
# Dynamically generate volume names based on the project
DB_VOLUME="${COMPOSE_PROJECT_NAME}_db_volume"
USERDATA_VOLUME="${COMPOSE_PROJECT_NAME}_userdata_volume"

# Checks whether the /restore folder exists and creates it if required
# Check if the /restore folder exists and create it if necessary
if [ ! -d "/restore" ]; then
mkdir -p /restore
fi

# Restore database
scp -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/database.tar.gz /restore/
docker run --rm -v ${COMPOSE_PROJECT_NAME}_db_volume:/volume -v /restore:/backup alpine sh -c "cd /volume && tar -xzf /backup/database.tar.gz"
docker restart ${MYSQL_CONTAINER_NAME}
# Functions for the various restore processes

restore_db() {
if [ -z "$1" ]; then
echo "Restoring the latest database backup..."
# Get the latest database backup
LATEST_DB_BACKUP=$(ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "ls -t ${REMOTE_BACKUP_PATH}/db | head -n 1")
else
echo "Restoring database backup from $1..."
LATEST_DB_BACKUP="database_$1.tar.gz"
fi
scp -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/db/${LATEST_DB_BACKUP} /restore/database.tar.gz
docker run --rm -v ${DB_VOLUME}:/volume -v /restore:/restore alpine sh -c "tar -xzf /restore/database.tar.gz -C /volume"
docker restart ${MYSQL_CONTAINER_NAME}
}

restore_userdata() {
if [ -z "$1" ]; then
echo "Restoring the latest userdata backup..."
# Get the latest userdata backup
LATEST_USERDATA_BACKUP=$(ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "ls -t ${REMOTE_BACKUP_PATH}/userdata | head -n 1")
else
echo "Restoring userdata backup from $1..."
LATEST_USERDATA_BACKUP="userdata_$1.tar.gz"
fi
scp -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/userdata/${LATEST_USERDATA_BACKUP} /restore/userdata.tar.gz
docker run --rm -v ${USERDATA_VOLUME}:/volume -v /restore:/restore alpine sh -c "tar -xzf /restore/userdata.tar.gz -C /volume"
docker restart ${LUCEE_CONTAINER_NAME}
}

# Restore userdata
scp -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/userdata.tar.gz /restore/
docker run --rm -v ${COMPOSE_PROJECT_NAME}_userdata_volume:/volume -v /restore:/backup alpine sh -c "cd /volume && tar -xzf /backup/userdata.tar.gz"
docker restart ${LUCEE_CONTAINER_NAME}
restore_lucee_image() {
if [ -z "$1" ]; then
echo "Restoring the latest Lucee image backup..."
# Get the latest Lucee image backup
LATEST_LUCEE_BACKUP=$(ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "ls -t ${REMOTE_BACKUP_PATH}/lucee | head -n 1")
else
echo "Restoring Lucee image backup from $1..."
LATEST_LUCEE_BACKUP="image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}_$1.tar"
fi
scp -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/lucee/${LATEST_LUCEE_BACKUP} /restore/image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}.tar
docker load -i /restore/image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}.tar
}

# Restore Lucee image
scp -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP}:${REMOTE_BACKUP_PATH}/image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}.tar /restore/
docker load -i /restore/image_${LUCEE_IMAGE}_${LUCEE_IMAGE_VERSION}.tar
list_backups() {
echo "Available backups on remote server:"
echo "Database backups:"
ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "ls ${REMOTE_BACKUP_PATH}/db"
echo ""
echo "Userdata backups:"
ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "ls ${REMOTE_BACKUP_PATH}/userdata"
echo ""
echo "Lucee image backups:"
ssh -i ${SSH_KEY_PATH} ${SERVER_USER}@${SERVER_IP} "ls ${REMOTE_BACKUP_PATH}/lucee"
echo ""
}

# Show help if no options have been specified
if [ $# -eq 0 ]; then
echo "Usage: $0 [--db [TIMESTAMP]] [--userdata [TIMESTAMP]] [--lucee-image [TIMESTAMP]] [--list]"
exit 1
fi

# Process the specified options
while [[ "$#" -gt 0 ]]; do
case $1 in
--db) restore_db "$2"; shift ;;
--userdata) restore_userdata "$2"; shift ;;
--lucee-image) restore_lucee_image "$2"; shift ;;
--list) list_backups; exit 0 ;;
*) echo "Unknown option: $1"; exit 1 ;;
esac
shift
done

# Disable automatic export of variables
set +a
2 changes: 1 addition & 1 deletion config/example.env
@@ -32,7 +32,7 @@ INBUCKET_WEB_PORT=9000 # Must be unique on docker host.
INBUCKET_POP3_PORT=1100 # Must be unique on docker host.


# Backup settings (production environment)
## Backup settings

# Backup path on the remote server
REMOTE_BACKUP_PATH=/backups
