Skip to content

Commit

Permalink
Add data encryption feature
Browse files Browse the repository at this point in the history
  • Loading branch information
andyone committed Jul 23, 2024
1 parent 533584e commit 4644752
Show file tree
Hide file tree
Showing 13 changed files with 373 additions and 295 deletions.
8 changes: 8 additions & 0 deletions app/app.go
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ const (
SERVER_PORT = "server:port"
SERVER_ACCESS_TOKEN = "server:access-token"
STORAGE_TYPE = "storage:type"
STORAGE_ENCRYPTION_KEY = "storage:encryption-key"
STORAGE_FS_PATH = "storage-fs:path"
STORAGE_FS_MODE = "storage-fs:mode"
STORAGE_SFTP_HOST = "storage-sftp:host"
Expand Down Expand Up @@ -338,6 +339,13 @@ func validateConfig() error {
)
}

if knfu.GetS(STORAGE_ENCRYPTION_KEY) != "" {
validators = append(validators,
&knf.Validator{STORAGE_ENCRYPTION_KEY, knfv.LenGreater, 16},
&knf.Validator{STORAGE_ENCRYPTION_KEY, knfv.LenLess, 96},
)
}

errs := knfu.Validate(validators)

if len(errs) > 0 {
Expand Down
131 changes: 9 additions & 122 deletions app/basic.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,30 +8,21 @@ package app
// ////////////////////////////////////////////////////////////////////////////////// //

import (
"encoding/base64"
"fmt"
"os"
"time"

"github.com/essentialkaos/ek/v13/events"
"github.com/essentialkaos/ek/v13/fmtc"
"github.com/essentialkaos/ek/v13/fmtutil"
"github.com/essentialkaos/ek/v13/fsutil"
"github.com/essentialkaos/ek/v13/log"
"github.com/essentialkaos/ek/v13/options"
"github.com/essentialkaos/ek/v13/path"
"github.com/essentialkaos/ek/v13/spinner"
"github.com/essentialkaos/ek/v13/timeutil"
"github.com/essentialkaos/ek/v13/terminal"

knfu "github.com/essentialkaos/ek/v13/knf/united"

"github.com/essentialkaos/atlassian-cloud-backuper/backuper"
"github.com/essentialkaos/atlassian-cloud-backuper/backuper/confluence"
"github.com/essentialkaos/atlassian-cloud-backuper/backuper/jira"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader/fs"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader/s3"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader/sftp"
)

// ////////////////////////////////////////////////////////////////////////////////// //
Expand All @@ -45,6 +36,12 @@ func startApp(args options.Arguments) error {
addEventsHandlers(dispatcher)
}

if knfu.GetS(STORAGE_ENCRYPTION_KEY) != "" {
fmtc.NewLine()
terminal.Warn("▲ Backup will be encrypted while uploading. You will not be able to use the")
terminal.Warn(" backup if you lose the encryption key. Keep it in a safe place.")
}

defer temp.Clean()

target := args.Get(0).String()
Expand Down Expand Up @@ -93,116 +90,6 @@ func startApp(args options.Arguments) error {
return nil
}

// getBackuper returns backuper instance for the given target
// (TARGET_JIRA or TARGET_CONFLUENCE)
func getBackuper(target string) (backuper.Backuper, error) {
	bkpConfig, err := getBackuperConfig(target)

	if err != nil {
		return nil, err
	}

	var bkpr backuper.Backuper

	switch target {
	case TARGET_JIRA:
		bkpr, err = jira.NewBackuper(bkpConfig)
	case TARGET_CONFLUENCE:
		bkpr, err = confluence.NewBackuper(bkpConfig)
	}

	// Propagate the constructor error — the previous version returned
	// `bkpr, nil`, silently swallowing NewBackuper failures
	return bkpr, err
}

// getOutputFileName returns name for backup output file, rendered from
// the configured date template (or a default one) for the given target
func getOutputFileName(target string) string {
	var template string

	switch target {
	case TARGET_JIRA:
		template = knfu.GetS(JIRA_OUTPUT_FILE, `jira-backup-%Y-%m-%d`) + ".zip"
	case TARGET_CONFLUENCE:
		// Fix: this branch previously read JIRA_OUTPUT_FILE (copy-paste bug),
		// so a custom Confluence output-file template was ignored
		template = knfu.GetS(CONFLUENCE_OUTPUT_FILE, `confluence-backup-%Y-%m-%d`) + ".zip"
	}

	return timeutil.Format(time.Now(), template)
}

// getBackuperConfig returns configuration for backuper built from the
// united configuration. Returns an error for an unknown target.
func getBackuperConfig(target string) (*backuper.Config, error) {
	if target != TARGET_JIRA && target != TARGET_CONFLUENCE {
		return nil, fmt.Errorf("Unknown target %q", target)
	}

	// Access credentials are shared between both targets
	cfg := &backuper.Config{
		Account: knfu.GetS(ACCESS_ACCOUNT),
		Email:   knfu.GetS(ACCESS_EMAIL),
		APIKey:  knfu.GetS(ACCESS_API_KEY),
	}

	// Only the attachments/cloud-format knobs differ per target
	if target == TARGET_JIRA {
		cfg.WithAttachments = knfu.GetB(JIRA_INCLUDE_ATTACHMENTS)
		cfg.ForCloud = knfu.GetB(JIRA_CLOUD_FORMAT)
	} else {
		cfg.WithAttachments = knfu.GetB(CONFLUENCE_INCLUDE_ATTACHMENTS)
		cfg.ForCloud = knfu.GetB(CONFLUENCE_CLOUD_FORMAT)
	}

	return cfg, nil
}

// getUploader returns uploader instance for the given target based on
// the configured storage type (fs, sftp, or s3)
func getUploader(target string) (uploader.Uploader, error) {
	var err error
	var updr uploader.Uploader

	switch knfu.GetS(STORAGE_TYPE) {
	case STORAGE_FS:
		updr, err = fs.NewUploader(&fs.Config{
			Path: path.Join(knfu.GetS(STORAGE_FS_PATH), target),
			Mode: knfu.GetM(STORAGE_FS_MODE, 0600),
		})

	case STORAGE_SFTP:
		// Read the key into a predeclared variable: the previous
		// `keyData, err := …` shadowed the outer err, so the error
		// returned by sftp.NewUploader was silently dropped
		var keyData []byte

		keyData, err = readPrivateKeyData()

		if err != nil {
			return nil, err
		}

		updr, err = sftp.NewUploader(&sftp.Config{
			Host: knfu.GetS(STORAGE_SFTP_HOST),
			User: knfu.GetS(STORAGE_SFTP_USER),
			Key:  keyData,
			Path: path.Join(knfu.GetS(STORAGE_SFTP_PATH), target),
			Mode: knfu.GetM(STORAGE_SFTP_MODE, 0600),
		})

	case STORAGE_S3:
		updr, err = s3.NewUploader(&s3.Config{
			Host:        knfu.GetS(STORAGE_S3_HOST),
			Region:      knfu.GetS(STORAGE_S3_REGION),
			AccessKeyID: knfu.GetS(STORAGE_S3_ACCESS_KEY),
			SecretKey:   knfu.GetS(STORAGE_S3_SECRET_KEY),
			Bucket:      knfu.GetS(STORAGE_S3_BUCKET),
			Path:        path.Join(knfu.GetS(STORAGE_S3_PATH), target),
		})
	}

	return updr, err
}

// readPrivateKeyData reads private key data
func readPrivateKeyData() ([]byte, error) {
if fsutil.IsExist(knfu.GetS(STORAGE_SFTP_KEY)) {
return os.ReadFile(knfu.GetS(STORAGE_SFTP_KEY))
}

return base64.StdEncoding.DecodeString(knfu.GetS(STORAGE_SFTP_KEY))
}

// addEventsHandlers registers events handlers
func addEventsHandlers(dispatcher *events.Dispatcher) {
dispatcher.AddHandler(backuper.EVENT_BACKUP_STARTED, func(payload any) {
Expand All @@ -212,7 +99,7 @@ func addEventsHandlers(dispatcher *events.Dispatcher) {

dispatcher.AddHandler(backuper.EVENT_BACKUP_PROGRESS, func(payload any) {
p := payload.(*backuper.ProgressInfo)
spinner.Update("[%d%%] %s", p.Progress, p.Message)
spinner.Update("{s}(%d%%){!} %s", p.Progress, p.Message)
})

dispatcher.AddHandler(backuper.EVENT_BACKUP_SAVING, func(payload any) {
Expand All @@ -231,7 +118,7 @@ func addEventsHandlers(dispatcher *events.Dispatcher) {
dispatcher.AddHandler(uploader.EVENT_UPLOAD_PROGRESS, func(payload any) {
p := payload.(*uploader.ProgressInfo)
spinner.Update(
"[%s] Uploading file (%s/%s)",
"{s}(%5s){!} Uploading file {s-}(%7s | %7s){!}",
fmtutil.PrettyPerc(p.Progress),
fmtutil.PrettySize(p.Current),
fmtutil.PrettySize(p.Total),
Expand Down
151 changes: 151 additions & 0 deletions app/common.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
package app

// ////////////////////////////////////////////////////////////////////////////////// //
// //
// Copyright (c) 2024 ESSENTIAL KAOS //
// Apache License, Version 2.0 <https://www.apache.org/licenses/LICENSE-2.0> //
// //
// ////////////////////////////////////////////////////////////////////////////////// //

import (
"encoding/base64"
"fmt"
"os"
"time"

"github.com/essentialkaos/ek/v13/fsutil"
"github.com/essentialkaos/ek/v13/path"
"github.com/essentialkaos/ek/v13/timeutil"

"github.com/essentialkaos/katana"

knfu "github.com/essentialkaos/ek/v13/knf/united"

"github.com/essentialkaos/atlassian-cloud-backuper/backuper"
"github.com/essentialkaos/atlassian-cloud-backuper/backuper/confluence"
"github.com/essentialkaos/atlassian-cloud-backuper/backuper/jira"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader/fs"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader/s3"
"github.com/essentialkaos/atlassian-cloud-backuper/uploader/sftp"
)

// ////////////////////////////////////////////////////////////////////////////////// //

// getBackuper returns backuper instance for the given target
// (TARGET_JIRA or TARGET_CONFLUENCE)
func getBackuper(target string) (backuper.Backuper, error) {
	bkpConfig, err := getBackuperConfig(target)

	if err != nil {
		return nil, err
	}

	var bkpr backuper.Backuper

	switch target {
	case TARGET_JIRA:
		bkpr, err = jira.NewBackuper(bkpConfig)
	case TARGET_CONFLUENCE:
		bkpr, err = confluence.NewBackuper(bkpConfig)
	}

	// Propagate the constructor error — the previous version returned
	// `bkpr, nil`, silently swallowing NewBackuper failures
	return bkpr, err
}

// getOutputFileName returns name for backup output file, rendered from
// the configured date template (or a default one) for the given target
func getOutputFileName(target string) string {
	var template string

	switch target {
	case TARGET_JIRA:
		template = knfu.GetS(JIRA_OUTPUT_FILE, `jira-backup-%Y-%m-%d`) + ".zip"
	case TARGET_CONFLUENCE:
		// Fix: this branch previously read JIRA_OUTPUT_FILE (copy-paste bug),
		// so a custom Confluence output-file template was ignored
		template = knfu.GetS(CONFLUENCE_OUTPUT_FILE, `confluence-backup-%Y-%m-%d`) + ".zip"
	}

	return timeutil.Format(time.Now(), template)
}

// getBackuperConfig returns configuration for backuper built from the
// united configuration. Returns an error for an unknown target.
func getBackuperConfig(target string) (*backuper.Config, error) {
	if target != TARGET_JIRA && target != TARGET_CONFLUENCE {
		return nil, fmt.Errorf("Unknown target %q", target)
	}

	// Access credentials are shared between both targets
	cfg := &backuper.Config{
		Account: knfu.GetS(ACCESS_ACCOUNT),
		Email:   knfu.GetS(ACCESS_EMAIL),
		APIKey:  knfu.GetS(ACCESS_API_KEY),
	}

	// Only the attachments/cloud-format knobs differ per target
	if target == TARGET_JIRA {
		cfg.WithAttachments = knfu.GetB(JIRA_INCLUDE_ATTACHMENTS)
		cfg.ForCloud = knfu.GetB(JIRA_CLOUD_FORMAT)
	} else {
		cfg.WithAttachments = knfu.GetB(CONFLUENCE_INCLUDE_ATTACHMENTS)
		cfg.ForCloud = knfu.GetB(CONFLUENCE_CLOUD_FORMAT)
	}

	return cfg, nil
}

// getUploader returns uploader instance for the given target based on
// the configured storage type (fs, sftp, or s3). If an encryption key is
// configured, the uploader is given a katana secret so the backup is
// encrypted while uploading.
func getUploader(target string) (uploader.Uploader, error) {
	var err error
	var updr uploader.Uploader
	var secret *katana.Secret

	// Encryption is optional — a secret is only created when the key is set
	if knfu.GetS(STORAGE_ENCRYPTION_KEY) != "" {
		secret = katana.NewSecret(knfu.GetS(STORAGE_ENCRYPTION_KEY))
	}

	switch knfu.GetS(STORAGE_TYPE) {
	case STORAGE_FS:
		updr, err = fs.NewUploader(&fs.Config{
			Path:   path.Join(knfu.GetS(STORAGE_FS_PATH), target),
			Mode:   knfu.GetM(STORAGE_FS_MODE, 0600),
			Secret: secret,
		})

	case STORAGE_SFTP:
		// Read the key into a predeclared variable: the previous
		// `keyData, err := …` shadowed the outer err, so the error
		// returned by sftp.NewUploader was silently dropped
		var keyData []byte

		keyData, err = readPrivateKeyData()

		if err != nil {
			return nil, err
		}

		updr, err = sftp.NewUploader(&sftp.Config{
			Host:   knfu.GetS(STORAGE_SFTP_HOST),
			User:   knfu.GetS(STORAGE_SFTP_USER),
			Key:    keyData,
			Path:   path.Join(knfu.GetS(STORAGE_SFTP_PATH), target),
			Mode:   knfu.GetM(STORAGE_SFTP_MODE, 0600),
			Secret: secret,
		})

	case STORAGE_S3:
		updr, err = s3.NewUploader(&s3.Config{
			Host:        knfu.GetS(STORAGE_S3_HOST),
			Region:      knfu.GetS(STORAGE_S3_REGION),
			AccessKeyID: knfu.GetS(STORAGE_S3_ACCESS_KEY),
			SecretKey:   knfu.GetS(STORAGE_S3_SECRET_KEY),
			Bucket:      knfu.GetS(STORAGE_S3_BUCKET),
			Path:        path.Join(knfu.GetS(STORAGE_S3_PATH), target),
			Secret:      secret,
		})
	}

	return updr, err
}

// readPrivateKeyData reads private key data
func readPrivateKeyData() ([]byte, error) {
if fsutil.IsExist(knfu.GetS(STORAGE_SFTP_KEY)) {
return os.ReadFile(knfu.GetS(STORAGE_SFTP_KEY))
}

return base64.StdEncoding.DecodeString(knfu.GetS(STORAGE_SFTP_KEY))
}
2 changes: 1 addition & 1 deletion app/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ func downloadBackupHandler(rw http.ResponseWriter, r *http.Request) {

log.Info("Uploading backup to storage", lf)

err = updr.Write(br, outputFile)
err = updr.Write(br, outputFile, 0)

if err != nil {
log.Error("Can't upload backup file: %v", err, lf)
Expand Down
15 changes: 12 additions & 3 deletions backuper/confluence/confluence-backuper.go
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,17 @@ func (b *ConfluenceBackuper) Start() (string, error) {
info, _ := b.getBackupProgress()

if info != nil && !info.IsOutdated {
log.Info("Found previously created backup task")
log.Info(
"Found previously created backup task",
log.F{"backup-status", info.CurrentStatus},
log.F{"backup-perc", info.AlternativePercentage},
log.F{"backup-size", info.Size},
log.F{"backup-file", info.Filename},
log.F{"backup-outdated", info.IsOutdated},
)
} else {
log.Info("No previously created backup task or task is outdated, starting new backup…")

err := b.startBackup()

if err != nil {
Expand Down Expand Up @@ -142,14 +151,14 @@ func (b *ConfluenceBackuper) Progress(taskID string) (string, error) {

if progressInfo.Size == 0 && progressInfo.AlternativePercentage >= lastProgress {
log.Info(
"(%s) Backup in progress: %s",
"(%s%%) Backup in progress: %s",
progressInfo.AlternativePercentage,
progressInfo.CurrentStatus,
)
lastProgress = progressInfo.AlternativePercentage
}

if progressInfo.Size != 0 && progressInfo.Filename != "" {
if progressInfo.Filename != "" {
backupFileURL = progressInfo.Filename
break
}
Expand Down
Loading

0 comments on commit 4644752

Please sign in to comment.