diff --git a/docs/content/en/docs/core-components/api-reporter.md b/docs/content/en/docs/core-components/api-reporter.md
new file mode 100644
index 00000000..6a26ad97
--- /dev/null
+++ b/docs/content/en/docs/core-components/api-reporter.md
@@ -0,0 +1,171 @@
+---
+title: Reporter API Description
+description: >
+  Descriptions for the Reporter REST API endpoints
+categories: [API]
+tags: [protocol, http, rest, api]
+weight: 2
+date: 2024-05-01
+---
+
+# Endpoint description
+
+The Reporter API uses standard HTTP status codes; see https://en.wikipedia.org/wiki/List_of_HTTP_status_codes
+
+
+```plantuml
+@startuml
+protocol Reporter {
+    GET /reporter
+    GET /reporter/{execution-id}
+}
+@enduml
+```
+
+
+## /reporter
+The reporter endpoint is used to fetch information about ongoing playbook executions in SOARCA.
+
+### GET `/reporter`
+Get all execution IDs of currently ongoing executions.
+
+#### Call payload
+None
+
+#### Response
+200/OK with payload:
+
+```plantuml
+@startjson
+[
+  {
+    "executions": [
+      {"execution_id" : "1", "playbook_id" : "a", "started" : "", "..." : "..."},
+      "..."]
+  }
+]
+@endjson
+```
+#### Error
+400/BAD REQUEST with payload:
+General error
+
+
+
+### GET `/reporter/{execution-id}`
+Get information about an ongoing execution.
+
+#### Call payload
+None
+
+#### Response
+
+Response data model:
+
+|field              |content                |type               | description |
+| ----------------- | --------------------- | ----------------- | ----------- |
+|type               |"execution_status"     |string             |The type of this content
+|id                 |UUID                   |string             |The id of the execution
+|execution_id       |UUID                   |string             |The id of the execution
+|playbook_id        |UUID                   |string             |The id of the CACAO playbook executed by the execution
+|started            |timestamp              |string             |The time at which the execution of the playbook started
+|ended              |timestamp              |string             |The time at which the execution of the playbook ended (if it has ended)
+|status             |execution-status-enum  |string             |The current [status](#execution-status) of the execution
+|status_text        |explanation            |string             |A natural language explanation of the current status or related info
+|step_results       |step_results           |dictionary         |Map of step-id to related [step execution data](#step-execution-data)
+|request_interval   |seconds                |integer            |Suggests the polling interval for the next request (default suggested is 5 seconds).
+
+
+##### Step execution data
+|field              |content                |type               | description |
+| ----------------- | --------------------- | ----------------- | ----------- |
+|step_id            |UUID                   |string             |The id of the step being executed
+|started            |timestamp              |string             |The time at which the execution of the step started
+|ended              |timestamp              |string             |The time at which the execution of the step ended (if it has ended)
+|status             |execution-status-enum  |string             |The current [status](#execution-status) of the execution of this step
+|status_text        |explanation            |string             |A natural language explanation of the current status or related info
+|executed_by        |entity-identifier      |string             |The entity that executed the workflow step. This can be an organization, a team, a role, a defence component, etc.
+|commands_b64       |list of base64         |list of string     |A list of Base64 encodings of the commands that were invoked during the execution of a workflow step, including any values stemming from variables. These are the actual commands executed.
+|error              |error                  |string             |Error raised along the execution of the step
+|variables          |cacao variables        |dictionary         |Map of [cacao variables](https://docs.oasis-open.org/cacao/security-playbooks/v2.0/cs01/security-playbooks-v2.0-cs01.html#_Toc152256555) handled in the step (both in and out) with current values and definitions
+|automated_execution | boolean              |string             |This property identifies if the workflow step was executed manually or automatically. It is either true or false.
+
+##### Execution status
+Table from [Cyentific RNI workflow Status](https://github.com/cyentific-rni/workflow-status/blob/main/README.md#21-refined-execution-status-enumeration)
+**Vocabulary Name:** `execution-status-enum`
+| Property Name | Description|
+| :--- | :--- |
+| successfully_executed | The workflow step was executed successfully (completed). |
+|failed| The workflow step failed. |
+|ongoing| The workflow step is in progress. |
+|server_side_error| A server-side error occurred. |
+|client_side_error| A client-side error occurred.|
+|timeout_error| A timeout error occurred. The timeout of a CACAO workflow step is specified in the “timeout” property. |
+|exception_condition_error| An exception condition error occurred. A CACAO playbook can incorporate an exception condition at the playbook level and, in particular, with the "workflow_exception" property. |
+
+
+If the execution has completed and no further steps need to be executed:
+
+200/OK
+with payload:
+
+```plantuml
+@startjson
+[
+  {
+    "type" : "execution_status",
+    "id" : "",
+    "execution_id" : "",
+    "playbook_id" : "",
+    "started" : "",
+    "ended" : "",
+    "status" : "",
+    "status_text": "",
+    "errors" : ["error1", "..."],
+    "step_results" : {
+      "" : {
+        "execution_id": "",
+        "step_id" : "",
+        "started" : "",
+        "ended" : "",
+        "status" : "",
+        "status_text": "",
+        "errors" : ["error1", "..."],
+        "variables": {
+          "" : {
+            "type": "",
+            "name": "",
+            "description": "",
+            "value": "",
+            "constant": "",
+            "external": ""
+          }
+        }
+      }
+    },
+    "request_interval" : ""
+  }
+]
+@endjson
+```
+
+The payload will include all information that the finished execution has created.
+
+
+If the execution is still ongoing:
+
+206/Partial Content
+with payload equal to the 200 response, but implicitly not including all information from the execution, since the execution is still ongoing.
+
+The step results object will list the steps that have been executed up to the report request, and those that are being executed at the moment of the report request.
+
+The "request_interval" suggests the polling interval for the next request (default suggested is 5 seconds).
+
+#### Error
+400/BAD REQUEST with payload:
+General error
+
+404/NOT FOUND
+No execution with the specified ID was found.
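For illustration, below is a minimal sketch of how a client could consume the endpoints documented above, polling `GET /reporter/{execution-id}` and honouring the suggested `request_interval`. It is not part of this change set: the base URL and port, the trimmed-down `report` struct, and the error handling are assumptions made only for this example; the endpoint path, the 200-vs-206 status handling, and the JSON field names follow the description above.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// report mirrors a subset of the documented execution report payload.
// Only the fields used by this sketch are declared.
type report struct {
	Type            string `json:"type"`
	ExecutionId     string `json:"execution_id"`
	PlaybookId      string `json:"playbook_id"`
	Status          string `json:"status"`
	StatusText      string `json:"status_text"`
	RequestInterval int    `json:"request_interval"`
}

// pollExecution polls GET /reporter/{execution-id} until the execution is no
// longer reported as ongoing, then returns the final report.
func pollExecution(baseURL, executionID string) (report, error) {
	for {
		resp, err := http.Get(fmt.Sprintf("%s/reporter/%s", baseURL, executionID))
		if err != nil {
			return report{}, err
		}

		var r report
		err = json.NewDecoder(resp.Body).Decode(&r)
		resp.Body.Close()
		if err != nil {
			return report{}, err
		}

		// 200/OK means the execution has finished; 206/Partial Content means it is still ongoing.
		if resp.StatusCode == http.StatusOK {
			return r, nil
		}
		if resp.StatusCode != http.StatusPartialContent {
			return report{}, fmt.Errorf("unexpected status: %s (%s)", resp.Status, r.StatusText)
		}

		// Honour the suggested polling interval (documented default: 5 seconds).
		interval := r.RequestInterval
		if interval <= 0 {
			interval = 5
		}
		time.Sleep(time.Duration(interval) * time.Second)
	}
}

func main() {
	// Assumed local SOARCA instance and a known execution ID.
	final, err := pollExecution("http://localhost:8080", "6ba7b810-9dad-11d1-80b4-00c04fd430c0")
	if err != nil {
		fmt.Println("polling failed:", err)
		return
	}
	fmt.Println(final.ExecutionId, final.Status, final.StatusText)
}
```

A 206/Partial Content response is treated as "still ongoing", so the loop sleeps for the suggested interval and polls again; any other non-200 status is surfaced as an error.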
+ + diff --git a/internal/controller/controller.go b/internal/controller/controller.go index 48b21264..c75e311d 100644 --- a/internal/controller/controller.go +++ b/internal/controller/controller.go @@ -20,10 +20,12 @@ import ( "soarca/internal/fin/protocol" "soarca/internal/guid" "soarca/internal/reporter" + cache "soarca/internal/reporter/downstream_reporter/cache" "soarca/logger" "soarca/utils" httpUtil "soarca/utils/http" "soarca/utils/stix/expression/comparison" + timeUtil "soarca/utils/time" downstreamReporter "soarca/internal/reporter/downstream_reporter" @@ -31,7 +33,7 @@ import ( mongo "soarca/database/mongodb" playbookrepository "soarca/database/playbook" - routes "soarca/routes" + "soarca/routes" ) var log *logger.Log @@ -49,6 +51,10 @@ type Controller struct { var mainController = Controller{} +var mainCache = cache.Cache{} + +const defaultCacheSize int = 10 + func (controller *Controller) NewDecomposer() decomposer.IDecomposer { ssh := new(ssh.SshCapability) capabilities := map[string]capability.ICapability{ssh.GetType(): ssh} @@ -76,7 +82,13 @@ func (controller *Controller) NewDecomposer() decomposer.IDecomposer { } } + // NOTE: Enrolling mainCache by default as reporter reporter := reporter.New([]downstreamReporter.IDownStreamReporter{}) + downstreamReporters := []downstreamReporter.IDownStreamReporter{&mainCache} + err := reporter.RegisterReporters(downstreamReporters) + if err != nil { + log.Error("could not load main Cache as reporter for decomposer and executors") + } actionExecutor := action.New(capabilities, reporter) playbookActionExecutor := playbook_action.New(controller, controller, reporter) @@ -129,6 +141,9 @@ func Initialize() error { } } + cacheSize, _ := strconv.Atoi(utils.GetEnv("MAX_EXECUTIONS", strconv.Itoa(defaultCacheSize))) + mainCache = *cache.New(&timeUtil.Time{}, cacheSize) + errCore := initializeCore(app) if errCore != nil { @@ -171,6 +186,14 @@ func initializeCore(app *gin.Engine) error { } } + // NOTE: Assuming that the cache is the main information mediator for + // the reporter API + err = routes.Reporter(app, &mainCache) + if err != nil { + log.Error(err) + return err + } + routes.Logging(app) routes.Swagger(app) diff --git a/internal/controller/informer/execution_informer.go b/internal/controller/informer/execution_informer.go new file mode 100644 index 00000000..85d801ff --- /dev/null +++ b/internal/controller/informer/execution_informer.go @@ -0,0 +1,12 @@ +package informer + +import ( + "soarca/models/cache" + + "github.com/google/uuid" +) + +type IExecutionInformer interface { + GetExecutions() ([]cache.ExecutionEntry, error) + GetExecutionReport(executionKey uuid.UUID) (cache.ExecutionEntry, error) +} diff --git a/internal/reporter/downstream_reporter/cache/cache.go b/internal/reporter/downstream_reporter/cache/cache.go index 727b751e..add6daf4 100644 --- a/internal/reporter/downstream_reporter/cache/cache.go +++ b/internal/reporter/downstream_reporter/cache/cache.go @@ -1,15 +1,13 @@ package cache import ( + b64 "encoding/base64" "errors" - "fmt" "reflect" "soarca/logger" "soarca/models/cacao" - "soarca/models/report" - "soarca/utils" + cache_report "soarca/models/cache" itime "soarca/utils/time" - "strconv" "time" "github.com/google/uuid" @@ -23,50 +21,48 @@ func init() { } const MaxExecutions int = 10 -const MaxSteps int = 10 type Cache struct { Size int timeUtil itime.ITime - Cache map[string]report.ExecutionEntry // Cached up to max - fifoRegister []string // Used for O(1) FIFO cache management + Cache 
map[string]cache_report.ExecutionEntry // Cached up to max + fifoRegister []string // Used for O(1) FIFO cache management } -func New(timeUtil itime.ITime) *Cache { - maxExecutions, _ := strconv.Atoi(utils.GetEnv("MAX_EXECUTIONS", strconv.Itoa(MaxExecutions))) +func New(timeUtil itime.ITime, maxExecutions int) *Cache { return &Cache{ Size: maxExecutions, - Cache: make(map[string]report.ExecutionEntry), + Cache: make(map[string]cache_report.ExecutionEntry), timeUtil: timeUtil, } } -func (cacheReporter *Cache) getExecution(executionKey uuid.UUID) (report.ExecutionEntry, error) { +func (cacheReporter *Cache) getExecution(executionKey uuid.UUID) (cache_report.ExecutionEntry, error) { executionKeyStr := executionKey.String() executionEntry, ok := cacheReporter.Cache[executionKeyStr] if !ok { err := errors.New("execution is not in cache") log.Warning("execution is not in cache. consider increasing cache size.") - return report.ExecutionEntry{}, err + return cache_report.ExecutionEntry{}, err // TODO Retrieve from database } return executionEntry, nil } -func (cacheReporter *Cache) getExecutionStep(executionKey uuid.UUID, stepKey string) (report.StepResult, error) { +func (cacheReporter *Cache) getExecutionStep(executionKey uuid.UUID, stepKey string) (cache_report.StepResult, error) { executionEntry, err := cacheReporter.getExecution(executionKey) if err != nil { - return report.StepResult{}, err + return cache_report.StepResult{}, err } executionStep, ok := executionEntry.StepResults[stepKey] if !ok { err := errors.New("execution step is not in cache") - return report.StepResult{}, err + return cache_report.StepResult{}, err } return executionStep, nil } // Adding executions in FIFO logic -func (cacheReporter *Cache) addExecution(newExecutionEntry report.ExecutionEntry) error { +func (cacheReporter *Cache) addExecution(newExecutionEntry cache_report.ExecutionEntry) error { if !(len(cacheReporter.fifoRegister) == len(cacheReporter.Cache)) { return errors.New("cache fifo register and content are desynchronized") @@ -90,13 +86,13 @@ func (cacheReporter *Cache) addExecution(newExecutionEntry report.ExecutionEntry } func (cacheReporter *Cache) ReportWorkflowStart(executionId uuid.UUID, playbook cacao.Playbook) error { - newExecutionEntry := report.ExecutionEntry{ + newExecutionEntry := cache_report.ExecutionEntry{ ExecutionId: executionId, PlaybookId: playbook.ID, Started: cacheReporter.timeUtil.Now(), Ended: time.Time{}, - StepResults: map[string]report.StepResult{}, - Status: report.Ongoing, + StepResults: map[string]cache_report.StepResult{}, + Status: cache_report.Ongoing, } err := cacheReporter.addExecution(newExecutionEntry) if err != nil { @@ -113,10 +109,10 @@ func (cacheReporter *Cache) ReportWorkflowEnd(executionId uuid.UUID, playbook ca } if workflowError != nil { - executionEntry.PlaybookResult = workflowError - executionEntry.Status = report.Failed + executionEntry.Error = workflowError + executionEntry.Status = cache_report.Failed } else { - executionEntry.Status = report.SuccessfullyExecuted + executionEntry.Status = cache_report.SuccessfullyExecuted } executionEntry.Ended = cacheReporter.timeUtil.Now() cacheReporter.Cache[executionId.String()] = executionEntry @@ -130,25 +126,40 @@ func (cacheReporter *Cache) ReportStepStart(executionId uuid.UUID, step cacao.St return err } - if executionEntry.Status != report.Ongoing { + if executionEntry.Status != cache_report.Ongoing { return errors.New("trying to report on the execution of a step for an already reported completed or failed 
execution") } - fmt.Println(executionEntry) - _, alreadyThere := executionEntry.StepResults[step.ID] if alreadyThere { log.Warning("a step execution was already reported for this step. overwriting.") } - newStepEntry := report.StepResult{ + // TODO: must test + commandsB64 := []string{} + isAutomated := true + for _, cmd := range step.Commands { + if cmd.Type == cacao.CommandTypeManual { + isAutomated = false + } + if cmd.CommandB64 != "" { + commandsB64 = append(commandsB64, cmd.CommandB64) + } else { + cmdB64 := b64.StdEncoding.EncodeToString([]byte(cmd.Command)) + commandsB64 = append(commandsB64, cmdB64) + } + } + + newStepEntry := cache_report.StepResult{ ExecutionId: executionId, StepId: step.ID, Started: cacheReporter.timeUtil.Now(), Ended: time.Time{}, Variables: variables, - Status: report.Ongoing, + CommandsB64: commandsB64, + Status: cache_report.Ongoing, Error: nil, + IsAutomated: isAutomated, } executionEntry.StepResults[step.ID] = newStepEntry return nil @@ -160,7 +171,7 @@ func (cacheReporter *Cache) ReportStepEnd(executionId uuid.UUID, step cacao.Step return err } - if executionEntry.Status != report.Ongoing { + if executionEntry.Status != cache_report.Ongoing { return errors.New("trying to report on the execution of a step for an already reported completed or failed execution") } @@ -169,15 +180,15 @@ func (cacheReporter *Cache) ReportStepEnd(executionId uuid.UUID, step cacao.Step return err } - if executionStepResult.Status != report.Ongoing { + if executionStepResult.Status != cache_report.Ongoing { return errors.New("trying to report on the execution of a step that was already reported completed or failed") } if stepError != nil { executionStepResult.Error = stepError - executionStepResult.Status = report.ServerSideError + executionStepResult.Status = cache_report.ServerSideError } else { - executionStepResult.Status = report.SuccessfullyExecuted + executionStepResult.Status = cache_report.SuccessfullyExecuted } executionStepResult.Ended = cacheReporter.timeUtil.Now() executionStepResult.Variables = returnVars @@ -186,16 +197,25 @@ func (cacheReporter *Cache) ReportStepEnd(executionId uuid.UUID, step cacao.Step return nil } -func (cacheReporter *Cache) GetExecutionsIDs() []string { - executions := make([]string, len(cacheReporter.fifoRegister)) - _ = copy(executions, cacheReporter.fifoRegister) - return executions +func (cacheReporter *Cache) GetExecutions() ([]cache_report.ExecutionEntry, error) { + executions := make([]cache_report.ExecutionEntry, 0) + // NOTE: fetched via fifo register key reference as is ordered array, + // needed to test and report back ordered executions stored + for _, executionEntryKey := range cacheReporter.fifoRegister { + // NOTE: cached executions are passed by reference, so they must not be modified + entry, present := cacheReporter.Cache[executionEntryKey] + if !present { + return []cache_report.ExecutionEntry{}, errors.New("internal error. 
cache fifo register and cache executions mismatch.") + } + executions = append(executions, entry) + } + return executions, nil } -func (cacheReporter *Cache) GetExecutionReport(executionKey uuid.UUID) (report.ExecutionEntry, error) { +func (cacheReporter *Cache) GetExecutionReport(executionKey uuid.UUID) (cache_report.ExecutionEntry, error) { executionEntry, err := cacheReporter.getExecution(executionKey) if err != nil { - return report.ExecutionEntry{}, err + return cache_report.ExecutionEntry{}, err } report := executionEntry diff --git a/models/api/reporter.go b/models/api/reporter.go new file mode 100644 index 00000000..0d6d73fc --- /dev/null +++ b/models/api/reporter.go @@ -0,0 +1,112 @@ +package api + +import ( + "errors" + "fmt" + "soarca/models/cacao" + cache_model "soarca/models/cache" + "time" +) + +type Status uint8 + +// Reporter model adapted from https://github.com/cyentific-rni/workflow-status/blob/main/README.md + +const ( + ReportLevelPlaybook = "playbook" + ReportLevelStep = "step" + + SuccessfullyExecuted = "successfully_executed" + Failed = "failed" + Ongoing = "ongoing" + ServerSideError = "server_side_error" + ClientSideError = "client_side_error" + TimeoutError = "timeout_error" + ExceptionConditionError = "exception_condition_error" + AwaitUserInput = "await_user_input" + + SuccessfullyExecutedText = "%s execution completed successfully" + FailedText = "something went wrong in the execution of this %s" + OngoingText = "this %s is currently being executed" + ServerSideErrorText = "there was a server-side problem with the execution of this %s" + ClientSideErrorText = "something in the data provided for this %s raised an issue" + TimeoutErrorText = "the execution of this %s timed out" + ExceptionConditionErrorText = "the execution of this %s raised a playbook exception" + AwaitUserInputText = "waiting for users to provide input for the %s execution" +) + +type PlaybookExecutionReport struct { + Type string `bson:"type" json:"type"` + ExecutionId string `bson:"execution_id" json:"execution_id"` + PlaybookId string `bson:"playbook_id" json:"playbook_id"` + Started time.Time `bson:"started" json:"started"` + Ended time.Time `bson:"ended" json:"ended"` + Status string `bson:"status" json:"status"` + StatusText string `bson:"status_text" json:"status_text"` + StepResults map[string]StepExecutionReport `bson:"step_results" json:"step_results"` + RequestInterval int `bson:"request_interval" json:"request_interval"` +} + +type StepExecutionReport struct { + ExecutionId string `bson:"execution_id" json:"execution_id"` + StepId string `bson:"step_id" json:"step_id"` + Started time.Time `bson:"started" json:"started"` + Ended time.Time `bson:"ended" json:"ended"` + Status string `bson:"status" json:"status"` + StatusText string `bson:"status_text" json:"status_text"` + ExecutedBy string `bson:"executed_by" json:"executed_by"` + CommandsB64 []string `bson:"commands_b64" json:"commands_b64"` + Variables map[string]cacao.Variable `bson:"variables" json:"variables"` + AutomatedExecution bool `bson:"automated_execution" json:"automated_execution"` + // Make sure we can have a playbookID for playbook actions, and also + // the execution ID for the invoked playbook +} + +func CacheStatusEnum2String(status cache_model.Status) (string, error) { + switch status { + case cache_model.SuccessfullyExecuted: + return SuccessfullyExecuted, nil + case cache_model.Failed: + return Failed, nil + case cache_model.Ongoing: + return Ongoing, nil + case cache_model.ServerSideError: + return 
ServerSideError, nil + case cache_model.ClientSideError: + return ClientSideError, nil + case cache_model.TimeoutError: + return TimeoutError, nil + case cache_model.ExceptionConditionError: + return ExceptionConditionError, nil + case cache_model.AwaitUserInput: + return AwaitUserInput, nil + default: + return "", errors.New("unable to read execution information status") + } +} + +func GetCacheStatusText(status string, level string) (string, error) { + if level != ReportLevelPlaybook && level != ReportLevelStep { + return "", errors.New("invalid reporting level provided. use either 'playbook' or 'step'") + } + switch status { + case SuccessfullyExecuted: + return fmt.Sprintf(SuccessfullyExecutedText, level), nil + case Failed: + return fmt.Sprintf(FailedText, level), nil + case Ongoing: + return fmt.Sprintf(OngoingText, level), nil + case ServerSideError: + return fmt.Sprintf(ServerSideErrorText, level), nil + case ClientSideError: + return fmt.Sprintf(ClientSideErrorText, level), nil + case TimeoutError: + return fmt.Sprintf(TimeoutErrorText, level), nil + case ExceptionConditionError: + return fmt.Sprintf(ExceptionConditionErrorText, level), nil + case AwaitUserInput: + return fmt.Sprintf(AwaitUserInputText, level), nil + default: + return "", errors.New("unable to read execution information status") + } +} diff --git a/models/cacao/cacao.go b/models/cacao/cacao.go index c0d36919..a50034c2 100644 --- a/models/cacao/cacao.go +++ b/models/cacao/cacao.go @@ -17,6 +17,19 @@ const ( StepTypeWhileCondition = "while-condition" StepTypeSwitchCondition = "switch-condition" + CommandTypeManual = "manual" + CommandTypeBash = "bash" + CommandTypeCalderaCmd = "caldera-cmd" + CommandTypeElastic = "elastic" + CommandTypeHttpApi = "http-api" + CommandTypeJupyter = "jupyter" + CommandTypeKestrel = "kestrel" + CommandTypeOpenC2Http = "openc2-http" + CommandTypePowershell = "powershell" + CommandTypeSigma = "sigma" + CommandTypeSsh = "ssh" + CommandTypeYara = "yara" + AuthInfoOAuth2Type = "oauth2" AuthInfoHTTPBasicType = "http-basic" AuthInfoNotSet = "" diff --git a/models/report/report.go b/models/cache/cache.go similarity index 61% rename from models/report/report.go rename to models/cache/cache.go index a261a1dd..a678bfbc 100644 --- a/models/report/report.go +++ b/models/cache/cache.go @@ -1,4 +1,4 @@ -package report +package cache import ( "soarca/models/cacao" @@ -17,16 +17,17 @@ const ( ClientSideError TimeoutError ExceptionConditionError + AwaitUserInput ) type ExecutionEntry struct { - ExecutionId uuid.UUID - PlaybookId string - Started time.Time - Ended time.Time - StepResults map[string]StepResult - PlaybookResult error - Status Status + ExecutionId uuid.UUID + PlaybookId string + Started time.Time + Ended time.Time + StepResults map[string]StepResult + Error error + Status Status } type StepResult struct { @@ -36,7 +37,9 @@ type StepResult struct { Ended time.Time // Make sure we can have a playbookID for playbook actions, and also // the execution ID for the invoked playbook - Variables cacao.Variables - Status Status - Error error + CommandsB64 []string + Variables cacao.Variables + Status Status + Error error + IsAutomated bool } diff --git a/routes/reporter/reporter_api.go b/routes/reporter/reporter_api.go new file mode 100644 index 00000000..6318c45f --- /dev/null +++ b/routes/reporter/reporter_api.go @@ -0,0 +1,90 @@ +package reporter + +import ( + "net/http" + "soarca/internal/controller/informer" + + "reflect" + "soarca/routes/error" + + "github.com/gin-gonic/gin" + 
"github.com/google/uuid" + + "soarca/logger" +) + +var log *logger.Log + +type Empty struct{} + +func init() { + log = logger.Logger(reflect.TypeOf(Empty{}).PkgPath(), logger.Info, "", logger.Json) +} + +// A PlaybookController implements the playbook API endpoints is dependent on a database. +type executionInformer struct { + informer informer.IExecutionInformer +} + +// NewPlaybookController makes a new instance of playbookControler +func NewExecutionInformer(informer informer.IExecutionInformer) *executionInformer { + return &executionInformer{informer: informer} +} + +// getExecutions GET handler for obtaining all the executions that can be retrieved. +// Returns this to the gin context as a list if execution IDs in json format +// +// @Summary gets all the UUIDs for the executions that can be retireved +// @Schemes +// @Description return all stored executions +// @Tags reporter +// @Produce json +// @success 200 {array} string +// @error 400 +// @Router /report/ [GET] +func (executionInformer *executionInformer) getExecutions(g *gin.Context) { + executions, err := executionInformer.informer.GetExecutions() + if err != nil { + log.Debug("Could not get executions from informer") + error.SendErrorResponse(g, http.StatusInternalServerError, "Could not get executions from informer", "GET /report/", "") + return + } + g.JSON(http.StatusOK, executions) +} + +// getExecutionReport GET handler for obtaining the information about an execution. +// Returns this to the gin context as a PlaybookExecutionReport object at soarca/model/api/reporter +// +// @Summary gets information about an ongoing playbook execution +// @Schemes +// @Description return execution information +// @Tags reporter +// @Produce json +// @success 200 {object} api.PlaybookExecutionReport +// @error 400 +// @Router /report/:id [GET] +func (executionInformer *executionInformer) getExecutionReport(g *gin.Context) { + id := g.Param("id") + log.Trace("Trying to obtain execution for id: ", id) + uuid, err := uuid.Parse(id) + if err != nil { + log.Debug("Could not parse id parameter for request") + error.SendErrorResponse(g, http.StatusBadRequest, "Could not parse id parameter for request", "GET /report/{id}", "") + return + } + + executionEntry, err := executionInformer.informer.GetExecutionReport(uuid) + if err != nil { + log.Debug("Could not find execution for given id") + error.SendErrorResponse(g, http.StatusBadRequest, "Could not find execution for given ID", "GET /report/{id}", "") + return + } + + executionEntryParsed, err := parseCachePlaybookEntry(executionEntry) + if err != nil { + log.Debug("Could not parse entry to reporter result model") + error.SendErrorResponse(g, http.StatusInternalServerError, "Could not parse execution report", "GET /report/{id}", "") + return + } + g.JSON(http.StatusOK, executionEntryParsed) +} diff --git a/routes/reporter/reporter_endpoints.go b/routes/reporter/reporter_endpoints.go new file mode 100644 index 00000000..a8d374ca --- /dev/null +++ b/routes/reporter/reporter_endpoints.go @@ -0,0 +1,19 @@ +package reporter + +import ( + "soarca/internal/controller/informer" + + "github.com/gin-gonic/gin" +) + +// Main Router for the following endpoints: +// GET /reporter +// GET /reporter/{execution-id} +func Routes(route *gin.Engine, informer informer.IExecutionInformer) { + executionInformer := NewExecutionInformer(informer) + report := route.Group("/reporter") + { + report.GET("/", executionInformer.getExecutions) + report.GET("/:id", executionInformer.getExecutionReport) + } +} diff --git 
a/routes/reporter/reporter_parser.go b/routes/reporter/reporter_parser.go new file mode 100644 index 00000000..4d89da43 --- /dev/null +++ b/routes/reporter/reporter_parser.go @@ -0,0 +1,74 @@ +package reporter + +import ( + api_model "soarca/models/api" + cache_model "soarca/models/cache" +) + +const defaultRequestInterval int = 5 + +func parseCachePlaybookEntry(cacheEntry cache_model.ExecutionEntry) (api_model.PlaybookExecutionReport, error) { + playbookStatus, err := api_model.CacheStatusEnum2String(cacheEntry.Status) + if err != nil { + return api_model.PlaybookExecutionReport{}, err + } + + playbookStatusText, err := api_model.GetCacheStatusText(playbookStatus, api_model.ReportLevelPlaybook) + if err != nil { + return api_model.PlaybookExecutionReport{}, err + } + if cacheEntry.Error != nil { + playbookStatusText = playbookStatusText + " - error: " + cacheEntry.Error.Error() + } + + stepResults, err := parseCacheStepEntries(cacheEntry.StepResults) + if err != nil { + return api_model.PlaybookExecutionReport{}, err + } + + executionReport := api_model.PlaybookExecutionReport{ + Type: "execution_status", + ExecutionId: cacheEntry.ExecutionId.String(), + PlaybookId: cacheEntry.PlaybookId, + Started: cacheEntry.Started, + Ended: cacheEntry.Ended, + Status: playbookStatus, + StatusText: playbookStatusText, + StepResults: stepResults, + RequestInterval: defaultRequestInterval, + } + return executionReport, nil +} + +func parseCacheStepEntries(cacheStepEntries map[string]cache_model.StepResult) (map[string]api_model.StepExecutionReport, error) { + parsedEntries := map[string]api_model.StepExecutionReport{} + for stepId, stepEntry := range cacheStepEntries { + + stepStatus, err := api_model.CacheStatusEnum2String(stepEntry.Status) + if err != nil { + return map[string]api_model.StepExecutionReport{}, err + } + stepStatusText, err := api_model.GetCacheStatusText(stepStatus, api_model.ReportLevelStep) + if err != nil { + return map[string]api_model.StepExecutionReport{}, err + } + + if stepEntry.Error != nil { + stepStatusText = stepStatusText + " - error: " + stepEntry.Error.Error() + } + + parsedEntries[stepId] = api_model.StepExecutionReport{ + ExecutionId: stepEntry.ExecutionId.String(), + StepId: stepEntry.StepId, + Started: stepEntry.Started, + Ended: stepEntry.Ended, + Status: stepStatus, + StatusText: stepStatusText, + ExecutedBy: "soarca", + CommandsB64: stepEntry.CommandsB64, + Variables: stepEntry.Variables, + AutomatedExecution: stepEntry.IsAutomated, + } + } + return parsedEntries, nil +} diff --git a/routes/router.go b/routes/router.go index c8459da2..a2f7652e 100644 --- a/routes/router.go +++ b/routes/router.go @@ -3,9 +3,11 @@ package routes import ( "soarca/internal/controller/database" "soarca/internal/controller/decomposer_controller" + "soarca/internal/controller/informer" coa_routes "soarca/routes/coa" operator "soarca/routes/operator" playbook_routes "soarca/routes/playbook" + reporter "soarca/routes/reporter" status "soarca/routes/status" swagger "soarca/routes/swagger" "soarca/routes/trigger" @@ -30,6 +32,12 @@ func Logging(app *gin.Engine) { // app.Use(middelware.LoggingMiddleware(log.Logger)) } +func Reporter(app *gin.Engine, informer informer.IExecutionInformer) error { + log.Trace("Setting up reporter routes") + reporter.Routes(app, informer) + return nil +} + func Api(app *gin.Engine, controller decomposer_controller.IController, ) error { @@ -37,9 +45,7 @@ func Api(app *gin.Engine, // gin.SetMode(gin.ReleaseMode) trigger_api := trigger.New(controller) - 
coa_routes.Routes(app) - status.Routes(app) operator.Routes(app) trigger.Routes(app, trigger_api) diff --git a/test/unittest/mocks/mock_cache/mock_cache.go b/test/unittest/mocks/mock_cache/mock_cache.go new file mode 100644 index 00000000..dd0d0d9e --- /dev/null +++ b/test/unittest/mocks/mock_cache/mock_cache.go @@ -0,0 +1,22 @@ +package mock_cache + +import ( + cache_model "soarca/models/cache" + + "github.com/google/uuid" + "github.com/stretchr/testify/mock" +) + +type Mock_Cache struct { + mock.Mock +} + +func (reporter *Mock_Cache) GetExecutions() ([]cache_model.ExecutionEntry, error) { + args := reporter.Called() + return args.Get(0).([]cache_model.ExecutionEntry), args.Error(1) +} + +func (reporter *Mock_Cache) GetExecutionReport(executionKey uuid.UUID) (cache_model.ExecutionEntry, error) { + args := reporter.Called(executionKey) + return args.Get(0).(cache_model.ExecutionEntry), args.Error(1) +} diff --git a/test/unittest/reporters/downstream_reporter/cache_test.go b/test/unittest/reporters/downstream_reporter/cache_test.go index 107a7b1b..3050a271 100644 --- a/test/unittest/reporters/downstream_reporter/cache_test.go +++ b/test/unittest/reporters/downstream_reporter/cache_test.go @@ -1,10 +1,11 @@ package cache_test import ( + b64 "encoding/base64" "errors" "soarca/internal/reporter/downstream_reporter/cache" "soarca/models/cacao" - "soarca/models/report" + cache_model "soarca/models/cache" "soarca/test/unittest/mocks/mock_utils/time" "testing" "time" @@ -16,7 +17,7 @@ import ( func TestReportWorkflowStartFirst(t *testing.T) { mock_time := new(mock_time.MockTime) - cacheReporter := cache.New(mock_time) + cacheReporter := cache.New(mock_time, 10) expectedCommand := cacao.Command{ Type: "ssh", @@ -74,30 +75,45 @@ func TestReportWorkflowStartFirst(t *testing.T) { Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, } - executionId0, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") layout := "2006-01-02T15:04:05.000Z" str := "2014-11-12T11:45:26.371Z" timeNow, _ := time.Parse(layout, str) mock_time.On("Now").Return(timeNow) - expectedExecutionEntry := report.ExecutionEntry{ + expectedExecutionEntry := cache_model.ExecutionEntry{ ExecutionId: executionId0, PlaybookId: "test", - StepResults: map[string]report.StepResult{}, - Status: report.Ongoing, + StepResults: map[string]cache_model.StepResult{}, + Status: cache_model.Ongoing, Started: timeNow, Ended: time.Time{}, } - expectedExecutions := []string{"6ba7b810-9dad-11d1-80b4-00c04fd430c0"} err := cacheReporter.ReportWorkflowStart(executionId0, playbook) if err != nil { t.Fail() } + expectedStarted, _ := time.Parse(layout, "2014-11-12T11:45:26.371Z") + expectedEnded, _ := time.Parse(layout, "0001-01-01T00:00:00Z") + expectedExecutions := []cache_model.ExecutionEntry{ + { + ExecutionId: executionId0, + PlaybookId: "test", + Started: expectedStarted, + Ended: expectedEnded, + StepResults: map[string]cache_model.StepResult{}, + Error: nil, + Status: 2, + }, + } + + returnedExecutions, _ := cacheReporter.GetExecutions() + exec, err := cacheReporter.GetExecutionReport(executionId0) - assert.Equal(t, expectedExecutions, cacheReporter.GetExecutionsIDs()) + assert.Equal(t, expectedExecutions, returnedExecutions) assert.Equal(t, expectedExecutionEntry.ExecutionId, exec.ExecutionId) assert.Equal(t, expectedExecutionEntry.PlaybookId, exec.PlaybookId) assert.Equal(t, expectedExecutionEntry.StepResults, exec.StepResults) @@ -110,7 +126,7 @@ func 
TestReportWorkflowStartFirst(t *testing.T) { func TestReportWorkflowStartFifo(t *testing.T) { mock_time := new(mock_time.MockTime) - cacheReporter := cache.New(mock_time) + cacheReporter := cache.New(mock_time, 3) expectedCommand := cacao.Command{ Type: "ssh", @@ -168,46 +184,53 @@ func TestReportWorkflowStartFifo(t *testing.T) { Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, } - executionId0, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") - executionId1, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c1") - executionId2, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c2") - executionId3, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c3") - executionId4, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c4") - executionId5, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c5") - executionId6, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c6") - executionId7, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c7") - executionId8, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - executionId9, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c9") - executionId10, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430ca") + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + executionId1 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c1") + executionId2 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c2") + executionId3 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c3") layout := "2006-01-02T15:04:05.000Z" str := "2014-11-12T11:45:26.371Z" timeNow, _ := time.Parse(layout, str) mock_time.On("Now").Return(timeNow) - expectedExecutionsFull := []string{ - "6ba7b810-9dad-11d1-80b4-00c04fd430c0", - "6ba7b810-9dad-11d1-80b4-00c04fd430c1", - "6ba7b810-9dad-11d1-80b4-00c04fd430c2", - "6ba7b810-9dad-11d1-80b4-00c04fd430c3", - "6ba7b810-9dad-11d1-80b4-00c04fd430c4", - "6ba7b810-9dad-11d1-80b4-00c04fd430c5", - "6ba7b810-9dad-11d1-80b4-00c04fd430c6", - "6ba7b810-9dad-11d1-80b4-00c04fd430c7", - "6ba7b810-9dad-11d1-80b4-00c04fd430c8", - "6ba7b810-9dad-11d1-80b4-00c04fd430c9", - } - expectedExecutionsFifo := []string{ - "6ba7b810-9dad-11d1-80b4-00c04fd430c1", - "6ba7b810-9dad-11d1-80b4-00c04fd430c2", - "6ba7b810-9dad-11d1-80b4-00c04fd430c3", - "6ba7b810-9dad-11d1-80b4-00c04fd430c4", - "6ba7b810-9dad-11d1-80b4-00c04fd430c5", - "6ba7b810-9dad-11d1-80b4-00c04fd430c6", - "6ba7b810-9dad-11d1-80b4-00c04fd430c7", - "6ba7b810-9dad-11d1-80b4-00c04fd430c8", - "6ba7b810-9dad-11d1-80b4-00c04fd430c9", - "6ba7b810-9dad-11d1-80b4-00c04fd430ca", + executionIds := []uuid.UUID{ + executionId0, + executionId1, + executionId2, + executionId3, + } + + expectedStarted, _ := time.Parse(layout, "2014-11-12T11:45:26.371Z") + expectedEnded, _ := time.Parse(layout, "0001-01-01T00:00:00Z") + expectedExecutionsFull := []cache_model.ExecutionEntry{} + for _, executionId := range executionIds[:len(executionIds)-1] { + t.Log(executionId) + entry := cache_model.ExecutionEntry{ + ExecutionId: executionId, + PlaybookId: "test", + Started: expectedStarted, + Ended: expectedEnded, + StepResults: map[string]cache_model.StepResult{}, + Error: nil, + Status: 2, + } + expectedExecutionsFull = append(expectedExecutionsFull, entry) + } + t.Log("") + expectedExecutionsFifo := []cache_model.ExecutionEntry{} + for _, executionId := range executionIds[1:] { + t.Log(executionId) + entry := cache_model.ExecutionEntry{ + ExecutionId: executionId, + PlaybookId: "test", + Started: expectedStarted, + Ended: expectedEnded, + StepResults: 
map[string]cache_model.StepResult{}, + Error: nil, + Status: 2, + } + expectedExecutionsFifo = append(expectedExecutionsFifo, entry) } err := cacheReporter.ReportWorkflowStart(executionId0, playbook) @@ -222,51 +245,257 @@ func TestReportWorkflowStartFifo(t *testing.T) { if err != nil { t.Fail() } + + returnedExecutionsFull, _ := cacheReporter.GetExecutions() + t.Log("expected") + t.Log(expectedExecutionsFull) + t.Log("returned") + t.Log(returnedExecutionsFull) + assert.Equal(t, expectedExecutionsFull, returnedExecutionsFull) + err = cacheReporter.ReportWorkflowStart(executionId3, playbook) if err != nil { t.Fail() } - err = cacheReporter.ReportWorkflowStart(executionId4, playbook) - if err != nil { - t.Fail() + + returnedExecutionsFifo, _ := cacheReporter.GetExecutions() + assert.Equal(t, expectedExecutionsFifo, returnedExecutionsFifo) + mock_time.AssertExpectations(t) +} + +func TestReportWorkflowEnd(t *testing.T) { + + mock_time := new(mock_time.MockTime) + cacheReporter := cache.New(mock_time, 10) + + expectedCommand := cacao.Command{ + Type: "ssh", + Command: "ssh ls -la", } - err = cacheReporter.ReportWorkflowStart(executionId5, playbook) - if err != nil { - t.Fail() + + expectedVariables := cacao.Variable{ + Type: "string", + Name: "var1", + Value: "testing", } - err = cacheReporter.ReportWorkflowStart(executionId6, playbook) + + step1 := cacao.Step{ + Type: "action", + ID: "action--test", + Name: "ssh-tests", + StepVariables: cacao.NewVariables(expectedVariables), + Commands: []cacao.Command{expectedCommand}, + Cases: map[string]string{}, + OnCompletion: "end--test", + Agent: "agent1", + Targets: []string{"target1"}, + } + + end := cacao.Step{ + Type: "end", + ID: "end--test", + Name: "end step", + } + + expectedAuth := cacao.AuthenticationInformation{ + Name: "user", + ID: "auth1", + } + + expectedTarget := cacao.AgentTarget{ + Name: "sometarget", + AuthInfoIdentifier: "auth1", + ID: "target1", + } + + expectedAgent := cacao.AgentTarget{ + Type: "soarca", + Name: "soarca-ssh", + } + + playbook := cacao.Playbook{ + ID: "test", + Type: "test", + Name: "ssh-test", + WorkflowStart: step1.ID, + AuthenticationInfoDefinitions: map[string]cacao.AuthenticationInformation{"id": expectedAuth}, + AgentDefinitions: map[string]cacao.AgentTarget{"agent1": expectedAgent}, + TargetDefinitions: map[string]cacao.AgentTarget{"target1": expectedTarget}, + + Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, + } + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + + layout := "2006-01-02T15:04:05.000Z" + str := "2014-11-12T11:45:26.371Z" + timeNow, _ := time.Parse(layout, str) + mock_time.On("Now").Return(timeNow) + + err := cacheReporter.ReportWorkflowStart(executionId0, playbook) if err != nil { t.Fail() } - err = cacheReporter.ReportWorkflowStart(executionId7, playbook) + err = cacheReporter.ReportWorkflowEnd(executionId0, playbook, nil) if err != nil { t.Fail() } - err = cacheReporter.ReportWorkflowStart(executionId8, playbook) + + expectedExecutionEntry := cache_model.ExecutionEntry{ + ExecutionId: executionId0, + PlaybookId: "test", + Started: timeNow, + Ended: timeNow, + StepResults: map[string]cache_model.StepResult{}, + Status: cache_model.SuccessfullyExecuted, + } + expectedExecutions := []cache_model.ExecutionEntry{expectedExecutionEntry} + + returnedExecutions, _ := cacheReporter.GetExecutions() + + exec, err := cacheReporter.GetExecutionReport(executionId0) + assert.Equal(t, expectedExecutions, returnedExecutions) + assert.Equal(t, 
expectedExecutionEntry.ExecutionId, exec.ExecutionId) + assert.Equal(t, expectedExecutionEntry.PlaybookId, exec.PlaybookId) + assert.Equal(t, expectedExecutionEntry.StepResults, exec.StepResults) + assert.Equal(t, expectedExecutionEntry.Status, exec.Status) + assert.Equal(t, exec.Ended, expectedExecutionEntry.Ended) + assert.Equal(t, err, nil) + mock_time.AssertExpectations(t) +} + +func TestReportStepStartAndEnd(t *testing.T) { + mock_time := new(mock_time.MockTime) + cacheReporter := cache.New(mock_time, 10) + + expectedCommand := cacao.Command{ + Type: "ssh", + Command: "ssh ls -la", + } + + expectedVariables := cacao.Variable{ + Type: "string", + Name: "var1", + Value: "testing", + } + + step1 := cacao.Step{ + Type: "action", + ID: "action--test", + Name: "ssh-tests", + StepVariables: cacao.NewVariables(expectedVariables), + Commands: []cacao.Command{expectedCommand}, + Cases: map[string]string{}, + OnCompletion: "end--test", + Agent: "agent1", + Targets: []string{"target1"}, + } + + end := cacao.Step{ + Type: "end", + ID: "end--test", + Name: "end step", + } + + expectedAuth := cacao.AuthenticationInformation{ + Name: "user", + ID: "auth1", + } + + expectedTarget := cacao.AgentTarget{ + Name: "sometarget", + AuthInfoIdentifier: "auth1", + ID: "target1", + } + + expectedAgent := cacao.AgentTarget{ + Type: "soarca", + Name: "soarca-ssh", + } + + playbook := cacao.Playbook{ + ID: "test", + Type: "test", + Name: "ssh-test", + WorkflowStart: step1.ID, + AuthenticationInfoDefinitions: map[string]cacao.AuthenticationInformation{"id": expectedAuth}, + AgentDefinitions: map[string]cacao.AgentTarget{"agent1": expectedAgent}, + TargetDefinitions: map[string]cacao.AgentTarget{"target1": expectedTarget}, + + Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, + } + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + layout := "2006-01-02T15:04:05.000Z" + str := "2014-11-12T11:45:26.371Z" + timeNow, _ := time.Parse(layout, str) + mock_time.On("Now").Return(timeNow) + + err := cacheReporter.ReportWorkflowStart(executionId0, playbook) if err != nil { t.Fail() } - err = cacheReporter.ReportWorkflowStart(executionId9, playbook) + err = cacheReporter.ReportStepStart(executionId0, step1, cacao.NewVariables(expectedVariables)) if err != nil { t.Fail() } - assert.Equal(t, expectedExecutionsFull, cacheReporter.GetExecutionsIDs()) + expectedStepStatus := cache_model.StepResult{ + ExecutionId: executionId0, + StepId: step1.ID, + Started: timeNow, + Ended: time.Time{}, + Variables: cacao.NewVariables(expectedVariables), + Status: cache_model.Ongoing, + Error: nil, + } + + exec, err := cacheReporter.GetExecutionReport(executionId0) + stepStatus := exec.StepResults[step1.ID] + assert.Equal(t, stepStatus.ExecutionId, expectedStepStatus.ExecutionId) + assert.Equal(t, stepStatus.StepId, expectedStepStatus.StepId) + assert.Equal(t, stepStatus.Started, expectedStepStatus.Started) + assert.Equal(t, stepStatus.Ended, expectedStepStatus.Ended) + assert.Equal(t, stepStatus.Variables, expectedStepStatus.Variables) + assert.Equal(t, stepStatus.Status, expectedStepStatus.Status) + assert.Equal(t, stepStatus.Error, expectedStepStatus.Error) + assert.Equal(t, err, nil) - err = cacheReporter.ReportWorkflowStart(executionId10, playbook) + err = cacheReporter.ReportStepEnd(executionId0, step1, cacao.NewVariables(expectedVariables), nil) if err != nil { t.Fail() } - assert.Equal(t, expectedExecutionsFifo, cacheReporter.GetExecutionsIDs()) + + expectedStepResult := cache_model.StepResult{ + 
ExecutionId: executionId0, + StepId: step1.ID, + Started: timeNow, + Ended: timeNow, + Variables: cacao.NewVariables(expectedVariables), + Status: cache_model.SuccessfullyExecuted, + Error: nil, + } + + exec, err = cacheReporter.GetExecutionReport(executionId0) + stepResult := exec.StepResults[step1.ID] + assert.Equal(t, stepResult.ExecutionId, expectedStepResult.ExecutionId) + assert.Equal(t, stepResult.StepId, expectedStepResult.StepId) + assert.Equal(t, stepResult.Started, expectedStepResult.Started) + assert.Equal(t, stepResult.Ended, expectedStepResult.Ended) + assert.Equal(t, stepResult.Variables, expectedStepResult.Variables) + assert.Equal(t, stepResult.Status, expectedStepResult.Status) + assert.Equal(t, stepResult.Error, expectedStepResult.Error) + assert.Equal(t, err, nil) mock_time.AssertExpectations(t) } -func TestReportWorkflowEnd(t *testing.T) { - +func TestReportStepStartCommandsEncoding(t *testing.T) { mock_time := new(mock_time.MockTime) - cacheReporter := cache.New(mock_time) + cacheReporter := cache.New(mock_time, 10) - expectedCommand := cacao.Command{ + expectedCommand1 := cacao.Command{ + Type: "manual", + CommandB64: b64.StdEncoding.EncodeToString([]byte("do ssh ls -la in the terminal")), + } + expectedCommand2 := cacao.Command{ Type: "ssh", Command: "ssh ls -la", } @@ -282,7 +511,7 @@ func TestReportWorkflowEnd(t *testing.T) { ID: "action--test", Name: "ssh-tests", StepVariables: cacao.NewVariables(expectedVariables), - Commands: []cacao.Command{expectedCommand}, + Commands: []cacao.Command{expectedCommand1, expectedCommand2}, Cases: map[string]string{}, OnCompletion: "end--test", Agent: "agent1", @@ -322,51 +551,89 @@ func TestReportWorkflowEnd(t *testing.T) { Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, } - executionId0, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") - + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") layout := "2006-01-02T15:04:05.000Z" str := "2014-11-12T11:45:26.371Z" timeNow, _ := time.Parse(layout, str) mock_time.On("Now").Return(timeNow) - expectedExecutions := []string{"6ba7b810-9dad-11d1-80b4-00c04fd430c0"} - err := cacheReporter.ReportWorkflowStart(executionId0, playbook) if err != nil { t.Fail() } - err = cacheReporter.ReportWorkflowEnd(executionId0, playbook, nil) + err = cacheReporter.ReportStepStart(executionId0, step1, cacao.NewVariables(expectedVariables)) if err != nil { t.Fail() } - expectedExecutionEntry := report.ExecutionEntry{ + encodedCommand1 := expectedCommand1.CommandB64 + encodedCommand2 := b64.StdEncoding.EncodeToString([]byte(expectedCommand2.Command)) + expectedCommandsB64 := []string{encodedCommand1, encodedCommand2} + + expectedStepStatus := cache_model.StepResult{ ExecutionId: executionId0, - PlaybookId: "test", + StepId: step1.ID, Started: timeNow, - Ended: timeNow, - StepResults: map[string]report.StepResult{}, - Status: report.SuccessfullyExecuted, + Ended: time.Time{}, + Variables: cacao.NewVariables(expectedVariables), + Status: cache_model.Ongoing, + CommandsB64: expectedCommandsB64, + Error: nil, + IsAutomated: false, } exec, err := cacheReporter.GetExecutionReport(executionId0) - assert.Equal(t, expectedExecutions, cacheReporter.GetExecutionsIDs()) - assert.Equal(t, expectedExecutionEntry.ExecutionId, exec.ExecutionId) - assert.Equal(t, expectedExecutionEntry.PlaybookId, exec.PlaybookId) - assert.Equal(t, expectedExecutionEntry.StepResults, exec.StepResults) - assert.Equal(t, expectedExecutionEntry.Status, exec.Status) - assert.Equal(t, exec.Ended, 
expectedExecutionEntry.Ended) + stepStatus := exec.StepResults[step1.ID] + t.Log("stepStatus commands") + t.Log(stepStatus.CommandsB64) + t.Log("expectedStep commands") + t.Log(expectedStepStatus.CommandsB64) + assert.Equal(t, stepStatus.ExecutionId, expectedStepStatus.ExecutionId) + assert.Equal(t, stepStatus.StepId, expectedStepStatus.StepId) + assert.Equal(t, stepStatus.Started, expectedStepStatus.Started) + assert.Equal(t, stepStatus.Ended, expectedStepStatus.Ended) + assert.Equal(t, stepStatus.Variables, expectedStepStatus.Variables) + assert.Equal(t, stepStatus.Status, expectedStepStatus.Status) + assert.Equal(t, stepStatus.Error, expectedStepStatus.Error) + assert.Equal(t, stepStatus.CommandsB64, expectedStepStatus.CommandsB64) + assert.Equal(t, stepStatus.IsAutomated, expectedStepStatus.IsAutomated) + assert.Equal(t, err, nil) + + err = cacheReporter.ReportStepEnd(executionId0, step1, cacao.NewVariables(expectedVariables), nil) + if err != nil { + t.Fail() + } + + expectedStepResult := cache_model.StepResult{ + ExecutionId: executionId0, + StepId: step1.ID, + Started: timeNow, + Ended: timeNow, + Variables: cacao.NewVariables(expectedVariables), + Status: cache_model.SuccessfullyExecuted, + Error: nil, + } + + exec, err = cacheReporter.GetExecutionReport(executionId0) + stepResult := exec.StepResults[step1.ID] + assert.Equal(t, stepResult.ExecutionId, expectedStepResult.ExecutionId) + assert.Equal(t, stepResult.StepId, expectedStepResult.StepId) + assert.Equal(t, stepResult.Started, expectedStepResult.Started) + assert.Equal(t, stepResult.Ended, expectedStepResult.Ended) + assert.Equal(t, stepResult.Variables, expectedStepResult.Variables) + assert.Equal(t, stepResult.Status, expectedStepResult.Status) + assert.Equal(t, stepResult.Error, expectedStepResult.Error) assert.Equal(t, err, nil) mock_time.AssertExpectations(t) } -func TestReportStepStartAndEnd(t *testing.T) { +func TestReportStepStartManualCommand(t *testing.T) { mock_time := new(mock_time.MockTime) - cacheReporter := cache.New(mock_time) + cacheReporter := cache.New(mock_time, 10) expectedCommand := cacao.Command{ - Type: "ssh", - Command: "ssh ls -la", + Type: "manual", + Command: "do ssh ls -la in the terminal", } expectedVariables := cacao.Variable{ @@ -420,7 +687,7 @@ func TestReportStepStartAndEnd(t *testing.T) { Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, } - executionId0, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") layout := "2006-01-02T15:04:05.000Z" str := "2014-11-12T11:45:26.371Z" timeNow, _ := time.Parse(layout, str) @@ -435,14 +702,18 @@ func TestReportStepStartAndEnd(t *testing.T) { t.Fail() } - expectedStepStatus := report.StepResult{ + encodedCommand := b64.StdEncoding.EncodeToString([]byte(expectedCommand.Command)) + + expectedStepStatus := cache_model.StepResult{ ExecutionId: executionId0, StepId: step1.ID, Started: timeNow, Ended: time.Time{}, Variables: cacao.NewVariables(expectedVariables), - Status: report.Ongoing, + Status: cache_model.Ongoing, + CommandsB64: []string{encodedCommand}, Error: nil, + IsAutomated: false, } exec, err := cacheReporter.GetExecutionReport(executionId0) @@ -454,6 +725,8 @@ func TestReportStepStartAndEnd(t *testing.T) { assert.Equal(t, stepStatus.Variables, expectedStepStatus.Variables) assert.Equal(t, stepStatus.Status, expectedStepStatus.Status) assert.Equal(t, stepStatus.Error, expectedStepStatus.Error) + assert.Equal(t, stepStatus.CommandsB64, 
expectedStepStatus.CommandsB64) + assert.Equal(t, stepStatus.IsAutomated, expectedStepStatus.IsAutomated) assert.Equal(t, err, nil) err = cacheReporter.ReportStepEnd(executionId0, step1, cacao.NewVariables(expectedVariables), nil) @@ -461,13 +734,13 @@ func TestReportStepStartAndEnd(t *testing.T) { t.Fail() } - expectedStepResult := report.StepResult{ + expectedStepResult := cache_model.StepResult{ ExecutionId: executionId0, StepId: step1.ID, Started: timeNow, Ended: timeNow, Variables: cacao.NewVariables(expectedVariables), - Status: report.SuccessfullyExecuted, + Status: cache_model.SuccessfullyExecuted, Error: nil, } @@ -486,7 +759,7 @@ func TestReportStepStartAndEnd(t *testing.T) { func TestInvalidStepReportAfterExecutionEnd(t *testing.T) { mock_time := new(mock_time.MockTime) - cacheReporter := cache.New(mock_time) + cacheReporter := cache.New(mock_time, 10) expectedCommand := cacao.Command{ Type: "ssh", @@ -544,7 +817,7 @@ func TestInvalidStepReportAfterExecutionEnd(t *testing.T) { Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, } - executionId0, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") layout := "2006-01-02T15:04:05.000Z" str := "2014-11-12T11:45:26.371Z" timeNow, _ := time.Parse(layout, str) @@ -575,7 +848,7 @@ func TestInvalidStepReportAfterExecutionEnd(t *testing.T) { func TestInvalidStepReportAfterStepEnd(t *testing.T) { mock_time := new(mock_time.MockTime) - cacheReporter := cache.New(mock_time) + cacheReporter := cache.New(mock_time, 10) expectedCommand := cacao.Command{ Type: "ssh", @@ -633,7 +906,7 @@ func TestInvalidStepReportAfterStepEnd(t *testing.T) { Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, } - executionId0, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") layout := "2006-01-02T15:04:05.000Z" str := "2014-11-12T11:45:26.371Z" timeNow, _ := time.Parse(layout, str) diff --git a/test/unittest/routes/reporter_api/reporter_api_invocation_test.go b/test/unittest/routes/reporter_api/reporter_api_invocation_test.go new file mode 100644 index 00000000..abd5f288 --- /dev/null +++ b/test/unittest/routes/reporter_api/reporter_api_invocation_test.go @@ -0,0 +1,149 @@ +package reporter_api_test + +import ( + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + api_model "soarca/models/api" + cache_model "soarca/models/cache" + "soarca/routes/reporter" + mock_cache "soarca/test/unittest/mocks/mock_cache" + "testing" + + "github.com/google/uuid" + + "github.com/gin-gonic/gin" + "github.com/go-playground/assert/v2" +) + +func TestGetExecutionsInvocation(t *testing.T) { + mock_cache_reporter := &mock_cache.Mock_Cache{} + mock_cache_reporter.On("GetExecutions").Return([]cache_model.ExecutionEntry{}, nil) + + app := gin.New() + gin.SetMode(gin.DebugMode) + + recorder := httptest.NewRecorder() + reporter.Routes(app, mock_cache_reporter) + + request, err := http.NewRequest("GET", "/reporter/", nil) + if err != nil { + t.Fail() + } + + app.ServeHTTP(recorder, request) + expectedString := "[]" + assert.Equal(t, expectedString, recorder.Body.String()) + assert.Equal(t, 200, recorder.Code) + + mock_cache_reporter.AssertExpectations(t) +} + +func TestGetExecutionReportInvocation(t *testing.T) { + mock_cache_reporter := &mock_cache.Mock_Cache{} + app := gin.New() + gin.SetMode(gin.DebugMode) + + recorder := httptest.NewRecorder() + reporter.Routes(app, mock_cache_reporter) + + 
executionId0, _ := uuid.Parse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + + expectedCache := `{ + "ExecutionId":"6ba7b810-9dad-11d1-80b4-00c04fd430c0", + "PlaybookId":"test", + "Started":"2014-11-12T11:45:26.371Z", + "Ended":"0001-01-01T00:00:00Z", + "StepResults":{ + "action--test":{ + "ExecutionId":"6ba7b810-9dad-11d1-80b4-00c04fd430c0", + "StepId":"action--test", + "Started":"2014-11-12T11:45:26.371Z", + "Ended":"2014-11-12T11:45:26.371Z", + "Variables":{ + "var1":{ + "type":"string", + "name":"var1", + "value":"testing" + } + }, + "CommandsB64" : [], + "IsAutomated" : true, + "Status":0, + "Error":null + } + }, + "PlaybookResult":null, + "Status":2 + }` + expectedCacheData := cache_model.ExecutionEntry{} + err := json.Unmarshal([]byte(expectedCache), &expectedCacheData) + if err != nil { + t.Log(err) + t.Log("Could not parse data to JSON") + t.Fail() + } + + mock_cache_reporter.On("GetExecutionReport", executionId0).Return(expectedCacheData, nil) + + request, err := http.NewRequest("GET", fmt.Sprintf("/reporter/%s", executionId0), nil) + if err != nil { + t.Log(err) + t.Fail() + } + app.ServeHTTP(recorder, request) + + expectedResponse := `{ + "type":"execution_status", + "execution_id":"6ba7b810-9dad-11d1-80b4-00c04fd430c0", + "playbook_id":"test", + "started":"2014-11-12T11:45:26.371Z", + "ended":"0001-01-01T00:00:00Z", + "status":"ongoing", + "status_text":"this playbook is currently being executed", + "step_results":{ + "action--test":{ + "execution_id":"6ba7b810-9dad-11d1-80b4-00c04fd430c0", + "step_id": "action--test", + "started": "2014-11-12T11:45:26.371Z", + "ended": "2014-11-12T11:45:26.371Z", + "status": "successfully_executed", + "status_text": "step execution completed successfully", + "Variables":{ + "var1":{ + "type":"string", + "name":"var1", + "value":"testing" + } + }, + "commands_b64" : [], + "automated_execution" : true, + "executed_by" : "soarca" + } + }, + "request_interval":5 + }` + expectedResponseData := api_model.PlaybookExecutionReport{} + err = json.Unmarshal([]byte(expectedResponse), &expectedResponseData) + if err != nil { + t.Log(err) + t.Log("Could not parse data to JSON") + t.Fail() + } + + receivedData := api_model.PlaybookExecutionReport{} + err = json.Unmarshal(recorder.Body.Bytes(), &receivedData) + if err != nil { + t.Log(err) + t.Log("Could not parse data to JSON") + t.Fail() + } + + t.Log("expected response") + t.Log(expectedResponseData) + t.Log("received response") + t.Log(receivedData) + assert.Equal(t, expectedResponseData, receivedData) + mock_cache_reporter.AssertExpectations(t) +} diff --git a/test/unittest/routes/reporter_api/reporter_api_test.go b/test/unittest/routes/reporter_api/reporter_api_test.go new file mode 100644 index 00000000..de48c76f --- /dev/null +++ b/test/unittest/routes/reporter_api/reporter_api_test.go @@ -0,0 +1,319 @@ +package reporter_api_test + +import ( + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "soarca/internal/reporter/downstream_reporter/cache" + api_model "soarca/models/api" + "soarca/models/cacao" + cache_model "soarca/models/cache" + "soarca/routes/reporter" + mock_time "soarca/test/unittest/mocks/mock_utils/time" + "testing" + "time" + + "github.com/google/uuid" + + "github.com/gin-gonic/gin" + "github.com/go-playground/assert/v2" +) + +func TestGetExecutions(t *testing.T) { + + mock_time := new(mock_time.MockTime) + cacheReporter := cache.New(mock_time, 10) + + expectedCommand := cacao.Command{ + Type: "ssh", + Command: "ssh ls -la", + } + + expectedVariables := cacao.Variable{ + Type: 
"string", + Name: "var1", + Value: "testing", + } + + step1 := cacao.Step{ + Type: "action", + ID: "action--test", + Name: "ssh-tests", + StepVariables: cacao.NewVariables(expectedVariables), + Commands: []cacao.Command{expectedCommand}, + Cases: map[string]string{}, + OnCompletion: "end--test", + Agent: "agent1", + Targets: []string{"target1"}, + } + + end := cacao.Step{ + Type: "end", + ID: "end--test", + Name: "end step", + } + + expectedAuth := cacao.AuthenticationInformation{ + Name: "user", + ID: "auth1", + } + + expectedTarget := cacao.AgentTarget{ + Name: "sometarget", + AuthInfoIdentifier: "auth1", + ID: "target1", + } + + expectedAgent := cacao.AgentTarget{ + Type: "soarca", + Name: "soarca-ssh", + } + + playbook := cacao.Playbook{ + ID: "test", + Type: "test", + Name: "ssh-test", + WorkflowStart: step1.ID, + AuthenticationInfoDefinitions: map[string]cacao.AuthenticationInformation{"id": expectedAuth}, + AgentDefinitions: map[string]cacao.AgentTarget{"agent1": expectedAgent}, + TargetDefinitions: map[string]cacao.AgentTarget{"target1": expectedTarget}, + + Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, + } + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + executionId1 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c1") + executionId2 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c2") + + executionIds := []uuid.UUID{ + executionId0, + executionId1, + executionId2, + } + + layout := "2006-01-02T15:04:05.000Z" + str := "2014-11-12T11:45:26.371Z" + timeNow, _ := time.Parse(layout, str) + mock_time.On("Now").Return(timeNow) + + expectedStarted, _ := time.Parse(layout, str) + expectedEnded, _ := time.Parse(layout, "0001-01-01T00:00:00Z") + + expectedExecutions := []cache_model.ExecutionEntry{} + for _, executionId := range executionIds { + t.Log(executionId) + entry := cache_model.ExecutionEntry{ + ExecutionId: executionId, + PlaybookId: "test", + Started: expectedStarted, + Ended: expectedEnded, + StepResults: map[string]cache_model.StepResult{}, + Error: nil, + Status: cache_model.Ongoing, + } + expectedExecutions = append(expectedExecutions, entry) + } + + err := cacheReporter.ReportWorkflowStart(executionId0, playbook) + if err != nil { + t.Fail() + } + + err = cacheReporter.ReportWorkflowStart(executionId1, playbook) + if err != nil { + t.Fail() + } + err = cacheReporter.ReportWorkflowStart(executionId2, playbook) + if err != nil { + t.Fail() + } + + app := gin.New() + gin.SetMode(gin.DebugMode) + + recorder := httptest.NewRecorder() + reporter.Routes(app, cacheReporter) + + request, err := http.NewRequest("GET", "/reporter/", nil) + if err != nil { + t.Fail() + } + + app.ServeHTTP(recorder, request) + expectedByte, err := json.Marshal(expectedExecutions) + if err != nil { + t.Log("failed to decode expected struct to json") + t.Fail() + } + expectedString := string(expectedByte) + assert.Equal(t, expectedString, recorder.Body.String()) + assert.Equal(t, 200, recorder.Code) + + mock_time.AssertExpectations(t) +} + +func TestGetExecutionReport(t *testing.T) { + // Create real cache, create real reporter api object + // Do executions, test retrieval via api + + mock_time := new(mock_time.MockTime) + cacheReporter := cache.New(mock_time, 10) + + expectedCommand := cacao.Command{ + Type: "ssh", + Command: "ssh ls -la", + } + + expectedVariables := cacao.Variable{ + Type: "string", + Name: "var1", + Value: "testing", + } + + step1 := cacao.Step{ + Type: "action", + ID: "action--test", + Name: "ssh-tests", + StepVariables: 
cacao.NewVariables(expectedVariables), + Commands: []cacao.Command{expectedCommand}, + Cases: map[string]string{}, + OnCompletion: "end--test", + Agent: "agent1", + Targets: []string{"target1"}, + } + + end := cacao.Step{ + Type: "end", + ID: "end--test", + Name: "end step", + } + + expectedAuth := cacao.AuthenticationInformation{ + Name: "user", + ID: "auth1", + } + + expectedTarget := cacao.AgentTarget{ + Name: "sometarget", + AuthInfoIdentifier: "auth1", + ID: "target1", + } + + expectedAgent := cacao.AgentTarget{ + Type: "soarca", + Name: "soarca-ssh", + } + + playbook := cacao.Playbook{ + ID: "test", + Type: "test", + Name: "ssh-test", + WorkflowStart: step1.ID, + AuthenticationInfoDefinitions: map[string]cacao.AuthenticationInformation{"id": expectedAuth}, + AgentDefinitions: map[string]cacao.AgentTarget{"agent1": expectedAgent}, + TargetDefinitions: map[string]cacao.AgentTarget{"target1": expectedTarget}, + Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, + } + + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + executionId1 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c1") + executionId2 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c2") + + layout := "2006-01-02T15:04:05.000Z" + str := "2014-11-12T11:45:26.371Z" + timeNow, _ := time.Parse(layout, str) + mock_time.On("Now").Return(timeNow) + + err := cacheReporter.ReportWorkflowStart(executionId0, playbook) + if err != nil { + t.Fail() + } + err = cacheReporter.ReportStepStart(executionId0, step1, cacao.NewVariables(expectedVariables)) + if err != nil { + t.Fail() + } + + err = cacheReporter.ReportWorkflowStart(executionId1, playbook) + if err != nil { + t.Fail() + } + err = cacheReporter.ReportWorkflowStart(executionId2, playbook) + if err != nil { + t.Fail() + } + err = cacheReporter.ReportStepEnd(executionId0, step1, cacao.NewVariables(expectedVariables), nil) + if err != nil { + t.Fail() + } + + app := gin.New() + gin.SetMode(gin.DebugMode) + + recorder := httptest.NewRecorder() + reporter.Routes(app, cacheReporter) + + expected := `{ + "type":"execution_status", + "execution_id":"6ba7b810-9dad-11d1-80b4-00c04fd430c0", + "playbook_id":"test", + "started":"2014-11-12T11:45:26.371Z", + "ended":"0001-01-01T00:00:00Z", + "status":"ongoing", + "status_text":"this playbook is currently being executed", + "step_results":{ + "action--test":{ + "execution_id":"6ba7b810-9dad-11d1-80b4-00c04fd430c0", + "step_id":"action--test", + "started":"2014-11-12T11:45:26.371Z", + "ended":"2014-11-12T11:45:26.371Z", + "status":"successfully_executed", + "status_text": "step execution completed successfully", + "variables":{ + "var1":{ + "type":"string", + "name":"var1", + "value":"testing" + } + }, + "commands_b64" : ["c3NoIGxzIC1sYQ=="], + "automated_execution" : true, + "executed_by" : "soarca" + } + }, + "request_interval":5 + }` + expectedData := api_model.PlaybookExecutionReport{} + err = json.Unmarshal([]byte(expected), &expectedData) + if err != nil { + t.Log(err) + t.Log("Could not parse data to JSON") + t.Fail() + } + t.Log("expected") + b, err := json.MarshalIndent(expectedData, "", " ") + if err != nil { + fmt.Println(err) + } + fmt.Print(string(b)) + + request, err := http.NewRequest("GET", fmt.Sprintf("/reporter/%s", executionId0), nil) + if err != nil { + t.Log(err) + t.Fail() + } + app.ServeHTTP(recorder, request) + + receivedData := api_model.PlaybookExecutionReport{} + err = json.Unmarshal(recorder.Body.Bytes(), &receivedData) + if err != nil { + t.Log(err) + t.Log("Could not parse 
data to JSON") + t.Fail() + } + t.Log("received") + t.Log(receivedData) + + assert.Equal(t, expectedData, receivedData) + + mock_time.AssertExpectations(t) +}