From c478379f2d291e6dddb4ec52e2d6337ebad38921 Mon Sep 17 00:00:00 2001 From: lucamrgs <39555424+lucamrgs@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:19:02 +0100 Subject: [PATCH] initial thehive reporting setup (#237) --- .env.example | 9 +- docker/soarca/docker-compose.yml | 5 + example/http-playbook.json | 2 +- internal/controller/controller.go | 38 +- .../downstream_reporter/thehive/ids_map.go | 73 +++ .../thehive/thehive_connector.go | 468 ++++++++++++++++++ .../thehive/thehive_models/models.go | 123 +++++ .../thehive/thehive_reporter.go | 76 +++ .../thehive/thehive_utils/utils.go | 101 ++++ internal/reporter/reporter.go | 96 ++-- test/manual/thehive_reporter/thehive_test.go | 157 ++++++ 11 files changed, 1110 insertions(+), 38 deletions(-) create mode 100644 internal/reporter/downstream_reporter/thehive/ids_map.go create mode 100644 internal/reporter/downstream_reporter/thehive/thehive_connector.go create mode 100644 internal/reporter/downstream_reporter/thehive/thehive_models/models.go create mode 100644 internal/reporter/downstream_reporter/thehive/thehive_reporter.go create mode 100644 internal/reporter/downstream_reporter/thehive/thehive_utils/utils.go create mode 100644 test/manual/thehive_reporter/thehive_test.go diff --git a/.env.example b/.env.example index 58b5f0d3..5923bd11 100644 --- a/.env.example +++ b/.env.example @@ -18,4 +18,11 @@ ENABLE_FINS: false MQTT_BROKER: "localhost" MQTT_PORT: 1883 -HTTP_SKIP_CERT_VALIDATION: false \ No newline at end of file +HTTP_SKIP_CERT_VALIDATION: false + +### Integrations + +# The Hive +THEHIVE_ACTIVATE: true +THEHIVE_API_TOKEN: your_token +THEHIVE_API_BASE_URL: http://localhost:9000/api/v1/ \ No newline at end of file diff --git a/docker/soarca/docker-compose.yml b/docker/soarca/docker-compose.yml index 4dac734a..3a8b2db5 100644 --- a/docker/soarca/docker-compose.yml +++ b/docker/soarca/docker-compose.yml @@ -59,6 +59,11 @@ services: MQTT_BROKER: "mosquitto" MQTT_PORT: 1883 HTTP_SKIP_CERT_VALIDATION: 
false + # Integrations: + # The Hive + THEHIVE_ACTIVATE: false + THEHIVE_API_TOKEN: your_token + THEHIVE_API_BASE_URL: http://localhost:9000/api/v1/ networks: - db-net - mqtt-net diff --git a/example/http-playbook.json b/example/http-playbook.json index 1148259d..4225506a 100644 --- a/example/http-playbook.json +++ b/example/http-playbook.json @@ -2,7 +2,7 @@ "type": "playbook", "spec_version": "cacao-2.0", "id": "playbook--300270f9-0e64-42c8-93cc-0927edbe3ae7", - "name": "Example ssh", + "name": "Example http", "description": "This playbook is to demonstrate the http functionality", "playbook_types": [ "notification" diff --git a/internal/controller/controller.go b/internal/controller/controller.go index b0df207c..b2e1b182 100644 --- a/internal/controller/controller.go +++ b/internal/controller/controller.go @@ -2,6 +2,7 @@ package controller import ( "errors" + "fmt" "os" "reflect" "strconv" @@ -22,6 +23,7 @@ import ( "soarca/internal/guid" "soarca/internal/reporter" cache "soarca/internal/reporter/downstream_reporter/cache" + "soarca/internal/reporter/downstream_reporter/thehive" "soarca/logger" "soarca/utils" httpUtil "soarca/utils/http" @@ -89,11 +91,23 @@ func (controller *Controller) NewDecomposer() decomposer.IDecomposer { // NOTE: Enrolling mainCache by default as reporter reporter := reporter.New([]downstreamReporter.IDownStreamReporter{}) - downstreamReporters := []downstreamReporter.IDownStreamReporter{&mainCache} - err := reporter.RegisterReporters(downstreamReporters) + cacheReporterAsList := []downstreamReporter.IDownStreamReporter{&mainCache} + + // Reporter integrations + theHiveReporterAsList := []downstreamReporter.IDownStreamReporter{} + thehive_reporter := initializeIntegrationTheHiveReporting() + if thehive_reporter != nil { + theHiveReporterAsList = append(theHiveReporterAsList, thehive_reporter) + } + + err := reporter.RegisterReporters(cacheReporterAsList) if err != nil { log.Error("could not load main Cache as reporter for decomposer and 
executors") } + err = reporter.RegisterReporters(theHiveReporterAsList) + if err != nil { + log.Error("could not load The Hive as reporter for decomposer and executors") + } soarcaTime := new(timeUtil.Time) actionExecutor := action.New(capabilities, reporter, soarcaTime) @@ -229,6 +243,26 @@ func (controller *Controller) setupAndRunMqtt() error { return nil } +func initializeIntegrationTheHiveReporting() downstreamReporter.IDownStreamReporter { + initTheHiveReporter, _ := strconv.ParseBool(utils.GetEnv("THEHIVE_ACTIVATE", "false")) + if !initTheHiveReporter { + return nil + } + log.Info("initializing The Hive reporting integration") + + thehive_api_token := utils.GetEnv("THEHIVE_API_TOKEN", "") + thehive_api_base_url := utils.GetEnv("THEHIVE_API_BASE_URL", "") + if len(thehive_api_base_url) < 1 || len(thehive_api_token) < 1 { + log.Warning("could not initialize The Hive reporting integration. Check to have configured the env variables correctly.") + return nil + } + + log.Info(fmt.Sprintf("creating new The hive connector with API base url at : %s", thehive_api_base_url)) + theHiveConnector := thehive.NewConnector(thehive_api_base_url, thehive_api_token) + theHiveReporter := thehive.NewReporter(theHiveConnector) + return theHiveReporter +} + func getMqttDetails() (string, int) { broker := utils.GetEnv("MQTT_BROKER", "localhost") port, err := strconv.Atoi(utils.GetEnv("MQTT_PORT", "1883")) diff --git a/internal/reporter/downstream_reporter/thehive/ids_map.go b/internal/reporter/downstream_reporter/thehive/ids_map.go new file mode 100644 index 00000000..ab023a09 --- /dev/null +++ b/internal/reporter/downstream_reporter/thehive/ids_map.go @@ -0,0 +1,73 @@ +package thehive + +import ( + "fmt" +) + +// ############################### Playbook to TheHive ID mappings + +type SOARCATheHiveMap struct { + executionsCaseMaps map[string]ExecutionCaseMap +} +type ExecutionCaseMap struct { + caseId string + stepsTasksMap map[string]string +} + +// TODO: Change to using 
observables instead of updating the tasks descriptions + +func (soarcaTheHiveMap *SOARCATheHiveMap) checkExecutionCaseExists(executionId string) error { + if _, ok := soarcaTheHiveMap.executionsCaseMaps[executionId]; !ok { + return fmt.Errorf("case not found for execution id %s", executionId) + } + return nil +} +func (soarcaTheHiveMap *SOARCATheHiveMap) checkExecutionStepTaskExists(executionId string, stepId string) error { + if _, ok := soarcaTheHiveMap.executionsCaseMaps[executionId].stepsTasksMap[stepId]; !ok { + return fmt.Errorf("task not found for execution id %s for step id %s", executionId, stepId) + } + return nil +} + +func (soarcaTheHiveMap *SOARCATheHiveMap) registerExecutionInCase(executionId string, caseId string) error { + soarcaTheHiveMap.executionsCaseMaps[executionId] = ExecutionCaseMap{ + caseId: caseId, + stepsTasksMap: map[string]string{}, + } + log.Info(fmt.Sprintf("registering execution: %s, case id: %s", executionId, caseId)) + + return nil +} +func (soarcaTheHiveMap *SOARCATheHiveMap) registerStepTaskInCase(executionId string, stepId string, taskId string) { + soarcaTheHiveMap.executionsCaseMaps[executionId].stepsTasksMap[stepId] = taskId +} + +func (soarcaTheHiveMap *SOARCATheHiveMap) retrieveCaseId(executionId string) (string, error) { + err := soarcaTheHiveMap.checkExecutionCaseExists(executionId) + if err != nil { + return "", err + } + return soarcaTheHiveMap.executionsCaseMaps[executionId].caseId, nil +} + +func (soarcaTheHiveMap *SOARCATheHiveMap) retrieveTaskId(executionId string, stepId string) (string, error) { + err := soarcaTheHiveMap.checkExecutionCaseExists(executionId) + if err != nil { + return "", err + } + err = soarcaTheHiveMap.checkExecutionStepTaskExists(executionId, stepId) + if err != nil { + return "", err + } + return soarcaTheHiveMap.executionsCaseMaps[executionId].stepsTasksMap[stepId], nil +} + +// func (soarcaTheHiveMap *SOARCATheHiveMap) clearCase(executionId string) error { +// err := 
soarcaTheHiveMap.checkExecutionCaseExists(executionId) +// if err != nil { +// return err +// } +// return nil +// } + +// func (soarcaTheHiveMap *SOARCATheHiveMap) clearMap(executionId string) error diff --git a/internal/reporter/downstream_reporter/thehive/thehive_connector.go b/internal/reporter/downstream_reporter/thehive/thehive_connector.go new file mode 100644 index 00000000..90c31349 --- /dev/null +++ b/internal/reporter/downstream_reporter/thehive/thehive_connector.go @@ -0,0 +1,468 @@ +package thehive + +import ( + "errors" + "fmt" + "io" + "net/http" + "reflect" + "soarca/internal/reporter/downstream_reporter/thehive/thehive_models" + "soarca/internal/reporter/downstream_reporter/thehive/thehive_utils" + "soarca/logger" + "soarca/models/cacao" + "time" +) + +// TODOs +// Fix asynchronous http api calls causing The Hive reporting to be all over the place + +var ( + component = reflect.TypeOf(TheHiveConnector{}).PkgPath() + log *logger.Log +) + +func init() { + log = logger.Logger(component, logger.Info, "", logger.Json) +} + +// ############################### ITheHiveConnector interface + +type ITheHiveConnector interface { + Hello() string + PostNewExecutionCase(executionMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) + UpdateEndExecutionCase(executionMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) + UpdateStartStepTaskInCase(executionMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) + UpdateEndStepTaskInCase(executionMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) +} + +// ############################### TheHiveConnector object + +type TheHiveConnector struct { + baseUrl string + apiKey string + ids_map *SOARCATheHiveMap +} + +func NewConnector(theHiveEndpoint string, theHiveApiKey string) *TheHiveConnector { + ids_map := &SOARCATheHiveMap{} + ids_map.executionsCaseMaps = map[string]ExecutionCaseMap{} + return &TheHiveConnector{ + baseUrl: theHiveEndpoint, 
+ apiKey: theHiveApiKey, + ids_map: ids_map, + } +} + +// ############################### Functions + +func (theHiveConnector *TheHiveConnector) postCommentInTaskLog(executionId string, step cacao.Step, note string) error { + log.Trace(fmt.Sprintf("posting comment in task log via execution ID: %s. step ID: %s", executionId, step.ID)) + taskId, err := theHiveConnector.ids_map.retrieveTaskId(executionId, step.ID) + if err != nil { + return err + } + url := theHiveConnector.baseUrl + "/task/" + taskId + "/log" + method := "POST" + + message := thehive_models.TaskLog{Message: note} + + body, err := theHiveConnector.sendRequest(method, url, message) + if err != nil { + return err + } + messageId, err := theHiveConnector.getIdFromRespBody(body) + if err != nil { + return err + } + log.Trace(fmt.Sprintf("task log created. execution ID %s, Task id %s, message Id: %s", executionId, taskId, messageId)) + + return nil +} + +func (theHiveConnector *TheHiveConnector) postStepDataAsCommentInTaskLog(executionId string, step cacao.Step, note string) error { + + message := note + "\n" + stepData, err := thehive_utils.StructToMDJSON(step) + if err != nil { + return err + } + + message = message + stepData + + err = theHiveConnector.postCommentInTaskLog(executionId, step, message) + if err != nil { + return err + } + + return nil +} + +func (theHiveConnector *TheHiveConnector) postStepVariablesAsCommentInTaskLog(executionId string, step cacao.Step, note string) error { + variablesString := note + "\n" + for _, variable := range step.StepVariables { + variableJson, err := thehive_utils.StructToMDJSON(variable) + if err != nil { + return err + } + variablesString = variablesString + variableJson + } + + err := theHiveConnector.postCommentInTaskLog(executionId, step, variablesString) + if err != nil { + return err + } + + return nil +} + +func (theHiveConnector *TheHiveConnector) postCommentInCase(executionId string, note string) error { + log.Trace(fmt.Sprintf("posting comment in case 
via execution ID: %s.", executionId)) + caseId, err := theHiveConnector.ids_map.retrieveCaseId(executionId) + if err != nil { + return err + } + + url := theHiveConnector.baseUrl + "/case/" + caseId + "/comment" + method := "POST" + + message := thehive_models.MessagePost{Message: note} + + body, err := theHiveConnector.sendRequest(method, url, message) + if err != nil { + return err + } + messageId, err := theHiveConnector.getIdFromRespBody(body) + if err != nil { + return err + } + log.Trace(fmt.Sprintf("Case comment created. execution ID %s, caseId %s, message Id: %s", executionId, caseId, messageId)) + return nil +} + +func (theHiveConnector *TheHiveConnector) postVariablesAsCommentInCase(executionId string, variables cacao.Variables, note string) error { + + variablesString := note + "\n" + for _, variable := range variables { + variableJson, err := thehive_utils.StructToTxtJSON(variable) + if err != nil { + return err + } + variablesString = variablesString + variableJson + } + + err := theHiveConnector.postCommentInCase(executionId, variablesString) + if err != nil { + return err + } + + return nil +} + +func (theHiveConnector *TheHiveConnector) postNewStepTaskInCase(executionId string, step cacao.Step) error { + caseId, err := theHiveConnector.ids_map.retrieveCaseId(executionId) + if err != nil { + return err + } + url := theHiveConnector.baseUrl + "/case/" + caseId + "/task" + method := "POST" + + taskDescription := step.Description + "\n" + fmt.Sprintf("(SOARCA step: %s )", step.ID) + task := thehive_models.Task{ + Title: step.Name, + Description: taskDescription, + } + + body, err := theHiveConnector.sendRequest(method, url, task) + if err != nil { + return err + } + + task_id, err := theHiveConnector.getIdFromRespBody(body) + if err != nil { + return err + } + theHiveConnector.ids_map.registerStepTaskInCase(executionId, step.ID, task_id) + + return nil +} + +// ######################################## Connector interface + +func (theHiveConnector 
*TheHiveConnector) PostNewExecutionCase(execMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) { + log.Trace(fmt.Sprintf("posting new case to The Hive. execution ID %s, playbook %+v", execMetadata.ExecutionId, execMetadata.Playbook)) + url := theHiveConnector.baseUrl + "/case" + method := "POST" + + // Add execution ID and playbook ID to tags (first and second tags) + caseTags := []string{execMetadata.ExecutionId, execMetadata.Playbook.ID} + caseTags = append(caseTags, execMetadata.Playbook.Labels...) + + data := thehive_models.Case{ + Title: execMetadata.Playbook.Name, + Description: execMetadata.Playbook.Description, + //StartDate: int(time.Now().Unix()), + Tags: caseTags, + } + + log.Tracef("sending request: %s %s", method, url) + + body, err := theHiveConnector.sendRequest(method, url, data) + if err != nil { + return "", err + } + + caseId, err := theHiveConnector.getIdFromRespBody(body) + if err != nil { + return "", err + } + + log.Trace("Executing register execution in case") + + err = theHiveConnector.ids_map.registerExecutionInCase(execMetadata.ExecutionId, caseId) + if err != nil { + return "", err + } + + // Pre-populate tasks according to playbook steps + for _, step := range execMetadata.Playbook.Workflow { + if step.Type == cacao.StepTypeStart || step.Type == cacao.StepTypeEnd { + continue + } + err := theHiveConnector.postNewStepTaskInCase(execMetadata.ExecutionId, step) + if err != nil { + return "", err + } + } + + executionStartMessage := fmt.Sprintf( + "START\nplaybook ID\n\t\t[ %s ]\nexecution ID\n\t\t[ %s ]\nstarted at\n\t\t[ %s ]", + execMetadata.Playbook.ID, execMetadata.ExecutionId, at.String()) + err = theHiveConnector.postCommentInCase(execMetadata.ExecutionId, executionStartMessage) + if err != nil { + log.Warningf("could not post message to case: %s", err) + } + + err = theHiveConnector.postVariablesAsCommentInCase( + execMetadata.ExecutionId, execMetadata.Playbook.PlaybookVariables, + "variables at start of 
execution") + if err != nil { + log.Warningf("could not report variables in case comment: %s", err) + } + + log.Tracef("case posted with case ID: %s", caseId) + return string(body), nil +} + +func (theHiveConnector *TheHiveConnector) UpdateEndExecutionCase(execMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) { + caseId, err := theHiveConnector.ids_map.retrieveCaseId(execMetadata.ExecutionId) + if err != nil { + return "", err + } + log.Trace(fmt.Sprintf("updating case status to The Hive. execution ID %s, The Hive case ID %s", execMetadata.ExecutionId, caseId)) + + url := theHiveConnector.baseUrl + "/case/" + caseId + method := "PATCH" + + err = theHiveConnector.postVariablesAsCommentInCase(execMetadata.ExecutionId, execMetadata.Variables, "variables at end of execution") + if err != nil { + log.Warningf("could not post variables as case comment: %s", err) + } + + caseStatus := thehive_models.TheHiveCaseStatusTruePositive + closureComment := fmt.Sprintf("END\nexecution ID\n\t\t[ %s ]\nended at\n\t\t[ %s ]", execMetadata.ExecutionId, at.String()) + if execMetadata.ExecutionErr != nil { + caseStatus = thehive_models.TheHiveCaseStatusIndeterminate + closureComment = closureComment + fmt.Sprintf("\nexecution error: %s", execMetadata.ExecutionErr) + } + err = theHiveConnector.postCommentInCase(execMetadata.ExecutionId, closureComment) + if err != nil { + log.Warningf("could not post closure comment in case: %s", err) + } + + data := thehive_models.Case{ + Status: caseStatus, + Summary: "summary not implemented yet. Look at the tasks :)", + } + + body, err := theHiveConnector.sendRequest(method, url, data) + if err != nil { + return "", err + } + + return string(body), nil +} + +// TODO: revise this function thoroughly + +func (theHiveConnector *TheHiveConnector) UpdateStartStepTaskInCase(execMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) { + log.Trace(fmt.Sprintf("updating task in thehive. case ID %s. 
task started.", execMetadata.ExecutionId)) + taskId, err := theHiveConnector.ids_map.retrieveTaskId(execMetadata.ExecutionId, execMetadata.Step.ID) + if err != nil { + return "", err + } + + url := theHiveConnector.baseUrl + "/task/" + taskId + method := "PATCH" + + fullyAuto := true + for _, command := range execMetadata.Step.Commands { + if command.Type == cacao.CommandTypeManual { + fullyAuto = false + } + } + + // Must identify a valid user in The Hive. Cannot be a custom string + taskAssignee := "soarca@soarca.eu" + if fullyAuto { + taskAssignee = "soarca@soarca.eu" + } + task := thehive_models.Task{ + Status: thehive_models.TheHiveStatusInProgress, + Assignee: taskAssignee, + } + + body, err := theHiveConnector.sendRequest(method, url, task) + if err != nil { + return "", err + } + + executionStartMessage := fmt.Sprintf( + "START\nexecution ID\t\t[ %s ]\nstep ID\t\t[ %s ]\nstarted at\t\t[ %s ]", + execMetadata.ExecutionId, execMetadata.Step.ID, at.String()) + + err = theHiveConnector.postCommentInTaskLog(execMetadata.ExecutionId, execMetadata.Step, executionStartMessage) + if err != nil { + log.Warningf("could not post message to task: %s", err) + } + + err = theHiveConnector.postStepDataAsCommentInTaskLog(execMetadata.ExecutionId, execMetadata.Step, "step data") + if err != nil { + log.Warningf("could not report step data in step task log: %s", err) + } + + return theHiveConnector.getIdFromRespBody(body) +} + +func (theHiveConnector *TheHiveConnector) UpdateEndStepTaskInCase(execMetadata thehive_models.ExecutionMetadata, at time.Time) (string, error) { + log.Trace(fmt.Sprintf("updating task in thehive. case ID %s. 
task ended.", execMetadata.ExecutionId)) + taskId, err := theHiveConnector.ids_map.retrieveTaskId(execMetadata.ExecutionId, execMetadata.Step.ID) + if err != nil { + return "", err + } + + url := theHiveConnector.baseUrl + "/task/" + taskId + method := "PATCH" + + err = theHiveConnector.postStepVariablesAsCommentInTaskLog(execMetadata.ExecutionId, execMetadata.Step, "returned variables") + if err != nil { + log.Warningf("could not report variables in step task log: %s", err) + } + + taskStatus := thehive_models.TheHiveStatusCompleted + executionEndMessage := fmt.Sprintf( + "END\nexecution ID\t\t[ %s ]\nstep ID\t\t[ %s ]\nended at\t\t[ %s ]", + execMetadata.ExecutionId, execMetadata.Step.ID, at.String()) + + if execMetadata.ExecutionErr != nil { + taskStatus = thehive_models.TheHiveStatusCancelled + executionEndMessage = executionEndMessage + fmt.Sprintf("\nexecution error: %s", execMetadata.ExecutionErr) + } + err = theHiveConnector.postCommentInTaskLog(execMetadata.ExecutionId, execMetadata.Step, executionEndMessage) + if err != nil { + log.Warningf("could not post message to task: %s", err) + } + + task := thehive_models.Task{ + Status: taskStatus, + } + + body, err := theHiveConnector.sendRequest(method, url, task) + if err != nil { + return "", err + } + + return string(body), nil +} + +// ############################### HTTP interaction + +func (theHiveConnector *TheHiveConnector) sendRequest(method string, url string, body interface{}) ([]byte, error) { + log.Trace(fmt.Sprintf("sending request: %s %s", method, url)) + + req, err := theHiveConnector.prepareRequest(method, url, body) + if err != nil { + return nil, err + } + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + respbody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + log.Debug(fmt.Sprintf("response body: %s", respbody)) + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, 
fmt.Errorf("received non-2xx status code: %d\nURL: %s: %s", resp.StatusCode, url, respbody) + } + + return respbody, nil +} + +func (theHiveConnector *TheHiveConnector) prepareRequest(method string, url string, body interface{}) (*http.Request, error) { + url = thehive_utils.CleanUrlString(url) + + requestBody, err := thehive_utils.MarhsalRequestBody(body) + if err != nil { + return nil, err + } + + req, err := http.NewRequest(method, url, requestBody) + if err != nil { + return nil, err + } + + req.Header.Add("Authorization", "Bearer "+theHiveConnector.apiKey) + req.Header.Add("Content-Type", "application/json") + + return req, nil +} + +func (theHiveConnector *TheHiveConnector) Hello() string { + + url := theHiveConnector.baseUrl + "/user/current" + + body, err := theHiveConnector.sendRequest("GET", url, nil) + if err != nil { + return "error" + } + + return (string(body)) +} + +func (theHiveConnector *TheHiveConnector) getIdFromRespBody(body []byte) (string, error) { + + if len(body) == 0 { + return "", nil + } + + id, err := thehive_utils.GetIdFromArrayBody(body) + if err == nil { + return id, err + } + + id, err = thehive_utils.GetIdFromObjectBody(body) + if err == nil { + return id, err + } + + log.Debug(fmt.Sprintf("body: %s", string(body))) + return "", errors.New("failed to get ID from response body") + +} diff --git a/internal/reporter/downstream_reporter/thehive/thehive_models/models.go b/internal/reporter/downstream_reporter/thehive/thehive_models/models.go new file mode 100644 index 00000000..9e864651 --- /dev/null +++ b/internal/reporter/downstream_reporter/thehive/thehive_models/models.go @@ -0,0 +1,123 @@ +package thehive_models + +import "soarca/models/cacao" + +const ( + TheHiveStatusInProgress = "InProgress" + TheHiveStatusCompleted = "Completed" + TheHiveStatusWaiting = "Waiting" + TheHiveStatusCancelled = "Cancel" + + TheHiveCaseStatusTruePositive = "TruePositive" + TheHiveCaseStatusComplete = "Complete" + TheHiveCaseStatusClosed = "Closed" + 
TheHiveCaseStatusOther = "Other" + TheHiveCaseStatusIndeterminate = "Indeterminate" + TheHiveCaseImpactStatusLow = "Low" + + ObservableTypeOther = "other" +) + +type Task struct { + Title string `bson:"title,omitempty" json:"title,omitempty" validate:"required" example:"Task 1"` + Group string `bson:"group,omitempty" json:"group,omitempty" example:"Group 1"` + Description string `bson:"description,omitempty" json:"description,omitempty" example:"Description of task 1"` + Status string `bson:"status,omitempty" json:"status,omitempty" example:"Open"` + Flag bool `bson:"flag,omitempty" json:"flag,omitempty" example:"true"` + StartDate int64 `bson:"startDate,omitempty" json:"startDate,omitempty" example:"1640000000000"` + EndDate int64 `bson:"endDate,omitempty" json:"endDate,omitempty" example:"1640000000000"` + Order int `bson:"order,omitempty" json:"order,omitempty" example:"1"` + DueDate int64 `bson:"dueDate,omitempty" json:"dueDate,omitempty" example:"1640000000000"` + Assignee string `bson:"assignee,omitempty" json:"assignee,omitempty" example:"Jane Doe"` + Mandatory bool `bson:"mandatory,omitempty" json:"mandatory,omitempty" example:"true"` +} + +type Page struct { + Title string `bson:"title" json:"title" example:"Page 1"` + Content string `bson:"content" json:"content" example:"Content of page 1"` + Order int `bson:"order" json:"order" example:"1"` + Category string `bson:"category" json:"category" example:"Category 1"` +} + +type SharingParameter struct { + Organisation string `bson:"organisation" json:"organisation" example:"~354"` + Share bool `bson:"share" json:"share" example:"true"` + Profile string `bson:"profile" json:"profile" example:"analyst"` + TaskRule string `bson:"taskRule" json:"taskRule" example:"Sharing rule applied on the case"` + ObservableRule string `bson:"observableRule" json:"observableRule" example:"Sharing rule applied on the case"` +} + +type CustomField struct { + Name string `bson:"name" json:"name" validate:"required"` + 
DisplayName string `bson:"displayName,omitempty" json:"displayName,omitempty"` + Group string `bson:"group" json:"group" validate:"required"` + Description string `bson:"description" json:"description" validate:"required"` + Type string `bson:"type" json:"type" validate:"required"` + Value string `bson:"value" json:"value"` + Order int `bson:"order,omitempty" json:"order,omitempty"` + Mandatory bool `bson:"mandatory,omitempty" json:"mandatory,omitempty"` + Options []interface{} `bson:"options,omitempty" json:"options,omitempty"` +} + +type Observable struct { + DataType string `bson:"dataType,omitempty" json:"dataType,omitempty" validate:"required,min=1,max=64"` + Data []string `bson:"data,omitempty" json:"data,omitempty" validate:"max=4096,dive,min=1,max=4096"` + Message string `bson:"message,omitempty" json:"message,omitempty" validate:"max=1048576"` + StartDate int64 `bson:"startDate,omitempty" json:"startDate,omitempty"` + Attachment []string `bson:"attachment,omitempty" json:"attachment,omitempty"` + Name string `bson:"name" json:"name" validate:"required,min=1,max=128"` + ContentType string `bson:"contentType" json:"contentType" validate:"required,min=1,max=128"` + TLP int `bson:"tlp,omitempty" json:"tlp,omitempty" validate:"min=0,max=4"` + PAP int `bson:"pap,omitempty" json:"pap,omitempty" validate:"min=0,max=3"` + Tags []string `bson:"tags,omitempty" json:"tags,omitempty" validate:"max=128,dive,min=1,max=128"` + IOC bool `bson:"ioc,omitempty" json:"ioc,omitempty"` + Sighted bool `bson:"sighted,omitempty" json:"sighted,omitempty"` + SightedAt int64 `bson:"sightedAt,omitempty" json:"sightedAt,omitempty"` + IgnoreSimilarity bool `bson:"ignoreSimilarity,omitempty" json:"ignoreSimilarity,omitempty"` + IsZip bool `bson:"isZip,omitempty" json:"isZip,omitempty"` + ZipPassword string `bson:"zipPassword,omitempty" json:"zipPassword,omitempty" validate:"max=512"` + AddTags []string `bson:"addTags,omitempty" json:"addTags,omitempty" 
validate:"max=128,dive,min=1,max=128"` + RemoveTags []string `bson:"removeTags,omitempty" json:"removeTags,omitempty" validate:"max=128,dive,min=1,max=128"` +} + +type MessagePost struct { + Message string `bson:"message" json:"message" validate:"required,min=1,max=1048576"` +} + +type TaskLog struct { + Message string `bson:"message" json:"message" validate:"required,min=1,max=1048576"` + StartDate int `bson:"startDate,omitempty" json:"startDate,omitempty" example:"1640000000000"` + IncludeIntTimeline int `bson:"includeIntTimeline,omitempty" json:"includeIntTimeline,omitempty" example:"1640000000000"` + Attachments []string `bson:"attachments,omitempty" json:"attachments,omitempty"` +} + +type Case struct { + Title string `bson:"title,omitempty" json:"title,omitempty" validate:"required,min=1,max=512" example:"Example Case"` + Description string `bson:"description,omitempty" json:"description,omitempty" validate:"required,max=1048576"` + Severity int `bson:"severity,omitempty" json:"severity,omitempty" validate:"min=1,max=4" example:"2"` + StartDate int `bson:"startDate,omitempty" json:"startDate,omitempty" example:"1640000000000"` + EndDate int `bson:"endDate,omitempty" json:"endDate,omitempty" example:"1640000000000"` + Tags []string `bson:"tags,omitempty" json:"tags,omitempty" validate:"max=128,dive,min=1,max=128" example:"[\"example\", \"test\"]"` + Flag bool `bson:"flag,omitempty" json:"flag,omitempty" example:"false"` + TLP int `bson:"tlp,omitempty" json:"tlp,omitempty" validate:"min=0,max=4" example:"2"` + PAP int `bson:"pap,omitempty" json:"pap,omitempty" validate:"min=0,max=3" example:"2"` + Status string `bson:"status,omitempty" json:"status,omitempty" validate:"min=1,max=64" example:"New"` + Summary string `bson:"summary,omitempty" json:"summary,omitempty" validate:"max=1048576" example:"Summary of the case"` + ImpactStatus string `bson:"impactStatus,omitempty" json:"impactStatus,omitempty" validate:"max=128"` + Assignee string 
`bson:"assignee,omitempty" json:"assignee,omitempty" validate:"max=128" example:"John Doe"` + CustomFields []CustomField `bson:"customFields,omitempty" json:"customFields,omitempty" example:"{\"property1\":null,\"property2\":null}"` + CaseTemplate string `bson:"caseTemplate,omitempty" json:"caseTemplate,omitempty" validate:"max=128" example:"Template1"` + Tasks []Task `bson:"tasks,omitempty" json:"tasks,omitempty"` + Pages []Page `bson:"pages,omitempty" json:"pages,omitempty"` + SharingParameters []SharingParameter `bson:"sharingParameters,omitempty" json:"sharingParameters,omitempty"` + TaskRule string `bson:"taskRule,omitempty" json:"taskRule,omitempty" validate:"max=128" example:"Task rule"` + ObservableRule string `bson:"observableRule,omitempty" json:"observableRule,omitempty" validate:"max=128" example:"Observable rule"` +} + +type ExecutionMetadata struct { + ExecutionId string + Playbook cacao.Playbook + Step cacao.Step + Variables cacao.Variables + ExecutionErr error +} diff --git a/internal/reporter/downstream_reporter/thehive/thehive_reporter.go b/internal/reporter/downstream_reporter/thehive/thehive_reporter.go new file mode 100644 index 00000000..d801a78a --- /dev/null +++ b/internal/reporter/downstream_reporter/thehive/thehive_reporter.go @@ -0,0 +1,76 @@ +package thehive + +import ( + "soarca/internal/reporter/downstream_reporter/thehive/thehive_models" + "soarca/models/cacao" + "time" + + "github.com/google/uuid" +) + +type TheHiveReporter struct { + connector ITheHiveConnector +} + +func NewReporter(connector ITheHiveConnector) *TheHiveReporter { + return &TheHiveReporter{connector: connector} +} + +func (theHiveReporter *TheHiveReporter) ConnectorTest() string { + return theHiveReporter.connector.Hello() +} + +// Creates a new *case* in The Hive with related triggering metadata +func (theHiveReporter *TheHiveReporter) ReportWorkflowStart(executionId uuid.UUID, playbook cacao.Playbook, at time.Time) error { + log.Trace("TheHive reporter reporting 
workflow start") + _, err := theHiveReporter.connector.PostNewExecutionCase( + thehive_models.ExecutionMetadata{ + ExecutionId: executionId.String(), + Playbook: playbook, + }, + at, + ) + return err +} + +// Marks case closure according to workflow execution. Also reports all variables, and data +func (theHiveReporter *TheHiveReporter) ReportWorkflowEnd(executionId uuid.UUID, playbook cacao.Playbook, workflowErr error, at time.Time) error { + log.Trace("TheHive reporter reporting workflow end") + _, err := theHiveReporter.connector.UpdateEndExecutionCase( + thehive_models.ExecutionMetadata{ + ExecutionId: executionId.String(), + Variables: playbook.PlaybookVariables, + ExecutionErr: workflowErr, + }, + at, + ) + return err +} + +// Adds *event* to case +func (theHiveReporter *TheHiveReporter) ReportStepStart(executionId uuid.UUID, step cacao.Step, stepResults cacao.Variables, at time.Time) error { + log.Trace("TheHive reporter reporting step start") + _, err := theHiveReporter.connector.UpdateStartStepTaskInCase( + thehive_models.ExecutionMetadata{ + ExecutionId: executionId.String(), + Step: step, + }, + at, + ) + return err +} + +// Populates event with step execution information +func (theHiveReporter *TheHiveReporter) ReportStepEnd(executionId uuid.UUID, step cacao.Step, stepResults cacao.Variables, stepErr error, at time.Time) error { + log.Trace("TheHive reporter reporting step end") + _, err := theHiveReporter.connector.UpdateEndStepTaskInCase( + thehive_models.ExecutionMetadata{ + ExecutionId: executionId.String(), + Step: step, + Variables: stepResults, + ExecutionErr: stepErr, + }, + at, + ) + return err +} diff --git a/internal/reporter/downstream_reporter/thehive/thehive_utils/utils.go b/internal/reporter/downstream_reporter/thehive/thehive_utils/utils.go new file mode 100644 index 00000000..761d8aaa --- /dev/null +++ b/internal/reporter/downstream_reporter/thehive/thehive_utils/utils.go @@ -0,0 +1,101 @@ +package thehive_utils + +import ( + "bytes" + 
"encoding/json" + "errors" + "fmt" + "io" + "reflect" + "soarca/logger" + "strings" +) + +type Empty struct{} + +var component = reflect.TypeOf(Empty{}).PkgPath() +var log *logger.Log + +func init() { + log = logger.Logger(component, logger.Info, "", logger.Json) +} + +// ############################### Utils + +func CleanUrlString(url string) string { + // Replace double slashes in the URL after http(s):// + parts := strings.SplitN(url, "//", 2) + cleanedPath := strings.ReplaceAll(parts[1], "//", "/") + url = parts[0] + "//" + cleanedPath + return url +} + +func MarhsalRequestBody(body interface{}) (io.Reader, error) { + var requestBody io.Reader + if body != nil { + jsonData, err := json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("error marshalling JSON: %v", err) + } + requestBody = bytes.NewBuffer(jsonData) + } + log.Debug(fmt.Sprintf("request body: %s", requestBody)) + return requestBody, nil +} + +func GetIdFromArrayBody(body []byte) (string, error) { + // Try to unmarshal as a slice of maps + var respArray []map[string]interface{} + err := json.Unmarshal(body, &respArray) + if err != nil { + return "", err + } + + if len(respArray) == 0 { + return "", errors.New("response array is empty") + } + + _id, ok := respArray[0]["_id"].(string) + if !ok { + return "", errors.New("type assertion for retrieving TheHive ID failed") + } + return _id, nil +} + +func GetIdFromObjectBody(body []byte) (string, error) { + // If unmarshalling as a slice fails, try to unmarshal as a single map + var respMap map[string]interface{} + err := json.Unmarshal(body, &respMap) + if err != nil { + return "", err + } + + _id, ok := respMap["_id"].(string) + if !ok { + return "", errors.New("type assertion for retrieving TheHive ID from map failed") + } + + return _id, nil +} + +func StructToMDJSON(v interface{}) (string, error) { + + indentedJSON, err := json.MarshalIndent(v, "", " ") + if err != nil { + return "", fmt.Errorf("error marshalling to JSON: %w", err) + } + + 
markdownContent := "```json\n" + string(indentedJSON) + "\n```" + return markdownContent, nil +} + +func StructToTxtJSON(v interface{}) (string, error) { + + indentedJSON, err := json.MarshalIndent(v, "", " ") + if err != nil { + return "", fmt.Errorf("error marshalling to JSON: %w", err) + } + + txtContent := string(indentedJSON) + "\n" + return txtContent, nil +} diff --git a/internal/reporter/reporter.go b/internal/reporter/reporter.go index dcabb3bf..0eb1bba6 100644 --- a/internal/reporter/reporter.go +++ b/internal/reporter/reporter.go @@ -5,6 +5,7 @@ import ( "fmt" "reflect" "strconv" + "sync" "time" downstreamReporter "soarca/internal/reporter/downstream_reporter" @@ -42,6 +43,8 @@ const MaxReporters int = 10 type Reporter struct { reporters []downstreamReporter.IDownStreamReporter maxReporters int + wg sync.WaitGroup + reportingch chan func() } func New(reporters []downstreamReporter.IDownStreamReporter) *Reporter { @@ -49,11 +52,28 @@ func New(reporters []downstreamReporter.IDownStreamReporter) *Reporter { instance := Reporter{ reporters: reporters, maxReporters: maxReporters, + reportingch: make(chan func(), 100), // Buffer size can be adjusted + wg: sync.WaitGroup{}, } + go instance.startReportingProcessor() return &instance } +func (reporter *Reporter) startReportingProcessor() { + for { + task, ok := <-reporter.reportingch + if !ok { + return + } + task() + } +} + func (reporter *Reporter) RegisterReporters(reporters []downstreamReporter.IDownStreamReporter) error { + if len(reporters) == 0 { + log.Warning("reporters list is empty. 
No action taken.") + return nil + } if (len(reporter.reporters) + len(reporters)) > reporter.maxReporters { log.Warning("reporter not registered, too many reporters") return errors.New("attempting to register too many reporters") @@ -64,56 +84,64 @@ func (reporter *Reporter) RegisterReporters(reporters []downstreamReporter.IDown // ######################## IWorkflowReporter interface -func (reporter *Reporter) reportWorkflowStart(executionId uuid.UUID, playbook cacao.Playbook, at time.Time) { - for _, rep := range reporter.reporters { - err := rep.ReportWorkflowStart(executionId, playbook, at) - if err != nil { - log.Warning(err) - } - } -} func (reporter *Reporter) ReportWorkflowStart(executionId uuid.UUID, playbook cacao.Playbook, at time.Time) { log.Trace(fmt.Sprintf("[execution: %s, playbook: %s] reporting workflow start", executionId, playbook.ID)) - go reporter.reportWorkflowStart(executionId, playbook, at) -} - -func (reporter *Reporter) reportWorkflowEnd(executionId uuid.UUID, playbook cacao.Playbook, workflowError error, at time.Time) { - for _, rep := range reporter.reporters { - err := rep.ReportWorkflowEnd(executionId, playbook, workflowError, at) - if err != nil { - log.Warning(err) + reporter.wg.Add(1) + reporter.reportingch <- func() { + defer reporter.wg.Done() + for _, downstreamRep := range reporter.reporters { + err := downstreamRep.ReportWorkflowStart(executionId, playbook, at) + if err != nil { + log.Trace("reportWorkflowStart error") + log.Warning(err) + } } } } + func (reporter *Reporter) ReportWorkflowEnd(executionId uuid.UUID, playbook cacao.Playbook, workflowError error, at time.Time) { log.Trace(fmt.Sprintf("[execution: %s, playbook: %s] reporting workflow end", executionId, playbook.ID)) - go reporter.reportWorkflowEnd(executionId, playbook, workflowError, at) + reporter.wg.Add(1) + reporter.reportingch <- func() { + defer reporter.wg.Done() + for _, downstreamRep := range reporter.reporters { + err := 
downstreamRep.ReportWorkflowEnd(executionId, playbook, workflowError, at) + if err != nil { + log.Trace("reportWorkflowEnd error") + log.Warning(err) + } + } + } } // ######################## IStepReporter interface -func (reporter *Reporter) reporStepStart(executionId uuid.UUID, step cacao.Step, returnVars cacao.Variables, at time.Time) { - for _, rep := range reporter.reporters { - err := rep.ReportStepStart(executionId, step, returnVars, at) - if err != nil { - log.Warning(err) - } - } -} func (reporter *Reporter) ReportStepStart(executionId uuid.UUID, step cacao.Step, returnVars cacao.Variables, at time.Time) { log.Trace(fmt.Sprintf("[execution: %s, step: %s] reporting step start", executionId, step.ID)) - go reporter.reporStepStart(executionId, step, returnVars, at) -} - -func (reporter *Reporter) reportStepEnd(executionId uuid.UUID, step cacao.Step, returnVars cacao.Variables, stepError error, at time.Time) { - for _, rep := range reporter.reporters { - err := rep.ReportStepEnd(executionId, step, returnVars, stepError, at) - if err != nil { - log.Warning(err) + reporter.wg.Add(1) + reporter.reportingch <- func() { + defer reporter.wg.Done() + for _, downstreamRep := range reporter.reporters { + err := downstreamRep.ReportStepStart(executionId, step, returnVars, at) + if err != nil { + log.Trace("reportStepStart error") + log.Warning(err) + } } } } + func (reporter *Reporter) ReportStepEnd(executionId uuid.UUID, step cacao.Step, returnVars cacao.Variables, stepError error, at time.Time) { log.Trace(fmt.Sprintf("[execution: %s, step: %s] reporting step end", executionId, step.ID)) - go reporter.reportStepEnd(executionId, step, returnVars, stepError, at) + reporter.wg.Add(1) + reporter.reportingch <- func() { + defer reporter.wg.Done() + for _, downstreamRep := range reporter.reporters { + err := downstreamRep.ReportStepEnd(executionId, step, returnVars, stepError, at) + if err != nil { + log.Trace("reportStepEnd error") + log.Warning(err) + } + } + } } diff 
--git a/test/manual/thehive_reporter/thehive_test.go b/test/manual/thehive_reporter/thehive_test.go new file mode 100644 index 00000000..f9d758bf --- /dev/null +++ b/test/manual/thehive_reporter/thehive_test.go @@ -0,0 +1,157 @@ +package thehive_test + +import ( + "bufio" + "fmt" + "os" + "soarca/internal/reporter/downstream_reporter/thehive" + "soarca/models/cacao" + "strings" + "testing" + "time" + + "github.com/google/uuid" +) + +// Microsoft Copilot provided code to get .env local file and extract variables values +func LoadEnv(envVar string) (string, error) { + file, err := os.Open(".env") + if err != nil { + return "", err + } + defer file.Close() + + scanner := bufio.NewScanner(file) + for scanner.Scan() { + line := scanner.Text() + if strings.HasPrefix(line, envVar+"=") { + parts := strings.SplitN(line, "=", 2) + if len(parts) == 2 { + return strings.Trim(parts[1], `"`), nil + } + } + } + + if err := scanner.Err(); err != nil { + return "", err + } + + return "", fmt.Errorf("variable %s not found", envVar) +} + +func TestTheHiveConnection(t *testing.T) { + thehive_api_tkn, err := LoadEnv("THEHIVE_TEST_API_TOKEN") + if err != nil { + t.Fail() + } + thehive_api_base_uri, err := LoadEnv("THEHIVE_TEST_API_BASE_URI") + if err != nil { + t.Fail() + } + thr := thehive.NewReporter(thehive.NewConnector(thehive_api_base_uri, thehive_api_tkn)) + str := thr.ConnectorTest() + fmt.Println(str) +} + +func TestTheHiveReporting(t *testing.T) { + thehive_api_tkn, err := LoadEnv("THEHIVE_TEST_API_TOKEN") + if err != nil { + t.Fail() + } + thehive_api_base_uri, err := LoadEnv("THEHIVE_TEST_API_BASE_URI") + if err != nil { + t.Fail() + } + thr := thehive.NewReporter(thehive.NewConnector(thehive_api_base_uri, thehive_api_tkn)) + + expectedCommand := cacao.Command{ + Type: "ssh", + Command: "ssh ls -la", + } + + expectedVariables := cacao.Variable{ + Type: "string", + Name: "var1", + Value: "testing", + } + playbookVariables := cacao.NewVariables( + cacao.Variable{ + Type: 
"string", + Name: "__playbook_var__", + Value: "testing!", + }, + ) + + step1 := cacao.Step{ + Type: "action", + ID: "action--test", + Name: "ssh-tests", + Description: "test step", + StepVariables: cacao.NewVariables(expectedVariables), + Commands: []cacao.Command{expectedCommand}, + Cases: map[string]string{}, + OnCompletion: "end--test", + Agent: "agent1", + Targets: []string{"target1"}, + } + + end := cacao.Step{ + Type: "end", + ID: "end--test", + Name: "end step", + } + + expectedAuth := cacao.AuthenticationInformation{ + Name: "user", + ID: "auth1", + } + + expectedTarget := cacao.AgentTarget{ + Name: "sometarget", + AuthInfoIdentifier: "auth1", + ID: "target1", + } + + expectedAgent := cacao.AgentTarget{ + Type: "soarca", + Name: "soarca-ssh", + } + + playbook := cacao.Playbook{ + ID: "test", + Type: "test", + Name: "ssh-test-playbook", + Description: "Playbook description", + WorkflowStart: step1.ID, + PlaybookVariables: playbookVariables, + AuthenticationInfoDefinitions: map[string]cacao.AuthenticationInformation{"id": expectedAuth}, + AgentDefinitions: map[string]cacao.AgentTarget{"agent1": expectedAgent}, + TargetDefinitions: map[string]cacao.AgentTarget{"target1": expectedTarget}, + + Workflow: map[string]cacao.Step{step1.ID: step1, end.ID: end}, + } + executionId0 := uuid.MustParse("6ba7b810-9dad-11d1-80b4-00c04fd430c0") + + err = thr.ReportWorkflowStart(executionId0, playbook, time.Now()) + if err != nil { + fmt.Println("failing at report workflow start") + fmt.Println(err) + t.Fail() + } + err = thr.ReportStepStart(executionId0, step1, cacao.NewVariables(expectedVariables), time.Now()) + if err != nil { + fmt.Println(err) + t.Fail() + } + err = thr.ReportStepEnd(executionId0, step1, cacao.NewVariables(expectedVariables), nil, time.Now()) + if err != nil { + fmt.Println(err) + t.Fail() + } + + err = thr.ReportWorkflowEnd(executionId0, playbook, nil, time.Now()) + if err != nil { + fmt.Println(err) + t.Fail() + } +}