diff --git a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec
index b719210ba4d37..4843031db82c6 100644
--- a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec
+++ b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec
@@ -276,10 +276,10 @@ exit 0
 %attr(750, %{name}, %{name}) %{product_dir}/performance-analyzer-rca/bin/*
 
 %changelog
-* Wed Feb 21 2025 support - 4.12.0
+* Wed Mar 26 2025 support - 4.12.0
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-12-0.html
-* Tue Jan 28 2025 support - 4.11.0
-- More info: https://documentation.wazuh.com/current/release-notes/release-4-10-1.html
+* Wed Feb 19 2025 support - 4.11.0
+- More info: https://documentation.wazuh.com/current/release-notes/release-4-11-0.html
 * Thu Jan 16 2025 support - 4.10.1
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-10-1.html
 * Wed Jan 08 2025 support - 4.10.0
diff --git a/integrations/amazon-security-lake/CONTRIBUTING.md b/integrations/amazon-security-lake/CONTRIBUTING.md
index 1d8132d814c73..e819fac2f6c7e 100644
--- a/integrations/amazon-security-lake/CONTRIBUTING.md
+++ b/integrations/amazon-security-lake/CONTRIBUTING.md
@@ -16,6 +16,12 @@ This Docker Compose project will bring up these services:
 - our [events generator](../tools/events-generator/README.md)
 - an AWS Lambda Python container.
 
+| Service       | Address                  | Credentials |
+| ------------- | ------------------------ | ----------- |
+| Wazuh Indexer | https://localhost:9200   | admin:admin |
+| Dashboards    | https://localhost:5601   | admin:admin |
+| S3 Ninja      | http://localhost:9444/ui |             |
+
 On the one hand, the event generator will push events constantly to the indexer, to the `wazuh-alerts-4.x-sample` index by default (refer to the [events generator](../tools/events-generator/README.md) documentation for customization options). On the other hand, Logstash will query for new data and deliver it to output configured in the pipeline `indexer-to-s3`. This pipeline delivers the data to an S3 bucket, from which the data is processed using a Lambda function, to finally be sent to the Amazon Security Lake bucket in Parquet format.
 
 The pipeline starts automatically, but if you need to start it manually, attach a terminal to the Logstash container and start the integration using the command below:
diff --git a/integrations/amazon-security-lake/Dockerfile b/integrations/amazon-security-lake/Dockerfile
index 2a5420e4bcfef..6670f31cee453 100644
--- a/integrations/amazon-security-lake/Dockerfile
+++ b/integrations/amazon-security-lake/Dockerfile
@@ -1,4 +1,4 @@
-# docker build --platform linux/amd64 --no-cache -f aws-lambda.dockerfile -t docker-image:test .
+# docker build --platform linux/amd64 --no-cache -f Dockerfile -t docker-image:test .
 # docker run --platform linux/amd64 -p 9000:8080 docker-image:test
 
 # FROM public.ecr.aws/lambda/python:3.9
diff --git a/integrations/amazon-security-lake/aws-lambda.dockerfile b/integrations/amazon-security-lake/aws-lambda.dockerfile
deleted file mode 100644
index 7039c2b935de8..0000000000000
--- a/integrations/amazon-security-lake/aws-lambda.dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# docker build --platform linux/amd64 --no-cache -f aws-lambda.dockerfile -t docker-image:test .
-# docker run --platform linux/amd64 -p 9000:8080 docker-image:test
-
-# FROM public.ecr.aws/lambda/python:3.9
-FROM amazon/aws-lambda-python:3.12
-
-# Copy requirements.txt
-COPY requirements.aws.txt ${LAMBDA_TASK_ROOT}
-
-# Install the specified packages
-RUN pip install -r requirements.aws.txt
-
-# Copy function code
-COPY src ${LAMBDA_TASK_ROOT}
-
-# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile)
-CMD [ "lambda_function.lambda_handler" ]
\ No newline at end of file
diff --git a/integrations/amazon-security-lake/logstash/pipeline/indexer-to-file.conf b/integrations/amazon-security-lake/logstash/pipeline/indexer-to-file.conf
deleted file mode 100644
index 1bee9afc62450..0000000000000
--- a/integrations/amazon-security-lake/logstash/pipeline/indexer-to-file.conf
+++ /dev/null
@@ -1,34 +0,0 @@
-input {
-  opensearch {
-    hosts => ["wazuh.indexer:9200"]
-    user => "${INDEXER_USERNAME}"
-    password => "${INDEXER_PASSWORD}"
-    ssl => true
-    ca_file => "/usr/share/logstash/root-ca.pem"
-    index => "wazuh-alerts-4.x-*"
-    query => '{
-      "query": {
-        "range": {
-          "@timestamp": {
-            "gt": "now-1m"
-          }
-        }
-      }
-    }'
-    schedule => "* * * * *"
-  }
-}
-
-
-output {
-  stdout {
-    id => "output.stdout"
-    codec => json_lines
-  }
-  file {
-    id => "output.file"
-    path => "/var/log/logstash/indexer-to-file-%{+YYYY-MM-dd-HH}.log"
-    file_mode => 0644
-    codec => json_lines
-  }
-}
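Note: the `docker build` / `docker run` comments kept at the top of the renamed `Dockerfile` run the function through the Lambda Runtime Interface Emulator that ships with the AWS Lambda base images, listening on the mapped port 9000. A minimal local smoke test could look like the sketch below; the `'{}'` payload is a hypothetical placeholder, since the handler presumably expects the S3 event notification produced by the `indexer-to-s3` pipeline:

```sh
# Build and run the image, as per the Dockerfile header comments.
docker build --platform linux/amd64 --no-cache -f Dockerfile -t docker-image:test .
docker run --platform linux/amd64 -p 9000:8080 docker-image:test

# In another terminal, invoke the handler through the Runtime Interface
# Emulator. Replace the empty '{}' payload with a real S3 event to
# exercise the full code path.
curl -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}'
```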