From d05004e75ae24cc2b1fdc66f9715900c63ec07b5 Mon Sep 17 00:00:00 2001 From: Carlo van Overbeek Date: Fri, 24 May 2024 13:49:44 +0200 Subject: [PATCH] feat gsn-10597: migrate to awsfindingsmanagerlib --- .../securityhub-suppressor/requirements.txt | 5 +- .../securityhub_events.py | 243 +++--------------- .../securityhub_streams.py | 67 ----- .../securityhub_trigger.py | 41 +++ .../securityhub-suppressor/suppressor.yml | 9 - .../securityhub-suppressor/yaml_parser.py | 34 --- jira.tf | 2 +- outputs.tf | 11 +- suppressor.tf | 129 +++++----- variables.tf | 23 +- 10 files changed, 161 insertions(+), 403 deletions(-) delete mode 100644 files/lambda-artifacts/securityhub-suppressor/securityhub_streams.py create mode 100644 files/lambda-artifacts/securityhub-suppressor/securityhub_trigger.py delete mode 100644 files/lambda-artifacts/securityhub-suppressor/suppressor.yml delete mode 100644 files/lambda-artifacts/securityhub-suppressor/yaml_parser.py diff --git a/files/lambda-artifacts/securityhub-suppressor/requirements.txt b/files/lambda-artifacts/securityhub-suppressor/requirements.txt index 1766cb5..2ef8a29 100644 --- a/files/lambda-artifacts/securityhub-suppressor/requirements.txt +++ b/files/lambda-artifacts/securityhub-suppressor/requirements.txt @@ -1,5 +1,2 @@ aws-lambda-powertools -pylint-gitlab -pyyaml -jmespath -yamllint +awsfindingsmanagerlib diff --git a/files/lambda-artifacts/securityhub-suppressor/securityhub_events.py b/files/lambda-artifacts/securityhub-suppressor/securityhub_events.py index 6479668..370893e 100644 --- a/files/lambda-artifacts/securityhub-suppressor/securityhub_events.py +++ b/files/lambda-artifacts/securityhub-suppressor/securityhub_events.py @@ -1,218 +1,41 @@ -import os -from dataclasses import dataclass -from datetime import datetime -from re import search -from typing import Any -from typing import Dict -from typing import Optional -from typing import Tuple -from typing import Union +from os import environ import boto3 -import jmespath +import yaml from aws_lambda_powertools import Logger -from aws_lambda_powertools.utilities.data_classes import EventBridgeEvent -from aws_lambda_powertools.utilities.typing import LambdaContext +from awsfindingsmanagerlib.awsfindingsmanagerlib import FindingsManager +from awsfindingsmanagerlib.backends import Backend -from yaml_parser import get_file_contents +LOGGER = Logger() +S3_BUCKET_NAME = environ.get("S3_BUCKET_NAME") +S3_OBJECT_NAME = environ.get("S3_OBJECT_NAME") -logger = Logger() -VALID_STATUSES = ['FAILED', 'HIGH', 'WARNING'] -DYNAMODB_TABLE_NAME = os.environ['DYNAMODB_TABLE_NAME'] -YAML_CONFIGURATION_FILE = 'suppressor.yml' -SUPPRESSED_FINDINGS = [] - -@dataclass -class Finding: - finding_id: str - product_arn: str - product_name: str - - -@dataclass -class SuppressionRule: - action: str - rules: [str] - notes: str - dry_run: Optional[bool] - - -@dataclass -class SuppressionEntry: - control_id: str - data: [SuppressionRule] - - -class SuppressionList: - def __init__(self, boto_client, hash_key) -> None: - self._entries = [] - self.hash_key = hash_key - self.boto_client = boto_client - self.table = self.data_source - - @property - def data_source(self): - dynamodb = self.boto_client.resource('dynamodb') - return dynamodb.Table(name=DYNAMODB_TABLE_NAME) - - @property - def entries(self) -> list: - if not self.hash_key: - logger.info(f'Invalid hash key: {self.hash_key}') - return self._entries - if not self._entries: - logger.info(f'Fetching suppression list from dynamoDB {DYNAMODB_TABLE_NAME}, hash key: 
{self.hash_key}') - rules = self.table.get_item(Key={"controlId": self.hash_key}) - for rule in rules.get('Item', {}).get('data', {}): - self._entries.append( - SuppressionRule(action=rule.get('action'), - rules=rule.get('rules'), - notes=rule.get('notes'), - dry_run=rule.get('dry_run', False)) - ) - return self._entries - - -class Suppressor: - def __init__(self, boto_client, - finding: Finding, - resource_id: str, - suppression_list: SuppressionList) -> None: - self.boto_client = boto_client - self._finding = finding - self._security_hub = boto_client.client('securityhub') - self.resource_id = resource_id - self.suppression_list = suppression_list - self._suppression_rule = None - self.matched_rule = None - SUPPRESSED_FINDINGS.clear() - - @property - def finding(self) -> Finding: - return self._finding - - @property - def rule(self) -> SuppressionRule: - if not self._suppression_rule: - self._suppression_rule = self.evaluate_rule() - return self._suppression_rule - - @staticmethod - def validate(finding_event: Dict[str, Any]) -> Union[bool, Finding]: - product_arn = finding_event.get('ProductArn', '') - if not product_arn: - raise ValueError('Error: no product_arn found') - finding_id = finding_event.get('Id', '') - if not finding_id: - raise ValueError('Error: no finding_id found') - product_details = finding_event.get('ProductFields', {}) - if not product_details: - raise ValueError('Error: no product fields found') - product_name = product_details.get('aws/securityhub/ProductName', '') - if not product_name: - raise ValueError('Error: no product name found') - return Finding(product_arn=product_arn, finding_id=finding_id, product_name=product_name) +class S3(Backend): + def __init__(self, bucket_name, file_name): + self._file_contents = self._get_file_contents(bucket_name, file_name) @staticmethod - def get_product_details(finding_event: Dict[str, Any], product_name: str) -> Tuple[None, None]: - key, status = None, None - yaml_config = get_file_contents(YAML_CONFIGURATION_FILE) - if not yaml_config.get(product_name): - logger.warning(f'No YAML configuration for product {product_name}') - return key, status - key = jmespath.search(yaml_config.get(product_name, {}).get('key'), finding_event) - status = jmespath.search(yaml_config.get(product_name, {}).get('status'), finding_event) - return key, status - - def evaluate_rule(self) -> Optional[SuppressionRule]: - for entry in self.suppression_list.entries: - match = next((rule for rule in entry.rules if search(rule, self.resource_id)), None) - if match: - self.matched_rule = match - return entry - return None - - def suppress_finding(self) -> bool: - if not self.rule: - logger.info(f'Skipping finding because {self.resource_id} is not in the suppression list') - return False - if not self.rule.notes: - logger.error('Error: a valid notes must be added to the dynamoDB entry') - return False - if self.rule.dry_run: - action_output = 'DRY RUN - Would' - else: - action_output = 'Will' - - logger.info(f'{action_output} perform Suppression on finding {self.finding.finding_id}, ' - f'matched rule: {self.matched_rule}, ' - f'action: {self.rule.action}') - SUPPRESSED_FINDINGS.append(self.finding.finding_id) - now = datetime.now() - - if self.rule.dry_run: - return True - - return self._security_hub.batch_update_findings(FindingIdentifiers=[ - { - 'Id': self.finding.finding_id, - 'ProductArn': self.finding.product_arn - }], - Workflow={'Status': self.rule.action}, - Note={'Text': f'{self.rule.notes} - ' - f'Suppressed by the Security Hub Suppressor at 
{now.strftime("%Y-%m-%d %H:%M:%S")}', - 'UpdatedBy': 'landingzone'}) - - -def validate_event(event: EventBridgeEvent): - for event_entries in event.detail.get('findings', []): - finding = Suppressor.validate(event_entries) - hash_key, status = Suppressor.get_product_details(event_entries, finding.product_name) - if status not in VALID_STATUSES: - raise ValueError(f'Skipping execution because status is {status}. Valid statuses: {VALID_STATUSES}') - if not hash_key: - raise ValueError(f'Error: no hash_key found for product {finding.product_name}') - workflow_status = event_entries.get('Workflow', {}).get('Status', {}) - if workflow_status == "SUPPRESSED": - raise ValueError(f'Skipping execution because workflow status is {workflow_status}') - return True - - -def _parse_fields(event): - finding, resource_id, hash_key = None, None, None - for event_entries in event.get('detail').get('findings', []): - finding = Suppressor.validate(event_entries) - hash_key, status = Suppressor.get_product_details(event_entries, finding.product_name) - resource_id = [resource.get('Id') for resource in event_entries.get('Resources', [])].pop() - return finding, resource_id, hash_key - - -def suppress(event): - finding, resource_id, hash_key = _parse_fields(event) - suppression_list = get_suppression_list(hash_key) - return Suppressor(boto_client=boto3, - finding=finding, - resource_id=resource_id, - suppression_list=suppression_list).suppress_finding() - - -def get_suppression_list(hash_key) -> SuppressionList: - suppression_list = SuppressionList(hash_key=hash_key, boto_client=boto3) - if not suppression_list.entries: - logger.error(f'Could not find any rules for control {hash_key}') - return suppression_list - - -@logger.inject_lambda_context(log_event=True) -def lambda_handler(event: Dict[str, Any], context: LambdaContext): - event: EventBridgeEvent = EventBridgeEvent(event) - validate_event(event) - if suppress(event): - logger.info(f'Total findings processed: {len(SUPPRESSED_FINDINGS)}') - return { - 'finding_state': 'suppressed' - } - return { - 'finding_state': 'skipped' - } + def _get_file_contents(bucket_name, file_name): + s3 = boto3.resource("s3") + return s3.Object(bucket_name, file_name).get()["Body"].read() + + def _get_rules(self): + data = yaml.safe_load(self._file_contents) + return data.get("Rules") + + +@LOGGER.inject_lambda_context(log_event=True) +def lambda_handler(event, context): + s3_backend = S3(S3_BUCKET_NAME, S3_OBJECT_NAME) + rules = s3_backend.get_rules() + LOGGER.info(rules) + findings_manager = FindingsManager() + findings_manager.register_rules(rules) + if findings_manager.suppress_matching_findings(): + LOGGER.info("Successfully applied all suppression rules.") + return True + else: + raise RuntimeError( + "No explicit error was raised, but not all suppression rules were applied successfully, please investigate." 
+ ) diff --git a/files/lambda-artifacts/securityhub-suppressor/securityhub_streams.py b/files/lambda-artifacts/securityhub-suppressor/securityhub_streams.py deleted file mode 100644 index 7bd3a70..0000000 --- a/files/lambda-artifacts/securityhub-suppressor/securityhub_streams.py +++ /dev/null @@ -1,67 +0,0 @@ -import itertools -from typing import Any -from typing import Dict - -import boto3 -from aws_lambda_powertools import Logger -from aws_lambda_powertools.utilities.data_classes import DynamoDBStreamEvent -from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecordEventName - -from securityhub_events import suppress -from securityhub_events import SUPPRESSED_FINDINGS - -logger = Logger() -security_hub = boto3.client('securityhub') -paginator = security_hub.get_paginator('get_findings') - - -def get_findings(control_value: str) -> Dict[str, list]: - findings = paginator.paginate(Filters={'ProductFields': [ - { - 'Key': 'RuleId', - 'Value': control_value, - 'Comparison': 'EQUALS' - }, - { - 'Key': 'ControlId', - 'Value': control_value, - 'Comparison': 'EQUALS' - } - ], - 'ComplianceStatus': [ - { - 'Value': 'FAILED', - 'Comparison': 'EQUALS' - }, - { - 'Value': 'WARNING', - 'Comparison': 'EQUALS' - } - ], - }) - return {'findings': list(itertools.chain.from_iterable([finding.get('Findings') for finding in findings]))} - - -def process_findings(findings_list): - for finding in findings_list: - try: - suppress({'detail': {'findings': [finding]}}) - except ValueError: - continue - return len(SUPPRESSED_FINDINGS) - - -@logger.inject_lambda_context(log_event=True) -def lambda_handler(event: Dict[str, Any], context): - total_findings = 0 - event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) - if event.records: - for record in event.records: - if record.event_name != DynamoDBRecordEventName.REMOVE: - control_id = record.dynamodb.keys.get('controlId', {}) - findings_list = get_findings(control_id) - if len(findings_list.get('findings')) == 0: - logger.warning(f'Could not find any findings with controlId {control_id}') - continue - total_findings = process_findings(findings_list.get('findings')) - logger.info(f'Total findings processed: {total_findings}') diff --git a/files/lambda-artifacts/securityhub-suppressor/securityhub_trigger.py b/files/lambda-artifacts/securityhub-suppressor/securityhub_trigger.py new file mode 100644 index 0000000..370893e --- /dev/null +++ b/files/lambda-artifacts/securityhub-suppressor/securityhub_trigger.py @@ -0,0 +1,41 @@ +from os import environ + +import boto3 +import yaml +from aws_lambda_powertools import Logger +from awsfindingsmanagerlib.awsfindingsmanagerlib import FindingsManager +from awsfindingsmanagerlib.backends import Backend + +LOGGER = Logger() +S3_BUCKET_NAME = environ.get("S3_BUCKET_NAME") +S3_OBJECT_NAME = environ.get("S3_OBJECT_NAME") + + +class S3(Backend): + def __init__(self, bucket_name, file_name): + self._file_contents = self._get_file_contents(bucket_name, file_name) + + @staticmethod + def _get_file_contents(bucket_name, file_name): + s3 = boto3.resource("s3") + return s3.Object(bucket_name, file_name).get()["Body"].read() + + def _get_rules(self): + data = yaml.safe_load(self._file_contents) + return data.get("Rules") + + +@LOGGER.inject_lambda_context(log_event=True) +def lambda_handler(event, context): + s3_backend = S3(S3_BUCKET_NAME, S3_OBJECT_NAME) + rules = s3_backend.get_rules() + LOGGER.info(rules) + findings_manager = FindingsManager() + findings_manager.register_rules(rules) + if 
findings_manager.suppress_matching_findings(): + LOGGER.info("Successfully applied all suppression rules.") + return True + else: + raise RuntimeError( + "No explicit error was raised, but not all suppression rules were applied successfully, please investigate." + ) diff --git a/files/lambda-artifacts/securityhub-suppressor/suppressor.yml b/files/lambda-artifacts/securityhub-suppressor/suppressor.yml deleted file mode 100644 index 12f1977..0000000 --- a/files/lambda-artifacts/securityhub-suppressor/suppressor.yml +++ /dev/null @@ -1,9 +0,0 @@ -Inspector: - key: ProductFields."attributes/BENCHMARK_RULE_ID" - status: FindingProviderFields.Severity.Label -Firewall Manager: - key: ProductFields."aws/securityhub/ProductName" - status: Compliance.Status -Security Hub: - key: ProductFields.ControlId || ProductFields.RuleId - status: Compliance.Status diff --git a/files/lambda-artifacts/securityhub-suppressor/yaml_parser.py b/files/lambda-artifacts/securityhub-suppressor/yaml_parser.py deleted file mode 100644 index ed33738..0000000 --- a/files/lambda-artifacts/securityhub-suppressor/yaml_parser.py +++ /dev/null @@ -1,34 +0,0 @@ -from ctypes import Union -from parser import ParserError - -import yaml -from aws_lambda_powertools import Logger -from yamllint import linter -from yamllint.config import YamlLintConfig - -logger = Logger() - - -def get_file_contents(file_name) -> any: - try: - file_contents = yaml.load(open(file_name, 'r').read().strip(), Loader=yaml.FullLoader) - except IOError: - logger.error(f'Unable to read {file_name}') - return None - except (ValueError, ParserError) as error: - logger.error(f'Unable to parse file {file_name} as yaml, error: {error}') - return None - return file_contents - - -def run_yaml_lint(file_name) -> bool: - conf = YamlLintConfig('extends: default') - yaml_linting_result = linter.run(open(file_name), conf) - success = True - for line in yaml_linting_result: - if line.level == 'warning': - print(f'\tWARNING: {line}') - if line.level == 'error': - print(f'\tERROR: {line}') - success = False - return success diff --git a/jira.tf b/jira.tf index 24c587d..c522660 100644 --- a/jira.tf +++ b/jira.tf @@ -108,7 +108,7 @@ module "lambda_jira_security_hub" { memory_size = var.jira_integration.lambda_settings.memory_size role_arn = module.lambda_jira_security_hub_role[0].arn runtime = var.jira_integration.lambda_settings.runtime - s3_bucket = var.s3_bucket_name + s3_bucket = var.artifact_s3_bucket_name s3_key = module.lambda_jira_deployment_package[0].s3_object.key s3_object_version = module.lambda_jira_deployment_package[0].s3_object.version_id security_group_egress_rules = var.jira_integration.security_group_egress_rules diff --git a/outputs.tf b/outputs.tf index 9278115..07371b8 100644 --- a/outputs.tf +++ b/outputs.tf @@ -1,8 +1,3 @@ -output "dynamodb_arn" { - value = aws_dynamodb_table.suppressor_dynamodb_table.arn - description = "ARN of the DynamoDB table" -} - output "lambda_jira_security_hub_sg_id" { value = length(module.lambda_jira_security_hub) > 0 ? module.lambda_jira_security_hub[*].security_group_id : null description = "This will output the security group id attached to the jira_security_hub Lambda. This can be used to tune ingress and egress rules." @@ -13,7 +8,7 @@ output "lambda_securityhub_events_suppressor_sg_id" { description = "This will output the security group id attached to the securityhub_events_suppressor Lambda. This can be used to tune ingress and egress rules." 
} -output "lambda_securityhub_streams_suppressor_sg_id" { - value = module.lambda_securityhub_streams_suppressor.security_group_id - description = "This will output the security group id attached to the securityhub_streams_suppressor Lambda. This can be used to tune ingress and egress rules." +output "lambda_securityhub_trigger_suppressor_sg_id" { + value = module.lambda_securityhub_trigger_suppressor.security_group_id + description = "This will output the security group id attached to the securityhub_trigger_suppressor Lambda. This can be used to tune ingress and egress rules." } diff --git a/suppressor.tf b/suppressor.tf index e0a676c..54f9318 100644 --- a/suppressor.tf +++ b/suppressor.tf @@ -1,27 +1,34 @@ -# DynamoDB table for storing suppressions list -resource "aws_dynamodb_table" "suppressor_dynamodb_table" { - name = var.dynamodb_table - billing_mode = "PAY_PER_REQUEST" - deletion_protection_enabled = var.dynamodb_deletion_protection - hash_key = "controlId" - stream_enabled = true - stream_view_type = "KEYS_ONLY" - - attribute { - name = "controlId" - type = "S" - } +# S3 bucket for storing suppressions list +module "suppressions_bucket" { + #checkov:skip=CKV_AWS_145:Bug in CheckOV https://github.com/bridgecrewio/checkov/issues/3847 + #checkov:skip=CKV_AWS_19:Bug in CheckOV https://github.com/bridgecrewio/checkov/issues/3847 + source = "schubergphilis/mcaf-s3/aws" + version = "~> 0.11.0" - point_in_time_recovery { - enabled = true - } + name = var.suppressions_s3_bucket_name + kms_key_arn = var.kms_key_arn + logging = null + tags = var.tags + versioning = true - server_side_encryption { - enabled = true - kms_key_arn = var.kms_key_arn - } + lifecycle_rule = [ + { + id = "default" + enabled = true + + abort_incomplete_multipart_upload = { + days_after_initiation = 7 + } + + expiration = { + expired_object_delete_marker = true + } - tags = var.tags + noncurrent_version_expiration = { + noncurrent_days = 7 + } + } + ] } # S3 bucket to store Lambda artifacts @@ -31,7 +38,7 @@ module "lambda_artifacts_bucket" { source = "schubergphilis/mcaf-s3/aws" version = "~> 0.11.0" - name = var.s3_bucket_name + name = var.artifact_s3_bucket_name kms_key_arn = var.kms_key_arn logging = null tags = var.tags @@ -84,26 +91,15 @@ data "aws_iam_policy_document" "lambda_security_hub_suppressor" { } statement { - sid = "DynamoDBGetItemAccess" - actions = [ - "dynamodb:GetItem" - ] - resources = [ - aws_dynamodb_table.suppressor_dynamodb_table.arn - ] + sid = "S3GetObjectAccess" + actions = ["s3:GetObject"] + resources = ["${module.suppressions_bucket.arn}/*"] } statement { - sid = "DynamoDBStreamsAccess" - actions = [ - "dynamodb:DescribeStream", - "dynamodb:GetRecords", - "dynamodb:GetShardIterator", - "dynamodb:ListStreams" - ] - resources = [ - aws_dynamodb_table.suppressor_dynamodb_table.stream_arn - ] + sid = "EC2DescribeRegionsAccess" + actions = ["ec2:DescribeRegions"] + resources = ["*"] } statement { @@ -169,7 +165,7 @@ module "lambda_securityhub_events_suppressor" { memory_size = var.lambda_events_suppressor.memory_size role_arn = module.lambda_security_hub_suppressor_role.arn runtime = var.lambda_events_suppressor.runtime - s3_bucket = var.s3_bucket_name + s3_bucket = var.artifact_s3_bucket_name s3_key = module.lambda_suppressor_deployment_package.s3_object.key s3_object_version = module.lambda_suppressor_deployment_package.s3_object.version_id security_group_egress_rules = var.lambda_events_suppressor.security_group_egress_rules @@ -178,41 +174,43 @@ module 
"lambda_securityhub_events_suppressor" { timeout = var.lambda_events_suppressor.timeout environment = { - DYNAMODB_TABLE_NAME = var.dynamodb_table + S3_BUCKET_NAME = var.suppressions_s3_bucket_name + S3_OBJECT_NAME = var.suppressions_s3_object_name LOG_LEVEL = var.lambda_events_suppressor.log_level POWERTOOLS_LOGGER_LOG_EVENT = "false" POWERTOOLS_SERVICE_NAME = "securityhub-suppressor" } } -# Lambda to suppress Security Hub findings in response to DynamoDB stream event -module "lambda_securityhub_streams_suppressor" { +# Lambda to suppress Security Hub findings in response to S3 suppressions file uploads +module "lambda_securityhub_trigger_suppressor" { #checkov:skip=CKV_AWS_272:Code signing not used for now source = "schubergphilis/mcaf-lambda/aws" version = "~> 1.1.0" - name = var.lambda_streams_suppressor.name + name = var.lambda_trigger_suppressor.name create_policy = false create_s3_dummy_object = false - description = "Lambda to suppress Security Hub findings in response to DynamoDB stream event" + description = "Lambda to suppress Security Hub findings in response to S3 suppressions file uploads" filename = module.lambda_suppressor_deployment_package.local_filename - handler = "securityhub_streams.lambda_handler" + handler = "securityhub_trigger.lambda_handler" kms_key_arn = var.kms_key_arn log_retention = 365 - memory_size = var.lambda_streams_suppressor.memory_size + memory_size = var.lambda_trigger_suppressor.memory_size role_arn = module.lambda_security_hub_suppressor_role.arn - runtime = var.lambda_streams_suppressor.runtime - s3_bucket = var.s3_bucket_name + runtime = var.lambda_trigger_suppressor.runtime + s3_bucket = var.artifact_s3_bucket_name s3_key = module.lambda_suppressor_deployment_package.s3_object.key s3_object_version = module.lambda_suppressor_deployment_package.s3_object.version_id - security_group_egress_rules = var.lambda_streams_suppressor.security_group_egress_rules + security_group_egress_rules = var.lambda_trigger_suppressor.security_group_egress_rules subnet_ids = var.subnet_ids tags = var.tags - timeout = var.lambda_streams_suppressor.timeout + timeout = var.lambda_trigger_suppressor.timeout environment = { - DYNAMODB_TABLE_NAME = var.dynamodb_table - LOG_LEVEL = var.lambda_streams_suppressor.log_level + S3_BUCKET_NAME = var.suppressions_s3_bucket_name + S3_OBJECT_NAME = var.suppressions_s3_object_name + LOG_LEVEL = var.lambda_trigger_suppressor.log_level POWERTOOLS_LOGGER_LOG_EVENT = "false" POWERTOOLS_SERVICE_NAME = "securityhub-suppressor" } @@ -257,9 +255,24 @@ resource "aws_cloudwatch_event_target" "lambda_securityhub_events_suppressor" { rule = aws_cloudwatch_event_rule.securityhub_events_suppressor_failed_events.name } -# Create event source mapping between Security Hub Streams Lambda function and DynamoDB streams -resource "aws_lambda_event_source_mapping" "lambda_securityhub_streams_mapping" { - event_source_arn = aws_dynamodb_table.suppressor_dynamodb_table.stream_arn - function_name = module.lambda_securityhub_streams_suppressor.name - starting_position = "LATEST" +# Allow S3 to invoke S3 Trigger Lambda function +resource "aws_lambda_permission" "allow_s3_to_invoke_trigger_lambda" { + action = "lambda:InvokeFunction" + function_name = var.lambda_trigger_suppressor.name + principal = "s3.amazonaws.com" + source_arn = module.suppressions_bucket.arn +} + +# Add Security Hub Trigger Lambda function as a target to Suppressions S3 Object Creation Trigger Events +resource "aws_s3_bucket_notification" "bucket_notification" { + bucket = 
var.suppressions_s3_bucket_name
+
+  lambda_function {
+    lambda_function_arn = module.lambda_securityhub_trigger_suppressor.arn
+    events              = ["s3:ObjectCreated:*"]
+    filter_prefix       = var.suppressions_s3_object_name
+    filter_suffix       = var.suppressions_s3_object_name
+  }
+
+  depends_on = [aws_lambda_permission.allow_s3_to_invoke_trigger_lambda]
 }
diff --git a/variables.tf b/variables.tf
index 08463f5..4e1b78c 100644
--- a/variables.tf
+++ b/variables.tf
@@ -1,13 +1,12 @@
-variable "dynamodb_deletion_protection" {
-  type        = bool
-  default     = true
-  description = "The DynamoDB table deletion protection option."
+variable "suppressions_s3_bucket_name" {
+  type        = string
+  description = "The S3 bucket containing the items to be suppressed in Security Hub"
 }
 
-variable "dynamodb_table" {
+variable "suppressions_s3_object_name" {
   type        = string
-  default     = "securityhub-suppression-list"
-  description = "The DynamoDB table containing the items to be suppressed in Security Hub"
+  default     = "suppressions.yaml"
+  description = "The S3 object containing the items to be suppressed in Security Hub"
 }
 
 variable "eventbridge_suppressor_iam_role_name" {
@@ -99,9 +98,9 @@ variable "lambda_events_suppressor" {
   }
 }
 
-variable "lambda_streams_suppressor" {
+variable "lambda_trigger_suppressor" {
   type = object({
-    name        = optional(string, "securityhub-streams-suppressor")
+    name        = optional(string, "securityhub-trigger-suppressor")
     log_level   = optional(string, "INFO")
     memory_size = optional(number, 256)
     runtime     = optional(string, "python3.8")
@@ -119,10 +118,10 @@ variable "lambda_streams_suppressor" {
     })), [])
   })
   default     = {}
-  description = "Lambda Streams Suppressor settings - Supresses the Security Hub findings in response to DynamoDB streams"
+  description = "Lambda Trigger Suppressor settings - Suppresses the Security Hub findings in response to S3 file upload triggers"
 
   validation {
-    condition     = alltrue([for o in var.lambda_streams_suppressor.security_group_egress_rules : (o.cidr_ipv4 != null || o.cidr_ipv6 != null || o.prefix_list_id != null || o.referenced_security_group_id != null)])
+    condition     = alltrue([for o in var.lambda_trigger_suppressor.security_group_egress_rules : (o.cidr_ipv4 != null || o.cidr_ipv6 != null || o.prefix_list_id != null || o.referenced_security_group_id != null)])
    error_message = "Although \"cidr_ipv4\", \"cidr_ipv6\", \"prefix_list_id\", and \"referenced_security_group_id\" are all marked as optional, you must provide one of them in order to configure the destination of the traffic."
   }
 }
@@ -133,7 +132,7 @@ variable "lambda_suppressor_iam_role_name" {
   description = "The name of the role which will be assumed by both Suppressor Lambda functions"
 }
 
-variable "s3_bucket_name" {
+variable "artifact_s3_bucket_name" {
   type        = string
   description = "The name for the S3 bucket which will be created for storing the function's deployment package"
 }
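
Note on the suppressions file: the rules that previously lived in DynamoDB items are now read from a single YAML object in the suppressions bucket (var.suppressions_s3_object_name, "suppressions.yaml" by default). The S3 backend introduced above parses the file's top-level "Rules" key and registers the result with FindingsManager. Below is a minimal sketch of what such a file might look like; the exact rule schema is owned by awsfindingsmanagerlib, and the note, action, match_on, security_control_id, and resource_id_regexps field names are assumptions based on that library's documentation, not something this patch defines:

    Rules:
      - note: "Known public bucket, risk accepted"       # assumed field: free-form justification for the suppression
        action: "SUPPRESSED"                             # assumed field: workflow status applied to matching findings
        match_on:
          security_control_id: "S3.8"                    # assumed field: Security Hub control to match on
          resource_id_regexps:
            - "^arn:aws:s3:::example-public-bucket$"     # assumed field: regexes matched against finding resource IDs

Uploading a new version of this object fires the s3:ObjectCreated:* notification configured in suppressor.tf, which invokes securityhub_trigger.lambda_handler and re-applies the full rule set to existing findings.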