Skip to content

Commit

Permalink
docs(examples): Add Kinesis nXDR Example (#160)
Browse files Browse the repository at this point in the history
* docs(examples): Add Kinesis nXDR Example

* docs: Remove Import

* docs: Add Sample Data

* docs: Update Kinesis ESM

* docs: Add Post-Deploy Delay

* style: Format

* docs: Update Sample Data
  • Loading branch information
jshlbrd authored Apr 29, 2024
1 parent f4d8329 commit b463156
Show file tree
Hide file tree
Showing 11 changed files with 320 additions and 0 deletions.
38 changes: 38 additions & 0 deletions examples/terraform/aws/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,44 @@ flowchart LR
end
```

## nXDR

Deploys a data pipeline that implements an nXDR pattern by applying threat / risk enrichment metadata to events and sending the enriched data to multiple destinations. This pattern is useful for:
- Generating risk-based detection rules
- Guiding analysts during incident investigations and incident response
- Aiding unstructured threat hunts
- Prioritizing logs for retention and analysis

```mermaid
flowchart LR
%% resources
kinesis([Kinesis Data Stream])
dynamodb([DynamoDB Table])
ext([External System])
enrichmentHandler[[Handler]]
enrichmentTransforms[Transforms]
transformHandler[[Handler]]
transformTransforms[Transforms]
%% connections
kinesis --> enrichmentHandler
subgraph Substation Enrichment Node
enrichmentHandler --> enrichmentTransforms
end
enrichmentTransforms --> dynamodb
kinesis --> transformHandler
subgraph Substation Transform Node
transformHandler --> transformTransforms
end
transformTransforms --> ext
```

## Time Travel

Deploys a data pipeline that implements a "time travel" pattern by having a subscriber node read data more slowly than an enrichment node. The nodes share data observed across different events using a DynamoDB table.
Expand Down
15 changes: 15 additions & 0 deletions examples/terraform/aws/kinesis/nxdr/config/const.libsonnet
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
// Constants and helpers shared by the nXDR example's enrichment and
// transform node configurations.
local sub = import '../../../../../../build/config/substation.libsonnet';

{
// Object path where threat signal metadata is appended on each event.
threat_signals_key: 'threat.signals',
// threat_signal is a custom function that appends threat info to an
// event as enrichment metadata.
//
// If a smaller event is needed, then the enriched threat signal can
// be emitted as a separate event. This is similar to the implementation
// seen in the enrichment Lambda function.
//
// NOTE(review): settings is expected to carry name, description,
// references, and risk_score; each is copied verbatim into the object
// appended to the threat.signals array.
threat_signal(settings): sub.tf.obj.insert({
obj: { trg: sub.helpers.obj.append_array($.threat_signals_key) },
value: { name: settings.name, description: settings.description, references: settings.references, risk_score: settings.risk_score },
}),
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
// Substation enrichment node configuration: runs the shared threat
// enrichment transforms, then persists each resulting threat signal as an
// item in the substation_threat_signals DynamoDB table.
local sub = import '../../../../../../../build/config/substation.libsonnet';

local const = import '../const.libsonnet';
local threat = import '../threat_enrichment.libsonnet';

{
concurrency: 2,
transforms:
threat.transforms + [
// Discards any events that don't contain threat signals.
// NOTE(review): assumes a missing threat.signals key also evaluates to
// length 0 — confirm against the num.len.eq condition's docs.
sub.tf.meta.switch({ cases: [
{
condition: sub.cnd.any([
sub.cnd.num.len.eq({ object: { source_key: const.threat_signals_key }, value: 0 }),
]),
transform: sub.tf.util.drop(),
},
] }),
// Explodes the threat signals array into individual events. These become
// threat signal records in the DynamoDB table.
sub.tf.aggregate.from.array({ object: { source_key: const.threat_signals_key } }),
// The host name and current time are used as the keys for the DynamoDB table:
// PK (hash key) is host.name, SK (range key) is the observation timestamp.
sub.tf.object.copy({ object: { source_key: 'host.name', target_key: 'PK' } }),
sub.tf.time.now({ object: { target_key: 'SK' } }),
// Formats SK using a Go reference-time layout (ISO 8601 with milliseconds).
sub.tf.time.to.string({ object: { source_key: 'SK', target_key: 'SK' }, format: '2006-01-02T15:04:05.000Z' }),
// Any fields not needed in the DynamoDB item are removed.
sub.tf.object.delete({ object: { source_key: 'event' } }),
sub.tf.object.delete({ object: { source_key: 'host' } }),
sub.tf.object.delete({ object: { source_key: 'process' } }),
sub.tf.object.delete({ object: { source_key: 'threat' } }),
// Writes the threat signal to the DynamoDB table.
sub.tf.send.aws.dynamodb({ table_name: 'substation_threat_signals' }),
],
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
// The nXDR pattern relies on Substation's meta_switch transform to conditionally determine
// if an event matches threat criteria. If the event matches, then a threat signal is created.
// The meta_switch transform supports any combination of if-elif-else logic.
local sub = import '../../../../../../build/config/substation.libsonnet';
local const = import 'const.libsonnet';

// Composable conditions are recommended when managing multiple threat signals.
local cnd = {
// Matches events whose event.category is 'process'.
process: [
sub.cnd.str.eq({ obj: { src: 'event.category' }, value: 'process' }),
],

macos: {
// Matches events from macOS hosts (host.os.type).
os: sub.cnd.str.eq({ obj: { src: 'host.os.type' }, value: 'macos' }),
// NOTE(review): `$` here resolves to this `cnd` object (the outermost
// object enclosing the reference), so $.process is the process condition
// list above and $.macos.os is the condition directly above — i.e. this
// is "process event AND macOS host".
process: $.process + [$.macos.os],
},
};

{
transforms: [
// Privilege Escalation
// https://attack.mitre.org/tactics/TA0004/
//
// https://attack.mitre.org/techniques/T1548/004/
sub.tf.meta.switch({
// Reused below as the threat signal's name.
local name = 'privilege_escalation_elevated_execution_with_prompt',

cases: [
{
transform: const.threat_signal({
name: name,
description: 'Identifies when an authentication prompt is generated by the AuthorizationExecuteWithPrivileges API.',
references: ['https://objective-see.com/blog/blog_0x2A.html'],
// The risk score can be dynamically calculated based on additional
// fields in the event.
risk_score: 73,
}),
// Fires only for macOS process events whose process name is
// security_authtrampoline.
condition: sub.cnd.all(cnd.macos.process + [
sub.cnd.str.eq({ obj: { src: 'process.name' }, value: 'security_authtrampoline' }),
]),
},
],
}),
],
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
// Substation transform node configuration: runs the shared threat
// enrichment transforms and prints the enriched events to stdout. In a real
// deployment the stdout send would be replaced with an external destination.
local sub = import '../../../../../../../build/config/substation.libsonnet';

local threat = import '../threat_enrichment.libsonnet';

{
concurrency: 2,
transforms:
threat.transforms + [
// At this point more transforms can be added and the events can be sent
// to an external system.
sub.tf.send.stdout(),
],
}
5 changes: 5 additions & 0 deletions examples/terraform/aws/kinesis/nxdr/data.jsonl
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{"event":{"category":"process","type":"start"},"host":{"name":"Alice's Macbook Pro","os":{"type":"macos"}},"process":{"command_line":"/usr/sbin/sshd -i","name":"sshd","parent":{"command_line":"/usr/libexec/sshd-keygen-wrapper","name":"sshd-keygen-wrapper","parent":{"command_line":"/usr/libexec/launchd","name":"launchd"}}}}
{"event":{"category":"process","type":"start"},"host":{"name":"Alice's Macbook Pro","os":{"type":"macos"}},"process":{"command_line":"/usr/libexec/security_authtrampoline /usr/sbin/installer auth 22 -verboseR -allowUntrusted -pkg /private/tmp/xp-6100/epsvcp.pkg -target /","name":"security_authtrampoline","parent":{"command_line":"/private/tmp/update_XP-6100 Series/EPSON.app/Contents/MacOS/EpsonInstaller","name":"EpsonInstaller","parent":{"command_line":"/usr/libexec/runningboardd","name":"runningboardd"}}}}
{"event":{"category":"process","type":"start"},"host":{"name":"Alice's Macbook Pro","os":{"type":"macos"}},"process":{"command_line":"/Applications/Google Chrome.app/Contents/MacOS/Google Chrome","name":"Google Chrome","parent":{"command_line":"/usr/bin/open -n -a /Applications/Google Chrome.app","name":"open","parent":{"command_line":"/usr/libexec/launchd","name":"launchd"}}}}
{"event":{"category":"process","type":"start"},"host":{"name":"Alice's Macbook Pro","os":{"type":"macos"}},"process":{"command_line":"/usr/sbin/cupsd","name":"cupsd","parent":{"command_line":"/usr/libexec/cups/backend/usb","name":"cups-usb-backend","parent":{"command_line":"/usr/libexec/launchd","name":"launchd"}}}}
{"event":{"category":"process","type":"start"},"host":{"name":"Alice's Macbook Pro","os":{"type":"macos"}},"process":{"command_line":"/usr/bin/python3 /usr/local/bin/pip install requests","name":"python3","parent":{"command_line":"/usr/local/bin/pip install requests","name":"pip","parent":{"command_line":"/usr/bin/sudo pip3","name":"sudo"}}}}
2 changes: 2 additions & 0 deletions examples/terraform/aws/kinesis/nxdr/post_deploy.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Brief pause — presumably to let the just-deployed resources (Kinesis
# stream, Lambda event source mappings) settle before data is sent; confirm
# whether 5 seconds is sufficient in practice.
sleep 5
# Puts the sample events onto the "substation" Kinesis Data Stream using the
# repo's put_records helper; --print-response echoes the Kinesis API replies.
# NOTE(review): relies on AWS_REGION being exported by the calling harness.
AWS_DEFAULT_REGION=$AWS_REGION python3 ../build/scripts/aws/kinesis/put_records.py substation terraform/aws/kinesis/nxdr/data.jsonl --print-response
77 changes: 77 additions & 0 deletions examples/terraform/aws/kinesis/nxdr/terraform/_resources.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
# AppConfig application ("substation") with an "example" environment. The
# transform and enrichment Lambdas fetch their configurations from it via the
# SUBSTATION_CONFIG URLs set in their env blocks.
module "appconfig" {
source = "../../../../../../build/terraform/aws/appconfig"

config = {
name = "substation"
environments = [{ name = "example" }]
}
}

# Repository for the core Substation application.
# force_delete allows `terraform destroy` to remove the repo even when it
# still contains images — appropriate for a throwaway example.
module "ecr" {
source = "../../../../../../build/terraform/aws/ecr"

config = {
name = "substation"
force_delete = true
}
}

# Repository for the autoscaling application.
# force_delete allows `terraform destroy` to remove the repo even when it
# still contains images — appropriate for a throwaway example.
module "ecr_autoscale" {
source = "../../../../../../build/terraform/aws/ecr"

config = {
name = "autoscale"
force_delete = true
}
}

# SNS topic for Kinesis Data Stream autoscaling alarms. The stream module
# publishes alarms here and the autoscale Lambda subscribes (see autoscaler.tf).
resource "aws_sns_topic" "autoscaling_topic" {
name = "autoscale"
}

# Kinesis Data Stream that stores data sent from pipeline sources.
# Both Substation nodes consume this stream independently (fan-out): the
# transform node enriches and forwards events, the enrichment node writes
# threat signals to DynamoDB.
module "kinesis" {
source = "../../../../../../build/terraform/aws/kinesis_data_stream"

config = {
name = "substation"
autoscaling_topic = aws_sns_topic.autoscaling_topic.arn
}

# IAM roles granted access to the stream.
access = [
# Autoscales the stream.
module.lambda_autoscale.role.name,
# Consumes data from the stream.
module.lambda_transform.role.name,
module.lambda_enrichment.role.name,
]
}

# DynamoDB table that stores threat signal records written by the
# enrichment node. Keys match the enrichment config: PK = host name,
# SK = observation timestamp.
# NOTE(review): ttl points at a "TTL" attribute, but the enrichment config
# shown in this change never sets a TTL field on items, so records would not
# auto-expire — confirm whether that is intended.
module "dynamodb" {
source = "../../../../../../build/terraform/aws/dynamodb"

config = {
name = "substation_threat_signals"
hash_key = "PK"
range_key = "SK"
ttl = "TTL"

attributes = [
{
name = "PK"
type = "S"
},
{
name = "SK"
type = "S"
},
]
}

# Only the enrichment node writes to this table.
access = [
module.lambda_enrichment.role.name,
]
}
33 changes: 33 additions & 0 deletions examples/terraform/aws/kinesis/nxdr/terraform/autoscaler.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Lambda that autoscales the Kinesis Data Stream in response to alarms
# delivered via the "autoscale" SNS topic. Runs the prebuilt autoscale
# container image (arm64) from the autoscale ECR repository.
module "lambda_autoscale" {
source = "../../../../../../build/terraform/aws/lambda"
appconfig = module.appconfig

config = {
name = "autoscale"
description = "Autoscaler for Kinesis Data Streams"
image_uri = "${module.ecr_autoscale.url}:v1.2.0"
image_arm = true
}
}

# Subscribes the autoscale Lambda to the autoscaling alarm topic so that
# alarm notifications invoke it.
# NOTE(review): depends_on conventionally takes whole resource/module
# references (module.lambda_autoscale), not attribute expressions like
# .name — newer Terraform versions may reject this; confirm.
resource "aws_sns_topic_subscription" "autoscaling_subscription" {
topic_arn = aws_sns_topic.autoscaling_topic.arn
protocol = "lambda"
endpoint = module.lambda_autoscale.arn

depends_on = [
module.lambda_autoscale.name
]
}

# Grants the SNS topic permission to invoke the autoscale Lambda; required
# in addition to the subscription above for delivery to succeed.
# NOTE(review): depends_on conventionally takes whole resource/module
# references (module.lambda_autoscale), not attribute expressions like
# .name — newer Terraform versions may reject this; confirm.
resource "aws_lambda_permission" "autoscaling_invoke" {
statement_id = "AllowExecutionFromSNS"
action = "lambda:InvokeFunction"
function_name = module.lambda_autoscale.name
principal = "sns.amazonaws.com"
source_arn = aws_sns_topic.autoscaling_topic.arn

depends_on = [
module.lambda_autoscale.name
]
}
29 changes: 29 additions & 0 deletions examples/terraform/aws/kinesis/nxdr/terraform/enrichment.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Substation enrichment node: consumes the Kinesis stream and writes threat
# signals to the DynamoDB table. Pulls its "enrichment" configuration from
# AppConfig at the localhost:2772 extension endpoint.
module "lambda_enrichment" {
source = "../../../../../../build/terraform/aws/lambda"
appconfig = module.appconfig

config = {
name = "enrichment"
description = "Substation enrichment node that writes threat signals to DynamoDB."
image_uri = "${module.ecr.url}:v1.2.0"
image_arm = true

env = {
# AppConfig Lambda extension URL for this node's configuration.
"SUBSTATION_CONFIG" : "http://localhost:2772/applications/substation/environments/example/configurations/enrichment"
# Selects the Kinesis Data Stream handler in the Substation app.
"SUBSTATION_LAMBDA_HANDLER" : "AWS_KINESIS_DATA_STREAM"
"SUBSTATION_DEBUG" : true
}
}
}

# Wires the Kinesis stream to the enrichment Lambda: batches of up to 100
# records, gathered for at most 10 seconds, one concurrent batch per shard.
resource "aws_lambda_event_source_mapping" "lambda_enrichment" {
event_source_arn = module.kinesis.arn
function_name = module.lambda_enrichment.arn
maximum_batching_window_in_seconds = 10
batch_size = 100
parallelization_factor = 1
# In this example, we start from the beginning of the stream,
# but in a prod environment, you may want to start from the end
# of the stream to avoid processing old data ("LATEST").
starting_position = "TRIM_HORIZON"
}
29 changes: 29 additions & 0 deletions examples/terraform/aws/kinesis/nxdr/terraform/transform.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Substation transform node: consumes the Kinesis stream and enriches events
# with threat information. Pulls its "transform" configuration from
# AppConfig at the localhost:2772 extension endpoint.
module "lambda_transform" {
source = "../../../../../../build/terraform/aws/lambda"
appconfig = module.appconfig

config = {
name = "transform"
description = "Substation transform node that enriches events with threat information."
image_uri = "${module.ecr.url}:v1.2.0"
image_arm = true

env = {
# AppConfig Lambda extension URL for this node's configuration.
"SUBSTATION_CONFIG" : "http://localhost:2772/applications/substation/environments/example/configurations/transform"
# Selects the Kinesis Data Stream handler in the Substation app.
"SUBSTATION_LAMBDA_HANDLER" : "AWS_KINESIS_DATA_STREAM"
"SUBSTATION_DEBUG" : true
}
}
}

# Wires the Kinesis stream to the transform Lambda: batches of up to 100
# records, gathered for at most 10 seconds, one concurrent batch per shard.
resource "aws_lambda_event_source_mapping" "lambda_transform" {
event_source_arn = module.kinesis.arn
function_name = module.lambda_transform.arn
maximum_batching_window_in_seconds = 10
batch_size = 100
parallelization_factor = 1
# In this example, we start from the beginning of the stream,
# but in a prod environment, you may want to start from the end
# of the stream to avoid processing old data ("LATEST").
starting_position = "TRIM_HORIZON"
}

0 comments on commit b463156

Please sign in to comment.