I am testing a CloudWatch alarm driven by a log metric filter (to check precisely whether a Lambda hit its maximum memory), which should publish to SNS and then be forwarded to an SQS queue. However, I don't see the filter showing up in the logs.
The setup is: CloudWatch (filtered alarm) -> SNS -> SQS -> Splunk.
So far I have:
resource "aws_cloudwatch_metric_alarm" "general_lambda_error" {
depends_on = [
"aws_cloudwatch_log_metric_filter.max_memory_time_out",
]
alarm_name = "general_lambda_error"
comparison_operator = "GreaterThanOrEqualToThreshold"
evaluation_periods = "1"
metric_name = "Errors"
namespace = "AWS/Lambda"
period = "60"
statistic = "SampleCount"
threshold = "2"
alarm_description = "This metric monitors Lambda Memory Max Usage and other Errors: threshold=2"
alarm_actions = [ "some-arn" ]
dimensions {
FunctionName = "lambda-test"
Resource = "lambda-test"
}
}
resource "aws_cloudwatch_log_metric_filter" "max_memory_time_out" {
name = "max_memory_time_out"
pattern = "[report_name=\"REPORT\",
request_id_name=\"RequestId:\", request_id_value, duration_name=\"Duration:\", duration_value, duration_unit=\"ms\", billed_duration_name_1=\"Billed\", bill_duration_name_2=\"Duration:\", billed_duration_value, billed_duration_unit=\"ms\", memory_size_name_1=\"Memory\", memory_size_name_2=\"Size:\", memory_size_value, memory_size_unit=\"MB\", max_memory_used_name_1=\"Max\", max_memory_used_name_2=\"Memory\", max_memory_used_name_3=\"Used:\", max_memory_used_value, max_memory_used_unit=\"MB\"]"
log_group_name = "/aws/lambda/lambda-test"
metric_transformation {
name = "SampleCount"
namespace = "cloudwatch_filter"
value = "1"
}
}
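For context, the SNS -> SQS leg of the pipeline is wired roughly like the sketch below. The resource and queue names (lambda_alarms, etc.) are placeholders rather than my real configuration, and the alarm's alarm_actions would point at the topic ARN instead of the literal "some-arn":

# Hypothetical names: the topic the alarm publishes to, and the queue Splunk reads from.
resource "aws_sns_topic" "lambda_alarms" {
  name = "lambda-alarms"
}

resource "aws_sqs_queue" "lambda_alarms" {
  name = "lambda-alarms-queue"
}

# Subscribe the queue to the topic so alarm notifications land in SQS.
resource "aws_sns_topic_subscription" "lambda_alarms_sqs" {
  topic_arn = "${aws_sns_topic.lambda_alarms.arn}"
  protocol  = "sqs"
  endpoint  = "${aws_sqs_queue.lambda_alarms.arn}"
}

# The queue policy must allow the SNS topic to deliver messages to it.
resource "aws_sqs_queue_policy" "lambda_alarms" {
  queue_url = "${aws_sqs_queue.lambda_alarms.id}"

  policy = <<JSON
{
  "Version": "2012-10-17",
  "Statement": [{
    "Effect": "Allow",
    "Principal": { "Service": "sns.amazonaws.com" },
    "Action": "sqs:SendMessage",
    "Resource": "${aws_sqs_queue.lambda_alarms.arn}",
    "Condition": { "ArnEquals": { "aws:SourceArn": "${aws_sns_topic.lambda_alarms.arn}" } }
  }]
}
JSON
}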
How do I get the filtered messages sent? I found this link describing a similar problem, but the solution there is to create a Lambda function. Is it possible to do this without creating a Lambda function?
Best Answer
Here is the code to get CloudWatch Logs into S3 (via a Kinesis Firehose delivery stream):
resource "aws_iam_role" "cloudwatchToFirehose" {
name = "${var.env}-${var.name}FirehoseCWL-Role"
assume_role_policy = <<JSON
{
"Version": "2012-10-17",
"Statement": [{
"Action": "sts:AssumeRole",
"Principal": { "Service": "logs.${var.region}.amazonaws.com"},
"Effect": "Allow"
}]
}
JSON
}
resource "aws_iam_policy" "cloudwatchToFirehose" {
name = "${var.env}-${var.name}FirehoseCWL-Policy"
policy = <<JSON
{
"Version": "2012-10-17",
"Statement": [
{
"Effect":"Allow",
"Action": [
"firehose:DeleteDeliveryStream",
"firehose:PutRecord",
"firehose:PutRecordBatch",
"firehose:UpdateDestination"
],
"Resource": ["${aws_kinesis_firehose_delivery_stream.firehoseToS3.arn}"]
}
]
}
JSON
}
resource "aws_iam_role_policy_attachment" "cloudwatchToFirehose" {
role = "${aws_iam_role.cloudwatchToFirehose.name}"
policy_arn = "${aws_iam_policy.cloudwatchToFirehose.arn}"
}
resource "aws_iam_role" "firehoseToS3" {
name = "${var.env}-${var.name}FirehoseS3-Role"
assume_role_policy = <<JSON
{
"Version": "2012-10-17",
"Statement": [{
"Action": "sts:AssumeRole",
"Principal": { "Service": "firehose.amazonaws.com"},
"Effect": "Allow"
}]
}
JSON
lifecycle {
create_before_destroy = true
}
}
resource "aws_iam_policy" "firehoseToS3" {
name = "${var.env}-${var.name}FirehoseS3-Policy"
policy = <<JSON
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:AbortMultipartUpload",
"s3:GetBucketLocation",
"s3:GetObject",
"s3:ListBucket",
"s3:ListBucketMultipartUploads",
"s3:PutObject",
"s3:PutObjectAcl"
],
"Resource": [
"arn:aws:s3:::${var.logs_bucket}",
"arn:aws:s3:::${var.logs_bucket}/*"
]
},
{
"Effect": "Allow",
"Action": [
"kinesis:DescribeStream",
"kinesis:GetShardIterator",
"kinesis:GetRecords"
],
"Resource": "${aws_kinesis_firehose_delivery_stream.firehoseToS3.arn}"
},
{
"Effect": "Allow",
"Action": [
"kms:Decrypt",
"kms:GenerateDataKey"
],
"Resource": [
"${var.kms_general_key}"
]
},
{
"Effect": "Allow",
"Action": [
"logs:PutLogEvents"
],
"Resource": [
"arn:aws:logs:*:*:log-group:${var.org}-${var.group}-${var.environment}/Firehose:*"
]
},
{
"Effect": "Allow",
"Action": [
"lambda:InvokeFunction",
"lambda:GetFunctionConfiguration"
],
"Resource": [
"arn:aws:lambda:*:*:function:*:*"
]
}
]
}
JSON
lifecycle {
create_before_destroy = true
}
}
resource "aws_iam_role_policy_attachment" "firehoseToS3" {
role = "${aws_iam_role.firehoseToS3.name}"
policy_arn = "${aws_iam_policy.firehoseToS3.arn}"
lifecycle {
create_before_destroy = true
}
}
resource "aws_kinesis_firehose_delivery_stream" "firehoseToS3" {
name = "${var.env}-${var.name}Firehose-Stream"
destination = "s3"
s3_configuration {
role_arn = "${aws_iam_role.firehoseToS3.arn}"
bucket_arn = "arn:aws:s3:::${var.logs_bucket}"
buffer_interval = "300"
buffer_size = "10"
prefix = "${var.name}"
}
}
resource "aws_cloudwatch_log_subscription_filter" "cloudwatchToFirehose" {
count = "1"
name = "${var.env}-${var.name}Filter-Subscription"
role_arn = "${aws_iam_role.cloudwatchToFirehose.arn}"
log_group_name = "${element(var.log_groups, count.index)}"
filter_pattern = ""
destination_arn = "${aws_kinesis_firehose_delivery_stream.firehoseToS3.arn}"
}
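The snippet above assumes a handful of module inputs (var.env, var.name, var.region, var.logs_bucket, var.kms_general_key, var.org, var.group, var.environment, var.log_groups). A minimal set of declarations to make it self-contained could look like the following sketch; the list type for log_groups is inferred from the element() call, and everything else is treated as a plain string:

# Inputs referenced by the resources above (all assumed to be strings).
variable "env" {}
variable "environment" {}
variable "name" {}
variable "org" {}
variable "group" {}
variable "region" {}
variable "logs_bucket" {}
variable "kms_general_key" {}

# Log groups to subscribe to the Firehose delivery stream.
variable "log_groups" {
  type = "list"
}

Note that filter_pattern = "" forwards every event in the log group; the REPORT pattern from the question can be used there instead, so that only the memory-usage report lines reach Firehose and, downstream, Splunk.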
Regarding "terraform - How to send a Cloudwatch alarm to SNS based on a Cloudwatch Logs metric filter using Terraform", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/55366841/