# main.tf
locals {
  package_filename = "${path.module}/package.zip"
}
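
# Download the Lambda deployment package from var.lambda_package_url into the
# module directory at plan time. An external data source must print a JSON
# object to stdout, hence the trailing "echo {}".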
data "external" "package" {
  program = ["bash", "-c", "curl -s -L -o ${local.package_filename} ${var.lambda_package_url} && echo {}"]
}

data "aws_kinesis_stream" "source" {
  name = var.source_stream_name
}

data "aws_kinesis_stream" "target" {
  name = var.target_stream_name
}

data "aws_s3_bucket" "failed_log_s3_bucket" {
  bucket = var.failed_log_s3_bucket
}

resource "aws_cloudwatch_log_group" "logs" {
  name              = "/aws/lambda/${var.name}"
  retention_in_days = var.log_retention_in_days
}

resource "aws_lambda_function" "function" {
  function_name = var.name
  handler       = var.handler
  role          = module.iam.arn
  runtime       = var.runtime
  memory_size   = var.memory
  timeout       = var.timeout

  filename = local.package_filename

  # Below is a very dirty hack to force base64sha256 to wait until
  # package download in data.external.package finishes.
  #
  # WARNING: explicit depends_on from this resource to data.external.package
  # does not help
  source_code_hash = filebase64sha256(
    jsonencode(data.external.package.result) == "{}" ? local.package_filename : "",
  )

  tracing_config {
    mode = var.tracing_mode
  }

  environment {
    variables = {
      TZ                      = var.timezone
      LOG_ID_FIELD            = var.log_id_field
      LOG_TYPE_FIELD          = var.log_type_field
      LOG_TYPE_UNKNOWN_PREFIX = var.log_type_unknown_prefix
      LOG_TIMESTAMP_FIELD     = var.log_timestamp_field
      LOG_TYPE_WHITELIST      = join(",", var.log_type_field_whitelist)
      TARGET_STREAM_NAME      = data.aws_kinesis_stream.target.name
      KINESIS_MAX_RETRIES     = var.kinesis_max_retries
      FAILED_LOG_S3_BUCKET    = var.failed_log_s3_bucket
      FAILED_LOG_S3_PREFIX    = var.failed_log_s3_prefix
    }
  }

  tags = var.tags
}
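
# Poll the source Kinesis stream and invoke the function with batches of up to
# var.batch_size records, starting from var.starting_position.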
resource "aws_lambda_event_source_mapping" "kinesis_mapping" {
  batch_size        = var.batch_size
  event_source_arn  = data.aws_kinesis_stream.source.arn
  enabled           = var.enable_kinesis_mapping
  function_name     = aws_lambda_function.function.arn
  starting_position = var.starting_position
}

resource "aws_iam_role_policy_attachment" "xray_access" {
  policy_arn = "arn:aws:iam::aws:policy/AWSXrayWriteOnlyAccess"
  role       = module.iam.name
}
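
# Execution role for the function, created by the iam-nofile module with the
# inline policy below: read from the source stream, write to the target stream,
# write CloudWatch Logs, and put failed records to the S3 bucket.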
module "iam" {
  source  = "baikonur-oss/iam-nofile/aws"
  version = "v2.0.0"

  type = "lambda"
  name = var.name

  policy_json = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "kinesis:DescribeStream",
        "kinesis:DescribeStreamSummary",
        "kinesis:GetShardIterator",
        "kinesis:GetRecords",
        "kinesis:ListStreams",
        "kinesis:ListShards"
      ],
      "Resource": [
        "${data.aws_kinesis_stream.source.arn}"
      ]
    },
    {
      "Effect": "Allow",
      "Action": [
        "kinesis:DescribeStream",
        "kinesis:DescribeStreamSummary",
        "kinesis:PutRecord",
        "kinesis:PutRecords",
        "kinesis:ListStreams"
      ],
      "Resource": [
        "${data.aws_kinesis_stream.target.arn}"
      ]
    },
    {
      "Effect": "Allow",
      "Action": [
        "kinesis:SubscribeToShard"
      ],
      "Resource": [
        "${data.aws_kinesis_stream.source.arn}/consumer/*:*"
      ]
    },
    {
      "Effect": "Allow",
      "Action": [
        "logs:CreateLogStream",
        "logs:DescribeLogGroups",
        "logs:DescribeLogStreams",
        "logs:PutLogEvents"
      ],
      "Resource": [
        "arn:aws:logs:*:*:*"
      ]
    },
    {
      "Effect": "Allow",
      "Action": [
        "s3:PutObject"
      ],
      "Resource": [
        "${data.aws_s3_bucket.failed_log_s3_bucket.arn}/*"
      ]
    }
  ]
}
EOF
}

# Note: data is not supported for AWS ES domains, do not even try to make it
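
# A minimal usage sketch (commented out, illustrative only): calling this module
# from a root configuration. The source path and all values below are hypothetical
# placeholders; see variables.tf for the full list of variables and their defaults.
#
# module "kinesis_forward" {
#   source = "../path/to/this/module"  # hypothetical path; use the module's actual source
#
#   name               = "log-forward"
#   handler            = "main.handler"
#   runtime            = "python3.7"
#   memory             = 256
#   timeout            = 60
#   lambda_package_url = "https://example.com/lambda_package.zip"
#
#   source_stream_name = "source-stream"
#   target_stream_name = "target-stream"
#   batch_size         = 100
#
#   failed_log_s3_bucket = "my-failed-log-bucket"
#   failed_log_s3_prefix = "failed/"
# }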