From 0f45ad8acc1226e5aa72d366916545a595eb454e Mon Sep 17 00:00:00 2001
From: akano yuki
Date: Tue, 12 Nov 2019 19:29:00 +0900
Subject: [PATCH] Fix syntax for upgrade to 0.12 (#5)

* Fix syntax for upgrade to 0.12
* Upgrade pre-commit-terraform version to v1.16.0 and execute pre-commit run --all-files
* Fix README to support Terraform version 0.12
---
 .pre-commit-config.yaml |  2 +-
 README.md               | 44 +++++++++++++--------------
 main.tf                 | 66 ++++++++++++++++++++---------------------
 variables.tf            |  5 ++--
 4 files changed, 59 insertions(+), 58 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e90319d..0a55ba8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: git://github.com/antonbabenko/pre-commit-terraform
-  rev: v1.7.3
+  rev: v1.16.0
   hooks:
     - id: terraform_fmt
     - id: terraform_docs

diff --git a/README.md b/README.md
index 5b18e16..38b6a23 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 Terraform module and Lambda for saving JSON log records from Kinesis Data Streams to S3.
 
-![terraform v0.11.x](https://img.shields.io/badge/terraform-v0.11.x-brightgreen.svg)
+![terraform v0.12.x](https://img.shields.io/badge/terraform-v0.12.x-brightgreen.svg)
 
 ## Prerequisites
 1. Records in Kinesis stream must be valid JSON data. Non-JSON data will be **ignored**.
@@ -66,27 +66,27 @@ For more information on module version pinning, see [Selecting a Revision](https://www.terraform.io/docs/modules/sources.html#selecting-a-revision)
 | Name | Description | Type | Default | Required |
 |------|-------------|:----:|:-----:|:-----:|
-| batch\_size | Maximum number of records passed for a single Lambda invocation | string | n/a | yes |
-| failed\_log\_s3\_bucket | S3 bucket name for saving failed logs (ES API errors etc.) | string | n/a | yes |
-| failed\_log\_s3\_prefix | Path prefix for failed logs | string | n/a | yes |
-| handler | Lambda Function handler (entrypoint) | string | `"main.handler"` | no |
-| lambda\_package\_url | Lambda package URL (see Usage in README) | string | n/a | yes |
-| log\_id\_field | Key name for unique log ID | string | `"log_id"` | no |
-| log\_retention\_in\_days | Lambda Function log retention in days | string | `"30"` | no |
-| log\_timestamp\_field | Key name for log timestamp | string | `"time"` | no |
-| log\_type\_field | Key name for log type | string | `"log_type"` | no |
-| log\_type\_field\_whitelist | Log type whitelist (if empty, all types will be processed) | list | `<list>` | no |
-| log\_type\_unknown\_prefix | Log type prefix for logs without log type field | string | `"unknown"` | no |
-| memory | Lambda Function memory in megabytes | string | `"256"` | no |
-| name | Resource name | string | n/a | yes |
-| runtime | Lambda Function runtime | string | `"python3.7"` | no |
-| source\_stream\_name | Source Kinesis Data Stream name | string | n/a | yes |
-| starting\_position | Kinesis ShardIterator type (see: https://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html ) | string | `"TRIM_HORIZON"` | no |
-| tags | Tags for Lambda Function | map | `<map>` | no |
-| target\_stream\_name | Target Kinesis Data Stream name | string | n/a | yes |
-| timeout | Lambda Function timeout in seconds | string | `"60"` | no |
-| timezone | tz database timezone name (e.g. Asia/Tokyo) | string | `"UTC"` | no |
-| tracing\_mode | X-Ray tracing mode (see: https://docs.aws.amazon.com/lambda/latest/dg/API_TracingConfig.html ) | string | `"PassThrough"` | no |
+| batch_size | Maximum number of records passed for a single Lambda invocation | string | - | yes |
+| failed_log_s3_bucket | S3 bucket name for saving failed logs (ES API errors etc.) | string | - | yes |
+| failed_log_s3_prefix | Path prefix for failed logs | string | - | yes |
+| handler | Lambda Function handler (entrypoint) | string | `main.handler` | no |
+| lambda_package_url | Lambda package URL (see Usage in README) | string | - | yes |
+| log_id_field | Key name for unique log ID | string | `log_id` | no |
+| log_retention_in_days | Lambda Function log retention in days | string | `30` | no |
+| log_timestamp_field | Key name for log timestamp | string | `time` | no |
+| log_type_field | Key name for log type | string | `log_type` | no |
+| log_type_field_whitelist | Log type whitelist (if empty, all types will be processed) | list(string) | `[]` | no |
+| log_type_unknown_prefix | Log type prefix for logs without log type field | string | `unknown` | no |
+| memory | Lambda Function memory in megabytes | string | `256` | no |
+| name | Resource name | string | - | yes |
+| runtime | Lambda Function runtime | string | `python3.7` | no |
+| source_stream_name | Source Kinesis Data Stream name | string | - | yes |
+| starting_position | Kinesis ShardIterator type (see: https://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html ) | string | `TRIM_HORIZON` | no |
+| tags | Tags for Lambda Function | map(string) | `{}` | no |
+| target_stream_name | Target Kinesis Data Stream name | string | - | yes |
+| timeout | Lambda Function timeout in seconds | string | `60` | no |
+| timezone | tz database timezone name (e.g. Asia/Tokyo) | string | `UTC` | no |
+| tracing_mode | X-Ray tracing mode (see: https://docs.aws.amazon.com/lambda/latest/dg/API_TracingConfig.html ) | string | `PassThrough` | no |
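The updated inputs table above implies a module invocation along the following lines. This is only a hedged sketch: the module `source` address, ref, and every value below are placeholders rather than anything defined by this patch; the authoritative example is the Usage section of the module's own README.

```hcl
module "kinesis_stream_to_s3" {
  # Placeholder source and ref -- pin a real revision as described under
  # "Selecting a Revision" in the Terraform module sources documentation.
  source = "git::https://github.com/example-org/terraform-kinesis-stream-to-s3.git?ref=v0.2.0"

  # Required inputs from the table above (all values are examples only).
  name                 = "save-app-logs"
  batch_size           = 100
  source_stream_name   = "app-logs"
  target_stream_name   = "app-logs-processed"
  failed_log_s3_bucket = "example-failed-logs-bucket"
  failed_log_s3_prefix = "failed/"
  lambda_package_url   = "https://example.com/kinesis-stream-to-s3.zip"

  # Optional inputs (runtime, timezone, tags, ...) keep the defaults listed above.
}
```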
diff --git a/main.tf b/main.tf
index 852eea5..541651f 100644
--- a/main.tf
+++ b/main.tf
@@ -7,32 +7,32 @@ data "external" "package" {
 }
 
 data "aws_kinesis_stream" "source" {
-  name = "${var.source_stream_name}"
+  name = var.source_stream_name
 }
 
 data "aws_kinesis_stream" "target" {
-  name = "${var.target_stream_name}"
+  name = var.target_stream_name
 }
 
 data "aws_s3_bucket" "failed_log_s3_bucket" {
-  bucket = "${var.failed_log_s3_bucket}"
+  bucket = var.failed_log_s3_bucket
 }
 
 resource "aws_cloudwatch_log_group" "logs" {
   name              = "/aws/lambda/${var.name}"
-  retention_in_days = "${var.log_retention_in_days}"
+  retention_in_days = var.log_retention_in_days
 }
 
 resource "aws_lambda_function" "function" {
-  function_name = "${var.name}"
-  handler       = "${var.handler}"
-  role          = "${module.iam.arn}"
-  runtime       = "${var.runtime}"
-  memory_size   = "${var.memory}"
-  timeout       = "${var.timeout}"
+  function_name = var.name
+  handler       = var.handler
+  role          = module.iam.arn
+  runtime       = var.runtime
+  memory_size   = var.memory
+  timeout       = var.timeout
 
-  filename = "${local.package_filename}"
+  filename = local.package_filename
 
   # Below is a very dirty hack to force base64sha256 to wait until
   # package download in data.external.package finishes.
@@ -40,39 +40,39 @@ resource "aws_lambda_function" "function" {
   # WARNING: explicit depends_on from this resource to data.external.package
   # does not help
-  source_code_hash = "${base64sha256(file("${jsonencode(data.external.package.result) == "{}" ? local.package_filename : ""}"))}"
+  source_code_hash = filebase64sha256(
+    jsonencode(data.external.package.result) == "{}" ? local.package_filename : "",
+  )
 
   tracing_config {
-    mode = "${var.tracing_mode}"
+    mode = var.tracing_mode
   }
 
   environment {
-    variables {
-      TZ = "${var.timezone}"
-
-      LOG_ID_FIELD            = "${var.log_id_field}"
-      LOG_TYPE_FIELD          = "${var.log_type_field}"
-      LOG_TYPE_UNKNOWN_PREFIX = "${var.log_type_unknown_prefix}"
-      LOG_TIMESTAMP_FIELD     = "${var.log_timestamp_field}"
-      LOG_TYPE_WHITELIST      = "${join(",", var.log_type_field_whitelist)}"
-
-      TARGET_STREAM_NAME   = "${data.aws_kinesis_stream.target.name}"
-      FAILED_LOG_S3_BUCKET = "${var.failed_log_s3_bucket}"
-      FAILED_LOG_S3_PREFIX = "${var.failed_log_s3_prefix}"
+    variables = {
+      TZ                      = var.timezone
+      LOG_ID_FIELD            = var.log_id_field
+      LOG_TYPE_FIELD          = var.log_type_field
+      LOG_TYPE_UNKNOWN_PREFIX = var.log_type_unknown_prefix
+      LOG_TIMESTAMP_FIELD     = var.log_timestamp_field
+      LOG_TYPE_WHITELIST      = join(",", var.log_type_field_whitelist)
+      TARGET_STREAM_NAME      = data.aws_kinesis_stream.target.name
+      FAILED_LOG_S3_BUCKET    = var.failed_log_s3_bucket
+      FAILED_LOG_S3_PREFIX    = var.failed_log_s3_prefix
     }
   }
 
-  tags = "${var.tags}"
+  tags = var.tags
 }
 
 resource "aws_lambda_event_source_mapping" "kinesis_mapping" {
-  batch_size        = "${var.batch_size}"
-  event_source_arn  = "${data.aws_kinesis_stream.source.arn}"
+  batch_size        = var.batch_size
+  event_source_arn  = data.aws_kinesis_stream.source.arn
   enabled           = true
-  function_name     = "${aws_lambda_function.function.arn}"
-  starting_position = "${var.starting_position}"
+  function_name     = aws_lambda_function.function.arn
+  starting_position = var.starting_position
 }
 
 resource "aws_iam_role_policy_attachment" "xray_access" {
   policy_arn = "arn:aws:iam::aws:policy/AWSXrayWriteOnlyAccess"
-  role       = "${module.iam.name}"
+  role       = module.iam.name
 }
 
@@ -80,7 +80,7 @@ module "iam" {
   version = "1.0.1"
 
   type = "lambda"
-  name = "${var.name}"
+  name = var.name
 
   policy_json = <
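The diff above is cut off before the `policy_json` heredoc and before the variables.tf hunk listed in the diffstat. Going by the README type column, which moves `list` to `list(string)` and `map` to `map(string)`, the variables.tf change is most likely the switch to 0.12 type-constraint syntax. The following is only an illustrative sketch of that syntax using two inputs from the table, not the literal contents of the missing hunk:

```hcl
# Hedged illustration of 0.12-style type constraints implied by the README
# inputs table; not the actual variables.tf hunk from this commit.
variable "log_type_field_whitelist" {
  description = "Log type whitelist (if empty, all types will be processed)"
  type        = list(string)
  default     = []
}

variable "tags" {
  description = "Tags for Lambda Function"
  type        = map(string)
  default     = {}
}
```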