From 11abc46eb71a758796f1f41a4e3a12f32e0cd2a8 Mon Sep 17 00:00:00 2001 From: Fabio Santos Date: Tue, 19 Sep 2023 09:53:37 +0100 Subject: [PATCH] feat: Add Waf Source --- README.md | 41 +++++++++++++++--- examples/s3/waf-to-s3/README.md | 66 +++++++++++++++++++++++++++++ examples/s3/waf-to-s3/main.tf | 67 ++++++++++++++++++++++++++++++ examples/s3/waf-to-s3/outputs.tf | 24 +++++++++++ examples/s3/waf-to-s3/variables.tf | 5 +++ examples/s3/waf-to-s3/versions.tf | 14 +++++++ iam.tf | 2 +- locals.tf | 5 ++- main.tf | 6 +-- variables.tf | 12 +++++- 10 files changed, 231 insertions(+), 11 deletions(-) create mode 100644 examples/s3/waf-to-s3/README.md create mode 100644 examples/s3/waf-to-s3/main.tf create mode 100644 examples/s3/waf-to-s3/outputs.tf create mode 100644 examples/s3/waf-to-s3/variables.tf create mode 100644 examples/s3/waf-to-s3/versions.tf diff --git a/README.md b/README.md index 29032dd..2eca02c 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ Supports all destinations and all Kinesis Firehose Features. * [Kinesis Data Stream](#kinesis-data-stream) * [Kinesis Data Stream Encrypted](#kinesis-data-stream-encrypted) * [Direct Put](#direct-put) + * [WAF](waf) * [Destinations](#destinations) * [S3](#s3) * [Redshift](#redshift) @@ -46,6 +47,8 @@ Supports all destinations and all Kinesis Firehose Features. * [Resources](#resources) * [Inputs](#inputs) * [Outputs](#outputs) +* [Deprecation](#deprecation) +* [Upgrade](#upgrade) * [License](#license) ## Module versioning rule @@ -60,6 +63,7 @@ Supports all destinations and all Kinesis Firehose Features. - Sources - Kinesis Data Stream - Direct Put + - WAF - Destinations - S3 - Data Format Conversion @@ -96,14 +100,14 @@ Supports all destinations and all Kinesis Firehose Features. #### Kinesis Data Stream -**To Enabled it:** `enable_kinesis_source = true` +**To Enabled it:** `input_source = "kinesis"`. The use of variable `enable_kinesis_source` is deprecated and will be removed on next Major Release. 
 ```hcl
 module "firehose" {
   source  = "fdmsantos/kinesis-firehose/aws"
   version = "x.x.x"
   name                      = "firehose-delivery-stream"
-  enable_kinesis_source     = true
+  input_source              = "kinesis"
   kinesis_source_stream_arn = ""
   destination               = "s3" # or destination = "extended_s3"
   s3_bucket_arn             = ""
 }
 ```
 
 #### Kinesis Data Stream Encrypted
 
 If Kinesis Data Stream is encrypted, it's necessary pass this info to module .
 
-**To Enabled It:** `kinesis_source_is_encrypted = true`
+**To Enable It:** `kinesis_source_is_encrypted = true`
 
 **KMS Key:** use `kinesis_source_kms_arn` variable to indicate the KMS Key to module add permissions to policy to decrypt the Kinesis Data Stream.
 
-#### Direct Put
+#### WAF
 
+**To Enable it:** `input_source = "waf"`. The delivery stream name is automatically prefixed with `aws-waf-logs-`.
+
+```hcl
+module "firehose" {
+  source  = "fdmsantos/kinesis-firehose/aws"
+  version = "x.x.x"
+  name          = "firehose-delivery-stream"
+  input_source  = "waf"
+  destination   = "s3" # or destination = "extended_s3"
+  s3_bucket_arn = ""
+}
+```
+
+#### Direct Put
+
 ```hcl
 module "firehose" {
   source  = "fdmsantos/kinesis-firehose/aws"
   version = "x.x.x"
   name          = "firehose-delivery-stream"
   destination   = "s3" # or destination = "extended_s3"
   s3_bucket_arn = ""
 }
 ```
@@ -721,7 +740,8 @@ The destination variable configured in module is mapped to firehose valid destination.
 ## Examples
 
 - [Direct Put](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/s3/direct-put-to-s3) - Creates an encrypted Kinesis firehose stream with Direct Put as source and S3 as destination.
-- [Kinesis Data Stream Source](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/s3/kinesis-to-s3-basic) - Creates a basic Kinesis Firehose stream with Kinesis data stream as source and s3 as destination .
+- [Kinesis Data Stream Source](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/s3/kinesis-to-s3-basic) - Creates a basic Kinesis Firehose stream with Kinesis data stream as source and s3 as destination.
+- [WAF Source](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/s3/waf-to-s3) - Creates a Kinesis Firehose Stream with AWS Web WAF as source and S3 as destination. - [S3 Destination Complete](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/s3/kinesis-to-s3-complete) - Creates a Kinesis Firehose Stream with all features enabled. - [Redshift](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/redshift/direct-put-to-redshift) - Creates a Kinesis Firehose Stream with redshift as destination. - [Redshift In VPC](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/redshift/redshift-in-vpc) - Creates a Kinesis Firehose Stream with redshift in VPC as destination. @@ -891,7 +911,7 @@ No modules. | [enable\_data\_format\_conversion](#input\_enable\_data\_format\_conversion) | Set it to true if you want to disable format conversion. | `bool` | `false` | no | | [enable\_destination\_log](#input\_enable\_destination\_log) | The CloudWatch Logging Options for the delivery stream | `bool` | `true` | no | | [enable\_dynamic\_partitioning](#input\_enable\_dynamic\_partitioning) | Enables or disables dynamic partitioning | `bool` | `false` | no | -| [enable\_kinesis\_source](#input\_enable\_kinesis\_source) | Set it to true to use kinesis data stream as source | `bool` | `false` | no | +| [enable\_kinesis\_source](#input\_enable\_kinesis\_source) | DEPRECATED: Use instead `input_source = "kinesis"` | `bool` | `false` | no | | [enable\_lambda\_transform](#input\_enable\_lambda\_transform) | Set it to true to enable data transformation with lambda | `bool` | `false` | no | | [enable\_s3\_backup](#input\_enable\_s3\_backup) | The Amazon S3 backup mode | `bool` | `false` | no | | [enable\_s3\_encryption](#input\_enable\_s3\_encryption) | Indicates if want use encryption in S3 bucket. | `bool` | `false` | no | @@ -906,6 +926,7 @@ No modules. 
| [http\_endpoint\_request\_configuration\_content\_encoding](#input\_http\_endpoint\_request\_configuration\_content\_encoding) | Kinesis Data Firehose uses the content encoding to compress the body of a request before sending the request to the destination | `string` | `"GZIP"` | no | | [http\_endpoint\_retry\_duration](#input\_http\_endpoint\_retry\_duration) | Total amount of seconds Firehose spends on retries. This duration starts after the initial attempt fails, It does not include the time periods during which Firehose waits for acknowledgment from the specified destination after each attempt | `number` | `300` | no | | [http\_endpoint\_url](#input\_http\_endpoint\_url) | The HTTP endpoint URL to which Kinesis Firehose sends your data | `string` | `null` | no | +| [input\_source](#input\_input\_source) | This is the kinesis firehose source | `string` | `"direct-put"` | no | | [kinesis\_source\_is\_encrypted](#input\_kinesis\_source\_is\_encrypted) | Indicates if Kinesis data stream source is encrypted | `bool` | `false` | no | | [kinesis\_source\_kms\_arn](#input\_kinesis\_source\_kms\_arn) | Kinesis Source KMS Key to add Firehose role to decrypt the records | `string` | `null` | no | | [kinesis\_source\_role\_arn](#input\_kinesis\_source\_role\_arn) | The ARN of the role that provides access to the source Kinesis stream | `string` | `null` | no | @@ -1010,6 +1031,16 @@ No modules. | [s3\_cross\_account\_bucket\_policy](#output\_s3\_cross\_account\_bucket\_policy) | Bucket Policy to S3 Bucket Destination when the bucket belongs to another account | +## Deprecation + +### Version >= 2.1.0 + +* variable `enable_kinesis_source` is deprecated. Use instead `input_source = "kinesis"`. + +## Upgrade + +- Version 1.x to 2.x Upgrade Guide [here](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/blob/main/UPGRADE-2.0.md) + ## License Apache 2 Licensed. 
See [LICENSE](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/LICENSE) for full details.
diff --git a/examples/s3/waf-to-s3/README.md b/examples/s3/waf-to-s3/README.md
new file mode 100644
index 0000000..69a1bf8
--- /dev/null
+++ b/examples/s3/waf-to-s3/README.md
@@ -0,0 +1,60 @@
+# Kinesis Firehose: WAF To S3
+
+Basic Configuration in this directory creates kinesis firehose stream with AWS WAF as source and S3 bucket as destination with a basic configuration.
+
+## Usage
+
+To run this example you need to execute:
+
+```bash
+$ terraform init
+$ terraform plan
+$ terraform apply
+```
+
+Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
+
+The WAF Web ACL created in this example is configured to send the WAF logs to the Kinesis Firehose stream, which delivers them to the S3 bucket.
+
+Note: a Kinesis Firehose stream used as a WAF logging destination must have a name starting with `aws-waf-logs-`; the module adds this prefix automatically when `input_source = "waf"`.
+
+
+## Requirements
+
+| Name | Version |
+|------|---------|
+| [terraform](#requirement\_terraform) | >= 0.13.1 |
+| [aws](#requirement\_aws) | ~> 5.0 |
+| [random](#requirement\_random) | ~> 3.0 |
+
+## Providers
+
+| Name | Version |
+|------|---------|
+| [aws](#provider\_aws) | ~> 5.0 |
+| [random](#provider\_random) | ~> 3.0 |
+
+## Modules
+
+| Name | Source | Version |
+|------|--------|---------|
+| [firehose](#module\_firehose) | ../../../ | n/a |
+| [waf](#module\_waf) | cloudposse/waf/aws | 1.2.0 |
+
+## Resources
+
+| Name | Type |
+|------|------|
+| [aws_s3_bucket.s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
+| [random_pet.this](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/pet) | resource |
+
+## Inputs
+
+| Name | Description | Type | Default | Required |
+|------|-------------|------|---------|:--------:| +| [name\_prefix](#input\_name\_prefix) | Name prefix to use in resources | `string` | `"waf-to-s3"` | no | + +## Outputs + +No outputs. + diff --git a/examples/s3/waf-to-s3/main.tf b/examples/s3/waf-to-s3/main.tf new file mode 100644 index 0000000..7e1aff6 --- /dev/null +++ b/examples/s3/waf-to-s3/main.tf @@ -0,0 +1,67 @@ +resource "random_pet" "this" { + length = 2 +} + +resource "aws_s3_bucket" "s3" { + bucket = "${var.name_prefix}-destination-bucket-${random_pet.this.id}" + force_destroy = true +} + +module "waf" { + source = "cloudposse/waf/aws" + version = "1.2.0" + + log_destination_configs = [module.firehose.kinesis_firehose_arn] + + visibility_config = { + cloudwatch_metrics_enabled = false + metric_name = "rules-example-metric" + sampled_requests_enabled = false + } + + managed_rule_group_statement_rules = [ + { + name = "AWS-AWSManagedRulesAdminProtectionRuleSet" + priority = 1 + + statement = { + name = "AWSManagedRulesAdminProtectionRuleSet" + vendor_name = "AWS" + } + + visibility_config = { + cloudwatch_metrics_enabled = true + sampled_requests_enabled = true + metric_name = "AWS-AWSManagedRulesAdminProtectionRuleSet" + } + } + ] + + context = { + enabled = true + namespace = "test" + tenant = null + environment = null + stage = null + name = null + delimiter = null + attributes = [] + tags = {} + additional_tag_map = {} + regex_replace_chars = null + label_order = [] + id_length_limit = null + label_key_case = null + label_value_case = null + descriptor_formats = {} + labels_as_tags = ["unset"] + } +} + +module "firehose" { + source = "../../../" + name = "${var.name_prefix}-delivery-stream" + input_source = "waf" + destination = "s3" + s3_bucket_arn = aws_s3_bucket.s3.arn +} diff --git a/examples/s3/waf-to-s3/outputs.tf b/examples/s3/waf-to-s3/outputs.tf new file mode 100644 index 0000000..d086fb7 --- /dev/null +++ b/examples/s3/waf-to-s3/outputs.tf @@ -0,0 +1,24 @@ +#output "kinesis_firehose_arn" 
{ +# description = "The ARN of the Kinesis Firehose Stream" +# value = module.firehose.kinesis_firehose_arn +#} +# +#output "kinesis_data_stream_name" { +# description = "The name of the Kinesis Firehose Stream" +# value = module.firehose.kinesis_firehose_name +#} +# +#output "kinesis_firehose_destination_id" { +# description = "The Destination id of the Kinesis Firehose Stream" +# value = module.firehose.kinesis_firehose_destination_id +#} +# +#output "kinesis_firehose_version_id" { +# description = "The Version id of the Kinesis Firehose Stream" +# value = module.firehose.kinesis_firehose_version_id +#} +# +#output "kinesis_firehose_role_arn" { +# description = "The ARN of the IAM role created for Kinesis Firehose Stream" +# value = module.firehose.kinesis_firehose_role_arn +#} diff --git a/examples/s3/waf-to-s3/variables.tf b/examples/s3/waf-to-s3/variables.tf new file mode 100644 index 0000000..8e619cc --- /dev/null +++ b/examples/s3/waf-to-s3/variables.tf @@ -0,0 +1,5 @@ +variable "name_prefix" { + description = "Name prefix to use in resources" + type = string + default = "waf-to-s3" +} diff --git a/examples/s3/waf-to-s3/versions.tf b/examples/s3/waf-to-s3/versions.tf new file mode 100644 index 0000000..ab58b32 --- /dev/null +++ b/examples/s3/waf-to-s3/versions.tf @@ -0,0 +1,14 @@ +terraform { + required_version = ">= 0.13.1" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 5.0" + } + random = { + source = "hashicorp/random" + version = "~> 3.0" + } + } +} diff --git a/iam.tf b/iam.tf index 75f222b..a3b8a40 100644 --- a/iam.tf +++ b/iam.tf @@ -3,7 +3,7 @@ locals { application_role_name = var.create_application_role ? 
coalesce(var.application_role_name, "${var.name}-application-role", "*") : null create_application_role_policy = var.create && var.create_application_role_policy add_backup_policies = local.enable_s3_backup && var.s3_backup_use_existing_role - add_kinesis_source_policy = var.create && var.create_role && var.enable_kinesis_source && var.kinesis_source_use_existing_role + add_kinesis_source_policy = var.create && var.create_role && local.is_kinesis_source && var.kinesis_source_use_existing_role add_lambda_policy = var.create && var.create_role && var.enable_lambda_transform add_s3_kms_policy = var.create && var.create_role && ((local.add_backup_policies && var.s3_backup_enable_encryption) || var.enable_s3_encryption) add_glue_policy = var.create && var.create_role && var.enable_data_format_conversion && var.data_format_conversion_glue_use_existing_role diff --git a/locals.tf b/locals.tf index 8c543fc..ca28211 100644 --- a/locals.tf +++ b/locals.tf @@ -3,6 +3,9 @@ locals { cw_log_group_name = "/aws/kinesisfirehose/${var.name}" cw_log_delivery_stream_name = "DestinationDelivery" cw_log_backup_stream_name = "BackupDelivery" + source = var.enable_kinesis_source ? "kinesis" : var.input_source # TODO: This should be removed when delete enable_kinesis_source variable (Next Major Version) + is_kinesis_source = local.source == "kinesis" ? true : false + is_waf_source = local.source == "waf" ? true : false destinations = { s3 : "extended_s3", extended_s3 : "extended_s3", @@ -138,7 +141,7 @@ locals { s3_backup_mode = local.use_backup_vars_in_s3_configuration ? local.backup_modes[local.destination][var.s3_backup_mode] : null # Kinesis source Stream - kinesis_source_stream_role = (var.enable_kinesis_source ? ( + kinesis_source_stream_role = (local.is_kinesis_source ? ( var.kinesis_source_use_existing_role ? 
local.firehose_role_arn : var.kinesis_source_role_arn ) : null) diff --git a/main.tf b/main.tf index d9cd4ea..3cc7179 100644 --- a/main.tf +++ b/main.tf @@ -9,11 +9,11 @@ data "aws_subnet" "elasticsearch" { resource "aws_kinesis_firehose_delivery_stream" "this" { count = var.create ? 1 : 0 - name = var.name + name = local.is_waf_source ? "aws-waf-logs-${var.name}" : var.name destination = local.destination dynamic "kinesis_source_configuration" { - for_each = var.enable_kinesis_source ? [1] : [] + for_each = local.is_kinesis_source ? [1] : [] content { kinesis_stream_arn = var.kinesis_source_stream_arn role_arn = local.kinesis_source_stream_role @@ -21,7 +21,7 @@ resource "aws_kinesis_firehose_delivery_stream" "this" { } dynamic "server_side_encryption" { - for_each = !var.enable_kinesis_source && var.enable_sse ? [1] : [] + for_each = !local.is_kinesis_source && var.enable_sse ? [1] : [] content { enabled = var.enable_sse key_arn = var.sse_kms_key_arn diff --git a/variables.tf b/variables.tf index f512267..180ac39 100644 --- a/variables.tf +++ b/variables.tf @@ -9,6 +9,16 @@ variable "name" { type = string } +variable "input_source" { + description = "This is the kinesis firehose source" + type = string + default = "direct-put" + validation { + error_message = "Please use a valid source!" + condition = contains(["direct-put", "kinesis", "waf"], var.input_source) + } +} + variable "destination" { description = "This is the destination to where the data is delivered" type = string @@ -346,7 +356,7 @@ variable "sse_kms_key_arn" { } variable "enable_kinesis_source" { - description = "Set it to true to use kinesis data stream as source" + description = "DEPRECATED: Use instead `input_source = \"kinesis\"`" type = bool default = false }