diff --git a/README.md b/README.md
index 3ed1d52..56af852 100644
--- a/README.md
+++ b/README.md
@@ -85,7 +85,10 @@ function name unique per region, for example by setting
 | publish | Whether to publish creation/change as new Lambda Function Version | string | `"false"` | no |
 | reserved\_concurrent\_executions | The amount of reserved concurrent executions for this Lambda function | string | `"0"` | no |
 | runtime | The runtime environment for the Lambda function | string | n/a | yes |
-| source\_path | The source file or directory containing your Lambda source code | string | n/a | yes |
+| s3\_bucket | The S3 bucket location containing the function's deployment package. Required when `source_from_s3` = `true`. This bucket must reside in the same AWS region where you are creating the Lambda function. | string | - | no |
+| s3\_key | The S3 key of an object containing the function's deployment package. Required when `source_from_s3` = `true` | string | - | no |
+| source\_from\_s3 | Set this to true to fetch the Lambda source code from S3. | string | `false` | no |
+| source\_path | The source file or directory containing your Lambda source code. Ignored when `source_from_s3` = `true` | string | `` | no |
 | tags | A mapping of tags | map | `` | no |
 | timeout | The amount of time your Lambda function had to run in seconds | string | `"10"` | no |
 | vpc\_config | VPC configuration for the Lambda function | map | `` | no |
diff --git a/archive.tf b/archive.tf
index 76596b3..10ff18e 100644
--- a/archive.tf
+++ b/archive.tf
@@ -5,7 +5,8 @@ locals {
 # Generates a filename for the zip archive based on the contents of the files
 # in source_path. The filename will change when the source code changes.
 data "external" "archive" {
-  program = ["python", "${path.module}/hash.py"]
+  count   = "${var.source_from_s3 ? 0 : 1}"
+  program = ["${path.module}/hash.py"]
 
   query = {
     build_command = "${var.build_command}"
@@ -18,6 +19,7 @@
 
 # Build the zip archive whenever the filename changes.
 resource "null_resource" "archive" {
+  count = "${var.source_from_s3 ? 0 : 1}"
   triggers {
     filename = "${lookup(data.external.archive.result, "filename")}"
   }
@@ -34,7 +36,8 @@
 # deletes the Lambda function. If the file is rebuilt here, the build
 # output is unfortunately invisible.
 data "external" "built" {
-  program = ["python", "${path.module}/built.py"]
+  count   = "${var.source_from_s3 ? 0 : 1}"
+  program = ["${path.module}/built.py"]
 
   query = {
     build_command = "${lookup(data.external.archive.result, "build_command")}"
diff --git a/lambda.tf b/lambda.tf
index 94591bc..546f6bb 100644
--- a/lambda.tf
+++ b/lambda.tf
@@ -1,5 +1,5 @@
 resource "aws_lambda_function" "lambda" {
-  count = "${! var.attach_vpc_config && ! var.attach_dead_letter_config ? 1 : 0}"
+  count = "${! var.attach_vpc_config && ! var.attach_dead_letter_config && ! var.source_from_s3 ? 1 : 0}"
 
   # ----------------------------------------------------------------------------
   # IMPORTANT:
@@ -33,13 +33,40 @@
 
   environment = ["${slice( list(var.environment), 0, length(var.environment) == 0 ? 0 : 1 )}"]
 }
 
+resource "aws_lambda_function" "lambda_s3" {
+  count = "${var.source_from_s3 && ! var.attach_vpc_config && ! var.attach_dead_letter_config ? 1 : 0}"
1 : 0}" + + function_name = "${var.function_name}" + description = "${var.description}" + role = "${aws_iam_role.lambda.arn}" + handler = "${var.handler}" + memory_size = "${var.memory_size}" + reserved_concurrent_executions = "${var.reserved_concurrent_executions}" + runtime = "${var.runtime}" + timeout = "${var.timeout}" + tags = "${var.tags}" + + s3_bucket = "${var.s3_bucket}" + s3_key = "${var.s3_key}" + + # The aws_lambda_function resource has a schema for the environment + # variable, where the only acceptable values are: + # a. Undefined + # b. An empty list + # c. A list containing 1 element: a map with a specific schema + # Use slice to get option "b" or "c" depending on whether a non-empty + # value was passed into this module. + + environment = ["${slice( list(var.environment), 0, length(var.environment) == 0 ? 0 : 1 )}"] +} + # The vpc_config and dead_letter_config variables are lists of maps which, # due to a bug or missing feature of Terraform, do not work with computed # values. So here is a copy and paste of of the above resource for every # combination of these variables. resource "aws_lambda_function" "lambda_with_dl" { - count = "${var.attach_dead_letter_config && ! var.attach_vpc_config ? 1 : 0}" + count = "${var.attach_dead_letter_config && ! var.attach_vpc_config && ! var.source_from_s3 ? 1 : 0}" dead_letter_config { target_arn = "${var.dead_letter_config["target_arn"]}" @@ -66,7 +93,7 @@ resource "aws_lambda_function" "lambda_with_dl" { } resource "aws_lambda_function" "lambda_with_vpc" { - count = "${var.attach_vpc_config && ! var.attach_dead_letter_config ? 1 : 0}" + count = "${var.attach_vpc_config && ! var.attach_dead_letter_config && ! var.source_from_s3 ? 1 : 0}" vpc_config { security_group_ids = ["${var.vpc_config["security_group_ids"]}"] @@ -94,7 +121,7 @@ resource "aws_lambda_function" "lambda_with_vpc" { } resource "aws_lambda_function" "lambda_with_dl_and_vpc" { - count = "${var.attach_dead_letter_config && var.attach_vpc_config ? 1 : 0}" + count = "${var.attach_dead_letter_config && var.attach_vpc_config && ! var.source_from_s3 ? 
1 : 0}" dead_letter_config { target_arn = "${var.dead_letter_config["target_arn"]}" diff --git a/outputs.tf b/outputs.tf index 7602f3e..1fe1d1f 100644 --- a/outputs.tf +++ b/outputs.tf @@ -1,11 +1,11 @@ output "function_arn" { description = "The ARN of the Lambda function" - value = "${element(concat(aws_lambda_function.lambda.*.arn, aws_lambda_function.lambda_with_dl.*.arn, aws_lambda_function.lambda_with_vpc.*.arn, aws_lambda_function.lambda_with_dl_and_vpc.*.arn), 0)}" + value = "${element(concat(aws_lambda_function.lambda.*.arn, aws_lambda_function.lambda_s3.*.arn, aws_lambda_function.lambda_with_dl.*.arn, aws_lambda_function.lambda_with_vpc.*.arn, aws_lambda_function.lambda_with_dl_and_vpc.*.arn), 0)}" } output "function_name" { description = "The name of the Lambda function" - value = "${element(concat(aws_lambda_function.lambda.*.function_name, aws_lambda_function.lambda_with_dl.*.function_name, aws_lambda_function.lambda_with_vpc.*.function_name, aws_lambda_function.lambda_with_dl_and_vpc.*.function_name), 0)}" + value = "${element(concat(aws_lambda_function.lambda.*.function_name, aws_lambda_function.lambda_s3.*.arn, aws_lambda_function.lambda_with_dl.*.function_name, aws_lambda_function.lambda_with_vpc.*.function_name, aws_lambda_function.lambda_with_dl_and_vpc.*.function_name), 0)}" } output "function_qualified_arn" { diff --git a/tests/s3-bucket-key/lambda.py b/tests/s3-bucket-key/lambda.py new file mode 100644 index 0000000..7a16f44 --- /dev/null +++ b/tests/s3-bucket-key/lambda.py @@ -0,0 +1,2 @@ +def lambda_handler(event, context): + return 'test passed' diff --git a/tests/s3-bucket-key/main.tf b/tests/s3-bucket-key/main.tf new file mode 100644 index 0000000..efa1560 --- /dev/null +++ b/tests/s3-bucket-key/main.tf @@ -0,0 +1,46 @@ +terraform { + backend "local" { + path = "terraform.tfstate" + } +} + +provider "aws" { + region = "eu-west-1" +} + +resource "random_id" "name" { + byte_length = 6 + prefix = "terraform-aws-lambda-s3-" +} + +resource "aws_s3_bucket" "b" { + bucket = "${random_id.name.hex}" + acl = "private" +} +data "archive_file" "l" { + type = "zip" + source_file = "${path.module}/lambda.py" + output_path = "${path.module}/lambda.zip" +} + +resource "aws_s3_bucket_object" "o" { + bucket = "${aws_s3_bucket.b.id}" + key = "lambda.zip" + source = "${path.module}/lambda.zip" +} + +module "lambda" { + source = "../../" + + function_name = "terraform-aws-lambda-test-s3-bucket-key" + description = "Test S3 bucket and key in terraform-aws-lambda" + handler = "lambda.lambda_handler" + memory_size = 128 + reserved_concurrent_executions = 3 + runtime = "python3.6" + timeout = 30 + + source_from_s3 = true + s3_bucket = "${aws_s3_bucket.b.id}" + s3_key = "${aws_s3_bucket_object.o.id}" +} diff --git a/variables.tf b/variables.tf index c22af6c..0311456 100644 --- a/variables.tf +++ b/variables.tf @@ -32,8 +32,9 @@ variable "timeout" { } variable "source_path" { - description = "The source file or directory containing your Lambda source code" + description = "The source file or directory containing your Lambda source code. Ignored when `source_from_s3` = `true`" type = "string" + default = "" } variable "build_command" { @@ -84,6 +85,24 @@ variable "attach_vpc_config" { default = false } +variable "source_from_s3" { + description = "Set this to true if fetching the Lambda source code from S3." + type = "string" + default = false +} + +variable "s3_bucket" { + description = "The S3 bucket location containing the function's deployment package. 
+  type        = "string"
+  default     = ""
+}
+
+variable "s3_key" {
+  description = "The S3 key of an object containing the function's deployment package. Required when `source_from_s3` = `true`"
+  type        = "string"
+  default     = ""
+}
+
 variable "tags" {
   description = "A mapping of tags"
   type        = "map"