This repository has been archived by the owner on Jun 8, 2022. It is now read-only.

Support for lambda source from S3 #29

Open · wants to merge 2 commits into master
5 changes: 4 additions & 1 deletion README.md
@@ -85,7 +85,10 @@ function name unique per region, for example by setting
| publish | Whether to publish creation/change as new Lambda Function Version | string | `"false"` | no |
| reserved\_concurrent\_executions | The amount of reserved concurrent executions for this Lambda function | string | `"0"` | no |
| runtime | The runtime environment for the Lambda function | string | n/a | yes |
| source\_path | The source file or directory containing your Lambda source code | string | n/a | yes |
| s3\_bucket | The S3 bucket location containing the function's deployment package. Required when `source_from_s3` = `true`. This bucket must reside in the same AWS region where you are creating the Lambda function. | string | - | no |
| s3\_key | The S3 key of an object containing the function's deployment package. Required when `source_from_s3` = `true` | string | - | no |
| source\_from\_s3 | Set this to true if fetching the Lambda source code from S3. | string | `false` | no |
| source\_path | The source file or directory containing your Lambda source code. Ignored when `source_from_s3` = `true` | string | `` | no |
| tags | A mapping of tags | map | `<map>` | no |
| timeout | The amount of time your Lambda function has to run in seconds | string | `"10"` | no |
| vpc\_config | VPC configuration for the Lambda function | map | `<map>` | no |
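For illustration, wiring the new inputs together might look like the following minimal sketch (the module source path, bucket name, and object key are placeholders, not part of this PR; see also the new test under tests/s3-bucket-key):

module "lambda" {
  source = "path/to/terraform-aws-lambda" # placeholder module source

  function_name = "my-function"
  description   = "Lambda deployed from an S3 package"
  handler       = "lambda.lambda_handler"
  runtime       = "python3.6"
  timeout       = 30

  source_from_s3 = true
  s3_bucket      = "my-deployment-bucket" # placeholder bucket
  s3_key         = "lambda.zip"           # placeholder key
}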
7 changes: 5 additions & 2 deletions archive.tf
@@ -5,7 +5,8 @@ locals {
# Generates a filename for the zip archive based on the contents of the files
# in source_path. The filename will change when the source code changes.
data "external" "archive" {
program = ["python", "${path.module}/hash.py"]
count = "${var.source_from_s3 ? 0 : 1}"
program = ["${path.module}/hash.py"]

query = {
build_command = "${var.build_command}"
@@ -18,6 +19,7 @@ data "external" "archive" {

# Build the zip archive whenever the filename changes.
resource "null_resource" "archive" {
count = "${var.source_from_s3 ? 0 : 1}"
triggers {
filename = "${lookup(data.external.archive.result, "filename")}"
}
@@ -34,7 +36,8 @@ resource "null_resource" "archive" {
# deletes the Lambda function. If the file is rebuilt here, the build
# output is unfortunately invisible.
data "external" "built" {
program = ["python", "${path.module}/built.py"]
count = "${var.source_from_s3 ? 0 : 1}"
program = ["${path.module}/built.py"]

query = {
build_command = "${lookup(data.external.archive.result, "build_command")}"
35 changes: 31 additions & 4 deletions lambda.tf
@@ -1,5 +1,5 @@
resource "aws_lambda_function" "lambda" {
count = "${! var.attach_vpc_config && ! var.attach_dead_letter_config ? 1 : 0}"
count = "${! var.attach_vpc_config && ! var.attach_dead_letter_config && ! var.source_from_s3 ? 1 : 0}"

# ----------------------------------------------------------------------------
# IMPORTANT:
@@ -33,13 +33,40 @@ resource "aws_lambda_function" "lambda" {
environment = ["${slice( list(var.environment), 0, length(var.environment) == 0 ? 0 : 1 )}"]
}

resource "aws_lambda_function" "lambda_s3" {
count = "${var.source_from_s3 && ! var.attach_vpc_config && ! var.attach_dead_letter_config ? 1 : 0}"

function_name = "${var.function_name}"
description = "${var.description}"
role = "${aws_iam_role.lambda.arn}"
handler = "${var.handler}"
memory_size = "${var.memory_size}"
reserved_concurrent_executions = "${var.reserved_concurrent_executions}"
runtime = "${var.runtime}"
timeout = "${var.timeout}"
tags = "${var.tags}"

s3_bucket = "${var.s3_bucket}"
s3_key = "${var.s3_key}"

# The aws_lambda_function resource has a schema for the environment
# variable, where the only acceptable values are:
# a. Undefined
# b. An empty list
# c. A list containing 1 element: a map with a specific schema
# Use slice to get option "b" or "c" depending on whether a non-empty
# value was passed into this module.

environment = ["${slice( list(var.environment), 0, length(var.environment) == 0 ? 0 : 1 )}"]
}

# The vpc_config and dead_letter_config variables are lists of maps which,
# due to a bug or missing feature of Terraform, do not work with computed
# values. So here is a copy and paste of the above resource for every
# combination of these variables.

resource "aws_lambda_function" "lambda_with_dl" {
count = "${var.attach_dead_letter_config && ! var.attach_vpc_config ? 1 : 0}"
count = "${var.attach_dead_letter_config && ! var.attach_vpc_config && ! var.source_from_s3 ? 1 : 0}"

dead_letter_config {
target_arn = "${var.dead_letter_config["target_arn"]}"
@@ -66,7 +93,7 @@ resource "aws_lambda_function" "lambda_with_dl" {
}

resource "aws_lambda_function" "lambda_with_vpc" {
count = "${var.attach_vpc_config && ! var.attach_dead_letter_config ? 1 : 0}"
count = "${var.attach_vpc_config && ! var.attach_dead_letter_config && ! var.source_from_s3 ? 1 : 0}"

vpc_config {
security_group_ids = ["${var.vpc_config["security_group_ids"]}"]
@@ -94,7 +121,7 @@ resource "aws_lambda_function" "lambda_with_vpc" {
}

resource "aws_lambda_function" "lambda_with_dl_and_vpc" {
count = "${var.attach_dead_letter_config && var.attach_vpc_config ? 1 : 0}"
count = "${var.attach_dead_letter_config && var.attach_vpc_config && ! var.source_from_s3 ? 1 : 0}"

dead_letter_config {
target_arn = "${var.dead_letter_config["target_arn"]}"
4 changes: 2 additions & 2 deletions outputs.tf
@@ -1,11 +1,11 @@
output "function_arn" {
description = "The ARN of the Lambda function"
value = "${element(concat(aws_lambda_function.lambda.*.arn, aws_lambda_function.lambda_with_dl.*.arn, aws_lambda_function.lambda_with_vpc.*.arn, aws_lambda_function.lambda_with_dl_and_vpc.*.arn), 0)}"
value = "${element(concat(aws_lambda_function.lambda.*.arn, aws_lambda_function.lambda_s3.*.arn, aws_lambda_function.lambda_with_dl.*.arn, aws_lambda_function.lambda_with_vpc.*.arn, aws_lambda_function.lambda_with_dl_and_vpc.*.arn), 0)}"
}

output "function_name" {
description = "The name of the Lambda function"
value = "${element(concat(aws_lambda_function.lambda.*.function_name, aws_lambda_function.lambda_with_dl.*.function_name, aws_lambda_function.lambda_with_vpc.*.function_name, aws_lambda_function.lambda_with_dl_and_vpc.*.function_name), 0)}"
value = "${element(concat(aws_lambda_function.lambda.*.function_name, aws_lambda_function.lambda_s3.*.function_name, aws_lambda_function.lambda_with_dl.*.function_name, aws_lambda_function.lambda_with_vpc.*.function_name, aws_lambda_function.lambda_with_dl_and_vpc.*.function_name), 0)}"
}

output "function_qualified_arn" {
2 changes: 2 additions & 0 deletions tests/s3-bucket-key/lambda.py
@@ -0,0 +1,2 @@
def lambda_handler(event, context):
return 'test passed'
46 changes: 46 additions & 0 deletions tests/s3-bucket-key/main.tf
@@ -0,0 +1,46 @@
terraform {
backend "local" {
path = "terraform.tfstate"
}
}

provider "aws" {
region = "eu-west-1"
}

resource "random_id" "name" {
byte_length = 6
prefix = "terraform-aws-lambda-s3-"
}

resource "aws_s3_bucket" "b" {
bucket = "${random_id.name.hex}"
acl = "private"
}

data "archive_file" "l" {
type = "zip"
source_file = "${path.module}/lambda.py"
output_path = "${path.module}/lambda.zip"
}

resource "aws_s3_bucket_object" "o" {
bucket = "${aws_s3_bucket.b.id}"
key = "lambda.zip"
source = "${path.module}/lambda.zip"
}

module "lambda" {
source = "../../"

function_name = "terraform-aws-lambda-test-s3-bucket-key"
description = "Test S3 bucket and key in terraform-aws-lambda"
handler = "lambda.lambda_handler"
memory_size = 128
reserved_concurrent_executions = 3
runtime = "python3.6"
timeout = 30

source_from_s3 = true
s3_bucket = "${aws_s3_bucket.b.id}"
s3_key = "${aws_s3_bucket_object.o.id}"
}
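If useful, the test configuration could also surface the module outputs touched in outputs.tf to confirm the S3-sourced function was created; a small optional sketch, not part of this PR:

output "function_arn" {
  value = "${module.lambda.function_arn}"
}

output "function_name" {
  value = "${module.lambda.function_name}"
}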
21 changes: 20 additions & 1 deletion variables.tf
@@ -32,8 +32,9 @@ variable "timeout" {
}

variable "source_path" {
description = "The source file or directory containing your Lambda source code"
description = "The source file or directory containing your Lambda source code. Ignored when `source_from_s3` = `true`"
type = "string"
default = ""
}

variable "build_command" {
@@ -84,6 +85,24 @@ variable "attach_vpc_config" {
default = false
}

variable "source_from_s3" {
description = "Set this to true if fetching the Lambda source code from S3."
type = "string"
default = false
}

variable "s3_bucket" {
description = "The S3 bucket location containing the function's deployment package. Required when `source_from_s3` = `true`. This bucket must reside in the same AWS region where you are creating the Lambda function."
type = "string"
default = ""
}

variable "s3_key" {
description = "The S3 key of an object containing the function's deployment package. Required when `source_from_s3` = `true`"
type = "string"
default = ""
}

variable "tags" {
description = "A mapping of tags"
type = "map"