Commit

Prevents persistent diff with option to deploy lambda from s3 package (#83)

* Prevents persistent diff with option to deploy lambda from s3 package

Fixes #82

* Adds example for deploying lambda from s3 package

* Updates readme using terraform-docs

* Formats code using terraform fmt -recursive

* Includes version in key as change detection mechanism for lambda

* Adds a test to exercise the s3-hosted autoscaler binary

* Removes example variables in response to feedback

* Updates module version to 2.4.0

* Sets region in test config
lorengordon authored Mar 15, 2024
1 parent 4a701c3 commit 39f3809
Showing 7 changed files with 132 additions and 14 deletions.
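
For readers who only skim the commit message, here is a condensed, hedged sketch of the option this commit introduces. The module label, bucket, and key below are placeholders; the complete working configuration is in the examples/autoscaler-s3-package files further down in this diff.

module "workerpool" {
  source = "github.com/spacelift-io/terraform-aws-spacelift-workerpool-on-ec2?ref=v2.4.0"

  # ... the usual required arguments (configuration, security_groups, vpc_subnets,
  # spacelift_api_key_*, worker_pool_id) are omitted here for brevity ...

  enable_autoscaling = true

  # Deploy the autoscaler Lambda from a pre-staged S3 package instead of downloading
  # and zipping the binary locally on every plan. Embedding the autoscaler version in
  # the key is the change-detection mechanism mentioned above: bumping the version
  # changes the key, which forces the Lambda function to pick up the new package.
  autoscaler_s3_package = {
    bucket = "my-artifact-bucket"                             # placeholder bucket name
    key    = "ec2-workerpool-autoscaler/v0.3.0/bootstrap.zip" # placeholder key; version embedded
    # object_version = "..."  # optional: pin a specific S3 object version
  }
}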
11 changes: 10 additions & 1 deletion .spacelift/config.yml
@@ -1,5 +1,5 @@
 version: 1
-module_version: 2.3.5
+module_version: 2.4.0
 tests:
   - name: AMD64-based workerpool
     project_root: examples/amd64
@@ -24,3 +24,12 @@ tests:
       TF_VAR_spacelift_api_key_secret: "EXAMPLEf7anuofh4b6a4e43aplqt49099606de2mzbq4391tj1d3dc9872q23z8fvctu4kh"
       TF_VAR_spacelift_api_key_endpoint: "https://example.app.spacelift.io"
       TF_VAR_worker_pool_id: "01HBD5QZ932J8CEH5GTBM1QMAS"
+
+  - name: S3-hosted autoscaler
+    project_root: examples/autoscaler-s3-package
+    environment:
+      AWS_DEFAULT_REGION: "eu-west-1"
+      TF_VAR_spacelift_api_key_id: "EXAMPLE0VOYU49U485BMZZVAWXU59VOW2"
+      TF_VAR_spacelift_api_key_secret: "EXAMPLEf7anuofh4b6a4e43aplqt49099606de2mzbq4391tj1d3dc9872q23z8fvctu4kh"
+      TF_VAR_spacelift_api_key_endpoint: "https://example.app.spacelift.io"
+      TF_VAR_worker_pool_id: "01HBD5QZ932J8CEH5GTBM1QMAS"
3 changes: 2 additions & 1 deletion README.md
@@ -19,7 +19,7 @@ terraform {
 }
 
 module "my_workerpool" {
-  source = "github.com/spacelift-io/terraform-aws-spacelift-workerpool-on-ec2?ref=v2.3.5"
+  source = "github.com/spacelift-io/terraform-aws-spacelift-workerpool-on-ec2?ref=v2.4.0"
 
   configuration = <<-EOT
     export SPACELIFT_TOKEN="${var.worker_pool_config}"
@@ -111,6 +111,7 @@ $ make docs
 | <a name="input_additional_tags"></a> [additional\_tags](#input\_additional\_tags) | Additional tags to set on the resources | `map(string)` | `{}` | no |
 | <a name="input_ami_id"></a> [ami\_id](#input\_ami\_id) | ID of the Spacelift AMI. If left empty, the latest Spacelift AMI will be used. | `string` | `""` | no |
 | <a name="input_autoscaler_architecture"></a> [autoscaler\_architecture](#input\_autoscaler\_architecture) | Instruction set architecture of the autoscaler to use | `string` | `"amd64"` | no |
+| <a name="input_autoscaler_s3_package"></a> [autoscaler\_s3\_package](#input\_autoscaler\_s3\_package) | Configuration to retrieve autoscaler lambda package from s3 bucket | <pre>object({<br> bucket = string<br> key = string<br> object_version = optional(string)<br> })</pre> | `null` | no |
 | <a name="input_autoscaler_version"></a> [autoscaler\_version](#input\_autoscaler\_version) | Version of the autoscaler to deploy | `string` | `"v0.3.0"` | no |
 | <a name="input_autoscaling_max_create"></a> [autoscaling\_max\_create](#input\_autoscaling\_max\_create) | The maximum number of instances the utility is allowed to create in a single run | `number` | `1` | no |
 | <a name="input_autoscaling_max_terminate"></a> [autoscaling\_max\_terminate](#input\_autoscaling\_max\_terminate) | The maximum number of instances the utility is allowed to terminate in a single run | `number` | `1` | no |
31 changes: 19 additions & 12 deletions autoscaler.tf
@@ -1,5 +1,6 @@
 locals {
-  function_name = "${local.base_name}-ec2-autoscaler"
+  function_name  = "${local.base_name}-ec2-autoscaler"
+  use_s3_package = var.autoscaler_s3_package != null
 }
 
 resource "aws_ssm_parameter" "spacelift_api_key_secret" {
@@ -10,7 +11,7 @@ resource "aws_ssm_parameter" "spacelift_api_key_secret" {
 }
 
 resource "null_resource" "download" {
-  count = var.enable_autoscaling ? 1 : 0
+  count = var.enable_autoscaling && !local.use_s3_package ? 1 : 0
   triggers = {
     # Always re-download the archive file
    now = timestamp()
@@ -21,23 +22,29 @@ }
 }
 
 data "archive_file" "binary" {
-  count       = var.enable_autoscaling ? 1 : 0
+  count       = var.enable_autoscaling && !local.use_s3_package ? 1 : 0
   type        = "zip"
   source_file = "lambda/bootstrap"
   output_path = "ec2-workerpool-autoscaler_${var.autoscaler_version}.zip"
   depends_on  = [null_resource.download]
 }
 
 resource "aws_lambda_function" "autoscaler" {
-  count            = var.enable_autoscaling ? 1 : 0
-  filename         = data.archive_file.binary[count.index].output_path
-  source_code_hash = data.archive_file.binary[count.index].output_base64sha256
-  function_name    = local.function_name
-  role             = aws_iam_role.autoscaler[count.index].arn
-  handler          = "bootstrap"
-  runtime          = "provided.al2"
-  architectures    = [var.autoscaler_architecture == "amd64" ? "x86_64" : var.autoscaler_architecture]
-  timeout          = var.autoscaling_timeout
+  count = var.enable_autoscaling ? 1 : 0
+
+  filename         = !local.use_s3_package ? data.archive_file.binary[count.index].output_path : null
+  source_code_hash = !local.use_s3_package ? data.archive_file.binary[count.index].output_base64sha256 : null
+
+  s3_bucket         = local.use_s3_package ? var.autoscaler_s3_package.bucket : null
+  s3_key            = local.use_s3_package ? var.autoscaler_s3_package.key : null
+  s3_object_version = local.use_s3_package ? var.autoscaler_s3_package.object_version : null
+
+  function_name = local.function_name
+  role          = aws_iam_role.autoscaler[count.index].arn
+  handler       = "bootstrap"
+  runtime       = "provided.al2"
+  architectures = [var.autoscaler_architecture == "amd64" ? "x86_64" : var.autoscaler_architecture]
+  timeout       = var.autoscaling_timeout
 
   environment {
     variables = {
38 changes: 38 additions & 0 deletions examples/autoscaler-s3-package/main.tf
@@ -0,0 +1,38 @@
data "aws_vpc" "this" {
default = true
}

data "aws_security_group" "this" {
name = "default"
vpc_id = data.aws_vpc.this.id
}

data "aws_subnets" "this" {
filter {
name = "vpc-id"
values = [data.aws_vpc.this.id]
}
}

#### Spacelift worker pool ####

module "this" {
source = "../../"

configuration = <<-EOT
export SPACELIFT_TOKEN="<token-here>"
export SPACELIFT_POOL_PRIVATE_KEY="<private-key-here>"
EOT
security_groups = [data.aws_security_group.this.id]
spacelift_api_key_endpoint = var.spacelift_api_key_endpoint
spacelift_api_key_id = var.spacelift_api_key_id
spacelift_api_key_secret = var.spacelift_api_key_secret
vpc_subnets = data.aws_subnets.this.ids
worker_pool_id = var.worker_pool_id

enable_autoscaling = true
autoscaler_s3_package = {
bucket = aws_s3_bucket.autoscaler_binary.id
key = aws_s3_object.autoscaler_binary.id
}
}
20 changes: 20 additions & 0 deletions examples/autoscaler-s3-package/s3_package.tf
@@ -0,0 +1,20 @@
# This is just a toy example config that pulls the autoscaler binary from GitHub
# and hosts it in S3. This setup allows a simple plan/apply to work directly on
# the example. In actual usage, it would not be recommended to abuse the http data
# source and aws_s3_object resource in this manner. Instead, use an external process
# to host the binary in your own S3 bucket.

resource "aws_s3_bucket" "autoscaler_binary" {
bucket_prefix = "spacelift-autoscaler-example-"
}

data "http" "autoscaler_binary" {
url = "https://github.com/spacelift-io/ec2-workerpool-autoscaler/releases/download/${var.autoscaler_version}/ec2-workerpool-autoscaler_linux_${var.autoscaler_architecture}.zip"
}

resource "aws_s3_object" "autoscaler_binary" {
key = "releases/download/${var.autoscaler_version}/ec2-workerpool-autoscaler_linux_${var.autoscaler_architecture}.zip"
bucket = aws_s3_bucket.autoscaler_binary.id
content_base64 = data.http.autoscaler_binary.response_body_base64
content_type = "application/octet-stream"
}
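
The toy example above leaves object_version unset and relies on the version embedded in the key for change detection. A hedged variation, not part of this commit: enable bucket versioning and forward the object's version_id through the optional object_version field, so the Lambda is updated whenever a new object version lands under the same key. The resource names follow the example's; everything else is an assumption.

# Hypothetical variation: rely on S3 object versions instead of (or in addition to)
# embedding the autoscaler version in the key.
resource "aws_s3_bucket_versioning" "autoscaler_binary" {
  bucket = aws_s3_bucket.autoscaler_binary.id

  versioning_configuration {
    status = "Enabled"
  }
}

# In the module call, forward the specific object version:
#   autoscaler_s3_package = {
#     bucket         = aws_s3_bucket.autoscaler_binary.id
#     key            = aws_s3_object.autoscaler_binary.key
#     object_version = aws_s3_object.autoscaler_binary.version_id
#   }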
33 changes: 33 additions & 0 deletions examples/autoscaler-s3-package/variables.tf
@@ -0,0 +1,33 @@
variable "spacelift_api_key_id" {
type = string
description = "ID of the Spacelift API key to use"
}

variable "spacelift_api_key_secret" {
type = string
sensitive = true
description = "Secret corresponding to the Spacelift API key to use"
}

variable "spacelift_api_key_endpoint" {
type = string
description = "Full URL of the Spacelift API endpoint to use, eg. https://demo.app.spacelift.io"
}

variable "worker_pool_id" {
type = string
description = "ID (ULID) of the the worker pool."
}

variable "autoscaler_version" {
description = "Version of the autoscaler to deploy"
type = string
default = "v0.3.0"
nullable = false
}

variable "autoscaler_architecture" {
type = string
description = "Instruction set architecture of the autoscaler to use"
default = "amd64"
}
10 changes: 10 additions & 0 deletions variables.tf
@@ -206,3 +206,13 @@ variable "autoscaling_timeout" {
   description = "Timeout (in seconds) for a single autoscaling run. The more instances you have, the higher this should be."
   default     = 30
 }
+
+variable "autoscaler_s3_package" {
+  type = object({
+    bucket         = string
+    key            = string
+    object_version = optional(string)
+  })
+  description = "Configuration to retrieve autoscaler lambda package from s3 bucket"
+  default     = null
+}
