Add dry_run and fix s3 backend merging behaviour #50

Merged · 16 commits · Mar 8, 2024
4 changes: 2 additions & 2 deletions .github/workflows/build.yml
@@ -38,10 +38,10 @@ jobs:
        uses: actions/checkout@v3
      - name: Pull LocalStack Docker image
        run: docker pull localstack/localstack &
      - name: Set up Python 3.11
      - name: Set up Python 3.12
        uses: actions/setup-python@v2
        with:
          python-version: '3.11'
          python-version: '3.12'
      - name: Install dependencies
        run: make install
      - name: Run code linter
2 changes: 2 additions & 0 deletions README.md
@@ -24,6 +24,7 @@ pip install terraform-local
## Configurations

The following environment variables can be configured:
* `DRY_RUN`: Generate the override file without invoking Terraform (see the example below this list)
* `TF_CMD`: Terraform command to call (default: `terraform`)
* `AWS_ENDPOINT_URL`: hostname and port of the target LocalStack instance
* `LOCALSTACK_HOSTNAME`: __(Deprecated)__ host name of the target LocalStack instance
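
A dry run can be scripted roughly as follows (illustrative sketch; assumes `terraform-local` is installed so that `tflocal` is on `PATH`, and that the working directory contains a `*.tf` file):

```python
import os
import subprocess

# DRY_RUN=1 makes tflocal write localstack_providers_override.tf
# without running the proxied Terraform command (init/plan/apply/destroy).
env = {**os.environ, "DRY_RUN": "1"}
subprocess.run(["tflocal", "plan"], env=env, check=True)

# Inspect the generated override file.
with open("localstack_providers_override.tf") as fp:
    print(fp.read())
```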
@@ -48,6 +49,7 @@ please refer to the man pages of `terraform --help`.

## Change Log

* v0.18.0: Add `DRY_RUN` and patch S3 backend endpoints
* v0.17.1: Add `packaging` module to install requirements
* v0.17.0: Add option to use new endpoints S3 backend options
* v0.16.1: Update Setuptools to exclude tests during packaging
95 changes: 76 additions & 19 deletions bin/tflocal
@@ -27,6 +27,7 @@ if os.path.isdir(os.path.join(PARENT_FOLDER, ".venv")):
from localstack_client import config # noqa: E402
import hcl2 # noqa: E402

DRY_RUN = str(os.environ.get("DRY_RUN")).strip().lower() in ["1", "true"]
DEFAULT_REGION = "us-east-1"
DEFAULT_ACCESS_KEY = "test"
AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL")
@@ -35,6 +36,7 @@ LOCALHOST_HOSTNAME = "localhost.localstack.cloud"
S3_HOSTNAME = os.environ.get("S3_HOSTNAME") or f"s3.{LOCALHOST_HOSTNAME}"
USE_EXEC = str(os.environ.get("USE_EXEC")).strip().lower() in ["1", "true"]
TF_CMD = os.environ.get("TF_CMD") or "terraform"
TF_PROXIED_CMDS = ("init", "plan", "apply", "destroy")
LS_PROVIDERS_FILE = os.environ.get("LS_PROVIDERS_FILE") or "localstack_providers_override.tf"
LOCALSTACK_HOSTNAME = urlparse(AWS_ENDPOINT_URL).hostname or os.environ.get("LOCALSTACK_HOSTNAME") or "localhost"
EDGE_PORT = int(urlparse(AWS_ENDPOINT_URL).port or os.environ.get("EDGE_PORT") or 4566)
@@ -153,12 +155,15 @@ def create_provider_config_file(provider_aliases=None):

    # write temporary config file
    providers_file = get_providers_file_path()
    if os.path.exists(providers_file):
        msg = f"Providers override file {providers_file} already exists - please delete it first"
        raise Exception(msg)
    write_provider_config_file(providers_file, tf_config)

    return providers_file


def write_provider_config_file(providers_file, tf_config):
    """Write provider config into file"""
    with open(providers_file, mode="w") as fp:
        fp.write(tf_config)
    return providers_file


def get_providers_file_path() -> str:
@@ -186,9 +191,12 @@ def determine_provider_aliases() -> list:

def generate_s3_backend_config() -> str:
    """Generate an S3 `backend {..}` block with local endpoints, if configured"""
    is_tf_legacy = TF_VERSION < version.Version("1.6")
    backend_config = None
    tf_files = parse_tf_files()
    for obj in tf_files.values():
    for filename, obj in tf_files.items():
        if LS_PROVIDERS_FILE == filename:
            continue
        tf_configs = ensure_list(obj.get("terraform", []))
        for tf_config in tf_configs:
            backend_config = ensure_list(tf_config.get("backend"))
@@ -199,6 +207,13 @@
    if not backend_config:
        return ""

    legacy_endpoint_mappings = {
        "endpoint": "s3",
        "iam_endpoint": "iam",
        "sts_endpoint": "sts",
        "dynamodb_endpoint": "dynamodb",
    }

    configs = {
        # note: default values, updated by `backend_config` further below...
        "bucket": "tf-test-state",
@@ -213,15 +228,29 @@
"dynamodb": get_service_endpoint("dynamodb"),
},
}
# Merge in legacy endpoint configs if not existing already
if is_tf_legacy and backend_config.get("endpoints"):
print("Warning: Unsupported backend option(s) detected (`endpoints`). Please make sure you always use the corresponding options to your Terraform version.")
exit(1)
for legacy_endpoint, endpoint in legacy_endpoint_mappings.items():
if legacy_endpoint in backend_config and (not backend_config.get("endpoints") or endpoint not in backend_config["endpoints"]):
if not backend_config.get("endpoints"):
backend_config["endpoints"] = {}
backend_config["endpoints"].update({endpoint: backend_config[legacy_endpoint]})
# Add any missing default endpoints
if backend_config.get("endpoints"):
backend_config["endpoints"] = {
k: backend_config["endpoints"].get(k) or v
for k, v in configs["endpoints"].items()}
configs.update(backend_config)
get_or_create_bucket(configs["bucket"])
get_or_create_ddb_table(configs["dynamodb_table"], region=configs["region"])
if not DRY_RUN:
get_or_create_bucket(configs["bucket"])
get_or_create_ddb_table(configs["dynamodb_table"], region=configs["region"])
result = TF_S3_BACKEND_CONFIG
for key, value in configs.items():
if isinstance(value, bool):
value = str(value).lower()
elif isinstance(value, dict):
is_tf_legacy = not (TF_VERSION.major > 1 or (TF_VERSION.major == 1 and TF_VERSION.minor > 5))
if key == "endpoints" and is_tf_legacy:
value = textwrap.indent(
text=textwrap.dedent(f"""\
@@ -241,6 +270,21 @@
    return result


def check_override_file(providers_file: str) -> None:
    """Check whether the providers override file already exists"""
    if os.path.exists(providers_file):
        msg = f"Providers override file {providers_file} already exists"
        err_msg = msg + " - please delete it first, exiting..."
        if DRY_RUN:
            msg += ". File will be overwritten."
            print(msg)
            print("\tOnly 'yes' will be accepted to approve.")
            if input("\tEnter a value: ") == "yes":
                return
        print(err_msg)
        exit(1)

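# Note: the prompt above can be driven non-interactively by piping "yes" to
# stdin -- an illustrative sketch (assumes `tflocal` is on PATH; not part of
# this diff):
#
#   import os, subprocess
#   subprocess.run(["tflocal", "plan"], input=b"yes\n",
#                  env={**os.environ, "DRY_RUN": "1"}, check=True)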

# ---
# AWS CLIENT UTILS
# ---
@@ -357,6 +401,11 @@ def get_or_create_ddb_table(table_name: str, region: str = None):
# ---
# TF UTILS
# ---
def is_override_needed(args) -> bool:
    return any(cmd in args for cmd in TF_PROXIED_CMDS)


def parse_tf_files() -> dict:
"""Parse the local *.tf files and return a dict of <filename> -> <resource_dict>"""
@@ -432,18 +481,26 @@ def main():
print(f"Unable to determine version. See error message for details: {e}")
exit(1)

# create TF provider config file
providers = determine_provider_aliases()
config_file = create_provider_config_file(providers)
if is_override_needed(sys.argv[1:]):
check_override_file(get_providers_file_path())

# call terraform command
try:
if USE_EXEC:
run_tf_exec(cmd, env)
else:
run_tf_subprocess(cmd, env)
finally:
os.remove(config_file)
# create TF provider config file
providers = determine_provider_aliases()
config_file = create_provider_config_file(providers)
else:
config_file = None

# call terraform command if not dry-run or any of the commands
if not DRY_RUN or not is_override_needed(sys.argv[1:]):
try:
if USE_EXEC:
run_tf_exec(cmd, env)
else:
run_tf_subprocess(cmd, env)
finally:
# fall through if haven't set during dry-run
if config_file:
os.remove(config_file)


if __name__ == "__main__":
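The endpoint merging implemented above can be summarized in isolation roughly as follows (a minimal sketch; `merge_backend_endpoints` is a hypothetical helper, not code from this PR): legacy top-level options such as `endpoint` or `dynamodb_endpoint` are folded into the newer `endpoints` map unless the corresponding service is already set there, and any services still missing fall back to the LocalStack defaults.

```python
LEGACY_ENDPOINT_MAPPINGS = {
    "endpoint": "s3",
    "iam_endpoint": "iam",
    "sts_endpoint": "sts",
    "dynamodb_endpoint": "dynamodb",
}


def merge_backend_endpoints(backend_config: dict, defaults: dict) -> dict:
    """Fold legacy endpoint options into a new-style `endpoints` map."""
    endpoints = dict(backend_config.get("endpoints") or {})
    for legacy_key, service in LEGACY_ENDPOINT_MAPPINGS.items():
        # a legacy option never overrides an explicit new-style entry
        if legacy_key in backend_config and service not in endpoints:
            endpoints[service] = backend_config[legacy_key]
    # fill the remaining gaps with the LocalStack defaults
    return {svc: endpoints.get(svc) or url for svc, url in defaults.items()}


# Example: the legacy `endpoint` value is kept for s3; iam/dynamodb fall back.
print(merge_backend_endpoints(
    {"endpoint": "http://localhost:4566",
     "endpoints": {"sts": "http://localhost:4566"}},
    {"s3": "http://s3.localhost.localstack.cloud:4566",
     "sts": "http://localhost:4566",
     "iam": "http://localhost:4566",
     "dynamodb": "http://localhost:4566"},
))
```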
3 changes: 2 additions & 1 deletion setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = terraform-local
version = 0.17.1
version = 0.18.0
url = https://github.com/localstack/terraform-local
author = LocalStack Team
author_email = [email protected]
@@ -15,6 +15,7 @@ classifiers =
    Programming Language :: Python :: 3.9
    Programming Language :: Python :: 3.10
    Programming Language :: Python :: 3.11
    Programming Language :: Python :: 3.12
    License :: OSI Approved :: Apache Software License
    Topic :: Software Development :: Testing

149 changes: 145 additions & 4 deletions tests/test_apply.py
@@ -3,10 +3,16 @@
import subprocess
import tempfile
import uuid
import json
from typing import Dict, Generator
from shutil import rmtree
from packaging import version


import boto3
import pytest
import hcl2


THIS_PATH = os.path.abspath(os.path.dirname(__file__))
ROOT_PATH = os.path.join(THIS_PATH, "..")
@@ -193,19 +199,154 @@ def test_s3_backend():
    assert result["ResponseMetadata"]["HTTPStatusCode"] == 200


def test_dry_run(monkeypatch):
    monkeypatch.setenv("DRY_RUN", "1")
    state_bucket = "tf-state-dry-run"
    state_table = "tf-state-dry-run"
    bucket_name = "bucket.dry-run"
    config = """
    terraform {
      backend "s3" {
        bucket = "%s"
        key = "terraform.tfstate"
        dynamodb_table = "%s"
        region = "us-east-2"
        skip_credentials_validation = true
      }
    }
    resource "aws_s3_bucket" "test-bucket" {
      bucket = "%s"
    }
    """ % (state_bucket, state_table, bucket_name)
    is_legacy_tf = is_legacy_tf_version(get_version())

    temp_dir = deploy_tf_script(config, cleanup=False, user_input="yes")
    override_file = os.path.join(temp_dir, "localstack_providers_override.tf")
    assert check_override_file_exists(override_file)

    assert check_override_file_content(override_file, is_legacy=is_legacy_tf)

    # assert that the state bucket has NOT been created during the dry run
    s3 = client("s3", region_name="us-east-2")

    with pytest.raises(s3.exceptions.NoSuchBucket):
        s3.list_objects(Bucket=state_bucket)

    # assert that the DynamoDB table for state locks has NOT been created
    dynamodb = client("dynamodb", region_name="us-east-2")
    with pytest.raises(dynamodb.exceptions.ResourceNotFoundException):
        dynamodb.describe_table(TableName=state_table)

    # assert that the S3 bucket resource has NOT been created
    s3 = client("s3")
    with pytest.raises(s3.exceptions.ClientError):
        s3.head_bucket(Bucket=bucket_name)


@pytest.mark.parametrize("endpoints", [
    '',
    'endpoint = "http://s3-localhost.localstack.cloud:4566"',
    'endpoints = { "s3": "http://s3-localhost.localstack.cloud:4566" }',
    '''
    endpoint = "http://localhost-s3.localstack.cloud:4566"
    endpoints = { "s3": "http://s3-localhost.localstack.cloud:4566" }
    '''])
def test_s3_backend_endpoints_merge(monkeypatch, endpoints: str):
    monkeypatch.setenv("DRY_RUN", "1")
    state_bucket = "tf-state-merge"
    state_table = "tf-state-merge"
    bucket_name = "bucket.merge"
    config = """
    terraform {
      backend "s3" {
        bucket = "%s"
        key = "terraform.tfstate"
        dynamodb_table = "%s"
        region = "us-east-2"
        skip_credentials_validation = true
        %s
      }
    }
    resource "aws_s3_bucket" "test-bucket" {
      bucket = "%s"
    }
    """ % (state_bucket, state_table, endpoints, bucket_name)
    is_legacy_tf = is_legacy_tf_version(get_version())
    if is_legacy_tf and endpoints not in ("", 'endpoint = "http://s3-localhost.localstack.cloud:4566"'):
        with pytest.raises(subprocess.CalledProcessError):
            deploy_tf_script(config, user_input="yes")
    else:
        temp_dir = deploy_tf_script(config, cleanup=False, user_input="yes")
        override_file = os.path.join(temp_dir, "localstack_providers_override.tf")
        assert check_override_file_exists(override_file)
        assert check_override_file_content(override_file, is_legacy=is_legacy_tf)
        rmtree(temp_dir)


def check_override_file_exists(override_file):
    return os.path.isfile(override_file)


def check_override_file_content(override_file, is_legacy: bool = False):
    legacy_options = (
        "endpoint",
        "iam_endpoint",
        "dynamodb_endpoint",
        "sts_endpoint",
    )
    new_options = (
        "iam",
        "dynamodb",
        "s3",
        "sso",
        "sts",
    )
    try:
        with open(override_file, "r") as fp:
            result = hcl2.load(fp)
        result = result["terraform"][0]["backend"][0]["s3"]
    except Exception as e:
        print(f'Unable to parse "{override_file}" as HCL file: {e}')
        return False

    new_options_check = "endpoints" in result and all(map(lambda x: x in result.get("endpoints"), new_options))
Review comment from @whummer (Member), Mar 6, 2024:

Nice! That's great - seems much cleaner imo (and more forward-compatible) to parse the file and explicitly check for the contents we're expecting. 👌 (as compared to the file hashing)

    if is_legacy:
        legacy_options_check = all(map(lambda x: x in result, legacy_options))
        return not new_options_check and legacy_options_check

    legacy_options_check = any(map(lambda x: x in result, legacy_options))
    return new_options_check and not legacy_options_check
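
For a non-legacy Terraform, the parsed backend block this check inspects looks roughly as follows (illustrative, abbreviated values):

```python
# Approximate shape returned by hcl2 for a non-legacy override file:
result = {
    "bucket": "tf-test-state",
    "key": "terraform.tfstate",
    "region": "us-east-1",
    "endpoints": {
        "iam": "http://localhost:4566",
        "dynamodb": "http://localhost:4566",
        "s3": "http://s3.localhost.localstack.cloud:4566",
        "sso": "http://localhost:4566",
        "sts": "http://localhost:4566",
    },
}
```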


###
# UTIL FUNCTIONS
###

def deploy_tf_script(script: str, env_vars: Dict[str, str] = None):
    with tempfile.TemporaryDirectory() as temp_dir:

def is_legacy_tf_version(tf_version, legacy_version: str = "1.6") -> bool:
    """Check if the Terraform version is a legacy (pre-1.6) release"""
    return tf_version < version.Version(legacy_version)


def get_version():
    """Get the Terraform version"""
    output = run([TFLOCAL_BIN, "version", "-json"]).decode("utf-8")
    return version.parse(json.loads(output)["terraform_version"])


def deploy_tf_script(script: str, cleanup: bool = True, env_vars: Dict[str, str] = None, user_input: str = None):
    with tempfile.TemporaryDirectory(delete=cleanup) as temp_dir:
        with open(os.path.join(temp_dir, "test.tf"), "w") as f:
            f.write(script)
        kwargs = {"cwd": temp_dir}
        if user_input:
            kwargs.update({"input": bytes(user_input, "utf-8")})
        kwargs["env"] = {**os.environ, **(env_vars or {})}
        run([TFLOCAL_BIN, "init"], **kwargs)
        out = run([TFLOCAL_BIN, "apply", "-auto-approve"], **kwargs)
        return out
        run([TFLOCAL_BIN, "apply", "-auto-approve"], **kwargs)
        return temp_dir


def get_bucket_names(**kwargs: dict) -> list: