Commit a2adb28

refactor code + add tests + fix multiple terraform blocks for S3 backends
1 parent 3e5c4d1 commit a2adb28
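
For context, the "multiple terraform blocks" fix changes how the S3 backend is discovered when a configuration declares more than one terraform {} block. Below is a minimal sketch of the new lookup order, assuming parse_tf_files() yields hcl2-style dictionaries; the file name, attribute values, and bucket name are made up for illustration.

# Hypothetical parsed input: one file with two `terraform {}` blocks, where only
# the second block configures an S3 backend.
tf_files = {
    "main.tf": {
        "terraform": [
            {"required_version": ">= 1.0"},                 # no backend here
            {"backend": [{"s3": {"bucket": "my-state"}}]},  # backend lives here
        ]
    }
}

# New lookup logic from this commit: scan every `terraform` block for an S3
# backend instead of giving up after the first block.
s3_backend_config = {}
for filename, obj in tf_files.items():
    for tf_config in obj.get("terraform", []):
        if tf_config.get("backend"):
            backend_config = tf_config["backend"][0]
            if backend_config.get("s3"):
                s3_backend_config = backend_config["s3"]
                break

print(s3_backend_config)  # -> {'bucket': 'my-state'}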

3 files changed, +205 -166 lines changed

bin/tflocal

Lines changed: 103 additions & 166 deletions
@@ -18,7 +18,7 @@ import textwrap
 
 from packaging import version
 from urllib.parse import urlparse
-from typing import Iterable, Optional
+from typing import Iterable, Optional, Dict
 
 PARENT_FOLDER = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
 if os.path.isdir(os.path.join(PARENT_FOLDER, ".venv")):
@@ -271,30 +271,23 @@ def determine_provider_aliases() -> list:
 
 def generate_s3_backend_config() -> str:
     """Generate an S3 `backend {..}` block with local endpoints, if configured"""
-    is_tf_legacy = TF_VERSION < version.Version("1.6")
-    backend_config = None
+    s3_backend_config = {}
     tf_files = parse_tf_files()
     for filename, obj in tf_files.items():
         if LS_PROVIDERS_FILE == filename:
             continue
         tf_configs = ensure_list(obj.get("terraform", []))
         for tf_config in tf_configs:
-            backend_config = ensure_list(tf_config.get("backend"))
-            if backend_config:
-                backend_config = backend_config[0]
-                break
-    backend_config = backend_config and backend_config.get("s3")
-    if not backend_config:
-        return ""
+            if tf_config.get("backend"):
+                backend_config = ensure_list(tf_config.get("backend"))[0]
+                if backend_config.get("s3"):
+                    s3_backend_config = backend_config["s3"]
+                    break
 
-    legacy_endpoint_mappings = {
-        "endpoint": "s3",
-        "iam_endpoint": "iam",
-        "sts_endpoint": "sts",
-        "dynamodb_endpoint": "dynamodb",
-    }
+    if not s3_backend_config:
+        return ""
 
-    configs = {
+    backend_default_config = {
         # note: default values, updated by `backend_config` further below...
         "bucket": "tf-test-state",
         "key": "terraform.tfstate",
@@ -310,6 +303,81 @@ def generate_s3_backend_config() -> str:
             "dynamodb": get_service_endpoint("dynamodb"),
         },
     }
+
+    config_options = _generate_s3_backend_config(s3_backend_config, backend_default_config)
+    if not DRY_RUN:
+        get_or_create_bucket(backend_default_config["bucket"])
+        if "dynamodb_table" in backend_default_config:
+            get_or_create_ddb_table(
+                backend_default_config["dynamodb_table"],
+                region=backend_default_config["region"],
+            )
+
+    result = TF_S3_BACKEND_CONFIG.replace("<configs>", config_options)
+    return result
+
+
+def generate_remote_state_config() -> str:
+    """
+    Generate configuration for terraform_remote_state data sources to use LocalStack endpoints.
+    Similar to generate_s3_backend_config but for terraform_remote_state blocks.
+    """
+
+    tf_files = parse_tf_files()
+
+    result = ""
+    for filename, obj in tf_files.items():
+        if LS_PROVIDERS_FILE == filename:
+            continue
+        data_blocks = ensure_list(obj.get("data", []))
+        for data_block in data_blocks:
+            terraform_remote_state = data_block.get("terraform_remote_state")
+            if not terraform_remote_state:
+                continue
+            for data_name, data_config in terraform_remote_state.items():
+                if data_config.get("backend") != "s3":
+                    continue
+                # Create override for S3 remote state
+                backend_config = data_config.get("config", {})
+                if not backend_config:
+                    continue
+
+                # Set up default configs
+                remote_state_default_config = {
+                    "bucket": "tf-test-state",
+                    "key": "terraform.tfstate",
+                    "region": get_region(),
+                    "skip_credentials_validation": True,
+                    "skip_metadata_api_check": True,
+                    "secret_key": "test",
+                    "endpoints": {
+                        "s3": get_service_endpoint("s3"),
+                        "iam": get_service_endpoint("iam"),
+                        "sso": get_service_endpoint("sso"),
+                        "sts": get_service_endpoint("sts"),
+                    },
+                }
+
+                config_options = _generate_s3_backend_config(backend_config, remote_state_default_config)
+
+                # Create the final config
+                remote_state_config = TF_REMOTE_STATE_CONFIG.replace(
+                    "<name>", data_name
+                ).replace("<configs>", config_options)
+                result += remote_state_config
+
+    return result
+
+
+def _generate_s3_backend_config(backend_config: Dict, default_config: Dict) -> str:
+    is_tf_legacy = TF_VERSION < version.Version("1.6")
+    legacy_endpoint_mappings = {
+        "endpoint": "s3",
+        "iam_endpoint": "iam",
+        "sts_endpoint": "sts",
+        "dynamodb_endpoint": "dynamodb",
+    }
+
     # Merge in legacy endpoint configs if not existing already
     if is_tf_legacy and backend_config.get("endpoints"):
         print(
@@ -318,53 +386,47 @@ def generate_s3_backend_config() -> str:
         exit(1)
     for legacy_endpoint, endpoint in legacy_endpoint_mappings.items():
         if (
-            legacy_endpoint in backend_config
-            and backend_config.get("endpoints")
-            and endpoint in backend_config["endpoints"]
+            legacy_endpoint in backend_config
+            and backend_config.get("endpoints")
+            and endpoint in backend_config["endpoints"]
        ):
            del backend_config[legacy_endpoint]
            continue
        if legacy_endpoint in backend_config and (
-            not backend_config.get("endpoints")
-            or endpoint not in backend_config["endpoints"]
+            not backend_config.get("endpoints")
+            or endpoint not in backend_config["endpoints"]
        ):
            if not backend_config.get("endpoints"):
                backend_config["endpoints"] = {}
            backend_config["endpoints"].update(
                {endpoint: backend_config[legacy_endpoint]}
            )
            del backend_config[legacy_endpoint]
+
     # Add any missing default endpoints
     if backend_config.get("endpoints"):
-        backend_config["endpoints"] = {
-            k: backend_config["endpoints"].get(k) or v
-            for k, v in configs["endpoints"].items()
-        }
+        default_config["endpoints"].update(backend_config["endpoints"])
+
     backend_config["access_key"] = (
         get_access_key(backend_config) if CUSTOMIZE_ACCESS_KEY else DEFAULT_ACCESS_KEY
     )
-    configs.update(backend_config)
-    if not DRY_RUN:
-        get_or_create_bucket(configs["bucket"])
-        if "dynamodb_table" in configs:
-            get_or_create_ddb_table(configs["dynamodb_table"], region=configs["region"])
-    result = TF_S3_BACKEND_CONFIG
+
+    # Update with user-provided configs
+    default_config.update(backend_config)
+    # Generate config string
     config_options = ""
-    for key, value in sorted(configs.items()):
+    for key, value in sorted(default_config.items()):
         if isinstance(value, bool):
             value = str(value).lower()
         elif isinstance(value, dict):
             if key == "endpoints" and is_tf_legacy:
                 for legacy_endpoint, endpoint in legacy_endpoint_mappings.items():
-                    config_options += (
-                        f'\n    {legacy_endpoint} = "{configs[key][endpoint]}"'
-                    )
+                    config_options += f'\n    {legacy_endpoint} = "{default_config[key][endpoint]}"'
                 continue
             else:
+                joined_values = "\n".join([f'    {k} = "{v}"' for k, v in value.items()])
                 value = textwrap.indent(
-                    text=f"{key} = {{\n"
-                    + "\n".join([f'    {k} = "{v}"' for k, v in value.items()])
-                    + "\n}",
+                    text=f"{key} = {{\n{joined_values}\n}}",
                     prefix=" " * 4,
                 )
             config_options += f"\n{value}"
@@ -377,137 +439,12 @@ def generate_s3_backend_config() -> str:
         else:
             value = f'"{str(value)}"'
         config_options += f"\n    {key} = {value}"
-    result = result.replace("<configs>", config_options)
-    return result
 
-
-def generate_remote_state_config() -> str:
-    """
-    Generate configuration for terraform_remote_state data sources to use LocalStack endpoints.
-    Similar to generate_s3_backend_config but for terraform_remote_state blocks.
-    """
-
-    is_tf_legacy = TF_VERSION < version.Version("1.6")
-    tf_files = parse_tf_files()
-
-    legacy_endpoint_mappings = {
-        "endpoint": "s3",
-        "iam_endpoint": "iam",
-        "sts_endpoint": "sts",
-        "dynamodb_endpoint": "dynamodb",
-    }
-
-    result = ""
-    for filename, obj in tf_files.items():
-        if LS_PROVIDERS_FILE == filename:
-            continue
-        data_blocks = ensure_list(obj.get("data", []))
-        for data_block in data_blocks:
-            terraform_remote_state = data_block.get("terraform_remote_state")
-            if not terraform_remote_state:
-                continue
-            for data_name, data_config in terraform_remote_state.items():
-                if data_config.get("backend") != "s3":
-                    continue
-                # Create override for S3 remote state
-                config_attrs = data_config.get("config", {})
-                if not config_attrs:
-                    continue
-                # Merge in legacy endpoint configs if not existing already
-                if is_tf_legacy and config_attrs.get("endpoints"):
-                    print(
-                        "Warning: Unsupported backend option(s) detected (`endpoints`). Please make sure you always use the corresponding options to your Terraform version."
-                    )
-                    exit(1)
-                for legacy_endpoint, endpoint in legacy_endpoint_mappings.items():
-                    if (
-                        legacy_endpoint in config_attrs
-                        and config_attrs.get("endpoints")
-                        and endpoint in config_attrs["endpoints"]
-                    ):
-                        del config_attrs[legacy_endpoint]
-                        continue
-                    if legacy_endpoint in config_attrs and (
-                        not config_attrs.get("endpoints")
-                        or endpoint not in config_attrs["endpoints"]
-                    ):
-                        if not config_attrs.get("endpoints"):
-                            config_attrs["endpoints"] = {}
-                        config_attrs["endpoints"].update(
-                            {endpoint: config_attrs[legacy_endpoint]}
-                        )
-                        del config_attrs[legacy_endpoint]
-
-                # Set up default configs
-                configs = {
-                    "bucket": config_attrs.get("bucket", "tf-test-state"),
-                    "key": config_attrs.get("key", "terraform.tfstate"),
-                    "region": config_attrs.get("region", get_region()),
-                    "endpoints": {
-                        "s3": get_service_endpoint("s3"),
-                        "iam": get_service_endpoint("iam"),
-                        "sso": get_service_endpoint("sso"),
-                        "sts": get_service_endpoint("sts"),
-                    },
-                }
-
-                # Add any missing default endpoints
-                if config_attrs.get("endpoints"):
-                    config_attrs["endpoints"] = {
-                        k: config_attrs["endpoints"].get(k) or v
-                        for k, v in configs["endpoints"].items()
-                    }
-
-                # Update with user-provided configs
-                configs.update(config_attrs)
-
-                # Generate config string
-                config_options = ""
-                for key, value in sorted(configs.items()):
-                    if isinstance(value, bool):
-                        value = str(value).lower()
-                    elif isinstance(value, dict):
-                        if key == "endpoints" and is_tf_legacy:
-                            for (
-                                legacy_endpoint,
-                                endpoint,
-                            ) in legacy_endpoint_mappings.items():
-                                config_options += f'\n    {legacy_endpoint} = "{configs[key][endpoint]}"'
-                            continue
-                        else:
-                            value = textwrap.indent(
-                                text=f"{key} = {{\n"
-                                + "\n".join(
-                                    [f'    {k} = "{v}"' for k, v in value.items()]
-                                )
-                                + "\n}",
-                                prefix=" " * 4,
-                            )
-                        config_options += f"\n{value}"
-                        continue
-                    elif isinstance(value, list):
-                        # TODO this will break if it's a list of dicts or other complex object
-                        # this serialization logic should probably be moved to a separate recursive function
-                        as_string = [f'"{item}"' for item in value]
-                        value = f"[{', '.join(as_string)}]"
-                    else:
-                        value = f'"{str(value)}"'
-                    config_options += f"\n    {key} = {value}"
-
-                # Create the final config
-                remote_state_config = TF_REMOTE_STATE_CONFIG.replace(
-                    "<name>", data_name
-                )
-                remote_state_config = remote_state_config.replace(
-                    "<configs>", config_options
-                )
-                result += remote_state_config
-
-    return result
+    return config_options
 
 
 def check_override_file(providers_file: str) -> None:
-    """Checks override file existance"""
+    """Checks override file existence"""
     if os.path.exists(providers_file):
         msg = f"Providers override file {providers_file} already exists"
         err_msg = msg + " - please delete it first, exiting..."
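
As a rough illustration of the refactor above (a simplified sketch, not the actual helper), the serialization logic that generate_s3_backend_config and generate_remote_state_config now share via _generate_s3_backend_config boils down to merging user-supplied options over defaults and rendering them as HCL-style key = value lines. The function name render_backend_options and the endpoint URL below are illustrative only, and the legacy-endpoint handling for Terraform < 1.6 is omitted.

import textwrap


def render_backend_options(backend_config: dict, default_config: dict) -> str:
    """Merge user options over defaults and render them as HCL-style attribute lines."""
    if backend_config.get("endpoints"):
        default_config["endpoints"].update(backend_config["endpoints"])
    default_config.update(backend_config)

    config_options = ""
    for key, value in sorted(default_config.items()):
        if isinstance(value, bool):
            value = str(value).lower()  # HCL booleans are lowercase and unquoted
        elif isinstance(value, dict):
            # nested blocks such as `endpoints` are rendered as an indented map
            joined = "\n".join(f'    {k} = "{v}"' for k, v in value.items())
            config_options += "\n" + textwrap.indent(f"{key} = {{\n{joined}\n}}", " " * 4)
            continue
        else:
            value = f'"{value}"'
        config_options += f"\n    {key} = {value}"
    return config_options


defaults = {
    "bucket": "tf-test-state",
    "key": "terraform.tfstate",
    "skip_credentials_validation": True,
    "endpoints": {"s3": "http://localhost:4566"},
}
print(render_backend_options({"bucket": "my-state"}, defaults))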

tests/conftest.py

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@
 @pytest.fixture(scope="session", autouse=True)
 def start_localstack():
     subprocess.check_output(["localstack", "start", "-d"])
+    subprocess.check_output(["localstack", "wait"])
 
     yield
 
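The added localstack wait call makes the test session block until the LocalStack runtime reports itself ready after localstack start -d, so the suite no longer races container startup. For illustration only, a rough equivalent (assuming the default edge port 4566 and the /_localstack/health endpoint) would be to poll the health endpoint directly:

import time
import urllib.request


def wait_for_localstack(url: str = "http://localhost:4566/_localstack/health", timeout: int = 60) -> None:
    """Poll LocalStack's health endpoint until it responds, or fail after `timeout` seconds."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with urllib.request.urlopen(url) as response:
                if response.status == 200:
                    return
        except OSError:
            pass  # not reachable yet
        time.sleep(1)
    raise RuntimeError("LocalStack did not become ready in time")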