@@ -343,6 +343,53 @@ def test_s3_remote_data_source():
     assert get_obj["Body"].read() == b"test"
 
 
+def test_s3_remote_data_source_with_workspace(monkeypatch):
+    # Dry run only: generate the localstack_providers_override.tf file and check
+    # that the workspace attribute of each terraform_remote_state data source is
+    # carried over unchanged into the override file.
+    monkeypatch.setenv("DRY_RUN", "1")
+    state_bucket = f"tf-data-source-{short_uid()}"
+    config = """
+    terraform {
+      backend "s3" {
+        bucket = "%s"
+        key    = "terraform.tfstate"
+        region = "us-east-1"
+        skip_credentials_validation = true
+      }
+    }
+
+    data "terraform_remote_state" "terraform_infra" {
+      backend   = "s3"
+      workspace = terraform.workspace
+
+      config = {
+        bucket               = "<state-bucket>"
+        workspace_key_prefix = "terraform-infrastructure/place"
+        key                  = "terraform.tfstate"
+      }
+    }
+
+    data "terraform_remote_state" "build_infra" {
+      backend   = "s3"
+      workspace = "build"
+
+      config = {
+        bucket               = "<state-bucket>"
+        workspace_key_prefix = "terraform-infrastructure"
+        key                  = "terraform.tfstate"
+      }
+    }
+
+    """.replace("<state-bucket>", state_bucket)
+
+    temp_dir = deploy_tf_script(config, cleanup=False, user_input="yes")
+    override_file = os.path.join(temp_dir, "localstack_providers_override.tf")
+    assert check_override_file_exists(override_file)
+
+    with open(override_file, "r") as fp:
+        result = hcl2.load(fp)
+    assert result["data"][0]["terraform_remote_state"]["terraform_infra"]["workspace"] == "${terraform.workspace}"
+    assert result["data"][1]["terraform_remote_state"]["build_infra"]["workspace"] == "build"
+
+
 def test_dry_run(monkeypatch):
     monkeypatch.setenv("DRY_RUN", "1")
     state_bucket = "tf-state-dry-run"