diff --git a/CHANGELOG.md b/CHANGELOG.md
index 052a8e594..7415aa3c9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,7 @@
## [Unreleased]
+- Add resource `elasticstack_elasticsearch_data_stream_lifecycle` ([#838](https://github.com/elastic/terraform-provider-elasticstack/issues/838))
+
## [0.11.10] - 2024-10-23
- Fix bug updating alert delay ([#859](https://github.com/elastic/terraform-provider-elasticstack/pull/859))
diff --git a/docs/resources/elasticsearch_data_stream_lifecycle.md b/docs/resources/elasticsearch_data_stream_lifecycle.md
new file mode 100644
index 000000000..cfa310000
--- /dev/null
+++ b/docs/resources/elasticsearch_data_stream_lifecycle.md
@@ -0,0 +1,109 @@
+---
+subcategory: "Index"
+layout: ""
+page_title: "Elasticstack: elasticstack_elasticsearch_data_stream_lifecycle Resource"
+description: |-
+ Manages Lifecycle for Elasticsearch Data Streams
+---
+
+# Resource: elasticstack_elasticsearch_data_stream_lifecycle
+
+Configures the data stream lifecycle for the targeted data streams. See https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html for more details.
+
+## Example Usage
+
+```terraform
+provider "elasticstack" {
+ elasticsearch {}
+}
+
+// First we must have an index template created
+resource "elasticstack_elasticsearch_index_template" "my_data_stream_template" {
+ name = "my_data_stream"
+
+ index_patterns = ["my-stream*"]
+
+ data_stream {}
+}
+
+// and now we can create a data stream based on the index template
+resource "elasticstack_elasticsearch_data_stream" "my_data_stream" {
+ name = "my-stream"
+
+  // make sure that the template is created before the data stream
+ depends_on = [
+ elasticstack_elasticsearch_index_template.my_data_stream_template
+ ]
+}
+
+// finally, we can manage the lifecycle of the data stream
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle" {
+ name = "my-stream"
+ data_retention = "3d"
+
+ depends_on = [
+ elasticstack_elasticsearch_data_stream.my_data_stream,
+ ]
+}
+
+// or you can use wildcards to manage multiple lifecycles at once
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_multiple" {
+ name = "stream-*"
+ data_retention = "3d"
+}
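+
+// a hypothetical downsampling configuration: once each `after` interval has
+// elapsed, the matching backing indices are aggregated at the corresponding
+// `fixed_interval`. This assumes a data stream named "my-metrics-stream" exists.
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_downsampling" {
+  name           = "my-metrics-stream"
+  data_retention = "30d"
+  downsampling = [
+    {
+      after          = "1d"
+      fixed_interval = "10m"
+    },
+    {
+      after          = "7d"
+      fixed_interval = "1d"
+    }
+  ]
+}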
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) Name of the data stream. Supports wildcards.
+
+### Optional
+
+- `data_retention` (String) Every document added to this data stream will be stored for at least this time frame. When empty, every document in this data stream will be stored indefinitely.
+- `downsampling` (Attributes List) Downsampling configuration objects, each defining an `after` interval representing when the backing index is meant to be downsampled and a `fixed_interval` representing the downsampling interval. (see [below for nested schema](#nestedatt--downsampling))
+- `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection))
+- `enabled` (Boolean) Data stream lifecycle on/off.
+- `expand_wildcards` (String) Determines how wildcard patterns in `name` match data streams and indices. One of `all`, `open`, `closed`, `hidden`, `none`.
+
+### Read-Only
+
+- `id` (String) Internal identifier of the resource.
+
+
+### Nested Schema for `downsampling`
+
+Required:
+
+- `after` (String) Interval representing when the backing index is meant to be downsampled.
+- `fixed_interval` (String) The interval at which to aggregate the original time series index.
+
+
+
+### Nested Schema for `elasticsearch_connection`
+
+Optional:
+
+- `api_key` (String, Sensitive) API Key to use for authentication to Elasticsearch
+- `bearer_token` (String, Sensitive) Bearer Token to use for authentication to Elasticsearch
+- `ca_data` (String) PEM-encoded custom Certificate Authority certificate
+- `ca_file` (String) Path to a custom Certificate Authority certificate
+- `cert_data` (String) PEM encoded certificate for client auth
+- `cert_file` (String) Path to a file containing the PEM encoded certificate for client auth
+- `endpoints` (List of String, Sensitive) A list of endpoints the Terraform provider will point to. Each must include the http(s) scheme and port number.
+- `es_client_authentication` (String, Sensitive) ES Client Authentication field to be used with the bearer token
+- `insecure` (Boolean) Disable TLS certificate validation
+- `key_data` (String, Sensitive) PEM encoded private key for client auth
+- `key_file` (String) Path to a file containing the PEM encoded private key for client auth
+- `password` (String, Sensitive) Password to use for API authentication to Elasticsearch.
+- `username` (String) Username to use for API authentication to Elasticsearch.
+
+## Import
+
+Import is supported using the following syntax:
+
+```shell
+terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle <cluster_uuid>/<data_stream_name>
+```
diff --git a/examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/import.sh b/examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/import.sh
new file mode 100644
index 000000000..57fef5d5a
--- /dev/null
+++ b/examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/import.sh
@@ -0,0 +1,2 @@
+terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle <cluster_uuid>/<data_stream_name>
+
diff --git a/examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/resource.tf b/examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/resource.tf
new file mode 100644
index 000000000..6c8ebf91d
--- /dev/null
+++ b/examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/resource.tf
@@ -0,0 +1,38 @@
+provider "elasticstack" {
+ elasticsearch {}
+}
+
+// First we must have an index template created
+resource "elasticstack_elasticsearch_index_template" "my_data_stream_template" {
+ name = "my_data_stream"
+
+ index_patterns = ["my-stream*"]
+
+ data_stream {}
+}
+
+// and now we can create a data stream based on the index template
+resource "elasticstack_elasticsearch_data_stream" "my_data_stream" {
+ name = "my-stream"
+
+  // make sure that the template is created before the data stream
+ depends_on = [
+ elasticstack_elasticsearch_index_template.my_data_stream_template
+ ]
+}
+
+// finally, we can manage the lifecycle of the data stream
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle" {
+ name = "my-stream"
+ data_retention = "3d"
+
+ depends_on = [
+ elasticstack_elasticsearch_data_stream.my_data_stream,
+ ]
+}
+
+// or you can use wildcards to manage multiple lifecycles at once
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_multiple" {
+ name = "stream-*"
+ data_retention = "3d"
+}
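+
+// a hypothetical downsampling configuration: once each `after` interval has
+// elapsed, the matching backing indices are aggregated at the corresponding
+// `fixed_interval`. This assumes a data stream named "my-metrics-stream" exists.
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_downsampling" {
+  name           = "my-metrics-stream"
+  data_retention = "30d"
+  downsampling = [
+    {
+      after          = "1d"
+      fixed_interval = "10m"
+    },
+    {
+      after          = "7d"
+      fixed_interval = "1d"
+    }
+  ]
+}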
diff --git a/internal/clients/elasticsearch/index.go b/internal/clients/elasticsearch/index.go
index f26864158..6af28ba8e 100644
--- a/internal/clients/elasticsearch/index.go
+++ b/internal/clients/elasticsearch/index.go
@@ -499,6 +499,85 @@ func DeleteDataStream(ctx context.Context, apiClient *clients.ApiClient, dataStr
return diags
}
+func PutDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expandWildcards string, lifecycle models.LifecycleSettings) fwdiags.Diagnostics {
+
+ esClient, err := apiClient.GetESClient()
+ if err != nil {
+ return utils.FrameworkDiagFromError(err)
+ }
+
+ lifecycleBytes, err := json.Marshal(lifecycle)
+ if err != nil {
+ return utils.FrameworkDiagFromError(err)
+ }
+
+ opts := []func(*esapi.IndicesPutDataLifecycleRequest){
+ esClient.Indices.PutDataLifecycle.WithBody(bytes.NewReader(lifecycleBytes)),
+ esClient.Indices.PutDataLifecycle.WithContext(ctx),
+		esClient.Indices.PutDataLifecycle.WithExpandWildcards(expandWildcards),
+ }
+ res, err := esClient.Indices.PutDataLifecycle([]string{dataStreamName}, opts...)
+ if err != nil {
+ return utils.FrameworkDiagFromError(err)
+ }
+ defer res.Body.Close()
+ if diags := utils.CheckError(res, fmt.Sprintf("Unable to create DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
+ return utils.FrameworkDiagsFromSDK(diags)
+ }
+ return nil
+}
+
+func GetDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expandWildcards string) (*[]models.DataStreamLifecycle, fwdiags.Diagnostics) {
+ esClient, err := apiClient.GetESClient()
+ if err != nil {
+ return nil, utils.FrameworkDiagFromError(err)
+ }
+ opts := []func(*esapi.IndicesGetDataLifecycleRequest){
+ esClient.Indices.GetDataLifecycle.WithContext(ctx),
+		esClient.Indices.GetDataLifecycle.WithExpandWildcards(expandWildcards),
+ }
+ res, err := esClient.Indices.GetDataLifecycle([]string{dataStreamName}, opts...)
+ if err != nil {
+ return nil, utils.FrameworkDiagFromError(err)
+ }
+ defer res.Body.Close()
+ if res.StatusCode == http.StatusNotFound {
+ return nil, nil
+ }
+ if diags := utils.CheckError(res, fmt.Sprintf("Unable to get requested DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
+ return nil, utils.FrameworkDiagsFromSDK(diags)
+ }
+
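+	// the response body has the shape {"data_streams": [ ... ]}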
+ dStreams := make(map[string][]models.DataStreamLifecycle)
+ if err := json.NewDecoder(res.Body).Decode(&dStreams); err != nil {
+ return nil, utils.FrameworkDiagFromError(err)
+ }
+ ds := dStreams["data_streams"]
+ return &ds, nil
+}
+
+func DeleteDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expandWildcards string) fwdiags.Diagnostics {
+
+ esClient, err := apiClient.GetESClient()
+ if err != nil {
+ return utils.FrameworkDiagFromError(err)
+ }
+ opts := []func(*esapi.IndicesDeleteDataLifecycleRequest){
+ esClient.Indices.DeleteDataLifecycle.WithContext(ctx),
+		esClient.Indices.DeleteDataLifecycle.WithExpandWildcards(expandWildcards),
+ }
+ res, err := esClient.Indices.DeleteDataLifecycle([]string{dataStreamName}, opts...)
+ if err != nil {
+ return utils.FrameworkDiagFromError(err)
+ }
+ defer res.Body.Close()
+ if diags := utils.CheckError(res, fmt.Sprintf("Unable to delete DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
+ return utils.FrameworkDiagsFromSDK(diags)
+ }
+
+ return nil
+}
+
func PutIngestPipeline(ctx context.Context, apiClient *clients.ApiClient, pipeline *models.IngestPipeline) diag.Diagnostics {
var diags diag.Diagnostics
pipelineBytes, err := json.Marshal(pipeline)
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/acc_test.go b/internal/elasticsearch/index/data_stream_lifecycle/acc_test.go
new file mode 100644
index 000000000..07310da07
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/acc_test.go
@@ -0,0 +1,286 @@
+package data_stream_lifecycle_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "testing"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/acctest"
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/data_stream_lifecycle"
+ "github.com/elastic/terraform-provider-elasticstack/internal/models"
+ "github.com/elastic/terraform-provider-elasticstack/internal/versionutils"
+ sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
+)
+
+func TestAccResourceDataStreamLifecycle(t *testing.T) {
+ dsName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlpha)
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { acctest.PreCheck(t) },
+ CheckDestroy: checkResourceDataStreamLifecycleDestroy,
+ ProtoV6ProviderFactories: acctest.Providers,
+ Steps: []resource.TestStep{
+ {
+ SkipFunc: versionutils.CheckIfVersionIsUnsupported(data_stream_lifecycle.MinVersion),
+ Config: testAccResourceDataStreamLifecycleCreate(dsName),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "name", dsName+"-one"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "data_retention", "3d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "downsampling.#", "2"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "downsampling.0.after", "1d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "downsampling.0.fixed_interval", "10m"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "downsampling.1.after", "7d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "downsampling.1.fixed_interval", "1d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "name", dsName+"-multiple-*"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "data_retention", "3d"),
+ ),
+ },
+ {
+ SkipFunc: versionutils.CheckIfVersionIsUnsupported(data_stream_lifecycle.MinVersion),
+ Config: testAccResourceDataStreamLifecycleUpdate(dsName),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "name", dsName+"-one"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "data_retention", "2d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "downsampling.#", "0"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "name", dsName+"-multiple-*"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "data_retention", "2d"),
+ ),
+ },
+ {
+ SkipFunc: versionutils.CheckIfVersionIsUnsupported(data_stream_lifecycle.MinVersion),
+ PreConfig: func() {
+ client, err := clients.NewAcceptanceTestingClient()
+ if err != nil {
+ t.Fatalf("Failed to create testing client: %s", err)
+ }
+ esClient, err := client.GetESClient()
+ if err != nil {
+ t.Fatalf("Failed to get es client: %s", err)
+ }
+ lifecycle := models.LifecycleSettings{
+ DataRetention: "10d",
+ Downsampling: []models.Downsampling{
+ {After: "10d", FixedInterval: "5d"},
+ {After: "20d", FixedInterval: "10d"},
+ },
+ }
+ lifecycleBytes, err := json.Marshal(lifecycle)
+ if err != nil {
+ t.Fatalf("Cannot marshal lifecycle: %s", err)
+ }
+ _, err = esClient.Indices.PutDataLifecycle([]string{dsName + "-multiple-two"}, esClient.Indices.PutDataLifecycle.WithBody(bytes.NewReader(lifecycleBytes)))
+ if err != nil {
+ t.Fatalf("Cannot update lifecycle: %s", err)
+ }
+ },
+ Config: testAccResourceDataStreamLifecycleUpdate(dsName),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "name", dsName+"-one"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle", "data_retention", "2d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "name", dsName+"-multiple-*"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "data_retention", "2d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "downsampling.0.after", "1d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "downsampling.0.fixed_interval", "10m"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "downsampling.1.after", "7d"),
+ resource.TestCheckResourceAttr("elasticstack_elasticsearch_data_stream_lifecycle.test_ds_lifecycle_multiple", "downsampling.1.fixed_interval", "1d"),
+ ),
+ },
+ },
+ })
+}
+
+func testAccResourceDataStreamLifecycleCreate(name string) string {
+ return fmt.Sprintf(`
+provider "elasticstack" {
+ elasticsearch {}
+}
+
+resource "elasticstack_elasticsearch_index_template" "test_ds_template" {
+ name = "%[1]s"
+
+ index_patterns = ["%[1]s*"]
+
+ data_stream {}
+}
+
+resource "elasticstack_elasticsearch_data_stream" "test_ds_one" {
+ name = "%[1]s-one"
+
+ depends_on = [
+ elasticstack_elasticsearch_index_template.test_ds_template
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream" "test_ds_two" {
+ name = "%[1]s-multiple-one"
+
+ depends_on = [
+ elasticstack_elasticsearch_index_template.test_ds_template
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream" "test_ds_three" {
+ name = "%[1]s-multiple-two"
+
+ depends_on = [
+ elasticstack_elasticsearch_index_template.test_ds_template
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "test_ds_lifecycle" {
+ name = "%[1]s-one"
+ data_retention = "3d"
+ downsampling = [
+ {
+ after = "1d"
+ fixed_interval = "10m"
+ },
+ {
+ after = "7d"
+ fixed_interval = "1d"
+ }
+ ]
+
+ depends_on = [
+ elasticstack_elasticsearch_data_stream.test_ds_one
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "test_ds_lifecycle_multiple" {
+ name = "%[1]s-multiple-*"
+ data_retention = "3d"
+ downsampling = [
+ {
+ after = "1d"
+ fixed_interval = "10m"
+ },
+ {
+ after = "7d"
+ fixed_interval = "1d"
+ }
+ ]
+
+ depends_on = [
+ elasticstack_elasticsearch_data_stream.test_ds_two,
+ elasticstack_elasticsearch_data_stream.test_ds_three
+ ]
+}
+`, name)
+}
+
+func testAccResourceDataStreamLifecycleUpdate(name string) string {
+ return fmt.Sprintf(`
+provider "elasticstack" {
+ elasticsearch {}
+}
+
+resource "elasticstack_elasticsearch_index_template" "test_ds_template" {
+ name = "%[1]s"
+
+ index_patterns = ["%[1]s*"]
+
+ data_stream {}
+}
+
+resource "elasticstack_elasticsearch_data_stream" "test_ds_one" {
+ name = "%[1]s-one"
+
+ depends_on = [
+ elasticstack_elasticsearch_index_template.test_ds_template
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream" "test_ds_two" {
+ name = "%[1]s-multiple-one"
+
+ depends_on = [
+ elasticstack_elasticsearch_index_template.test_ds_template
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream" "test_ds_three" {
+ name = "%[1]s-multiple-two"
+
+ depends_on = [
+ elasticstack_elasticsearch_index_template.test_ds_template
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "test_ds_lifecycle" {
+ name = "%[1]s-one"
+ data_retention = "2d"
+
+ depends_on = [
+ elasticstack_elasticsearch_data_stream.test_ds_one
+ ]
+}
+
+resource "elasticstack_elasticsearch_data_stream_lifecycle" "test_ds_lifecycle_multiple" {
+ name = "%[1]s-multiple-*"
+ data_retention = "2d"
+ downsampling = [
+ {
+ after = "1d"
+ fixed_interval = "10m"
+ },
+ {
+ after = "7d"
+ fixed_interval = "1d"
+ }
+ ]
+
+ depends_on = [
+ elasticstack_elasticsearch_data_stream.test_ds_two,
+ elasticstack_elasticsearch_data_stream.test_ds_three
+ ]
+}
+
+`, name)
+}
+
+func checkResourceDataStreamLifecycleDestroy(s *terraform.State) error {
+ client, err := clients.NewAcceptanceTestingClient()
+ if err != nil {
+ return err
+ }
+
+ for _, rs := range s.RootModule().Resources {
+ if rs.Type != "elasticstack_elasticsearch_data_stream_lifecycle" {
+ continue
+ }
+ compId, _ := clients.CompositeIdFromStr(rs.Primary.ID)
+
+ esClient, err := client.GetESClient()
+ if err != nil {
+ return err
+ }
+
+ res, err := esClient.Indices.GetDataLifecycle([]string{compId.ResourceId})
+ if err != nil {
+ return err
+ }
+
+		// for a lifecycle without a wildcard, 404 is returned when no data stream matches
+ if res.StatusCode == 404 {
+ return nil
+ }
+
+ defer res.Body.Close()
+
+ dStreams := make(map[string][]models.DataStreamLifecycle)
+ if err := json.NewDecoder(res.Body).Decode(&dStreams); err != nil {
+ return err
+ }
+	// for a lifecycle with a wildcard, an empty array is returned
+ if len(dStreams["data_streams"]) > 0 {
+ return fmt.Errorf("Data Stream Lifecycle (%s) still exists", compId.ResourceId)
+ }
+ }
+ return nil
+}
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/create.go b/internal/elasticsearch/index/data_stream_lifecycle/create.go
new file mode 100644
index 000000000..b7d04f43a
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/create.go
@@ -0,0 +1,60 @@
+package data_stream_lifecycle
+
+import (
+ "context"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch"
+ "github.com/elastic/terraform-provider-elasticstack/internal/utils"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/tfsdk"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ resp.Diagnostics.Append(r.create(ctx, req.Plan, &resp.State)...)
+}
+
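+// create is shared by Create and Update, since the underlying put data stream
+// lifecycle API call upserts the lifecycle for every matched data stream.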
+func (r *Resource) create(ctx context.Context, plan tfsdk.Plan, state *tfsdk.State) diag.Diagnostics {
+ var planModel tfModel
+ diags := plan.Get(ctx, &planModel)
+ if diags.HasError() {
+ return diags
+ }
+
+ client, d := clients.MaybeNewApiClientFromFrameworkResource(ctx, planModel.ElasticsearchConnection, r.client)
+ diags.Append(d...)
+ if diags.HasError() {
+ return diags
+ }
+
+ name := planModel.Name.ValueString()
+ id, sdkDiags := client.ID(ctx, name)
+ if sdkDiags.HasError() {
+ diags.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...)
+ return diags
+ }
+
+ planModel.ID = types.StringValue(id.String())
+
+ apiModel, d := planModel.toAPIModel(ctx)
+ diags.Append(d...)
+ if diags.HasError() {
+ return diags
+ }
+
+ diags.Append(elasticsearch.PutDataStreamLifecycle(ctx, client, name, planModel.ExpandWildcards.ValueString(), apiModel)...)
+ if diags.HasError() {
+ return diags
+ }
+
+ finalModel, d := r.read(ctx, client, planModel)
+ diags.Append(d...)
+ if diags.HasError() {
+ return diags
+ }
+
+ diags.Append(state.Set(ctx, finalModel)...)
+ return diags
+}
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/delete.go b/internal/elasticsearch/index/data_stream_lifecycle/delete.go
new file mode 100644
index 000000000..832dc190b
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/delete.go
@@ -0,0 +1,35 @@
+package data_stream_lifecycle
+
+import (
+ "context"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
+func (r *Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var stateModel tfModel
+ resp.Diagnostics.Append(req.State.Get(ctx, &stateModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, stateModel.ElasticsearchConnection, r.client)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ compId, diags := stateModel.GetID()
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(elasticsearch.DeleteDataStreamLifecycle(ctx, client, compId.ResourceId, stateModel.ExpandWildcards.ValueString())...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.State.RemoveResource(ctx)
+}
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/models.go b/internal/elasticsearch/index/data_stream_lifecycle/models.go
new file mode 100644
index 000000000..82e47d168
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/models.go
@@ -0,0 +1,111 @@
+package data_stream_lifecycle
+
+import (
+ "context"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ "github.com/elastic/terraform-provider-elasticstack/internal/models"
+ "github.com/elastic/terraform-provider-elasticstack/internal/utils"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+type tfModel struct {
+ ID types.String `tfsdk:"id"`
+ ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"`
+ Name types.String `tfsdk:"name"`
+ DataRetention types.String `tfsdk:"data_retention"`
+ ExpandWildcards types.String `tfsdk:"expand_wildcards"`
+ Enabled types.Bool `tfsdk:"enabled"`
+ Downsampling types.List `tfsdk:"downsampling"`
+}
+
+type downsamplingTfModel struct {
+ After types.String `tfsdk:"after"`
+ FixedInterval types.String `tfsdk:"fixed_interval"`
+}
+
+func (model tfModel) GetID() (*clients.CompositeId, diag.Diagnostics) {
+ compId, sdkDiags := clients.CompositeIdFromStr(model.ID.ValueString())
+ if sdkDiags.HasError() {
+ return nil, utils.FrameworkDiagsFromSDK(sdkDiags)
+ }
+
+ return compId, nil
+}
+
+func (model tfModel) toAPIModel(ctx context.Context) (models.LifecycleSettings, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ apiModel := models.LifecycleSettings{
+ DataRetention: model.DataRetention.ValueString(),
+ Enabled: model.Enabled.ValueBool(),
+ }
+
+ if !model.Downsampling.IsNull() && !model.Downsampling.IsUnknown() && len(model.Downsampling.Elements()) > 0 {
+
+ downsampling := make([]downsamplingTfModel, len(model.Downsampling.Elements()))
+ if diags := model.Downsampling.ElementsAs(ctx, &downsampling, true); diags.HasError() {
+ return models.LifecycleSettings{}, diags
+ }
+
+ apiModel.Downsampling = make([]models.Downsampling, len(model.Downsampling.Elements()))
+ for i, ds := range downsampling {
+ apiModel.Downsampling[i] = models.Downsampling{
+ After: ds.After.ValueString(),
+ FixedInterval: ds.FixedInterval.ValueString(),
+ }
+ }
+ }
+
+ return apiModel, diags
+}
+
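+// populateFromAPI refreshes the Terraform model from the lifecycle settings
+// returned by Elasticsearch, overwriting only fields that differ from state.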
+func (model *tfModel) populateFromAPI(ctx context.Context, ds []models.DataStreamLifecycle) diag.Diagnostics {
+ actualRetention := model.DataRetention.ValueString()
+ actualDownsampling := make([]downsamplingTfModel, len(model.Downsampling.Elements()))
+ if diags := model.Downsampling.ElementsAs(ctx, &actualDownsampling, true); diags.HasError() {
+		return diags
+ }
+
+ for _, lf := range ds {
+ if lf.Lifecycle.DataRetention != actualRetention {
+ model.DataRetention = types.StringValue(lf.Lifecycle.DataRetention)
+ }
+ var updateDownsampling bool
+ if len(lf.Lifecycle.Downsampling) != len(actualDownsampling) {
+ updateDownsampling = true
+ } else {
+ for i, ds := range actualDownsampling {
+ if ds.After.ValueString() != lf.Lifecycle.Downsampling[i].After || ds.FixedInterval.ValueString() != lf.Lifecycle.Downsampling[i].FixedInterval {
+ updateDownsampling = true
+ break
+ }
+ }
+ }
+ if updateDownsampling {
+ listValue, diags := convertDownsamplingToModel(ctx, lf.Lifecycle.Downsampling)
+ if diags.HasError() {
+ return diags
+ }
+ model.Downsampling = listValue
+ }
+ }
+ return nil
+}
+
+func convertDownsamplingToModel(ctx context.Context, apiDownsamplings []models.Downsampling) (types.List, diag.Diagnostics) {
+ var downsamplings []downsamplingTfModel
+
+ for _, apiDs := range apiDownsamplings {
+ downsamplings = append(downsamplings, downsamplingTfModel{
+ After: types.StringValue(apiDs.After),
+ FixedInterval: types.StringValue(apiDs.FixedInterval),
+ })
+ }
+
+ listValue, diags := types.ListValueFrom(ctx, downsamplingElementType(), downsamplings)
+
+ return listValue, diags
+}
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/read.go b/internal/elasticsearch/index/data_stream_lifecycle/read.go
new file mode 100644
index 000000000..5e3466495
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/read.go
@@ -0,0 +1,56 @@
+package data_stream_lifecycle
+
+import (
+ "context"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
+func (r *Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var stateModel tfModel
+ resp.Diagnostics.Append(req.State.Get(ctx, &stateModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, stateModel.ElasticsearchConnection, r.client)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ finalModel, diags := r.read(ctx, client, stateModel)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ if finalModel == nil {
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.Set(ctx, *finalModel)...)
+}
+
+func (r *Resource) read(ctx context.Context, client *clients.ApiClient, model tfModel) (*tfModel, diag.Diagnostics) {
+ var diags diag.Diagnostics
+ compId, diags := model.GetID()
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ ds, diags := elasticsearch.GetDataStreamLifecycle(ctx, client, compId.ResourceId, model.ExpandWildcards.ValueString())
+ if diags.HasError() {
+ return nil, diags
+ }
+ if ds == nil || len(*ds) == 0 {
+ return nil, nil
+ }
+
+ diags.Append(model.populateFromAPI(ctx, *ds)...)
+ return &model, diags
+}
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/resource.go b/internal/elasticsearch/index/data_stream_lifecycle/resource.go
new file mode 100644
index 000000000..b8fa48207
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/resource.go
@@ -0,0 +1,35 @@
+package data_stream_lifecycle
+
+import (
+ "context"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients"
+ "github.com/hashicorp/go-version"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
+var _ resource.Resource = &Resource{}
+var _ resource.ResourceWithConfigure = &Resource{}
+var _ resource.ResourceWithImportState = &Resource{}
+var (
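+	// MinVersion is the minimum Elasticsearch version supported by this
+	// resource; data stream lifecycle management became generally available in 8.11.0.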
+ MinVersion = version.Must(version.NewVersion("8.11.0"))
+)
+
+type Resource struct {
+ client *clients.ApiClient
+}
+
+func (r *Resource) Configure(ctx context.Context, request resource.ConfigureRequest, response *resource.ConfigureResponse) {
+ client, diags := clients.ConvertProviderData(request.ProviderData)
+ response.Diagnostics.Append(diags...)
+ r.client = client
+}
+
+func (r *Resource) Metadata(ctx context.Context, request resource.MetadataRequest, response *resource.MetadataResponse) {
+ response.TypeName = request.ProviderTypeName + "_elasticsearch_data_stream_lifecycle"
+}
+
+func (r *Resource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+ resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
+}
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/schema.go b/internal/elasticsearch/index/data_stream_lifecycle/schema.go
new file mode 100644
index 000000000..7d21a72e0
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/schema.go
@@ -0,0 +1,89 @@
+package data_stream_lifecycle
+
+import (
+ "context"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+
+ providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema"
+)
+
+func (r *Resource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = getSchema()
+}
+
+func getSchema() schema.Schema {
+ return schema.Schema{
+		Description: "Configures the data stream lifecycle for the targeted data streams. See https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html for more details.",
+ Blocks: map[string]schema.Block{
+ "elasticsearch_connection": providerschema.GetEsFWConnectionBlock("elasticsearch_connection", false),
+ },
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: "Internal identifier of the resource.",
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "name": schema.StringAttribute{
+ Description: "Name of the data stream. Supports wildcards.",
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "data_retention": schema.StringAttribute{
+				Description: "Every document added to this data stream will be stored for at least this time frame. When empty, every document in this data stream will be stored indefinitely.",
+ Optional: true,
+ },
+ "enabled": schema.BoolAttribute{
+ Description: "Data stream lifecycle on/off.",
+ Optional: true,
+ Computed: true,
+ Default: booldefault.StaticBool(true),
+ },
+ "expand_wildcards": schema.StringAttribute{
+				Description: "Determines how wildcard patterns in `name` match data streams and indices. One of `all`, `open`, `closed`, `hidden`, `none`.",
+ Optional: true,
+ Computed: true,
+ Default: stringdefault.StaticString("open"),
+ Validators: []validator.String{
+ stringvalidator.OneOf("all", "open", "closed", "hidden", "none"),
+ },
+ },
+ "downsampling": schema.ListNestedAttribute{
+				Description: "Downsampling configuration objects, each defining an `after` interval representing when the backing index is meant to be downsampled and a `fixed_interval` representing the downsampling interval.",
+ Optional: true,
+ Validators: []validator.List{
+ listvalidator.SizeAtMost(10),
+ },
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "after": schema.StringAttribute{
+						Description: "Interval representing when the backing index is meant to be downsampled.",
+ Required: true,
+ },
+ "fixed_interval": schema.StringAttribute{
+ Description: "The interval at which to aggregate the original time series index.",
+ Required: true,
+ },
+ },
+ },
+ },
+ },
+ }
+}
+
+func downsamplingElementType() attr.Type {
+ return getSchema().Attributes["downsampling"].GetType().(attr.TypeWithElementType).ElementType()
+}
diff --git a/internal/elasticsearch/index/data_stream_lifecycle/update.go b/internal/elasticsearch/index/data_stream_lifecycle/update.go
new file mode 100644
index 000000000..94c6fa16b
--- /dev/null
+++ b/internal/elasticsearch/index/data_stream_lifecycle/update.go
@@ -0,0 +1,11 @@
+package data_stream_lifecycle
+
+import (
+ "context"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
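+// Update delegates to the shared create flow; the put data stream lifecycle
+// API is an idempotent upsert, so creates and updates are handled identically.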
+func (r *Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ resp.Diagnostics.Append(r.create(ctx, req.Plan, &resp.State)...)
+}
diff --git a/internal/models/models.go b/internal/models/models.go
index 29882b308..3cc9c1e18 100644
--- a/internal/models/models.go
+++ b/internal/models/models.go
@@ -312,7 +312,14 @@ type IndexAlias struct {
}
type LifecycleSettings struct {
- DataRetention string `json:"data_retention,omitempty"`
+ DataRetention string `json:"data_retention,omitempty"`
+ Enabled bool `json:"enabled,omitempty"`
+ Downsampling []Downsampling `json:"downsampling,omitempty"`
+}
+
+type Downsampling struct {
+ After string `json:"after,omitempty"`
+ FixedInterval string `json:"fixed_interval,omitempty"`
}
type DataStream struct {
@@ -334,6 +341,11 @@ type DataStreamIndex struct {
IndexUUID string `json:"index_uuid"`
}
+type DataStreamLifecycle struct {
+ Name string `json:"name"`
+ Lifecycle LifecycleSettings `json:"lifecycle,omitempty"`
+}
+
type TimestampField struct {
Name string `json:"name"`
}
diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go
index 994d494ec..652005d65 100644
--- a/provider/plugin_framework.go
+++ b/provider/plugin_framework.go
@@ -5,6 +5,7 @@ import (
"github.com/elastic/terraform-provider-elasticstack/internal/clients"
"github.com/elastic/terraform-provider-elasticstack/internal/clients/config"
+ "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/data_stream_lifecycle"
"github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/index"
"github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/indices"
"github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security/api_key"
@@ -93,6 +94,7 @@ func (p *Provider) Resources(ctx context.Context) []func() resource.Resource {
func() resource.Resource { return &index.Resource{} },
func() resource.Resource { return &synthetics.Resource{} },
func() resource.Resource { return &api_key.Resource{} },
+ func() resource.Resource { return &data_stream_lifecycle.Resource{} },
agent_policy.NewResource,
integration.NewResource,
integration_policy.NewResource,
diff --git a/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl b/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl
new file mode 100644
index 000000000..ee163bffb
--- /dev/null
+++ b/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl
@@ -0,0 +1,23 @@
+---
+subcategory: "Index"
+layout: ""
+page_title: "Elasticstack: elasticstack_elasticsearch_data_stream_lifecycle Resource"
+description: |-
+ Manages Lifecycle for Elasticsearch Data Streams
+---
+
+# Resource: elasticstack_elasticsearch_data_stream_lifecycle
+
+Configures the data stream lifecycle for the targeted data streams. See https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html for more details.
+
+## Example Usage
+
+{{ tffile "examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/resource.tf" }}
+
+{{ .SchemaMarkdown | trimspace }}
+
+## Import
+
+Import is supported using the following syntax:
+
+{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/import.sh" }}