
Commit 0d7324b

Cleaning up tests
1 parent d835f6f commit 0d7324b

9 files changed: +211 −124 lines changed

examples/ecs_fargate/README.md

Lines changed: 18 additions & 0 deletions
# ECS Fargate Example

A simple ECS Fargate Task Definition with out of the box Datadog instrumentation.

## Usage

* Create a [Datadog API Key](https://app.datadoghq.com/organization-settings/api-keys)
* Create a `terraform.tfvars` file
* Set the `dd_api_key` to the Datadog API Key (required)
* Set the `dd_service` to the name of the service you want to use to filter for the resource in Datadog
* Set the `dd_site` to the [Datadog destination site](https://docs.datadoghq.com/getting_started/site/) for your metrics, traces, and logs
* Run the following commands:

```bash
terraform init
terraform plan
terraform apply
```
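
For reference, a `terraform.tfvars` matching the steps above might look like the sketch below; the values are placeholders, and per the example's `variables.tf` only `dd_api_key` is required:

```hcl
# terraform.tfvars — placeholder values; substitute your own.
dd_api_key = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" # required
dd_service = "my-ecs-service"                   # optional; used to filter in Datadog
dd_site    = "datadoghq.com"                    # optional; defaults to datadoghq.com
```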

examples/ecs_fargate/main.tf

Lines changed: 4 additions & 120 deletions
```diff
@@ -5,10 +5,10 @@
 module "ecs_task" {
   source = "../../modules/ecs_fargate"
 
-  # Configure Datadaog
-  # dd_api_key_secret_arn = "arn:aws:secretsmanager:us-east-1:376334461865:secret:ecs-terraform-dd-api-key-IU8YjD"
-  # execution_role_arn = "arn:aws:iam::376334461865:role/my-ecs-task-execution-role"
-  dd_api_key = "gabegabegabe"
+  # Configure Datadog
+  dd_api_key = var.dd_api_key
+  dd_site    = var.dd_site
+  dd_service = var.dd_service
 
   dd_environment = [
     {
@@ -43,119 +43,3 @@ module "ecs_task" {
   }
   requires_compatibilities = ["FARGATE"]
 }
-
-################################################################################
-# Task Definition: All Task Parameters Example
-################################################################################
-
-resource "aws_efs_file_system" "fs" {
-  creation_token   = "my-efs-file-system"
-  performance_mode = "generalPurpose"
-
-  tags = {
-    Name = "MyEFSFileSystem"
-  }
-}
-
-resource "aws_efs_access_point" "fs" {
-  file_system_id = aws_efs_file_system.fs.id
-
-  posix_user {
-    uid = 1000
-    gid = 1000
-  }
-
-  root_directory {
-    path = "/example"
-
-    creation_info {
-      owner_uid   = 1000
-      owner_gid   = 1000
-      permissions = "755"
-    }
-  }
-}
-
-module "ecs_task_all_task_features" {
-  source = "../../modules/ecs_fargate"
-
-  family = "all-task-features"
-
-  container_definitions = {
-    allContainer = {
-      name      = "datadog-dummy-app",
-      image     = "public.ecr.aws/ubuntu/ubuntu:22.04_stable",
-      essential = true,
-      entryPoint = [
-        "/usr/bin/bash",
-        "-c",
-        "cp /usr/bin/bash /tmp/malware; chmod u+s /tmp/malware; apt update;apt install -y curl wget; /tmp/malware -c 'while true; do wget https://google.com; sleep 60; done'"
-      ],
-    }
-  }
-
-  cpu                    = 256
-  memory                 = 512
-  enable_fault_injection = false
-  ephemeral_storage = {
-    size_in_gib = 40
-  }
-  # NOT SUPPORTED ON FARGATE
-  # inference_accelerator = [{
-  #   device_name = "device_1"
-  #   device_type = "eia1.medium"
-  # }]
-  # NOT SUPPORTED ON FARGATE
-  # ipc_mode = "host"
-  network_mode = "awsvpc"
-  pid_mode     = "task"
-  # NOT SUPPORTED ON FARGATE
-  # placement_constraints = [{
-  #   type       = "memberOf"
-  #   expression = "attribute:ecs.availability-zone in [us-west-2a, us-east-1a]"
-  # }]
-  proxy_configuration = {
-    type           = "APPMESH"
-    container_name = "datadog-dummy-app"
-    properties = {
-      AppPorts         = "8080"
-      EgressIgnoredIPs = "169.254.170.2,169.254.169.254"
-      IgnoredUID       = "1337"
-      ProxyEgressPort  = 15001
-      ProxyIngressPort = 15000
-    }
-  }
-  volume = {
-    name = "service-storage"
-
-    docker_volume_configuration = {
-      scope         = "shared"
-      autoprovision = true
-      driver        = "local"
-
-      driver_opts = {
-        "type"   = "nfs"
-        "device" = "${aws_efs_file_system.fs.dns_name}:/"
-        "o"      = "addr=${aws_efs_file_system.fs.dns_name},rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport"
-      }
-    }
-
-    efs_volume_configuration = {
-      file_system_id          = aws_efs_file_system.fs.id
-      root_directory          = "/opt/data"
-      transit_encryption      = "ENABLED"
-      transit_encryption_port = 2999
-      authorization_config = {
-        access_point_id = aws_efs_access_point.fs.id
-        iam             = "ENABLED"
-      }
-    }
-  }
-
-  skip_destroy = false
-
-  runtime_platform = {
-    operating_system_family = "LINUX"
-    cpu_architecture        = "X86_64"
-  }
-}
```

examples/ecs_fargate/variables.tf

Lines changed: 16 additions & 0 deletions
```hcl
variable "dd_api_key" {
  description = "Datadog API Key"
  type        = string
}

variable "dd_service" {
  description = "Service name for resource filtering in Datadog"
  type        = string
  default     = null
}

variable "dd_site" {
  description = "Datadog Site"
  type        = string
  default     = "datadoghq.com"
}
```

modules/ecs_fargate/datadog.tf

Lines changed: 3 additions & 0 deletions
```diff
@@ -5,6 +5,9 @@ locals {
     for pair in [
       { key = "DD_API_KEY", value = var.dd_api_key },
       { key = "DD_SITE", value = var.dd_site },
+      { key = "DD_SERVICE", value = var.dd_service },
+      { key = "DD_ENV", value = var.dd_env },
+      { key = "DD_VERSION", value = var.dd_version },
       # TODO: clusterName, ddTags, etc.
     ] : { name = pair.key, value = pair.value } if pair.value != null
   ]
```
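
The `if pair.value != null` filter means the new tagging keys only appear in the container environment when they are actually set. A minimal, self-contained sketch of the same pattern (independent of the module; names are placeholders) illustrates the behavior:

```hcl
# Standalone illustration of the null-filtering pattern used in datadog.tf.
locals {
  dd_service = "checkout" # set
  dd_env     = null       # unset -> dropped from the rendered list

  environment = [
    for pair in [
      { key = "DD_SERVICE", value = local.dd_service },
      { key = "DD_ENV", value = local.dd_env },
    ] : { name = pair.key, value = pair.value } if pair.value != null
  ]
}

output "environment" {
  # Yields: [{ name = "DD_SERVICE", value = "checkout" }]
  value = local.environment
}
```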

modules/ecs_fargate/variables.tf

Lines changed: 22 additions & 4 deletions
```diff
@@ -38,6 +38,24 @@ variable "dd_environment" {
   default     = [{}]
 }
 
+variable "dd_service" {
+  description = "The task service name. Used for tagging (UST)"
+  type        = string
+  default     = null
+}
+
+variable "dd_env" {
+  description = "The task environment name. Used for tagging (UST)"
+  type        = string
+  default     = null
+}
+
+variable "dd_version" {
+  description = "The task version name. Used for tagging (UST)"
+  type        = string
+  default     = null
+}
+
 ################################################################################
 # Task Definition
 ################################################################################
@@ -113,21 +131,21 @@ variable "pid_mode" {
 # Not Fargate Compatible
 variable "placement_constraints" {
   description = "Configuration block for rules that are taken into consideration during task placement (up to max of 10). This is set at the task definition, see `placement_constraints` for setting at the service"
-  type = list(object({
+  type = list(object({
     type       = string
     expression = string
   }))
-  default = []
+  default = []
 }
 
 variable "proxy_configuration" {
   description = "Configuration block for the App Mesh proxy"
-  type = object({
+  type = object({
     container_name = string
     properties     = map(any)
     type           = optional(string, "APPMESH")
   })
-  default = null
+  default = null
 }
 
 variable "requires_compatibilities" {
```

smoke_tests/README.md

Lines changed: 27 additions & 0 deletions
# Smoke Tests

A simple smoke test setup that creates one of each of the various configurations that we support, making sure that our parameters are sensible and work.

## Usage

* Create a [Datadog API Key](https://app.datadoghq.com/organization-settings/api-keys)
* Create a `terraform.tfvars` file
* Set the `dd_api_key` to the Datadog API Key
* Set the `dd_service` to the name of the service you want to use to filter for the resource in Datadog
* Set the `dd_site` to the [Datadog destination site](https://docs.datadoghq.com/getting_started/site/) for your metrics, traces, and logs
* Run the following commands:

```bash
terraform init
terraform plan
terraform apply
```

Confirm that the ECS tasks were all created as expected.

Run the following command to clean up the environment:

```bash
terraform destroy
```
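
As with the example above, a placeholder `terraform.tfvars` for the smoke tests might look like the following; note that in the smoke-test `variables.tf` below, `dd_service` has no default, so it must be provided:

```hcl
# terraform.tfvars — placeholder values for the smoke tests.
dd_api_key = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
dd_service = "smoke-test-service"
dd_site    = "datadoghq.com"
```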

smoke_tests/ecs_fargate/main.tf

Lines changed: 96 additions & 0 deletions
```hcl
################################################################################
# Task Definition: All Task Parameters Example
################################################################################

resource "aws_efs_file_system" "fs" {
  creation_token   = "my-efs-file-system"
  performance_mode = "generalPurpose"
  tags = {
    Name = "MyEFSFileSystem"
  }
}

resource "aws_efs_access_point" "fs" {
  file_system_id = aws_efs_file_system.fs.id
  posix_user {
    uid = 1000
    gid = 1000
  }
  root_directory {
    path = "/example"
    creation_info {
      owner_uid   = 1000
      owner_gid   = 1000
      permissions = "755"
    }
  }
}

module "ecs_task_all_task_features" {
  source = "../../modules/ecs_fargate"

  dd_api_key = var.dd_api_key
  dd_site    = var.dd_site
  dd_service = var.dd_service

  family = "all-task-features"
  container_definitions = {
    allContainer = {
      name      = "datadog-dummy-app",
      image     = "public.ecr.aws/ubuntu/ubuntu:22.04_stable",
      essential = true,
      entryPoint = [
        "/usr/bin/bash",
        "-c",
        "cp /usr/bin/bash /tmp/malware; chmod u+s /tmp/malware; apt update;apt install -y curl wget; /tmp/malware -c 'while true; do wget https://google.com; sleep 60; done'"
      ],
    }
  }
  cpu                    = 256
  memory                 = 512
  enable_fault_injection = false
  ephemeral_storage = {
    size_in_gib = 40
  }
  network_mode = "awsvpc"
  pid_mode     = "task"
  proxy_configuration = {
    type           = "APPMESH"
    container_name = "datadog-dummy-app"
    properties = {
      AppPorts         = "8080"
      EgressIgnoredIPs = "169.254.170.2,169.254.169.254"
      IgnoredUID       = "1337"
      ProxyEgressPort  = 15001
      ProxyIngressPort = 15000
    }
  }
  volume = {
    name = "service-storage"
    docker_volume_configuration = {
      scope         = "shared"
      autoprovision = true
      driver        = "local"
      driver_opts = {
        "type"   = "nfs"
        "device" = "${aws_efs_file_system.fs.dns_name}:/"
        "o"      = "addr=${aws_efs_file_system.fs.dns_name},rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport"
      }
    }
    efs_volume_configuration = {
      file_system_id          = aws_efs_file_system.fs.id
      root_directory          = "/opt/data"
      transit_encryption      = "ENABLED"
      transit_encryption_port = 2999
      authorization_config = {
        access_point_id = aws_efs_access_point.fs.id
        iam             = "ENABLED"
      }
    }
  }
  skip_destroy = false
  runtime_platform = {
    operating_system_family = "LINUX"
    cpu_architecture        = "X86_64"
  }
}
```
smoke_tests/ecs_fargate/variables.tf

Lines changed: 15 additions & 0 deletions

```hcl
variable "dd_api_key" {
  description = "Datadog API Key"
  type        = string
}

variable "dd_service" {
  description = "Service name for resource filtering in Datadog"
  type        = string
}

variable "dd_site" {
  description = "Datadog Site"
  type        = string
  default     = "datadoghq.com"
}
```
smoke_tests/ecs_fargate/versions.tf

Lines changed: 10 additions & 0 deletions

```hcl
terraform {
  required_version = ">= 1.5.0"

  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = ">= 5.77.0"
    }
  }
}
```
