diff --git a/.changelog/803.txt b/.changelog/803.txt
new file mode 100644
index 000000000..f15e1e321
--- /dev/null
+++ b/.changelog/803.txt
@@ -0,0 +1,3 @@
+```release-note:feature
+Add Datadog configuration option for the `hcp_log_streaming_destination` resource.
+```
\ No newline at end of file
diff --git a/docs/resources/log_streaming_destination.md b/docs/resources/log_streaming_destination.md
index 9d30ba96a..c2bba4e6f 100644
--- a/docs/resources/log_streaming_destination.md
+++ b/docs/resources/log_streaming_destination.md
@@ -25,6 +25,19 @@ resource "hcp_log_streaming_destination" "example_cloudwatch" {
}
```
+## Example Usage: Datadog
+
+```terraform
+resource "hcp_log_streaming_destination" "example_datadog" {
+ name = "example_datadog"
+ datadog = {
+ endpoint = "https://datadog-api.com"
+ api_key = "API_KEY_VALUE_HERE"
+ application_key = "APPLICATION_KEY_VALUE_HERE"
+ }
+}
+```
+
## Example Usage: SplunkCloud
```terraform
@@ -47,6 +60,7 @@ resource "hcp_log_streaming_destination" "example_splunk_cloud" {
### Optional
- `cloudwatch` (Attributes) (see [below for nested schema](#nestedatt--cloudwatch))
+- `datadog` (Attributes) (see [below for nested schema](#nestedatt--datadog))
- `splunk_cloud` (Attributes) (see [below for nested schema](#nestedatt--splunk_cloud))
### Read-Only
@@ -67,6 +81,19 @@ Optional:
- `log_group_name` (String) The log_group_name of the CloudWatch destination.
+
+### Nested Schema for `datadog`
+
+Required:
+
+- `api_key` (String, Sensitive) The value for the DD-API-KEY header to send when making requests to Datadog.
+- `endpoint` (String) The Datadog endpoint to send logs to.
+
+Optional:
+
+- `application_key` (String, Sensitive) The value for the DD-APPLICATION-KEY header to send when making requests to Datadog.
+
+
### Nested Schema for `splunk_cloud`
diff --git a/examples/resources/hcp_log_streaming_destination/resource_datadog.tf b/examples/resources/hcp_log_streaming_destination/resource_datadog.tf
new file mode 100644
index 000000000..ae1f66810
--- /dev/null
+++ b/examples/resources/hcp_log_streaming_destination/resource_datadog.tf
@@ -0,0 +1,8 @@
+resource "hcp_log_streaming_destination" "example_datadog" {
+ name = "example_datadog"
+ datadog = {
+ endpoint = "https://datadog-api.com"
+ api_key = "API_KEY_VALUE_HERE"
+ application_key = "APPLICATION_KEY_VALUE_HERE"
+ }
+}
\ No newline at end of file
diff --git a/internal/provider/logstreaming/resource_hcp_log_streaming_destination.go b/internal/provider/logstreaming/resource_hcp_log_streaming_destination.go
index 4486a10e2..376fede8a 100644
--- a/internal/provider/logstreaming/resource_hcp_log_streaming_destination.go
+++ b/internal/provider/logstreaming/resource_hcp_log_streaming_destination.go
@@ -81,9 +81,10 @@ func (r *resourceHCPLogStreamingDestination) Schema(_ context.Context, _ resourc
},
Optional: true,
Validators: []validator.Object{
- // Validate only this attribute or cloudwatch is configured.
+ // Validate that exactly one of this attribute, cloudwatch, or datadog is configured.
objectvalidator.ExactlyOneOf(path.Expressions{
path.MatchRoot("cloudwatch"),
+ path.MatchRoot("datadog"),
}...),
},
},
@@ -113,9 +114,39 @@ func (r *resourceHCPLogStreamingDestination) Schema(_ context.Context, _ resourc
},
Optional: true,
Validators: []validator.Object{
- // Validate only this attribute or splunk_cloud is configured.
+ // Validate that exactly one of this attribute, splunk_cloud, or datadog is configured.
objectvalidator.ExactlyOneOf(path.Expressions{
path.MatchRoot("splunk_cloud"),
+ path.MatchRoot("datadog"),
+ }...),
+ },
+ },
+ "datadog": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "endpoint": schema.StringAttribute{
+ Description: "The Datadog endpoint to send logs to.",
+ Required: true,
+ },
+ "api_key": schema.StringAttribute{
+ Description: "The value for the DD-API-KEY to send when making requests to DataDog.",
+ Required: true,
+ Sensitive: true,
+ },
+ "application_key": schema.StringAttribute{
+ Description: "The value for the DD-APPLICATION-KEY to send when making requests to DataDog.",
+ Optional: true,
+ Sensitive: true,
+ },
+ },
+ PlanModifiers: []planmodifier.Object{
+ objectplanmodifier.UseStateForUnknown(),
+ },
+ Optional: true,
+ Validators: []validator.Object{
+ // Validate that exactly one of this attribute, splunk_cloud, or cloudwatch is configured.
+ objectvalidator.ExactlyOneOf(path.Expressions{
+ path.MatchRoot("splunk_cloud"),
+ path.MatchRoot("cloudwatch"),
}...),
},
},
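A note on the validator wiring above: `objectvalidator.ExactlyOneOf` implicitly includes the attribute it is attached to, so listing the two sibling blocks on each of `splunk_cloud`, `cloudwatch`, and `datadog` makes the three mutually exclusive. Below is a minimal sketch of a hypothetical helper (`destinationExclusivityValidators` is not part of this change and assumes the file's existing imports) that would derive those sibling lists from a single slice:

```go
// Hypothetical helper (not part of this change): derive each destination's
// ExactlyOneOf sibling expressions from one shared list, so adding a provider
// means appending one name instead of editing every sibling's validators.
var destinationAttributeNames = []string{"splunk_cloud", "cloudwatch", "datadog"}

func destinationExclusivityValidators(self string) []validator.Object {
	siblings := path.Expressions{}
	for _, name := range destinationAttributeNames {
		if name != self {
			siblings = append(siblings, path.MatchRoot(name))
		}
	}
	// ExactlyOneOf implicitly includes the attribute it is applied to.
	return []validator.Object{objectvalidator.ExactlyOneOf(siblings...)}
}
```

Each block would then declare `Validators: destinationExclusivityValidators("datadog")`; the sketch only illustrates the mutual-exclusivity relationship, not a required refactor.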
@@ -143,9 +174,25 @@ type HCPLogStreamingDestination struct {
StreamingDestinationID types.String `tfsdk:"streaming_destination_id"`
SplunkCloud types.Object `tfsdk:"splunk_cloud"`
CloudWatch types.Object `tfsdk:"cloudwatch"`
+ Datadog types.Object `tfsdk:"datadog"`
splunkCloud *SplunkCloudProvider `tfsdk:"-"`
cloudwatch *CloudWatchProvider `tfsdk:"-"`
+ datadog *DataDogProvider `tfsdk:"-"`
+}
+
+type DataDogProvider struct {
+ Endpoint types.String `tfsdk:"endpoint"`
+ APIKey types.String `tfsdk:"api_key"`
+ ApplicationKey types.String `tfsdk:"application_key"`
+}
+
+func (d DataDogProvider) AttributeTypes() map[string]attr.Type {
+ return map[string]attr.Type{
+ "endpoint": types.StringType,
+ "api_key": types.StringType,
+ "application_key": types.StringType,
+ }
}
type SplunkCloudProvider struct {
@@ -190,12 +237,17 @@ func (h *HCPLogStreamingDestination) extract(ctx context.Context) diag.Diagnosti
diags = h.CloudWatch.As(ctx, h.cloudwatch, basetypes.ObjectAsOptions{})
}
+ if !h.Datadog.IsNull() {
+ h.datadog = &DataDogProvider{}
+ diags = h.Datadog.As(ctx, h.datadog, basetypes.ObjectAsOptions{})
+ }
+
return diags
}
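One small observation on `extract` above: each branch assigns to `diags` rather than appending. That is harmless today because the `ExactlyOneOf` validators guarantee at most one block is set, but appending is the more defensive pattern. A sketch, assuming the file's existing imports:

```go
// Sketch: accumulate diagnostics instead of overwriting them, in case more
// than one branch ever runs.
if !h.Datadog.IsNull() {
	h.datadog = &DataDogProvider{}
	diags.Append(h.Datadog.As(ctx, h.datadog, basetypes.ObjectAsOptions{})...)
}
```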
// fromModel encodes the values from a Log Streaming Destination model into the
// Terraform values, such that they can be saved to state.
-func (h *HCPLogStreamingDestination) fromModel(ctx context.Context, logSD *models.LogService20210330Destination) diag.Diagnostics {
+func (h *HCPLogStreamingDestination) fromModel(ctx context.Context, logSD *models.LogService20210330Destination, dataDogAPIKeyValue string) diag.Diagnostics {
var diags diag.Diagnostics
h.Name = types.StringValue(logSD.Name)
h.StreamingDestinationID = types.StringValue(logSD.Resource.ID)
@@ -215,6 +267,23 @@ func (h *HCPLogStreamingDestination) fromModel(ctx context.Context, logSD *model
})
}
+ if logSD.DatadogProvider != nil {
+ var applicationKeyValue basetypes.StringValue
+
+ if logSD.DatadogProvider.Authorization.ExtraProperties != nil {
+ extraProps, ok := logSD.DatadogProvider.Authorization.ExtraProperties.(map[string]interface{})
+ if ok {
+ // Guard the assertion so a missing or non-string value cannot panic.
+ if appKey, ok := extraProps["DD-APPLICATION-KEY"].(string); ok {
+ applicationKeyValue = types.StringValue(appKey)
+ }
+ }
+ }
+
+ h.Datadog = types.ObjectValueMust(h.Datadog.AttributeTypes(ctx), map[string]attr.Value{
+ "endpoint": types.StringValue(logSD.DatadogProvider.Endpoint),
+ "api_key": types.StringValue(dataDogAPIKeyValue),
+ "application_key": applicationKeyValue,
+ })
+ }
+
return diags
}
@@ -257,6 +326,27 @@ func (r *resourceHCPLogStreamingDestination) Create(ctx context.Context, req res
}
}
+ fromModelDatadogAPIKey := ""
+ if plan.datadog != nil {
+ fromModelDatadogAPIKey = plan.datadog.APIKey.ValueString()
+
+ ddProviderAuthorization := &models.LogService20210330Authorization{
+ Header: "DD-API-KEY",
+ Value: fromModelDatadogAPIKey,
+ }
+
+ if !plan.datadog.ApplicationKey.IsNull() {
+ ddProviderAuthorization.ExtraProperties = map[string]string{
+ "DD-APPLICATION-KEY": plan.datadog.ApplicationKey.ValueString(),
+ }
+ }
+
+ createRequestBody.DatadogProvider = &models.LogService20210330DatadogProvider{
+ Endpoint: plan.datadog.Endpoint.ValueString(),
+ Authorization: ddProviderAuthorization,
+ }
+ }
+
createParams.Body = createRequestBody
res, err := r.client.LogService.LogServiceCreateStreamingDestination(createParams, nil)
@@ -276,7 +366,7 @@ func (r *resourceHCPLogStreamingDestination) Create(ctx context.Context, req res
resp.Diagnostics.AddError("Error retrieving newly created Log Streaming Destination", err.Error())
}
- resp.Diagnostics.Append(plan.fromModel(ctx, logStreamingDest)...)
+ resp.Diagnostics.Append(plan.fromModel(ctx, logStreamingDest, fromModelDatadogAPIKey)...)
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
}
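The Datadog payload construction in `Create` above is repeated almost verbatim in `Update` further down. A hypothetical helper (`datadogProviderFromPlan` is not part of this change and uses only types already referenced in this file) that both call sites could share:

```go
// Hypothetical helper: build the Datadog provider payload from the planned
// configuration, including the optional DD-APPLICATION-KEY extra property.
func datadogProviderFromPlan(d *DataDogProvider) *models.LogService20210330DatadogProvider {
	authorization := &models.LogService20210330Authorization{
		Header: "DD-API-KEY",
		Value:  d.APIKey.ValueString(),
	}
	if !d.ApplicationKey.IsNull() {
		authorization.ExtraProperties = map[string]string{
			"DD-APPLICATION-KEY": d.ApplicationKey.ValueString(),
		}
	}
	return &models.LogService20210330DatadogProvider{
		Endpoint:      d.Endpoint.ValueString(),
		Authorization: authorization,
	}
}
```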
@@ -302,7 +392,14 @@ func (r *resourceHCPLogStreamingDestination) Read(ctx context.Context, req resou
return
}
- resp.Diagnostics.Append(state.fromModel(ctx, res)...)
+ ddAPIKey := ""
+ if !state.Datadog.IsNull() {
+ var dataDogState DataDogProvider
+ _ = state.Datadog.As(ctx, &dataDogState, basetypes.ObjectAsOptions{})
+ ddAPIKey = dataDogState.APIKey.ValueString()
+ }
+
+ resp.Diagnostics.Append(state.fromModel(ctx, res, ddAPIKey)...)
resp.Diagnostics.Append(resp.State.Set(ctx, &state)...)
}
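Why the API key is threaded through: because `api_key` is sensitive, the service response presumably does not echo the DD-API-KEY back, so `fromModel` receives it from the plan on Create and from prior state on Read. A hypothetical convenience method (`datadogAPIKey` is not part of this change and assumes the file's existing imports) capturing the state-side lookup shown above:

```go
// Hypothetical helper: return the DD-API-KEY held in this object, or "" when
// no datadog block is configured.
func (h *HCPLogStreamingDestination) datadogAPIKey(ctx context.Context) string {
	if h.Datadog.IsNull() {
		return ""
	}
	var dd DataDogProvider
	_ = h.Datadog.As(ctx, &dd, basetypes.ObjectAsOptions{})
	return dd.APIKey.ValueString()
}
```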
@@ -362,6 +459,34 @@ func (r *resourceHCPLogStreamingDestination) Update(ctx context.Context, req res
}
}
+ // If the Terraform plan configures the Datadog destination
+ if !plan.Datadog.IsNull() {
+ if !state.Datadog.IsNull() && plan.Datadog.Equal(state.Datadog) {
+ // The Datadog configuration is unchanged; nothing to update.
+ } else {
+ // The Datadog block is new or has changed, so rebuild the provider payload and mark the provider field for update.
+ fieldMaskPaths = append(fieldMaskPaths, "provider")
+ ddProviderAuthorization := &models.LogService20210330Authorization{
+ Header: "DD-API-KEY",
+ Value: plan.datadog.APIKey.ValueString(),
+ }
+
+ if !plan.datadog.ApplicationKey.IsNull() {
+ ddProviderAuthorization.ExtraProperties = map[string]string{
+ "DD-APPLICATION-KEY": plan.datadog.ApplicationKey.ValueString(),
+ }
+ }
+
+ destination.DatadogProvider = &models.LogService20210330DatadogProvider{
+ Endpoint: plan.datadog.Endpoint.ValueString(),
+ Authorization: ddProviderAuthorization,
+ }
+ }
+ }
+
+ // For simplicity, we replace the entire provider object whenever any value within it changes.
+ // Updating individual subfields would add complexity as more providers are added to the supported list.
if len(fieldMaskPaths) > 0 {
destination.Resource = &models.LocationLink{
ID: state.StreamingDestinationID.ValueString(),
@@ -379,6 +504,7 @@ func (r *resourceHCPLogStreamingDestination) Update(ctx context.Context, req res
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
}
}
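The empty branch in the Datadog update block above mirrors the structure already used for the other providers; if that pattern is ever revisited, the check collapses to a single condition. A behavior-equivalent sketch, reusing the hypothetical `datadogProviderFromPlan` helper sketched earlier:

```go
// Sketch: rebuild the Datadog provider only when the planned block is set and
// differs from state (Equal returns false when the state object is null).
if !plan.Datadog.IsNull() && !plan.Datadog.Equal(state.Datadog) {
	fieldMaskPaths = append(fieldMaskPaths, "provider")
	destination.DatadogProvider = datadogProviderFromPlan(plan.datadog)
}
```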
+
func (r *resourceHCPLogStreamingDestination) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
var state HCPLogStreamingDestination
resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
diff --git a/internal/provider/logstreaming/resource_hcp_log_streaming_destination_test.go b/internal/provider/logstreaming/resource_hcp_log_streaming_destination_test.go
index a8b8f3b44..b29d70674 100644
--- a/internal/provider/logstreaming/resource_hcp_log_streaming_destination_test.go
+++ b/internal/provider/logstreaming/resource_hcp_log_streaming_destination_test.go
@@ -172,6 +172,85 @@ func testAccCloudWatchLogsConfigUpdated(name string) string {
`, name)
}
+func TestAccHCPLogStreamingDestinationDatadog(t *testing.T) {
+ resourceName := "hcp_log_streaming_destination.test_datadog"
+ ddName := "dd-resource-name-1"
+ ddNameUpdated := "dd-resource-name-2"
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { acctest.PreCheck(t) },
+ ProtoV6ProviderFactories: acctest.ProtoV6ProviderFactories,
+ CheckDestroy: func(s *terraform.State) error {
+ return testAccHCPLogStreamingDestinationDestroy(t, s)
+ },
+ Steps: []resource.TestStep{
+ // Tests create
+ {
+ Config: testAccDatadogConfig(ddName),
+ Check: resource.ComposeTestCheckFunc(
+ testAccHCPLogStreamingDestinationExists(t, resourceName),
+ resource.TestCheckResourceAttr(resourceName, "name", ddName),
+ resource.TestCheckResourceAttrSet(resourceName, "datadog.endpoint"),
+ resource.TestCheckResourceAttrSet(resourceName, "datadog.application_key"),
+ resource.TestCheckResourceAttrSet(resourceName, "datadog.api_key"),
+ resource.TestCheckResourceAttr(resourceName, "datadog.endpoint", "https://datadog-api.com"),
+ resource.TestCheckResourceAttr(resourceName, "datadog.application_key", "APPLICATION-VALUE-HERE"),
+ resource.TestCheckResourceAttr(resourceName, "datadog.api_key", "VALUEHERE"),
+ ),
+ },
+ {
+ // Update the name, endpoint, and api_key and expect an in-place update
+ Config: testAccDatadogConfigUpdated(ddNameUpdated),
+ ConfigPlanChecks: resource.ConfigPlanChecks{
+ PreApply: []plancheck.PlanCheck{
+ plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate),
+ },
+ },
+ Check: resource.ComposeTestCheckFunc(
+ testAccHCPLogStreamingDestinationExists(t, resourceName),
+ resource.TestCheckResourceAttr(resourceName, "name", ddNameUpdated),
+ resource.TestCheckResourceAttrSet(resourceName, "datadog.endpoint"),
+ resource.TestCheckResourceAttrSet(resourceName, "datadog.application_key"),
+ resource.TestCheckResourceAttrSet(resourceName, "datadog.api_key"),
+ resource.TestCheckResourceAttr(resourceName, "datadog.endpoint", "https://datadog-api.com/updated-endpoint"),
+ resource.TestCheckResourceAttr(resourceName, "datadog.application_key", "APPLICATION-VALUE-HERE"),
+ resource.TestCheckResourceAttr(resourceName, "datadog.api_key", "VALUEHERECHANGED"),
+ ),
+ },
+ },
+ })
+}
+
+func testAccDatadogConfig(name string) string {
+ return fmt.Sprintf(`
+ resource "hcp_log_streaming_destination" "test_datadog" {
+ name = "%[1]s"
+ datadog = {
+ endpoint = "https://datadog-api.com"
+ api_key = "VALUEHERE"
+ application_key = "APPLICATION-VALUE-HERE"
+ }
+ }
+ `, name)
+}
+
+func testAccDatadogConfigUpdated(name string) string {
+ return fmt.Sprintf(`
+ resource "hcp_log_streaming_destination" "test_datadog" {
+ name = "%[1]s"
+ datadog = {
+ endpoint = "https://datadog-api.com/updated-endpoint"
+ api_key = "VALUEHERECHANGED"
+ application_key = "APPLICATION-VALUE-HERE"
+ }
+ }
+ `, name)
+}
+
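Since `application_key` is optional, a follow-up step could cover a config that omits it, exercising the path where no DD-APPLICATION-KEY extra property is sent. A hypothetical config in the same style as those above (`testAccDatadogConfigNoAppKey` is not part of this change):

```go
// Hypothetical config (not in this PR): omits the optional application_key.
func testAccDatadogConfigNoAppKey(name string) string {
	return fmt.Sprintf(`
	resource "hcp_log_streaming_destination" "test_datadog" {
		name = "%[1]s"
		datadog = {
			endpoint = "https://datadog-api.com"
			api_key = "VALUEHERE"
		}
	}
	`, name)
}
```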
func testAccHCPLogStreamingDestinationExists(t *testing.T, name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[name]
diff --git a/templates/resources/log_streaming_destination.md.tmpl b/templates/resources/log_streaming_destination.md.tmpl
index ae0153e23..09d717d21 100644
--- a/templates/resources/log_streaming_destination.md.tmpl
+++ b/templates/resources/log_streaming_destination.md.tmpl
@@ -15,6 +15,10 @@ description: |-
{{ tffile "examples/resources/hcp_log_streaming_destination/resource_cloudwatch.tf" }}
+## Example Usage: Datadog
+
+{{ tffile "examples/resources/hcp_log_streaming_destination/resource_datadog.tf" }}
+
## Example Usage: SplunkCloud
{{ tffile "examples/resources/hcp_log_streaming_destination/resource_splunk_cloud.tf" }}