Add DataDog config to HCP Log Streaming Destination (#803)
* add datadog config option to hcp log streaming destination resource

* update docs

* add changelog
leahrob authored Apr 4, 2024
1 parent ff7a0bc commit a2a669e
Showing 6 changed files with 252 additions and 5 deletions.
3 changes: 3 additions & 0 deletions .changelog/803.txt
@@ -0,0 +1,3 @@
```release-note:feature
Add DataDog config option for hcp_log_streaming_destination resource.
```
27 changes: 27 additions & 0 deletions docs/resources/log_streaming_destination.md
@@ -25,6 +25,19 @@ resource "hcp_log_streaming_destination" "example_cloudwatch" {
}
```

## Example Usage: DataDog

```terraform
resource "hcp_log_streaming_destination" "example_datadog" {
name = "example_datadog"
datadog = {
endpoint = "https://datadog-api.com"
api_key = "API_KEY_VALUE_HERE"
application_key = "APPLICATION_VALUE_HERE"
}
}
```

## Example Usage: SplunkCloud

```terraform
@@ -47,6 +60,7 @@ resource "hcp_log_streaming_destination" "example_splunk_cloud" {
### Optional

- `cloudwatch` (Attributes) (see [below for nested schema](#nestedatt--cloudwatch))
- `datadog` (Attributes) (see [below for nested schema](#nestedatt--datadog))
- `splunk_cloud` (Attributes) (see [below for nested schema](#nestedatt--splunk_cloud))

### Read-Only
@@ -67,6 +81,19 @@ Optional:
- `log_group_name` (String) The log_group_name of the CloudWatch destination.


<a id="nestedatt--datadog"></a>
### Nested Schema for `datadog`

Required:

- `api_key` (String, Sensitive) The value for the DD-API-KEY to send when making requests to DataDog.
- `endpoint` (String) The Datadog endpoint to send logs to.

Optional:

- `application_key` (String, Sensitive) The value for the DD-APPLICATION-KEY to send when making requests to DataDog.
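
For illustration, a minimal sketch that sets only the required `endpoint` and `api_key` attributes and omits the optional `application_key` (all values below are placeholders) could look like:

```terraform
resource "hcp_log_streaming_destination" "example_datadog_minimal" {
  name = "example_datadog_minimal"
  datadog = {
    endpoint = "https://datadog-api.com"
    api_key  = "API_KEY_VALUE_HERE"
  }
}
```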


<a id="nestedatt--splunk_cloud"></a>
### Nested Schema for `splunk_cloud`

8 changes: 8 additions & 0 deletions examples/resources/hcp_log_streaming_destination/resource_datadog.tf
@@ -0,0 +1,8 @@
resource "hcp_log_streaming_destination" "example_datadog" {
name = "example_datadog"
datadog = {
endpoint = "https://datadog-api.com"
api_key = "API_KEY_VALUE_HERE"
application_key = "APPLICATION_VALUE_HERE"
}
}
@@ -81,9 +81,10 @@ func (r *resourceHCPLogStreamingDestination) Schema(_ context.Context, _ resourc
},
Optional: true,
Validators: []validator.Object{
// Validate only this attribute or cloudwatch is configured.
// Validate that only one of this attribute, cloudwatch, or datadog is configured.
objectvalidator.ExactlyOneOf(path.Expressions{
path.MatchRoot("cloudwatch"),
path.MatchRoot("datadog"),
}...),
},
},
@@ -113,9 +114,39 @@ func (r *resourceHCPLogStreamingDestination) Schema(_ context.Context, _ resourc
},
Optional: true,
Validators: []validator.Object{
// Validate only this attribute or splunk_cloud is configured.
// Validate that only one of this attribute, splunk_cloud, or datadog is configured.
objectvalidator.ExactlyOneOf(path.Expressions{
path.MatchRoot("splunk_cloud"),
path.MatchRoot("datadog"),
}...),
},
},
"datadog": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"endpoint": schema.StringAttribute{
Description: "The Datadog endpoint to send logs to.",
Required: true,
},
"api_key": schema.StringAttribute{
Description: "The value for the DD-API-KEY to send when making requests to DataDog.",
Required: true,
Sensitive: true,
},
"application_key": schema.StringAttribute{
Description: "The value for the DD-APPLICATION-KEY to send when making requests to DataDog.",
Optional: true,
Sensitive: true,
},
},
PlanModifiers: []planmodifier.Object{
objectplanmodifier.UseStateForUnknown(),
},
Optional: true,
Validators: []validator.Object{
// Validate that only one of this attribute, splunk_cloud, or cloudwatch is configured.
objectvalidator.ExactlyOneOf(path.Expressions{
path.MatchRoot("splunk_cloud"),
path.MatchRoot("cloudwatch"),
}...),
},
},
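
The `ExactlyOneOf` validators above make the `cloudwatch`, `datadog`, and `splunk_cloud` blocks mutually exclusive. As a hypothetical illustration (resource name and values are invented), a destination may carry only one provider block; adding a second would fail plan-time validation:

```terraform
resource "hcp_log_streaming_destination" "only_one_provider" {
  name = "only_one_provider"

  datadog = {
    endpoint = "https://datadog-api.com"
    api_key  = "API_KEY_VALUE_HERE"
  }

  # Adding a second provider block here, e.g. cloudwatch = { ... } or
  # splunk_cloud = { ... }, would be rejected by the ExactlyOneOf validators.
}
```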
@@ -143,9 +174,25 @@ type HCPLogStreamingDestination struct {
StreamingDestinationID types.String `tfsdk:"streaming_destination_id"`
SplunkCloud types.Object `tfsdk:"splunk_cloud"`
CloudWatch types.Object `tfsdk:"cloudwatch"`
Datadog types.Object `tfsdk:"datadog"`

splunkCloud *SplunkCloudProvider `tfsdk:"-"`
cloudwatch *CloudWatchProvider `tfsdk:"-"`
datadog *DataDogProvider `tfsdk:"-"`
}

type DataDogProvider struct {
Endpoint types.String `tfsdk:"endpoint"`
APIKey types.String `tfsdk:"api_key"`
ApplicationKey types.String `tfsdk:"application_key"`
}

func (d DataDogProvider) AttributeTypes() map[string]attr.Type {
return map[string]attr.Type{
"endpoint": types.StringType,
"api_key": types.StringType,
"application_key": types.StringType,
}
}

type SplunkCloudProvider struct {
@@ -190,12 +237,17 @@ func (h *HCPLogStreamingDestination) extract(ctx context.Context) diag.Diagnosti
diags = h.CloudWatch.As(ctx, h.cloudwatch, basetypes.ObjectAsOptions{})
}

if !h.Datadog.IsNull() {
h.datadog = &DataDogProvider{}
diags = h.Datadog.As(ctx, h.datadog, basetypes.ObjectAsOptions{})
}

return diags
}

// fromModel encodes the values from a Log Streaming Destination model into the
// Terraform values, such that they can be saved to state.
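// The Datadog API key is supplied separately (from the plan on create, or from existing state on read)
// because the key must be written back into the datadog object when saving state.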
func (h *HCPLogStreamingDestination) fromModel(ctx context.Context, logSD *models.LogService20210330Destination) diag.Diagnostics {
func (h *HCPLogStreamingDestination) fromModel(ctx context.Context, logSD *models.LogService20210330Destination, dataDogAPIKeyValue string) diag.Diagnostics {
var diags diag.Diagnostics
h.Name = types.StringValue(logSD.Name)
h.StreamingDestinationID = types.StringValue(logSD.Resource.ID)
@@ -215,6 +267,23 @@ func (h *HCPLogStreamingDestination) fromModel(ctx context.Context, logSD *model
})
}

if logSD.DatadogProvider != nil {
var applicationKeyValue basetypes.StringValue

if logSD.DatadogProvider.Authorization.ExtraProperties != nil {
extraProps, ok := logSD.DatadogProvider.Authorization.ExtraProperties.(map[string]interface{})
if ok {
applicationKeyValue = types.StringValue(extraProps["DD-APPLICATION-KEY"].(string))
}
}

h.Datadog = types.ObjectValueMust(h.Datadog.AttributeTypes(ctx), map[string]attr.Value{
"endpoint": types.StringValue(logSD.DatadogProvider.Endpoint),
"api_key": types.StringValue(dataDogAPIKeyValue),
"application_key": applicationKeyValue,
})
}

return diags
}

@@ -257,6 +326,27 @@ func (r *resourceHCPLogStreamingDestination) Create(ctx context.Context, req res
}
}

fromModelDatadogAPIKey := ""
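// fromModelDatadogAPIKey carries the plan's API key so it can be handed to fromModel below when
// persisting state; it remains empty unless a Datadog destination is configured.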
if plan.datadog != nil {
fromModelDatadogAPIKey = plan.datadog.APIKey.ValueString()

ddProviderAuthorization := &models.LogService20210330Authorization{
Header: "DD-API-KEY",
Value: fromModelDatadogAPIKey,
}

if !plan.datadog.ApplicationKey.IsNull() {
ddProviderAuthorization.ExtraProperties = map[string]string{
"DD-APPLICATION-KEY": plan.datadog.ApplicationKey.ValueString(),
}
}

createRequestBody.DatadogProvider = &models.LogService20210330DatadogProvider{
Endpoint: plan.datadog.Endpoint.ValueString(),
Authorization: ddProviderAuthorization,
}
}

createParams.Body = createRequestBody

res, err := r.client.LogService.LogServiceCreateStreamingDestination(createParams, nil)
@@ -276,7 +366,7 @@ func (r *resourceHCPLogStreamingDestination) Create(ctx context.Context, req res
resp.Diagnostics.AddError("Error retrieving newly created Log Streaming Destination", err.Error())
}

resp.Diagnostics.Append(plan.fromModel(ctx, logStreamingDest)...)
resp.Diagnostics.Append(plan.fromModel(ctx, logStreamingDest, fromModelDatadogAPIKey)...)
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
}

@@ -302,7 +392,14 @@ func (r *resourceHCPLogStreamingDestination) Read(ctx context.Context, req resou
return
}

resp.Diagnostics.Append(state.fromModel(ctx, res)...)
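// The API key is not read from the API response; carry forward the value already tracked in state
// so fromModel can repopulate the datadog object with it.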
ddAPIKey := ""
if !state.Datadog.IsNull() {
var dataDogState DataDogProvider
_ = state.Datadog.As(ctx, &dataDogState, basetypes.ObjectAsOptions{})
ddAPIKey = dataDogState.APIKey.ValueString()
}

resp.Diagnostics.Append(state.fromModel(ctx, res, ddAPIKey)...)
resp.Diagnostics.Append(resp.State.Set(ctx, &state)...)
}

@@ -362,6 +459,34 @@ func (r *resourceHCPLogStreamingDestination) Update(ctx context.Context, req res
}
}

// If the Terraform plan configures a Datadog destination
if !plan.Datadog.IsNull() {
if !state.Datadog.IsNull() && plan.Datadog.Equal(state.Datadog) {
// Nothing to do; the Datadog configuration has not changed.
} else {
// The plan differs from state, so call the log service to update the destination.
fieldMaskPaths = append(fieldMaskPaths, "provider")
ddProviderAuthorization := &models.LogService20210330Authorization{
Header: "DD-API-KEY",
Value: plan.datadog.APIKey.ValueString(),
}

if !plan.datadog.ApplicationKey.IsNull() {
ddProviderAuthorization.ExtraProperties = map[string]string{
"DD-APPLICATION-KEY": plan.datadog.ApplicationKey.ValueString(),
}
}

destination.DatadogProvider = &models.LogService20210330DatadogProvider{
Endpoint: plan.datadog.Endpoint.ValueString(),
Authorization: ddProviderAuthorization,
}
}
}

// For simplicity, we update the entire provider object whenever any value within it has changed.
// Updating only the changed subfields of a specific provider object would work too, but it would add
// complexity as more providers are added to the supported list.
if len(fieldMaskPaths) > 0 {
destination.Resource = &models.LocationLink{
ID: state.StreamingDestinationID.ValueString(),
@@ -379,6 +504,7 @@ func (r *resourceHCPLogStreamingDestination) Update(ctx context.Context, req res
resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
}
}

func (r *resourceHCPLogStreamingDestination) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
var state HCPLogStreamingDestination
resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
@@ -172,6 +172,85 @@ func testAccCloudWatchLogsConfigUpdated(name string) string {
`, name)
}

func TestAccHCPLogStreamingDestinationDatadog(t *testing.T) {
resourceName := "hcp_log_streaming_destination.test_datadog"
ddName := "dd-resource-name-1"
ddNameUpdated := "dd-resource-name-2"

resource.Test(t, resource.TestCase{
PreCheck: func() { acctest.PreCheck(t) },
ProtoV6ProviderFactories: acctest.ProtoV6ProviderFactories,
CheckDestroy: func(s *terraform.State) error {
err := testAccHCPLogStreamingDestinationDestroy(t, s)
if err != nil {
return err
}
return nil
},
Steps: []resource.TestStep{
// Tests create
{
Config: testAccDatadogConfig(ddName),
Check: resource.ComposeTestCheckFunc(
testAccHCPLogStreamingDestinationExists(t, resourceName),
resource.TestCheckResourceAttr(resourceName, "name", ddName),
resource.TestCheckResourceAttrSet(resourceName, "datadog.endpoint"),
resource.TestCheckResourceAttrSet(resourceName, "datadog.application_key"),
resource.TestCheckResourceAttrSet(resourceName, "datadog.api_key"),
resource.TestCheckResourceAttr(resourceName, "datadog.endpoint", "https://datadog-api.com"),
resource.TestCheckResourceAttr(resourceName, "datadog.application_key", "APPLICATION-VALUE-HERE"),
resource.TestCheckResourceAttr(resourceName, "datadog.api_key", "VALUEHERE"),
),
},
{
// Update the name, endpoint and api key and expect in-place update
Config: testAccDatadogConfigUpdated(ddNameUpdated),
ConfigPlanChecks: resource.ConfigPlanChecks{
PreApply: []plancheck.PlanCheck{
plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate),
},
},
Check: resource.ComposeTestCheckFunc(
testAccHCPLogStreamingDestinationExists(t, resourceName),
resource.TestCheckResourceAttr(resourceName, "name", ddNameUpdated),
resource.TestCheckResourceAttrSet(resourceName, "datadog.endpoint"),
resource.TestCheckResourceAttrSet(resourceName, "datadog.application_key"),
resource.TestCheckResourceAttrSet(resourceName, "datadog.api_key"),
resource.TestCheckResourceAttr(resourceName, "datadog.endpoint", "https://datadog-api.com/updated-endpoint"),
resource.TestCheckResourceAttr(resourceName, "datadog.application_key", "APPLICATION-VALUE-HERE"),
resource.TestCheckResourceAttr(resourceName, "datadog.api_key", "VALUEHERECHANGED"),
),
},
},
})
}

func testAccDatadogConfig(name string) string {
return fmt.Sprintf(`
resource "hcp_log_streaming_destination" "test_datadog" {
name = "%[1]s"
datadog = {
endpoint = "https://datadog-api.com"
api_key = "VALUEHERE"
application_key = "APPLICATION-VALUE-HERE"
}
}
`, name)
}

func testAccDatadogConfigUpdated(name string) string {
return fmt.Sprintf(`
resource "hcp_log_streaming_destination" "test_datadog" {
name = "%[1]s"
datadog = {
endpoint = "https://datadog-api.com/updated-endpoint"
api_key = "VALUEHERECHANGED"
application_key = "APPLICATION-VALUE-HERE"
}
}
`, name)
}

func testAccHCPLogStreamingDestinationExists(t *testing.T, name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[name]
4 changes: 4 additions & 0 deletions templates/resources/log_streaming_destination.md.tmpl
@@ -15,6 +15,10 @@ description: |-

{{ tffile "examples/resources/hcp_log_streaming_destination/resource_cloudwatch.tf" }}

## Example Usage: DataDog

{{ tffile "examples/resources/hcp_log_streaming_destination/resource_datadog.tf" }}

## Example Usage: SplunkCloud

{{ tffile "examples/resources/hcp_log_streaming_destination/resource_splunk_cloud.tf" }}