Fix Failing test TestAccSecurityCenterV2ProjectBigQueryExportConfig_basic + others #20700

Merged
3 changes: 3 additions & 0 deletions .changelog/12334.txt
@@ -0,0 +1,3 @@
```release-note:none
securitycenterv2: fixed flaky test TestAccSecurityCenterV2ProjectBigQueryExportConfig_basic
```
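The same deflake pattern is applied to every test file below: the BigQuery dataset gets `delete_contents_on_destroy = true` so it can still be destroyed after the export has written tables into it, the `time_sleep` buffer between creating the dataset and creating the SCC export is raised from 3m to 6m, and a `wait_for_cleanup` sleep is added after the export so the dataset is no longer reported as "still in use" when the test tears down. A condensed sketch of the pattern follows; the arguments are taken from the configs in this PR, but the dataset, export, and project IDs here are placeholders:

```hcl
# Condensed sketch of the deflake pattern used in these tests; IDs are placeholders.
resource "google_bigquery_dataset" "default" {
  dataset_id = "tf_test_example"
  location   = "US"

  # Allow the dataset to be destroyed even if the export already wrote tables into it.
  delete_contents_on_destroy = true
}

# Give the new dataset time to propagate before the SCC export references it.
resource "time_sleep" "wait_before_export" {
  depends_on      = [google_bigquery_dataset.default]
  create_duration = "6m"
}

resource "google_scc_project_scc_big_query_export" "default" {
  big_query_export_id = "tf-test-export-example"
  project             = "my-project" # placeholder project ID
  dataset             = google_bigquery_dataset.default.id
  description         = "SCC Findings Big Query Export"
  filter              = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""

  depends_on = [time_sleep.wait_before_export]
}

# Keep the config alive for a while after the export exists so the dataset is
# not still in use by the export when the test destroys everything.
resource "time_sleep" "wait_for_cleanup" {
  create_duration = "6m"
  depends_on      = [google_scc_project_scc_big_query_export.default]
}
```

The organization-level and v2 variants in the hunks below follow the same shape, using the corresponding `google_scc_organization_scc_big_query_export` and `google_scc_v2_*_scc_big_query_export(s)` resources.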
@@ -15,13 +15,13 @@ func TestAccSecurityCenterOrganizationBigQueryExportConfig_basic(t *testing.T) {
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("organizations/%s/bigQueryExports/%s",
orgID, "tf-test-export-"+randomSuffix),
@@ -67,6 +67,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true

labels = {
env = "default"
@@ -79,7 +80,7 @@ resource "google_bigquery_dataset" "default" {

resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "3m"
create_duration = "6m"
}

resource "google_scc_organization_scc_big_query_export" "default" {
@@ -92,6 +93,10 @@ resource "google_scc_organization_scc_big_query_export" "default" {
depends_on = [time_sleep.wait_1_minute]
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_organization_scc_big_query_export.default]
}
`, context)
}

@@ -105,6 +110,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true

labels = {
env = "default"
@@ -115,12 +121,24 @@ resource "google_bigquery_dataset" "default" {
}
}

resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
}

resource "google_scc_organization_scc_big_query_export" "default" {
big_query_export_id = "%{big_query_export_id}"
organization = "%{org_id}"
dataset = google_bigquery_dataset.default.id
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""

depends_on = [time_sleep.wait_1_minute]
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_organization_scc_big_query_export.default]
}
`, context)
}
@@ -14,13 +14,13 @@ func TestAccSecurityCenterProjectBigQueryExportConfig_basic(t *testing.T) {
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"project": envvar.GetTestProjectFromEnv(),
}
@@ -65,6 +65,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true

labels = {
env = "default"
@@ -77,7 +78,7 @@ resource "google_bigquery_dataset" "default" {

resource "time_sleep" "wait_x_minutes" {
depends_on = [google_bigquery_dataset.default]
create_duration = "3m"
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
@@ -93,6 +94,11 @@ resource "google_scc_project_scc_big_query_export" "default" {
depends_on = [time_sleep.wait_x_minutes]
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_project_scc_big_query_export.default]
}

`, context)
}

@@ -106,6 +112,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true

labels = {
env = "default"
@@ -116,12 +123,27 @@ resource "google_bigquery_dataset" "default" {
}
}

resource "time_sleep" "wait_x_minutes" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
}

resource "google_scc_project_scc_big_query_export" "default" {
big_query_export_id = "%{big_query_export_id}"
project = "%{project}"
dataset = google_bigquery_dataset.default.id
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""

depends_on = [time_sleep.wait_x_minutes]
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_project_scc_big_query_export.default]
}

`, context)
@@ -15,13 +15,13 @@ func TestAccSecurityCenterV2OrganizationBigQueryExportConfig_basic(t *testing.T)
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("organizations/%s/locations/global/bigQueryExports/%s",
orgID, "tf-test-export-"+randomSuffix),
@@ -96,7 +96,7 @@ resource "google_scc_v2_organization_scc_big_query_export" "default" {
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_export.default]
}
`, context)
@@ -123,6 +123,11 @@ resource "google_bigquery_dataset" "default" {
}
}

resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
}

resource "google_scc_v2_organization_scc_big_query_export" "default" {
name = "%{name}"
big_query_export_id = "%{big_query_export_id}"
@@ -131,10 +136,12 @@ resource "google_scc_v2_organization_scc_big_query_export" "default" {
location = "global"
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""

depends_on = [time_sleep.wait_1_minute]
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_export.default]
}
`, context)
@@ -15,13 +15,13 @@ func TestAccSecurityCenterV2OrganizationBigQueryExportsConfig_basic(t *testing.T
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("organizations/%s/locations/global/bigQueryExports/%s",
orgID, "tf-test-export-"+randomSuffix),
@@ -96,7 +96,7 @@ resource "google_scc_v2_organization_scc_big_query_exports" "default" {
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_exports.default]
}
`, context)
@@ -123,6 +123,11 @@ resource "google_bigquery_dataset" "default" {
}
}

resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
}

resource "google_scc_v2_organization_scc_big_query_exports" "default" {
name = "%{name}"
big_query_export_id = "%{big_query_export_id}"
@@ -131,10 +136,12 @@ resource "google_scc_v2_organization_scc_big_query_exports" "default" {
location = "global"
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""

depends_on = [time_sleep.wait_1_minute]
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_exports.default]
}
`, context)
@@ -15,13 +15,13 @@ func TestAccSecurityCenterV2ProjectBigQueryExportConfig_basic(t *testing.T) {
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("projects/%s/locations/global/bigQueryExports/%s",
envvar.GetTestProjectFromEnv(), "tf-test-export-"+randomSuffix),
@@ -68,6 +68,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true

labels = {
env = "default"
@@ -80,7 +81,10 @@ resource "google_bigquery_dataset" "default" {

resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "3m"
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
}

resource "google_scc_v2_project_scc_big_query_export" "default" {
@@ -94,6 +98,11 @@ resource "google_scc_v2_project_scc_big_query_export" "default" {
depends_on = [time_sleep.wait_1_minute]
}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_v2_project_scc_big_query_export.default]
}

`, context)
}

@@ -107,6 +116,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true

labels = {
env = "default"
@@ -117,6 +127,14 @@ resource "google_bigquery_dataset" "default" {
}
}

resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
}

resource "google_scc_v2_project_scc_big_query_export" "default" {
big_query_export_id = "%{big_query_export_id}"
project = "%{project}"
@@ -125,6 +143,13 @@ resource "google_scc_v2_project_scc_big_query_export" "default" {
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""

depends_on = [time_sleep.wait_1_minute]

}

resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_v2_project_scc_big_query_export.default]
}

`, context)