diff --git a/Cargo.lock b/Cargo.lock index af6b9d7f2c1..a6ed5931150 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8389,6 +8389,7 @@ dependencies = [ "maplit", "predicates", "rand 0.8.5", + "regex", "tempfile", ] diff --git a/rs/ic_os/fstrim_tool/BUILD.bazel b/rs/ic_os/fstrim_tool/BUILD.bazel index f387d516321..38abf36e4ec 100644 --- a/rs/ic_os/fstrim_tool/BUILD.bazel +++ b/rs/ic_os/fstrim_tool/BUILD.bazel @@ -17,6 +17,7 @@ DEV_DEPENDENCIES = [ "@crate_index//:assert_matches", "@crate_index//:predicates", "@crate_index//:rand", + "@crate_index//:regex", "@crate_index//:tempfile", ] diff --git a/rs/ic_os/fstrim_tool/Cargo.toml b/rs/ic_os/fstrim_tool/Cargo.toml index 56348b47f9f..d97b8a61d13 100644 --- a/rs/ic_os/fstrim_tool/Cargo.toml +++ b/rs/ic_os/fstrim_tool/Cargo.toml @@ -19,4 +19,5 @@ assert_matches = { workspace = true } ic-crypto-test-utils-reproducible-rng = { path = "../../crypto/test_utils/reproducible_rng" } predicates = { workspace = true } rand = { workspace = true } +regex = { workspace = true } tempfile = { workspace = true } diff --git a/rs/ic_os/fstrim_tool/src/lib.rs b/rs/ic_os/fstrim_tool/src/lib.rs index af39d6c6f5d..1bbe877bf5e 100644 --- a/rs/ic_os/fstrim_tool/src/lib.rs +++ b/rs/ic_os/fstrim_tool/src/lib.rs @@ -61,7 +61,12 @@ fn write_metrics_using_tmp_file(metrics: &FsTrimMetrics, metrics_filename: &str) .context("Failed to write metrics to file") } -fn update_metrics(elapsed: Duration, is_success: bool, metrics_filename: &str) -> Result<()> { +fn update_metrics( + elapsed: Duration, + is_success: bool, + metrics_filename: &str, + is_datadir: bool, +) -> Result<()> { let mut metrics = parse_existing_metrics_from_file(metrics_filename) .unwrap_or_else(|e| { eprintln!("error parsing existing metrics: {}", e); @@ -71,7 +76,13 @@ fn update_metrics(elapsed: Duration, is_success: bool, metrics_filename: &str) - eprintln!("no existing metrics found"); FsTrimMetrics::default() }); - metrics.update(is_success, elapsed)?; + + if is_datadir { + 
metrics.update_datadir(is_success, elapsed)?; + } else { + metrics.update(is_success, elapsed)?; + } + write_metrics_using_tmp_file(&metrics, metrics_filename) } @@ -101,14 +112,13 @@ pub fn fstrim_tool( let start = std::time::Instant::now(); let res_target = run_command(command, &target); let elapsed_target = start.elapsed(); - update_metrics(elapsed_target, res_target.is_ok(), &metrics_filename)?; + update_metrics(elapsed_target, res_target.is_ok(), &metrics_filename, false)?; if !datadir_target.is_empty() && !is_node_assigned() { - // TODO observability changes needed, expand the metrics logic - // let start_datadir = std::time::Instant::now(); + let start = std::time::Instant::now(); let res_datadir = run_command(command, &datadir_target); - // let elapsed_datadir = start_datadir.elapsed(); - // update_metrics(elapsed_datadir, res_datadir.is_ok(), &metrics_filename)?; + let elapsed = start.elapsed(); + update_metrics(elapsed, res_datadir.is_ok(), &metrics_filename, true)?; res_target.and(res_datadir) } else { res_target diff --git a/rs/ic_os/fstrim_tool/src/metrics/mod.rs b/rs/ic_os/fstrim_tool/src/metrics/mod.rs index 5e42bce8d83..dfa68037edb 100644 --- a/rs/ic_os/fstrim_tool/src/metrics/mod.rs +++ b/rs/ic_os/fstrim_tool/src/metrics/mod.rs @@ -8,11 +8,20 @@ const METRICS_LAST_RUN_DURATION_MILLISECONDS: &str = "fstrim_last_run_duration_m const METRICS_LAST_RUN_SUCCESS: &str = "fstrim_last_run_success"; const METRICS_RUNS_TOTAL: &str = "fstrim_runs_total"; +const METRICS_LAST_RUN_DURATION_MILLISECONDS_DATADIR: &str = + "fstrim_datadir_last_run_duration_milliseconds"; +const METRICS_LAST_RUN_SUCCESS_DATADIR: &str = "fstrim_datadir_last_run_success"; +const METRICS_RUNS_TOTAL_DATADIR: &str = "fstrim_datadir_runs_total"; + #[derive(Debug)] pub struct FsTrimMetrics { pub last_duration_milliseconds: f64, pub last_run_success: bool, pub total_runs: f64, + + pub last_duration_milliseconds_datadir: f64, + pub last_run_success_datadir: bool, + pub total_runs_datadir: 
f64, } impl Default for FsTrimMetrics { @@ -21,6 +30,10 @@ impl Default for FsTrimMetrics { last_duration_milliseconds: 0f64, last_run_success: true, total_runs: 0f64, + + last_duration_milliseconds_datadir: 0f64, + last_run_success_datadir: true, + total_runs_datadir: 0f64, } } } @@ -33,26 +46,54 @@ impl FsTrimMetrics { Ok(()) } + pub(crate) fn update_datadir(&mut self, success: bool, duration: Duration) -> Result<()> { + self.total_runs_datadir += 1f64; + self.last_run_success_datadir = success; + self.last_duration_milliseconds_datadir = duration.as_millis() as f64; + Ok(()) + } + pub fn to_p8s_metrics_string(&self) -> String { + let fstrim_last_run_duration_milliseconds = to_go_f64(self.last_duration_milliseconds); + let fstrim_last_run_success = if self.last_run_success { "1" } else { "0" }; + let fstrim_runs_total = to_go_f64(self.total_runs); + + let fstrim_datadir_last_run_duration_milliseconds = + to_go_f64(self.last_duration_milliseconds_datadir); + let fstrim_datadir_last_run_success = if self.last_run_success_datadir { + "1" + } else { + "0" + }; + let fstrim_datadir_runs_total = to_go_f64(self.total_runs_datadir); + format!( "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds\n\ # TYPE fstrim_last_run_duration_milliseconds gauge\n\ - fstrim_last_run_duration_milliseconds {}\n\ + fstrim_last_run_duration_milliseconds {fstrim_last_run_duration_milliseconds}\n\ # HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0)\n\ # TYPE fstrim_last_run_success gauge\n\ - fstrim_last_run_success {}\n\ + fstrim_last_run_success {fstrim_last_run_success}\n\ # HELP fstrim_runs_total Total number of runs of fstrim\n\ # TYPE fstrim_runs_total counter\n\ - fstrim_runs_total {}\n", - to_go_f64(self.last_duration_milliseconds), - if self.last_run_success { "1" } else { "0" }, - to_go_f64(self.total_runs), - ).to_string() + fstrim_runs_total {fstrim_runs_total}\n\ + # HELP 
fstrim_datadir_last_run_duration_milliseconds Duration of last run of fstrim on datadir in milliseconds\n\ + # TYPE fstrim_datadir_last_run_duration_milliseconds gauge\n\ + fstrim_datadir_last_run_duration_milliseconds {fstrim_datadir_last_run_duration_milliseconds}\n\ + # HELP fstrim_datadir_last_run_success Success status of last run of fstrim on datadir (success: 1, failure: 0)\n\ + # TYPE fstrim_datadir_last_run_success gauge\n\ + fstrim_datadir_last_run_success {fstrim_datadir_last_run_success}\n\ + # HELP fstrim_datadir_runs_total Total number of runs of fstrim on datadir\n\ + # TYPE fstrim_datadir_runs_total counter\n\ + fstrim_datadir_runs_total {fstrim_datadir_runs_total}\n" + ) } fn are_valid(&self) -> bool { is_f64_finite_and_0_or_larger(self.total_runs) && is_f64_finite_and_0_or_larger(self.last_duration_milliseconds) + && is_f64_finite_and_0_or_larger(self.total_runs_datadir) + && is_f64_finite_and_0_or_larger(self.last_duration_milliseconds_datadir) } } @@ -102,27 +143,41 @@ where let mut last_duration_milliseconds: Option = None; let mut last_run_success: Option = None; let mut total_runs: Option = None; + + // Default datadir fields (we treat them as optional in the metrics file) + let mut datadir_last_duration_milliseconds: f64 = 0f64; + let mut datadir_last_run_success: bool = true; + let mut datadir_total_runs: f64 = 0f64; + for line_or_err in lines { let line = line_or_err.map_err(|e| format_err!("failed to read line: {}", e))?; match line.split(' ').collect::>()[..] { ["#", ..] => continue, [key, value] => match key { METRICS_LAST_RUN_DURATION_MILLISECONDS => { - let _ = last_duration_milliseconds - .get_or_insert(parse_metrics_value(key, value)?); + last_duration_milliseconds.get_or_insert(parse_metrics_value(key, value)?); } METRICS_LAST_RUN_SUCCESS => { - let _ = - last_run_success.get_or_insert(parse_metrics_value(key, value)? > 0f64); + last_run_success.get_or_insert(parse_metrics_value(key, value)? 
> 0f64); } METRICS_RUNS_TOTAL => { - let _ = total_runs.get_or_insert(parse_metrics_value(key, value)?); + total_runs.get_or_insert(parse_metrics_value(key, value)?); + } + METRICS_LAST_RUN_DURATION_MILLISECONDS_DATADIR => { + datadir_last_duration_milliseconds = parse_metrics_value(key, value)?; + } + METRICS_LAST_RUN_SUCCESS_DATADIR => { + datadir_last_run_success = parse_metrics_value(key, value)? > 0f64; + } + METRICS_RUNS_TOTAL_DATADIR => { + datadir_total_runs = parse_metrics_value(key, value)?; } _ => return Err(format_err!("unknown metric key: {}", key)), }, _ => return Err(format_err!("invalid metric line: {:?}", line)), } } + let metrics = FsTrimMetrics { last_duration_milliseconds: last_duration_milliseconds.ok_or(format_err!( "missing metric: {}", @@ -131,6 +186,9 @@ where last_run_success: last_run_success .ok_or(format_err!("missing metric: {}", METRICS_LAST_RUN_SUCCESS))?, total_runs: total_runs.ok_or(format_err!("missing metric: {}", METRICS_RUNS_TOTAL))?, + last_duration_milliseconds_datadir: datadir_last_duration_milliseconds, + last_run_success_datadir: datadir_last_run_success, + total_runs_datadir: datadir_total_runs, }; if !metrics.are_valid() { return Err(format_err!("parsed metrics are invalid")); @@ -148,6 +206,12 @@ impl PartialEq for FsTrimMetrics { other.last_duration_milliseconds, ) && (self.last_run_success == other.last_run_success) + && f64_approx_eq( + self.last_duration_milliseconds_datadir, + other.last_duration_milliseconds_datadir, + ) + && (self.last_run_success_datadir == other.last_run_success_datadir) + && f64_approx_eq(self.total_runs_datadir, other.total_runs_datadir) } } diff --git a/rs/ic_os/fstrim_tool/src/metrics/tests.rs b/rs/ic_os/fstrim_tool/src/metrics/tests.rs index bca2d1e79b4..b21a61f3723 100644 --- a/rs/ic_os/fstrim_tool/src/metrics/tests.rs +++ b/rs/ic_os/fstrim_tool/src/metrics/tests.rs @@ -3,9 +3,11 @@ use assert_matches::assert_matches; use ic_crypto_test_utils_reproducible_rng::reproducible_rng; use 
rand::Rng; use std::fs::write; +use std::time::Duration; +use tempfile::tempdir; #[test] -fn should_compare_f64() { +fn compare_f64() { assert!(f64_approx_eq(f64::NAN, f64::NAN)); assert!(f64_approx_eq(f64::INFINITY, f64::INFINITY)); assert!(f64_approx_eq(f64::INFINITY + 1f64, f64::INFINITY)); @@ -16,8 +18,8 @@ fn should_compare_f64() { } #[test] -fn should_parse_valid_metrics_file() { - let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); +fn parse_valid_metrics_file() { + let temp_dir = tempdir().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); let metrics_file_content = "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds\n\ @@ -30,18 +32,22 @@ fn should_parse_valid_metrics_file() { # TYPE fstrim_runs_total counter\n\ fstrim_runs_total 1\n"; write(&test_file, metrics_file_content).expect("error writing to file"); + let parsed_metrics = parse_existing_metrics_from_file(&test_file.to_string_lossy()).unwrap(); let expected_metrics = FsTrimMetrics { last_duration_milliseconds: 6.0, last_run_success: true, total_runs: 1.0, + last_duration_milliseconds_datadir: 0.0, + last_run_success_datadir: true, + total_runs_datadir: 0.0, }; assert_eq!(parsed_metrics, Some(expected_metrics)); } #[test] -fn should_only_consider_first_parsed_value_when_parsing_metrics_file() { - let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); +fn ignore_subsequent_values_for_same_metric() { + let temp_dir = tempdir().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); let metrics_file_content = "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds\n\ @@ -57,17 +63,21 @@ fn should_only_consider_first_parsed_value_when_parsing_metrics_file() { fstrim_runs_total 12\n\ fstrim_runs_total 1\n"; write(&test_file, metrics_file_content).expect("error 
writing to file"); + let parsed_metrics = parse_existing_metrics_from_file(&test_file.to_string_lossy()).unwrap(); let expected_metrics = FsTrimMetrics { last_duration_milliseconds: 6.0, last_run_success: true, total_runs: 12.0, + last_duration_milliseconds_datadir: 0.0, + last_run_success_datadir: true, + total_runs_datadir: 0.0, }; assert_eq!(parsed_metrics, Some(expected_metrics)); } #[test] -fn should_return_error_when_parsing_empty_metrics_file() { +fn should_error_on_empty_metrics_file() { let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); write(&test_file, "").expect("error writing to file"); @@ -76,7 +86,7 @@ fn should_return_error_when_parsing_empty_metrics_file() { } #[test] -fn should_return_error_for_metrics_file_with_too_many_tokens() { +fn should_error_when_metrics_file_has_too_many_tokens() { let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); write(&test_file, "pineapple on pizza is delicious").expect("error writing to file"); @@ -87,7 +97,7 @@ fn should_return_error_for_metrics_file_with_too_many_tokens() { } #[test] -fn should_return_error_for_metrics_file_with_unknown_metric_name() { +fn should_error_when_unknown_metric_name() { let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); write(&test_file, "pineapple pizza").expect("error writing to file"); @@ -98,7 +108,7 @@ fn should_return_error_for_metrics_file_with_unknown_metric_name() { } #[test] -fn should_return_error_for_metrics_file_with_timestamp() { +fn should_error_when_metrics_file_has_timestamp() { let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); write( @@ -113,7 +123,7 @@ fn should_return_error_for_metrics_file_with_timestamp() 
{ } #[test] -fn should_return_error_for_metrics_file_with_non_numeric_value() { +fn should_error_when_metrics_file_has_non_numeric_value() { let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); write(&test_file, format!("{} pizza", METRICS_RUNS_TOTAL).as_str()) @@ -125,7 +135,7 @@ fn should_return_error_for_metrics_file_with_non_numeric_value() { } #[test] -fn should_return_none_when_parsing_if_metrics_file_does_not_exist() { +fn file_does_not_exist() { let temp_dir = tempfile::TempDir::new().expect("failed to create a temporary directory"); let test_file = temp_dir.as_ref().join("test_file"); let parsed_metrics = parse_existing_metrics_from_file(&test_file.to_string_lossy()).unwrap(); @@ -133,7 +143,7 @@ fn should_return_none_when_parsing_if_metrics_file_does_not_exist() { } #[test] -fn should_set_metrics() { +fn set_metrics() { let mut existing_metrics = FsTrimMetrics::default(); existing_metrics .update(true, Duration::from_millis(110)) @@ -142,12 +152,13 @@ fn should_set_metrics() { last_duration_milliseconds: 110.0, last_run_success: true, total_runs: 1.0, + ..FsTrimMetrics::default() }; assert_eq!(existing_metrics, expected_metrics); } #[test] -fn should_update_metrics() { +fn update_metrics() { let mut rng = reproducible_rng(); for _ in 0..100 { let total_runs: u64 = rng.gen_range(0..10000000); @@ -162,7 +173,7 @@ fn should_update_metrics() { for _ in 0..100 { let success = rng.gen_bool(0.5); let duration = Duration::from_millis(rng.gen_range(0..15000)); - update_metrics(&mut expected_metrics, success, duration); + update_metrics_locally(&mut expected_metrics, success, duration); updated_metrics .update(success, duration) .expect("should update metrics successfully"); @@ -177,14 +188,15 @@ fn should_update_metrics() { } } -fn update_metrics(metrics: &mut FsTrimMetrics, success: bool, duration: Duration) { +// Simple local "update" for the test reference +fn 
update_metrics_locally(metrics: &mut FsTrimMetrics, success: bool, duration: Duration) { metrics.total_runs += 1f64; metrics.last_run_success = success; metrics.last_duration_milliseconds = duration.as_millis() as f64; } #[test] -fn should_update_metric_with_infinite_values() { +fn update_metrics_with_infinite_values() { let mut existing_metrics = FsTrimMetrics { total_runs: f64::INFINITY, ..FsTrimMetrics::default() @@ -198,13 +210,14 @@ fn should_update_metric_with_infinite_values() { last_duration_milliseconds: duration.as_millis() as f64, last_run_success: success, total_runs: f64::INFINITY, + ..FsTrimMetrics::default() }; assert_eq!(existing_metrics, expected_metrics); } #[test] -fn should_update_metric_with_nan_values() { +fn update_metrics_with_nan_values() { let mut existing_metrics = FsTrimMetrics { total_runs: f64::NAN, ..FsTrimMetrics::default() @@ -218,6 +231,7 @@ fn should_update_metric_with_nan_values() { last_duration_milliseconds: duration.as_millis() as f64, last_run_success: success, total_runs: f64::NAN, + ..FsTrimMetrics::default() }; assert_eq!(existing_metrics, expected_metrics); @@ -230,7 +244,7 @@ fn verify_invariants(i: f64, existing_metrics: &FsTrimMetrics) { } #[test] -fn should_maintain_invariants() { +fn maintain_invariants() { let mut existing_metrics = FsTrimMetrics::default(); let rng = &mut reproducible_rng(); for i in 0..100 { @@ -242,3 +256,60 @@ fn should_maintain_invariants() { verify_invariants(i as f64, &existing_metrics); } } + +#[test] +fn update_datadir_metrics() { + let mut metrics = FsTrimMetrics::default(); + assert_eq!(metrics.total_runs_datadir, 0.0); + assert_eq!(metrics.last_duration_milliseconds_datadir, 0.0); + assert!(metrics.last_run_success_datadir); + + metrics + .update_datadir(false, Duration::from_millis(123)) + .expect("should update datadir metrics"); + + assert_eq!(metrics.total_runs_datadir, 1.0); + assert_eq!(metrics.last_duration_milliseconds_datadir, 123.0); + 
assert!(!metrics.last_run_success_datadir); + + // Check that normal fields remain untouched + assert_eq!(metrics.total_runs, 0.0); + assert_eq!(metrics.last_duration_milliseconds, 0.0); + assert!(metrics.last_run_success); +} + +#[test] +fn format_metrics_output() { + let metrics = FsTrimMetrics { + last_duration_milliseconds: 123.45, + last_run_success: true, + total_runs: 6.0, + last_duration_milliseconds_datadir: 678.9, + last_run_success_datadir: false, + total_runs_datadir: 4.0, + }; + + let metrics_str = metrics.to_p8s_metrics_string(); + let expected_str = "\ +# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds +# TYPE fstrim_last_run_duration_milliseconds gauge +fstrim_last_run_duration_milliseconds 123.45 +# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0) +# TYPE fstrim_last_run_success gauge +fstrim_last_run_success 1 +# HELP fstrim_runs_total Total number of runs of fstrim +# TYPE fstrim_runs_total counter +fstrim_runs_total 6 +# HELP fstrim_datadir_last_run_duration_milliseconds Duration of last run of fstrim on datadir in milliseconds +# TYPE fstrim_datadir_last_run_duration_milliseconds gauge +fstrim_datadir_last_run_duration_milliseconds 678.9 +# HELP fstrim_datadir_last_run_success Success status of last run of fstrim on datadir (success: 1, failure: 0) +# TYPE fstrim_datadir_last_run_success gauge +fstrim_datadir_last_run_success 0 +# HELP fstrim_datadir_runs_total Total number of runs of fstrim on datadir +# TYPE fstrim_datadir_runs_total counter +fstrim_datadir_runs_total 4 +"; + + assert_eq!(metrics_str, expected_str); +} diff --git a/rs/ic_os/fstrim_tool/src/tests.rs b/rs/ic_os/fstrim_tool/src/tests.rs index 7fbb921466f..6fd146a0b85 100644 --- a/rs/ic_os/fstrim_tool/src/tests.rs +++ b/rs/ic_os/fstrim_tool/src/tests.rs @@ -1,32 +1,34 @@ use super::*; use assert_matches::assert_matches; -use std::fs::write; +use std::fs::{read_to_string, write}; +use 
std::path::PathBuf; +use std::time::Duration; use tempfile::tempdir; -const EXISTING_METRICS_CONTENT: &str = - "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds\n\ - # TYPE fstrim_last_run_duration_milliseconds gauge\n\ - fstrim_last_run_duration_milliseconds 0\n\ - # HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0)\n\ - # TYPE fstrim_last_run_success gauge\n\ - fstrim_last_run_success 1\n\ - # HELP fstrim_runs_total Total number of runs of fstrim\n\ - # TYPE fstrim_runs_total counter\n\ - fstrim_runs_total 1\n"; - -const EXISTING_METRICS_CONTENT_WITH_SPECIAL_VALUES: &str = - "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds\n\ - # TYPE fstrim_last_run_duration_milliseconds gauge\n\ - fstrim_last_run_duration_milliseconds 0\n\ - # HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0)\n\ - # TYPE fstrim_last_run_success gauge\n\ - fstrim_last_run_success 1\n\ - # HELP fstrim_runs_total Total number of runs of fstrim\n\ - # TYPE fstrim_runs_total counter\n\ - fstrim_runs_total +Inf\n"; +const EXISTING_METRICS_CONTENT: &str = r#"# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds +# TYPE fstrim_last_run_duration_milliseconds gauge +fstrim_last_run_duration_milliseconds 0 +# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0) +# TYPE fstrim_last_run_success gauge +fstrim_last_run_success 1 +# HELP fstrim_runs_total Total number of runs of fstrim +# TYPE fstrim_runs_total counter +fstrim_runs_total 1 +"#; + +const EXISTING_METRICS_CONTENT_WITH_SPECIAL_VALUES: &str = r#"# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds +# TYPE fstrim_last_run_duration_milliseconds gauge +fstrim_last_run_duration_milliseconds 0 +# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, 
failure: 0) +# TYPE fstrim_last_run_success gauge +fstrim_last_run_success 1 +# HELP fstrim_runs_total Total number of runs of fstrim +# TYPE fstrim_runs_total counter +fstrim_runs_total +Inf +"#; #[test] -fn should_parse_metrics_from_file() { +fn parse_metrics_without_datadir_fields() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); write(&metrics_file, EXISTING_METRICS_CONTENT).expect("error writing to file"); @@ -38,12 +40,49 @@ fn should_parse_metrics_from_file() { ) .expect("parsing metrics should succeed") .expect("parsed metrics should be some"); - let parsed_metrics_string = parsed_metrics.to_p8s_metrics_string(); - assert_eq!(parsed_metrics_string, EXISTING_METRICS_CONTENT); + + let expected_metrics = FsTrimMetrics { + last_duration_milliseconds: 0.0, + last_run_success: true, + total_runs: 1.0, + last_duration_milliseconds_datadir: 0.0, + last_run_success_datadir: true, + total_runs_datadir: 0.0, + }; + + assert_eq!(parsed_metrics, expected_metrics); } #[test] -fn should_return_error_if_metrics_in_file_contain_special_values() { +fn parse_metrics_with_datadir_fields() { + let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let metrics_file = tmp_dir.path().join("fstrim.prom"); + + let initial_metrics = FsTrimMetrics { + last_duration_milliseconds: 42.0, + last_run_success: false, + total_runs: 7.0, + last_duration_milliseconds_datadir: 999.0, + last_run_success_datadir: true, + total_runs_datadir: 12.0, + }; + write_metrics_using_tmp_file( + &initial_metrics, + metrics_file + .to_str() + .expect("metrics file path should be valid"), + ) + .unwrap(); + + let parsed_metrics = parse_existing_metrics_from_file(metrics_file.to_str().unwrap()) + .expect("parsing metrics should succeed") + .expect("parsed metrics should be some"); + + assert_eq!(parsed_metrics, initial_metrics); +} + +#[test] +fn should_error_if_metrics_in_file_has_special_values() { let tmp_dir = 
tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); write(&metrics_file, EXISTING_METRICS_CONTENT_WITH_SPECIAL_VALUES) @@ -58,14 +97,21 @@ fn should_return_error_if_metrics_in_file_contain_special_values() { } #[test] -fn should_write_metrics_to_file() { +fn write_metrics_to_file() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); - let default_metrics = FsTrimMetrics::default(); - let default_metrics_string = default_metrics.to_p8s_metrics_string(); + + let metrics = FsTrimMetrics { + last_duration_milliseconds: 64.0, + last_run_success: false, + total_runs: 60.0, + last_duration_milliseconds_datadir: 3.0, + last_run_success_datadir: true, + total_runs_datadir: 16.0, + }; write_metrics_using_tmp_file( - &default_metrics, + &metrics, metrics_file .to_str() .expect("metrics file path should be valid"), @@ -79,19 +125,30 @@ fn should_write_metrics_to_file() { ) .expect("parsing metrics should succeed") .expect("parsed metrics should be some"); - let parsed_metrics_string = parsed_metrics.to_p8s_metrics_string(); - assert_eq!( - parsed_metrics, default_metrics, - "{}\n{}", - parsed_metrics_string, default_metrics_string - ); + + assert_eq!(parsed_metrics, metrics); } #[test] -fn should_update_metrics() { +fn test_update_metrics() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); - write(&metrics_file, EXISTING_METRICS_CONTENT).expect("error writing to file"); + + let initial_metrics = FsTrimMetrics { + last_duration_milliseconds: 0.0, + last_run_success: true, + total_runs: 1.0, + last_duration_milliseconds_datadir: 0.0, + last_run_success_datadir: true, + total_runs_datadir: 0.0, + }; + write_metrics_using_tmp_file( + &initial_metrics, + metrics_file + .to_str() + .expect("metrics file path should be valid"), + ) + .unwrap(); update_metrics( 
Duration::from_millis(151), @@ -99,8 +156,10 @@ fn should_update_metrics() { metrics_file .to_str() .expect("metrics file path should be valid"), + false, ) .expect("updating metrics should succeed"); + let parsed_metrics = parse_existing_metrics_from_file( metrics_file .to_str() @@ -108,22 +167,68 @@ fn should_update_metrics() { ) .expect("parsing metrics should succeed") .expect("parsed metrics should be some"); - let expected_metrics = - "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds\n\ - # TYPE fstrim_last_run_duration_milliseconds gauge\n\ - fstrim_last_run_duration_milliseconds 151\n\ - # HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0)\n\ - # TYPE fstrim_last_run_success gauge\n\ - fstrim_last_run_success 1\n\ - # HELP fstrim_runs_total Total number of runs of fstrim\n\ - # TYPE fstrim_runs_total counter\n\ - fstrim_runs_total 2\n"; - let parsed_metrics_string = parsed_metrics.to_p8s_metrics_string(); - assert_eq!(parsed_metrics_string, expected_metrics); + + let expected_metrics = FsTrimMetrics { + last_duration_milliseconds: 151.0, + last_run_success: true, + total_runs: 2.0, + last_duration_milliseconds_datadir: 0.0, + last_run_success_datadir: true, + total_runs_datadir: 0.0, + }; + assert_eq!(parsed_metrics, expected_metrics); } #[test] -fn should_start_from_empty_metrics_for_update_if_metrics_in_file_contain_special_values() { +fn update_datadir_metrics() { + let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let metrics_file = tmp_dir.path().join("fstrim.prom"); + + let initial_metrics = FsTrimMetrics { + last_duration_milliseconds: 0.0, + last_run_success: true, + total_runs: 1.0, + last_duration_milliseconds_datadir: 0.0, + last_run_success_datadir: true, + total_runs_datadir: 0.0, + }; + write_metrics_using_tmp_file( + &initial_metrics, + metrics_file + .to_str() + .expect("metrics file path should be valid"), + ) + .unwrap(); + + 
update_metrics( + Duration::from_millis(501), + false, + metrics_file + .to_str() + .expect("metrics file path should be valid"), + true, + ) + .expect("updating datadir metrics should succeed"); + + let parsed_metrics = parse_existing_metrics_from_file( + metrics_file.to_str().expect("should convert path to str"), + ) + .expect("parsing metrics should succeed") + .expect("parsed metrics should be some"); + + let expected_metrics = FsTrimMetrics { + last_duration_milliseconds: 0.0, + last_run_success: true, + total_runs: 1.0, + last_duration_milliseconds_datadir: 501.0, + last_run_success_datadir: false, + total_runs_datadir: 1.0, + }; + assert_eq!(parsed_metrics, expected_metrics); +} + +#[test] +fn start_from_empty_metrics_when_file_has_special_values() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); write(&metrics_file, EXISTING_METRICS_CONTENT_WITH_SPECIAL_VALUES) @@ -135,8 +240,10 @@ fn should_start_from_empty_metrics_for_update_if_metrics_in_file_contain_special metrics_file .to_str() .expect("metrics file path should be valid"), + false, ) .expect("updating metrics should succeed"); + let parsed_metrics = parse_existing_metrics_from_file( metrics_file .to_str() @@ -144,37 +251,37 @@ fn should_start_from_empty_metrics_for_update_if_metrics_in_file_contain_special ) .expect("parsing metrics should succeed") .expect("parsed metrics should be some"); - let expected_metrics = - "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds\n\ - # TYPE fstrim_last_run_duration_milliseconds gauge\n\ - fstrim_last_run_duration_milliseconds 151\n\ - # HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0)\n\ - # TYPE fstrim_last_run_success gauge\n\ - fstrim_last_run_success 1\n\ - # HELP fstrim_runs_total Total number of runs of fstrim\n\ - # TYPE fstrim_runs_total counter\n\ - fstrim_runs_total 1\n"; - let 
parsed_metrics_string = parsed_metrics.to_p8s_metrics_string(); - assert_eq!(parsed_metrics_string, expected_metrics); + + let expected_metrics = FsTrimMetrics { + last_duration_milliseconds: 151.0, + last_run_success: true, + total_runs: 1.0, + last_duration_milliseconds_datadir: 0.0, + last_run_success_datadir: true, + total_runs_datadir: 0.0, + }; + assert_eq!(parsed_metrics, expected_metrics); } #[test] -fn should_return_ok_from_successfully_run_command() { +fn successfully_run_command() { run_command("true", "/").expect("running command should succeed"); } #[test] -fn should_return_error_from_unsuccessfully_run_command() { +fn unsuccessfully_run_command() { let res = run_command("false", "/"); assert_matches!(res, Err(err) if err.to_string().contains("Failed to run command")); } #[test] -fn should_fail_but_write_metrics_if_command_fails() { +fn command_fails_but_writes_metrics() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); + + // This should fail to run the command, but still write updated metrics assert_matches!( fstrim_tool( "/non/existent/command", @@ -198,11 +305,20 @@ fn should_fail_but_write_metrics_if_command_fails() { if err.to_string().contains("Failed to run command") ); - assert_metrics_file_content(&metrics_file, false, 1); + // Verify that the metrics were written with success=0, total_runs=1, etc. 
+ let parsed_metrics = + parse_existing_metrics_from_file(metrics_file.to_str().expect("valid path")) + .expect("parsing metrics should succeed") + .expect("parsed metrics should be some"); + + assert!(!parsed_metrics.last_run_success); + assert_eq!(parsed_metrics.total_runs, 1.0); + assert!(!parsed_metrics.last_run_success_datadir); + assert_eq!(parsed_metrics.total_runs_datadir, 1.0); } #[test] -fn should_fail_if_command_cannot_be_run() { +fn fails_if_command_cannot_be_run() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); @@ -232,10 +348,11 @@ fn should_fail_if_command_cannot_be_run() { } #[test] -fn should_not_run_command_but_initialize_metrics_if_flag_set() { +fn init_flag() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); + assert!(fstrim_tool( "/non/existent/command", metrics_file @@ -247,7 +364,7 @@ fn should_not_run_command_but_initialize_metrics_if_flag_set() { .to_str() .expect("tmp_dir path should be valid") .to_string(), - true, + true, //init should write out default metrics even though the command fails tmp_dir2 .path() .to_str() @@ -256,11 +373,16 @@ fn should_not_run_command_but_initialize_metrics_if_flag_set() { ) .is_ok()); - assert_metrics_file_content(&metrics_file, true, 0); + let parsed_metrics = + parse_existing_metrics_from_file(metrics_file.to_str().expect("valid path")) + .expect("parsing metrics should succeed") + .expect("parsed metrics should be some"); + + assert_eq!(parsed_metrics, FsTrimMetrics::default()); } #[test] -fn should_not_overwrite_existing_metrics_if_metrics_init_flag_set() { +fn init_flag_does_not_overwrite_existing_metrics() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); @@ -305,14 +427,16 @@ fn 
should_not_overwrite_existing_metrics_if_metrics_init_flag_set() { ) .is_ok()); - assert_metrics_file_content(&metrics_file, true, 1); + let content = read_to_string(&metrics_file).expect("reading metrics should succeed"); + assert!(content.contains("fstrim_runs_total 1")); } #[test] -fn should_fail_if_metrics_file_cannot_be_written_to() { +fn should_fail_if_metrics_file_cannot_be_written() { let metrics_file = PathBuf::from("/non/existent/directory/fstrim.prom"); let tmp_dir = tempdir().expect("temp dir creation should succeed"); let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); + assert_matches!( fstrim_tool( "true", @@ -344,10 +468,8 @@ fn should_fail_if_target_is_not_a_directory() { let metrics_file = tmp_dir.path().join("fstrim.prom"); let target = PathBuf::from("/non/existent/target/directory"); - let expected_error = format!( - "Target {} is not a directory", - target.to_str().expect("target path should be valid") - ); + let expected_error = format!("Target {} is not a directory", target.to_str().unwrap()); + assert_matches!( fstrim_tool( "true", @@ -370,48 +492,3 @@ fn should_fail_if_target_is_not_a_directory() { if err.to_string() == expected_error ); } - -fn assert_metrics_file_content(metrics_filename: &PathBuf, is_success: bool, total_runs: u32) { - let file = File::open(metrics_filename).expect("should succeed in opening metrics file"); - let reader = BufReader::new(file); - let lines = reader.lines(); - for (i, line) in lines.enumerate() { - match i { - 0 => assert_eq!( - line.unwrap(), - "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds" - ), - 1 => assert_eq!( - line.unwrap(), - "# TYPE fstrim_last_run_duration_milliseconds gauge" - ), - 2 => assert!(line.unwrap().starts_with("fstrim_last_run_duration_milliseconds")), - 3 => assert_eq!( - line.unwrap(), "# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0)" - ), - 4 => assert_eq!( - line.unwrap(), 
"# TYPE fstrim_last_run_success gauge" - ), - 5 => { - let line_str = line.unwrap(); - let mut tokens = line_str.split(' '); - assert_eq!(tokens.next().unwrap(), "fstrim_last_run_success", "{}", line_str); - let success_str = if is_success { "1" } else { "0" }; - assert_eq!(tokens.next().unwrap(), success_str, "{}", line_str); - }, - 6 => assert_eq!( - line.unwrap(), "# HELP fstrim_runs_total Total number of runs of fstrim" - ), - 7 => assert_eq!( - line.unwrap(), "# TYPE fstrim_runs_total counter" - ), - 8 => { - let line_str = line.unwrap(); - let mut tokens = line_str.split(' '); - assert_eq!(tokens.next().unwrap(), "fstrim_runs_total", "{}", line_str); - assert_eq!(tokens.next().unwrap().parse::().unwrap(), total_runs, "{}", line_str); - }, - _ => panic!("unexpected line: {}", line.unwrap()), - } - } -} diff --git a/rs/ic_os/fstrim_tool/tests/integration_tests.rs b/rs/ic_os/fstrim_tool/tests/integration_tests.rs index 3701250642c..e89c785f3cd 100644 --- a/rs/ic_os/fstrim_tool/tests/integration_tests.rs +++ b/rs/ic_os/fstrim_tool/tests/integration_tests.rs @@ -1,68 +1,39 @@ use assert_cmd::Command; use predicates::prelude::*; -use std::fs::File; -use std::io::{BufRead, BufReader}; -use std::path::PathBuf; +use regex::Regex; +use std::fs::read_to_string; use tempfile::tempdir; fn new_fstrim_tool_command() -> Command { match Command::cargo_bin("fstrim_tool") { // When in Cargo environment. 
- Ok(v) => v, + Ok(cmd) => cmd, // When in Bazel environment Err(_) => Command::new("rs/ic_os/fstrim_tool/fstrim_tool_bin"), } } -fn assert_metrics_file_content(metrics_filename: &PathBuf, is_success: bool, total_runs: u32) { - let file = File::open(metrics_filename).expect("should succeed in opening metrics file"); - let reader = BufReader::new(file); - let lines = reader.lines(); - for (i, line) in lines.enumerate() { - match i { - 0 => assert_eq!( - line.unwrap(), - "# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds" - ), - 1 => assert_eq!( - line.unwrap(), - "# TYPE fstrim_last_run_duration_milliseconds gauge" - ), - 2 => assert!(line.unwrap().starts_with("fstrim_last_run_duration_milliseconds")), - 3 => assert_eq!( - line.unwrap(), "# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0)" - ), - 4 => assert_eq!( - line.unwrap(), "# TYPE fstrim_last_run_success gauge" - ), - 5 => { - let line_str = line.unwrap(); - let mut tokens = line_str.split(' '); - assert_eq!(tokens.next().unwrap(), "fstrim_last_run_success", "{}", line_str); - let success_str = if is_success { "1" } else { "0" }; - assert_eq!(tokens.next().unwrap(), success_str, "{}", line_str); - }, - 6 => assert_eq!( - line.unwrap(), "# HELP fstrim_runs_total Total number of runs of fstrim" - ), - 7 => assert_eq!( - line.unwrap(), "# TYPE fstrim_runs_total counter" - ), - 8 => { - let line_str = line.unwrap(); - let mut tokens = line_str.split(' '); - assert_eq!(tokens.next().unwrap(), "fstrim_runs_total", "{}", line_str); - assert_eq!(tokens.next().unwrap().parse::<u32>().unwrap(), total_runs, "{}", line_str); - }, - _ => panic!("unexpected line: {}", line.unwrap()), - } - } +/// Replaces lines that contain: +/// - `fstrim_last_run_duration_milliseconds X` +/// - `fstrim_datadir_last_run_duration_milliseconds X` +/// +/// with a placeholder: +/// - `fstrim_last_run_duration_milliseconds <DURATION>` +/// - 
`fstrim_datadir_last_run_duration_milliseconds <DURATION>` +/// +/// This ensures that the duration numeric values do not cause test flakiness. +fn normalize_duration_line(input: &str) -> String { + let re = + Regex::new(r"(?m)^(fstrim(?:_datadir)?_last_run_duration_milliseconds)\s+\d+(\.\d+)?$") + .unwrap(); + re.replace_all(input, "$1 <DURATION>").into_owned() } #[test] -fn should_successfully_initialize_metrics_if_flag_is_set() { +fn initialize_metrics() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); + new_fstrim_tool_command() .args([ "--metrics", metrics_file .to_str() .expect("metrics file path should be valid"), "--target", tmp_dir .path() .to_str() .expect("tmp_dir path should be valid"), "--initialize_metrics_only", "true", ]) .assert() .stdout(predicate::str::is_empty()) .stderr(predicate::str::is_empty()) .success(); - assert_metrics_file_content(&metrics_file, true, 0); + let actual = read_to_string(&metrics_file).expect("reading metrics file should succeed"); + let expected = r#"# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds +# TYPE fstrim_last_run_duration_milliseconds gauge +fstrim_last_run_duration_milliseconds 0 +# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0) +# TYPE fstrim_last_run_success gauge +fstrim_last_run_success 1 +# HELP fstrim_runs_total Total number of runs of fstrim +# TYPE fstrim_runs_total counter +fstrim_runs_total 0 +# HELP fstrim_datadir_last_run_duration_milliseconds Duration of last run of fstrim on datadir in milliseconds +# TYPE fstrim_datadir_last_run_duration_milliseconds gauge +fstrim_datadir_last_run_duration_milliseconds 0 +# HELP fstrim_datadir_last_run_success Success status of last run of fstrim on datadir (success: 1, failure: 0) +# TYPE fstrim_datadir_last_run_success gauge +fstrim_datadir_last_run_success 1 +# HELP fstrim_datadir_runs_total Total number of runs of fstrim on datadir +# TYPE fstrim_datadir_runs_total counter +fstrim_datadir_runs_total 0 +"#; + assert_eq!(actual, expected); } #[test] -fn
should_fail_but_write_metrics_if_target_is_not_a_directory() { +fn should_fail_but_write_metrics_if_target_not_a_directory() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); + new_fstrim_tool_command() .args([ "--metrics", @@ -102,41 +94,36 @@ fn should_fail_but_write_metrics_if_target_is_not_a_directory() { .stderr(predicate::str::contains("not a directory")) .failure(); - assert_metrics_file_content(&metrics_file, false, 1); -} + let actual = read_to_string(&metrics_file).expect("reading metrics file should succeed"); + // The command fails, so success=0, runs=1. Datadir not updated => datadir success=1, runs=0 + let expected = r#"# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds +# TYPE fstrim_last_run_duration_milliseconds gauge +fstrim_last_run_duration_milliseconds 0 +# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0) +# TYPE fstrim_last_run_success gauge +fstrim_last_run_success 0 +# HELP fstrim_runs_total Total number of runs of fstrim +# TYPE fstrim_runs_total counter +fstrim_runs_total 1 +# HELP fstrim_datadir_last_run_duration_milliseconds Duration of last run of fstrim on datadir in milliseconds +# TYPE fstrim_datadir_last_run_duration_milliseconds gauge +fstrim_datadir_last_run_duration_milliseconds 0 +# HELP fstrim_datadir_last_run_success Success status of last run of fstrim on datadir (success: 1, failure: 0) +# TYPE fstrim_datadir_last_run_success gauge +fstrim_datadir_last_run_success 1 +# HELP fstrim_datadir_runs_total Total number of runs of fstrim on datadir +# TYPE fstrim_datadir_runs_total counter +fstrim_datadir_runs_total 0 +"#; -// This fails if not tested under root user as the successful execution of the 1st target calls fstrim -// #[test] -// fn should_fail_but_write_metrics_if_data_target_is_not_a_directory() { -// let tmp_dir = tempdir().expect("temp dir creation should 
succeed"); -// let metrics_file = tmp_dir.path().join("fstrim.prom"); -// new_fstrim_tool_command() -// .args([ -// "--metrics", -// metrics_file -// .to_str() -// .expect("metrics file path should be valid"), -// "--target", -// tmp_dir -// .path() -// .to_str() -// .expect("tmp_dir path should be valid"), -// "--datadir_target", -// "/not/a/directory", -// ]) -// .assert() -// .stdout(predicate::str::is_empty()) -// .stderr(predicate::str::contains("not a directory")) -// .failure(); -// -// // As metrics now only target the main target, success will be reported -// assert_metrics_file_content(&metrics_file, true, 1); -// } + assert_eq!(actual, expected); +} #[test] -fn should_fail_but_write_metrics_with_discard_not_supported_with_correct_parameters() { +fn should_fail_but_writes_metrics_when_discard_not_supported() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); + new_fstrim_tool_command() .args([ "--metrics", @@ -151,12 +138,37 @@ fn should_fail_but_write_metrics_with_discard_not_supported_with_correct_paramet ]) .assert() .stdout(predicate::str::is_empty()) - .stderr(predicate::str::contains( - "the discard operation is not supported", - )) + .stderr( + predicate::str::contains("the discard operation is not supported") + .or(predicate::str::contains("Operation not permitted")), + ) .failure(); - assert_metrics_file_content(&metrics_file, false, 1); + let actual_raw = read_to_string(&metrics_file).expect("reading metrics file should succeed"); + let actual = normalize_duration_line(&actual_raw); + // The tool fails => success=0, runs=1. 
Datadir not updated => success=1, runs=0 + let expected_raw = r#"# HELP fstrim_last_run_duration_milliseconds Duration of last run of fstrim in milliseconds +# TYPE fstrim_last_run_duration_milliseconds gauge +fstrim_last_run_duration_milliseconds 2 +# HELP fstrim_last_run_success Success status of last run of fstrim (success: 1, failure: 0) +# TYPE fstrim_last_run_success gauge +fstrim_last_run_success 0 +# HELP fstrim_runs_total Total number of runs of fstrim +# TYPE fstrim_runs_total counter +fstrim_runs_total 1 +# HELP fstrim_datadir_last_run_duration_milliseconds Duration of last run of fstrim on datadir in milliseconds +# TYPE fstrim_datadir_last_run_duration_milliseconds gauge +fstrim_datadir_last_run_duration_milliseconds 0 +# HELP fstrim_datadir_last_run_success Success status of last run of fstrim on datadir (success: 1, failure: 0) +# TYPE fstrim_datadir_last_run_success gauge +fstrim_datadir_last_run_success 1 +# HELP fstrim_datadir_runs_total Total number of runs of fstrim on datadir +# TYPE fstrim_datadir_runs_total counter +fstrim_datadir_runs_total 0 +"#; + let expected = normalize_duration_line(expected_raw); + + assert_eq!(actual, expected); } #[test]