Skip to content

Commit

Permalink
feat: refactoring JSON output processing #1145
Browse files Browse the repository at this point in the history
  • Loading branch information
hitenkoku committed Sep 2, 2023
1 parent 4f09f30 commit cb60c1d
Show file tree
Hide file tree
Showing 6 changed files with 177 additions and 198 deletions.
183 changes: 56 additions & 127 deletions src/afterfact.rs
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,7 @@ fn emit_csv<W: std::io::Write>(

// remove duplicate dataのための前レコード分の情報を保持する変数
let mut prev_message: HashMap<CompactString, Profile> = HashMap::new();
let mut prev_details_convert_map: HashMap<CompactString, Vec<CompactString>> = HashMap::new();
for (message_idx, time) in MESSAGEKEYS
.lock()
.unwrap()
Expand Down Expand Up @@ -410,8 +411,10 @@ fn emit_csv<W: std::io::Write>(
jsonl_output_flag,
GEOIP_DB_PARSER.read().unwrap().is_some(),
remove_duplicate_data_flag,
&[&detect_info.details_convert_map, &prev_details_convert_map],
);
prev_message = result.1;
prev_details_convert_map = detect_info.details_convert_map.clone();
wtr.write_field(format!("{{ {} }}", &result.0))?;
} else if json_output_flag {
// JSON output
Expand All @@ -422,8 +425,10 @@ fn emit_csv<W: std::io::Write>(
jsonl_output_flag,
GEOIP_DB_PARSER.read().unwrap().is_some(),
remove_duplicate_data_flag,
&[&detect_info.details_convert_map, &prev_details_convert_map],
);
prev_message = result.1;
prev_details_convert_map = detect_info.details_convert_map.clone();
wtr.write_field(&result.0)?;
wtr.write_field("}")?;
} else {
Expand Down Expand Up @@ -1426,6 +1431,7 @@ pub fn output_json_str(
jsonl_output_flag: bool,
is_included_geo_ip: bool,
remove_duplicate_flag: bool,
details_infos: &[&HashMap<CompactString, Vec<CompactString>>],
) -> (String, HashMap<CompactString, Profile>) {
let mut target: Vec<String> = vec![];
let mut target_ext_field = Vec::new();
Expand All @@ -1435,15 +1441,32 @@ pub fn output_json_str(
for (field_name, profile) in ext_field.iter() {
match profile {
Profile::Details(_) | Profile::AllFieldInfo(_) | Profile::ExtraFieldInfo(_) => {
if prev_message
.get(field_name)
.unwrap_or(&Profile::Literal("-".into()))
.to_value()
== profile.to_value()
{
let details_key = match profile {
Profile::Details(_) => "Details",
Profile::AllFieldInfo(_) => "AllFieldInfo",
Profile::ExtraFieldInfo(_) => "ExtraFieldInfo",
_ => "",
};

let empty = vec![];
let now = details_infos[0]
.get(format!("#{details_key}").as_str())
.unwrap_or(&empty);
let prev = details_infos[1]
.get(format!("#{details_key}").as_str())
.unwrap_or(&empty);
let dup_flag = (!profile.to_value().is_empty()
&& prev_message
.get(field_name)
.unwrap_or(&Profile::Literal("".into()))
.to_value()
== profile.to_value())
|| (!&now.is_empty() && !&prev.is_empty() && now == prev);
if dup_flag {
                        // 合致する場合は前回レコード分のメッセージを更新しない。合致している場合は出力用のフィールドマップの内容を変更する。
// 合致しているので前回分のメッセージは更新しない
target_ext_field.push((field_name.clone(), profile.convert(&"DUP".into())));
//DUPという通常の文字列を出すためにProfile::Literalを使用する
target_ext_field.push((field_name.clone(), Profile::Literal("DUP".into())));
} else {
// 合致しない場合は前回レコード分のメッセージを更新する
next_prev_message.insert(field_name.clone(), profile.clone());
Expand All @@ -1466,6 +1489,7 @@ pub fn output_json_str(
"TgtCountry",
"TgtCity",
];

let valid_key_add_to_details: Vec<&str> = key_add_to_details
.iter()
.filter(|target_key| {
Expand All @@ -1477,7 +1501,13 @@ pub fn output_json_str(
for (key, profile) in target_ext_field.iter() {
let val = profile.to_value();
let vec_data = _get_json_vec(profile, &val.to_string());
if !key_add_to_details.contains(&key.as_str()) && vec_data.is_empty() {
if (!key_add_to_details.contains(&key.as_str())
&& !matches!(
profile,
Profile::AllFieldInfo(_) | Profile::ExtraFieldInfo(_)
))
&& vec_data.is_empty()
{
let tmp_val: Vec<&str> = val.split(": ").collect();
let output_val =
_convert_valid_json_str(&tmp_val, matches!(profile, Profile::AllFieldInfo(_)));
Expand Down Expand Up @@ -1509,133 +1539,33 @@ pub fn output_json_str(
Profile::Details(_) | Profile::AllFieldInfo(_) | Profile::ExtraFieldInfo(_) => {
let mut output_stock: Vec<String> = vec![];
output_stock.push(format!(" \"{key}\": {{"));
let mut stocked_value: Vec<Vec<String>> = vec![];
let mut key_index_stock = vec![];
for detail_contents in vec_data.iter() {
// 分解してキーとなりえる箇所を抽出する
let mut tmp_stock = vec![];
let mut space_split_contents = detail_contents.split(' ');
while let Some(sp) = space_split_contents.next() {
let first_character =
char::from_str(&sp.chars().next().unwrap_or('-').to_string())
.unwrap_or_default();
if !sp.contains(['\\', '"', '🛂'])
&& first_character.is_uppercase()
&& !sp.starts_with(['-', '/'])
&& sp.ends_with(':')
&& sp.len() > 2
&& sp != "Number:"
{
key_index_stock.push(sp.replace(':', ""));
if sp == "Payload:" {
stocked_value.push(vec![]);
stocked_value.push(
space_split_contents.map(|s| s.to_string()).collect(),
);
break;
} else {
stocked_value.push(tmp_stock);
tmp_stock = vec![];
}
} else if (first_character.is_lowercase()
|| first_character.is_numeric())
&& sp.ends_with(';')
&& sp.len() < 5
&& key_index_stock.len() > 1
&& key_index_stock.last().unwrap_or(&String::default()) != "Cmdline"
{
let last_key = key_index_stock.pop().unwrap_or_default();
let mut last_stocked_value =
stocked_value.pop().unwrap_or_default();
last_stocked_value.push(format!("{last_key}: {sp}"));
stocked_value.push(last_stocked_value);
} else {
tmp_stock.push(sp.to_owned());
}
}
if !tmp_stock.is_empty() {
stocked_value.push(tmp_stock);
}
}
if stocked_value
.iter()
.counts_by(|x| x.len())
.get(&0)
.unwrap_or(&0)
!= &key_index_stock.len()
{
if let Some((target_idx, _)) = key_index_stock
.iter()
.enumerate()
.rfind(|(_, y)| "CmdLine" == *y)
{
let cmd_line_vec_idx_len =
stocked_value[2 * (target_idx + 1) - 1].len();
stocked_value[2 * (target_idx + 1) - 1][cmd_line_vec_idx_len - 1]
.push_str(&format!(" {}:", key_index_stock[target_idx + 1]));
key_index_stock.remove(target_idx + 1);
}
}
let mut key_idx = 0;
let mut output_value_stock = String::default();
for (value_idx, value) in stocked_value.iter().enumerate() {
if key_idx >= key_index_stock.len() {
break;
}
let mut tmp = if value_idx == 0 && !value.is_empty() {
key.as_str()
} else {
key_index_stock[key_idx].as_str()
};
if !output_value_stock.is_empty() {
let separate_chr =
if key_index_stock[key_idx].starts_with("ScriptBlock") {
" | "
} else {
": "
};
output_value_stock.push_str(separate_chr);
}
output_value_stock.push_str(&value.join(" "));
//1つまえのキーの段階で以降にvalueの配列で区切りとなる空の配列が存在しているかを確認する
let is_remain_split_stock = key_index_stock.len() > 1
&& key_idx == key_index_stock.len() - 2
&& value_idx < stocked_value.len() - 1
&& !output_value_stock.is_empty()
&& !stocked_value[value_idx + 1..]
.iter()
.any(|remain_value| remain_value.is_empty());
if (value_idx < stocked_value.len() - 1
&& stocked_value[value_idx + 1].is_empty()
&& key_idx != key_index_stock.len() - 1)
|| is_remain_split_stock
{
// 次の要素を確認して、存在しないもしくは、キーが入っているとなった場合現在ストックしている内容が出力していいことが確定するので出力処理を行う
let output_tmp = format!("{tmp}: {output_value_stock}");
let output: Vec<&str> = output_tmp.split(": ").collect();
let key = _convert_valid_json_str(&[output[0]], false);
let fmted_val = _convert_valid_json_str(&output, false);
let details_key = match profile {
Profile::Details(_) => "Details",
Profile::AllFieldInfo(_) => "AllFieldInfo",
Profile::ExtraFieldInfo(_) => "ExtraFieldInfo",
_ => "",
};
                        // この段階でDetails, AllFieldInfo, ExtraFieldInfoの要素はdetails_infosに格納されているのでunwrapする
let details_stocks = details_infos[0]
.get(&CompactString::from(format!("#{details_key}")))
.unwrap();
for (idx, contents) in details_stocks.iter().enumerate() {
let (key, value) = contents.split_once(": ").unwrap_or_default();
let output_key = _convert_valid_json_str(&[key], false);
let fmted_val = _convert_valid_json_str(&[value], false);

if idx != details_stocks.len() - 1 {
output_stock.push(format!(
"{},",
_create_json_output_format(
&key,
&output_key,
&fmted_val,
key.starts_with('\"'),
fmted_val.starts_with('\"'),
8
)
));
output_value_stock.clear();
tmp = "";
key_idx += 1;
}
if value_idx == stocked_value.len() - 1
&& !(tmp.is_empty() && stocked_value.is_empty())
{
let output_tmp = format!("{tmp}: {output_value_stock}");
let output: Vec<&str> = output_tmp.split(": ").collect();
let key = _convert_valid_json_str(&[output[0]], false);
let fmted_val = _convert_valid_json_str(&output, false);
} else {
let last_contents_end =
if is_included_geo_ip && !valid_key_add_to_details.is_empty() {
","
Expand All @@ -1652,7 +1582,6 @@ pub fn output_json_str(
8,
)
));
key_idx += 1;
}
}
if is_included_geo_ip {
Expand Down
14 changes: 9 additions & 5 deletions src/detections/detection.rs
Original file line number Diff line number Diff line change
Expand Up @@ -655,17 +655,19 @@ impl Detection {
_ => {}
}
}
//ルール側にdetailsの項目があればそれをそのまま出力し、そうでない場合はproviderとeventidの組で設定したdetailsの項目を出力する
let details_fmt_str = match rule.yaml["details"].as_str() {
Some(s) => s.to_string(),
None => match stored_static
.default_details
.get(&CompactString::from(format!("{provider}_{eid}")))
{
Some(str) => str.to_string(),
None => create_recordinfos(&record_info.record, &FieldDataMapKey::default(), &None),
None => create_recordinfos(&record_info.record, &FieldDataMapKey::default(), &None)
.join(" ¦ "),
},
};
let field_data_map_key = if stored_static.field_data_map.is_none() {
let field_data_map_key: FieldDataMapKey = if stored_static.field_data_map.is_none() {
FieldDataMapKey::default()
} else {
FieldDataMapKey {
Expand Down Expand Up @@ -693,13 +695,14 @@ impl Detection {
detail: CompactString::default(),
ext_field: stored_static.profiles.as_ref().unwrap().to_owned(),
is_condition: false,
details_convert_map: HashMap::default(),
};
message::insert(
&record_info.record,
CompactString::new(details_fmt_str),
detect_info,
time,
&mut profile_converter,
&profile_converter,
(false, is_json_timeline, included_all_field_info_flag),
(
eventkey_alias,
Expand Down Expand Up @@ -911,6 +914,7 @@ impl Detection {
detail: output,
ext_field: stored_static.profiles.as_ref().unwrap().to_owned(),
is_condition: true,
details_convert_map: HashMap::default(),
};
let binding = STORED_EKEY_ALIAS.read().unwrap();
let eventkey_alias = binding.as_ref().unwrap();
Expand All @@ -921,7 +925,7 @@ impl Detection {
CompactString::new(rule.yaml["details"].as_str().unwrap_or("-")),
detect_info,
agg_result.start_timedate,
&mut profile_converter,
&profile_converter,
(true, is_json_timeline, false),
(eventkey_alias, &field_data_map_key, &None),
)
Expand Down Expand Up @@ -1144,7 +1148,7 @@ impl Detection {
is_csv_output: bool,
) -> CompactString {
for alias in target_alias {
let search_data = message::parse_message(
let (search_data, _) = message::parse_message(
record,
CompactString::from(alias),
eventkey_alias,
Expand Down
2 changes: 1 addition & 1 deletion src/detections/field_data_map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,7 @@ mod tests {
Ok(record) => {
let ret = utils::create_recordinfos(&record, &key, &Some(map));
let expected = "ElevatedToken: NO ¦ ImpersonationLevel: A ¦ NewProcessId: 6528 ¦ ProcessId: 1100".to_string();
assert_eq!(ret, expected);
assert_eq!(ret.join(" ¦ "), expected);
}
Err(_) => {
panic!("Failed to parse json record.");
Expand Down
Loading

0 comments on commit cb60c1d

Please sign in to comment.