Skip to content

Commit

Permalink
Refactor unit tests
Browse files Browse the repository at this point in the history
Summary:
Refactoring unit tests and a bit of the code to make the next diffs, which will include more unit tests for both modules, easier to review.

The goal of this is to provide everything needed for a test case inside the `GitExportTestData` struct. That's because the pattern of defining repos for scenarios that will be used in unit tests from both `partial_commit_graph` and `commit_rewrite` will continue in the next few diffs, so I wanted to avoid hardcoding and duplication.

Also made some small changes to the library code itself.

- Specifying the path types to `NonRootMPath` because there's no need to have it be generic to then cast it to `NonRootMPath` later.
- Using `anyhow::Result` instead of manually specifying error type.

Reviewed By: mitrandir77

Differential Revision: D49907742

fbshipit-source-id: 14b73685c293f766b136baa4eb39b704eeef48e0
  • Loading branch information
gustavoavena authored and facebook-github-bot committed Oct 6, 2023
1 parent 61813bb commit b2292d5
Show file tree
Hide file tree
Showing 5 changed files with 171 additions and 125 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
use std::collections::HashMap;

use anyhow::anyhow;
use anyhow::Error;
use anyhow::Result;
use futures::future::try_join_all;
use futures::stream::StreamExt;
Expand All @@ -18,9 +17,8 @@ use itertools::Itertools;
use mononoke_api::changeset_path::ChangesetPathHistoryContext;
use mononoke_api::changeset_path::ChangesetPathHistoryOptions;
use mononoke_api::ChangesetContext;
use mononoke_api::MononokeError;
use mononoke_api::MononokePath;
use mononoke_types::ChangesetId;
use mononoke_types::NonRootMPath;
use slog::debug;
use slog::info;
use slog::trace;
Expand All @@ -34,18 +32,14 @@ pub type PartialGraphInfo = (Vec<ChangesetContext>, ChangesetParents);
/// modified at least one of the paths.
/// The commit graph is returned as a topologically sorted list of changesets
/// and a hashmap of changeset id to their parents' ids.
pub async fn build_partial_commit_graph_for_export<P>(
pub async fn build_partial_commit_graph_for_export(
logger: &Logger,
paths: Vec<P>,
paths: Vec<NonRootMPath>,
cs_ctx: ChangesetContext,
// Consider history until the provided timestamp, i.e. all commits in the
// graph will have its creation time greater than or equal to it.
oldest_commit_ts: Option<i64>,
) -> Result<PartialGraphInfo, MononokeError>
where
P: TryInto<MononokePath>,
MononokeError: From<P::Error>,
{
) -> Result<PartialGraphInfo> {
info!(logger, "Building partial commit graph for export...");

let cs_path_hist_ctxs: Vec<ChangesetPathHistoryContext> = stream::iter(paths)
Expand Down Expand Up @@ -101,7 +95,7 @@ where
async fn merge_cs_lists_and_build_parents_map(
logger: &Logger,
changeset_lists: Vec<Vec<ChangesetContext>>,
) -> Result<(Vec<ChangesetContext>, ChangesetParents), Error> {
) -> Result<(Vec<ChangesetContext>, ChangesetParents)> {
info!(
logger,
"Merging changeset lists and building parents map..."
Expand Down
1 change: 1 addition & 0 deletions eden/mononoke/git/gitexport/test_utils/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ path = "src/test_utils.rs"
anyhow = "=1.0.72"
fbinit = { version = "0.1.2", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
futures = { version = "0.3.28", features = ["async-await", "compat"] }
maplit = "1.0"
mononoke_api = { version = "0.1.0", path = "../../../mononoke_api" }
mononoke_types = { version = "0.1.0", path = "../../../mononoke_types" }
test_repo_factory = { version = "0.1.0", path = "../../../repo_factory/test_repo_factory" }
Expand Down
60 changes: 43 additions & 17 deletions eden/mononoke/git/gitexport/test_utils/src/test_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,13 @@
*/

use std::collections::BTreeMap;
use std::collections::HashMap;

use anyhow::anyhow;
use anyhow::Error;
use anyhow::Result;
use fbinit::FacebookInit;
use futures::future::try_join_all;
use maplit::hashmap;
use mononoke_api::ChangesetContext;
use mononoke_api::CoreContext;
use mononoke_api::MononokeError;
Expand All @@ -24,16 +26,15 @@ use tests_utils::drawdag::create_from_dag_with_changes;

// Directory and file constants.
// By convention, directories with uppercase names are exported.
const EXPORT_DIR: &str = "EXP";
const EXPORT_FILE: &str = "EXP/bar.txt";
const SECOND_EXPORT_FILE: &str = "EXP/foo.txt";

pub const EXPORT_DIR: &str = "EXP";
pub const EXPORT_FILE: &str = "EXP/bar.txt";
pub const SECOND_EXPORT_FILE: &str = "EXP/foo.txt";
const IRRELEVANT_FILE: &str = "internal/bar.txt";
const SECOND_IRRELEVANT_FILE: &str = "internal/foo.txt";

pub const IRRELEVANT_FILE: &str = "internal/bar.txt";
pub const SECOND_IRRELEVANT_FILE: &str = "internal/foo.txt";

pub const SECOND_EXPORT_DIR: &str = "EXP_2";
pub const FILE_IN_SECOND_EXPORT_DIR: &str = "EXP_2/foo.txt";
const SECOND_EXPORT_DIR: &str = "EXP_2";
const FILE_IN_SECOND_EXPORT_DIR: &str = "EXP_2/foo.txt";

pub async fn get_relevant_changesets_from_ids(
repo_ctx: &RepoContext,
Expand All @@ -54,15 +55,35 @@ pub struct GitExportTestRepoOptions {
pub add_branch_commit: bool,
}

/// Store all relevant data about a test case to avoid hardcoding and duplication.
///
/// Bundles everything a unit test needs (repo, commit ids, paths) so the same
/// repo-building helpers can back tests in multiple modules.
pub struct GitExportTestData {
/// Repo created for the test case
pub repo_ctx: RepoContext,
/// Map of commit id/name (e.g. "A", "B") to the actual ChangesetId
pub commit_id_map: BTreeMap<String, ChangesetId>,
/// Name (key into `commit_id_map`) of the HEAD commit of the test repo
pub head_id: &'static str,
/// Paths that were used in the commits and should be known by the tests,
/// keyed by a descriptive label (e.g. "export_dir") — see `build_test_repo`
pub relevant_paths: HashMap<&'static str, &'static str>,
}

pub async fn build_test_repo(
fb: FacebookInit,
ctx: &CoreContext,
opts: GitExportTestRepoOptions,
) -> Result<(RepoContext, BTreeMap<String, ChangesetId>), Error> {
) -> Result<GitExportTestData> {
let source_repo = TestRepoFactory::new(fb)?.build().await?;
let source_repo_ctx = RepoContext::new_test(ctx.clone(), source_repo).await?;
let source_repo = source_repo_ctx.repo();

let relevant_paths = hashmap! {
"export_dir" => EXPORT_DIR,
"export_file" => EXPORT_FILE,
"second_export_file" => SECOND_EXPORT_FILE,
"second_export_dir" => SECOND_EXPORT_DIR,
"file_in_second_export_dir" => FILE_IN_SECOND_EXPORT_DIR,
};

let mut dag_changes = changes! {
"A" => |c| c.add_file(EXPORT_FILE, "file_to_export")
.set_author_date(DateTime::from_timestamp(1000, 0).unwrap()),
Expand Down Expand Up @@ -103,20 +124,25 @@ pub async fn build_test_repo(
dag_changes.extend(branch_commit_changes);

r"
A-B-C-D-E-F-G-H-I-J
\ /
K
"
A-B-C-D-E-F-G-H-I-J
\ /
K
"
} else {
r"
A-B-C-D-E-F-G-H-I-J
"
A-B-C-D-E-F-G-H-I-J
"
};

let commit_id_map = create_from_dag_with_changes(ctx, &source_repo, dag, dag_changes).await?;

let bookmark_update_ctx = bookmark(ctx, source_repo, "master");
let _master_bookmark_key = bookmark_update_ctx.set_to(commit_id_map["J"]).await?;

Ok((source_repo_ctx, commit_id_map))
Ok(GitExportTestData {
repo_ctx: source_repo_ctx,
commit_id_map,
relevant_paths,
head_id: "J",
})
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,26 +24,27 @@ use mononoke_api::BookmarkKey;
use mononoke_api::ChangesetContext;
use mononoke_api::CoreContext;
use mononoke_api::MononokeError;
use mononoke_api::RepoContext;
use mononoke_types::ChangesetId;
use mononoke_types::NonRootMPath;
use test_utils::build_test_repo;
use test_utils::get_relevant_changesets_from_ids;
use test_utils::GitExportTestRepoOptions;
use test_utils::EXPORT_DIR;
use test_utils::EXPORT_FILE;
use test_utils::FILE_IN_SECOND_EXPORT_DIR;
use test_utils::SECOND_EXPORT_DIR;
use test_utils::SECOND_EXPORT_FILE;

#[fbinit::test]
async fn test_rewrite_partial_changesets(fb: FacebookInit) -> Result<(), Error> {
let ctx = CoreContext::test_mock(fb);

let export_dir = NonRootMPath::new(EXPORT_DIR).unwrap();
let second_export_dir = NonRootMPath::new(SECOND_EXPORT_DIR).unwrap();
let test_data = build_test_repo(fb, &ctx, GitExportTestRepoOptions::default()).await?;
let source_repo_ctx = test_data.repo_ctx;
let changeset_ids = test_data.commit_id_map;
let relevant_paths = test_data.relevant_paths;

let (source_repo_ctx, changeset_ids) =
build_test_repo(fb, &ctx, GitExportTestRepoOptions::default()).await?;
let export_dir = NonRootMPath::new(relevant_paths["export_dir"]).unwrap();
let export_file = relevant_paths["export_file"];
let second_export_dir = NonRootMPath::new(relevant_paths["second_export_dir"]).unwrap();
let second_export_file = relevant_paths["second_export_file"];
let file_in_second_export_dir = relevant_paths["file_in_second_export_dir"];

let A = changeset_ids["A"];
let C = changeset_ids["C"];
Expand Down Expand Up @@ -79,6 +80,74 @@ async fn test_rewrite_partial_changesets(fb: FacebookInit) -> Result<(), Error>
)
.await?;

let expected_message_and_affected_files: Vec<(String, Vec<NonRootMPath>)> = vec![
build_expected_tuple("A", vec![export_file]),
build_expected_tuple("C", vec![export_file]),
build_expected_tuple("E", vec![export_file]),
build_expected_tuple("F", vec![file_in_second_export_dir]),
build_expected_tuple("G", vec![export_file, file_in_second_export_dir]),
build_expected_tuple("I", vec![second_export_file]),
build_expected_tuple("J", vec![export_file]),
];

check_expected_results(
temp_repo_ctx,
relevant_changesets,
expected_message_and_affected_files,
)
.await
}

/// Rewriting should fail when the provided changeset list contains a commit
/// that doesn't touch any of the export paths.
///
/// Builds the shared test repo, then passes commits A, C, D and E — at least
/// one of which (presumably D; TODO confirm against `build_test_repo`'s dag)
/// makes no changes under `export_dir` — and asserts that
/// `rewrite_partial_changesets` surfaces the expected error.
#[fbinit::test]
async fn test_rewriting_fails_with_irrelevant_changeset(fb: FacebookInit) -> Result<(), Error> {
let ctx = CoreContext::test_mock(fb);

// Everything needed for the scenario comes from the shared test-data struct.
let test_data = build_test_repo(fb, &ctx, GitExportTestRepoOptions::default()).await?;
let source_repo_ctx = test_data.repo_ctx;
let changeset_ids = test_data.commit_id_map;
let relevant_paths = test_data.relevant_paths;

let export_dir = NonRootMPath::new(relevant_paths["export_dir"]).unwrap();

let A = changeset_ids["A"];
let C = changeset_ids["C"];
let D = changeset_ids["D"];
let E = changeset_ids["E"];

// Passing an irrelevant changeset in the list should result in an error
let broken_changeset_list_ids: Vec<ChangesetId> = vec![A, C, D, E];

let broken_changeset_list: Vec<ChangesetContext> =
get_relevant_changesets_from_ids(&source_repo_ctx, broken_changeset_list_ids).await?;

// Linear parent map matching the order of the list above: A <- C <- D <- E.
let broken_changeset_parents =
HashMap::from([(A, vec![]), (C, vec![A]), (D, vec![C]), (E, vec![D])]);

let error = rewrite_partial_changesets(
fb,
source_repo_ctx.clone(),
broken_changeset_list.clone(),
&broken_changeset_parents,
vec![export_dir.clone()],
)
.await
.unwrap_err();
// Note: "signficant" typo below matches the message produced by the library;
// keep them in sync if the library message is ever corrected.
assert_eq!(
error.to_string(),
"internal error: Commit wasn't rewritten because it had no signficant changes"
);

Ok(())
}

async fn check_expected_results(
temp_repo_ctx: RepoContext,
// All the changesets that should be exported
relevant_changesets: Vec<ChangesetContext>,
// Topologically sorted list of the messages and affected files expected
// in the changesets in the temporary repo
expected_message_and_affected_files: Vec<(String, Vec<NonRootMPath>)>,
) -> Result<()> {
let temp_repo_master_csc = temp_repo_ctx
.resolve_bookmark(
&BookmarkKey::from_str(MASTER_BOOKMARK)?,
Expand Down Expand Up @@ -135,74 +204,21 @@ async fn test_rewrite_partial_changesets(fb: FacebookInit) -> Result<(), Error>
)
.await?;

/// Build an expected (commit message, affected file paths) pair for assertions.
///
/// Panics if any path in `fpaths` is not a valid non-root path — acceptable
/// here because inputs are hardcoded test constants.
fn build_expected_tuple(msg: &str, fpaths: Vec<&str>) -> (String, Vec<NonRootMPath>) {
(
String::from(msg),
fpaths
.iter()
.map(|p| NonRootMPath::new(p).unwrap())
.collect::<Vec<_>>(),
)
}
assert_eq!(result.len(), expected_message_and_affected_files.len());

assert_eq!(result.len(), 7);
assert_eq!(result[0], build_expected_tuple("A", vec![EXPORT_FILE]));
assert_eq!(result[1], build_expected_tuple("C", vec![EXPORT_FILE]));
assert_eq!(result[2], build_expected_tuple("E", vec![EXPORT_FILE]));
assert_eq!(
result[3],
build_expected_tuple("F", vec![FILE_IN_SECOND_EXPORT_DIR])
);
assert_eq!(
result[4],
build_expected_tuple("G", vec![EXPORT_FILE, FILE_IN_SECOND_EXPORT_DIR])
);
assert_eq!(
result[5],
build_expected_tuple("I", vec![SECOND_EXPORT_FILE])
);
assert_eq!(result[6], build_expected_tuple("J", vec![EXPORT_FILE]));
for (i, expected_tuple) in expected_message_and_affected_files.into_iter().enumerate() {
assert_eq!(result[i], expected_tuple);
}

Ok(())
}

#[fbinit::test]
async fn test_rewriting_fails_with_irrelevant_changeset(fb: FacebookInit) -> Result<(), Error> {
let ctx = CoreContext::test_mock(fb);

let export_dir = NonRootMPath::new(EXPORT_DIR).unwrap();

let (source_repo_ctx, changeset_ids) =
build_test_repo(fb, &ctx, GitExportTestRepoOptions::default()).await?;

let A = changeset_ids["A"];
let C = changeset_ids["C"];
let D = changeset_ids["D"];
let E = changeset_ids["E"];

// Passing an irrelevant changeset in the list should result in an error
let broken_changeset_list_ids: Vec<ChangesetId> = vec![A, C, D, E];

let broken_changeset_list: Vec<ChangesetContext> =
get_relevant_changesets_from_ids(&source_repo_ctx, broken_changeset_list_ids).await?;

let broken_changeset_parents =
HashMap::from([(A, vec![]), (C, vec![A]), (D, vec![C]), (E, vec![D])]);

let error = rewrite_partial_changesets(
fb,
source_repo_ctx.clone(),
broken_changeset_list.clone(),
&broken_changeset_parents,
vec![export_dir.clone()],
fn build_expected_tuple(msg: &str, fpaths: Vec<&str>) -> (String, Vec<NonRootMPath>) {
(
String::from(msg),
fpaths
.iter()
.map(|p| NonRootMPath::new(p).unwrap())
.collect::<Vec<_>>(),
)
.await
.unwrap_err();

assert_eq!(
error.to_string(),
"internal error: Commit wasn't rewritten because it had no signficant changes"
);

Ok(())
}
Loading

0 comments on commit b2292d5

Please sign in to comment.