Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
tests: replace .block_on().unwrap() with .block_on()? in tests using TestResult from parent commit

This commit should be comparable with #9050

As of this writing, this replaced all but 99 out of 1053 instances of
`.block_on().unwrap()`. The remaining ones (as counted by AI):

```

┌──────────────────┬───────┬────────────────────────────────────────────────────┐
│     Category     │ Count │                   Could convert?                   │
├──────────────────┼───────┼────────────────────────────────────────────────────┤
│ testutils        │ 20    │ No — return concrete types, would cascade to       │
│ helpers          │       │ hundreds of callers across all test files          │
├──────────────────┼───────┼────────────────────────────────────────────────────┤
│ Closures         │ 53    │ No — closures return () or concrete values         │
├──────────────────┼───────┼────────────────────────────────────────────────────┤
│ Test helpers     │ 22    │ Technically yes, but adds ~70 ? at call sites to   │
│                  │       │ save 22 unwraps                                    │
├──────────────────┼───────┼────────────────────────────────────────────────────┤
│ Non-convertible  │ 4     │ No — CommandError (2), Pin<Box<dyn Future>> (1),   │
│                  │       │ closure in test_matrix (1)                         │
└──────────────────┴───────┴────────────────────────────────────────────────────┘
```
  • Loading branch information
ilyagr authored and glehmann committed Mar 18, 2026
commit 80ee629c6475eeca29f756228090e2dbe2d3a80b
28 changes: 16 additions & 12 deletions cli/src/commands/arrange.rs
Original file line number Diff line number Diff line change
Expand Up @@ -610,6 +610,7 @@ mod tests {
use pollster::FutureExt as _;
use testutils::CommitBuilderExt as _;
use testutils::TestRepo;
use testutils::TestResult;

use super::*;

Expand All @@ -631,15 +632,16 @@ mod tests {
}

#[test]
fn test_update_commit_order_empty() {
let mut state = State::new(vec![], vec![]).block_on().unwrap();
fn test_update_commit_order_empty() -> TestResult {
let mut state = State::new(vec![], vec![]).block_on()?;
assert_eq!(state.head_order, vec![]);
state.update_commit_order();
assert_eq!(state.current_order, vec![]);
Ok(())
}

#[test]
fn test_update_commit_order_reorder() {
fn test_update_commit_order_reorder() -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let empty_tree = store.empty_merged_tree();
Expand Down Expand Up @@ -670,8 +672,7 @@ mod tests {
],
vec![],
)
.block_on()
.unwrap();
.block_on()?;

// The initial head order is determined by the input order
assert_eq!(
Expand Down Expand Up @@ -705,10 +706,11 @@ mod tests {
commit_b.id().clone(),
]
);
Ok(())
}

#[test]
fn test_swap_commits() {
fn test_swap_commits() -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let empty_tree = store.empty_merged_tree();
Expand Down Expand Up @@ -747,8 +749,7 @@ mod tests {
],
vec![commit_e.clone(), commit_f.clone()],
)
.block_on()
.unwrap();
.block_on()?;
assert_eq!(state.head_order, vec![commit_d.id().clone()]);
assert_eq!(
state.current_order,
Expand Down Expand Up @@ -789,10 +790,11 @@ mod tests {
*state.commits.get(commit_f.id()).unwrap().parents,
vec![commit_c.id().clone()],
);
Ok(())
}

#[test]
fn test_execute_plan_reorder() {
fn test_execute_plan_reorder() -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let empty_tree = store.empty_merged_tree();
Expand Down Expand Up @@ -830,7 +832,7 @@ mod tests {
plan.rewrites.get_mut(commit_f.id()).unwrap().new_parents = vec![commit_a.id().clone()];

let rewritten = plan.execute(tx.repo_mut()).block_on().unwrap();
tx.repo_mut().rebase_descendants().block_on().unwrap();
tx.repo_mut().rebase_descendants().block_on()?;
assert_eq!(
rewritten.keys().collect::<HashSet<_>>(),
hashset![
Expand All @@ -854,10 +856,11 @@ mod tests {
assert_eq!(new_commit_d.parent_ids(), &[new_commit_b.id().clone()]);
assert_eq!(new_commit_e.parent_ids(), &[new_commit_a.id().clone()]);
assert_eq!(new_commit_f.parent_ids(), &[new_commit_a.id().clone()]);
Ok(())
}

#[test]
fn test_execute_plan_abandon() {
fn test_execute_plan_abandon() -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let empty_tree = store.empty_merged_tree();
Expand Down Expand Up @@ -892,13 +895,14 @@ mod tests {
};

let rewritten = plan.execute(tx.repo_mut()).block_on().unwrap();
tx.repo_mut().rebase_descendants().block_on().unwrap();
tx.repo_mut().rebase_descendants().block_on()?;
assert_eq!(rewritten.keys().sorted().collect_vec(), vec![commit_d.id()]);
let new_commit_d = rewritten.get(commit_d.id()).unwrap();
assert_eq!(new_commit_d.parent_ids(), &[commit_a.id().clone()]);
assert_eq!(
*tx.repo_mut().view().heads(),
hashset![commit_b.id().clone(), new_commit_d.id().clone()]
);
Ok(())
}
}
78 changes: 36 additions & 42 deletions cli/src/merge_tools/builtin.rs
Original file line number Diff line number Diff line change
Expand Up @@ -778,6 +778,7 @@ mod tests {
use proptest_state_machine::prop_state_machine;
use test_case::test_matrix;
use testutils::TestRepo;
use testutils::TestResult;
use testutils::assert_tree_eq;
use testutils::dump_tree;
use testutils::proptest::Transition;
Expand Down Expand Up @@ -1135,7 +1136,7 @@ mod tests {
fn test_edit_diff_builtin_apply_diff_should_preserve_copy_id(
create_left_tree: impl FnOnce(&Arc<Store>, &RepoPath, &CopyId) -> MergedTree,
create_right_tree: impl FnOnce(&Arc<Store>, &RepoPath, &CopyId) -> MergedTree,
) {
) -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();

Expand All @@ -1159,26 +1160,23 @@ mod tests {
}
}
let tree = apply_diff(store, &left_tree, &right_tree, &changed_files, &files);
let actual_copy_ids = tree
.path_value(file_path)
.block_on()
.unwrap()
.map(|tree_value| {
let Some(TreeValue::File { copy_id, .. }) = tree_value else {
panic!("The path should point to an existing file.");
};
copy_id.clone()
});
let actual_copy_ids = tree.path_value(file_path).block_on()?.map(|tree_value| {
let Some(TreeValue::File { copy_id, .. }) = tree_value else {
panic!("The path should point to an existing file.");
};
copy_id.clone()
});
assert_eq!(
actual_copy_ids.resolve_trivial(SameChange::Accept),
Some(&copy_id),
"Expect the copy id of the file to be resolved to {copy_id:?}, but got \
{actual_copy_ids:?}."
);
Ok(())
}

#[test]
fn test_edit_merge_builtin_should_preserve_copy_id() {
fn test_edit_merge_builtin_should_preserve_copy_id() -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();

Expand All @@ -1203,30 +1201,27 @@ mod tests {
.copy_id(new_copy_id.clone());
let tree = tree_builder.write_merged_tree();

let merge_tool_file = MergeToolFile::from_tree_and_path(&tree, file_path)
.block_on()
.unwrap();
let merge_tool_file = MergeToolFile::from_tree_and_path(&tree, file_path).block_on()?;
let merge_file = make_merge_file(&merge_tool_file, store.merge_options()).unwrap();
let tree = apply_merge_builtin(store, &tree, vec![file_path.to_owned()], &[merge_file])
.block_on()
.unwrap();
.block_on()?;

let actual_copy_ids =
tree.path_value(file_path)
.block_on()
.unwrap()
.into_map(|tree_value| {
let Some(TreeValue::File { copy_id, .. }) = tree_value else {
panic!("The path should point to an existing file.");
};
copy_id
});
let actual_copy_ids = tree
.path_value(file_path)
.block_on()?
.into_map(|tree_value| {
let Some(TreeValue::File { copy_id, .. }) = tree_value else {
panic!("The path should point to an existing file.");
};
copy_id
});
assert_eq!(
actual_copy_ids.resolve_trivial(SameChange::Accept),
Some(&new_copy_id),
"Expect the copy id of the file to be resolved to {new_copy_id:?}, but got \
{actual_copy_ids:?}."
);
Ok(())
}

#[test]
Expand Down Expand Up @@ -2022,7 +2017,7 @@ mod tests {
}

#[test]
fn test_edit_diff_builtin_with_matcher() {
fn test_edit_diff_builtin_with_matcher() -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();

Expand Down Expand Up @@ -2050,22 +2045,22 @@ mod tests {

assert_eq!(changed_files, vec![matched_path.to_owned()]);

let result_tree = apply_diff_builtin(store, &left_tree, &right_tree, changed_files, &files)
.block_on()
.unwrap();
let result_tree =
apply_diff_builtin(store, &left_tree, &right_tree, changed_files, &files).block_on()?;

assert_eq!(
result_tree.path_value(matched_path).block_on().unwrap(),
left_tree.path_value(matched_path).block_on().unwrap()
result_tree.path_value(matched_path).block_on()?,
left_tree.path_value(matched_path).block_on()?
);
assert_eq!(
result_tree.path_value(unmatched_path).block_on().unwrap(),
right_tree.path_value(unmatched_path).block_on().unwrap()
result_tree.path_value(unmatched_path).block_on()?,
right_tree.path_value(unmatched_path).block_on()?
);
Ok(())
}

#[test]
fn test_make_merge_sections() {
fn test_make_merge_sections() -> TestResult {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();

Expand Down Expand Up @@ -2097,13 +2092,11 @@ mod tests {
}

let merge = Merge::from_vec(vec![
to_file_id(left_tree.path_value(path).block_on().unwrap()),
to_file_id(base_tree.path_value(path).block_on().unwrap()),
to_file_id(right_tree.path_value(path).block_on().unwrap()),
to_file_id(left_tree.path_value(path).block_on()?),
to_file_id(base_tree.path_value(path).block_on()?),
to_file_id(right_tree.path_value(path).block_on()?),
]);
let content = extract_as_single_hunk(&merge, store, path)
.block_on()
.unwrap();
let content = extract_as_single_hunk(&merge, store, path).block_on()?;
let merge_result = files::merge_hunks(&content, store.merge_options());
let sections = make_merge_sections(merge_result).unwrap();
insta::assert_debug_snapshot!(sections, @r#"
Expand Down Expand Up @@ -2155,6 +2148,7 @@ mod tests {
},
]
"#);
Ok(())
}

prop_state_machine! {
Expand Down
13 changes: 8 additions & 5 deletions lib/src/file_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -487,6 +487,7 @@ mod tests {
use itertools::Itertools as _;
use pollster::FutureExt as _;
use test_case::test_case;
use testutils::TestResult;

use super::*;
use crate::tests::new_temp_dir;
Expand Down Expand Up @@ -691,30 +692,32 @@ mod tests {
}

#[test]
fn test_blocking_async_reader() {
fn test_blocking_async_reader() -> TestResult {
let input = b"hello";
let sync_reader = Cursor::new(&input);
let mut async_reader = BlockingAsyncReader::new(sync_reader);

let mut buf = [0u8; 3];
let num_bytes_read = async_reader.read(&mut buf).block_on().unwrap();
let num_bytes_read = async_reader.read(&mut buf).block_on()?;
assert_eq!(num_bytes_read, 3);
assert_eq!(&buf, &input[0..3]);

let num_bytes_read = async_reader.read(&mut buf).block_on().unwrap();
let num_bytes_read = async_reader.read(&mut buf).block_on()?;
assert_eq!(num_bytes_read, 2);
assert_eq!(&buf[0..2], &input[3..5]);
Ok(())
}

#[test]
fn test_blocking_async_reader_read_to_end() {
fn test_blocking_async_reader_read_to_end() -> TestResult {
let input = b"hello";
let sync_reader = Cursor::new(&input);
let mut async_reader = BlockingAsyncReader::new(sync_reader);

let mut buf = vec![];
let num_bytes_read = async_reader.read_to_end(&mut buf).block_on().unwrap();
let num_bytes_read = async_reader.read_to_end(&mut buf).block_on()?;
assert_eq!(num_bytes_read, input.len());
assert_eq!(&buf, &input);
Ok(())
}
}
Loading