Skip to content

Commit

Permalink
conflicts: propagate error from conflict materialization
Browse files (browse the repository at this point in the history)
  • Loading branch information
martinvonz committed Jun 17, 2024
1 parent 66e45a7 commit f8a5ad0
Show file tree
Hide file tree
Showing 5 changed files with 38 additions and 17 deletions.
4 changes: 3 additions & 1 deletion cli/src/merge_tools/builtin.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1014,7 +1014,9 @@ mod tests {
to_file_id(base_tree.path_value(path).unwrap()),
to_file_id(right_tree.path_value(path).unwrap()),
]);
let content = extract_as_single_hunk(&merge, store, path).block_on();
let content = extract_as_single_hunk(&merge, store, path)
.block_on()
.unwrap();
let slices = content.map(|ContentHunk(buf)| buf.as_slice());
let merge_result = files::merge(&slices);
let sections = make_merge_sections(merge_result).unwrap();
Expand Down
2 changes: 1 addition & 1 deletion cli/src/merge_tools/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -307,7 +307,7 @@ impl MergeEditor {
});
};
let content =
extract_as_single_hunk(&simplified_file_merge, tree.store(), repo_path).block_on();
extract_as_single_hunk(&simplified_file_merge, tree.store(), repo_path).block_on()?;

match &self.tool {
MergeTool::Builtin => {
Expand Down
8 changes: 7 additions & 1 deletion lib/src/backend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ use crate::content_hash::ContentHash;
use crate::index::Index;
use crate::merge::Merge;
use crate::object_id::{id_type, ObjectId};
use crate::repo_path::{RepoPath, RepoPathComponent, RepoPathComponentBuf};
use crate::repo_path::{RepoPath, RepoPathBuf, RepoPathComponent, RepoPathComponentBuf};
use crate::signing::SignResult;

id_type!(
Expand Down Expand Up @@ -199,6 +199,12 @@ pub enum BackendError {
hash: String,
source: Box<dyn std::error::Error + Send + Sync>,
},
#[error("Error when reading file content for file {} with id {}", path.as_internal_file_string(), id.hex())]
ReadFile {
path: RepoPathBuf,
id: FileId,
source: Box<dyn std::error::Error + Send + Sync>,
},
#[error("Could not write object of type {object_type}")]
WriteObject {
object_type: &'static str,
Expand Down
37 changes: 24 additions & 13 deletions lib/src/conflicts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
use std::io::{Read, Write};
use std::iter::zip;

use futures::StreamExt;
use futures::{StreamExt, TryStreamExt};
use itertools::Itertools;
use regex::bytes::Regex;

Expand Down Expand Up @@ -77,34 +77,45 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result
Ok(())
}

async fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
async fn get_file_contents(
store: &Store,
path: &RepoPath,
term: &Option<FileId>,
) -> BackendResult<ContentHunk> {
match term {
Some(id) => {
let mut content = vec![];
store
.read_file_async(path, id)
.await
.unwrap()
.await?
.read_to_end(&mut content)
.unwrap();
ContentHunk(content)
.map_err(|err| BackendError::ReadFile {
path: path.to_owned(),
id: id.clone(),
source: format!(
"Failed to read file contents for {}: {err}",
path.as_internal_file_string()
)
.into(),
})?;
Ok(ContentHunk(content))
}
// If the conflict had removed the file on one side, we pretend that the file
// was empty there.
None => ContentHunk(vec![]),
None => Ok(ContentHunk(vec![])),
}
}

pub async fn extract_as_single_hunk(
merge: &Merge<Option<FileId>>,
store: &Store,
path: &RepoPath,
) -> Merge<ContentHunk> {
) -> BackendResult<Merge<ContentHunk>> {
let builder: MergeBuilder<ContentHunk> = futures::stream::iter(merge.iter())
.then(|term| get_file_contents(store, path, term))
.collect()
.await;
builder.build()
.try_collect()
.await?;
Ok(builder.build())
}

/// A type similar to `MergedTreeValue` but with associated data to include in
Expand Down Expand Up @@ -183,7 +194,7 @@ async fn materialize_tree_value_no_access_denied(
let mut contents = vec![];
if let Some(file_merge) = conflict.to_file_merge() {
let file_merge = file_merge.simplify();
let content = extract_as_single_hunk(&file_merge, store, path).await;
let content = extract_as_single_hunk(&file_merge, store, path).await?;
materialize_merge_result(&content, &mut contents)
.expect("Failed to materialize conflict to in-memory buffer");
} else {
Expand Down Expand Up @@ -453,7 +464,7 @@ pub async fn update_from_content(
// conflicts (for example) are not converted to regular files in the working
// copy.
let mut old_content = Vec::with_capacity(content.len());
let merge_hunk = extract_as_single_hunk(simplified_file_ids, store, path).await;
let merge_hunk = extract_as_single_hunk(simplified_file_ids, store, path).await?;
materialize_merge_result(&merge_hunk, &mut old_content).unwrap();
if content == old_content {
return Ok(file_ids.clone());
Expand Down
4 changes: 3 additions & 1 deletion lib/tests/test_conflicts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -991,7 +991,9 @@ fn materialize_conflict_string(
conflict: &Merge<Option<FileId>>,
) -> String {
let mut result: Vec<u8> = vec![];
let contents = extract_as_single_hunk(conflict, store, path).block_on();
let contents = extract_as_single_hunk(conflict, store, path)
.block_on()
.unwrap();
materialize_merge_result(&contents, &mut result).unwrap();
String::from_utf8(result).unwrap()
}

0 comments on commit f8a5ad0

Please sign in to comment.