diff --git a/builder/src/merge.rs b/builder/src/merge.rs
index 8301b32cf8d..56b0c8d3a94 100644
--- a/builder/src/merge.rs
+++ b/builder/src/merge.rs
@@ -29,6 +29,20 @@ use super::{
 pub struct Merger {}
 
 impl Merger {
+    fn get_string_from_list(
+        original_ids: &Option<Vec<String>>,
+        idx: usize,
+    ) -> Result<Option<String>> {
+        Ok(if let Some(id) = &original_ids {
+            let id_string = id
+                .get(idx)
+                .ok_or_else(|| anyhow!("unmatched digest index {}", idx))?;
+            Some(id_string.clone())
+        } else {
+            None
+        })
+    }
+
     fn get_digest_from_list(digests: &Option<Vec<String>>, idx: usize) -> Result<Option<RafsDigest>> {
         Ok(if let Some(digests) = &digests {
             let digest = digests
@@ -62,6 +76,7 @@ impl Merger {
         parent_bootstrap_path: Option<String>,
         sources: Vec<PathBuf>,
         blob_digests: Option<Vec<String>>,
+        original_blob_ids: Option<Vec<String>>,
         blob_sizes: Option<Vec<u64>>,
         blob_toc_digests: Option<Vec<String>>,
         blob_toc_sizes: Option<Vec<u64>>,
@@ -80,6 +95,14 @@ impl Merger {
                 sources.len(),
             );
         }
+        if let Some(original_ids) = original_blob_ids.as_ref() {
+            ensure!(
+                original_ids.len() == sources.len(),
+                "number of original blob id entries {} doesn't match number of sources {}",
+                original_ids.len(),
+                sources.len(),
+            );
+        }
         if let Some(sizes) = blob_sizes.as_ref() {
             ensure!(
                 sizes.len() == sources.len(),
@@ -194,7 +217,14 @@ impl Merger {
                 } else {
                     // The blob id (blob sha256 hash) in parent bootstrap is invalid for nydusd
                     // runtime, should change it to the hash of whole tar blob.
-                    blob_ctx.blob_id = BlobInfo::get_blob_id_from_meta_path(bootstrap_path)?;
+                    if let Some(original_id) =
+                        Self::get_string_from_list(&original_blob_ids, layer_idx)?
+                    {
+                        blob_ctx.blob_id = original_id;
+                    } else {
+                        blob_ctx.blob_id =
+                            BlobInfo::get_blob_id_from_meta_path(bootstrap_path)?;
+                    }
                 }
                 if let Some(digest) = Self::get_digest_from_list(&blob_digests, layer_idx)? {
                     if blob.has_feature(BlobFeatures::SEPARATE) {
diff --git a/src/bin/nydus-image/main.rs b/src/bin/nydus-image/main.rs
index f551930b78b..200b5cc3784 100644
--- a/src/bin/nydus-image/main.rs
+++ b/src/bin/nydus-image/main.rs
@@ -431,6 +431,12 @@ fn prepare_cmd_args(bti_string: &'static str) -> App {
                 .required(false)
                 .help("RAFS blob digest list separated by comma"),
         )
+        .arg(
+            Arg::new("original-blob-ids")
+                .long("original-blob-ids")
+                .required(false)
+                .help("original blob id list separated by comma, it may usually be a sha256 hex string"),
+        )
         .arg(
             Arg::new("blob-sizes")
                 .long("blob-sizes")
@@ -1194,6 +1200,12 @@ impl Command {
                     .map(|item| item.trim().to_string())
                     .collect()
             });
+        let original_blob_ids: Option<Vec<String>> =
+            matches.get_one::<String>("original-blob-ids").map(|list| {
+                list.split(',')
+                    .map(|item| item.trim().to_string())
+                    .collect()
+            });
         let blob_toc_sizes: Option<Vec<u64>> =
             matches.get_one::<String>("blob-toc-sizes").map(|list| {
                 list.split(',')
@@ -1234,6 +1246,7 @@ impl Command {
             parent_bootstrap_path,
             source_bootstrap_paths,
             blob_digests,
+            original_blob_ids,
             blob_sizes,
             blob_toc_digests,
             blob_toc_sizes,
@@ -1695,9 +1708,10 @@ impl Command {
         let file_type = metadata(path.as_ref())
             .context(format!("failed to access path {:?}", path.as_ref()))?
             .file_type();
+        // The SOURCE can be a regular file, FIFO file, or /dev/stdin char device, etc..
         ensure!(
-            file_type.is_file() || file_type.is_fifo(),
-            "specified path must be a regular/fifo file: {:?}",
+            file_type.is_file() || file_type.is_fifo() || file_type.is_char_device(),
+            "specified path must be a regular/fifo/char_device file: {:?}",
             path.as_ref()
         );
         Ok(())
@@ -1804,3 +1818,12 @@ impl Command {
         nydus_service::validate_threads_configuration(v).map(|s| s.to_string())
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::Command;
+    #[test]
+    fn test_ensure_file() {
+        Command::ensure_file("/dev/stdin").unwrap();
+    }
+}
diff --git a/storage/src/meta/mod.rs b/storage/src/meta/mod.rs
index a8d1e23d0a4..5787f5bcf94 100644
--- a/storage/src/meta/mod.rs
+++ b/storage/src/meta/mod.rs
@@ -436,9 +436,10 @@ impl BlobCompressionContextInfo {
         if let Some(reader) = reader {
             let buffer =
                 unsafe { std::slice::from_raw_parts_mut(base as *mut u8, expected_size) };
-            buffer[0..].fill(0);
             Self::read_metadata(blob_info, reader, buffer)?;
-            Self::validate_header(blob_info, header)?;
+            if !Self::validate_header(blob_info, header)? {
+                return Err(enoent!(format!("double check blob_info still invalid",)));
+            }
             filemap.sync_data()?;
         } else {
             return Err(enoent!(format!(
@@ -851,7 +852,6 @@ impl BlobCompressionContextInfo {
         if u32::from_le(header.s_magic) != BLOB_CCT_MAGIC
             || u32::from_le(header.s_magic2) != BLOB_CCT_MAGIC
             || u32::from_le(header.s_ci_entries) != blob_info.chunk_count()
-            || u32::from_le(header.s_features) != blob_info.features().bits()
             || u32::from_le(header.s_ci_compressor) != blob_info.meta_ci_compressor() as u32
             || u64::from_le(header.s_ci_offset) != blob_info.meta_ci_offset()
             || u64::from_le(header.s_ci_compressed_size) != blob_info.meta_ci_compressed_size()