WIP: support use cache only to run image #1416

Closed · wants to merge 4 commits
api/src/config.rs: 11 additions & 0 deletions

@@ -339,6 +339,7 @@ impl BackendConfigV2 {
}
None => return false,
},
"noop" => return true,
_ => return false,
}

@@ -755,6 +756,9 @@ pub struct FileCacheConfig {
/// Key for data encryption, a hexadecimal representation of [u8; 32].
#[serde(default)]
pub encryption_key: String,
/// Disable the chunk map function.
#[serde(default)]
pub disable_chunk_map: bool,
}

impl FileCacheConfig {
@@ -842,6 +846,9 @@ pub struct RafsConfigV2 {
/// Filesystem prefetching configuration.
#[serde(default)]
pub prefetch: PrefetchConfigV2,
/// Only use the cache; don't access the backend.
#[serde(default)]
pub use_cache_only: bool,
}

impl RafsConfigV2 {
@@ -1366,6 +1373,9 @@ struct RafsConfig {
// A value of zero means amplifying user I/O is not enabled.
#[serde(default = "default_batch_size")]
pub amplify_io: usize,
// Only use the cache; don't access the backend.
#[serde(default)]
pub use_cache_only: bool,
}

impl TryFrom<RafsConfig> for ConfigV2 {
@@ -1383,6 +1393,7 @@ impl TryFrom<RafsConfig> for ConfigV2 {
access_pattern: v.access_pattern,
latest_read_files: v.latest_read_files,
prefetch: v.fs_prefetch.into(),
use_cache_only: v.use_cache_only,
};
if !cache.prefetch.enable && rafs.prefetch.enable {
cache.prefetch = rafs.prefetch.clone();
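All three new knobs above carry #[serde(default)], so configuration files written before this change still deserialize, with the new behavior switched off. A self-contained sketch of that compatibility property, using a local mirror type rather than the real nydus-api struct:

use serde::Deserialize; // needs serde (derive feature) and serde_json

// Mirror of the relevant RafsConfigV2 fields, for illustration only.
#[derive(Deserialize)]
struct RafsConfigSketch {
    mode: String,
    #[serde(default)]
    use_cache_only: bool,
}

fn main() {
    // An old config without the new field still parses; the flag defaults to false.
    let old: RafsConfigSketch = serde_json::from_str(r#"{"mode":"direct"}"#).unwrap();
    assert!(!old.use_cache_only);

    // A new config opts in explicitly.
    let new: RafsConfigSketch =
        serde_json::from_str(r#"{"mode":"direct","use_cache_only":true}"#).unwrap();
    assert!(new.use_cache_only);
}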
builder/src/compact.rs: 2 additions & 0 deletions

@@ -610,6 +610,8 @@ impl BlobCompactor {
false,
Features::new(),
false,
None,
None,
);
let mut bootstrap_mgr =
BootstrapManager::new(Some(ArtifactStorage::SingleFile(d_bootstrap)), None);
builder/src/core/blob.rs: 16 additions & 4 deletions

@@ -3,15 +3,16 @@
// SPDX-License-Identifier: Apache-2.0

use std::borrow::Cow;
use std::io::Write;
use std::io::{Seek, Write};
use std::mem::size_of;
use std::slice;

use anyhow::{Context, Result};
use nydus_rafs::metadata::RAFS_MAX_CHUNK_SIZE;
use nydus_storage::device::BlobFeatures;
use nydus_storage::meta::{toc, BlobMetaChunkArray};
use nydus_storage::meta::{toc, BlobCompressionContextHeader, BlobMetaChunkArray};
use nydus_utils::digest::{self, DigestHasher, RafsDigest};
use nydus_utils::{compress, crypt};
use nydus_utils::{compress, crypt, try_round_up_4k};
use sha2::digest::Digest;

use super::layout::BlobLayout;
@@ -194,7 +195,6 @@ impl Blob {
} else if ctx.blob_tar_reader.is_some() {
header.set_separate_blob(true);
};

let mut compressor = Self::get_compression_algorithm_for_meta(ctx);
let (compressed_data, compressed) = compress::compress(ci_data, compressor)
.with_context(|| "failed to compress blob chunk info array".to_string())?;
@@ -223,6 +223,18 @@
}

blob_ctx.blob_meta_header = header;
if let Some(meta_writer) = ctx.blob_meta_writer.as_ref() {
let mut meta = meta_writer.lock().unwrap();
let aligned_uncompressed_size = try_round_up_4k(uncompressed_size as u64).unwrap();
meta.set_len(
aligned_uncompressed_size + size_of::<BlobCompressionContextHeader>() as u64,
)?;
meta.seek(std::io::SeekFrom::Start(0))?;
meta.write_all(ci_data)?;
meta.seek(std::io::SeekFrom::Start(aligned_uncompressed_size))?;
meta.write_all(header.as_bytes())?;
meta.flush()?;
}
let encrypted_header =
crypt::encrypt_with_context(header.as_bytes(), cipher_obj, cipher_ctx, encrypt)?;
let header_size = encrypted_header.len();
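For reference, the meta-writer block above lays the --blob-meta file out as: the uncompressed chunk-info array (ci_data) at offset 0, then the BlobCompressionContextHeader at the next 4 KiB boundary (assuming uncompressed_size here is the size of ci_data, which is what the try_round_up_4k call suggests). A minimal sketch of that arithmetic, with illustrative names:

// Returns (ci_data offset, header offset, total file length) for a blob-meta
// file: the chunk-info array at 0, the header at the 4 KiB-aligned end of it.
fn blob_meta_layout(ci_data_len: u64, header_len: u64) -> (u64, u64, u64) {
    let aligned = (ci_data_len + 0xfff) & !0xfff; // round up to a 4 KiB boundary
    (0, aligned, aligned + header_len)
}

fn main() {
    // A 5000-byte array rounds up to 8192, so the header starts at 8192.
    assert_eq!(blob_meta_layout(5000, 4096), (0, 8192, 12288));
}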
builder/src/core/context.rs: 15 additions & 1 deletion

@@ -193,7 +193,7 @@ impl Write for ArtifactMemoryWriter {
}
}

struct ArtifactFileWriter(ArtifactWriter);
pub struct ArtifactFileWriter(pub ArtifactWriter);

impl RafsIoWrite for ArtifactFileWriter {
fn as_any(&self) -> &dyn Any {
@@ -215,6 +215,12 @@ impl RafsIoWrite for ArtifactFileWriter {
}
}

impl ArtifactFileWriter {
pub fn set_len(&mut self, s: u64) -> std::io::Result<()> {
self.0.file.get_mut().set_len(s)
}
}

impl Seek for ArtifactFileWriter {
fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
self.0.file.seek(pos)
@@ -1182,6 +1188,8 @@ pub struct BuildContext {

pub features: Features,
pub configuration: Arc<ConfigV2>,
pub blob_cache_writer: Option<Mutex<ArtifactFileWriter>>,
pub blob_meta_writer: Option<Mutex<ArtifactFileWriter>>,
}

impl BuildContext {
@@ -1201,6 +1209,8 @@
blob_inline_meta: bool,
features: Features,
encrypt: bool,
blob_cache_writer: Option<Mutex<ArtifactFileWriter>>,
blob_meta_writer: Option<Mutex<ArtifactFileWriter>>,
) -> Self {
// It's a flag for images built with new nydus-image 2.2 and newer.
let mut blob_features = BlobFeatures::CAP_TAR_TOC;
@@ -1250,6 +1260,8 @@

features,
configuration: Arc::new(ConfigV2::default()),
blob_cache_writer,
blob_meta_writer,
}
}

@@ -1299,6 +1311,8 @@ impl Default for BuildContext {
blob_inline_meta: false,
features: Features::new(),
configuration: Arc::new(ConfigV2::default()),
blob_cache_writer: None,
blob_meta_writer: None,
}
}
}
builder/src/core/node.rs: 16 additions & 1 deletion

@@ -6,7 +6,7 @@
use std::ffi::{OsStr, OsString};
use std::fmt::{self, Display, Formatter, Result as FmtResult};
use std::fs::{self, File};
use std::io::{Read, Write};
use std::io::{Read, Seek, Write};
use std::ops::Deref;
#[cfg(target_os = "linux")]
use std::os::linux::fs::MetadataExt;
@@ -462,6 +462,21 @@ impl Node {
chunk.set_compressed(is_compressed);
}

if let Some(writer) = &ctx.blob_cache_writer {
    let mut guard = writer.lock().unwrap();
    let curr_pos = guard.seek(std::io::SeekFrom::End(0))?;
    if curr_pos < chunk.uncompressed_offset() + aligned_d_size as u64 {
        guard.set_len(chunk.uncompressed_offset() + aligned_d_size as u64)?;
    }

    guard.seek(std::io::SeekFrom::Start(chunk.uncompressed_offset()))?;
    guard
        .write_all(&chunk_data)
        .context("failed to write blob cache")?;
}
event_tracer!("blob_uncompressed_size", +d_size);

Ok(chunk_info)
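The hunk above writes each chunk's uncompressed data at its fixed uncompressed_offset, first growing the cache file whenever a chunk would land past the current end. A standalone sketch of the same write pattern; the path, offset, and data are illustrative, not from the PR:

use std::fs::OpenOptions;
use std::io::{Seek, SeekFrom, Write};

// Write `data` at a fixed offset, extending the file first if necessary, so
// the result is an offset-addressable (and typically sparse) cache file.
fn write_chunk_at(path: &str, offset: u64, data: &[u8]) -> std::io::Result<()> {
    let mut f = OpenOptions::new().read(true).write(true).create(true).open(path)?;
    let end = f.seek(SeekFrom::End(0))?;
    if end < offset + data.len() as u64 {
        f.set_len(offset + data.len() as u64)?;
    }
    f.seek(SeekFrom::Start(offset))?;
    f.write_all(data)
}

fn main() -> std::io::Result<()> {
    write_chunk_at("/tmp/demo.blob.cache", 4096, b"chunk data")
}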
builder/src/lib.rs: 2 additions & 2 deletions

@@ -26,8 +26,8 @@ pub use self::compact::BlobCompactor;
pub use self::core::bootstrap::Bootstrap;
pub use self::core::chunk_dict::{parse_chunk_dict_arg, ChunkDict, HashChunkDict};
pub use self::core::context::{
ArtifactStorage, ArtifactWriter, BlobContext, BlobManager, BootstrapContext, BootstrapManager,
BuildContext, BuildOutput, ConversionType,
ArtifactFileWriter, ArtifactStorage, ArtifactWriter, BlobContext, BlobManager,
BootstrapContext, BootstrapManager, BuildContext, BuildOutput, ConversionType,
};
pub use self::core::feature::{Feature, Features};
pub use self::core::node::{ChunkSource, NodeChunk};
builder/src/stargz.rs: 2 additions & 0 deletions

@@ -935,6 +935,8 @@ mod tests {
false,
Features::new(),
false,
None,
None,
);
ctx.fs_version = RafsVersion::V6;
let mut bootstrap_mgr =
builder/src/tarball.rs: 4 additions & 0 deletions

@@ -689,6 +689,8 @@ mod tests {
false,
Features::new(),
false,
None,
None,
);
let mut bootstrap_mgr =
BootstrapManager::new(Some(ArtifactStorage::FileDir(tmp_dir)), None);
@@ -721,6 +723,8 @@
false,
Features::new(),
true,
None,
None,
);
let mut bootstrap_mgr =
BootstrapManager::new(Some(ArtifactStorage::FileDir(tmp_dir)), None);
src/bin/nydus-image/main.rs: 68 additions & 3 deletions

@@ -27,9 +27,10 @@ use nix::unistd::{getegid, geteuid};
use nydus::{get_build_time_info, setup_logging};
use nydus_api::{BuildTimeInfo, ConfigV2, LocalFsConfig};
use nydus_builder::{
parse_chunk_dict_arg, ArtifactStorage, BlobCompactor, BlobManager, BootstrapManager,
BuildContext, BuildOutput, Builder, ConversionType, DirectoryBuilder, Feature, Features,
HashChunkDict, Merger, Prefetch, PrefetchPolicy, StargzBuilder, TarballBuilder, WhiteoutSpec,
parse_chunk_dict_arg, ArtifactFileWriter, ArtifactStorage, ArtifactWriter, BlobCompactor,
BlobManager, BootstrapManager, BuildContext, BuildOutput, Builder, ConversionType,
DirectoryBuilder, Feature, Features, HashChunkDict, Merger, Prefetch, PrefetchPolicy,
StargzBuilder, TarballBuilder, WhiteoutSpec,
};
use nydus_rafs::metadata::{RafsSuper, RafsSuperConfig, RafsVersion};
use nydus_storage::backend::localfs::LocalFs;
@@ -356,6 +357,20 @@ fn prepare_cmd_args(bti_string: &'static str) -> App {
.action(ArgAction::SetTrue)
.required(false)
)
.arg(
Arg::new("blob-cache")
.long("blob-cache")
.help("generate blob cache file")
.value_parser(clap::value_parser!(PathBuf))
.required(false)
)
.arg(
Arg::new("blob-meta")
.long("blob-meta")
.help("generate blob meta file")
.value_parser(clap::value_parser!(PathBuf))
.required(false)
)
);

let app = app.subcommand(
@@ -793,6 +808,8 @@ impl Command {
let version = Self::get_fs_version(matches)?;
let chunk_size = Self::get_chunk_size(matches, conversion_type)?;
let batch_size = Self::get_batch_size(matches, version, conversion_type, chunk_size)?;
let blob_cache_writer = Self::get_blob_cache_writer(matches, conversion_type)?;
let blob_meta_writer = Self::get_blob_meta_writer(matches, conversion_type)?;
let aligned_chunk = if version.is_v6() && conversion_type != ConversionType::TarToTarfs {
true
} else {
@@ -1028,6 +1045,8 @@
blob_inline_meta,
features,
encrypt,
blob_cache_writer,
blob_meta_writer,
);
build_ctx.set_fs_version(version);
build_ctx.set_chunk_size(chunk_size);
@@ -1462,6 +1481,52 @@
}
}

fn get_blob_meta_writer(
matches: &ArgMatches,
conversion_type: ConversionType,
) -> Result<Option<Mutex<ArtifactFileWriter>>> {
if conversion_type == ConversionType::EStargzIndexToRef {
Ok(None)
} else if let Some(p) = matches
.get_one::<PathBuf>("blob-meta")
.map(|b| ArtifactStorage::SingleFile(b.clone()))
{
if conversion_type == ConversionType::TarToTarfs {
bail!(
"conversion type `{}` conflicts with `--blob-meta`",
conversion_type
);
}
let writer = ArtifactFileWriter(ArtifactWriter::new(p)?);
Ok(Some(Mutex::new(writer)))
} else {
Ok(None)
}
}

fn get_blob_cache_writer(
matches: &ArgMatches,
conversion_type: ConversionType,
) -> Result<Option<Mutex<ArtifactFileWriter>>> {
if conversion_type == ConversionType::EStargzIndexToRef {
Ok(None)
} else if let Some(p) = matches
.get_one::<PathBuf>("blob-cache")
.map(|b| ArtifactStorage::SingleFile(b.clone()))
{
if conversion_type == ConversionType::TarToTarfs {
bail!(
"conversion type `{}` conflicts with `--blob-cache`",
conversion_type
);
}
let writer = ArtifactFileWriter(ArtifactWriter::new(p)?);
Ok(Some(Mutex::new(writer)))
} else {
Ok(None)
}
}

// Must specify a path to the blob file.
// For CLI/binary interface compatibility's sake, keep the `backend-config` option, but
// it only accepts the "localfs" backend type, and it will be REMOVED in the future.
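Taken together with the argument parsing above: assuming the two flags are registered on the create subcommand, an invocation along the lines of `nydus-image create --blob-cache ./image.blob.cache --blob-meta ./image.blob.meta <SOURCE>` (paths illustrative) would emit the uncompressed cache file and its metadata alongside the normal build output. The two helpers mirror each other apart from the flag name: both return Ok(None) for ConversionType::EStargzIndexToRef and bail for ConversionType::TarToTarfs.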
storage/src/backend/mod.rs: 2 additions & 0 deletions

@@ -49,6 +49,8 @@ pub mod registry;
#[cfg(feature = "backend-s3")]
pub mod s3;

pub mod noop;

/// Error codes related to storage backend operations.
#[derive(Debug)]
pub enum BackendError {
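Presumably the new noop module supplies the backend implementation behind the "noop" type that the BackendConfigV2 validation change above now accepts: a backend that performs no remote I/O, so a use_cache_only mount can be served entirely from the blob cache and blob meta files generated at build time.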