Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Backport latest pr to 2.2 #1413

Merged
merged 3 commits into from
Aug 29, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 25 additions & 2 deletions src/bin/nydus-image/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -360,6 +360,12 @@ fn prepare_cmd_args(bti_string: &'static str) -> App {
.required(false)
.help("RAFS blob digest list separated by comma"),
)
.arg(
Arg::new("original-blob-ids")
.long("original-blob-ids")
.required(false)
.help("original blob id list separated by comma, it may usually be a sha256 hex string"),
)
.arg(
Arg::new("blob-sizes")
.long("blob-sizes")
Expand Down Expand Up @@ -903,6 +909,12 @@ impl Command {
.map(|item| item.trim().to_string())
.collect()
});
let original_blob_ids: Option<Vec<String>> =
matches.get_one::<String>("original-blob-ids").map(|list| {
list.split(',')
.map(|item| item.trim().to_string())
.collect()
});
let blob_toc_sizes: Option<Vec<u64>> =
matches.get_one::<String>("blob-toc-sizes").map(|list| {
list.split(',')
Expand Down Expand Up @@ -943,6 +955,7 @@ impl Command {
parent_bootstrap_path,
source_bootstrap_paths,
blob_digests,
original_blob_ids,
blob_sizes,
blob_toc_digests,
blob_toc_sizes,
Expand Down Expand Up @@ -1345,9 +1358,10 @@ impl Command {
let file_type = metadata(path.as_ref())
.context(format!("failed to access path {:?}", path.as_ref()))?
.file_type();
// The SOURCE can be a regular file, FIFO file, or /dev/stdin char device, etc..
ensure!(
file_type.is_file() || file_type.is_fifo(),
"specified path must be a regular/fifo file: {:?}",
file_type.is_file() || file_type.is_fifo() || file_type.is_char_device(),
"specified path must be a regular/fifo/char_device file: {:?}",
path.as_ref()
);
Ok(())
Expand All @@ -1364,3 +1378,12 @@ impl Command {
Ok(())
}
}

#[cfg(test)]
mod tests {
    use super::Command;

    /// `/dev/stdin` is a character device; `ensure_file` must accept it
    /// in addition to regular files and FIFOs.
    #[test]
    fn test_ensure_file() {
        let checked = Command::ensure_file("/dev/stdin");
        checked.unwrap();
    }
}
40 changes: 39 additions & 1 deletion src/bin/nydus-image/merge.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,20 @@ use crate::core::tree::{MetadataTreeBuilder, Tree};
pub struct Merger {}

impl Merger {
/// Return a clone of the `idx`-th entry of an optional string list.
///
/// Yields `Ok(None)` when no list was supplied at all, `Ok(Some(_))` when
/// the entry exists, and an error when a list was supplied but is shorter
/// than `idx + 1` (i.e. the caller provided fewer entries than sources).
fn get_string_from_list(
    original_ids: &Option<Vec<String>>,
    idx: usize,
) -> Result<Option<String>> {
    original_ids
        .as_ref()
        .map(|ids| {
            ids.get(idx)
                .cloned()
                // Fixed message: it previously said "digest", copy-pasted from
                // get_digest_from_list, which was misleading for blob id lists.
                .ok_or_else(|| anyhow!("unmatched index {} in string list", idx))
        })
        // Option<Result<_>> -> Result<Option<_>>
        .transpose()
}

fn get_digest_from_list(digests: &Option<Vec<String>>, idx: usize) -> Result<Option<[u8; 32]>> {
Ok(if let Some(digests) = &digests {
let digest = digests
Expand Down Expand Up @@ -65,6 +79,7 @@ impl Merger {
parent_bootstrap_path: Option<String>,
sources: Vec<PathBuf>,
blob_digests: Option<Vec<String>>,
original_blob_ids: Option<Vec<String>>,
blob_sizes: Option<Vec<u64>>,
blob_toc_digests: Option<Vec<String>>,
blob_toc_sizes: Option<Vec<u64>>,
Expand All @@ -83,6 +98,22 @@ impl Merger {
sources.len(),
);
}
if let Some(original_ids) = original_blob_ids.as_ref() {
ensure!(
original_ids.len() == sources.len(),
"number of original blob id entries {} doesn't match number of sources {}",
original_ids.len(),
sources.len(),
);
}
if let Some(sizes) = blob_sizes.as_ref() {
ensure!(
sizes.len() == sources.len(),
"number of blob size entries {} doesn't match number of sources {}",
sizes.len(),
sources.len(),
);
}
if let Some(toc_digests) = blob_toc_digests.as_ref() {
ensure!(
toc_digests.len() == sources.len(),
Expand Down Expand Up @@ -186,7 +217,14 @@ impl Merger {
} else {
// The blob id (blob sha256 hash) in parent bootstrap is invalid for nydusd
// runtime, should change it to the hash of whole tar blob.
blob_ctx.blob_id = BlobInfo::get_blob_id_from_meta_path(bootstrap_path)?;
if let Some(original_id) =
Self::get_string_from_list(&original_blob_ids, layer_idx)?
{
blob_ctx.blob_id = original_id;
} else {
blob_ctx.blob_id =
BlobInfo::get_blob_id_from_meta_path(bootstrap_path)?;
}
}
if let Some(digest) = Self::get_digest_from_list(&blob_digests, layer_idx)? {
if blob.has_feature(BlobFeatures::SEPARATE) {
Expand Down
6 changes: 3 additions & 3 deletions storage/src/meta/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -405,9 +405,10 @@ impl BlobCompressionContextInfo {
if let Some(reader) = reader {
let buffer =
unsafe { std::slice::from_raw_parts_mut(base as *mut u8, expected_size) };
buffer[0..].fill(0);
Self::read_metadata(blob_info, reader, buffer)?;
Self::validate_header(blob_info, header)?;
if !Self::validate_header(blob_info, header)? {
return Err(enoent!(format!("double check blob_info still invalid",)));
}
filemap.sync_data()?;
} else {
return Err(enoent!(format!(
Expand Down Expand Up @@ -751,7 +752,6 @@ impl BlobCompressionContextInfo {
if u32::from_le(header.s_magic) != BLOB_CCT_MAGIC
|| u32::from_le(header.s_magic2) != BLOB_CCT_MAGIC
|| u32::from_le(header.s_ci_entries) != blob_info.chunk_count()
|| u32::from_le(header.s_features) != blob_info.features().bits()
|| u32::from_le(header.s_ci_compressor) != blob_info.meta_ci_compressor() as u32
|| u64::from_le(header.s_ci_offset) != blob_info.meta_ci_offset()
|| u64::from_le(header.s_ci_compressed_size) != blob_info.meta_ci_compressed_size()
Expand Down