all info directives are now supported
cosmicexplorer committed Aug 29, 2024
1 parent 404de6a commit 69670e3
Showing 8 changed files with 302 additions and 39 deletions.
17 changes: 12 additions & 5 deletions cli/src/args/info.rs
@@ -247,7 +247,6 @@ impl UnixModeFormat {

#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum TimestampFormat {
UnixEpochMilliseconds,
DateOnly,
TimeOnly,
#[default]
@@ -258,7 +257,6 @@ impl TimestampFormat {
pub fn parse(s: &str) -> Result<Self, ModifierParseError> {
match s {
"" => Ok(Self::default()),
":epoch" => Ok(Self::UnixEpochMilliseconds),
":date" => Ok(Self::DateOnly),
":time" => Ok(Self::TimeOnly),
":date-time" => Ok(Self::DateAndTime),
@@ -337,6 +335,7 @@ pub enum EntryFormatDirective {
LocalHeaderStart(OffsetFormat),
ContentStart(OffsetFormat),
ContentEnd(OffsetFormat),
CentralHeaderStart(OffsetFormat),
CompressedSize(ByteSizeFormat),
UncompressedSize(ByteSizeFormat),
UnixMode(UnixModeFormat),
@@ -374,6 +373,11 @@ impl ParseableDirective for EntryFormatDirective {
.map_err(|e| DirectiveParseError::Modifier(s.to_string(), e))?;
Ok(Self::ContentEnd(offset_fmt))
}
s if s.starts_with("central-header-start") => {
let offset_fmt = OffsetFormat::parse(&s["central-header-start".len()..])
.map_err(|e| DirectiveParseError::Modifier(s.to_string(), e))?;
Ok(Self::CentralHeaderStart(offset_fmt))
}
s if s.starts_with("compressed-size") => {
let size_fmt = ByteSizeFormat::parse(&s["compressed-size".len()..])
.map_err(|e| DirectiveParseError::Modifier(s.to_string(), e))?;
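
Each arm above, including the new `central-header-start` one, uses the same prefix-stripping pattern: match on the directive name, then hand whatever follows (an empty string or a `:`-modifier) to that directive's modifier parser. A standalone illustration of the pattern, with a hypothetical helper name:

```rust
// Hypothetical illustration (not in the repo) of the prefix-stripping pattern
// used by each arm: strip the directive name, keep the ":modifier" remainder.
fn directive_modifier<'a>(s: &'a str, name: &str) -> Option<&'a str> {
    if s.starts_with(name) {
        Some(&s[name.len()..]) // "" or a ":modifier" suffix
    } else {
        None
    }
}

// "central-header-start"      -> Some("")      (default offset format)
// "central-header-start:mod"  -> Some(":mod")  (handed to OffsetFormat::parse)
// "compressed-size"           -> None          (a different directive)
```

std's `str::strip_prefix` expresses the same thing in a single call.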
@@ -563,6 +567,10 @@ all the output to a single line.
The offset of the end of the entry's possibly-compressed content. The next
entry's local header begins immediately after.
%central-header-start<offset>%
The offset of the entry's central directory header, at the end of the
zip file.
%compressed-size<byte-size>%
The size of the entry's possibly-compressed content as stored in
the archive.
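
These offsets map directly onto the zip crate accessors this commit reads into `EntryData` further down (`header_start()`, `data_start()`, `central_header_start()`, `compressed_size()`). A small sketch of pulling the same numbers straight from an archive, assuming a readable path:

```rust
use std::fs::File;
use zip::ZipArchive;

// Sketch (not part of the commit): print the offset fields documented above
// for every entry. These are the same accessors EntryData::from_entry() reads
// in cli/src/extract/receiver.rs below.
fn print_offsets(path: &str) -> zip::result::ZipResult<()> {
    let mut archive = ZipArchive::new(File::open(path)?)?;
    for i in 0..archive.len() {
        let entry = archive.by_index(i)?;
        println!(
            "{}: local-header-start={} content-start={} content-end={} central-header-start={}",
            entry.name(),
            entry.header_start(),
            entry.data_start(),
            entry.data_start() + entry.compressed_size(),
            entry.central_header_start(),
        );
    }
    Ok(())
}
```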
@@ -584,7 +592,7 @@ all the output to a single line.
%timestamp<timestamp>%
The timestamp for the entry.
Note that zip timestamps only have precision down to the minute.
Note that zip timestamps only have precision down to 2 seconds.
## Entry format directives:
@@ -608,9 +616,8 @@ unix-mode = '' [DEFAULT => octal]
= ':pretty' (`ls`-like permissions string)
timestamp = '' [DEFAULT => date-time]
= ':epoch' (milliseconds since unix epoch as a decimal number)
= ':date' (ISO 8601 string representation of date)
= ':time' (HH:MM string representation of time)
= ':time' (HH:MM:SS string representation of time)
= ':date-time'
(ISO 8601 date then HH:MM time joined by a space)
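
The two-second granularity comes from the MS-DOS date/time representation zip uses for the built-in last-modified field: seconds are stored divided by two in a 5-bit field, so odd second values cannot be represented. A quick illustration of the decoding (general zip format knowledge, not this crate's API):

```rust
// Why zip timestamps are only precise to 2 seconds: the MS-DOS time word
// packs hours (5 bits), minutes (6 bits), and seconds/2 (5 bits).
fn decode_dos_time(time: u16) -> (u8, u8, u8) {
    let hours = (time >> 11) as u8;           // bits 11..16
    let minutes = ((time >> 5) & 0x3F) as u8; // bits 5..11
    let seconds = ((time & 0x1F) * 2) as u8;  // bits 0..5, stored as seconds/2
    (hours, minutes, seconds)
}

// decode_dos_time(0x6A23) == (13, 17, 6), i.e. 13:17:06;
// 13:17:07 has no representation in this format.
```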
1 change: 1 addition & 0 deletions cli/src/compress.rs
@@ -418,6 +418,7 @@ pub fn execute_compress(mut err: impl Write, args: Compress) -> Result<(), Comma
"name {last_name} remaining after all entry flags processed"
)));
}

for pos_arg in positional_paths.into_iter() {
let file_type = fs::symlink_metadata(&pos_arg)
.wrap_err_with(|| format!("failed to read metadata from path {}", pos_arg.display()))?
4 changes: 2 additions & 2 deletions cli/src/extract.rs
@@ -33,7 +33,7 @@ fn maybe_process_symlink<'a, 't>(
* contents with io::Read. ZipEntry<'a, R> from
* https://github.com/zip-rs/zip2/pull/233 avoids this issue!!! */
let data = EntryData::from_entry(&entry);
(data.kind, data.size)
(data.kind, data.uncompressed_size)
};
if !matches!(kind, EntryKind::Symlink) {
return Ok(None);
@@ -86,7 +86,7 @@ where
deduped_matching_extracts
.into_iter()
.flat_map(|(recv, names)| names.into_iter().map(move |n| (recv, n)))
.map(|(recv, name)| recv.generate_entry_handle(data, symlink_target.as_deref(), name))
.map(|(recv, name)| recv.generate_entry_handle(&data, symlink_target.as_deref(), name))
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.flatten(),
4 changes: 2 additions & 2 deletions cli/src/extract/matcher.rs
@@ -391,8 +391,8 @@ impl EntryMatcher for Size {

fn matches(&self, entry: &EntryData) -> bool {
match self {
Self::Max(max) => entry.size <= *max,
Self::Min(min) => entry.size >= *min,
Self::Max(max) => entry.uncompressed_size <= *max,
Self::Min(min) => entry.uncompressed_size >= *min,
}
}
}
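
Renaming `size` to `uncompressed_size` makes explicit which of the two entry sizes the matcher compares. A hypothetical usage sketch (the helper name is invented here):

```rust
// Hypothetical usage sketch (not in the repo): accept only entries whose
// *uncompressed* payload fits within the given byte budget.
fn within_size_budget(entry: &EntryData<'_>, budget: u64) -> bool {
    Size::Max(budget).matches(entry)
}
```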
44 changes: 37 additions & 7 deletions cli/src/extract/receiver.rs
@@ -8,7 +8,11 @@ use std::{
rc::Rc,
};

use zip::{read::ZipFile, CompressionMethod};
use zip::{
extra_fields::{ExtendedTimestamp, ExtraField},
read::ZipFile,
CompressionMethod, DateTime,
};

use super::matcher::{CompiledMatcher, EntryMatcher};
use super::transform::{CompiledTransformer, NameTransformer};
@@ -21,13 +25,21 @@ pub enum EntryKind {
Symlink,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct EntryData<'a> {
pub name: &'a str,
pub kind: EntryKind,
pub compression: CompressionMethod,
pub unix_mode: Option<u32>,
pub size: u64,
pub comment: &'a str,
pub uncompressed_size: u64,
pub compressed_size: u64,
pub local_header_start: u64,
pub content_start: u64,
pub central_header_start: u64,
pub crc32: u32,
pub last_modified_time: Option<DateTime>,
pub extended_timestamp: Option<ExtendedTimestamp>,
}

impl<'a> EntryData<'a> {
@@ -44,9 +56,27 @@ impl<'a> EntryData<'a> {
},
compression: entry.compression(),
unix_mode: entry.unix_mode(),
size: entry.size(),
comment: entry.comment(),
uncompressed_size: entry.size(),
compressed_size: entry.compressed_size(),
local_header_start: entry.header_start(),
content_start: entry.data_start(),
central_header_start: entry.central_header_start(),
crc32: entry.crc32(),
last_modified_time: entry.last_modified(),
extended_timestamp: entry
.extra_data_fields()
.find_map(|f| match f {
ExtraField::ExtendedTimestamp(ts) => Some(ts),
})
.cloned(),
}
}

#[inline(always)]
pub const fn content_end(&self) -> u64 {
self.content_start + self.compressed_size
}
}
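
`content_end()` is the programmatic counterpart of the `%content-end%` directive documented earlier: the payload's byte range ends exactly where the next entry's local header begins. An illustrative equivalent, not part of the commit:

```rust
// Illustrative only: the half-open byte range of an entry's (possibly
// compressed) payload, computed the same way as EntryData::content_end().
fn content_range(data: &EntryData<'_>) -> std::ops::Range<u64> {
    data.content_start..(data.content_start + data.compressed_size)
}
```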

pub struct ConcatEntry<'w> {
@@ -136,7 +166,7 @@ pub enum MatchingEntrySpec<'a, 'c, 'w> {

impl<'a, 'c, 'w> MatchingEntrySpec<'a, 'c, 'w> {
/* Split output handles for concat, and split generated handles by extract source and
* name. use ptr::eq() to split, and Cow::<'s, str>::eq() with str AsRef. */
* name. use Rc::ptr_eq() to split, and Cow::<'s, str>::eq() with str AsRef. */
pub fn is_nested_duplicate(
self,
deduped_concat_writers: &mut Vec<&'c Rc<RefCell<dyn Write + 'w>>>,
@@ -177,7 +207,7 @@ impl<'a, 'c, 'w> MatchingEntrySpec<'a, 'c, 'w> {
pub trait EntryReceiver: fmt::Debug {
fn generate_entry_handle<'s>(
&self,
data: EntryData<'s>,
data: &EntryData<'s>,
symlink_target: Option<&[u8]>,
name: Cow<'s, str>,
) -> Result<Option<Box<dyn Write>>, CommandError>;
@@ -274,7 +304,7 @@ where
{
fn generate_entry_handle<'s>(
&self,
data: EntryData<'s>,
data: &EntryData<'s>,
symlink_target: Option<&[u8]>,
name: Cow<'s, str>,
) -> Result<Option<Box<dyn Write>>, CommandError> {