Check Omaha sha1 hash if available and Verify checksum after download, with retry #47

Merged · 4 commits · Jan 5, 2024
5 changes: 5 additions & 0 deletions .github/workflows/ci.yaml
@@ -34,3 +34,8 @@ jobs:
with:
command: test
args: --workspace
- name: Run clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --workspace
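The added step runs Clippy over the whole workspace on every CI run; the same lint pass can be reproduced locally with cargo clippy --workspace.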
18 changes: 16 additions & 2 deletions Cargo.lock

(generated lockfile; diff not rendered)

4 changes: 2 additions & 2 deletions examples/download_test.rs
@@ -13,10 +13,10 @@ fn main() -> Result<(), Box<dyn Error>> {

let tempdir = tempfile::tempdir()?;
let path = tempdir.path().join("tmpfile");
let res = download_and_hash(&client, url, &path, false)?;
let res = download_and_hash(&client, url, &path, None, None, false)?;
tempdir.close()?;

println!("hash: {}", res.hash);
println!("hash: {}", res.hash_sha256);

Ok(())
}
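The example reflects the new call shape: download_and_hash now takes the expected SHA-256 and SHA-1 digests as optional arguments (passing None skips that comparison), and the result exposes hash_sha256 rather than a single hash field. Below is a rough sketch of the signature implied by the call sites in this PR; the real definition lives in src/lib.rs, which is not part of this excerpt, so the struct name, client type and bounds are assumptions.

// Sketch inferred from the call sites in this PR, not the actual src/lib.rs code.
pub struct DownloadResult {
    pub hash_sha256: omaha::Hash<omaha::Sha256>,
    pub hash_sha1: omaha::Hash<omaha::Sha1>,
    pub data: std::fs::File, // download_sysext.rs reads metadata() from this
}

pub fn download_and_hash<U: reqwest::IntoUrl>(
    client: &reqwest::blocking::Client,
    url: U,
    path: &std::path::Path,
    expected_sha256: Option<omaha::Hash<omaha::Sha256>>, // verified (with retry) when Some
    expected_sha1: Option<omaha::Hash<omaha::Sha1>>,     // verified (with retry) when Some
    print_progress: bool,
) -> anyhow::Result<DownloadResult> {
    unimplemented!("see src/lib.rs in the repository for the real implementation")
}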
6 changes: 3 additions & 3 deletions examples/full_test.rs
@@ -80,12 +80,12 @@ fn main() -> Result<(), Box<dyn Error>> {

let tempdir = tempfile::tempdir()?;
let path = tempdir.path().join("tmpfile");
let res = ue_rs::download_and_hash(&client, url.clone(), &path, false).context(format!("download_and_hash({url:?}) failed"))?;
let res = ue_rs::download_and_hash(&client, url.clone(), &path, Some(expected_sha256.clone()), None, false).context(format!("download_and_hash({url:?}) failed"))?;
tempdir.close()?;

println!("\texpected sha256: {}", expected_sha256);
println!("\tcalculated sha256: {}", res.hash);
println!("\tsha256 match? {}", expected_sha256 == res.hash);
println!("\tcalculated sha256: {}", res.hash_sha256);
println!("\tsha256 match? {}", expected_sha256 == res.hash_sha256);
}

Ok(())
Expand Down
3 changes: 3 additions & 0 deletions omaha/Cargo.toml
@@ -10,6 +10,9 @@ uuid = "1.2"
ct-codecs = "1"
url = "2"
anyhow = "1.0.75"
sha2 = "0.10.8"
sha1 = "0.10.6"
digest = "0.10.7"

[dependencies.hard-xml]
path = "../vendor/hard-xml"
45 changes: 43 additions & 2 deletions omaha/src/hash_types.rs
@@ -1,6 +1,8 @@
use std::fmt;
use std::str;

use sha2::Digest;

use anyhow::{Error as CodecError, anyhow};

#[rustfmt::skip]
@@ -22,24 +24,63 @@ pub trait HashAlgo {
const HASH_NAME: &'static str;

type Output: AsRef<[u8]> + AsMut<[u8]> + Default + Sized + Eq;

fn hasher() -> impl digest::DynDigest;
fn from_boxed(s: Box<[u8]>) -> Self::Output;
}

impl HashAlgo for Sha1 {
const HASH_NAME: &'static str = "Sha1";
type Output = [u8; 20];

fn hasher() -> impl digest::DynDigest {
sha1::Sha1::new()
}

fn from_boxed(s: Box<[u8]>) -> Self::Output {
let mut v = s.into_vec();
v.resize(Self::Output::default().len(), 0);
let boxed_array: Box<Self::Output> = match v.into_boxed_slice().try_into() {
Ok(a) => a,
Err(e) => {
println!("Unexpected length {}", e.len());
#[allow(clippy::box_default)]
Box::new(Self::Output::default())
}
};
*boxed_array
}
}

impl HashAlgo for Sha256 {
const HASH_NAME: &'static str = "Sha256";
type Output = [u8; 32];

fn hasher() -> impl digest::DynDigest {
sha2::Sha256::new()
}

fn from_boxed(s: Box<[u8]>) -> Self::Output {
let mut v = s.into_vec();
v.resize(Self::Output::default().len(), 0);
let boxed_array: Box<Self::Output> = match v.into_boxed_slice().try_into() {
Ok(a) => a,
Err(e) => {
println!("Unexpected length {}", e.len());
#[allow(clippy::box_default)]
Box::new(Self::Output::default())
}
};
*boxed_array
}
}

#[derive(PartialEq, Eq, Clone)]
pub struct Hash<T: HashAlgo>(T::Output);

impl<T: HashAlgo> Hash<T> {
pub fn from_bytes(digest: T::Output) -> Self {
Self(digest)
pub fn from_bytes(digest: Box<[u8]>) -> Self {
Self(T::from_boxed(digest))
}
}

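With hasher() and from_boxed() on the trait, a caller can hash a file once per algorithm without naming the concrete digest type, which is what lets download_sysext.rs below invoke hash_on_disk::<omaha::Sha256>() and hash_on_disk::<omaha::Sha1>() through one code path. A minimal sketch of such a generic helper, assuming an interface like the ue_rs::hash_on_disk referenced in this PR (the real implementation in src/lib.rs is not shown in this diff and may differ):

use std::fs::File;
use std::io::{BufReader, Read};
use std::path::Path;

use anyhow::{Context, Result};
use digest::DynDigest; // needed to call update()/finalize_reset() on the opaque hasher

// Sketch only: name and details are illustrative, not the code merged in src/lib.rs.
fn hash_on_disk_sketch<T: omaha::HashAlgo>(path: &Path, maxlen: Option<usize>) -> Result<omaha::Hash<T>> {
    let file = File::open(path).context(format!("failed to open {:?}", path))?;
    let mut reader = BufReader::new(file);
    // HashAlgo::hasher() returns an impl digest::DynDigest, so the read loop
    // is identical for Sha1 and Sha256.
    let mut hasher = T::hasher();

    let mut remaining = maxlen.unwrap_or(usize::MAX);
    let mut buf = [0u8; 8192];
    while remaining > 0 {
        let want = remaining.min(buf.len());
        let n = reader.read(&mut buf[..want]).context("read failed")?;
        if n == 0 {
            break; // end of file
        }
        hasher.update(&buf[..n]);
        remaining -= n;
    }

    // finalize_reset() hands back a Box<[u8]>, which the reworked Hash::from_bytes()
    // converts via HashAlgo::from_boxed() into the fixed-size output array.
    Ok(omaha::Hash::from_bytes(hasher.finalize_reset()))
}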
78 changes: 46 additions & 32 deletions src/bin/download_sysext.rs
@@ -21,7 +21,7 @@ use reqwest::redirect::Policy;
use url::Url;

use update_format_crau::delta_update;
use ue_rs::hash_on_disk_sha256;
use ue_rs::hash_on_disk;

#[derive(Debug)]
enum PackageStatus {
@@ -38,7 +38,8 @@ enum PackageStatus {
struct Package<'a> {
url: Url,
name: Cow<'a, str>,
hash: omaha::Hash<omaha::Sha256>,
hash_sha256: Option<omaha::Hash<omaha::Sha256>>,
hash_sha1: Option<omaha::Hash<omaha::Sha1>>,
size: omaha::FileSize,
status: PackageStatus,
}
@@ -48,8 +49,8 @@ impl<'a> Package<'a> {
// Return the hash (for the hash algorithm T) of data in the given path.
// If maxlen is None, read to the end of the file.
// If maxlen is Some, read only up to the given length.
fn hash_on_disk(&mut self, path: &Path, maxlen: Option<usize>) -> Result<omaha::Hash<omaha::Sha256>> {
hash_on_disk_sha256(path, maxlen)
fn hash_on_disk<T: omaha::HashAlgo>(&mut self, path: &Path, maxlen: Option<usize>) -> Result<omaha::Hash<T>> {
hash_on_disk::<T>(path, maxlen)
}

#[rustfmt::skip]
@@ -80,10 +81,13 @@ impl<'a> Package<'a> {

if size_on_disk == expected_size {
info!("{}: download complete, checking hash...", path.display());
let hash = self.hash_on_disk(&path, None).context({
let hash_sha256 = self.hash_on_disk::<omaha::Sha256>(&path, None).context({
format!("failed to hash_on_disk, path ({:?})", path.display())
})?;
if self.verify_checksum(hash) {
let hash_sha1 = self.hash_on_disk::<omaha::Sha1>(&path, None).context({
format!("failed to hash_on_disk, path ({:?})", path.display())
})?;
if self.verify_checksum(hash_sha256, hash_sha1) {
info!("{}: good hash, will continue without re-download", path.display());
} else {
info!("{}: bad hash, will re-download", path.display());
@@ -105,7 +109,14 @@ impl<'a> Package<'a> {
info!("downloading {}...", self.url);

let path = into_dir.join(&*self.name);
let res = match ue_rs::download_and_hash(client, self.url.clone(), &path, print_progress) {
match ue_rs::download_and_hash(
client,
self.url.clone(),
&path,
self.hash_sha256.clone(),
self.hash_sha1.clone(),
print_progress,
) {
Ok(ok) => ok,
Err(err) => {
error!("Downloading failed with error {}", err);
@@ -114,16 +125,19 @@ impl<'a> Package<'a> {
}
};

self.verify_checksum(res.hash);
self.status = PackageStatus::Unverified;
Ok(())
}

fn verify_checksum(&mut self, calculated: omaha::Hash<omaha::Sha256>) -> bool {
debug!(" expected sha256: {}", self.hash);
debug!(" calculated sha256: {}", calculated);
debug!(" sha256 match? {}", self.hash == calculated);
fn verify_checksum(&mut self, calculated_sha256: omaha::Hash<omaha::Sha256>, calculated_sha1: omaha::Hash<omaha::Sha1>) -> bool {
debug!(" expected sha256: {:?}", self.hash_sha256);
debug!(" calculated sha256: {}", calculated_sha256);
debug!(" sha256 match? {}", self.hash_sha256 == Some(calculated_sha256.clone()));
debug!(" expected sha1: {:?}", self.hash_sha1);
debug!(" calculated sha1: {}", calculated_sha1);
debug!(" sha1 match? {}", self.hash_sha1 == Some(calculated_sha1.clone()));

if self.hash != calculated {
if self.hash_sha256.is_some() && self.hash_sha256 != Some(calculated_sha256.clone()) || self.hash_sha1.is_some() && self.hash_sha1 != Some(calculated_sha1.clone()) {
self.status = PackageStatus::BadChecksum;
false
} else {
@@ -150,7 +164,7 @@ impl<'a> Package<'a> {

// Get length of header and data, including header and manifest.
let header_data_length = delta_update::get_header_data_length(&header, &delta_archive_manifest).context("failed to get header data length")?;
let hdhash = self.hash_on_disk(from_path, Some(header_data_length)).context(format!("failed to hash_on_disk path ({:?}) failed", from_path.display()))?;
let hdhash = self.hash_on_disk::<omaha::Sha256>(from_path, Some(header_data_length)).context(format!("failed to hash_on_disk path ({:?}) failed", from_path.display()))?;
let hdhashvec: Vec<u8> = hdhash.clone().into();

// Extract data blobs into a file, datablobspath.
@@ -162,8 +176,8 @@ impl<'a> Package<'a> {
None => bail!("unable to get new_partition_info hash"),
};

let datahash = self.hash_on_disk(datablobspath.as_path(), None).context(format!("failed to hash_on_disk path ({:?})", datablobspath.display()))?;
if datahash != omaha::Hash::from_bytes(pinfo_hash.as_slice()[..].try_into().unwrap_or_default()) {
let datahash = self.hash_on_disk::<omaha::Sha256>(datablobspath.as_path(), None).context(format!("failed to hash_on_disk path ({:?})", datablobspath.display()))?;
if datahash != omaha::Hash::from_bytes(pinfo_hash.as_slice()[..].into()) {
bail!(
"mismatch of data hash ({:?}) with new_partition_info hash ({:?})",
datahash,
@@ -207,30 +221,29 @@ fn get_pkgs_to_download<'a>(resp: &'a omaha::Response, glob_set: &GlobSet)
}

let hash_sha256 = pkg.hash_sha256.as_ref();
let hash_sha1 = pkg.hash.as_ref();

// TODO: multiple URLs per package
// not sure if nebraska sends us more than one right now but i suppose this is
// for mirrors?
let url = app.update_check.urls.get(0)
.map(|u| u.join(&pkg.name));
let Some(Ok(url)) = app.update_check.urls.first()
.map(|u| u.join(&pkg.name)) else {
warn!("can't get url for package `{}`, skipping", pkg.name);
continue;
};

match (url, hash_sha256) {
(Some(Ok(url)), Some(hash)) => {
if hash_sha256.is_none() && hash_sha1.is_none() {
warn!("package `{}` doesn't have a valid SHA256 or SHA1 hash, skipping", pkg.name);
continue;
}
to_download.push(Package {
url,
name: Cow::Borrowed(&pkg.name),
hash: hash.clone(),
hash_sha256: hash_sha256.cloned(),
hash_sha1: hash_sha1.cloned(),
size: pkg.size,
status: PackageStatus::ToDownload
})
}

(Some(Ok(_)), None) => {
warn!("package `{}` doesn't have a valid SHA256 hash, skipping", pkg.name);
}

_ => (),
}
});
}
}

@@ -243,11 +256,12 @@ where
U: reqwest::IntoUrl + From<U> + std::clone::Clone + std::fmt::Debug,
Url: From<U>,
{
let r = ue_rs::download_and_hash(client, input_url.clone(), path, print_progress).context(format!("unable to download data(url {:?})", input_url))?;
let r = ue_rs::download_and_hash(client, input_url.clone(), path, None, None, print_progress).context(format!("unable to download data(url {:?})", input_url))?;

Ok(Package {
name: Cow::Borrowed(path.file_name().unwrap_or(OsStr::new("fakepackage")).to_str().unwrap_or("fakepackage")),
hash: r.hash,
hash_sha256: Some(r.hash_sha256),
hash_sha1: Some(r.hash_sha1),
size: FileSize::from_bytes(r.data.metadata().context(format!("failed to get metadata, path ({:?})", path.display()))?.len() as usize),
url: input_url.into(),
status: PackageStatus::Unverified,
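Taken together, verify_checksum() now treats each digest as optional: a download only fails when a hash that Omaha actually advertised disagrees with the one computed from disk, and packages that carry neither digest are filtered out earlier in get_pkgs_to_download(). A small self-contained illustration of that decision rule (hypothetical helper, not code from this PR):

// Hypothetical helper mirroring the condition in verify_checksum():
// a download is bad only if an expected digest is present and differs.
fn digests_match<S: PartialEq, O: PartialEq>(
    expected_sha256: Option<&S>,
    calculated_sha256: &S,
    expected_sha1: Option<&O>,
    calculated_sha1: &O,
) -> bool {
    let sha256_bad = expected_sha256.map_or(false, |e| e != calculated_sha256);
    let sha1_bad = expected_sha1.map_or(false, |e| e != calculated_sha1);
    !(sha256_bad || sha1_bad)
}

fn main() {
    // SHA-256 advertised and matching, no SHA-1 sent by the server: accepted.
    assert!(digests_match(Some(&"abc"), &"abc", None::<&&str>, &"ignored"));
    // SHA-1 advertised but wrong: rejected, regardless of the SHA-256 outcome.
    assert!(!digests_match(Some(&"abc"), &"abc", Some(&"old"), &"new"));
}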