Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Add hash and length verification to MetaFile and TargetFile #1437

Merged
merged 3 commits into from
Jun 16, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
68 changes: 64 additions & 4 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@

from tests import utils

import tuf.exceptions
from tuf import exceptions
from tuf.api.metadata import (
Metadata,
Root,
Expand Down Expand Up @@ -178,7 +178,7 @@ def test_sign_verify(self):
self.assertTrue(len(metadata_obj.signatures) == 1)
# ... which is valid for the correct key.
targets_key.verify_signature(metadata_obj)
with self.assertRaises(tuf.exceptions.UnsignedMetadataError):
with self.assertRaises(exceptions.UnsignedMetadataError):
snapshot_key.verify_signature(metadata_obj)

sslib_signer = SSlibSigner(self.keystore['snapshot'])
Expand All @@ -197,7 +197,7 @@ def test_sign_verify(self):
self.assertTrue(len(metadata_obj.signatures) == 1)
# ... valid for that key.
timestamp_key.verify_signature(metadata_obj)
with self.assertRaises(tuf.exceptions.UnsignedMetadataError):
with self.assertRaises(exceptions.UnsignedMetadataError):
targets_key.verify_signature(metadata_obj)


Expand Down Expand Up @@ -280,7 +280,6 @@ def test_targetfile_class(self):
targetfile_obj = TargetFile.from_dict(copy.copy(data))
self.assertEqual(targetfile_obj.to_dict(), data)


def test_metadata_snapshot(self):
snapshot_path = os.path.join(
self.repo_dir, 'metadata', 'snapshot.json')
Expand Down Expand Up @@ -352,6 +351,7 @@ def test_metadata_timestamp(self):
timestamp_test = Timestamp.from_dict(test_dict)
self.assertEqual(timestamp_dict['signed'], timestamp_test.to_dict())


def test_key_class(self):
keys = {
"59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d":{
Expand Down Expand Up @@ -638,6 +638,66 @@ def test_support_for_unrecognized_fields(self):
metadata_obj.signed.to_dict(), metadata_obj2.signed.to_dict()
)

def test_length_and_hash_validation(self):
    """Verify MetaFile and TargetFile length/hash checking, including
    the error cases and the optional-fields behavior of MetaFile."""

    # Metadata files: take the MetaFile entry for snapshot.json out of
    # trusted timestamp metadata and verify the on-disk snapshot.json
    # against it.
    timestamp_path = os.path.join(
        self.repo_dir, 'metadata', 'timestamp.json')
    timestamp = Metadata.from_file(timestamp_path)
    snapshot_metafile = timestamp.signed.meta["snapshot.json"]

    snapshot_path = os.path.join(
        self.repo_dir, 'metadata', 'snapshot.json')

    with open(snapshot_path, "rb") as file:
        # Verification accepts an open binary file object ...
        snapshot_metafile.verify_length_and_hashes(file)
        file.seek(0)
        data = file.read()
        # ... and the raw content as bytes.
        snapshot_metafile.verify_length_and_hashes(data)

    # A wrong length must be rejected.
    expected_length = snapshot_metafile.length
    snapshot_metafile.length = 2345
    with self.assertRaises(exceptions.LengthOrHashMismatchError):
        snapshot_metafile.verify_length_and_hashes(data)

    # A wrong hash value must be rejected.
    snapshot_metafile.length = expected_length
    snapshot_metafile.hashes = {'sha256': 'incorrecthash'}
    with self.assertRaises(exceptions.LengthOrHashMismatchError):
        snapshot_metafile.verify_length_and_hashes(data)

    # Length and hashes are optional in MetaFile: with both unset,
    # verification is a no-op and must succeed.
    snapshot_metafile.length = None
    snapshot_metafile.hashes = None
    snapshot_metafile.verify_length_and_hashes(data)

    # Target files: verify targets/file1.txt against its TargetFile
    # entry from trusted targets metadata.
    targets_path = os.path.join(
        self.repo_dir, 'metadata', 'targets.json')
    targets = Metadata.from_file(targets_path)
    file1_targetfile = targets.signed.targets['file1.txt']
    filepath = os.path.join(
        self.repo_dir, 'targets', 'file1.txt')

    with open(filepath, "rb") as file1:
        file1_targetfile.verify_length_and_hashes(file1)

        # A wrong length must be rejected.
        expected_length = file1_targetfile.length
        file1_targetfile.length = 2345
        with self.assertRaises(exceptions.LengthOrHashMismatchError):
            file1_targetfile.verify_length_and_hashes(file1)

        # A wrong hash value must be rejected.
        file1_targetfile.length = expected_length
        file1_targetfile.hashes = {'sha256': 'incorrecthash'}
        with self.assertRaises(exceptions.LengthOrHashMismatchError):
            file1_targetfile.verify_length_and_hashes(file1)


# Run unit test.
if __name__ == '__main__':
Expand Down
119 changes: 110 additions & 9 deletions tuf/api/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,23 @@

"""
import abc
import io
import tempfile
from datetime import datetime, timedelta
from typing import Any, ClassVar, Dict, List, Mapping, Optional, Tuple, Type
from typing import (
Any,
BinaryIO,
ClassVar,
Dict,
List,
Mapping,
Optional,
Tuple,
Type,
Union,
)

from securesystemslib import hash as sslib_hash
from securesystemslib import keys as sslib_keys
from securesystemslib.signer import Signature, Signer
from securesystemslib.storage import FilesystemBackend, StorageBackendInterface
Expand Down Expand Up @@ -622,7 +635,53 @@ def remove_key(self, role: str, keyid: str) -> None:
del self.keys[keyid]


class MetaFile:
class BaseFile:
    """A base class of MetaFile and TargetFile.

    Encapsulates common static methods for length and hash verification.
    """

    @staticmethod
    def _verify_hashes(
        data: Union[bytes, BinaryIO], expected_hashes: Dict[str, str]
    ) -> None:
        """Verifies that the hash of 'data' matches 'expected_hashes'.

        Args:
            data: File object or its content in bytes.
            expected_hashes: Dict mapping hash algorithm names to the
                expected hex digests.
        Raises:
            LengthOrHashMismatchError: A calculated digest does not match
                the expected one.
        """
        is_bytes = isinstance(data, bytes)
        for algo, exp_hash in expected_hashes.items():
            if is_bytes:
                digest_object = sslib_hash.digest(algo)
                digest_object.update(data)
            else:
                # if data is not bytes, assume it is a file object
                digest_object = sslib_hash.digest_fileobject(data, algo)

            observed_hash = digest_object.hexdigest()
            if observed_hash != exp_hash:
                # NOTE: fixed missing space between the two f-string
                # fragments ("...does not matchexpected hash...").
                raise exceptions.LengthOrHashMismatchError(
                    f"Observed hash {observed_hash} does not match "
                    f"expected hash {exp_hash}"
                )

    @staticmethod
    def _verify_length(
        data: Union[bytes, BinaryIO], expected_length: int
    ) -> None:
        """Verifies that the length of 'data' matches 'expected_length'.

        Args:
            data: File object or its content in bytes.
            expected_length: Expected length of 'data' in bytes.
        Raises:
            LengthOrHashMismatchError: Observed length does not match the
                expected one.
        """
        if isinstance(data, bytes):
            observed_length = len(data)
        else:
            # if data is not bytes, assume it is a file object;
            # NOTE: this leaves the file position at end-of-file.
            data.seek(0, io.SEEK_END)
            observed_length = data.tell()

        if observed_length != expected_length:
            # NOTE: fixed missing space between the two f-string
            # fragments ("...does not matchexpected length...").
            raise exceptions.LengthOrHashMismatchError(
                f"Observed length {observed_length} does not match "
                f"expected length {expected_length}"
            )


class MetaFile(BaseFile):
"""A container with information about a particular metadata file.

Attributes:
Expand Down Expand Up @@ -660,6 +719,13 @@ def from_dict(cls, meta_dict: Dict[str, Any]) -> "MetaFile":
version = meta_dict.pop("version")
length = meta_dict.pop("length", None)
hashes = meta_dict.pop("hashes", None)

# Do some basic input validation
if version <= 0:
raise ValueError(f"Metafile version must be > 0, got {version}")
if length is not None and length <= 0:
raise ValueError(f"Metafile length must be > 0, got {length}")
Comment on lines +723 to +727
Copy link
Member

@jku jku Jun 9, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I guess this is where we get to decide what to do with input data that we don't have a "natural" check for: length and version get validated "enough" like this IMO but hashes could still be anything...

For the values we could run

if not all(isinstance(h, str) for h in hashes.values()):
    raise ValueError("Invalid hash value")

But maybe we don't need to? Is it enough that the hash verification will fail when it's tried on completely bogus hash value?

For the dict keys: Can we make SSLib check that the algorithms are known to SSLib?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah ... I avoided intentionally the downward validation spiral here ...

Hash values: there is even a regex about correct hash value in formats.py now but I wasn't sure if it isn't an overkill.
Dict keys: Probably we can pre-define somewhere supported algorithms by sslib because now the check is done runtime during hash calculation.

I will file an issue about it, especially hash values.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Here it is: #1441


# All fields left in the meta_dict are unrecognized.
return cls(version, length, hashes, meta_dict)

Expand All @@ -678,6 +744,22 @@ def to_dict(self) -> Dict[str, Any]:

return res_dict

def verify_length_and_hashes(self, data: Union[bytes, BinaryIO]) -> None:
    """Verifies that the length and hashes of "data" match expected
    values.

    Both fields are optional in MetaFile: a None length and a None (or
    empty) hashes dict each skip the corresponding check.

    Args:
        data: File object or its content in bytes.
    Raises:
        LengthOrHashMismatchError: Calculated length or hashes do not
            match expected values.
    """
    if self.length is not None:
        self._verify_length(data, self.length)

    # Skip the check in case of an empty dictionary too
    if self.hashes:
        self._verify_hashes(data, self.hashes)


class Timestamp(Signed):
"""A container for the signed part of timestamp metadata.
Expand Down Expand Up @@ -905,7 +987,7 @@ def to_dict(self) -> Dict[str, Any]:
}


class TargetFile:
class TargetFile(BaseFile):
"""A container with information about a particular target file.

Attributes:
Expand All @@ -923,12 +1005,6 @@ class TargetFile:

"""

@property
def custom(self):
if self.unrecognized_fields is None:
return None
return self.unrecognized_fields.get("custom", None)

def __init__(
self,
length: int,
Expand All @@ -939,11 +1015,24 @@ def __init__(
self.hashes = hashes
self.unrecognized_fields = unrecognized_fields or {}

@property
def custom(self):
if self.unrecognized_fields is None:
return None
return self.unrecognized_fields.get("custom", None)

jku marked this conversation as resolved.
Show resolved Hide resolved
@classmethod
def from_dict(cls, target_dict: Dict[str, Any]) -> "TargetFile":
    """Creates TargetFile object from its dict representation.

    Args:
        target_dict: Dict with mandatory "length" and "hashes" keys; all
            remaining keys are kept as unrecognized fields.
    Raises:
        KeyError: "length" or "hashes" is missing from 'target_dict'.
        ValueError: "length" is not positive, or "hashes" is empty.
    """
    length = target_dict.pop("length")
    hashes = target_dict.pop("hashes")

    # Do some basic validation checks
    if length <= 0:
        raise ValueError(f"Targetfile length must be > 0, got {length}")
    if not hashes:
        raise ValueError("Missing targetfile hashes")

    # All fields left in the target_dict are unrecognized.
    return cls(length, hashes, target_dict)

Expand All @@ -955,6 +1044,18 @@ def to_dict(self) -> Dict[str, Any]:
**self.unrecognized_fields,
}

def verify_length_and_hashes(self, data: Union[bytes, BinaryIO]) -> None:
    """Verifies that the length and hashes of "data" match expected
    values.

    Unlike MetaFile, both length and hashes are mandatory for a
    TargetFile, so neither check is skipped.

    Args:
        data: File object or its content in bytes.
    Raises:
        LengthOrHashMismatchError: Calculated length or hashes do not
            match expected values.
    """
    self._verify_length(data, self.length)
    self._verify_hashes(data, self.hashes)


class Targets(Signed):
"""A container for the signed part of targets metadata.
Expand Down
5 changes: 2 additions & 3 deletions tuf/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,8 @@ def __repr__(self):
class UnsupportedAlgorithmError(Error):
"""Indicate an error while trying to identify a user-specified algorithm."""

class LengthOrHashMismatchError(Error):
    """Indicate an error while checking the length and hash values of an object."""

class BadHashError(Error):
"""Indicate an error while checking the value of a hash object."""
Expand All @@ -88,9 +90,6 @@ def __repr__(self):
# self.__class__.__name__ + '(' + repr(self.expected_hash) + ', ' +
# repr(self.observed_hash) + ')')




class BadVersionNumberError(Error):
"""Indicate an error for metadata that contains an invalid version number."""

Expand Down