8 | 8 |
9 | 9 | import abc
10 | 10 | import fnmatch
| 11 | +import hashlib
11 | 12 | import io
12 | 13 | import logging
13 | 14 | from dataclasses import dataclass

21 | 22 | )
22 | 23 |
23 | 24 | from securesystemslib import exceptions as sslib_exceptions
24 | | -from securesystemslib import hash as sslib_hash
25 | 25 | from securesystemslib.signer import Key, Signature
26 | 26 |
27 | 27 | from tuf.api.exceptions import LengthOrHashMismatchError, UnsignedMetadataError

34 | 34 | _TARGETS = "targets"
35 | 35 | _TIMESTAMP = "timestamp"
36 | 36 |
| 37 | +_DEFAULT_HASH_ALGORITHM = "sha256"
| 38 | +_BLAKE_HASH_ALGORITHM = "blake2b-256"
| 39 | +
37 | 40 | # We aim to support SPECIFICATION_VERSION and require the input metadata
38 | 41 | # files to have the same major version (the first number) as ours.
39 | 42 | SPECIFICATION_VERSION = ["1", "0", "31"]

45 | 48 | T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets")
46 | 49 |
47 | 50 |
| 51 | +def _hash(algo: str) -> Any:  # noqa: ANN401
| 52 | +    """Returns a new hash object, supporting the custom "blake2b-256" algo name."""
| 53 | +    if algo == _BLAKE_HASH_ALGORITHM:
| 54 | +        return hashlib.blake2b(digest_size=32)
| 55 | +
| 56 | +    return hashlib.new(algo)
| 57 | +
| 58 | +
| 59 | +def _file_hash(f: IO[bytes], algo: str) -> Any:  # noqa: ANN401
| 60 | +    """Returns a hash object updated with the contents of file object ``f``."""
| 61 | +    f.seek(0)
| 62 | +    try:
| 63 | +        # Only available on Python 3.11+
| 64 | +        digest = hashlib.file_digest(f, lambda: _hash(algo))  # type: ignore[arg-type]
| 65 | +
| 66 | +    except AttributeError:
| 67 | +        # Ports `securesystemslib.hash.digest_fileobject` (v1.0), as fallback
| 68 | +        # on older Pythons (chunk size is taken from there).
| 69 | +        digest = _hash(algo)
| 70 | +        for chunk in iter(lambda: f.read(4096), b""):
| 71 | +            digest.update(chunk)
| 72 | +
| 73 | +    return digest
| 74 | +
| 75 | +
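
Since this change swaps `securesystemslib.hash` for the stdlib `hashlib`, a quick illustrative sketch of how the two new helpers behave may be useful. It re-derives `_hash` inline and uses made-up input data; none of the demo values come from the codebase:

```python
import hashlib
import io

# Re-derived from the diff above: "blake2b-256" is not a name that
# hashlib.new() recognizes, so it needs the explicit constructor.
def _hash(algo: str):
    if algo == "blake2b-256":
        return hashlib.blake2b(digest_size=32)
    return hashlib.new(algo)

data = b"hello TUF"  # made-up demo input

# In-memory hashing works for any hashlib-known name plus the custom one:
assert _hash("blake2b-256").digest_size == 32

# File-object hashing: fast path on Python 3.11+, manual loop otherwise.
f = io.BytesIO(data)
try:
    digest = hashlib.file_digest(f, lambda: _hash("sha256"))
except AttributeError:  # hashlib.file_digest exists only on Python 3.11+
    f.seek(0)
    digest = _hash("sha256")
    for chunk in iter(lambda: f.read(4096), b""):
        digest.update(chunk)

assert digest.hexdigest() == hashlib.sha256(data).hexdigest()
```
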
48 | 76 | class Signed(metaclass=abc.ABCMeta):
49 | 77 |     """A base class for the signed part of TUF metadata.
50 | 78 |

@@ -664,19 +692,15 @@ def _verify_hashes(
664 | 692 |     data: bytes | IO[bytes], expected_hashes: dict[str, str]
665 | 693 | ) -> None:
666 | 694 |     """Verify that the hash of ``data`` matches ``expected_hashes``."""
667 | | -    is_bytes = isinstance(data, bytes)
668 | 695 |     for algo, exp_hash in expected_hashes.items():
669 | 696 |         try:
670 | | -            if is_bytes:
671 | | -                digest_object = sslib_hash.digest(algo)
| 697 | +            if isinstance(data, bytes):
| 698 | +                digest_object = _hash(algo)
672 | 699 |                 digest_object.update(data)
673 | 700 |             else:
674 | 701 |                 # if data is not bytes, assume it is a file object
675 | | -                digest_object = sslib_hash.digest_fileobject(data, algo)
676 | | -        except (
677 | | -            sslib_exceptions.UnsupportedAlgorithmError,
678 | | -            sslib_exceptions.FormatError,
679 | | -        ) as e:
| 702 | +                digest_object = _file_hash(data, algo)
| 703 | +        except (ValueError, TypeError) as e:
680 | 704 |             raise LengthOrHashMismatchError(
681 | 705 |                 f"Unsupported algorithm '{algo}'"
682 | 706 |             ) from e
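
The `except` clause changes because `hashlib.new()` raises `ValueError` for unknown algorithm names (and presumably `TypeError` for non-string input), rather than the securesystemslib exception types. A minimal sketch of the verification flow after this change, with demo values computed inline (the real code raises `LengthOrHashMismatchError` instead of printing):

```python
import hashlib

# Demo values, not from the codebase:
data = b"metadata bytes"
expected_hashes = {
    "sha256": hashlib.sha256(data).hexdigest(),
    "no-such-algo": "00",
}

for algo, exp_hash in expected_hashes.items():
    try:
        digest_object = hashlib.new(algo)  # ValueError for "no-such-algo"
        digest_object.update(data)
    except (ValueError, TypeError) as e:
        print(f"Unsupported algorithm '{algo}': {e}")
        continue
    if digest_object.hexdigest() != exp_hash:
        print(f"{algo} mismatch")
```
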
@@ -731,21 +755,16 @@ def _get_length_and_hashes(
731 | 755 |     hashes = {}
732 | 756 |
733 | 757 |     if hash_algorithms is None:
734 | | -        hash_algorithms = [sslib_hash.DEFAULT_HASH_ALGORITHM]
| 758 | +        hash_algorithms = [_DEFAULT_HASH_ALGORITHM]
735 | 759 |
736 | 760 |     for algorithm in hash_algorithms:
737 | 761 |         try:
738 | 762 |             if isinstance(data, bytes):
739 | | -                digest_object = sslib_hash.digest(algorithm)
| 763 | +                digest_object = _hash(algorithm)
740 | 764 |                 digest_object.update(data)
741 | 765 |             else:
742 | | -                digest_object = sslib_hash.digest_fileobject(
743 | | -                    data, algorithm
744 | | -                )
745 | | -        except (
746 | | -            sslib_exceptions.UnsupportedAlgorithmError,
747 | | -            sslib_exceptions.FormatError,
748 | | -        ) as e:
| 766 | +                digest_object = _file_hash(data, algorithm)
| 767 | +        except (ValueError, TypeError) as e:
749 | 768 |             raise ValueError(f"Unsupported algorithm '{algorithm}'") from e
750 | 769 |
751 | 770 |         hashes[algorithm] = digest_object.hexdigest()
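
For reference, a self-contained sketch of what this helper now computes for in-memory data, using only stdlib `hashlib` and demo inputs (the algorithm list and data are illustrative):

```python
import hashlib

data = b"example target content"       # demo input
hash_algorithms = ["sha256", "sha512"]  # demo algorithm list

length = len(data)
hashes = {}
for algorithm in hash_algorithms:
    digest_object = hashlib.new(algorithm)
    digest_object.update(data)
    hashes[algorithm] = digest_object.hexdigest()

print({"length": length, "hashes": hashes})
```
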
@@ -1150,7 +1169,7 @@ def is_delegated_path(self, target_filepath: str) -> bool:
1150 | 1169 |         if self.path_hash_prefixes is not None:
1151 | 1170 |             # Calculate the hash of the filepath
1152 | 1171 |             # to determine in which bin to find the target.
1153 | | -            digest_object = sslib_hash.digest(algorithm="sha256")
| 1172 | +            digest_object = hashlib.new(name="sha256")
1154 | 1173 |             digest_object.update(target_filepath.encode("utf-8"))
1155 | 1174 |             target_filepath_hash = digest_object.hexdigest()
1156 | 1175 |

@@ -1269,7 +1288,7 @@ def get_role_for_target(self, target_filepath: str) -> str:
1269 | 1288 |             target_filepath: URL path to a target file, relative to a base
1270 | 1289 |                 targets URL.
1271 | 1290 |         """
1272 | | -        hasher = sslib_hash.digest(algorithm="sha256")
| 1291 | +        hasher = hashlib.new(name="sha256")
1273 | 1292 |         hasher.update(target_filepath.encode("utf-8"))
1274 | 1293 |
1275 | 1294 |         # We can't ever need more than 4 bytes (32 bits).
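
The "4 bytes (32 bits)" comment refers to how hashed-bin delegation only ever consumes the leading hex digits of the path hash to pick a bin. A sketch of that selection under assumed values (the bin count and prefix length here are hypothetical; the real ones come from the delegation configuration):

```python
import hashlib

target_filepath = "path/to/file.txt"  # hypothetical target path
number_of_bins = 256                  # hypothetical; a power of two
prefix_length = 2                     # hex digits needed for 256 bins

hasher = hashlib.new(name="sha256")
hasher.update(target_filepath.encode("utf-8"))

# Only the leading hex digits of the digest matter for bin selection,
# so at most 4 bytes (8 hex digits, 32 bits) are ever needed.
prefix = hasher.hexdigest()[:prefix_length]
bin_index = int(prefix, 16)
print(prefix, bin_index)
```
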