diff --git a/swh/model/hashutil.py b/swh/model/hashutil.py index 20bde97..f9aca1b 100644 --- a/swh/model/hashutil.py +++ b/swh/model/hashutil.py @@ -1,236 +1,236 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """Module in charge of hashing function definitions. This is the base module use to compute swh's hashes. Only a subset of hashing algorithms is supported as defined in the ALGORITHMS set. Any provided algorithms not in that list will result in a ValueError explaining the error. This modules defines the following hashing functions: - hash_file: Hash the contents of the given file object with the given algorithms (defaulting to DEFAULT_ALGORITHMS if none provided). - hash_data: Hash the given binary blob with the given algorithms (defaulting to DEFAULT_ALGORITHMS if none provided). - hash_path: Hash the contents of the file at the given path with the given algorithms (defaulting to DEFAULT_ALGORITHMS if none provided). 
""" import binascii import functools import hashlib import os import sys from io import BytesIO # Supported algorithms ALGORITHMS = set(['sha1', 'sha256', 'sha1_git', 'blake2s256', 'blake2b512']) # Default algorithms used -DEFAULT_ALGORITHMS = set(['sha1', 'sha256', 'sha1_git']) +DEFAULT_ALGORITHMS = set(['sha1', 'sha256', 'sha1_git', 'blake2s256']) # should be a multiple of 64 (sha1/sha256's block size) # FWIW coreutils' sha1sum uses 32768 HASH_BLOCK_SIZE = 32768 # Prior to python3.4, only blake2 is available through pyblake2 module # From 3.5 onwards, it's been integrated in python if sys.version_info.major == 3 and sys.version_info.minor <= 4: import pyblake2 # register those hash algorithms in hashlib __cache = hashlib.__builtin_constructor_cache __cache['blake2s256'] = pyblake2.blake2s __cache['blake2b512'] = pyblake2.blake2b def _new_git_hash(base_algo, git_type, length): """Initialize a digest object (as returned by python's hashlib) for the requested algorithm, and feed it with the header for a git object of the given type and length. The header for hashing a git object consists of: - The type of the object (encoded in ASCII) - One ASCII space (\x20) - The length of the object (decimal encoded in ASCII) - One NUL byte Args: base_algo: a hashlib-supported algorithm git_type: the type of the git object (supposedly one of 'blob', 'commit', 'tag', 'tree') length: the length of the git object you're encoding Returns: a hashutil.hash object """ h = hashlib.new(base_algo) git_header = '%s %d\0' % (git_type, length) h.update(git_header.encode('ascii')) return h def _new_hash(algo, length=None): """Initialize a digest object (as returned by python's hashlib) for the requested algorithm. See the constant ALGORITHMS for the list of supported algorithms. If a git-specific hashing algorithm is requested (e.g., "sha1_git"), the hashing object will be pre-fed with the needed header; for this to work, length must be given. 
Args: algo (str): a hashing algorithm (one of ALGORITHMS) length (int): the length of the hashed payload (needed for git-specific algorithms) Returns: a hashutil.hash object Raises: ValueError if algo is unknown, or length is missing for a git-specific hash. """ if algo not in ALGORITHMS: raise ValueError( 'Unexpected hashing algorithm %s, expected one of %s' % (algo, ', '.join(sorted(ALGORITHMS)))) if algo.endswith('_git'): if length is None: raise ValueError('Missing length for git hashing algorithm') base_algo = algo[:-4] return _new_git_hash(base_algo, 'blob', length) return hashlib.new(algo) def hash_file(fobj, length=None, algorithms=DEFAULT_ALGORITHMS, chunk_cb=None): """Hash the contents of the given file object with the given algorithms. Args: fobj: a file-like object length: the length of the contents of the file-like object (for the git-specific algorithms) algorithms: the hashing algorithms used Returns: a dict mapping each algorithm to a bytes digest. Raises: ValueError if algorithms contains an unknown hash algorithm. """ hashes = {algo: _new_hash(algo, length) for algo in algorithms} while True: chunk = fobj.read(HASH_BLOCK_SIZE) if not chunk: break for hash in hashes.values(): hash.update(chunk) if chunk_cb: chunk_cb(chunk) return {algo: hash.digest() for algo, hash in hashes.items()} def hash_path(path, algorithms=DEFAULT_ALGORITHMS, chunk_cb=None): """Hash the contents of the file at the given path with the given algorithms. Args: path: the path of the file to hash algorithms: the hashing algorithms used chunk_cb: a callback Returns: a dict mapping each algorithm to a bytes digest. Raises: ValueError if algorithms contains an unknown hash algorithm. OSError on file access error """ length = os.path.getsize(path) with open(path, 'rb') as fobj: hash = hash_file(fobj, length, algorithms, chunk_cb) hash['length'] = length return hash def hash_data(data, algorithms=DEFAULT_ALGORITHMS): """Hash the given binary blob with the given algorithms. 
Args: data: a bytes object algorithms: the hashing algorithms used Returns: a dict mapping each algorithm to a bytes digest Raises: TypeError if data does not support the buffer interface. ValueError if algorithms contains an unknown hash algorithm. """ fobj = BytesIO(data) return hash_file(fobj, len(data), algorithms) def hash_git_data(data, git_type, base_algo='sha1'): """Hash the given data as a git object of type git_type. Args: data: a bytes object git_type: the git object type base_algo: the base hashing algorithm used (default: sha1) Returns: a dict mapping each algorithm to a bytes digest Raises: ValueError if the git_type is unexpected. """ git_object_types = {'blob', 'tree', 'commit', 'tag'} if git_type not in git_object_types: raise ValueError('Unexpected git object type %s, expected one of %s' % (git_type, ', '.join(sorted(git_object_types)))) h = _new_git_hash(base_algo, git_type, len(data)) h.update(data) return h.digest() @functools.lru_cache() def hash_to_hex(hash): """Converts a hash (in hex or bytes form) to its hexadecimal ascii form""" if isinstance(hash, str): return hash return binascii.hexlify(hash).decode('ascii') @functools.lru_cache() def hash_to_bytehex(hash): """Converts a hash to its hexadecimal bytes representation""" return binascii.hexlify(hash) @functools.lru_cache() def hash_to_bytes(hash): """Converts a hash (in hex or bytes form) to its raw bytes form""" if isinstance(hash, bytes): return hash return bytes.fromhex(hash) @functools.lru_cache() def bytehex_to_hash(hex): """Converts a hexadecimal bytes representation of a hash to that hash""" return hash_to_bytes(hex.decode()) diff --git a/swh/model/identifiers.py b/swh/model/identifiers.py index 4eb2b9d..d51304e 100644 --- a/swh/model/identifiers.py +++ b/swh/model/identifiers.py @@ -1,495 +1,490 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any 
later version # See top-level LICENSE file for more information import binascii import datetime from functools import lru_cache -from .hashutil import hash_data, hash_git_data +from .hashutil import hash_data, hash_git_data, DEFAULT_ALGORITHMS @lru_cache() def identifier_to_bytes(identifier): """Convert a text identifier to bytes. Args: identifier: an identifier, either a 40-char hexadecimal string or a bytes object of length 20 Returns: The length 20 bytestring corresponding to the given identifier Raises: ValueError if the identifier is of an unexpected type or length. """ if isinstance(identifier, bytes): if len(identifier) != 20: raise ValueError( 'Wrong length for bytes identifier %s, expected 20' % len(identifier)) return identifier if isinstance(identifier, str): if len(identifier) != 40: raise ValueError( 'Wrong length for str identifier %s, expected 40' % len(identifier)) return bytes.fromhex(identifier) raise ValueError('Wrong type for identifier %s, expected bytes or str' % identifier.__class__.__name__) @lru_cache() def identifier_to_str(identifier): """Convert an identifier to an hexadecimal string. Args: identifier: an identifier, either a 40-char hexadecimal string or a bytes object of length 20 Returns: The length 40 string corresponding to the given identifier, hex encoded Raises: ValueError if the identifier is of an unexpected type or length. """ if isinstance(identifier, str): if len(identifier) != 40: raise ValueError( 'Wrong length for str identifier %s, expected 40' % len(identifier)) return identifier if isinstance(identifier, bytes): if len(identifier) != 20: raise ValueError( 'Wrong length for bytes identifier %s, expected 20' % len(identifier)) return binascii.hexlify(identifier).decode() raise ValueError('Wrong type for identifier %s, expected bytes or str' % identifier.__class__.__name__) def content_identifier(content): """Return the intrinsic identifier for a content. 
A content's identifier is the sha1, sha1_git and sha256 checksums of its data. Args: content: a content conforming to the Software Heritage schema Returns: A dictionary with all the hashes for the data Raises: KeyError if the content doesn't have a data member. """ - hashes = hash_data( - content['data'], - {'sha1', 'sha1_git', 'sha256'}, - ) - - return hashes + return hash_data(content['data'], DEFAULT_ALGORITHMS) def _sort_key(entry): """The sorting key for tree entries""" if entry['type'] == 'dir': return entry['name'] + b'/' else: return entry['name'] @lru_cache() def _perms_to_bytes(perms): """Convert the perms value to its bytes representation""" oc = oct(perms)[2:] return oc.encode('ascii') def escape_newlines(snippet): """Escape the newlines present in snippet according to git rules. New lines in git manifests are escaped by indenting the next line by one space.""" if b'\n' in snippet: return b'\n '.join(snippet.split(b'\n')) else: return snippet def directory_identifier(directory): """Return the intrinsic identifier for a directory. A directory's identifier is the tree sha1 à la git of a directory listing, using the following algorithm, which is equivalent to the git algorithm for trees: 1. Entries of the directory are sorted using the name (or the name with '/' appended for directory entries) as key, in bytes order. 2. 
For each entry of the directory, the following bytes are output: - the octal representation of the permissions for the entry (stored in the 'perms' member), which is a representation of the entry type: b'100644' (int 33188) for files b'100755' (int 33261) for executable files b'120000' (int 40960) for symbolic links b'40000' (int 16384) for directories b'160000' (int 57344) for references to revisions - an ascii space (b'\x20') - the entry's name (as raw bytes), stored in the 'name' member - a null byte (b'\x00') - the 20 byte long identifier of the object pointed at by the entry, stored in the 'target' member: for files or executable files: their blob sha1_git for symbolic links: the blob sha1_git of a file containing the link destination for directories: their intrinsic identifier for revisions: their intrinsic identifier (Note that there is no separator between entries) """ components = [] for entry in sorted(directory['entries'], key=_sort_key): components.extend([ _perms_to_bytes(entry['perms']), b'\x20', entry['name'], b'\x00', identifier_to_bytes(entry['target']), ]) return identifier_to_str(hash_git_data(b''.join(components), 'tree')) def format_date(date): """Convert a date object into an UTC timestamp encoded as ascii bytes. Git stores timestamps as an integer number of seconds since the UNIX epoch. However, Software Heritage stores timestamps as an integer number of microseconds (postgres type "datetime with timezone"). Therefore, we print timestamps with no microseconds as integers, and timestamps with microseconds as floating point values. We elide the trailing zeroes from microsecond values, to "future-proof" our representation if we ever need more precision in timestamps. 
""" if not isinstance(date, dict): raise ValueError('format_date only supports dicts, %r received' % date) seconds = date.get('seconds', 0) microseconds = date.get('microseconds', 0) if not microseconds: return str(seconds).encode() else: float_value = ('%d.%06d' % (seconds, microseconds)) return float_value.rstrip('0').encode() @lru_cache() def format_offset(offset, negative_utc=None): """Convert an integer number of minutes into an offset representation. The offset representation is [+-]hhmm where: hh is the number of hours; mm is the number of minutes. A null offset is represented as +0000. """ if offset < 0 or offset == 0 and negative_utc: sign = '-' else: sign = '+' hours = abs(offset) // 60 minutes = abs(offset) % 60 t = '%s%02d%02d' % (sign, hours, minutes) return t.encode() def normalize_timestamp(time_representation): """Normalize a time representation for processing by Software Heritage This function supports a numeric timestamp (representing a number of seconds since the UNIX epoch, 1970-01-01 at 00:00 UTC), a datetime.datetime object (with timezone information), or a normalized Software Heritage time representation (idempotency). Args: time_representation: the representation of a timestamp Returns: a normalized dictionary with three keys - timestamp: a dict with two optional keys: - seconds: the integral number of seconds since the UNIX epoch - microseconds: the integral number of microseconds - offset: the timezone offset as a number of minutes relative to UTC - negative_utc: a boolean representing whether the offset is -0000 when offset = 0. 
""" if time_representation is None: return None negative_utc = False if isinstance(time_representation, dict): ts = time_representation['timestamp'] if isinstance(ts, dict): seconds = ts.get('seconds', 0) microseconds = ts.get('microseconds', 0) elif isinstance(ts, int): seconds = ts microseconds = 0 else: raise ValueError( 'normalize_timestamp received non-integer timestamp member:' ' %r' % ts) offset = time_representation['offset'] if 'negative_utc' in time_representation: negative_utc = time_representation['negative_utc'] elif isinstance(time_representation, datetime.datetime): seconds = int(time_representation.timestamp()) microseconds = time_representation.microsecond utcoffset = time_representation.utcoffset() if utcoffset is None: raise ValueError( 'normalize_timestamp received datetime without timezone: %s' % time_representation) # utcoffset is an integer number of minutes seconds_offset = utcoffset.total_seconds() offset = int(seconds_offset) // 60 elif isinstance(time_representation, int): seconds = time_representation microseconds = 0 offset = 0 else: raise ValueError( 'normalize_timestamp received non-integer timestamp:' ' %r' % time_representation) return { 'timestamp': { 'seconds': seconds, 'microseconds': microseconds, }, 'offset': offset, 'negative_utc': negative_utc, } def format_author(author): """Format the specification of an author. An author is either a byte string (passed unchanged), or a dict with three keys, fullname, name and email. If the fullname exists, return it; if it doesn't, we construct a fullname using the following heuristics: if the name value is None, we return the email in angle brackets, else, we return the name, a space, and the email in angle brackets. 
""" if isinstance(author, bytes) or author is None: return author if 'fullname' in author: return author['fullname'] ret = [] if author['name'] is not None: ret.append(author['name']) if author['email'] is not None: ret.append(b''.join([b'<', author['email'], b'>'])) return b' '.join(ret) def format_author_line(header, author, date_offset): """Format a an author line according to git standards. An author line has three components: - a header, describing the type of author (author, committer, tagger) - a name and email, which is an arbitrary bytestring - optionally, a timestamp with UTC offset specification The author line is formatted thus: `header` `name and email`[ `timestamp` `utc_offset`] The timestamp is encoded as a (decimal) number of seconds since the UNIX epoch (1970-01-01 at 00:00 UTC). As an extension to the git format, we support fractional timestamps, using a dot as the separator for the decimal part. The utc offset is a number of minutes encoded as '[+-]HHMM'. Note some tools can pass a negative offset corresponding to the UTC timezone ('-0000'), which is valid and is encoded as such. For convenience, this function returns the whole line with its trailing newline. Args: header: the header of the author line (one of 'author', 'committer', 'tagger') author: an author specification (dict with two bytes values: name and email, or byte value) date_offset: a normalized date/time representation as returned by `normalize_timestamp`. Returns: the newline-terminated byte string containing the author line """ ret = [header.encode(), b' ', escape_newlines(format_author(author))] date_offset = normalize_timestamp(date_offset) if date_offset is not None: date_f = format_date(date_offset['timestamp']) offset_f = format_offset(date_offset['offset'], date_offset['negative_utc']) ret.extend([b' ', date_f, b' ', offset_f]) ret.append(b'\n') return b''.join(ret) def revision_identifier(revision): """Return the intrinsic identifier for a revision. 
The fields used for the revision identifier computation are: - directory - parents - author - author_date - committer - committer_date - metadata -> extra_headers - message A revision's identifier is the 'git'-checksum of a commit manifest constructed as follows (newlines are a single ASCII newline character): ``` tree [for each parent in parents] parent [end for each parents] author committer [for each key, value in extra_headers] [end for each extra_headers] ``` The directory identifier is the ascii representation of its hexadecimal encoding. Author and committer are formatted with the `format_author` function. Dates are formatted with the `format_date_offset` function. Extra headers are an ordered list of [key, value] pairs. Keys are strings and get encoded to utf-8 for identifier computation. Values are either byte strings, unicode strings (that get encoded to utf-8), or integers (that get encoded to their utf-8 decimal representation). Multiline extra header values are escaped by indenting the continuation lines with one ascii space. If the message is None, the manifest ends with the last header. Else, the message is appended to the headers after an empty line. The checksum of the full manifest is computed using the 'commit' git object type. 
""" components = [ b'tree ', identifier_to_str(revision['directory']).encode(), b'\n', ] for parent in revision['parents']: if parent: components.extend([ b'parent ', identifier_to_str(parent).encode(), b'\n', ]) components.extend([ format_author_line('author', revision['author'], revision['date']), format_author_line('committer', revision['committer'], revision['committer_date']), ]) # Handle extra headers metadata = revision.get('metadata') if not metadata: metadata = {} for key, value in metadata.get('extra_headers', []): # Integer values: decimal representation if isinstance(value, int): value = str(value).encode('utf-8') # Unicode string values: utf-8 encoding if isinstance(value, str): value = value.encode('utf-8') # encode the key to utf-8 components.extend([key.encode('utf-8'), b' ', escape_newlines(value), b'\n']) if revision['message'] is not None: components.extend([b'\n', revision['message']]) commit_raw = b''.join(components) return identifier_to_str(hash_git_data(commit_raw, 'commit')) def target_type_to_git(target_type): """Convert a software heritage target type to a git object type""" return { 'content': b'blob', 'directory': b'tree', 'revision': b'commit', 'release': b'tag', }[target_type] def release_identifier(release): """Return the intrinsic identifier for a release.""" components = [ b'object ', identifier_to_str(release['target']).encode(), b'\n', b'type ', target_type_to_git(release['target_type']), b'\n', b'tag ', release['name'], b'\n', ] if 'author' in release and release['author']: components.append( format_author_line('tagger', release['author'], release['date']) ) if release['message'] is not None: components.extend([b'\n', release['message']]) return identifier_to_str(hash_git_data(b''.join(components), 'tag')) diff --git a/swh/model/tests/test_git.py b/swh/model/tests/test_git.py index 3c233c3..0bf81bc 100644 --- a/swh/model/tests/test_git.py +++ b/swh/model/tests/test_git.py @@ -1,722 +1,734 @@ # Copyright (C) 2015-2017 The 
Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import shutil import subprocess import tempfile import unittest from nose.plugins.attrib import attr from nose.tools import istest from swh.model import git class GitHashlib(unittest.TestCase): def setUp(self): self.tree_data = b''.join([b'40000 barfoo\0', bytes.fromhex('c3020f6bf135a38c6df' '3afeb5fb38232c5e07087'), b'100644 blah\0', bytes.fromhex('63756ef0df5e4f10b6efa' '33cfe5c758749615f20'), b'100644 hello\0', bytes.fromhex('907b308167f0880fb2a' '5c0e1614bb0c7620f9dc3')]) self.commit_data = """tree 1c61f7259dcb770f46b194d941df4f08ff0a3970 author Antoine R. Dumont (@ardumont) 1444054085 +0200 committer Antoine R. Dumont (@ardumont) 1444054085 +0200 initial """.encode('utf-8') # NOQA self.tag_data = """object 24d012aaec0bc5a4d2f62c56399053d6cc72a241 type commit tag 0.0.1 tagger Antoine R. 
Dumont (@ardumont) 1444225145 +0200 blah """.encode('utf-8') # NOQA self.checksums = { 'tree_sha1_git': bytes.fromhex('ac212302c45eada382b27bfda795db' '121dacdb1c'), 'commit_sha1_git': bytes.fromhex('e960570b2e6e2798fa4cfb9af2c399' 'd629189653'), 'tag_sha1_git': bytes.fromhex('bc2b99ba469987bcf1272c189ed534' 'e9e959f120'), } @istest def compute_directory_git_sha1(self): # given dirpath = 'some-dir-path' hashes = { dirpath: [{'perms': git.GitPerm.TREE, 'type': git.GitType.TREE, 'name': b'barfoo', 'sha1_git': bytes.fromhex('c3020f6bf135a38c6df' '3afeb5fb38232c5e07087')}, {'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'name': b'hello', 'sha1_git': bytes.fromhex('907b308167f0880fb2a' '5c0e1614bb0c7620f9dc3')}, {'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'name': b'blah', 'sha1_git': bytes.fromhex('63756ef0df5e4f10b6efa' '33cfe5c758749615f20')}] } # when checksum = git.compute_directory_git_sha1(dirpath, hashes) # then self.assertEqual(checksum, self.checksums['tree_sha1_git']) @istest def compute_revision_sha1_git(self): # given tree_hash = bytes.fromhex('1c61f7259dcb770f46b194d941df4f08ff0a3970') revision = { 'author': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'date': { 'timestamp': 1444054085, 'offset': 120, }, 'committer': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'committer_date': { 'timestamp': 1444054085, 'offset': 120, }, 'message': b'initial\n', 'type': 'tar', 'directory': tree_hash, 'parents': [], } # when checksum = git.compute_revision_sha1_git(revision) # then self.assertEqual(checksum, self.checksums['commit_sha1_git']) @istest def compute_release_sha1_git(self): # given revision_hash = bytes.fromhex('24d012aaec0bc5a4d2f62c56399053' 'd6cc72a241') release = { 'name': b'0.0.1', 'author': { 'name': b'Antoine R. 
Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'date': { 'timestamp': 1444225145, 'offset': 120, }, 'message': b'blah\n', 'target_type': 'revision', 'target': revision_hash, } # when checksum = git.compute_release_sha1_git(release) # then self.assertEqual(checksum, self.checksums['tag_sha1_git']) @attr('fs') class ComputeBlobMetadata(unittest.TestCase): @istest def compute_blob_metadata__special_file_returns_nothing(self): # prepare tmp_root_path = tempfile.mkdtemp().encode('utf-8') name = b'fifo-file' path = os.path.join(tmp_root_path, name) # given os.mkfifo(path) # when actual_metadata = git.compute_blob_metadata(path) # then expected_metadata = { 'sha1': b'\xda9\xa3\xee^kK\r2U\xbf\xef\x95`\x18\x90\xaf\xd8\x07\t', 'sha1_git': b'\xe6\x9d\xe2\x9b\xb2\xd1\xd6CK\x8b)\xaewZ\xd8\xc2' b'\xe4\x8cS\x91', 'sha256': b"\xe3\xb0\xc4B\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99o" b"\xb9$'\xaeA\xe4d\x9b\x93L\xa4\x95\x99\x1bxR\xb8U", + 'blake2s256': b'i!z0y\x90\x80\x94\xe1\x11!\xd0B5J|\x1fU\xb6H,\xa1' + b'\xa5\x1e\x1b%\r\xfd\x1e\xd0\xee\xf9', 'perms': git.GitPerm.BLOB, 'path': path, 'name': name, 'type': git.GitType.BLOB, 'length': 0 } self.assertEquals(actual_metadata, expected_metadata) # cleanup shutil.rmtree(tmp_root_path) @attr('fs') class GitHashWalkArborescenceTree: """Root class to ease walk and git hash testing without side-effecty problems. 
""" def setUp(self): super().setUp() self.tmp_root_path = tempfile.mkdtemp().encode('utf-8') self.maxDiff = None start_path = os.path.dirname(__file__).encode('utf-8') sample_folder = os.path.join(start_path, b'../../../..', b'swh-storage-testdata', b'dir-folders', b'sample-folder.tgz') self.root_path = os.path.join(self.tmp_root_path, b'sample-folder') # uncompress the sample folder subprocess.check_output( ['tar', 'xvf', sample_folder, '-C', self.tmp_root_path]) def tearDown(self): if os.path.exists(self.tmp_root_path): shutil.rmtree(self.tmp_root_path) class GitHashFromScratch(GitHashWalkArborescenceTree, unittest.TestCase): """Test the main `walk_and_compute_sha1_from_directory` algorithm that scans and compute the disk for checksums. """ @istest def walk_and_compute_sha1_from_directory(self): # make a temporary arborescence tree to hash without ignoring anything # same as previous behavior walk0 = git.walk_and_compute_sha1_from_directory(self.tmp_root_path) keys0 = list(walk0.keys()) path_excluded = os.path.join(self.tmp_root_path, b'sample-folder', b'foo') self.assertTrue(path_excluded in keys0) # it is not excluded here # make the same temporary arborescence tree to hash with ignoring one # folder foo walk1 = git.walk_and_compute_sha1_from_directory( self.tmp_root_path, dir_ok_fn=lambda dirpath: b'sample-folder/foo' not in dirpath) keys1 = list(walk1.keys()) self.assertTrue(path_excluded not in keys1) # remove the keys that can't be the same (due to hash definition) # Those are the top level folders keys_diff = [self.tmp_root_path, os.path.join(self.tmp_root_path, b'sample-folder'), git.ROOT_TREE_KEY] for k in keys_diff: self.assertNotEquals(walk0[k], walk1[k]) # The remaining keys (bottom path) should have exactly the same hashes # as before keys = set(keys1) - set(keys_diff) actual_walk1 = {} for k in keys: self.assertEquals(walk0[k], walk1[k]) actual_walk1[k] = walk1[k] expected_checksums = { - os.path.join(self.tmp_root_path, 
b'sample-folder/empty-folder'): [], # noqa - os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'): [{ # noqa - 'type': git.GitType.BLOB, # noqa + os.path.join(self.tmp_root_path, b'sample-folder/empty-folder'): [], # noqa + os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'): [{ # noqa + 'type': git.GitType.BLOB, # noqa 'length': 72, - 'sha256': b'=\xb5\xae\x16\x80U\xbc\xd9:M\x08(]\xc9\x9f\xfe\xe2\x883\x03\xb2?\xac^\xab\x85\x02s\xa8\xeaUF', # noqa - 'name': b'another-quote.org', # noqa - 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo/another-quote.org'), # noqa - 'perms': git.GitPerm.BLOB, # noqa - 'sha1': b'\x90\xa6\x13\x8b\xa5\x99\x15&\x1e\x17\x99H8j\xa1\xcc*\xa9"\n', # noqa - 'sha1_git': b'\x136\x93\xb1%\xba\xd2\xb4\xac1\x855\xb8I\x01\xeb\xb1\xf6\xb68'}], # noqa - os.path.join(self.tmp_root_path, b'sample-folder/bar'): [{ # noqa - 'type': git.GitType.TREE, # noqa - 'perms': git.GitPerm.TREE, # noqa - 'name': b'barfoo', # noqa - 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'), # noqa - 'sha1_git': b'\xc3\x02\x0fk\xf15\xa3\x8cm\xf3\xaf\xeb_\xb3\x822\xc5\xe0p\x87'}]} # noqa + 'sha256': b'=\xb5\xae\x16\x80U\xbc\xd9:M\x08(]\xc9\x9f\xfe\xe2\x883\x03\xb2?\xac^\xab\x85\x02s\xa8\xeaUF', # noqa + 'name': b'another-quote.org', # noqa + 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo/another-quote.org'), # noqa + 'perms': git.GitPerm.BLOB, # noqa + 'sha1': b'\x90\xa6\x13\x8b\xa5\x99\x15&\x1e\x17\x99H8j\xa1\xcc*\xa9"\n', # noqa + 'blake2s256': b'\xd2l\x1c\xad\x82\xd4=\xf0\xbf\xfa^{\xe1\x1a`\xe3J\xdb\x85\xa2\x18\xb43\xcb\xceRx\xb1\x0b\x95O\xe8', # noqa + 'sha1_git': b'\x136\x93\xb1%\xba\xd2\xb4\xac1\x855\xb8I\x01\xeb\xb1\xf6\xb68'}], # noqa + os.path.join(self.tmp_root_path, b'sample-folder/bar'): [{ # noqa + 'type': git.GitType.TREE, # noqa + 'perms': git.GitPerm.TREE, # noqa + 'name': b'barfoo', # noqa + 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'), # noqa + 
'sha1_git': b'\xc3\x02\x0fk\xf15\xa3\x8cm\xf3\xaf\xeb_\xb3\x822\xc5\xe0p\x87'}]} # noqa self.assertEquals(actual_walk1, expected_checksums) @istest def walk_and_compute_sha1_from_directory_without_root_tree(self): # compute the full checksums expected_hashes = git.walk_and_compute_sha1_from_directory( self.tmp_root_path) # except for the key on that round actual_hashes = git.walk_and_compute_sha1_from_directory( self.tmp_root_path, with_root_tree=False) # then, removing the root tree hash from the first round del expected_hashes[git.ROOT_TREE_KEY] # should give us the same checksums as the second round self.assertEquals(actual_hashes, expected_hashes) class WithSampleFolderChecksums: def setUp(self): super().setUp() self.rootkey = b'/tmp/tmp7w3oi_j8' + self.objects = { b'/tmp/tmp7w3oi_j8': { 'children': {b'/tmp/tmp7w3oi_j8/sample-folder'}, 'checksums': { 'type': git.GitType.TREE, 'name': b'tmp7w3oi_j8', 'sha1_git': b'\xa7A\xfcM\x96\x8c{\x8e<\x94\xff\x86\xe7\x04\x80\xc5\xc7\xe5r\xa9', # noqa 'path': b'/tmp/tmp7w3oi_j8', 'perms': git.GitPerm.TREE }, }, b'/tmp/tmp7w3oi_j8/sample-folder': { 'children': { b'/tmp/tmp7w3oi_j8/sample-folder/empty-folder', b'/tmp/tmp7w3oi_j8/sample-folder/link-to-binary', b'/tmp/tmp7w3oi_j8/sample-folder/link-to-another-quote', b'/tmp/tmp7w3oi_j8/sample-folder/link-to-foo', b'/tmp/tmp7w3oi_j8/sample-folder/some-binary', b'/tmp/tmp7w3oi_j8/sample-folder/bar', b'/tmp/tmp7w3oi_j8/sample-folder/foo', }, 'checksums': { 'type': git.GitType.TREE, 'name': b'sample-folder', 'sha1_git': b'\xe8\xb0\xf1Fj\xf8`\x8c\x8a?\xb9\x87\x9d\xb1r\xb8\x87\xe8\x07Y', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder', 'perms': git.GitPerm.TREE} }, b'/tmp/tmp7w3oi_j8/sample-folder/empty-folder': { 'children': {}, 'checksums': { 'type': git.GitType.TREE, 'name': b'empty-folder', 'sha1_git': b'K\x82]\xc6B\xcbn\xb9\xa0`\xe5K\xf8\xd6\x92\x88\xfb\xeeI\x04', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/empty-folder', 'perms': git.GitPerm.TREE } }, 
b'/tmp/tmp7w3oi_j8/sample-folder/link-to-binary': { 'checksums': { 'name': b'link-to-binary', 'sha1': b'\xd0$\x87\x14\x94\x8b:H\xa2T8#*o\x99\xf01\x8fY\xf1', # noqa 'data': b'some-binary', 'sha1_git': b'\xe8kE\xe58\xd9\xb6\x88\x8c\x96\x9c\x89\xfb\xd2*\x85\xaa\x0e\x03f', # noqa + 'blake2s256': b'\x9c\xe1\x8b\x1a\xde\xcb3\xf8\x91\xca6fM\xa6v\xe1,w,\xc1\x93w\x8a\xac\x9a\x13{\x8d\xc5\x83K\x9b', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/link-to-binary', 'sha256': b'\x14\x12n\x97\xd8?}&\x1cZh\x89\xce\xe76\x19w\x0f\xf0\x9e@\xc5I\x86\x85\xab\xa7E\xbe\x88.\xff', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 11 } }, b'/tmp/tmp7w3oi_j8/sample-folder/link-to-another-quote': { 'checksums': { 'name': b'link-to-another-quote', 'sha1': b'\xcb\xee\xd1^yY\x9c\x90\xdes\x83\xf4 \xfe\xd7\xac\xb4\x8e\xa1q', # noqa 'data': b'bar/barfoo/another-quote.org', 'sha1_git': b'}\\\x08\x11\x1e!\xc8\xa9\xf7\x15@\x93\x99\x98U\x16\x837_\xad', # noqa + 'blake2s256': b"-\x0es\xce\xa0\x1b\xa9I\xc1\x02-\xc1\x0c\x8aC\xe6a\x80c\x96b\xe5\xdc'7\xb8C8/{\x19\x10", # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/link-to-another-quote', # noqa 'sha256': b'\xe6\xe1}\x07\x93\xaau\n\x04@\xeb\x9a\xd5\xb8\x0b%\x80vc~\xf0\xfbh\xf3\xac.Y\xe4\xb9\xac;\xa6', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 28 } }, b'/tmp/tmp7w3oi_j8/sample-folder/link-to-foo': { 'checksums': { 'name': b'link-to-foo', 'sha1': b'\x0b\xee\xc7\xb5\xea?\x0f\xdb\xc9]\r\xd4\x7f<[\xc2u\xda\x8a3', # noqa 'data': b'foo', 'sha1_git': b'\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c', # noqa + 'blake2s256': b'\x08\xd6\xca\xd8\x80u\xde\x8f\x19-\xb0\x97W=\x0e\x82\x94\x11\xcd\x91\xebn\xc6^\x8f\xc1l\x01~\xdf\xdbt', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/link-to-foo', 'sha256': b',&\xb4kh\xff\xc6\x8f\xf9\x9bE<\x1d0A4\x13B-pd\x83\xbf\xa0\xf9\x8a^\x88bf\xe7\xae', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 3 } }, b'/tmp/tmp7w3oi_j8/sample-folder/some-binary': { 
'checksums': { 'name': b'some-binary', 'sha1': b'\x0b\xbc\x12\xd7\xf4\xa2\xa1[\x14=\xa8F\x17\xd9\\\xb2#\xc9\xb2<', # noqa 'sha1_git': b'hv\x95y\xc3\xea\xad\xbeUSy\xb9\xc3S\x8ef(\xba\xe1\xeb', # noqa + 'blake2s256': b"\x92.\x0fp\x15\x03R\x12I[\t\x0c'WsW\xa7@\xdd\xd7{\x0b\x9e\x0c\xd2;T\x80\xc0z\x18\xc6", # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/some-binary', 'sha256': b'\xba\xc6P\xd3Jv8\xbb\n\xebSBdm$\xe3\xb9\xadkD\xc9\xb3\x83b\x1f\xaaH+\x99\n6}', # noqa 'perms': git.GitPerm.EXEC, 'type': git.GitType.BLOB, 'length': 5} }, b'/tmp/tmp7w3oi_j8/sample-folder/bar': { 'children': {b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo'}, 'checksums': {'type': git.GitType.TREE, 'name': b'bar', 'sha1_git': b'<\x1fW\x83\x94\xf4b?t\xa0\xba\x7f\xe7ar\x9fY\xfcn\xc4', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/bar', 'perms': git.GitPerm.TREE}, }, b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo': { 'children': {b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo/another-quote.org'}, # noqa 'checksums': {'type': git.GitType.TREE, 'name': b'barfoo', 'sha1_git': b'\xc3\x02\x0fk\xf15\xa3\x8cm\xf3\xaf\xeb_\xb3\x822\xc5\xe0p\x87', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo', # noqa 'perms': git.GitPerm.TREE}, }, b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo/another-quote.org': { 'checksums': {'name': b'another-quote.org', 'sha1': b'\x90\xa6\x13\x8b\xa5\x99\x15&\x1e\x17\x99H8j\xa1\xcc*\xa9"\n', # noqa 'sha1_git': b'\x136\x93\xb1%\xba\xd2\xb4\xac1\x855\xb8I\x01\xeb\xb1\xf6\xb68', # noqa + 'blake2s256': b'\xd2l\x1c\xad\x82\xd4=\xf0\xbf\xfa^{\xe1\x1a`\xe3J\xdb\x85\xa2\x18\xb43\xcb\xceRx\xb1\x0b\x95O\xe8', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo/another-quote.org', # noqa 'sha256': b'=\xb5\xae\x16\x80U\xbc\xd9:M\x08(]\xc9\x9f\xfe\xe2\x883\x03\xb2?\xac^\xab\x85\x02s\xa8\xeaUF', # noqa 'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'length': 72} }, b'/tmp/tmp7w3oi_j8/sample-folder/foo': { 'children': { b'/tmp/tmp7w3oi_j8/sample-folder/foo/barfoo', 
b'/tmp/tmp7w3oi_j8/sample-folder/foo/rel-link-to-barfoo', b'/tmp/tmp7w3oi_j8/sample-folder/foo/quotes.md', }, 'checksums': {'type': git.GitType.TREE, 'name': b'foo', 'sha1_git': b'+A\xc4\x0f\r\x1f\xbf\xfc\xba\x12I}\xb7\x1f\xba\x83\xfc\xca\x96\xe5', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/foo', 'perms': git.GitPerm.TREE} }, b'/tmp/tmp7w3oi_j8/sample-folder/foo/barfoo': { 'checksums': {'name': b'barfoo', 'sha1': b'\x90W\xeem\x01bPn\x01\xc4\xd9\xd5E\x9az\xdd\x1f\xed\xac7', # noqa 'data': b'bar/barfoo', 'sha1_git': b'\x81\x85\xdf\xb2\xc0\xc2\xc5\x97\xd1ou\xa8\xa0\xc3vhV|=~', # noqa + 'blake2s256': b'\xe1%/,\xaaJre\x936\xedZ\xd6\x03\xd3>\x83\xc8;\xa4\xe1KF\xf9\xb8\xa8\x0b', # noqa FIXME(review): digest truncated to 27 bytes by extraction; a blake2s-256 digest is 32 bytes -- restore from the original commit 'path': b'/tmp/tmp7w3oi_j8/sample-folder/foo/barfoo', 'sha256': b')\xad?W%2\x1b\x94\x032\xc7\x8e@6\x01\xaf\xffa\xda\xea\x85\xe9\xc8\x0bJpc\xb6\x88~\xadh', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 10} }, b'/tmp/tmp7w3oi_j8/sample-folder/foo/quotes.md': { 'checksums': {'name': b'quotes.md', 'sha1': b'\x1b\xf0\xbbr\x1a\xc9,\x18\xa1\x9b\x13\xc0\xeb=t\x1c\xbf\xad\xeb\xfc', # noqa 'sha1_git': b'|LW\xba\x9f\xf4\x96\xad\x17\x9b\x8fe\xb1\xd2\x86\xed\xbd\xa3L\x9a', # noqa FIXME(review): this entry's 'blake2s256' value was lost in extraction 'path': b'/tmp/tmp7w3oi_j8/sample-folder/foo/quotes.md', 'sha256': b'\xca\xca\x94*\xed\xa7\xb3\x08\x85\x9e\xb5o\x90\x9e\xc9m\x07\xa4\x99I\x16\x90\xc4S\xf7;\x98\x00\xa9;\x16Y', # noqa 'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'length': 66} }, } class TestObjectsPerType(WithSampleFolderChecksums, unittest.TestCase): @istest def objects_per_type_blob(self): # given expected_blobs = [ { 'name': b'another-quote.org', 'sha1': b'\x90\xa6\x13\x8b\xa5\x99\x15&\x1e\x17\x99H8j\xa1\xcc*\xa9"\n', # noqa 'sha1_git': b'\x136\x93\xb1%\xba\xd2\xb4\xac1\x855\xb8I\x01\xeb\xb1\xf6\xb68', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo/another-quote.org', # noqa 'sha256': b'=\xb5\xae\x16\x80U\xbc\xd9:M\x08(]\xc9\x9f\xfe\xe2\x883\x03\xb2?\xac^\xab\x85\x02s\xa8\xeaUF', # noqa 'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'length': 72 }, { 'name': b'link-to-binary', 'sha1': b'\xd0$\x87\x14\x94\x8b:H\xa2T8#*o\x99\xf01\x8fY\xf1', 'data': b'some-binary', 'sha1_git': b'\xe8kE\xe58\xd9\xb6\x88\x8c\x96\x9c\x89\xfb\xd2*\x85\xaa\x0e\x03f', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/link-to-binary', 'sha256': b'\x14\x12n\x97\xd8?}&\x1cZh\x89\xce\xe76\x19w\x0f\xf0\x9e@\xc5I\x86\x85\xab\xa7E\xbe\x88.\xff', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 11
}, { 'name': b'link-to-another-quote', 'sha1': b'\xcb\xee\xd1^yY\x9c\x90\xdes\x83\xf4 \xfe\xd7\xac\xb4\x8e\xa1q', # noqa 'data': b'bar/barfoo/another-quote.org', 'sha1_git': b'}\\\x08\x11\x1e!\xc8\xa9\xf7\x15@\x93\x99\x98U\x16\x837_\xad', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/link-to-another-quote', # noqa 'sha256': b'\xe6\xe1}\x07\x93\xaau\n\x04@\xeb\x9a\xd5\xb8\x0b%\x80vc~\xf0\xfbh\xf3\xac.Y\xe4\xb9\xac;\xa6', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 28 }, { 'name': b'link-to-foo', 'sha1': b'\x0b\xee\xc7\xb5\xea?\x0f\xdb\xc9]\r\xd4\x7f<[\xc2u\xda\x8a3', # noqa 'data': b'foo', 'sha1_git': b'\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/link-to-foo', 'sha256': b',&\xb4kh\xff\xc6\x8f\xf9\x9bE<\x1d0A4\x13B-pd\x83\xbf\xa0\xf9\x8a^\x88bf\xe7\xae', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 3 }, { 'name': b'some-binary', 'sha1': b'\x0b\xbc\x12\xd7\xf4\xa2\xa1[\x14=\xa8F\x17\xd9\\\xb2#\xc9\xb2<', # noqa 'sha1_git': b'hv\x95y\xc3\xea\xad\xbeUSy\xb9\xc3S\x8ef(\xba\xe1\xeb', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/some-binary', 'sha256': b'\xba\xc6P\xd3Jv8\xbb\n\xebSBdm$\xe3\xb9\xadkD\xc9\xb3\x83b\x1f\xaaH+\x99\n6}', # noqa 'perms': git.GitPerm.EXEC, 'type': git.GitType.BLOB, 'length': 5 }, { 'name': b'barfoo', 'sha1': b'\x90W\xeem\x01bPn\x01\xc4\xd9\xd5E\x9az\xdd\x1f\xed\xac7', # noqa 'data': b'bar/barfoo', 'sha1_git': b'\x81\x85\xdf\xb2\xc0\xc2\xc5\x97\xd1ou\xa8\xa0\xc3vhV|=~', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/foo/barfoo', 'sha256': b')\xad?W%2\x1b\x94\x032\xc7\x8e@6\x01\xaf\xffa\xda\xea\x85\xe9\xc8\x0bJpc\xb6\x88~\xadh', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 10 }, { 'name': b'rel-link-to-barfoo', 'sha1': b'\xdcQ"\x1d0\x8f:\xeb\'T\xdbH9\x1b\x85h|(i\xf4', 'data': b'../bar/barfoo', 'sha1_git': b'\xac\xac2m\xddc\xb0\xbcp\x84\x06Y\xd4\xacCa\x94\x84\xe6\x9f', # noqa 'path': 
b'/tmp/tmp7w3oi_j8/sample-folder/foo/rel-link-to-barfoo', # noqa 'sha256': b'\x80\x07\xd2\r\xb2\xaf@C_B\xdd\xefK\x8a\xd7k\x80\xad\xbe\xc2k$\x9f\xdf\x04s5?\x8d\x99\xdf\x08', # noqa 'perms': git.GitPerm.LINK, 'type': git.GitType.BLOB, 'length': 13 }, { 'name': b'quotes.md', 'sha1': b'\x1b\xf0\xbbr\x1a\xc9,\x18\xa1\x9b\x13\xc0\xeb=t\x1c\xbf\xad\xeb\xfc', # noqa 'sha1_git': b'|LW\xba\x9f\xf4\x96\xad\x17\x9b\x8fe\xb1\xd2\x86\xed\xbd\xa3L\x9a', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/foo/quotes.md', 'sha256': b'\xca\xca\x94*\xed\xa7\xb3\x08\x85\x9e\xb5o\x90\x9e\xc9m\x07\xa4\x99I\x16\x90\xc4S\xf7;\x98\x00\xa9;\x16Y', # noqa 'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'length': 66 }, ] expected_sha1_blobs = set( ((c['sha1_git'], git.GitType.BLOB) for c in expected_blobs)) # when actual_sha1_blobs = set( ((c['sha1_git'], c['type']) for c in git.objects_per_type(git.GitType.BLOB, self.objects))) # then self.assertEqual(actual_sha1_blobs, expected_sha1_blobs) @istest def objects_per_type_tree(self): def _children_hashes(path, objects=self.objects): return set((c['sha1_git'] for c in git.children_hashes( objects[path]['children'], objects))) expected_trees = [ { 'type': git.GitType.TREE, 'name': b'tmp7w3oi_j8', 'sha1_git': b'\xa7A\xfcM\x96\x8c{\x8e<\x94\xff\x86\xe7\x04\x80\xc5\xc7\xe5r\xa9', # noqa 'path': b'/tmp/tmp7w3oi_j8', 'perms': git.GitPerm.TREE, # we only add children's sha1_git here, in reality, # it's a full dict of hashes. 
'children': _children_hashes(b'/tmp/tmp7w3oi_j8') }, { 'type': git.GitType.TREE, 'name': b'sample-folder', 'sha1_git': b'\xe8\xb0\xf1Fj\xf8`\x8c\x8a?\xb9\x87\x9d\xb1r\xb8\x87\xe8\x07Y', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder', 'perms': git.GitPerm.TREE, 'children': _children_hashes( b'/tmp/tmp7w3oi_j8/sample-folder') }, { 'type': git.GitType.TREE, 'name': b'empty-folder', 'sha1_git': b'K\x82]\xc6B\xcbn\xb9\xa0`\xe5K\xf8\xd6\x92\x88\xfb\xeeI\x04', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/empty-folder', 'perms': git.GitPerm.TREE, 'children': _children_hashes( b'/tmp/tmp7w3oi_j8/sample-folder/empty-folder') }, { 'type': git.GitType.TREE, 'name': b'bar', 'sha1_git': b'<\x1fW\x83\x94\xf4b?t\xa0\xba\x7f\xe7ar\x9fY\xfcn\xc4', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/bar', 'perms': git.GitPerm.TREE, 'children': _children_hashes( b'/tmp/tmp7w3oi_j8/sample-folder/bar') }, { 'type': git.GitType.TREE, 'name': b'barfoo', 'sha1_git': b'\xc3\x02\x0fk\xf15\xa3\x8cm\xf3\xaf\xeb_\xb3\x822\xc5\xe0p\x87', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo', 'perms': git.GitPerm.TREE, 'children': _children_hashes( b'/tmp/tmp7w3oi_j8/sample-folder/bar/barfoo'), }, { 'type': git.GitType.TREE, 'name': b'foo', 'sha1_git': b'+A\xc4\x0f\r\x1f\xbf\xfc\xba\x12I}\xb7\x1f\xba\x83\xfc\xca\x96\xe5', # noqa 'path': b'/tmp/tmp7w3oi_j8/sample-folder/foo', 'perms': git.GitPerm.TREE, 'children': _children_hashes( b'/tmp/tmp7w3oi_j8/sample-folder/foo') }, ] expected_sha1_trees = list( ((c['sha1_git'], git.GitType.TREE, c['children']) for c in expected_trees)) # when actual_sha1_trees = list( ((c['sha1_git'], c['type'], _children_hashes(c['path'])) for c in git.objects_per_type(git.GitType.TREE, self.objects))) self.assertEquals(len(actual_sha1_trees), len(expected_sha1_trees)) for e in actual_sha1_trees: self.assertTrue(e in expected_sha1_trees) class TestComputeHashesFromDirectory(WithSampleFolderChecksums, GitHashWalkArborescenceTree, unittest.TestCase): def 
__adapt_object_to_rootpath(self, rootpath): def _replace_slash(s, rootpath=self.rootkey, newrootpath=rootpath): return s.replace(rootpath, newrootpath) def _update_children(children): return set((_replace_slash(c) for c in children)) # given expected_objects = {} for path, v in self.objects.items(): p = _replace_slash(path) v['checksums']['path'] = _replace_slash(v['checksums']['path']) v['checksums']['name'] = os.path.basename(v['checksums']['path']) if 'children' in v: v['children'] = _update_children(v['children']) expected_objects[p] = v return expected_objects @istest def compute_hashes_from_directory_default(self): # given expected_objects = self.__adapt_object_to_rootpath(self.tmp_root_path) # when actual_hashes = git.compute_hashes_from_directory(self.tmp_root_path) # then self.assertEquals(actual_hashes, expected_objects) @istest def compute_hashes_from_directory_no_empty_folder(self): # given def _replace_slash(s, rootpath=self.rootkey, newrootpath=self.tmp_root_path): return s.replace(rootpath, newrootpath) expected_objects = self.__adapt_object_to_rootpath(self.tmp_root_path) # when actual_hashes = git.compute_hashes_from_directory( self.tmp_root_path, remove_empty_folder=True) # then # One folder less, so plenty of hashes are different now self.assertNotEquals(actual_hashes, expected_objects) keys = set(actual_hashes.keys()) assert (b'/tmp/tmp7w3oi_j8/sample-folder/empty-folder' in self.objects.keys()) new_empty_folder_path = _replace_slash( b'/tmp/tmp7w3oi_j8/sample-folder/empty-folder') self.assertNotIn(new_empty_folder_path, keys) self.assertEqual(len(keys), len(expected_objects.keys()) - 1) @istest def compute_hashes_from_directory_ignore_some_folder(self): # given def _replace_slash(s, rootpath=self.rootkey, newrootpath=self.tmp_root_path): return s.replace(rootpath, newrootpath) ignore_path = b'/tmp/tmp7w3oi_j8/sample-folder' # when actual_hashes = git.compute_hashes_from_directory( self.tmp_root_path, dir_ok_fn=lambda dirpath: b'sample-folder' 
not in dirpath) # then # One entry less, so plenty of hashes are different now keys = set(actual_hashes.keys()) assert ignore_path in self.objects.keys() new_ignore_path = _replace_slash(ignore_path) self.assertNotIn(new_ignore_path, keys) # top level directory contains the folder to ignore self.assertEqual(len(keys), 1) diff --git a/swh/model/tests/test_hashutil.py b/swh/model/tests/test_hashutil.py index 758e24e..c9f47e1 100644 --- a/swh/model/tests/test_hashutil.py +++ b/swh/model/tests/test_hashutil.py @@ -1,241 +1,243 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import io import tempfile import unittest from nose.tools import istest from unittest.mock import patch from swh.model import hashutil class Hashutil(unittest.TestCase): def setUp(self): self.data = b'1984\n' self.hex_checksums = { 'sha1': '62be35bf00ff0c624f4a621e2ea5595a049e0731', 'sha1_git': '568aaf43d83b2c3df8067f3bedbb97d83260be6d', 'sha256': '26602113b4b9afd9d55466b08580d3c2' '4a9b50ee5b5866c0d91fab0e65907311', + 'blake2s256': '63cfb259e1fdb485bc5c55749697a6b21ef31fb7445f6c78a' + 'c9422f9f2dc8906', } self.checksums = { type: bytes.fromhex(cksum) for type, cksum in self.hex_checksums.items() } self.git_hex_checksums = { 'blob': self.hex_checksums['sha1_git'], 'tree': '5b2e883aa33d2efab98442693ea4dd5f1b8871b0', 'commit': '79e4093542e72f0fcb7cbd75cb7d270f9254aa8f', 'tag': 'd6bf62466f287b4d986c545890716ce058bddf67', } self.git_checksums = { type: bytes.fromhex(cksum) for type, cksum in self.git_hex_checksums.items() } @istest def hash_data(self): checksums = hashutil.hash_data(self.data) self.assertEqual(checksums, self.checksums) @istest def hash_data_unknown_hash(self): with self.assertRaises(ValueError) as cm: hashutil.hash_data(self.data, ['unknown-hash']) self.assertIn('Unexpected hashing 
algorithm', cm.exception.args[0]) self.assertIn('unknown-hash', cm.exception.args[0]) @istest def hash_git_data(self): checksums = { git_type: hashutil.hash_git_data(self.data, git_type) for git_type in self.git_checksums } self.assertEqual(checksums, self.git_checksums) @istest def hash_git_data_unknown_git_type(self): with self.assertRaises(ValueError) as cm: hashutil.hash_git_data(self.data, 'unknown-git-type') self.assertIn('Unexpected git object type', cm.exception.args[0]) self.assertIn('unknown-git-type', cm.exception.args[0]) @istest def hash_file(self): fobj = io.BytesIO(self.data) checksums = hashutil.hash_file(fobj, length=len(self.data)) self.assertEqual(checksums, self.checksums) @istest def hash_file_missing_length(self): fobj = io.BytesIO(self.data) with self.assertRaises(ValueError) as cm: hashutil.hash_file(fobj, algorithms=['sha1_git']) self.assertIn('Missing length', cm.exception.args[0]) @istest def hash_path(self): with tempfile.NamedTemporaryFile(delete=False) as f: f.write(self.data) f.close() hashes = hashutil.hash_path(f.name) self.checksums['length'] = len(self.data) self.assertEquals(self.checksums, hashes) @istest def hash_to_hex(self): for type in self.checksums: hex = self.hex_checksums[type] hash = self.checksums[type] self.assertEquals(hashutil.hash_to_hex(hex), hex) self.assertEquals(hashutil.hash_to_hex(hash), hex) @istest def hash_to_bytes(self): for type in self.checksums: hex = self.hex_checksums[type] hash = self.checksums[type] self.assertEquals(hashutil.hash_to_bytes(hex), hash) self.assertEquals(hashutil.hash_to_bytes(hash), hash) @istest def hash_to_bytehex(self): for algo in self.checksums: self.assertEqual(self.hex_checksums[algo].encode('ascii'), hashutil.hash_to_bytehex(self.checksums[algo])) @istest def bytehex_to_hash(self): for algo in self.checksums: self.assertEqual(self.checksums[algo], hashutil.bytehex_to_hash( self.hex_checksums[algo].encode())) @istest def new_hash_unsupported_hashing_algorithm(self): try: 
hashutil._new_hash('blake2:10') except ValueError as e: self.assertEquals(str(e), 'Unexpected hashing algorithm blake2:10, ' 'expected one of blake2b512, blake2s256, ' 'sha1, sha1_git, sha256') @patch('swh.model.hashutil.hashlib') @istest def new_hash_blake2b(self, mock_hashlib): mock_hashlib.new.return_value = 'some-hashlib-object' h = hashutil._new_hash('blake2b512') self.assertEquals(h, 'some-hashlib-object') mock_hashlib.new.assert_called_with('blake2b512') @patch('swh.model.hashutil.hashlib') @istest def new_hash_blake2s(self, mock_hashlib): mock_hashlib.new.return_value = 'some-hashlib-object' h = hashutil._new_hash('blake2s256') self.assertEquals(h, 'some-hashlib-object') mock_hashlib.new.assert_called_with('blake2s256') class HashlibGit(unittest.TestCase): def setUp(self): self.blob_data = b'42\n' self.tree_data = b''.join([b'40000 barfoo\0', bytes.fromhex('c3020f6bf135a38c6df' '3afeb5fb38232c5e07087'), b'100644 blah\0', bytes.fromhex('63756ef0df5e4f10b6efa' '33cfe5c758749615f20'), b'100644 hello\0', bytes.fromhex('907b308167f0880fb2a' '5c0e1614bb0c7620f9dc3')]) self.commit_data = """tree 1c61f7259dcb770f46b194d941df4f08ff0a3970 author Antoine R. Dumont (@ardumont) 1444054085 +0200 committer Antoine R. Dumont (@ardumont) 1444054085 +0200 initial """.encode('utf-8') # NOQA self.tag_data = """object 24d012aaec0bc5a4d2f62c56399053d6cc72a241 type commit tag 0.0.1 tagger Antoine R. 
Dumont (@ardumont) 1444225145 +0200 blah """.encode('utf-8') # NOQA self.checksums = { 'blob_sha1_git': bytes.fromhex('d81cc0710eb6cf9efd5b920a8453e1' 'e07157b6cd'), 'tree_sha1_git': bytes.fromhex('ac212302c45eada382b27bfda795db' '121dacdb1c'), 'commit_sha1_git': bytes.fromhex('e960570b2e6e2798fa4cfb9af2c399' 'd629189653'), 'tag_sha1_git': bytes.fromhex('bc2b99ba469987bcf1272c189ed534' 'e9e959f120'), } @istest def unknown_header_type(self): with self.assertRaises(ValueError) as cm: hashutil.hash_git_data(b'any-data', 'some-unknown-type') self.assertIn('Unexpected git object type', cm.exception.args[0]) @istest def hashdata_content(self): # when actual_hash = hashutil.hash_git_data(self.blob_data, git_type='blob') # then self.assertEqual(actual_hash, self.checksums['blob_sha1_git']) @istest def hashdata_tree(self): # when actual_hash = hashutil.hash_git_data(self.tree_data, git_type='tree') # then self.assertEqual(actual_hash, self.checksums['tree_sha1_git']) @istest def hashdata_revision(self): # when actual_hash = hashutil.hash_git_data(self.commit_data, git_type='commit') # then self.assertEqual(actual_hash, self.checksums['commit_sha1_git']) @istest def hashdata_tag(self): # when actual_hash = hashutil.hash_git_data(self.tag_data, git_type='tag') # then self.assertEqual(actual_hash, self.checksums['tag_sha1_git']) diff --git a/swh/model/tests/test_validators.py b/swh/model/tests/test_validators.py index 6f28658..60a1de4 100644 --- a/swh/model/tests/test_validators.py +++ b/swh/model/tests/test_validators.py @@ -1,75 +1,75 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import unittest from nose.tools import istest from swh.model import validators, hashutil, exceptions class TestValidators(unittest.TestCase): def setUp(self): 
self.valid_visible_content = { 'status': 'visible', 'length': 5, 'data': b'1984\n', 'ctime': datetime.datetime(2015, 11, 22, 16, 33, 56, tzinfo=datetime.timezone.utc), } self.valid_visible_content.update( hashutil.hash_data(self.valid_visible_content['data'])) self.valid_absent_content = { 'status': 'absent', 'length': 5, 'ctime': datetime.datetime(2015, 11, 22, 16, 33, 56, tzinfo=datetime.timezone.utc), 'reason': 'Content too large', 'sha1_git': self.valid_visible_content['sha1_git'], 'origin': 42, } self.invalid_content_hash_mismatch = self.valid_visible_content.copy() self.invalid_content_hash_mismatch.update( hashutil.hash_data(b"this is not the data you're looking for")) @istest def validate_content(self): self.assertTrue( validators.validate_content(self.valid_visible_content)) self.assertTrue( validators.validate_content(self.valid_absent_content)) @istest def validate_content_hash_mismatch(self): with self.assertRaises(exceptions.ValidationError) as cm: validators.validate_content(self.invalid_content_hash_mismatch) # All the hashes are wrong. The exception should be of the form: # ValidationError({ # NON_FIELD_ERRORS: [ # ValidationError('content-hash-mismatch', 'sha1'), # ValidationError('content-hash-mismatch', 'sha1_git'), # ValidationError('content-hash-mismatch', 'sha256'), # ] # }) exc = cm.exception self.assertIsInstance(str(exc), str) self.assertEquals(set(exc.error_dict.keys()), {exceptions.NON_FIELD_ERRORS}) hash_mismatches = exc.error_dict[exceptions.NON_FIELD_ERRORS] self.assertIsInstance(hash_mismatches, list) - self.assertEqual(len(hash_mismatches), 3) + self.assertEqual(len(hash_mismatches), 4) self.assertTrue(all(mismatch.code == 'content-hash-mismatch' for mismatch in hash_mismatches)) self.assertEqual(set(mismatch.params['hash'] for mismatch in hash_mismatches), - {'sha1', 'sha1_git', 'sha256'}) + {'sha1', 'sha1_git', 'sha256', 'blake2s256'})