diff --git a/PKG-INFO b/PKG-INFO
index c67ba2c..2897539 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,10 +1,10 @@
 Metadata-Version: 1.0
 Name: swh.model
-Version: 0.0.4
+Version: 0.0.5
 Summary: Software Heritage data model
 Home-page: https://forge.softwareheritage.org/diffusion/DMOD/
 Author: Software Heritage developers
 Author-email: swh-devel@inria.fr
 License: UNKNOWN
 Description: UNKNOWN
 Platform: UNKNOWN
diff --git a/README-dev.md b/README-dev.md
new file mode 100644
index 0000000..97f9fb4
--- /dev/null
+++ b/README-dev.md
@@ -0,0 +1,118 @@
+Git sha1 computation
+--------------------
+
+This document describes how the git sha1 computations take place.
+
+### commit/revision
+
+sha1 git commit/revision computation:
+
+    commit `size`\0
+    tree `sha1-git-tree-and-subtree-in-plain-hex-string`
+    ([parent `commit-parent-n`])
+    author `name` <`email`> `date-ts` `date-offset`
+    committer `name` <`email`> `date-ts` `date-offset`
+    ([extra-header-key-n extra-header-value-n])
+
+    `commit-message`
+    (inline-gpg-signature)
+
+
+Notes:
+- [] denotes a list of entries (one per line)
+- () denotes an optional entry. For example, the parent entry is optional.
+- empty line at the end of the commit message
+- timestamp example: 1444054085
+- date offset examples: +0200, -0100
+
+sources:
+- commit_tree_extended: https://github.com/git/git/blob/8d530c4d64ffcc853889f7b385f554d53db375ed/commit.c#L1522
+- commit_tree: https://github.com/git/git/blob/8d530c4d64ffcc853889f7b385f554d53db375ed/commit.c#L1392
+
+Examples:
+
+```sh
+$ cat commit.txt
+tree 85a74718d377195e1efd0843ba4f3260bad4fe07
+parent 01e2d0627a9a6edb24c37db45db5ecb31e9de808
+author Linus Torvalds <torvalds@linux-foundation.org> 1436739030 -0700
+committer Linus Torvalds <torvalds@linux-foundation.org> 1436739030 -0700
+svn-repo-uuid 046f1af7-66c2-d61b-5410-ce57b7db7bff
+svn-revision 10
+
+Linux 4.2-rc2
+```
+
+```
+$ cat commit.txt | git hash-object -t commit --stdin
+010d34f384fa99d047cdd5e2f41e56e5c2feee45
+```
+
+commit: 44cc742a8ca17b9c279be4cc195a93a6ef7a320e
+```
+$ git cat-file -p 44cc742a8ca17b9c279be4cc195a93a6ef7a320e
+...
+tree b134f9b7dc434f593c0bab696345548b37de0558
+parent 689664ae944b4692724f13b709a4e4de28b54e57
+parent c888305e1efbaa252d01b4e5e6b778f865a97514
+author Jiang Xin <worldhello.net@gmail.com> 1428538899 +0800
+committer Jiang Xin <worldhello.net@gmail.com> 1428538899 +0800
+gpgsig -----BEGIN PGP SIGNATURE-----
+ Version: GnuPG v1.4.13 (Darwin)
+
+ iQIcBAABAgAGBQJVJcYsAAoJEBiY3kIkQRNJVAUQAJ8/XQIfMqqC5oYeEFfHOPYZ
+ L7qy46bXHVBa9Qd8zAJ2Dou3IbI2ZoF6/Et89K/UggOycMlt5FKV/9toWyuZv4Po
+ L682wonoxX99qvVTHo6+wtnmYO7+G0f82h+qHMErxjP+I6gzRNBvRr+SfY7VlGdK
+ wikMKOMWC5smrScSHITnOq1Ews5pe3N7qDYMzK0XVZmgDoaem4RSWMJs4My/qVLN
+ e0CqYWq2A22GX7sXl6pjneJYQvcAXUX+CAzp24QnPSb+Q22Guj91TcxLFcHCTDdn
+ qgqMsEyMiisoglwrCbO+D+1xq9mjN9tNFWP66SQ48mrrHYTBV5sz9eJyDfroJaLP
+ CWgbDTgq6GzRMehHT3hXfYS5NNatjnhkNISXR7pnVP/obIi/vpWh5ll6Gd8q26z+
+ a/O41UzOaLTeNI365MWT4/cnXohVLRG7iVJbAbCxoQmEgsYMRc/pBAzWJtLfcB2G
+ jdTswYL6+MUdL8sB9pZ82D+BP/YAdHe69CyTu1lk9RT2pYtI/kkfjHubXBCYEJSG
+ +VGllBbYG6idQJpyrOYNRJyrDi9yvDJ2W+S0iQrlZrxzGBVGTB/y65S8C+2WTBcE
+ lf1Qb5GDsQrZWgD+jtWTywOYHtCBwyCKSAXxSARMbNPeak9WPlcW/Jmu+fUcMe2x
+ dg1KdHOa34shrKDaOVzW
+ =od6m
+ -----END PGP SIGNATURE-----
+
+Merge branch 'master' of git://github.com/alexhenrie/git-po
+
+* 'master' of git://github.com/alexhenrie/git-po:
+  l10n: ca.po: update translation
+```
+
+### directory/tree
+
+sha1 git directory/tree computation:
+
+    tree `tree-size`\0
+    `perms` `name`\0`sha1-git-in-binary-form`
+    `perms` `name`\0`sha1-git-in-binary-form`
+    ...
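+
+The same `type size\0` header scheme applies to every git object type (blob,
+tree, commit, tag). As a quick cross-check, the computation can be reproduced
+with nothing but hashlib (a minimal sketch, not part of swh.model):
+
+```
+import hashlib
+
+def git_object_sha1(git_type, body):
+    # git hashes `git_type` + ' ' + decimal length + NUL, then the raw body
+    header = git_type + b' ' + str(len(body)).encode('ascii') + b'\x00'
+    h = hashlib.sha1()
+    h.update(header)
+    h.update(body)
+    return h.hexdigest()
+
+# the empty blob hashes to the well-known git identifier
+assert git_object_sha1(b'blob', b'') == \
+    'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
+```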
+
+Notes:
+- no newline separator between tree entries
+- no empty newline at the end of the tree entries
+- the size in the tree header is the byte length of the tree content (the
+  concatenated entries)
+- tree entries are ordered by name, compared as raw bytes
+
+Note: tree entries referencing trees are sorted as if their names had a
+trailing / appended.
+
+Possible permissions are:
+- 100644 - file
+- 40000 - directory
+- 100755 - executable file
+- 120000 - symbolic link
+- 160000 - git link (submodule reference)
+
+### content/file
+
+sha1 git content computation:
+
+    blob `blob-size`\0
+    `blob-content`
+
+Notes:
+- no newline at the end of the blob content
+- the sha1 is computed over the uncompressed header and content; git only
+  applies DEFLATE compression when storing the object on disk
diff --git a/swh.model.egg-info/PKG-INFO b/swh.model.egg-info/PKG-INFO
index c67ba2c..2897539 100644
--- a/swh.model.egg-info/PKG-INFO
+++ b/swh.model.egg-info/PKG-INFO
@@ -1,10 +1,10 @@
 Metadata-Version: 1.0
 Name: swh.model
-Version: 0.0.4
+Version: 0.0.5
 Summary: Software Heritage data model
 Home-page: https://forge.softwareheritage.org/diffusion/DMOD/
 Author: Software Heritage developers
 Author-email: swh-devel@inria.fr
 License: UNKNOWN
 Description: UNKNOWN
 Platform: UNKNOWN
diff --git a/swh.model.egg-info/SOURCES.txt b/swh.model.egg-info/SOURCES.txt
index c8d507f..6d07beb 100644
--- a/swh.model.egg-info/SOURCES.txt
+++ b/swh.model.egg-info/SOURCES.txt
@@ -1,40 +1,41 @@
 .gitignore
 AUTHORS
 LICENSE
 MANIFEST.in
 Makefile
 Makefile.local
+README-dev.md
 requirements.txt
 setup.py
 version.txt
 debian/changelog
 debian/compat
 debian/control
 debian/copyright
 debian/rules
 debian/source/format
 swh.model.egg-info/PKG-INFO
 swh.model.egg-info/SOURCES.txt
 swh.model.egg-info/dependency_links.txt
 swh.model.egg-info/requires.txt
 swh.model.egg-info/top_level.txt
 swh/model/__init__.py
 swh/model/exceptions.py
 swh/model/git.py
 swh/model/hashutil.py
 swh/model/identifiers.py
 swh/model/validators.py
 swh/model/fields/__init__.py
 swh/model/fields/compound.py
 swh/model/fields/hashes.py
 swh/model/fields/simple.py
 swh/model/tests/__init__.py
 swh/model/tests/test_git.py
 swh/model/tests/test_git_slow.py
 swh/model/tests/test_hashutil.py
 swh/model/tests/test_identifiers.py
 swh/model/tests/test_validators.py
 swh/model/tests/fields/__init__.py
 swh/model/tests/fields/test_compound.py
 swh/model/tests/fields/test_hashes.py
 swh/model/tests/fields/test_simple.py
\ No newline at end of file
diff --git a/swh/model/git.py b/swh/model/git.py
index 79852f8..dc7ab0d 100644
--- a/swh/model/git.py
+++ b/swh/model/git.py
@@ -1,266 +1,447 @@
 # Copyright (C) 2015 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import os

 from enum import Enum

 from swh.model import hashutil, identifiers

 ROOT_TREE_KEY = b''


 class GitType(Enum):
     BLOB = b'blob'
     TREE = b'tree'
     EXEC = b'exec'
     LINK = b'link'
     COMM = b'commit'
     RELE = b'release'
     REFS = b'ref'


 class GitPerm(Enum):
     BLOB = b'100644'
     TREE = b'40000'
     EXEC = b'100755'
     LINK = b'120000'


 def compute_directory_git_sha1(dirpath, hashes):
     """Compute a directory git sha1 for a dirpath.

     Args:
         dirpath: the directory's absolute path
         hashes: dictionary mapping each directory path to its list of tree
             entries, with keys:
             - sha1_git: the tree entry's sha1
             - name: file or subdir's name
             - perms: the tree entry's permissions

     Returns:
         the binary sha1 of the directory's identifier

     Assumes:
         Every path exists in hashes.
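    Example (an illustrative sketch: the path is hypothetical, the entry
    reuses a known blob sha1, and `hashes` would normally be built by
    `walk_and_compute_sha1_from_directory`):

        entry = {'name': b'hello',
                 'perms': GitPerm.BLOB,
                 'type': GitType.BLOB,
                 'sha1_git': bytes.fromhex('907b308167f0880fb2a'
                                           '5c0e1614bb0c7620f9dc3')}
        tree_sha1 = compute_directory_git_sha1(b'/tmp/project',
                                               {b'/tmp/project': [entry]})
        # tree_sha1 is the 20-byte binary sha1_git of the tree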
""" directory = { 'entries': [ { 'name': entry['name'], 'perms': int(entry['perms'].value, 8), 'target': entry['sha1_git'], 'type': 'dir' if entry['perms'] == GitPerm.TREE else 'file', } for entry in hashes[dirpath] ] } return hashutil.hash_to_bytes(identifiers.directory_identifier(directory)) def compute_revision_sha1_git(revision): """Compute a revision sha1 git from its dict representation. Args: revision: Additional dictionary information needed to compute a synthetic revision. Following keys are expected: - author - date - committer - committer_date - message - type - directory: binary form of the tree hash Returns: revision sha1 in bytes # FIXME: beware, bytes output from storage api """ return hashutil.hash_to_bytes(identifiers.revision_identifier(revision)) def compute_release_sha1_git(release): """Compute a release sha1 git from its dict representation. Args: release: Additional dictionary information needed to compute a synthetic release. Following keys are expected: - name - message - date - author - revision: binary form of the sha1_git revision targeted by this Returns: release sha1 in bytes """ return hashutil.hash_to_bytes(identifiers.release_identifier(release)) def compute_link_metadata(linkpath): """Given a linkpath, compute the git metadata. Args: linkpath: absolute pathname of the link Returns: Dictionary of values: + - data: link's content + - length: link's content length - name: basename of the link - perms: git permission for link - type: git type for link + - path: absolute path to the link on filesystem + """ data = os.readlink(linkpath) link_metadata = hashutil.hash_data(data) link_metadata.update({ 'data': data, 'length': len(data), 'name': os.path.basename(linkpath), 'perms': GitPerm.LINK, 'type': GitType.BLOB, 'path': linkpath }) return link_metadata def compute_blob_metadata(filepath): """Given a filepath, compute the git metadata. Args: filepath: absolute pathname of the file. Returns: Dictionary of values: - name: basename of the file - perms: git permission for file - type: git type for file + - path: absolute filepath on filesystem """ blob_metadata = hashutil.hash_path(filepath) perms = GitPerm.EXEC if os.access(filepath, os.X_OK) else GitPerm.BLOB blob_metadata.update({ 'name': os.path.basename(filepath), 'perms': perms, 'type': GitType.BLOB, 'path': filepath }) return blob_metadata def compute_tree_metadata(dirname, ls_hashes): """Given a dirname, compute the git metadata. Args: dirname: absolute pathname of the directory. Returns: Dictionary of values: + - sha1_git: tree's sha1 git - name: basename of the directory - perms: git permission for directory - type: git type for directory + - path: absolute path to directory on filesystem """ return { 'sha1_git': compute_directory_git_sha1(dirname, ls_hashes), 'name': os.path.basename(dirname), 'perms': GitPerm.TREE, 'type': GitType.TREE, 'path': dirname } def walk_and_compute_sha1_from_directory(rootdir, - dir_ok_fn=lambda dirpath: True): + dir_ok_fn=lambda dirpath: True, + with_root_tree=True): """Compute git sha1 from directory rootdir. Args: - rootdir: Root directory from which beginning the git hash computation - dir_ok_fn: Filter function to filter directory according to rules defined in the function. By default, all folders are ok. Example override: dir_ok_fn = lambda dirpath: b'svn' not in dirpath + - with_root_tree: Determine if we compute the upper root tree's + checksums. As a default, we want it. One possible use case where this + is not useful is the update (cf. 
`update_checksums_from`) + Returns: Dictionary of entries with keys and as values a list of directory entries. Those are list of dictionary with keys: - 'perms' - 'type' - 'name' - 'sha1_git' - and specifically content: 'sha1', 'sha256', ... Note: One special key is ROOT_TREE_KEY to indicate the upper root of the directory (this is the revision's directory). Raises: Nothing If something is raised, this is a programmatic error. """ ls_hashes = {} all_links = set() + if rootdir.endswith(b'/'): + rootdir = rootdir.rstrip(b'/') + def filtfn(dirpath, dirnames): return list(filter(lambda dirname: dir_ok_fn(os.path.join(dirpath, dirname)), dirnames)) gen_dir = ((dp, filtfn(dp, dns), fns) for (dp, dns, fns) in os.walk(rootdir, topdown=False) if dir_ok_fn(dp)) for dirpath, dirnames, filenames in gen_dir: hashes = [] links = (os.path.join(dirpath, file) for file in (filenames+dirnames) if os.path.islink(os.path.join(dirpath, file))) for linkpath in links: all_links.add(linkpath) m_hashes = compute_link_metadata(linkpath) hashes.append(m_hashes) only_files = (os.path.join(dirpath, file) for file in filenames if os.path.join(dirpath, file) not in all_links) for filepath in only_files: m_hashes = compute_blob_metadata(filepath) hashes.append(m_hashes) ls_hashes[dirpath] = hashes dir_hashes = [] subdirs = (os.path.join(dirpath, dir) for dir in dirnames if os.path.join(dirpath, dir) not in all_links) for fulldirname in subdirs: tree_hash = compute_tree_metadata(fulldirname, ls_hashes) dir_hashes.append(tree_hash) ls_hashes[dirpath].extend(dir_hashes) - # compute the current directory hashes - root_hash = { - 'sha1_git': compute_directory_git_sha1(rootdir, ls_hashes), - 'path': rootdir, - 'name': os.path.basename(rootdir), - 'perms': GitPerm.TREE, - 'type': GitType.TREE - } - ls_hashes[ROOT_TREE_KEY] = [root_hash] + if with_root_tree: + # compute the current directory hashes + root_hash = { + 'sha1_git': compute_directory_git_sha1(rootdir, ls_hashes), + 'path': rootdir, + 'name': os.path.basename(rootdir), + 'perms': GitPerm.TREE, + 'type': GitType.TREE + } + ls_hashes[ROOT_TREE_KEY] = [root_hash] return ls_hashes + + +def recompute_sha1_in_memory(root, deeper_rootdir, objects): + """Recompute git sha1 from directory deeper_rootdir to root. + + This function relies exclusively on `objects` for hashes. It + expects the deeper_rootdir and every key below that path to be + already updated. + + Args: + - root: Upper root directory (so same as + objects[ROOT_TREE_KEY][0]['path']) + + - deeper_rootdir: Root directory from which the git hash + computation begins + + - objects: objects dictionary as per returned by + `walk_and_compute_sha1_from_directory` + + Returns: + Dictionary of entries with keys and as values a list of + directory entries. + Those are list of dictionary with keys: + - 'perms' + - 'type' + - 'name' + - 'sha1_git' + - and specifically content: 'sha1', 'sha256', ... + + Note: + One special key is ROOT_TREE_KEY to indicate the upper root of the + directory (this is the revision's target directory). + + Raises: + Nothing + If something is raised, this is a programmatic error. 
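+
+    Example (a sketch with hypothetical paths): once the entries stored
+    under b'/tmp/root/a/b' have been refreshed in `objects`, calling
+
+        recompute_sha1_in_memory(b'/tmp/root', b'/tmp/root/a/b', objects)
+
+    recomputes the tree hashes of b'/tmp/root/a' and b'/tmp/root' from the
+    in-memory entries, then refreshes objects[ROOT_TREE_KEY].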
+ + """ + # list of paths to update from bottom to top + upper_root = os.path.dirname(root) + rootdir = os.path.dirname(deeper_rootdir) + while rootdir != upper_root: + files = objects.get(rootdir, None) + if files: + ls_hashes = [] + for hashfile in files: + fulldirname = hashfile['path'] + if hashfile['type'] == GitType.TREE: + tree_hash = compute_tree_metadata(fulldirname, objects) + ls_hashes.append(tree_hash) + else: + ls_hashes.append(hashfile) + + objects[rootdir] = ls_hashes + + rootdir = os.path.dirname(rootdir) + + # update root + objects[ROOT_TREE_KEY][0]['sha1_git'] = compute_directory_git_sha1(root, + objects) + return objects + + +def commonpath(paths): + """Given a sequence of path names, returns the longest common sub-path. + + Copied from Python3.5 + + """ + + if not paths: + raise ValueError('commonpath() arg is an empty sequence') + + if isinstance(paths[0], bytes): + sep = b'/' + curdir = b'.' + else: + sep = '/' + curdir = '.' + + try: + split_paths = [path.split(sep) for path in paths] + + try: + isabs, = set(p[:1] == sep for p in paths) + except ValueError: + raise ValueError("Can't mix absolute and relative paths") + + split_paths = [ + [c for c in s if c and c != curdir] for s in split_paths] + s1 = min(split_paths) + s2 = max(split_paths) + common = s1 + for i, c in enumerate(s1): + if c != s2[i]: + common = s1[:i] + break + + prefix = sep if isabs else sep[:0] + return prefix + sep.join(common) + except (TypeError, AttributeError): + raise + + +def update_checksums_from(changed_paths, objects, + dir_ok_fn=lambda dirpath: True): + """Given a list of changed paths, recompute the checksums only where + needed. + + Args: + changed_paths: Dictionary list representing path changes. + A dictionary has the form: + - path: the full path to the file Added, Modified or Deleted + - action: A, M or D + objects: dictionary returned by `walk_and_compute_sha1_from_directory`. + + Returns: + Dictionary returned by `walk_and_compute_sha1_from_directory` + updated (mutated) according to latest filesystem modifications. + + """ + root = objects[ROOT_TREE_KEY][0]['path'] + if root.endswith(b'/'): + root = root.rstrip(b'/') + + paths = [] + # a first round-trip to ensure we don't need to... + for changed_path in changed_paths: + path = changed_path['path'] + + parent = os.path.dirname(path) + if parent == root: # ... recompute everything anyway + return walk_and_compute_sha1_from_directory(root, + dir_ok_fn) + + if changed_path['action'] == 'D': # (D)elete + k = objects.pop(path, None) + if k: # it's a dir, we need to remove the descendant paths + prefix_path = path + b'/' + new_objects = {k: objects[k] for k in objects.keys() + if not k.startswith(prefix_path)} + objects = new_objects + + paths.append(parent) + + if not paths: # no modification on paths + return objects + + rootdir = commonpath(paths) + + # common ancestor is the root anyway, no optimization possible, + # recompute all + if root == rootdir: + return walk_and_compute_sha1_from_directory(root, + dir_ok_fn) + + # Recompute from disk the checksums from impacted common ancestor + # rootdir changes. 
Then update the original objects with new + # checksums for the arborescence tree below rootdir + hashes = walk_and_compute_sha1_from_directory(rootdir, dir_ok_fn, + with_root_tree=False) + objects.update(hashes) + + # Recompute the hashes in memory from rootdir to root + return recompute_sha1_in_memory(root, rootdir, objects) diff --git a/swh/model/hashutil.py b/swh/model/hashutil.py index 2d5ff12..b2558a3 100644 --- a/swh/model/hashutil.py +++ b/swh/model/hashutil.py @@ -1,187 +1,189 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import binascii import functools import hashlib from io import BytesIO import os # supported hashing algorithms ALGORITHMS = set(['sha1', 'sha256', 'sha1_git']) # should be a multiple of 64 (sha1/sha256's block size) # FWIW coreutils' sha1sum uses 32768 HASH_BLOCK_SIZE = 32768 def _new_git_hash(base_algo, git_type, length): """Initialize a digest object (as returned by python's hashlib) for the requested algorithm, and feed it with the header for a git object of the given type and length. The header for hashing a git object consists of: - The type of the object (encoded in ASCII) - One ASCII space (\x20) - The length of the object (decimal encoded in ASCII) - One NUL byte Args: base_algo: a hashlib-supported algorithm git_type: the type of the git object (supposedly one of 'blob', 'commit', 'tag', 'tree') length: the length of the git object you're encoding Returns: a hashutil.hash object """ h = hashlib.new(base_algo) git_header = '%s %d\0' % (git_type, length) h.update(git_header.encode('ascii')) return h def _new_hash(algo, length=None): """Initialize a digest object (as returned by python's hashlib) for the requested algorithm. See the constant ALGORITHMS for the list of supported algorithms. If a git-specific hashing algorithm is requested (e.g., "sha1_git"), the hashing object will be pre-fed with the needed header; for this to work, length must be given. Args: algo: a hashing algorithm (one of ALGORITHMS) length: the length of the hashed payload (needed for git-specific algorithms) Returns: a hashutil.hash object Raises: ValueError if algo is unknown, or length is missing for a git-specific hash. """ if algo not in ALGORITHMS: raise ValueError('Unexpected hashing algorithm %s, ' 'expected one of %s' % (algo, ', '.join(sorted(ALGORITHMS)))) h = None if algo.endswith('_git'): if length is None: raise ValueError('Missing length for git hashing algorithm') base_algo = algo[:-4] h = _new_git_hash(base_algo, 'blob', length) else: h = hashlib.new(algo) return h def hash_file(fobj, length=None, algorithms=ALGORITHMS, chunk_cb=None): """Hash the contents of the given file object with the given algorithms. Args: fobj: a file-like object length: the length of the contents of the file-like object (for the git-specific algorithms) algorithms: the hashing algorithms used Returns: a dict mapping each algorithm to a bytes digest. Raises: ValueError if algorithms contains an unknown hash algorithm. 
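    Example (a sketch; the file name is hypothetical):

        with open('/tmp/data.bin', 'rb') as fobj:
            length = os.path.getsize('/tmp/data.bin')
            digests = hash_file(fobj, length)
        # digests maps 'sha1', 'sha1_git' and 'sha256' to raw bytes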
""" hashes = {algo: _new_hash(algo, length) for algo in algorithms} while True: chunk = fobj.read(HASH_BLOCK_SIZE) if not chunk: break for hash in hashes.values(): hash.update(chunk) if chunk_cb: chunk_cb(chunk) return {algo: hash.digest() for algo, hash in hashes.items()} def hash_path(path, algorithms=ALGORITHMS, chunk_cb=None): """Hash the contents of the file at the given path with the given algorithms. Args: path: the path of the file to hash algorithms: the hashing algorithms used chunk_cb: a callback Returns: a dict mapping each algorithm to a bytes digest. Raises: ValueError if algorithms contains an unknown hash algorithm. OSError on file access error """ length = os.path.getsize(path) with open(path, 'rb') as fobj: - return hash_file(fobj, length, algorithms, chunk_cb) + hash = hash_file(fobj, length, algorithms, chunk_cb) + hash['length'] = length + return hash def hash_data(data, algorithms=ALGORITHMS): """Hash the given binary blob with the given algorithms. Args: data: a bytes object algorithms: the hashing algorithms used Returns: a dict mapping each algorithm to a bytes digest Raises: TypeError if data does not support the buffer interface. ValueError if algorithms contains an unknown hash algorithm. """ fobj = BytesIO(data) return hash_file(fobj, len(data), algorithms) def hash_git_data(data, git_type, base_algo='sha1'): """Hash the given data as a git object of type git_type. Args: data: a bytes object git_type: the git object type base_algo: the base hashing algorithm used (default: sha1) Returns: a dict mapping each algorithm to a bytes digest Raises: ValueError if the git_type is unexpected. """ git_object_types = {'blob', 'tree', 'commit', 'tag'} if git_type not in git_object_types: raise ValueError('Unexpected git object type %s, expected one of %s' % (git_type, ', '.join(sorted(git_object_types)))) h = _new_git_hash(base_algo, git_type, len(data)) h.update(data) return h.digest() @functools.lru_cache() def hash_to_hex(hash): """Converts a hash (in hex or bytes form) to its hexadecimal ascii form""" if isinstance(hash, str): return hash return binascii.hexlify(hash).decode('ascii') @functools.lru_cache() def hash_to_bytes(hash): """Converts a hash (in hex or bytes form) to its raw bytes form""" if isinstance(hash, bytes): return hash return bytes.fromhex(hash) diff --git a/swh/model/identifiers.py b/swh/model/identifiers.py index 36bfa20..cf3b326 100644 --- a/swh/model/identifiers.py +++ b/swh/model/identifiers.py @@ -1,300 +1,465 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import binascii import datetime from functools import lru_cache -from . import hashutil +from .hashutil import hash_data, hash_git_data @lru_cache() def identifier_to_bytes(identifier): """Convert a text identifier to bytes. Args: identifier: an identifier, either a 40-char hexadecimal string or a bytes object of length 20 Returns: The length 20 bytestring corresponding to the given identifier Raises: ValueError if the identifier is of an unexpected type or length. 
""" if isinstance(identifier, bytes): if len(identifier) != 20: raise ValueError( 'Wrong length for bytes identifier %s, expected 20' % len(identifier)) return identifier if isinstance(identifier, str): if len(identifier) != 40: raise ValueError( 'Wrong length for str identifier %s, expected 40' % len(identifier)) return bytes.fromhex(identifier) - raise ValueError('Wrong type for identitfier %s, expected bytes or str' % + raise ValueError('Wrong type for identifier %s, expected bytes or str' % identifier.__class__.__name__) @lru_cache() def identifier_to_str(identifier): """Convert an identifier to an hexadecimal string. Args: identifier: an identifier, either a 40-char hexadecimal string or a bytes object of length 20 Returns: The length 40 string corresponding to the given identifier, hex encoded Raises: ValueError if the identifier is of an unexpected type or length. """ if isinstance(identifier, str): if len(identifier) != 40: raise ValueError( 'Wrong length for str identifier %s, expected 40' % len(identifier)) return identifier if isinstance(identifier, bytes): if len(identifier) != 20: raise ValueError( 'Wrong length for bytes identifier %s, expected 20' % len(identifier)) return binascii.hexlify(identifier).decode() - raise ValueError('Wrong type for identitfier %s, expected bytes or str' % + raise ValueError('Wrong type for identifier %s, expected bytes or str' % identifier.__class__.__name__) def content_identifier(content): """Return the intrinsic identifier for a content. A content's identifier is the sha1, sha1_git and sha256 checksums of its data. Args: content: a content conforming to the Software Heritage schema Returns: A dictionary with all the hashes for the data Raises: KeyError if the content doesn't have a data member. """ - hashes = hashutil.hash_data( + hashes = hash_data( content['data'], {'sha1', 'sha1_git', 'sha256'}, ) return hashes def _sort_key(entry): """The sorting key for tree entries""" if entry['type'] == 'dir': return entry['name'] + b'/' else: return entry['name'] @lru_cache() def _perms_to_bytes(perms): """Convert the perms value to its bytes representation""" oc = oct(perms)[2:] return oc.encode('ascii') def directory_identifier(directory): """Return the intrinsic identifier for a directory. A directory's identifier is the tree sha1 à la git of a directory listing, using the following algorithm, which is equivalent to the git algorithm for trees: 1. Entries of the directory are sorted using the name (or the name with '/' appended for directory entries) as key, in bytes order. 2. 
For each entry of the directory, the following bytes are output: - the octal representation of the permissions for the entry (stored in the 'perms' member), which is a representation of the entry type: b'100644' (int 33188) for files b'100755' (int 33261) for executable files b'120000' (int 40960) for symbolic links b'40000' (int 16384) for directories b'160000' (int 57344) for references to revisions - an ascii space (b'\x20') - the entry's name (as raw bytes), stored in the 'name' member - a null byte (b'\x00') - the 20 byte long identifier of the object pointed at by the entry, stored in the 'target' member: for files or executable files: their blob sha1_git for symbolic links: the blob sha1_git of a file containing the link destination for directories: their intrinsic identifier for revisions: their intrinsic identifier (Note that there is no separator between entries) """ components = [] for entry in sorted(directory['entries'], key=_sort_key): components.extend([ _perms_to_bytes(entry['perms']), b'\x20', entry['name'], b'\x00', identifier_to_bytes(entry['target']), ]) - return identifier_to_str(hashutil.hash_git_data(b''.join(components), - 'tree')) + return identifier_to_str(hash_git_data(b''.join(components), 'tree')) def format_date(date): """Convert a date object into an UTC timestamp encoded as ascii bytes. Git stores timestamps as an integer number of seconds since the UNIX epoch. However, Software Heritage stores timestamps as an integer number of microseconds (postgres type "datetime with timezone"). Therefore, we print timestamps with no microseconds as integers, and timestamps with microseconds as floating point values. """ if isinstance(date, datetime.datetime): if date.microsecond == 0: date = int(date.timestamp()) else: date = date.timestamp() return str(date).encode() else: if date == int(date): date = int(date) return str(date).encode() @lru_cache() -def format_offset(offset): +def format_offset(offset, negative_utc=None): """Convert an integer number of minutes into an offset representation. The offset representation is [+-]hhmm where: hh is the number of hours; mm is the number of minutes. A null offset is represented as +0000. """ - if offset >= 0: - sign = '+' - else: + if offset < 0 or offset == 0 and negative_utc: sign = '-' + else: + sign = '+' hours = abs(offset) // 60 minutes = abs(offset) % 60 t = '%s%02d%02d' % (sign, hours, minutes) return t.encode() -def format_date_offset(date_offset): - """Format a date-compatible object with its timezone offset. +def normalize_timestamp(time_representation): + """Normalize a time representation for processing by Software Heritage + + This function supports a numeric timestamp (representing a number of + seconds since the UNIX epoch, 1970-01-01 at 00:00 UTC), a datetime.datetime + object (with timezone information), or a normalized Software + Heritage time representation (idempotency). + + Args: + time_representation: the representation of a timestamp + + Returns: a normalized dictionary with three keys + + - timestamp: a number of seconds since the UNIX epoch (1970-01-01 at 00:00 + UTC) + - offset: the timezone offset as a number of minutes relative to UTC + - negative_utc: a boolean representing whether the offset is -0000 when + offset = 0. 
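+
+    Example (sketch):
+
+        normalize_timestamp(1444054085)
+        # {'timestamp': 1444054085, 'offset': 0, 'negative_utc': False}
+        normalize_timestamp({'timestamp': 1444054085, 'offset': 120})
+        # {'timestamp': 1444054085, 'offset': 120, 'negative_utc': False}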
-    A date-compatible object is either:
-        - a dict with two members
-            timestamp: floating point number of seconds since the unix epoch
-            offset: (int) number of minutes representing the offset from UTC
-        - a datetime.datetime object with a timezone
-        - a numeric value (in which case the offset is hardcoded to 0)
     """
-    # FIXME: move normalization to another module
+    if time_representation is None:
+        return None
+
+    negative_utc = False

-    if isinstance(date_offset, dict):
-        date = date_offset['timestamp']
-        offset = date_offset['offset']
-    elif isinstance(date_offset, datetime.datetime):
-        date = date_offset
-        utcoffset = date_offset.utcoffset()
+    if isinstance(time_representation, dict):
+        timestamp = time_representation['timestamp']
+        offset = time_representation['offset']
+        if 'negative_utc' in time_representation:
+            negative_utc = time_representation['negative_utc']
+    elif isinstance(time_representation, datetime.datetime):
+        timestamp = time_representation.timestamp()
+        utcoffset = time_representation.utcoffset()
         if utcoffset is None:
-            raise ValueError('Received a datetime without a timezone')
+            raise ValueError(
+                'normalize_timestamp received datetime without timezone: %s' %
+                time_representation)
+
+        # convert the timedelta into a whole number of minutes
         seconds_offset = utcoffset.total_seconds()
-        if seconds_offset - int(seconds_offset) != 0 or seconds_offset % 60:
-            raise ValueError('Offset is not an integer number of minutes')
         offset = int(seconds_offset) // 60
     else:
-        date = date_offset
+        timestamp = time_representation
         offset = 0

-    return b''.join([format_date(date), b' ', format_offset(offset)])
+    return {
+        'timestamp': timestamp,
+        'offset': offset,
+        'negative_utc': negative_utc,
+    }


 def format_author(author):
-    return b''.join([author['name'], b' <', author['email'], b'>'])
+    """Format the specification of an author.
+
+    An author is either a byte string (passed unchanged), or a dict with
+    three keys: fullname, name and email.
+
+    If the fullname exists, return it; if it doesn't, we construct a fullname
+    using the following heuristics: if the name value is None, we return the
+    email in angle brackets; else, we return the name, a space, and the email
+    in angle brackets.
+
+    """
+    if isinstance(author, bytes) or author is None:
+        return author
+
+    if 'fullname' in author:
+        return author['fullname']
+
+    ret = []
+    if author['name'] is not None:
+        ret.append(author['name'])
+    if author['email'] is not None:
+        ret.append(b''.join([b'<', author['email'], b'>']))
+
+    return b' '.join(ret)
+
+
+def format_author_line(header, author, date_offset):
+    """Format an author line according to git standards.
+
+    An author line has three components:
+    - a header, describing the type of author (author, committer, tagger)
+    - a name and email, which is an arbitrary bytestring
+    - optionally, a timestamp with UTC offset specification
+
+    The author line is formatted thus:
+
+        `header` `name and email`[ `timestamp` `utc_offset`]
+
+    The timestamp is encoded as a (decimal) number of seconds since the UNIX
+    epoch (1970-01-01 at 00:00 UTC). As an extension to the git format, we
+    support fractional timestamps, using a dot as the separator for the
+    decimal part.
+
+    The utc offset is a number of minutes encoded as '[+-]HHMM'. Note that
+    some tools can pass a negative offset corresponding to the UTC timezone
+    ('-0000'), which is valid and is encoded as such.
+
+    For convenience, this function returns the whole line with its trailing
+    newline.
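+
+    Example (a sketch; the name and email are hypothetical):
+
+        format_author_line('author',
+                           {'name': b'Jane Doe', 'email': b'jd@example.com'},
+                           {'timestamp': 1444054085, 'offset': 120})
+        # -> b'author Jane Doe <jd@example.com> 1444054085 +0200\n'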
+ + Args: + header: the header of the author line (one of 'author', 'committer', + 'tagger') + author: an author specification (dict with two bytes values: name and + email, or byte value) + date_offset: a normalized date/time representation as returned by + `normalize_timestamp`. + + Returns: + the newline-terminated byte string containing the author line + + """ + + ret = [header.encode(), b' ', format_author(author)] + + date_offset = normalize_timestamp(date_offset) + + if date_offset is not None: + date_f = format_date(date_offset['timestamp']) + offset_f = format_offset(date_offset['offset'], + date_offset['negative_utc']) + + ret.extend([b' ', date_f, b' ', offset_f]) + + ret.append(b'\n') + return b''.join(ret) def revision_identifier(revision): """Return the intrinsic identifier for a revision. + + The fields used for the revision identifier computation are: + - directory + - parents + - author + - author_date + - committer + - committer_date + - metadata -> extra_headers + - message + + A revision's identifier is the 'git'-checksum of a commit manifest + constructed as follows (newlines are a single ASCII newline character): + + ``` + tree + [for each parent in parents] + parent + [end for each parents] + author + committer + [for each key, value in extra_headers] + + [end for each extra_headers] + + + ``` + + The directory identifier is the ascii representation of its hexadecimal + encoding. + + Author and committer are formatted with the `format_author` function. + Dates are formatted with the `format_date_offset` function. + + Extra headers are an ordered list of [key, value] pairs. Keys are strings + and get encoded to utf-8 for identifier computation. Values are either byte + strings, unicode strings (that get encoded to utf-8), or integers (that get + encoded to their utf-8 decimal representation). + + Multiline extra header values are escaped by indenting the continuation + lines with one ascii space. + + If the message is None, the manifest ends with the last header. Else, the + message is appended to the headers after an empty line. + + The checksum of the full manifest is computed using the 'commit' git object + type. 
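+
+    For instance, reusing identifiers from the README-dev examples (the name
+    and email here are hypothetical placeholders), the manifest of a
+    one-parent revision reads:
+
+        tree 85a74718d377195e1efd0843ba4f3260bad4fe07
+        parent 01e2d0627a9a6edb24c37db45db5ecb31e9de808
+        author Jane Doe <jd@example.com> 1436739030 -0700
+        committer Jane Doe <jd@example.com> 1436739030 -0700
+
+        Linux 4.2-rc2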
+ """ components = [ b'tree ', identifier_to_str(revision['directory']).encode(), b'\n', ] for parent in revision['parents']: if parent: components.extend([ b'parent ', identifier_to_str(parent).encode(), b'\n', ]) components.extend([ - b'author ', format_author(revision['author']), - b' ', format_date_offset(revision['date']), b'\n', - b'committer ', format_author(revision['committer']), - b' ', format_date_offset(revision['committer_date']), b'\n', - b'\n', - revision['message'], + format_author_line('author', revision['author'], revision['date']), + format_author_line('committer', revision['committer'], + revision['committer_date']), ]) - return identifier_to_str(hashutil.hash_git_data(b''.join(components), - 'commit')) + # Handle extra headers + metadata = revision.get('metadata') + if not metadata: + metadata = {} + + for key, value in metadata.get('extra_headers', []): + + # Integer values: decimal representation + if isinstance(value, int): + value = str(value).encode('utf-8') + + # Unicode string values: utf-8 encoding + if isinstance(value, str): + value = value.encode('utf-8') + + # multi-line values: indent continuation lines + if b'\n' in value: + value_chunks = value.split(b'\n') + value = b'\n '.join(value_chunks) + + # encode the key to utf-8 + components.extend([key.encode('utf-8'), b' ', value, b'\n']) + + if revision['message'] is not None: + components.extend([b'\n', revision['message']]) + + commit_raw = b''.join(components) + return identifier_to_str(hash_git_data(commit_raw, 'commit')) def target_type_to_git(target_type): """Convert a software heritage target type to a git object type""" return { 'content': b'blob', 'directory': b'tree', 'revision': b'commit', 'release': b'tag', }[target_type] def release_identifier(release): """Return the intrinsic identifier for a release.""" components = [ b'object ', identifier_to_str(release['target']).encode(), b'\n', b'type ', target_type_to_git(release['target_type']), b'\n', b'tag ', release['name'], b'\n', ] if 'author' in release and release['author']: - components.extend([ - b'tagger ', format_author(release['author']), b' ', - format_date_offset(release['date']), b'\n', - ]) + components.append( + format_author_line('tagger', release['author'], release['date']) + ) - components.extend([b'\n', release['message']]) + if release['message'] is not None: + components.extend([b'\n', release['message']]) - return identifier_to_str(hashutil.hash_git_data(b''.join(components), - 'tag')) + return identifier_to_str(hash_git_data(b''.join(components), 'tag')) diff --git a/swh/model/tests/test_git.py b/swh/model/tests/test_git.py index 4351019..3ed3ff3 100644 --- a/swh/model/tests/test_git.py +++ b/swh/model/tests/test_git.py @@ -1,212 +1,632 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os +import shutil +import subprocess import tempfile import unittest -import subprocess from nose.tools import istest from swh.model import git class GitHashlib(unittest.TestCase): def setUp(self): self.tree_data = b''.join([b'40000 barfoo\0', bytes.fromhex('c3020f6bf135a38c6df' '3afeb5fb38232c5e07087'), b'100644 blah\0', bytes.fromhex('63756ef0df5e4f10b6efa' '33cfe5c758749615f20'), b'100644 hello\0', bytes.fromhex('907b308167f0880fb2a' '5c0e1614bb0c7620f9dc3')]) self.commit_data = """tree 1c61f7259dcb770f46b194d941df4f08ff0a3970 author Antoine 
R. Dumont (@ardumont) 1444054085 +0200 committer Antoine R. Dumont (@ardumont) 1444054085 +0200 initial """.encode('utf-8') # NOQA self.tag_data = """object 24d012aaec0bc5a4d2f62c56399053d6cc72a241 type commit tag 0.0.1 tagger Antoine R. Dumont (@ardumont) 1444225145 +0200 blah """.encode('utf-8') # NOQA self.checksums = { 'tree_sha1_git': bytes.fromhex('ac212302c45eada382b27bfda795db' '121dacdb1c'), 'commit_sha1_git': bytes.fromhex('e960570b2e6e2798fa4cfb9af2c399' 'd629189653'), 'tag_sha1_git': bytes.fromhex('bc2b99ba469987bcf1272c189ed534' 'e9e959f120'), } @istest def compute_directory_git_sha1(self): # given dirpath = 'some-dir-path' hashes = { dirpath: [{'perms': git.GitPerm.TREE, 'type': git.GitType.TREE, 'name': b'barfoo', 'sha1_git': bytes.fromhex('c3020f6bf135a38c6df' '3afeb5fb38232c5e07087')}, {'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'name': b'hello', 'sha1_git': bytes.fromhex('907b308167f0880fb2a' '5c0e1614bb0c7620f9dc3')}, {'perms': git.GitPerm.BLOB, 'type': git.GitType.BLOB, 'name': b'blah', 'sha1_git': bytes.fromhex('63756ef0df5e4f10b6efa' '33cfe5c758749615f20')}] } # when checksum = git.compute_directory_git_sha1(dirpath, hashes) # then self.assertEqual(checksum, self.checksums['tree_sha1_git']) @istest def compute_revision_sha1_git(self): # given tree_hash = bytes.fromhex('1c61f7259dcb770f46b194d941df4f08ff0a3970') revision = { 'author': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'date': { 'timestamp': 1444054085, 'offset': 120, }, 'committer': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'committer_date': { 'timestamp': 1444054085, 'offset': 120, }, 'message': b'initial\n', 'type': 'tar', 'directory': tree_hash, 'parents': [], } # when checksum = git.compute_revision_sha1_git(revision) # then self.assertEqual(checksum, self.checksums['commit_sha1_git']) @istest def compute_release_sha1_git(self): # given revision_hash = bytes.fromhex('24d012aaec0bc5a4d2f62c56399053' 'd6cc72a241') release = { 'name': b'0.0.1', 'author': { 'name': b'Antoine R. Dumont (@ardumont)', 'email': b'antoine.romain.dumont@gmail.com', }, 'date': { 'timestamp': 1444225145, 'offset': 120, }, 'message': b'blah\n', 'target_type': 'revision', 'target': revision_hash, } # when checksum = git.compute_release_sha1_git(release) # then self.assertEqual(checksum, self.checksums['tag_sha1_git']) -class GitHashArborescenceTree(unittest.TestCase): - @classmethod - def setUpClass(cls): - super().setUpClass() +class GitHashWalkArborescenceTree(unittest.TestCase): + """Root class to ease walk and git hash testing without side-effecty problems. 
- cls.tmp_root_path = tempfile.mkdtemp().encode('utf-8') + """ + def setUp(self): + self.tmp_root_path = tempfile.mkdtemp().encode('utf-8') start_path = os.path.dirname(__file__).encode('utf-8') - sample_folder_archive = os.path.join(start_path, - b'../../../..', - b'swh-storage-testdata', - b'dir-folders', - b'sample-folder.tgz') + pkg_doc_linux_r11 = os.path.join(start_path, + b'../../../..', + b'swh-storage-testdata', + b'dir-folders', + b'sample-folder.tgz') - cls.root_path = os.path.join(cls.tmp_root_path, b'sample-folder') + self.root_path = os.path.join(self.tmp_root_path, b'sample-folder') # uncompress the sample folder subprocess.check_output( - ['tar', 'xvf', sample_folder_archive, '-C', cls.tmp_root_path]) + ['tar', 'xvf', pkg_doc_linux_r11, '-C', self.tmp_root_path]) + + def tearDown(self): + if os.path.exists(self.tmp_root_path): + shutil.rmtree(self.tmp_root_path) + +class GitHashFromScratch(GitHashWalkArborescenceTree): + """Test the main `walk_and_compute_sha1_from_directory` algorithm that + scans and compute the disk for checksums. + + """ @istest def walk_and_compute_sha1_from_directory(self): # make a temporary arborescence tree to hash without ignoring anything # same as previous behavior walk0 = git.walk_and_compute_sha1_from_directory(self.tmp_root_path) keys0 = list(walk0.keys()) path_excluded = os.path.join(self.tmp_root_path, b'sample-folder', b'foo') self.assertTrue(path_excluded in keys0) # it is not excluded here # make the same temporary arborescence tree to hash with ignoring one # folder foo walk1 = git.walk_and_compute_sha1_from_directory( self.tmp_root_path, dir_ok_fn=lambda dirpath: b'sample-folder/foo' not in dirpath) keys1 = list(walk1.keys()) self.assertTrue(path_excluded not in keys1) # remove the keys that can't be the same (due to hash definition) # Those are the top level folders keys_diff = [self.tmp_root_path, os.path.join(self.tmp_root_path, b'sample-folder'), git.ROOT_TREE_KEY] for k in keys_diff: self.assertNotEquals(walk0[k], walk1[k]) # The remaining keys (bottom path) should have exactly the same hashes # as before keys = set(keys1) - set(keys_diff) actual_walk1 = {} for k in keys: self.assertEquals(walk0[k], walk1[k]) actual_walk1[k] = walk1[k] expected_checksums = { os.path.join(self.tmp_root_path, b'sample-folder/empty-folder'): [], # noqa os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'): [{ # noqa 'type': git.GitType.BLOB, # noqa + 'length': 72, 'sha256': b'=\xb5\xae\x16\x80U\xbc\xd9:M\x08(]\xc9\x9f\xfe\xe2\x883\x03\xb2?\xac^\xab\x85\x02s\xa8\xeaUF', # noqa 'name': b'another-quote.org', # noqa 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo/another-quote.org'), # noqa 'perms': git.GitPerm.BLOB, # noqa 'sha1': b'\x90\xa6\x13\x8b\xa5\x99\x15&\x1e\x17\x99H8j\xa1\xcc*\xa9"\n', # noqa 'sha1_git': b'\x136\x93\xb1%\xba\xd2\xb4\xac1\x855\xb8I\x01\xeb\xb1\xf6\xb68'}], # noqa os.path.join(self.tmp_root_path, b'sample-folder/bar'): [{ # noqa 'type': git.GitType.TREE, # noqa 'perms': git.GitPerm.TREE, # noqa 'name': b'barfoo', # noqa 'path': os.path.join(self.tmp_root_path, b'sample-folder/bar/barfoo'), # noqa 'sha1_git': b'\xc3\x02\x0fk\xf15\xa3\x8cm\xf3\xaf\xeb_\xb3\x822\xc5\xe0p\x87'}]} # noqa self.assertEquals(actual_walk1, expected_checksums) + + @istest + def walk_and_compute_sha1_from_directory_without_root_tree(self): + # compute the full checksums + expected_hashes = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # except for the key on that round + actual_hashes = 
git.walk_and_compute_sha1_from_directory( + self.tmp_root_path, + with_root_tree=False) + + # then, removing the root tree hash from the first round + del expected_hashes[git.ROOT_TREE_KEY] + + # should give us the same checksums as the second round + self.assertEquals(actual_hashes, expected_hashes) + + +class GitHashUpdate(GitHashWalkArborescenceTree): + """Test `walk and git hash only on modified fs` functions. + + """ + @istest + def update_checksums_from_add_new_file(self): + # make a temporary arborescence tree to hash without ignoring anything + # update the disk in some way (add a new file) + # update the actual git checksums from the deeper tree modified + + # when + objects = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # update the existing file + changed_path = os.path.join(self.tmp_root_path, + b'sample-folder/bar/barfoo/new') + with open(changed_path, 'wb') as f: + f.write(b'new line') + + # walk1 (this will be our expectation) + expected_dict = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # then + actual_dict = git.update_checksums_from( + [{'path': changed_path, 'action': 'A'}], + objects) + + self.assertEquals(expected_dict, actual_dict) + + @istest + def update_checksums_from_modify_existing_file(self): + # make a temporary arborescence tree to hash without ignoring anything + # update the disk in some way () + # update the actual git checksums where only the modification is needed + + # when + objects = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # update existing file + changed_path = os.path.join( + self.tmp_root_path, + b'sample-folder/bar/barfoo/another-quote.org') + with open(changed_path, 'wb+') as f: + f.write(b'I have a dream') + + # walk1 (this will be our expectation) + expected_dict = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # then + actual_dict = git.update_checksums_from( + [{'path': changed_path, 'action': 'M'}], + objects) + + self.assertEquals(expected_dict, actual_dict) + + @istest + def update_checksums_no_change(self): + # when + expected_dict = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # nothing changes on disk + + # then + actual_dict = git.update_checksums_from([], expected_dict) + + self.assertEquals(actual_dict, expected_dict) + + @istest + def update_checksums_delete_existing_file(self): + # make a temporary arborescence tree to hash without ignoring anything + # update the disk in some way (delete a file) + # update the actual git checksums from the deeper tree modified + + # when + objects = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # Remove folder + changed_path = os.path.join(self.tmp_root_path, + b'sample-folder/bar/barfoo') + shutil.rmtree(changed_path) + + # Actually walking the fs will be the resulting expectation + expected_dict = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # then + actual_dict = git.update_checksums_from( + [{'path': changed_path, 'action': 'D'}], + objects) + + self.assertEquals(actual_dict, expected_dict) + + @istest + def update_checksums_from_multiple_fs_modifications(self): + # make a temporary arborescence tree to hash without ignoring anything + # update the disk in some way (modify a file, add a new, delete one) + # update the actual git checksums from the deeper tree modified + + # when + objects = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # Actions on disk (imagine a checkout of some form) + + # 1. 
Create a new file + changed_path = os.path.join(self.tmp_root_path, + b'sample-folder/bar/barfoo/new') + with open(changed_path, 'wb') as f: + f.write(b'new line') + + # 2. update the existing file + changed_path1 = os.path.join( + self.tmp_root_path, + b'sample-folder/bar/barfoo/another-quote.org') + with open(changed_path1, 'wb') as f: + f.write(b'new line') + + # 3. Remove some folder + changed_path2 = os.path.join(self.tmp_root_path, + b'sample-folder/foo') + shutil.rmtree(changed_path2) + + # Actually walking the fs will be the resulting expectation + expected_dict = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # then + actual_dict = git.update_checksums_from( + [{'path': changed_path, 'action': 'A'}, + {'path': changed_path1, 'action': 'M'}, + {'path': changed_path2, 'action': 'D'}], + objects) + + self.assertEquals(expected_dict, actual_dict) + + @istest + def update_checksums_from_common_ancestor(self): + # when + # Add some new arborescence below a folder destined to be removed + # want to check that old keys does not remain + future_folder_to_remove = os.path.join(self.tmp_root_path, + b'sample-folder/bar/barfoo') + + # add .../barfoo/hello/world under (.../barfoo which will be destroyed) + new_folder = os.path.join(future_folder_to_remove, b'hello') + os.makedirs(new_folder, exist_ok=True) + with open(os.path.join(future_folder_to_remove, b'world'), 'wb') as f: + f.write(b"i'm sad 'cause i'm destined to be removed...") + + # now we scan the disk + objects = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + assert objects[future_folder_to_remove] + + # Actions on disk (to simulate a checkout of some sort) + + # 1. Create a new file + changed_path = os.path.join(self.tmp_root_path, + b'sample-folder/bar/barfoo/new') + with open(changed_path, 'wb') as f: + f.write(b'new line') + + # 2. update the existing file + changed_path1 = os.path.join( + self.tmp_root_path, + b'sample-folder/bar/barfoo/another-quote.org') + with open(changed_path1, 'wb') as f: + f.write(b'new line') + + # 3. Remove folder + shutil.rmtree(future_folder_to_remove) + + # Actually walking the fs will be the resulting expectation + expected_dict = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # then + actual_dict = git.update_checksums_from( + [{'path': changed_path, 'action': 'A'}, + {'path': changed_path1, 'action': 'M'}, + {'path': future_folder_to_remove, 'action': 'D'}], + objects) + + self.assertEquals(expected_dict, actual_dict) + + @istest + def update_checksums_detects_recomputation_from_all_is_needed(self): + # when + objects = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # Actions on disk (imagine a checkout of some form) + + # 1. Create a new file + changed_path = os.path.join(self.tmp_root_path, + b'new-file-at-root') + with open(changed_path, 'wb') as f: + f.write(b'new line') + + # 2. update the existing file + changed_path1 = os.path.join( + self.tmp_root_path, + b'sample-folder/bar/barfoo/another-quote.org') + with open(changed_path1, 'wb') as f: + f.write(b'new line') + + # 3. Remove some folder + changed_path2 = os.path.join(self.tmp_root_path, + b'sample-folder/foo') + + # 3. 
Remove some folder + changed_path2 = os.path.join(self.tmp_root_path, + b'sample-folder/bar/barfoo') + shutil.rmtree(changed_path2) + + # Actually walking the fs will be the resulting expectation + expected_dict = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path) + + # then + actual_dict = git.update_checksums_from( + [{'path': changed_path, 'action': 'A'}, + {'path': changed_path1, 'action': 'M'}, + {'path': changed_path2, 'action': 'D'}], + objects) + + self.assertEquals(expected_dict, actual_dict) + + @istest + def commonpath(self): + paths = ['r/0/h', + 'r/1/d', 'r/1/i/a', 'r/1/i/b', 'r/1/i/c', + 'r/2/e', 'r/2/f', 'r/2/g'] + self.assertEquals(git.commonpath(paths), 'r') + + paths = ['r/1/d', 'r/1/i/a', 'r/1/i/b', 'r/1/i/c'] + self.assertEquals(git.commonpath(paths), 'r/1') + + paths = ['/a/r/2/g', '/a/r/1/i/c', '/a/r/0/h'] + self.assertEquals(git.commonpath(paths), '/a/r') + + paths = [b'/a/r/2/g', b'/b/r/1/i/c', b'/c/r/0/h'] + self.assertEquals(git.commonpath(paths), b'/') + + paths = ['a/z', 'a/z', 'a/z'] + self.assertEquals(git.commonpath(paths), 'a/z') + + paths = ['0'] + self.assertEquals(git.commonpath(paths), '0') + + +def untar(archive, dest): + # cleanup + shutil.rmtree(dest) + os.mkdir(dest) + # untar + cmd = [b'tar', b'xf', archive, b'-C', dest] + subprocess.check_output(cmd) + + +def ignore_svn_folder(dirpath): + return b'.svn' not in dirpath + + +class GitHashUpdateRealUseCase(GitHashWalkArborescenceTree): + """Test `walk and git hash only on modified fs` functions. + + """ + def setUp(self): + self.tmp_root_path = tempfile.mkdtemp().encode('utf-8') + + archives_folder = os.path.join( + os.path.dirname(__file__).encode('utf-8'), + b'../../../..', + b'swh-storage-testdata', + b'svn-folders') + + self.pkg_doc_linux_r10 = os.path.join(archives_folder, + b'pkg-doc-linux-r10.tgz') + self.pkg_doc_linux_r11 = os.path.join(archives_folder, + b'pkg-doc-linux-r11.tgz') + self.pkg_doc_linux_r12 = os.path.join(archives_folder, + b'pkg-doc-linux-r12.tgz') + + def tearDown(self): + if os.path.exists(self.tmp_root_path): + shutil.rmtree(self.tmp_root_path) + + @istest + def use_case_1_r10_r11(self): + # given + # untar the svn revision 10 + untar(self.pkg_doc_linux_r10, self.tmp_root_path) + + objects_r10 = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path, + ignore_svn_folder) + + # untar the svn revision 11 + untar(self.pkg_doc_linux_r11, self.tmp_root_path) + + objects_r11 = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path, + ignore_svn_folder) + + assert objects_r10 != objects_r11 + + changes = [ + {'action': 'D', 'path': os.path.join(self.tmp_root_path, b'copyrights/non-free/Kiosk')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'copyrights/undistributable/Kiosk')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'copyrights/undistributable')}, # noqa + {'action': 'D', 'path': os.path.join(self.tmp_root_path, b'copyrights/non-free/UPS')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'copyrights/undistributable/UPS')} # noqa + ] + + # when + # update from objects from previous revision (r10) with + # actual changes from r10 to r11 + actual_objects = git.update_checksums_from(changes, + objects_r10, + ignore_svn_folder) + + # then + self.assertEquals(actual_objects, objects_r11) + + @istest + def use_case_2_r11_r12(self): + # given + # untar the svn revision 11 + untar(self.pkg_doc_linux_r11, self.tmp_root_path) + + objects_r11 = git.walk_and_compute_sha1_from_directory( + 
self.tmp_root_path, + ignore_svn_folder) + + # untar the svn revision 12 + untar(self.pkg_doc_linux_r12, self.tmp_root_path) + + objects_r12 = git.walk_and_compute_sha1_from_directory( + self.tmp_root_path, + ignore_svn_folder) + + assert objects_r11 != objects_r12 + changes = [ + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk')}, # noqa + {'action': 'D', 'path': os.path.join(self.tmp_root_path, b'copyrights')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/copyright.head')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/split-package')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-base.faq')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/make-copyright')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.menu')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/redirect.patch')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.overrides')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.prerm')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/README.updating')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.preinst')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.dirs')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/changelog')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-text.README.Debian')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/html2docs')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/rules')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.postrm')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/make-omf')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-text.preinst')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.postinst')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/copyrights')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/control')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-text.dirs')}, # noqa + {'action': 'A', 'path': os.path.join(self.tmp_root_path, b'trunk/doc-linux/debian/doc-linux-html.README.Debian')} # noqa + ] + + # when + # update from objects from previous revision (r11) with + # actual changes from r11 to r12 + actual_objects = git.update_checksums_from(changes, + objects_r11, + ignore_svn_folder) + + # then + self.assertEquals(actual_objects, objects_r12) diff --git a/swh/model/tests/test_hashutil.py b/swh/model/tests/test_hashutil.py index 
diff --git a/swh/model/tests/test_hashutil.py b/swh/model/tests/test_hashutil.py
index 79cdc9e..f795e87 100644
--- a/swh/model/tests/test_hashutil.py
+++ b/swh/model/tests/test_hashutil.py
@@ -1,111 +1,112 @@
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import io
import tempfile
import unittest

from nose.tools import istest

from swh.model import hashutil


class Hashutil(unittest.TestCase):

    def setUp(self):
        self.data = b'1984\n'
        self.hex_checksums = {
            'sha1': '62be35bf00ff0c624f4a621e2ea5595a049e0731',
            'sha1_git': '568aaf43d83b2c3df8067f3bedbb97d83260be6d',
            'sha256': '26602113b4b9afd9d55466b08580d3c2'
                      '4a9b50ee5b5866c0d91fab0e65907311',
        }
        self.checksums = {
            type: bytes.fromhex(cksum)
            for type, cksum in self.hex_checksums.items()
        }

        self.git_hex_checksums = {
            'blob': self.hex_checksums['sha1_git'],
            'tree': '5b2e883aa33d2efab98442693ea4dd5f1b8871b0',
            'commit': '79e4093542e72f0fcb7cbd75cb7d270f9254aa8f',
            'tag': 'd6bf62466f287b4d986c545890716ce058bddf67',
        }
        self.git_checksums = {
            type: bytes.fromhex(cksum)
            for type, cksum in self.git_hex_checksums.items()
        }

    @istest
    def hash_data(self):
        checksums = hashutil.hash_data(self.data)
        self.assertEqual(checksums, self.checksums)

    @istest
    def hash_data_unknown_hash(self):
        with self.assertRaises(ValueError) as cm:
            hashutil.hash_data(self.data, ['unknown-hash'])

        self.assertIn('Unexpected hashing algorithm', cm.exception.args[0])
        self.assertIn('unknown-hash', cm.exception.args[0])

    @istest
    def hash_git_data(self):
        checksums = {
            git_type: hashutil.hash_git_data(self.data, git_type)
            for git_type in self.git_checksums
        }

        self.assertEqual(checksums, self.git_checksums)

    @istest
    def hash_git_data_unknown_git_type(self):
        with self.assertRaises(ValueError) as cm:
            hashutil.hash_git_data(self.data, 'unknown-git-type')

        self.assertIn('Unexpected git object type', cm.exception.args[0])
        self.assertIn('unknown-git-type', cm.exception.args[0])

    @istest
    def hash_file(self):
        fobj = io.BytesIO(self.data)

        checksums = hashutil.hash_file(fobj, length=len(self.data))
        self.assertEqual(checksums, self.checksums)

    @istest
    def hash_file_missing_length(self):
        fobj = io.BytesIO(self.data)

        with self.assertRaises(ValueError) as cm:
            hashutil.hash_file(fobj, algorithms=['sha1_git'])

        self.assertIn('Missing length', cm.exception.args[0])

    @istest
    def hash_path(self):
        with tempfile.NamedTemporaryFile(delete=False) as f:
            f.write(self.data)
            f.close()
            hashes = hashutil.hash_path(f.name)

+        self.checksums['length'] = len(self.data)
        self.assertEquals(self.checksums, hashes)

    @istest
    def hash_to_hex(self):
        for type in self.checksums:
            hex = self.hex_checksums[type]
            hash = self.checksums[type]
            self.assertEquals(hashutil.hash_to_hex(hex), hex)
            self.assertEquals(hashutil.hash_to_hex(hash), hex)

    @istest
    def hash_to_bytes(self):
        for type in self.checksums:
            hex = self.hex_checksums[type]
            hash = self.checksums[type]
            self.assertEquals(hashutil.hash_to_bytes(hex), hash)
            self.assertEquals(hashutil.hash_to_bytes(hash), hash)
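The fixture makes the hashutil API easy to try by hand. A minimal usage sketch built only from values asserted in this test file:

```python
# Usage sketch of hashutil over the test data b'1984\n'; the expected
# hex digests are the ones asserted in test_hashutil.py above.
from swh.model import hashutil

hashes = hashutil.hash_data(b'1984\n')
print(hashutil.hash_to_hex(hashes['sha1']))
# 62be35bf00ff0c624f4a621e2ea5595a049e0731

# the git blob hash of the same data, equal to hashes['sha1_git']
blob_hash = hashutil.hash_git_data(b'1984\n', 'blob')
print(hashutil.hash_to_hex(blob_hash))
# 568aaf43d83b2c3df8067f3bedbb97d83260be6d
```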
diff --git a/swh/model/tests/test_identifiers.py b/swh/model/tests/test_identifiers.py
index da221df..e1adfea 100644
--- a/swh/model/tests/test_identifiers.py
+++ b/swh/model/tests/test_identifiers.py
@@ -1,346 +1,641 @@
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import binascii
import datetime
import unittest

from nose.tools import istest

from swh.model import hashutil, identifiers


class UtilityFunctionsIdentifier(unittest.TestCase):
    def setUp(self):
        self.str_id = 'c2e41aae41ac17bd4a650770d6ee77f62e52235b'
        self.bytes_id = binascii.unhexlify(self.str_id)
        self.bad_type_id = object()

    @istest
    def identifier_to_bytes(self):
        for id in [self.str_id, self.bytes_id]:
            self.assertEqual(identifiers.identifier_to_bytes(id),
                             self.bytes_id)

            # wrong length
            with self.assertRaises(ValueError) as cm:
                identifiers.identifier_to_bytes(id[:-2])

            self.assertIn('length', str(cm.exception))

        with self.assertRaises(ValueError) as cm:
            identifiers.identifier_to_bytes(self.bad_type_id)

        self.assertIn('type', str(cm.exception))

    @istest
    def identifier_to_str(self):
        for id in [self.str_id, self.bytes_id]:
            self.assertEqual(identifiers.identifier_to_str(id),
                             self.str_id)

            # wrong length
            with self.assertRaises(ValueError) as cm:
                identifiers.identifier_to_str(id[:-2])

            self.assertIn('length', str(cm.exception))

        with self.assertRaises(ValueError) as cm:
            identifiers.identifier_to_str(self.bad_type_id)

        self.assertIn('type', str(cm.exception))


class UtilityFunctionsDateOffset(unittest.TestCase):
    def setUp(self):
        self.date = datetime.datetime(
            2015, 11, 22, 16, 33, 56, tzinfo=datetime.timezone.utc)
        self.date_int = int(self.date.timestamp())
        self.date_repr = b'1448210036'

        self.date_microseconds = datetime.datetime(
            2015, 11, 22, 16, 33, 56, 2342, tzinfo=datetime.timezone.utc)
        self.date_microseconds_float = self.date_microseconds.timestamp()
        self.date_microseconds_repr = b'1448210036.002342'

        self.offsets = {
            0: b'+0000',
            -630: b'-1030',
            800: b'+1320',
        }

    @istest
    def format_date(self):
        for date in [self.date, self.date_int]:
            self.assertEqual(identifiers.format_date(date),
                             self.date_repr)

        for date in [self.date_microseconds,
                     self.date_microseconds_float]:
            self.assertEqual(identifiers.format_date(date),
                             self.date_microseconds_repr)

    @istest
    def format_offset(self):
        for offset, res in self.offsets.items():
            self.assertEqual(identifiers.format_offset(offset), res)
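The offset table encodes a number of minutes as a git-style `±HHMM` string: -630 minutes is -(10*60 + 30), hence `b'-1030'`. A minimal sketch of that conversion, assuming only what the test data shows; this is an illustration, not the swh.model code:

```python
# Sketch of git-style utc-offset formatting (±HHMM) from minutes,
# matching the three pairs in self.offsets above.
def format_offset_sketch(minutes):
    sign = '+' if minutes >= 0 else '-'
    hours, mins = divmod(abs(minutes), 60)
    return ('%s%02d%02d' % (sign, hours, mins)).encode()


assert format_offset_sketch(0) == b'+0000'
assert format_offset_sketch(-630) == b'-1030'
assert format_offset_sketch(800) == b'+1320'
```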
class ContentIdentifier(unittest.TestCase):
    def setUp(self):
        self.content = {
            'status': 'visible',
            'length': 5,
            'data': b'1984\n',
            'ctime': datetime.datetime(2015, 11, 22, 16, 33, 56,
                                       tzinfo=datetime.timezone.utc),
        }

        self.content_id = hashutil.hash_data(self.content['data'])

    @istest
    def content_identifier(self):
        self.assertEqual(identifiers.content_identifier(self.content),
                         self.content_id)


class DirectoryIdentifier(unittest.TestCase):
    def setUp(self):
        self.directory = {
            'id': 'c2e41aae41ac17bd4a650770d6ee77f62e52235b',
            'entries': [
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'README',
                    'target': '37ec8ea2110c0b7a32fbb0e872f6e7debbf95e21'
                },
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'Rakefile',
                    'target': '3bb0e8592a41ae3185ee32266c860714980dbed7'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'app',
                    'target': '61e6e867f5d7ba3b40540869bc050b0c4fed9e95'
                },
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'1.megabyte',
                    'target': '7c2b2fbdd57d6765cdc9d84c2d7d333f11be7fb3'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'config',
                    'target': '591dfe784a2e9ccc63aaba1cb68a765734310d98'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'public',
                    'target': '9588bf4522c2b4648bfd1c61d175d1f88c1ad4a5'
                },
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'development.sqlite3',
                    'target': 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'doc',
                    'target': '154705c6aa1c8ead8c99c7915373e3c44012057f'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'db',
                    'target': '85f157bdc39356b7bc7de9d0099b4ced8b3b382c'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'log',
                    'target': '5e3d3941c51cce73352dff89c805a304ba96fffe'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'script',
                    'target': '1b278423caf176da3f3533592012502aa10f566c'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'test',
                    'target': '035f0437c080bfd8711670b3e8677e686c69c763'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'vendor',
                    'target': '7c0dc9ad978c1af3f9a4ce061e50f5918bd27138'
                },
                {
                    'type': 'rev',
                    'perms': 57344,
                    'name': b'will_paginate',
                    'target': '3d531e169db92a16a9a8974f0ae6edf52e52659e'
                }
            ],
        }

        self.empty_directory = {
            'id': '4b825dc642cb6eb9a060e54bf8d69288fbee4904',
            'entries': [],
        }

    @istest
    def dir_identifier(self):
        self.assertEqual(
            identifiers.directory_identifier(self.directory),
            self.directory['id'])

    @istest
    def dir_identifier_empty_directory(self):
        self.assertEqual(
            identifiers.directory_identifier(self.empty_directory),
            self.empty_directory['id'])
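The directory identifier is the git tree hash: a `tree <size>\0` header followed by the serialized entries. The empty-directory case is small enough to verify with nothing but hashlib, and reproduces the well-known id asserted above:

```python
# Sketch: git tree hash of the empty directory, using only hashlib.
# An empty directory serializes to zero entry bytes, so the object is
# just the header 'tree 0\0'.
import hashlib

entries = b''
tree = ('tree %d' % len(entries)).encode() + b'\x00' + entries
print(hashlib.sha1(tree).hexdigest())
# 4b825dc642cb6eb9a060e54bf8d69288fbee4904
```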
class RevisionIdentifier(unittest.TestCase):
    def setUp(self):
        linus_tz = datetime.timezone(datetime.timedelta(minutes=-420))

+        gpgsig = b'''\
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1.4.13 (Darwin)
+
+iQIcBAABAgAGBQJVJcYsAAoJEBiY3kIkQRNJVAUQAJ8/XQIfMqqC5oYeEFfHOPYZ
+L7qy46bXHVBa9Qd8zAJ2Dou3IbI2ZoF6/Et89K/UggOycMlt5FKV/9toWyuZv4Po
+L682wonoxX99qvVTHo6+wtnmYO7+G0f82h+qHMErxjP+I6gzRNBvRr+SfY7VlGdK
+wikMKOMWC5smrScSHITnOq1Ews5pe3N7qDYMzK0XVZmgDoaem4RSWMJs4My/qVLN
+e0CqYWq2A22GX7sXl6pjneJYQvcAXUX+CAzp24QnPSb+Q22Guj91TcxLFcHCTDdn
+qgqMsEyMiisoglwrCbO+D+1xq9mjN9tNFWP66SQ48mrrHYTBV5sz9eJyDfroJaLP
+CWgbDTgq6GzRMehHT3hXfYS5NNatjnhkNISXR7pnVP/obIi/vpWh5ll6Gd8q26z+
+a/O41UzOaLTeNI365MWT4/cnXohVLRG7iVJbAbCxoQmEgsYMRc/pBAzWJtLfcB2G
+jdTswYL6+MUdL8sB9pZ82D+BP/YAdHe69CyTu1lk9RT2pYtI/kkfjHubXBCYEJSG
++VGllBbYG6idQJpyrOYNRJyrDi9yvDJ2W+S0iQrlZrxzGBVGTB/y65S8C+2WTBcE
+lf1Qb5GDsQrZWgD+jtWTywOYHtCBwyCKSAXxSARMbNPeak9WPlcW/Jmu+fUcMe2x
+dg1KdHOa34shrKDaOVzW
+=od6m
+-----END PGP SIGNATURE-----'''
+
        self.revision = {
            'id': 'bc0195aad0daa2ad5b0d76cce22b167bc3435590',
            'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
            'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
            'author': {
                'name': b'Linus Torvalds',
                'email': b'torvalds@linux-foundation.org',
            },
            'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
                                      tzinfo=linus_tz),
            'committer': {
                'name': b'Linus Torvalds',
                'email': b'torvalds@linux-foundation.org',
            },
            'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
                                                tzinfo=linus_tz),
            'message': b'Linux 4.2-rc2\n',
        }

+        self.revision_none_metadata = {
+            'id': 'bc0195aad0daa2ad5b0d76cce22b167bc3435590',
+            'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
+            'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
+            'author': {
+                'name': b'Linus Torvalds',
+                'email': b'torvalds@linux-foundation.org',
+            },
+            'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
+                                      tzinfo=linus_tz),
+            'committer': {
+                'name': b'Linus Torvalds',
+                'email': b'torvalds@linux-foundation.org',
+            },
+            'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
+                                                tzinfo=linus_tz),
+            'message': b'Linux 4.2-rc2\n',
+            'metadata': None,
+        }
+
        self.synthetic_revision = {
            'id': b'\xb2\xa7\xe1&\x04\x92\xe3D\xfa\xb3\xcb\xf9\x1b\xc1<\x91'
                  b'\xe0T&\xfd',
            'author': {
                'name': b'Software Heritage',
                'email': b'robot@softwareheritage.org',
            },
            'date': {
                'timestamp': 1437047495.0,
                'offset': 0,
+                'negative_utc': False,
            },
            'type': 'tar',
            'committer': {
                'name': b'Software Heritage',
                'email': b'robot@softwareheritage.org',
            },
            'committer_date': 1437047495,
            'synthetic': True,
            'parents': [None],
            'message': b'synthetic revision message\n',
            'directory': b'\xd1\x1f\x00\xa6\xa0\xfe\xa6\x05SA\xd2U\x84\xb5\xa9'
                         b'e\x16\xc0\xd2\xb8',
            'metadata': {'original_artifact': [
                {'archive_type': 'tar',
                 'name': 'gcc-5.2.0.tar.bz2',
                 'sha1_git': '39d281aff934d44b439730057e55b055e206a586',
                 'sha1': 'fe3f5390949d47054b613edc36c557eb1d51c18e',
                 'sha256': '5f835b04b5f7dd4f4d2dc96190ec1621b8d89f'
                           '2dc6f638f9f8bc1b1014ba8cad'}]},
        }

+        # cat commit.txt | git hash-object -t commit --stdin
+        self.revision_with_extra_headers = {
+            'id': '010d34f384fa99d047cdd5e2f41e56e5c2feee45',
+            'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
+            'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
+            'author': {
+                'name': b'Linus Torvalds',
+                'email': b'torvalds@linux-foundation.org',
+                'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
+            },
+            'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
+                                      tzinfo=linus_tz),
+            'committer': {
+                'name': b'Linus Torvalds',
+                'email': b'torvalds@linux-foundation.org',
+                'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
+            },
+            'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
+                                                tzinfo=linus_tz),
+            'message': b'Linux 4.2-rc2\n',
+            'metadata': {
+                'extra_headers': [
+                    ['svn-repo-uuid', '046f1af7-66c2-d61b-5410-ce57b7db7bff'],
+                    ['svn-revision', 10],
+                ]
+            }
+        }
+
+        self.revision_with_gpgsig = {
+            'id': '44cc742a8ca17b9c279be4cc195a93a6ef7a320e',
+            'directory': 'b134f9b7dc434f593c0bab696345548b37de0558',
+            'parents': ['689664ae944b4692724f13b709a4e4de28b54e57',
+                        'c888305e1efbaa252d01b4e5e6b778f865a97514'],
+            'author': {
+                'name': b'Jiang Xin',
+                'email': b'worldhello.net@gmail.com',
+                'fullname': b'Jiang Xin <worldhello.net@gmail.com>',
+            },
+            'date': {
+                'timestamp': '1428538899',
+                'offset': 480,
+            },
+            'committer': {
+                'name': b'Jiang Xin',
+                'email': b'worldhello.net@gmail.com',
+            },
+            'committer_date': {
+                'timestamp': '1428538899',
+                'offset': 480,
+            },
+            'metadata': {
+                'extra_headers': [
+                    ['gpgsig', gpgsig],
+                ],
+            },
+            'message': b'''Merge branch 'master' of git://github.com/alexhenrie/git-po
+
+* 'master' of git://github.com/alexhenrie/git-po:
+  l10n: ca.po: update translation
+'''
+        }
+
+        self.revision_no_message = {
+            'id': '4cfc623c9238fa92c832beed000ce2d003fd8333',
+            'directory': 'b134f9b7dc434f593c0bab696345548b37de0558',
+            'parents': ['689664ae944b4692724f13b709a4e4de28b54e57',
+                        'c888305e1efbaa252d01b4e5e6b778f865a97514'],
+            'author': {
+                'name': b'Jiang Xin',
+                'email': b'worldhello.net@gmail.com',
+                'fullname': b'Jiang Xin <worldhello.net@gmail.com>',
+            },
+            'date': {
+                'timestamp': '1428538899',
+                'offset': 480,
+            },
+            'committer': {
+                'name': b'Jiang Xin',
+                'email': b'worldhello.net@gmail.com',
+            },
+            'committer_date': {
+                'timestamp': '1428538899',
+                'offset': 480,
+            },
+            'message': None,
+        }
+
+        self.revision_empty_message = {
+            'id': '7442cd78bd3b4966921d6a7f7447417b7acb15eb',
+            'directory': 'b134f9b7dc434f593c0bab696345548b37de0558',
+            'parents': ['689664ae944b4692724f13b709a4e4de28b54e57',
+                        'c888305e1efbaa252d01b4e5e6b778f865a97514'],
+            'author': {
+                'name': b'Jiang Xin',
+                'email': b'worldhello.net@gmail.com',
+                'fullname': b'Jiang Xin <worldhello.net@gmail.com>',
+            },
+            'date': {
+                'timestamp': '1428538899',
+                'offset': 480,
+            },
+            'committer': {
+                'name': b'Jiang Xin',
+                'email': b'worldhello.net@gmail.com',
+            },
+            'committer_date': {
+                'timestamp': '1428538899',
+                'offset': 480,
+            },
+            'message': b'',
+        }
+
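The id asserted in `revision_with_extra_headers` (see its `# cat commit.txt | git hash-object` comment) should be reproducible by hashing the serialized commit with `hashutil.hash_git_data`: the extra headers sit between the committer line and the message. The author/committer lines below are assembled from that fixture's fields (1436739030 is the epoch encoding of its date at -0700):

```python
# Sketch: reproduce the revision_with_extra_headers id by hashing the
# serialized commit text, built from the fixture's own field values.
from swh.model import hashutil

commit = b'''tree 85a74718d377195e1efd0843ba4f3260bad4fe07
parent 01e2d0627a9a6edb24c37db45db5ecb31e9de808
author Linus Torvalds <torvalds@linux-foundation.org> 1436739030 -0700
committer Linus Torvalds <torvalds@linux-foundation.org> 1436739030 -0700
svn-repo-uuid 046f1af7-66c2-d61b-5410-ce57b7db7bff
svn-revision 10

Linux 4.2-rc2
'''

assert hashutil.hash_to_hex(
    hashutil.hash_git_data(commit, 'commit')) == \
    '010d34f384fa99d047cdd5e2f41e56e5c2feee45'
```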
+        self.revision_only_fullname = {
+            'id': '010d34f384fa99d047cdd5e2f41e56e5c2feee45',
+            'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
+            'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
+            'author': {
+                'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
+            },
+            'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
+                                      tzinfo=linus_tz),
+            'committer': {
+                'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
+            },
+            'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
+                                                tzinfo=linus_tz),
+            'message': b'Linux 4.2-rc2\n',
+            'metadata': {
+                'extra_headers': [
+                    ['svn-repo-uuid', '046f1af7-66c2-d61b-5410-ce57b7db7bff'],
+                    ['svn-revision', 10],
+                ]
+            }
+        }
+
    @istest
    def revision_identifier(self):
        self.assertEqual(
            identifiers.revision_identifier(self.revision),
            identifiers.identifier_to_str(self.revision['id']),
        )

+    @istest
+    def revision_identifier_none_metadata(self):
+        self.assertEqual(
+            identifiers.revision_identifier(self.revision_none_metadata),
+            identifiers.identifier_to_str(self.revision_none_metadata['id']),
+        )
+
    @istest
    def revision_identifier_synthetic(self):
        self.assertEqual(
            identifiers.revision_identifier(self.synthetic_revision),
            identifiers.identifier_to_str(self.synthetic_revision['id']),
        )

+    @istest
+    def revision_identifier_with_extra_headers(self):
+        self.assertEqual(
+            identifiers.revision_identifier(
+                self.revision_with_extra_headers),
+            identifiers.identifier_to_str(
+                self.revision_with_extra_headers['id']),
+        )
+
+    @istest
+    def revision_identifier_with_gpgsig(self):
+        self.assertEqual(
+            identifiers.revision_identifier(
+                self.revision_with_gpgsig),
+            identifiers.identifier_to_str(
+                self.revision_with_gpgsig['id']),
+        )
+
+    @istest
+    def revision_identifier_no_message(self):
+        self.assertEqual(
+            identifiers.revision_identifier(
+                self.revision_no_message),
+            identifiers.identifier_to_str(
+                self.revision_no_message['id']),
+        )
+
+    @istest
+    def revision_identifier_empty_message(self):
+        self.assertEqual(
+            identifiers.revision_identifier(
+                self.revision_empty_message),
+            identifiers.identifier_to_str(
+                self.revision_empty_message['id']),
+        )
+
+    @istest
+    def revision_identifier_only_fullname(self):
+        self.assertEqual(
+            identifiers.revision_identifier(
+                self.revision_only_fullname),
+            identifiers.identifier_to_str(
+                self.revision_only_fullname['id']),
+        )
+
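Both the synthetic revision above and the `release_negative_utc` fixture below carry a `negative_utc` flag alongside a zero offset. A sketch of why such a flag is needed at all, assuming (as the fixtures suggest) that it selects between git's `+0000` and `-0000` renderings, which an integer offset of 0 cannot distinguish on its own:

```python
# Sketch of offset rendering with a negative_utc flag; illustration
# only, under the assumption stated in the lead-in above.
def offset_bytes(offset, negative_utc=False):
    if offset == 0 and negative_utc:
        # git allows '-0000', which carries a sign that offset=0 loses
        return b'-0000'
    sign = '+' if offset >= 0 else '-'
    hours, mins = divmod(abs(offset), 60)
    return ('%s%02d%02d' % (sign, hours, mins)).encode()


assert offset_bytes(0) == b'+0000'
assert offset_bytes(0, negative_utc=True) == b'-0000'
```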
class ReleaseIdentifier(unittest.TestCase):
    def setUp(self):
        linus_tz = datetime.timezone(datetime.timedelta(minutes=-420))

        self.release = {
            'id': '2b10839e32c4c476e9d94492756bb1a3e1ec4aa8',
            'target': b't\x1b"R\xa5\xe1Ml`\xa9\x13\xc7z`\x99\xab\xe7:\x85J',
            'target_type': 'revision',
            'name': b'v2.6.14',
            'author': {
                'name': b'Linus Torvalds',
                'email': b'torvalds@g5.osdl.org',
            },
            'date': datetime.datetime(2005, 10, 27, 17, 2, 33,
                                      tzinfo=linus_tz),
            'message': b'''\
Linux 2.6.14 release
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.1 (GNU/Linux)

iD8DBQBDYWq6F3YsRnbiHLsRAmaeAJ9RCez0y8rOBbhSv344h86l/VVcugCeIhO1
wdLOnvj91G4wxYqrvThthbE=
=7VeT
-----END PGP SIGNATURE-----
''',
            'synthetic': False,
        }

        self.release_no_author = {
            'id': b'&y\x1a\x8b\xcf\x0em3\xf4:\xefv\x82\xbd\xb5U#mV\xde',
            'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab',
            'target_type': 'revision',
            'name': b'v2.6.12',
            'message': b'''\
This is the final 2.6.12 release
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.2.4 (GNU/Linux)

iD8DBQBCsykyF3YsRnbiHLsRAvPNAJ482tCZwuxp/bJRz7Q98MHlN83TpACdHr37
o6X/3T+vm8K3bf3driRr34c=
=sBHn
-----END PGP SIGNATURE-----
''',
            'synthetic': False,
        }

+        self.release_no_message = {
+            'id': 'b6f4f446715f7d9543ef54e41b62982f0db40045',
+            'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab',
+            'target_type': 'revision',
+            'name': b'v2.6.12',
+            'author': {
+                'name': b'Linus Torvalds',
+                'email': b'torvalds@g5.osdl.org',
+            },
+            'date': datetime.datetime(2005, 10, 27, 17, 2, 33,
+                                      tzinfo=linus_tz),
+            'message': None,
+        }
+
+        self.release_empty_message = {
+            'id': '71a0aea72444d396575dc25ac37fec87ee3c6492',
+            'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab',
+            'target_type': 'revision',
+            'name': b'v2.6.12',
+            'author': {
+                'name': b'Linus Torvalds',
+                'email': b'torvalds@g5.osdl.org',
+            },
+            'date': datetime.datetime(2005, 10, 27, 17, 2, 33,
+                                      tzinfo=linus_tz),
+            'message': b'',
+        }
+
+        self.release_negative_utc = {
+            'id': '97c8d2573a001f88e72d75f596cf86b12b82fd01',
+            'name': b'20081029',
+            'target': '54e9abca4c77421e2921f5f156c9fe4a9f7441c7',
+            'target_type': 'revision',
+            'date': {
+                'timestamp': 1225281976.0,
+                'offset': 0,
+                'negative_utc': True,
+            },
+            'author': {
+                'name': b'Otavio Salvador',
+                'email': b'otavio@debian.org',
+                'id': 17640,
+            },
+            'synthetic': False,
+            'message': b'tagging version 20081029\n\nr56558\n',
+        }
+
    @istest
    def release_identifier(self):
        self.assertEqual(
            identifiers.release_identifier(self.release),
            identifiers.identifier_to_str(self.release['id'])
        )

    @istest
    def release_identifier_no_author(self):
        self.assertEqual(
            identifiers.release_identifier(self.release_no_author),
            identifiers.identifier_to_str(self.release_no_author['id'])
        )
+
+    @istest
+    def release_identifier_no_message(self):
+        self.assertEqual(
+            identifiers.release_identifier(self.release_no_message),
+            identifiers.identifier_to_str(self.release_no_message['id'])
+        )
+
+    @istest
+    def release_identifier_empty_message(self):
+        self.assertEqual(
+            identifiers.release_identifier(self.release_empty_message),
+            identifiers.identifier_to_str(self.release_empty_message['id'])
+        )
+
+    @istest
+    def release_identifier_negative_utc(self):
+        self.assertEqual(
+            identifiers.release_identifier(self.release_negative_utc),
+            identifiers.identifier_to_str(self.release_negative_utc['id'])
+        )
diff --git a/version.txt b/version.txt
index d1537af..faf6cf6 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-v0.0.4-0-g696d23e
\ No newline at end of file
+v0.0.5-0-g0fbf74e
\ No newline at end of file