diff --git a/PKG-INFO b/PKG-INFO index a1069b6..d5b37a9 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,10 +1,10 @@ Metadata-Version: 1.0 Name: swh.model -Version: 0.0.20 +Version: 0.0.21 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN diff --git a/bin/swh-hash-file b/bin/swh-hash-file new file mode 100755 index 0000000..c30de78 --- /dev/null +++ b/bin/swh-hash-file @@ -0,0 +1,32 @@ +#!/usr/bin/python3 + +# Copyright (C) 2018 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import sys + +from swh.model.from_disk import Content +from swh.model.hashutil import hash_to_hex + + +HASH_ALGO = 'sha1_git' + + +def hash_file(fname): + return hash_to_hex(Content.from_file(path=fname.encode()).hash) + + +def main(fnames): + for f in fnames: + print(f, hash_file(f), sep='\t') + + +if __name__ == '__main__': + fnames = sys.argv[1:] + if not fnames: + print('Usage: swh-hash-file FILE...') + sys.exit(2) + + main(fnames) diff --git a/docs/data-model.rst b/docs/data-model.rst index f365f9f..1693ae4 100644 --- a/docs/data-model.rst +++ b/docs/data-model.rst @@ -1,13 +1,13 @@ .. _data-model: -Software Heritage data model -============================ +Data model +========== TODO Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. 
diff --git a/docs/index.rst b/docs/index.rst index db68071..74756e7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,22 +1,23 @@ .. _swh-model: Software Heritage - Development Documentation ============================================= .. toctree:: :maxdepth: 2 :caption: Contents: Overview -------- * :ref:`data-model` +* :ref:`persistent-identifiers` Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` diff --git a/docs/persistent-identifiers.rst b/docs/persistent-identifiers.rst new file mode 100644 index 0000000..c796a80 --- /dev/null +++ b/docs/persistent-identifiers.rst @@ -0,0 +1,145 @@ +.. _persistent-identifiers: + +Persistent identifiers +====================== + +You can point to objects present in the Software Heritage archive by the means +of **persistent identifiers** that are guaranteed to remain stable (persistent) +over time. Their syntax, meaning, and usage is described below. Note that they +are identifiers and not URLs, even though an URL-based resolver for Software +Heritage persistent identifiers is also provided. + +A persistent identifier can point to any software artifact (or "object") +available in the Software Heritage archive. Objects come in different types, +and most notably: + +* contents +* directories +* revisions +* releases +* snapshots + +Each object is identified by an intrinsic, type-specific object identifier that +is embedded in its persistent identifier as described below. Object identifiers +are strong cryptographic hashes computed on the entire set of object properties +to form a `Merkle structure `_. + +See :ref:`data-model` for an overview of object types and how they are linked +together. See :py:mod:`swh.model.identifiers` for details on how intrinsic +object identifiers are computed. + + +Syntax +------ + +Syntactically, persistent identifiers are generated by the ```` +entry point of the grammar: + +.. 
code-block:: bnf + + <identifier> ::= "swh" ":" <scheme_version> ":" <object_type> ":" <object_id> ; + <scheme_version> ::= "1" ; + <object_type> ::= + "snp" (* snapshot *) + | "rel" (* release *) + | "rev" (* revision *) + | "dir" (* directory *) + | "cnt" (* content *) + ; + <object_id> ::= 40 * <hex_digit> ; (* intrinsic object id, as hex-encoded SHA1 *) + <hex_digit> ::= "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" + | "a" | "b" | "c" | "d" | "e" | "f" ; + + +Semantics +--------- + +``:`` is used as separator between the logical parts of identifiers. The +``swh`` prefix makes explicit that these identifiers are related to *SoftWare +Heritage*. ``1`` (``<scheme_version>``) is the current version of this +identifier *scheme*; future editions will use higher version numbers, possibly +breaking backward compatibility (but without breaking the resolvability of +identifiers that conform to previous versions of the scheme). + +A persistent identifier points to a single object, whose type is explicitly +captured by ``<object_type>``: + +* ``snp`` identifiers point to **snapshots**, +* ``rel`` to **releases**, +* ``rev`` to **revisions**, +* ``dir`` to **directories**, +* ``cnt`` to **contents**. 
+ +The actual object pointed to is identified by the intrinsic identifier +````, which is a hex-encoded (using lowercase ASCII characters) SHA1 +computed on the content and metadata of the object itself, as follows: + +* for **snapshots**, intrinsic identifiers are computed as per + :py:func:`swh.model.identifiers.snapshot_identifier` + +* for **releases**, as per + :py:func:`swh.model.identifiers.release_identifier` + +* for **revisions**, as per + :py:func:`swh.model.identifiers.revision_identifier` + +* for **directories**, as per + :py:func:`swh.model.identifiers.directory_identifier` + +* for **contents**, the intrinsic identifier is the ``sha1_git`` hash of the + multiple hashes returned by + :py:func:`swh.model.identifiers.content_identifier`, i.e., the SHA1 of a byte + sequence obtained by juxtaposing the ASCII string ``"blob"`` (without + quotes), a space, the length of the content as decimal digits, a NULL byte, + and the actual content of the file. + + +Git compatibility +~~~~~~~~~~~~~~~~~ + +Intrinsic object identifiers for contents, directories, revisions, and releases +are, at present, compatible with the `Git `_ way of +`computing identifiers +`_ for its objects. +A Software Heritage content identifier will be identical to a Git blob +identifier of any file with the same content, a Software Heritage revision +identifier will be identical to the corresponding Git commit identifier, etc. +This is not the case for snapshot identifiers as Git doesn't have a +corresponding object type. + +Note that Git compatibility is incidental and is not guaranteed to be +maintained in future versions of this scheme (or Git). 
+ + +Examples +-------- + +* ``swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`` points to the content + of a file containing the full text of the GPL3 license +* ``swh:1:dir:d198bc9d7a6bcf6db04f476d29314f157507d505`` points to a directory + containing the source code of the Darktable photography application as it was + at some point on 4 May 2017 +* ``swh:1:rev:309cf2674ee7a0749978cf8265ab91a60aea0f7d`` points to a commit in + the development history of Darktable, dated 16 January 2017, that added + undo/redo supports for masks +* ``swh:1:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f`` points to Darktable + release 2.3.0, dated 24 December 2016 +* ``swh:1:snp:c7c108084bc0bf3d81436bf980b46e98bd338453`` points to a snapshot + of the entire Darktable Git repository taken on 4 May 2017 from GitHub + + +Resolution +---------- + +Persistent identifiers can be resolved using the Software Heritage Web +application (see :py:mod:`swh.web`). + +In particular, the ``/browse/`` endpoint can be given a persistent identifier +and will lead to the browsing page of the corresponding object, like this: +``https://archive.softwareheritage.org/browse/``. 
For example: + +* ``_ +* ``_ +* ``_ +* ``_ +* ``_ diff --git a/swh.model.egg-info/PKG-INFO b/swh.model.egg-info/PKG-INFO index a1069b6..d5b37a9 100644 --- a/swh.model.egg-info/PKG-INFO +++ b/swh.model.egg-info/PKG-INFO @@ -1,10 +1,10 @@ Metadata-Version: 1.0 Name: swh.model -Version: 0.0.20 +Version: 0.0.21 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN diff --git a/swh.model.egg-info/SOURCES.txt b/swh.model.egg-info/SOURCES.txt index bd2e46a..d870641 100644 --- a/swh.model.egg-info/SOURCES.txt +++ b/swh.model.egg-info/SOURCES.txt @@ -1,54 +1,56 @@ .gitignore AUTHORS LICENSE MANIFEST.in Makefile Makefile.local README-dev.md requirements-swh.txt requirements.txt setup.py version.txt bin/git-revhash +bin/swh-hash-file bin/swh-revhash debian/changelog debian/compat debian/control debian/copyright debian/rules debian/source/format docs/.gitignore docs/Makefile docs/conf.py docs/data-model.rst docs/index.rst +docs/persistent-identifiers.rst docs/_static/.placeholder docs/_templates/.placeholder swh/__init__.py swh.model.egg-info/PKG-INFO swh.model.egg-info/SOURCES.txt swh.model.egg-info/dependency_links.txt swh.model.egg-info/requires.txt swh.model.egg-info/top_level.txt swh/model/__init__.py swh/model/exceptions.py swh/model/from_disk.py swh/model/hashutil.py swh/model/identifiers.py swh/model/merkle.py swh/model/validators.py swh/model/fields/__init__.py swh/model/fields/compound.py swh/model/fields/hashes.py swh/model/fields/simple.py swh/model/tests/__init__.py swh/model/tests/generate_testdata_from_disk.py swh/model/tests/test_from_disk.py swh/model/tests/test_hashutil.py swh/model/tests/test_identifiers.py swh/model/tests/test_merkle.py swh/model/tests/test_validators.py swh/model/tests/fields/__init__.py swh/model/tests/fields/test_compound.py swh/model/tests/fields/test_hashes.py 
swh/model/tests/fields/test_simple.py \ No newline at end of file diff --git a/swh/model/hashutil.py b/swh/model/hashutil.py index 96905d9..0dfbdc3 100644 --- a/swh/model/hashutil.py +++ b/swh/model/hashutil.py @@ -1,274 +1,275 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """Module in charge of hashing function definitions. This is the base module use to compute swh's hashes. Only a subset of hashing algorithms is supported as defined in the ALGORITHMS set. Any provided algorithms not in that list will result in a ValueError explaining the error. This modules defines the following hashing functions: - hash_file: Hash the contents of the given file object with the given algorithms (defaulting to DEFAULT_ALGORITHMS if none provided). - hash_data: Hash the given binary blob with the given algorithms (defaulting to DEFAULT_ALGORITHMS if none provided). - hash_path: Hash the contents of the file at the given path with the given algorithms (defaulting to DEFAULT_ALGORITHMS if none provided). """ import binascii import functools import hashlib import os from io import BytesIO ALGORITHMS = set(['sha1', 'sha256', 'sha1_git', 'blake2s256', 'blake2b512']) """Hashing algorithms supported by this module""" DEFAULT_ALGORITHMS = set(['sha1', 'sha256', 'sha1_git', 'blake2s256']) """Algorithms computed by default when calling the functions from this module. Subset of :const:`ALGORITHMS`. 
""" HASH_BLOCK_SIZE = 32768 """Block size for streaming hash computations made in this module""" # Load blake2 hashes from pyblake2 if they are not available in the builtin # hashlib __pyblake2_hashes = {'blake2s256': 'blake2s', 'blake2b512': 'blake2b'} __cache = hashlib.__builtin_constructor_cache for __hash, __pyblake2_fn in __pyblake2_hashes.items(): if __hash not in hashlib.algorithms_available: import pyblake2 __cache[__hash] = getattr(pyblake2, __pyblake2_fn) def _new_git_hash(base_algo, git_type, length): """Initialize a digest object (as returned by python's hashlib) for the requested algorithm, and feed it with the header for a git object of the given type and length. The header for hashing a git object consists of: - The type of the object (encoded in ASCII) - One ASCII space (\x20) - The length of the object (decimal encoded in ASCII) - One NUL byte Args: base_algo (str from :const:`ALGORITHMS`): a hashlib-supported algorithm git_type: the type of the git object (supposedly one of 'blob', 'commit', 'tag', 'tree') length: the length of the git object you're encoding Returns: a hashutil.hash object """ h = hashlib.new(base_algo) git_header = '%s %d\0' % (git_type, length) h.update(git_header.encode('ascii')) return h def _new_hash(algo, length=None): """Initialize a digest object (as returned by python's hashlib) for the requested algorithm. See the constant ALGORITHMS for the list of supported algorithms. If a git-specific hashing algorithm is requested (e.g., "sha1_git"), the hashing object will be pre-fed with the needed header; for this to work, length must be given. Args: algo (str): a hashing algorithm (one of ALGORITHMS) length (int): the length of the hashed payload (needed for git-specific algorithms) Returns: a hashutil.hash object Raises: ValueError if algo is unknown, or length is missing for a git-specific hash. 
""" if algo not in ALGORITHMS: raise ValueError( 'Unexpected hashing algorithm %s, expected one of %s' % (algo, ', '.join(sorted(ALGORITHMS)))) if algo.endswith('_git'): if length is None: raise ValueError('Missing length for git hashing algorithm') base_algo = algo[:-4] return _new_git_hash(base_algo, 'blob', length) return hashlib.new(algo) def hash_file(fobj, length=None, algorithms=DEFAULT_ALGORITHMS, chunk_cb=None): """Hash the contents of the given file object with the given algorithms. Args: fobj: a file-like object length: the length of the contents of the file-like object (for the - git-specific algorithms) - algorithms: the hashing algorithms used + git-specific algorithms) + algorithms: the hashing algorithms to be used, as an iterable over + strings Returns: a dict mapping each algorithm to a bytes digest. Raises: ValueError if algorithms contains an unknown hash algorithm. """ hashes = {algo: _new_hash(algo, length) for algo in algorithms} while True: chunk = fobj.read(HASH_BLOCK_SIZE) if not chunk: break for hash in hashes.values(): hash.update(chunk) if chunk_cb: chunk_cb(chunk) return {algo: hash.digest() for algo, hash in hashes.items()} def hash_path(path, algorithms=DEFAULT_ALGORITHMS, chunk_cb=None): """Hash the contents of the file at the given path with the given algorithms. Args: path: the path of the file to hash algorithms: the hashing algorithms used chunk_cb: a callback Returns: a dict mapping each algorithm to a bytes digest. Raises: ValueError if algorithms contains an unknown hash algorithm. OSError on file access error """ length = os.path.getsize(path) with open(path, 'rb') as fobj: hash = hash_file(fobj, length, algorithms, chunk_cb) hash['length'] = length return hash def hash_data(data, algorithms=DEFAULT_ALGORITHMS, with_length=False): """Hash the given binary blob with the given algorithms. 
Args: data (bytes): raw content to hash algorithms (list): the hashing algorithms used with_length (bool): add the length key in the resulting dict Returns: a dict mapping each algorithm to a bytes digest Raises: TypeError if data does not support the buffer interface. ValueError if algorithms contains an unknown hash algorithm. """ fobj = BytesIO(data) length = len(data) data = hash_file(fobj, length, algorithms) if with_length: data['length'] = length return data def hash_git_data(data, git_type, base_algo='sha1'): """Hash the given data as a git object of type git_type. Args: data: a bytes object git_type: the git object type base_algo: the base hashing algorithm used (default: sha1) Returns: a dict mapping each algorithm to a bytes digest Raises: ValueError if the git_type is unexpected. """ git_object_types = {'blob', 'tree', 'commit', 'tag', 'snapshot'} if git_type not in git_object_types: raise ValueError('Unexpected git object type %s, expected one of %s' % (git_type, ', '.join(sorted(git_object_types)))) h = _new_git_hash(base_algo, git_type, len(data)) h.update(data) return h.digest() @functools.lru_cache() def hash_to_hex(hash): """Converts a hash (in hex or bytes form) to its hexadecimal ascii form Args: hash (str or bytes): a :class:`bytes` hash or a :class:`str` containing the hexadecimal form of the hash Returns: str: the hexadecimal form of the hash """ if isinstance(hash, str): return hash return binascii.hexlify(hash).decode('ascii') @functools.lru_cache() def hash_to_bytehex(hash): """Converts a hash to its hexadecimal bytes representation Args: hash (bytes): a :class:`bytes` hash Returns: bytes: the hexadecimal form of the hash, as :class:`bytes` """ return binascii.hexlify(hash) @functools.lru_cache() def hash_to_bytes(hash): """Converts a hash (in hex or bytes form) to its raw bytes form Args: hash (str or bytes): a :class:`bytes` hash or a :class:`str` containing the hexadecimal form of the hash Returns: bytes: the :class:`bytes` form of the 
hash """ if isinstance(hash, bytes): return hash return bytes.fromhex(hash) @functools.lru_cache() def bytehex_to_hash(hex): """Converts a hexadecimal bytes representation of a hash to that hash Args: hash (bytes): a :class:`bytes` containing the hexadecimal form of the hash encoded in ascii Returns: bytes: the :class:`bytes` form of the hash """ return hash_to_bytes(hex.decode()) diff --git a/swh/model/identifiers.py b/swh/model/identifiers.py index b4ec15d..51d2d2e 100644 --- a/swh/model/identifiers.py +++ b/swh/model/identifiers.py @@ -1,586 +1,655 @@ -# Copyright (C) 2015 The Software Heritage developers +# Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import binascii import datetime from functools import lru_cache from .hashutil import hash_data, hash_git_data, DEFAULT_ALGORITHMS +from .hashutil import hash_to_hex + + +SNAPSHOT = 'snapshot' +REVISION = 'revision' +RELEASE = 'release' +DIRECTORY = 'directory' +CONTENT = 'content' @lru_cache() def identifier_to_bytes(identifier): """Convert a text identifier to bytes. Args: identifier: an identifier, either a 40-char hexadecimal string or a bytes object of length 20 Returns: The length 20 bytestring corresponding to the given identifier Raises: ValueError: if the identifier is of an unexpected type or length. 
""" if isinstance(identifier, bytes): if len(identifier) != 20: raise ValueError( 'Wrong length for bytes identifier %s, expected 20' % len(identifier)) return identifier if isinstance(identifier, str): if len(identifier) != 40: raise ValueError( 'Wrong length for str identifier %s, expected 40' % len(identifier)) return bytes.fromhex(identifier) raise ValueError('Wrong type for identifier %s, expected bytes or str' % identifier.__class__.__name__) @lru_cache() def identifier_to_str(identifier): """Convert an identifier to an hexadecimal string. Args: identifier: an identifier, either a 40-char hexadecimal string or a bytes object of length 20 Returns: The length 40 string corresponding to the given identifier, hex encoded Raises: ValueError if the identifier is of an unexpected type or length. """ if isinstance(identifier, str): if len(identifier) != 40: raise ValueError( 'Wrong length for str identifier %s, expected 40' % len(identifier)) return identifier if isinstance(identifier, bytes): if len(identifier) != 20: raise ValueError( 'Wrong length for bytes identifier %s, expected 20' % len(identifier)) return binascii.hexlify(identifier).decode() raise ValueError('Wrong type for identifier %s, expected bytes or str' % identifier.__class__.__name__) def content_identifier(content): """Return the intrinsic identifier for a content. A content's identifier is the sha1, sha1_git and sha256 checksums of its data. Args: content: a content conforming to the Software Heritage schema Returns: A dictionary with all the hashes for the data Raises: KeyError: if the content doesn't have a data member. 
""" return hash_data(content['data'], DEFAULT_ALGORITHMS) def _sort_key(entry): """The sorting key for tree entries""" if entry['type'] == 'dir': return entry['name'] + b'/' else: return entry['name'] @lru_cache() def _perms_to_bytes(perms): """Convert the perms value to its bytes representation""" oc = oct(perms)[2:] return oc.encode('ascii') def escape_newlines(snippet): """Escape the newlines present in snippet according to git rules. New lines in git manifests are escaped by indenting the next line by one space. """ if b'\n' in snippet: return b'\n '.join(snippet.split(b'\n')) else: return snippet def directory_identifier(directory): """Return the intrinsic identifier for a directory. A directory's identifier is the tree sha1 à la git of a directory listing, using the following algorithm, which is equivalent to the git algorithm for trees: 1. Entries of the directory are sorted using the name (or the name with '/' appended for directory entries) as key, in bytes order. 2. For each entry of the directory, the following bytes are output: - the octal representation of the permissions for the entry (stored in the 'perms' member), which is a representation of the entry type: - b'100644' (int 33188) for files - b'100755' (int 33261) for executable files - b'120000' (int 40960) for symbolic links - b'40000' (int 16384) for directories - b'160000' (int 57344) for references to revisions - an ascii space (b'\x20') - the entry's name (as raw bytes), stored in the 'name' member - a null byte (b'\x00') - the 20 byte long identifier of the object pointed at by the entry, stored in the 'target' member: - for files or executable files: their blob sha1_git - for symbolic links: the blob sha1_git of a file containing the link destination - for directories: their intrinsic identifier - for revisions: their intrinsic identifier (Note that there is no separator between entries) """ components = [] for entry in sorted(directory['entries'], key=_sort_key): components.extend([ 
_perms_to_bytes(entry['perms']), b'\x20', entry['name'], b'\x00', identifier_to_bytes(entry['target']), ]) return identifier_to_str(hash_git_data(b''.join(components), 'tree')) def format_date(date): """Convert a date object into an UTC timestamp encoded as ascii bytes. Git stores timestamps as an integer number of seconds since the UNIX epoch. However, Software Heritage stores timestamps as an integer number of microseconds (postgres type "datetime with timezone"). Therefore, we print timestamps with no microseconds as integers, and timestamps with microseconds as floating point values. We elide the trailing zeroes from microsecond values, to "future-proof" our representation if we ever need more precision in timestamps. """ if not isinstance(date, dict): raise ValueError('format_date only supports dicts, %r received' % date) seconds = date.get('seconds', 0) microseconds = date.get('microseconds', 0) if not microseconds: return str(seconds).encode() else: float_value = ('%d.%06d' % (seconds, microseconds)) return float_value.rstrip('0').encode() @lru_cache() def format_offset(offset, negative_utc=None): """Convert an integer number of minutes into an offset representation. The offset representation is [+-]hhmm where: - hh is the number of hours; - mm is the number of minutes. A null offset is represented as +0000. """ if offset < 0 or offset == 0 and negative_utc: sign = '-' else: sign = '+' hours = abs(offset) // 60 minutes = abs(offset) % 60 t = '%s%02d%02d' % (sign, hours, minutes) return t.encode() def normalize_timestamp(time_representation): """Normalize a time representation for processing by Software Heritage This function supports a numeric timestamp (representing a number of seconds since the UNIX epoch, 1970-01-01 at 00:00 UTC), a :obj:`datetime.datetime` object (with timezone information), or a normalized Software Heritage time representation (idempotency). 
Args: time_representation: the representation of a timestamp Returns: dict: a normalized dictionary with three keys: - timestamp: a dict with two optional keys: - seconds: the integral number of seconds since the UNIX epoch - microseconds: the integral number of microseconds - offset: the timezone offset as a number of minutes relative to UTC - negative_utc: a boolean representing whether the offset is -0000 when offset = 0. """ if time_representation is None: return None negative_utc = False if isinstance(time_representation, dict): ts = time_representation['timestamp'] if isinstance(ts, dict): seconds = ts.get('seconds', 0) microseconds = ts.get('microseconds', 0) elif isinstance(ts, int): seconds = ts microseconds = 0 else: raise ValueError( 'normalize_timestamp received non-integer timestamp member:' ' %r' % ts) offset = time_representation['offset'] if 'negative_utc' in time_representation: negative_utc = time_representation['negative_utc'] elif isinstance(time_representation, datetime.datetime): seconds = int(time_representation.timestamp()) microseconds = time_representation.microsecond utcoffset = time_representation.utcoffset() if utcoffset is None: raise ValueError( 'normalize_timestamp received datetime without timezone: %s' % time_representation) # utcoffset is an integer number of minutes seconds_offset = utcoffset.total_seconds() offset = int(seconds_offset) // 60 elif isinstance(time_representation, int): seconds = time_representation microseconds = 0 offset = 0 else: raise ValueError( 'normalize_timestamp received non-integer timestamp:' ' %r' % time_representation) return { 'timestamp': { 'seconds': seconds, 'microseconds': microseconds, }, 'offset': offset, 'negative_utc': negative_utc, } def format_author(author): """Format the specification of an author. An author is either a byte string (passed unchanged), or a dict with three keys, fullname, name and email. 
If the fullname exists, return it; if it doesn't, we construct a fullname using the following heuristics: if the name value is None, we return the email in angle brackets, else, we return the name, a space, and the email in angle brackets. """ if isinstance(author, bytes) or author is None: return author if 'fullname' in author: return author['fullname'] ret = [] if author['name'] is not None: ret.append(author['name']) if author['email'] is not None: ret.append(b''.join([b'<', author['email'], b'>'])) return b' '.join(ret) def format_author_line(header, author, date_offset): """Format a an author line according to git standards. An author line has three components: - a header, describing the type of author (author, committer, tagger) - a name and email, which is an arbitrary bytestring - optionally, a timestamp with UTC offset specification The author line is formatted thus:: `header` `name and email`[ `timestamp` `utc_offset`] The timestamp is encoded as a (decimal) number of seconds since the UNIX epoch (1970-01-01 at 00:00 UTC). As an extension to the git format, we support fractional timestamps, using a dot as the separator for the decimal part. The utc offset is a number of minutes encoded as '[+-]HHMM'. Note some tools can pass a negative offset corresponding to the UTC timezone ('-0000'), which is valid and is encoded as such. For convenience, this function returns the whole line with its trailing newline. Args: header: the header of the author line (one of 'author', 'committer', 'tagger') author: an author specification (dict with two bytes values: name and email, or byte value) date_offset: a normalized date/time representation as returned by :func:`normalize_timestamp`. 
Returns: the newline-terminated byte string containing the author line """ ret = [header.encode(), b' ', escape_newlines(format_author(author))] date_offset = normalize_timestamp(date_offset) if date_offset is not None: date_f = format_date(date_offset['timestamp']) offset_f = format_offset(date_offset['offset'], date_offset['negative_utc']) ret.extend([b' ', date_f, b' ', offset_f]) ret.append(b'\n') return b''.join(ret) def revision_identifier(revision): """Return the intrinsic identifier for a revision. The fields used for the revision identifier computation are: - directory - parents - author - author_date - committer - committer_date - metadata -> extra_headers - message A revision's identifier is the 'git'-checksum of a commit manifest constructed as follows (newlines are a single ASCII newline character):: tree [for each parent in parents] parent [end for each parents] author committer [for each key, value in extra_headers] [end for each extra_headers] The directory identifier is the ascii representation of its hexadecimal encoding. Author and committer are formatted with the :func:`format_author` function. Dates are formatted with the :func:`format_offset` function. Extra headers are an ordered list of [key, value] pairs. Keys are strings and get encoded to utf-8 for identifier computation. Values are either byte strings, unicode strings (that get encoded to utf-8), or integers (that get encoded to their utf-8 decimal representation). Multiline extra header values are escaped by indenting the continuation lines with one ascii space. If the message is None, the manifest ends with the last header. Else, the message is appended to the headers after an empty line. The checksum of the full manifest is computed using the 'commit' git object type. 
""" components = [ b'tree ', identifier_to_str(revision['directory']).encode(), b'\n', ] for parent in revision['parents']: if parent: components.extend([ b'parent ', identifier_to_str(parent).encode(), b'\n', ]) components.extend([ format_author_line('author', revision['author'], revision['date']), format_author_line('committer', revision['committer'], revision['committer_date']), ]) # Handle extra headers metadata = revision.get('metadata') if not metadata: metadata = {} for key, value in metadata.get('extra_headers', []): # Integer values: decimal representation if isinstance(value, int): value = str(value).encode('utf-8') # Unicode string values: utf-8 encoding if isinstance(value, str): value = value.encode('utf-8') # encode the key to utf-8 components.extend([key.encode('utf-8'), b' ', escape_newlines(value), b'\n']) if revision['message'] is not None: components.extend([b'\n', revision['message']]) commit_raw = b''.join(components) return identifier_to_str(hash_git_data(commit_raw, 'commit')) def target_type_to_git(target_type): """Convert a software heritage target type to a git object type""" return { 'content': b'blob', 'directory': b'tree', 'revision': b'commit', 'release': b'tag', }[target_type] def release_identifier(release): """Return the intrinsic identifier for a release.""" components = [ b'object ', identifier_to_str(release['target']).encode(), b'\n', b'type ', target_type_to_git(release['target_type']), b'\n', b'tag ', release['name'], b'\n', ] if 'author' in release and release['author']: components.append( format_author_line('tagger', release['author'], release['date']) ) if release['message'] is not None: components.extend([b'\n', release['message']]) return identifier_to_str(hash_git_data(b''.join(components), 'tag')) def snapshot_identifier(snapshot, *, ignore_unresolved=False): """Return the intrinsic identifier for a snapshot. Snapshots are a set of named branches, which are pointers to objects at any level of the Software Heritage DAG. 
As well as pointing to other objects in the Software Heritage DAG, branches can also be *alias*es, in which case their target is the name of another branch in the same snapshot, or *dangling*, in which case the target is unknown (and represented by the ``None`` value). A snapshot identifier is a salted sha1 (using the git hashing algorithm with the ``snapshot`` object type) of a manifest following the algorithm: 1. Branches are sorted using the name as key, in bytes order. 2. For each branch, the following bytes are output: - the type of the branch target: - ``content``, ``directory``, ``revision``, ``release`` or ``snapshot`` for the corresponding entries in the DAG; - ``alias`` for branches referencing another branch; - ``dangling`` for dangling branches - an ascii space (``\\x20``) - the branch name (as raw bytes) - a null byte (``\\x00``) - the length of the target identifier, as an ascii-encoded decimal number (``20`` for current intrinisic identifiers, ``0`` for dangling branches, the length of the target branch name for branch aliases) - a colon (``:``) - the identifier of the target object pointed at by the branch, stored in the 'target' member: - for contents: their *sha1_git* - for directories, revisions, releases or snapshots: their intrinsic identifier - for branch aliases, the name of the target branch (as raw bytes) - for dangling branches, the empty string Note that, akin to directory manifests, there is no separator between entries. Because of symbolic branches, identifiers are of arbitrary length but are length-encoded to avoid ambiguity. Args: snapshot (dict): the snapshot of which to compute the identifier. A single entry is needed, ``'branches'``, which is itself a :class:`dict` mapping each branch to its target ignore_unresolved (bool): if `True`, ignore unresolved branch aliases. 
Returns: str: the intrinsic identifier for `snapshot` """ unresolved = [] lines = [] for name, target in sorted(snapshot['branches'].items()): if not target: target_type = b'dangling' target_id = b'' elif target['target_type'] == 'alias': target_type = b'alias' target_id = target['target'] if target_id not in snapshot['branches'] or target_id == name: unresolved.append((name, target_id)) else: target_type = target['target_type'].encode() target_id = identifier_to_bytes(target['target']) lines.extend([ target_type, b'\x20', name, b'\x00', ('%d:' % len(target_id)).encode(), target_id, ]) if unresolved and not ignore_unresolved: raise ValueError('Branch aliases unresolved: %s' % ', '.join('%s -> %s' % (name, target) for name, target in unresolved)) return identifier_to_str(hash_git_data(b''.join(lines), 'snapshot')) + + +def persistent_identifier(type, object, version=1): + """Compute persistent identifier (stable over time) as per + documentation. + + Documentation: + https://docs.softwareheritage.org/devel/swh-model/persistent-identifiers.html # noqa + + Args: + type (str): Object's type + object (str): Object's dict representation + version (int): persistent identifier version (default to 1) + + Returns: + Persistent identifier as string. + + """ + _map = { + SNAPSHOT: { + 'short_name': 'snp', + 'key_id': 'id' + }, + RELEASE: { + 'short_name': 'rel', + 'key_id': 'id' + }, + REVISION: { + 'short_name': 'rev', + 'key_id': 'id' + }, + DIRECTORY: { + 'short_name': 'dir', + 'key_id': 'id' + }, + CONTENT: { + 'short_name': 'cnt', + 'key_id': 'sha1_git' + }, + } + o = _map[type] + _hash = hash_to_hex(object[o['key_id']]) + return 'swh:%s:%s:%s' % (version, o['short_name'], _hash) + + +PERSISTENT_IDENTIFIER_KEYS = [ + 'namespace', 'scheme_version', 'object_type', 'object_id'] + + +def parse_persistent_identifier(persistent_id): + """Parse swh's persistent identifier scheme. 
+ + Args: + persistent_id (str): A persistent identifier + + Returns: + dict with keys namespace, scheme_version, object_type, object_id + + """ + data = persistent_id.split(':') + return dict(zip(PERSISTENT_IDENTIFIER_KEYS, data)) diff --git a/swh/model/tests/test_identifiers.py b/swh/model/tests/test_identifiers.py index 4a56b0c..26dc01e 100644 --- a/swh/model/tests/test_identifiers.py +++ b/swh/model/tests/test_identifiers.py @@ -1,770 +1,823 @@ -# Copyright (C) 2015-2017 The Software Heritage developers +# Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import binascii import datetime import unittest from nose.tools import istest from swh.model import hashutil, identifiers +from swh.model.identifiers import SNAPSHOT, RELEASE, REVISION, DIRECTORY +from swh.model.identifiers import CONTENT + class UtilityFunctionsIdentifier(unittest.TestCase): def setUp(self): self.str_id = 'c2e41aae41ac17bd4a650770d6ee77f62e52235b' self.bytes_id = binascii.unhexlify(self.str_id) self.bad_type_id = object() @istest def identifier_to_bytes(self): for id in [self.str_id, self.bytes_id]: self.assertEqual(identifiers.identifier_to_bytes(id), self.bytes_id) # wrong length with self.assertRaises(ValueError) as cm: identifiers.identifier_to_bytes(id[:-2]) self.assertIn('length', str(cm.exception)) with self.assertRaises(ValueError) as cm: identifiers.identifier_to_bytes(self.bad_type_id) self.assertIn('type', str(cm.exception)) @istest def identifier_to_str(self): for id in [self.str_id, self.bytes_id]: self.assertEqual(identifiers.identifier_to_str(id), self.str_id) # wrong length with self.assertRaises(ValueError) as cm: identifiers.identifier_to_str(id[:-2]) self.assertIn('length', str(cm.exception)) with self.assertRaises(ValueError) as cm: 
identifiers.identifier_to_str(self.bad_type_id) self.assertIn('type', str(cm.exception)) class UtilityFunctionsDateOffset(unittest.TestCase): def setUp(self): self.dates = { b'1448210036': { 'seconds': 1448210036, 'microseconds': 0, }, b'1448210036.002342': { 'seconds': 1448210036, 'microseconds': 2342, }, b'1448210036.12': { 'seconds': 1448210036, 'microseconds': 120000, } } self.broken_dates = [ 1448210036.12, ] self.offsets = { 0: b'+0000', -630: b'-1030', 800: b'+1320', } @istest def format_date(self): for date_repr, date in self.dates.items(): self.assertEqual(identifiers.format_date(date), date_repr) @istest def format_date_fail(self): for date in self.broken_dates: with self.assertRaises(ValueError): identifiers.format_date(date) @istest def format_offset(self): for offset, res in self.offsets.items(): self.assertEqual(identifiers.format_offset(offset), res) class ContentIdentifier(unittest.TestCase): def setUp(self): self.content = { 'status': 'visible', 'length': 5, 'data': b'1984\n', 'ctime': datetime.datetime(2015, 11, 22, 16, 33, 56, tzinfo=datetime.timezone.utc), } self.content_id = hashutil.hash_data(self.content['data']) @istest def content_identifier(self): self.assertEqual(identifiers.content_identifier(self.content), self.content_id) class DirectoryIdentifier(unittest.TestCase): def setUp(self): self.directory = { 'id': 'c2e41aae41ac17bd4a650770d6ee77f62e52235b', 'entries': [ { 'type': 'file', 'perms': 33188, 'name': b'README', 'target': '37ec8ea2110c0b7a32fbb0e872f6e7debbf95e21' }, { 'type': 'file', 'perms': 33188, 'name': b'Rakefile', 'target': '3bb0e8592a41ae3185ee32266c860714980dbed7' }, { 'type': 'dir', 'perms': 16384, 'name': b'app', 'target': '61e6e867f5d7ba3b40540869bc050b0c4fed9e95' }, { 'type': 'file', 'perms': 33188, 'name': b'1.megabyte', 'target': '7c2b2fbdd57d6765cdc9d84c2d7d333f11be7fb3' }, { 'type': 'dir', 'perms': 16384, 'name': b'config', 'target': '591dfe784a2e9ccc63aaba1cb68a765734310d98' }, { 'type': 'dir', 'perms': 16384, 
'name': b'public', 'target': '9588bf4522c2b4648bfd1c61d175d1f88c1ad4a5' }, { 'type': 'file', 'perms': 33188, 'name': b'development.sqlite3', 'target': 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391' }, { 'type': 'dir', 'perms': 16384, 'name': b'doc', 'target': '154705c6aa1c8ead8c99c7915373e3c44012057f' }, { 'type': 'dir', 'perms': 16384, 'name': b'db', 'target': '85f157bdc39356b7bc7de9d0099b4ced8b3b382c' }, { 'type': 'dir', 'perms': 16384, 'name': b'log', 'target': '5e3d3941c51cce73352dff89c805a304ba96fffe' }, { 'type': 'dir', 'perms': 16384, 'name': b'script', 'target': '1b278423caf176da3f3533592012502aa10f566c' }, { 'type': 'dir', 'perms': 16384, 'name': b'test', 'target': '035f0437c080bfd8711670b3e8677e686c69c763' }, { 'type': 'dir', 'perms': 16384, 'name': b'vendor', 'target': '7c0dc9ad978c1af3f9a4ce061e50f5918bd27138' }, { 'type': 'rev', 'perms': 57344, 'name': b'will_paginate', 'target': '3d531e169db92a16a9a8974f0ae6edf52e52659e' } ], } self.empty_directory = { 'id': '4b825dc642cb6eb9a060e54bf8d69288fbee4904', 'entries': [], } @istest def dir_identifier(self): self.assertEqual( identifiers.directory_identifier(self.directory), self.directory['id']) @istest def dir_identifier_empty_directory(self): self.assertEqual( identifiers.directory_identifier(self.empty_directory), self.empty_directory['id']) class RevisionIdentifier(unittest.TestCase): def setUp(self): linus_tz = datetime.timezone(datetime.timedelta(minutes=-420)) gpgsig = b'''\ -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.13 (Darwin) iQIcBAABAgAGBQJVJcYsAAoJEBiY3kIkQRNJVAUQAJ8/XQIfMqqC5oYeEFfHOPYZ L7qy46bXHVBa9Qd8zAJ2Dou3IbI2ZoF6/Et89K/UggOycMlt5FKV/9toWyuZv4Po L682wonoxX99qvVTHo6+wtnmYO7+G0f82h+qHMErxjP+I6gzRNBvRr+SfY7VlGdK wikMKOMWC5smrScSHITnOq1Ews5pe3N7qDYMzK0XVZmgDoaem4RSWMJs4My/qVLN e0CqYWq2A22GX7sXl6pjneJYQvcAXUX+CAzp24QnPSb+Q22Guj91TcxLFcHCTDdn qgqMsEyMiisoglwrCbO+D+1xq9mjN9tNFWP66SQ48mrrHYTBV5sz9eJyDfroJaLP CWgbDTgq6GzRMehHT3hXfYS5NNatjnhkNISXR7pnVP/obIi/vpWh5ll6Gd8q26z+ 
a/O41UzOaLTeNI365MWT4/cnXohVLRG7iVJbAbCxoQmEgsYMRc/pBAzWJtLfcB2G jdTswYL6+MUdL8sB9pZ82D+BP/YAdHe69CyTu1lk9RT2pYtI/kkfjHubXBCYEJSG +VGllBbYG6idQJpyrOYNRJyrDi9yvDJ2W+S0iQrlZrxzGBVGTB/y65S8C+2WTBcE lf1Qb5GDsQrZWgD+jtWTywOYHtCBwyCKSAXxSARMbNPeak9WPlcW/Jmu+fUcMe2x dg1KdHOa34shrKDaOVzW =od6m -----END PGP SIGNATURE-----''' self.revision = { 'id': 'bc0195aad0daa2ad5b0d76cce22b167bc3435590', 'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07', 'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'], 'author': { 'name': b'Linus Torvalds', 'email': b'torvalds@linux-foundation.org', }, 'date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'committer': { 'name': b'Linus Torvalds', 'email': b'torvalds@linux-foundation.org', }, 'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'message': b'Linux 4.2-rc2\n', } self.revision_none_metadata = { 'id': 'bc0195aad0daa2ad5b0d76cce22b167bc3435590', 'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07', 'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'], 'author': { 'name': b'Linus Torvalds', 'email': b'torvalds@linux-foundation.org', }, 'date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'committer': { 'name': b'Linus Torvalds', 'email': b'torvalds@linux-foundation.org', }, 'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'message': b'Linux 4.2-rc2\n', 'metadata': None, } self.synthetic_revision = { 'id': b'\xb2\xa7\xe1&\x04\x92\xe3D\xfa\xb3\xcb\xf9\x1b\xc1<\x91' b'\xe0T&\xfd', 'author': { 'name': b'Software Heritage', 'email': b'robot@softwareheritage.org', }, 'date': { 'timestamp': {'seconds': 1437047495}, 'offset': 0, 'negative_utc': False, }, 'type': 'tar', 'committer': { 'name': b'Software Heritage', 'email': b'robot@softwareheritage.org', }, 'committer_date': 1437047495, 'synthetic': True, 'parents': [None], 'message': b'synthetic revision message\n', 'directory': b'\xd1\x1f\x00\xa6\xa0\xfe\xa6\x05SA\xd2U\x84\xb5\xa9' 
b'e\x16\xc0\xd2\xb8', 'metadata': {'original_artifact': [ {'archive_type': 'tar', 'name': 'gcc-5.2.0.tar.bz2', 'sha1_git': '39d281aff934d44b439730057e55b055e206a586', 'sha1': 'fe3f5390949d47054b613edc36c557eb1d51c18e', 'sha256': '5f835b04b5f7dd4f4d2dc96190ec1621b8d89f' '2dc6f638f9f8bc1b1014ba8cad'}]}, } # cat commit.txt | git hash-object -t commit --stdin self.revision_with_extra_headers = { 'id': '010d34f384fa99d047cdd5e2f41e56e5c2feee45', 'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07', 'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'], 'author': { 'name': b'Linus Torvalds', 'email': b'torvalds@linux-foundation.org', 'fullname': b'Linus Torvalds ', }, 'date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'committer': { 'name': b'Linus Torvalds', 'email': b'torvalds@linux-foundation.org', 'fullname': b'Linus Torvalds ', }, 'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'message': b'Linux 4.2-rc2\n', 'metadata': { 'extra_headers': [ ['svn-repo-uuid', '046f1af7-66c2-d61b-5410-ce57b7db7bff'], ['svn-revision', 10], ] } } self.revision_with_gpgsig = { 'id': '44cc742a8ca17b9c279be4cc195a93a6ef7a320e', 'directory': 'b134f9b7dc434f593c0bab696345548b37de0558', 'parents': ['689664ae944b4692724f13b709a4e4de28b54e57', 'c888305e1efbaa252d01b4e5e6b778f865a97514'], 'author': { 'name': b'Jiang Xin', 'email': b'worldhello.net@gmail.com', 'fullname': b'Jiang Xin ', }, 'date': { 'timestamp': 1428538899, 'offset': 480, }, 'committer': { 'name': b'Jiang Xin', 'email': b'worldhello.net@gmail.com', }, 'committer_date': { 'timestamp': 1428538899, 'offset': 480, }, 'metadata': { 'extra_headers': [ ['gpgsig', gpgsig], ], }, 'message': b'''Merge branch 'master' of git://github.com/alexhenrie/git-po * 'master' of git://github.com/alexhenrie/git-po: l10n: ca.po: update translation ''' } self.revision_no_message = { 'id': '4cfc623c9238fa92c832beed000ce2d003fd8333', 'directory': 'b134f9b7dc434f593c0bab696345548b37de0558', 'parents': 
['689664ae944b4692724f13b709a4e4de28b54e57', 'c888305e1efbaa252d01b4e5e6b778f865a97514'], 'author': { 'name': b'Jiang Xin', 'email': b'worldhello.net@gmail.com', 'fullname': b'Jiang Xin ', }, 'date': { 'timestamp': 1428538899, 'offset': 480, }, 'committer': { 'name': b'Jiang Xin', 'email': b'worldhello.net@gmail.com', }, 'committer_date': { 'timestamp': 1428538899, 'offset': 480, }, 'message': None, } self.revision_empty_message = { 'id': '7442cd78bd3b4966921d6a7f7447417b7acb15eb', 'directory': 'b134f9b7dc434f593c0bab696345548b37de0558', 'parents': ['689664ae944b4692724f13b709a4e4de28b54e57', 'c888305e1efbaa252d01b4e5e6b778f865a97514'], 'author': { 'name': b'Jiang Xin', 'email': b'worldhello.net@gmail.com', 'fullname': b'Jiang Xin ', }, 'date': { 'timestamp': 1428538899, 'offset': 480, }, 'committer': { 'name': b'Jiang Xin', 'email': b'worldhello.net@gmail.com', }, 'committer_date': { 'timestamp': 1428538899, 'offset': 480, }, 'message': b'', } self.revision_only_fullname = { 'id': '010d34f384fa99d047cdd5e2f41e56e5c2feee45', 'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07', 'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'], 'author': { 'fullname': b'Linus Torvalds ', }, 'date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'committer': { 'fullname': b'Linus Torvalds ', }, 'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), 'message': b'Linux 4.2-rc2\n', 'metadata': { 'extra_headers': [ ['svn-repo-uuid', '046f1af7-66c2-d61b-5410-ce57b7db7bff'], ['svn-revision', 10], ] } } @istest def revision_identifier(self): self.assertEqual( identifiers.revision_identifier(self.revision), identifiers.identifier_to_str(self.revision['id']), ) @istest def revision_identifier_none_metadata(self): self.assertEqual( identifiers.revision_identifier(self.revision_none_metadata), identifiers.identifier_to_str(self.revision_none_metadata['id']), ) @istest def revision_identifier_synthetic(self): self.assertEqual( 
identifiers.revision_identifier(self.synthetic_revision), identifiers.identifier_to_str(self.synthetic_revision['id']), ) @istest def revision_identifier_with_extra_headers(self): self.assertEqual( identifiers.revision_identifier( self.revision_with_extra_headers), identifiers.identifier_to_str( self.revision_with_extra_headers['id']), ) @istest def revision_identifier_with_gpgsig(self): self.assertEqual( identifiers.revision_identifier( self.revision_with_gpgsig), identifiers.identifier_to_str( self.revision_with_gpgsig['id']), ) @istest def revision_identifier_no_message(self): self.assertEqual( identifiers.revision_identifier( self.revision_no_message), identifiers.identifier_to_str( self.revision_no_message['id']), ) @istest def revision_identifier_empty_message(self): self.assertEqual( identifiers.revision_identifier( self.revision_empty_message), identifiers.identifier_to_str( self.revision_empty_message['id']), ) @istest def revision_identifier_only_fullname(self): self.assertEqual( identifiers.revision_identifier( self.revision_only_fullname), identifiers.identifier_to_str( self.revision_only_fullname['id']), ) class ReleaseIdentifier(unittest.TestCase): def setUp(self): linus_tz = datetime.timezone(datetime.timedelta(minutes=-420)) self.release = { 'id': '2b10839e32c4c476e9d94492756bb1a3e1ec4aa8', 'target': b't\x1b"R\xa5\xe1Ml`\xa9\x13\xc7z`\x99\xab\xe7:\x85J', 'target_type': 'revision', 'name': b'v2.6.14', 'author': { 'name': b'Linus Torvalds', 'email': b'torvalds@g5.osdl.org', }, 'date': datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), 'message': b'''\ Linux 2.6.14 release -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.1 (GNU/Linux) iD8DBQBDYWq6F3YsRnbiHLsRAmaeAJ9RCez0y8rOBbhSv344h86l/VVcugCeIhO1 wdLOnvj91G4wxYqrvThthbE= =7VeT -----END PGP SIGNATURE----- ''', 'synthetic': False, } self.release_no_author = { 'id': b'&y\x1a\x8b\xcf\x0em3\xf4:\xefv\x82\xbd\xb5U#mV\xde', 'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab', 'target_type': 
'revision', 'name': b'v2.6.12', 'message': b'''\ This is the final 2.6.12 release -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.2.4 (GNU/Linux) iD8DBQBCsykyF3YsRnbiHLsRAvPNAJ482tCZwuxp/bJRz7Q98MHlN83TpACdHr37 o6X/3T+vm8K3bf3driRr34c= =sBHn -----END PGP SIGNATURE----- ''', 'synthetic': False, } self.release_no_message = { 'id': 'b6f4f446715f7d9543ef54e41b62982f0db40045', 'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab', 'target_type': 'revision', 'name': b'v2.6.12', 'author': { 'name': b'Linus Torvalds', 'email': b'torvalds@g5.osdl.org', }, 'date': datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), 'message': None, } self.release_empty_message = { 'id': '71a0aea72444d396575dc25ac37fec87ee3c6492', 'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab', 'target_type': 'revision', 'name': b'v2.6.12', 'author': { 'name': b'Linus Torvalds', 'email': b'torvalds@g5.osdl.org', }, 'date': datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), 'message': b'', } self.release_negative_utc = { 'id': '97c8d2573a001f88e72d75f596cf86b12b82fd01', 'name': b'20081029', 'target': '54e9abca4c77421e2921f5f156c9fe4a9f7441c7', 'target_type': 'revision', 'date': { 'timestamp': {'seconds': 1225281976}, 'offset': 0, 'negative_utc': True, }, 'author': { 'name': b'Otavio Salvador', 'email': b'otavio@debian.org', 'id': 17640, }, 'synthetic': False, 'message': b'tagging version 20081029\n\nr56558\n', } self.release_newline_in_author = { 'author': { 'email': b'esycat@gmail.com', 'fullname': b'Eugene Janusov\n', 'name': b'Eugene Janusov\n', }, 'date': { 'negative_utc': None, 'offset': 600, 'timestamp': { 'microseconds': 0, 'seconds': 1377480558, }, }, 'id': b'\\\x98\xf5Y\xd04\x16-\xe2->\xbe\xb9T3\xe6\xf8\x88R1', 'message': b'Release of v0.3.2.', 'name': b'0.3.2', 'synthetic': False, 'target': (b'\xc0j\xa3\xd9;x\xa2\x86\\I5\x17' b'\x000\xf8\xc2\xd79o\xd3'), 'target_type': 'revision', } @istest def release_identifier(self): self.assertEqual( 
identifiers.release_identifier(self.release), identifiers.identifier_to_str(self.release['id']) ) @istest def release_identifier_no_author(self): self.assertEqual( identifiers.release_identifier(self.release_no_author), identifiers.identifier_to_str(self.release_no_author['id']) ) @istest def release_identifier_no_message(self): self.assertEqual( identifiers.release_identifier(self.release_no_message), identifiers.identifier_to_str(self.release_no_message['id']) ) @istest def release_identifier_empty_message(self): self.assertEqual( identifiers.release_identifier(self.release_empty_message), identifiers.identifier_to_str(self.release_empty_message['id']) ) @istest def release_identifier_negative_utc(self): self.assertEqual( identifiers.release_identifier(self.release_negative_utc), identifiers.identifier_to_str(self.release_negative_utc['id']) ) @istest def release_identifier_newline_in_author(self): self.assertEqual( identifiers.release_identifier(self.release_newline_in_author), identifiers.identifier_to_str(self.release_newline_in_author['id']) ) class SnapshotIdentifier(unittest.TestCase): def setUp(self): super().setUp() self.empty = { 'id': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e', 'branches': {}, } self.dangling_branch = { 'id': 'c84502e821eb21ed84e9fd3ec40973abc8b32353', 'branches': { b'HEAD': None, }, } self.unresolved = { 'id': '84b4548ea486e4b0a7933fa541ff1503a0afe1e0', 'branches': { b'foo': { 'target': b'bar', 'target_type': 'alias', }, }, } self.all_types = { 'id': '6e65b86363953b780d92b0a928f3e8fcdd10db36', 'branches': { b'directory': { 'target': '1bd0e65f7d2ff14ae994de17a1e7fe65111dcad8', 'target_type': 'directory', }, b'content': { 'target': 'fe95a46679d128ff167b7c55df5d02356c5a1ae1', 'target_type': 'content', }, b'alias': { 'target': b'revision', 'target_type': 'alias', }, b'revision': { 'target': 'aafb16d69fd30ff58afdd69036a26047f3aebdc6', 'target_type': 'revision', }, b'release': { 'target': '7045404f3d1c54e6473c71bbb716529fbad4be24', 
'target_type': 'release', }, b'snapshot': { 'target': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e', 'target_type': 'snapshot', }, b'dangling': None, } } def test_empty_snapshot(self): self.assertEqual( identifiers.snapshot_identifier(self.empty), identifiers.identifier_to_str(self.empty['id']), ) def test_dangling_branch(self): self.assertEqual( identifiers.snapshot_identifier(self.dangling_branch), identifiers.identifier_to_str(self.dangling_branch['id']), ) def test_unresolved(self): with self.assertRaisesRegex(ValueError, "b'foo' -> b'bar'"): identifiers.snapshot_identifier(self.unresolved) def test_unresolved_force(self): self.assertEqual( identifiers.snapshot_identifier( self.unresolved, ignore_unresolved=True, ), identifiers.identifier_to_str(self.unresolved['id']), ) def test_all_types(self): self.assertEqual( identifiers.snapshot_identifier(self.all_types), identifiers.identifier_to_str(self.all_types['id']), ) + + def test_persistent_identifier(self): + _snapshot = {'id': hashutil.hash_to_bytes( + 'c7c108084bc0bf3d81436bf980b46e98bd338453')} + _release = {'id': '22ece559cc7cc2364edc5e5593d63ae8bd229f9f'} + _revision = {'id': '309cf2674ee7a0749978cf8265ab91a60aea0f7d'} + _directory = {'id': 'd198bc9d7a6bcf6db04f476d29314f157507d505'} + _content = {'sha1_git': '94a9ed024d3859793618152ea559a168bbcbb5e2'} + for full_type, _hash, expected_persistent_id, version in [ + (SNAPSHOT, _snapshot, + 'swh:1:snp:c7c108084bc0bf3d81436bf980b46e98bd338453', None), + (RELEASE, _release, + 'swh:2:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f', 2), + (REVISION, _revision, + 'swh:1:rev:309cf2674ee7a0749978cf8265ab91a60aea0f7d', None), + (DIRECTORY, _directory, + 'swh:1:dir:d198bc9d7a6bcf6db04f476d29314f157507d505', None), + (CONTENT, _content, + 'swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2', 1) + ]: + if version: + actual_value = identifiers.persistent_identifier( + full_type, _hash, version) + else: + actual_value = identifiers.persistent_identifier( + full_type, _hash) + 
+ self.assertEquals(actual_value, expected_persistent_id) + + def test_parse_persistent_identifier(self): + for pid, _type, _version, _hash in [ + ('swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2', 'cnt', + '1', '94a9ed024d3859793618152ea559a168bbcbb5e2'), + ('swh:2:dir:d198bc9d7a6bcf6db04f476d29314f157507d505', 'dir', + '2', 'd198bc9d7a6bcf6db04f476d29314f157507d505'), + ('swh:1:rev:309cf2674ee7a0749978cf8265ab91a60aea0f7d', 'rev', + '1', '309cf2674ee7a0749978cf8265ab91a60aea0f7d'), + ('swh:1:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f', 'rel', + '1', '22ece559cc7cc2364edc5e5593d63ae8bd229f9f'), + ('swh:1:snp:c7c108084bc0bf3d81436bf980b46e98bd338453', 'snp', + '1', 'c7c108084bc0bf3d81436bf980b46e98bd338453'), + ]: + expected_result = { + 'namespace': 'swh', + 'scheme_version': _version, + 'object_type': _type, + 'object_id': _hash, + } + actual_result = identifiers.parse_persistent_identifier(pid) + self.assertEquals(actual_result, expected_result) diff --git a/version.txt b/version.txt index 7b21cbd..943a41a 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -v0.0.20-0-g91d74ef \ No newline at end of file +v0.0.21-0-gbdf26f5 \ No newline at end of file