Page Menu · Home · Software Heritage

Paste visibility: No One (temporary)

diff --git a/swh/model/identifiers.py b/swh/model/identifiers.py
index f775426..e7608e9 100644
--- a/swh/model/identifiers.py
+++ b/swh/model/identifiers.py
@@ -1,788 +1,792 @@
# Copyright (C) 2015-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import binascii
import datetime
from collections import namedtuple
from functools import lru_cache
from .exceptions import ValidationError
from .fields.hashes import validate_sha1
from .hashutil import hash_data, hash_git_data, DEFAULT_ALGORITHMS
from .hashutil import hash_to_hex
# Canonical names of the Software Heritage object types; these values also
# key the persistent-identifier type map defined further down in this module.
SNAPSHOT = 'snapshot'
REVISION = 'revision'
RELEASE = 'release'
DIRECTORY = 'directory'
CONTENT = 'content'
@lru_cache()
def identifier_to_bytes(identifier):
    """Return the 20-byte form of an object identifier.

    Args:
        identifier: either a 40-character hexadecimal string, or a bytes
            object of length 20 (returned unchanged)

    Returns:
        bytes: the length-20 bytestring form of the identifier

    Raises:
        ValueError: if the identifier has an unexpected type or length.
    """
    if isinstance(identifier, bytes):
        if len(identifier) == 20:
            return identifier
        raise ValueError(
            'Wrong length for bytes identifier %s, expected 20' %
            len(identifier))
    if isinstance(identifier, str):
        if len(identifier) == 40:
            return bytes.fromhex(identifier)
        raise ValueError(
            'Wrong length for str identifier %s, expected 40' %
            len(identifier))
    raise ValueError('Wrong type for identifier %s, expected bytes or str' %
                     identifier.__class__.__name__)
@lru_cache()
def identifier_to_str(identifier):
    """Return the 40-character hexadecimal form of an object identifier.

    Args:
        identifier: either a 40-character hexadecimal string (returned
            unchanged), or a bytes object of length 20

    Returns:
        str: the length-40, hex-encoded form of the identifier

    Raises:
        ValueError: if the identifier has an unexpected type or length.
    """
    if isinstance(identifier, str):
        if len(identifier) == 40:
            return identifier
        raise ValueError(
            'Wrong length for str identifier %s, expected 40' %
            len(identifier))
    if isinstance(identifier, bytes):
        if len(identifier) == 20:
            return binascii.hexlify(identifier).decode()
        raise ValueError(
            'Wrong length for bytes identifier %s, expected 20' %
            len(identifier))
    raise ValueError('Wrong type for identifier %s, expected bytes or str' %
                     identifier.__class__.__name__)
def content_identifier(content):
    """Return the intrinsic identifier for a content.

    A content's identifier is the sha1, sha1_git and sha256 checksums of its
    data.

    Args:
        content: a content conforming to the Software Heritage schema

    Returns:
        A dictionary with all the hashes for the data

    Raises:
        KeyError: if the content doesn't have a data member.
    """
    # Delegate to hashutil: hash the raw data with every default algorithm.
    return hash_data(content['data'], DEFAULT_ALGORITHMS)
def _sort_key(entry):
"""The sorting key for tree entries"""
if entry['type'] == 'dir':
return entry['name'] + b'/'
else:
return entry['name']
@lru_cache()
def _perms_to_bytes(perms):
"""Convert the perms value to its bytes representation"""
oc = oct(perms)[2:]
return oc.encode('ascii')
def escape_newlines(snippet):
    """Escape newlines in snippet according to git rules: a continuation
    line in a git manifest is marked by one leading ascii space."""
    if b'\n' not in snippet:
        return snippet
    return snippet.replace(b'\n', b'\n ')
def directory_identifier(directory):
    """Return the intrinsic identifier for a directory.

    A directory's identifier is the tree sha1 à la git of its listing,
    computed with the git 'tree' algorithm:

    1. Entries are sorted by name, in bytes order, with a '/' appended to
       the names of sub-directory entries.
    2. For each entry, the following bytes are concatenated (with no
       separator between entries):

       - the octal permissions ('perms' member), which encode the entry
         type: b'100644' files, b'100755' executables, b'120000' symlinks,
         b'40000' directories, b'160000' revision references;
       - an ascii space (b'\\x20');
       - the raw entry name ('name' member);
       - a null byte (b'\\x00');
       - the 20-byte identifier of the target object ('target' member):
         blob sha1_git for files/executables/symlink contents, intrinsic
         identifier for directories and revisions.
    """
    manifest = b''.join(
        _perms_to_bytes(entry['perms']) + b'\x20' + entry['name'] +
        b'\x00' + identifier_to_bytes(entry['target'])
        for entry in sorted(directory['entries'], key=_sort_key)
    )
    return identifier_to_str(hash_git_data(manifest, 'tree'))
def format_date(date):
    """Encode a normalized timestamp dict as an ascii git date.

    Git stores timestamps as integer seconds since the UNIX epoch, whereas
    Software Heritage keeps microsecond precision. Timestamps without
    microseconds are therefore printed as plain integers; timestamps with
    microseconds are printed as floating point values with trailing zeroes
    elided, to future-proof the representation should more precision ever
    be needed.

    Raises:
        ValueError: if date is not a dict.
    """
    if not isinstance(date, dict):
        raise ValueError('format_date only supports dicts, %r received' % date)
    seconds = date.get('seconds', 0)
    microseconds = date.get('microseconds', 0)
    if microseconds:
        return ('%d.%06d' % (seconds, microseconds)).rstrip('0').encode()
    return str(seconds).encode()
@lru_cache()
def format_offset(offset, negative_utc=None):
    """Render a UTC offset in minutes as the git '[+-]HHMM' bytestring.

    A zero offset renders as b'+0000', unless negative_utc is truthy, in
    which case it renders as b'-0000' (some tools emit a negative UTC
    timezone, which must round-trip).
    """
    sign = '-' if offset < 0 or (offset == 0 and negative_utc) else '+'
    hours, minutes = divmod(abs(offset), 60)
    return ('%s%02d%02d' % (sign, hours, minutes)).encode()
def normalize_timestamp(time_representation):
    """Normalize a time representation for processing by Software Heritage.

    Accepted inputs are: a numeric UNIX timestamp (seconds since
    1970-01-01 00:00 UTC), a timezone-aware :obj:`datetime.datetime`, or
    an already-normalized Software Heritage dict (idempotent). ``None``
    passes through unchanged.

    Args:
        time_representation: the representation of a timestamp

    Returns:
        dict: a normalized dictionary with three keys:

            - timestamp: dict with optional 'seconds' and 'microseconds'
            - offset: timezone offset in minutes relative to UTC
            - negative_utc: whether a zero offset means '-0000'

    Raises:
        ValueError: on a naive datetime or an unsupported input type.
    """
    if time_representation is None:
        return None
    negative_utc = False
    if isinstance(time_representation, dict):
        ts = time_representation['timestamp']
        if isinstance(ts, dict):
            seconds = ts.get('seconds', 0)
            microseconds = ts.get('microseconds', 0)
        elif isinstance(ts, int):
            seconds, microseconds = ts, 0
        else:
            raise ValueError(
                'normalize_timestamp received non-integer timestamp member:'
                ' %r' % ts)
        offset = time_representation['offset']
        negative_utc = time_representation.get('negative_utc', False)
    elif isinstance(time_representation, datetime.datetime):
        utcoffset = time_representation.utcoffset()
        if utcoffset is None:
            raise ValueError(
                'normalize_timestamp received datetime without timezone: %s' %
                time_representation)
        seconds = int(time_representation.timestamp())
        microseconds = time_representation.microsecond
        # utcoffset is a timedelta; convert it to whole minutes
        offset = int(utcoffset.total_seconds()) // 60
    elif isinstance(time_representation, int):
        seconds, microseconds, offset = time_representation, 0, 0
    else:
        raise ValueError(
            'normalize_timestamp received non-integer timestamp:'
            ' %r' % time_representation)
    return {
        'timestamp': {
            'seconds': seconds,
            'microseconds': microseconds,
        },
        'offset': offset,
        'negative_utc': negative_utc,
    }
def format_author(author):
    """Format an author specification as a git byte string.

    Byte strings and ``None`` pass through unchanged. Dicts use their
    'fullname' member when present; otherwise a fullname is assembled from
    the name (when not None), a space, and the email in angle brackets
    (when not None).
    """
    if author is None or isinstance(author, bytes):
        return author
    if 'fullname' in author:
        return author['fullname']
    parts = []
    if author['name'] is not None:
        parts.append(author['name'])
    if author['email'] is not None:
        parts.append(b'<' + author['email'] + b'>')
    return b' '.join(parts)
def format_author_line(header, author, date_offset):
    """Format one git author line, newline-terminated.

    The line has the shape::

        <header> <name and email>[ <timestamp> <utc offset>]

    where header is one of 'author', 'committer' or 'tagger'. The
    timestamp is decimal seconds since the UNIX epoch (with a fractional
    part, as an extension to git, when microseconds are present) and the
    offset is '[+-]HHMM'; both are omitted when date_offset is None. A
    '-0000' offset (negative UTC) is preserved as such.

    Args:
        header: the header of the author line ('author', 'committer',
            'tagger')
        author: an author specification (dict with name/email byte values,
            or a raw byte value)
        date_offset: anything accepted by :func:`normalize_timestamp`

    Returns:
        the newline-terminated byte string containing the author line
    """
    line = [header.encode(), b' ', escape_newlines(format_author(author))]
    normalized = normalize_timestamp(date_offset)
    if normalized is not None:
        line += [
            b' ', format_date(normalized['timestamp']),
            b' ', format_offset(normalized['offset'],
                                normalized['negative_utc']),
        ]
    line.append(b'\n')
    return b''.join(line)
def revision_identifier(revision):
    """Return the intrinsic identifier for a revision.

    The fields used for the revision identifier computation are:

    - directory
    - parents
    - author
    - author_date
    - committer
    - committer_date
    - metadata -> extra_headers
    - message

    A revision's identifier is the 'git'-checksum of a commit manifest
    constructed as follows (newlines are a single ASCII newline
    character)::

        tree <directory identifier>
        [for each parent in parents]
        parent <parent identifier>
        [end for each parents]
        author <author> <author_date>
        committer <committer> <committer_date>
        [for each key, value in extra_headers]
        <key> <encoded value>
        [end for each extra_headers]

        <message>

    The directory identifier is the ascii representation of its hexadecimal
    encoding. Author and committer are formatted with the
    :func:`format_author` function; dates are formatted with the
    :func:`format_offset` function.

    Extra headers are an ordered list of [key, value] pairs. Keys are
    strings and get encoded to utf-8 for identifier computation. Values are
    either byte strings, unicode strings (that get encoded to utf-8), or
    integers (that get encoded to their utf-8 decimal representation).
    Multiline extra header values are escaped by indenting the continuation
    lines with one ascii space.

    If the message is None, the manifest ends with the last header. Else,
    the message is appended to the headers after an empty line.

    The checksum of the full manifest is computed using the 'commit' git
    object type.
    """
    components = [
        b'tree ', identifier_to_str(revision['directory']).encode(), b'\n',
    ]
    # None parents (e.g. synthetic revisions) are skipped entirely
    for parent in revision['parents']:
        if parent:
            components.extend([
                b'parent ', identifier_to_str(parent).encode(), b'\n',
            ])
    components.extend([
        format_author_line('author', revision['author'], revision['date']),
        format_author_line('committer', revision['committer'],
                           revision['committer_date']),
    ])
    # Handle extra headers; a None metadata member is treated like a
    # missing one
    metadata = revision.get('metadata')
    if not metadata:
        metadata = {}
    for key, value in metadata.get('extra_headers', []):
        # Integer values: decimal representation
        if isinstance(value, int):
            value = str(value).encode('utf-8')
        # Unicode string values: utf-8 encoding
        if isinstance(value, str):
            value = value.encode('utf-8')
        # encode the key to utf-8
        components.extend([key.encode('utf-8'), b' ',
                           escape_newlines(value), b'\n'])
    if revision['message'] is not None:
        # the message is separated from the headers by an empty line
        components.extend([b'\n', revision['message']])
    commit_raw = b''.join(components)
    return identifier_to_str(hash_git_data(commit_raw, 'commit'))
def target_type_to_git(target_type):
    """Map a Software Heritage target type to its git object type name.

    Raises:
        KeyError: for target types with no git equivalent.
    """
    git_object_types = {
        'content': b'blob',
        'directory': b'tree',
        'revision': b'commit',
        'release': b'tag',
    }
    return git_object_types[target_type]
def release_identifier(release):
    """Return the intrinsic identifier for a release.

    The identifier is the git 'tag' checksum of a manifest made of the
    target object, its type, the tag name, an optional tagger line and an
    optional message (separated from the headers by an empty line).
    """
    manifest = [
        b'object ', identifier_to_str(release['target']).encode(), b'\n',
        b'type ', target_type_to_git(release['target_type']), b'\n',
        b'tag ', release['name'], b'\n',
    ]
    # the tagger line is only emitted for a present, non-empty author
    if release.get('author'):
        manifest.append(
            format_author_line('tagger', release['author'], release['date']))
    if release['message'] is not None:
        manifest += [b'\n', release['message']]
    return identifier_to_str(hash_git_data(b''.join(manifest), 'tag'))
def snapshot_identifier(snapshot, *, ignore_unresolved=False):
    """Return the intrinsic identifier for a snapshot.

    Snapshots are a set of named branches, which are pointers to objects
    at any level of the Software Heritage DAG. Branches can also be
    *alias*es (their target is the name of another branch in the same
    snapshot) or *dangling* (their target is ``None``).

    A snapshot identifier is a salted sha1 (using the git hashing
    algorithm with the ``snapshot`` object type) of a manifest built as
    follows:

    1. Branches are sorted using the name as key, in bytes order.
    2. For each branch, the following bytes are output:

       - the branch target type (``content``, ``directory``, ``revision``,
         ``release``, ``snapshot``, ``alias`` or ``dangling``);
       - an ascii space (``\\x20``);
       - the raw branch name;
       - a null byte (``\\x00``);
       - the length of the target identifier, as an ascii decimal number,
         followed by a colon (``:``);
       - the target identifier itself: the object's sha1_git/intrinsic
         identifier, the target branch name for aliases, or the empty
         string for dangling branches.

    Akin to directory manifests there is no separator between entries;
    the length prefix disambiguates the variable-size identifiers.

    Args:
        snapshot (dict): the snapshot of which to compute the identifier,
            with a single needed entry ``'branches'``: a dict mapping each
            branch name to its target
        ignore_unresolved (bool): if `True`, ignore unresolved branch
            aliases instead of raising

    Returns:
        str: the intrinsic identifier for `snapshot`

    Raises:
        ValueError: if an alias targets a missing branch or itself, and
            ignore_unresolved is False.
    """
    unresolved = []
    lines = []
    for name, target in sorted(snapshot['branches'].items()):
        if not target:
            # dangling branch: no target, empty identifier
            target_type = b'dangling'
            target_id = b''
        elif target['target_type'] == 'alias':
            # alias: the "identifier" is the raw target branch name
            target_type = b'alias'
            target_id = target['target']
            # an alias must point at an existing, distinct branch
            if target_id not in snapshot['branches'] or target_id == name:
                unresolved.append((name, target_id))
        else:
            target_type = target['target_type'].encode()
            target_id = identifier_to_bytes(target['target'])
        # length-prefixed entry; no separator between entries
        lines.extend([
            target_type, b'\x20', name, b'\x00',
            ('%d:' % len(target_id)).encode(), target_id,
        ])
    if unresolved and not ignore_unresolved:
        raise ValueError('Branch aliases unresolved: %s' %
                         ', '.join('%s -> %s' % (name, target)
                                   for name, target in unresolved))
    return identifier_to_str(hash_git_data(b''.join(lines), 'snapshot'))
# Per object type: the short tag used in persistent identifiers and the
# dict key under which the object's intrinsic identifier is stored.
_object_type_map = {
    SNAPSHOT: {
        'short_name': 'snp',
        'key_id': 'id'
    },
    RELEASE: {
        'short_name': 'rel',
        'key_id': 'id'
    },
    REVISION: {
        'short_name': 'rev',
        'key_id': 'id'
    },
    DIRECTORY: {
        'short_name': 'dir',
        'key_id': 'id'
    },
    CONTENT: {
        'short_name': 'cnt',
        'key_id': 'sha1_git'
    }
}
# Short type tags accepted in a persistent identifier
PERSISTENT_IDENTIFIER_TYPES = ['snp', 'rel', 'rev', 'dir', 'cnt']
# Field order of the PersistentId named tuple below
PERSISTENT_IDENTIFIER_KEYS = [
    'namespace', 'scheme_version', 'object_type', 'object_id', 'metadata']
# Separator between the core identifier and contextual metadata parts
PERSISTENT_IDENTIFIER_PARTS_SEP = ';'
class PersistentId(namedtuple('PersistentId', PERSISTENT_IDENTIFIER_KEYS)):
    """
    Named tuple holding the relevant info associated to a Software Heritage
    persistent identifier.

    Args:
        namespace (str): the namespace of the identifier, defaults to 'swh'
        scheme_version (int): the scheme version of the identifier,
            defaults to 1
        object_type (str): the type of object the identifier points to,
            either 'content', 'directory', 'release', 'revision' or 'snapshot'
        object_id (dict/bytes/str): object's dict representation or
            object identifier
        metadata (dict): optional dict filled with metadata related to
            pointed object

    Raises:
        swh.model.exceptions.ValidationError: In case of invalid object type
            or id

    Once created, it contains the following attributes:

    Attributes:
        namespace (str): the namespace of the identifier
        scheme_version (int): the scheme version of the identifier
        object_type (str): the type of object the identifier points to
        object_id (str): hexadecimal representation of the object hash
        metadata (dict): metadata related to the pointed object

    To get the raw persistent identifier string from an instance of
    this named tuple, use the :func:`str` function::

        pid = PersistentId(object_type='content', object_id='8ff44f081d43176474b267de5451f2c2e88089d0')
        pid_str = str(pid) # 'swh:1:cnt:8ff44f081d43176474b267de5451f2c2e88089d0'
    """ # noqa
    __slots__ = ()

    def __new__(cls, namespace='swh', scheme_version=1,
                object_type='', object_id='', metadata=None):
        # Use a None sentinel rather than a mutable default dict: a `{}`
        # default would be shared across every call site.
        if metadata is None:
            metadata = {}
        o = _object_type_map.get(object_type)
        if not o:
            raise ValidationError('Wrong input: Supported types are %s' % (
                list(_object_type_map.keys())))
        # internal swh representation resolution: accept a full object
        # dict and pull the identifier out of its canonical key
        if isinstance(object_id, dict):
            object_id = object_id[o['key_id']]
        validate_sha1(object_id)  # can raise if invalid hash
        object_id = hash_to_hex(object_id)
        return super(cls, PersistentId).__new__(
            cls, namespace, scheme_version, object_type, object_id, metadata)

    def __str__(self):
        """Render the identifier as 'swh:1:<tag>:<hex id>[;k=v...]'."""
        o = _object_type_map.get(self.object_type)
        pid = '%s:%s:%s:%s' % (self.namespace, self.scheme_version,
                               o['short_name'], self.object_id)
        if self.metadata:
            for k, v in self.metadata.items():
                pid += '%s%s=%s' % (PERSISTENT_IDENTIFIER_PARTS_SEP, k, v)
        return pid
def persistent_identifier(object_type, object_id, scheme_version=1,
                          metadata=None):
    """Compute persistent identifier (stable over time) as per
    documentation.

    Documentation:
        https://docs.softwareheritage.org/devel/swh-model/persistent-identifiers.html

    Args:
        object_type (str): object's type, either 'content', 'directory',
            'release', 'revision' or 'snapshot'
        object_id (dict/bytes/str): object's dict representation or object
            identifier
        scheme_version (int): persistent identifier scheme version,
            defaults to 1
        metadata (dict): metadata related to the pointed object

    Raises:
        swh.model.exceptions.ValidationError: In case of invalid object type
            or id

    Returns:
        str: the persistent identifier

    """
    # None sentinel instead of a shared mutable `{}` default
    if metadata is None:
        metadata = {}
    pid = PersistentId(scheme_version=scheme_version, object_type=object_type,
                       object_id=object_id, metadata=metadata)
    return str(pid)
def parse_persistent_identifier(persistent_id):
    """Parse swh's :ref:`persistent-identifiers` scheme.

    Args:
        persistent_id (str): A persistent identifier

    Raises:
        swh.model.exceptions.ValidationError: in case of:

            * missing mandatory values (4)
            * invalid namespace supplied
            * invalid version supplied
            * invalid type supplied
            * missing hash
            * invalid hash identifier supplied

    Returns:
        PersistentId: a named tuple holding the parsing result

    """
    # <pid>;<contextual-information>
    persistent_id_parts = persistent_id.split(PERSISTENT_IDENTIFIER_PARTS_SEP)
    pid_data = persistent_id_parts.pop(0).split(':')
    if len(pid_data) != 4:
        raise ValidationError(
            'Wrong format: There should be 4 mandatory values')
    # Checking for parsing errors
    _ns, _version, _type, _id = pid_data
    if _ns != 'swh':
        raise ValidationError(
            'Wrong format: Supported namespace is \'swh\'')
    if _version != '1':
        raise ValidationError(
            'Wrong format: Supported version is 1')
    pid_data[1] = int(pid_data[1])
    expected_types = PERSISTENT_IDENTIFIER_TYPES
    if _type not in expected_types:
        raise ValidationError(
            'Wrong format: Supported types are %s' % (
                ', '.join(expected_types)))
    # expand the short tag (e.g. 'cnt') to the full object type name
    for otype, data in _object_type_map.items():
        if _type == data['short_name']:
            pid_data[2] = otype
            break
    if not _id:
        raise ValidationError(
            'Wrong format: Identifier should be present')
    try:
        validate_sha1(_id)
    except ValidationError:
        raise ValidationError(
            'Wrong format: Identifier should be a valid hash')
    persistent_id_metadata = {}
    for part in persistent_id_parts:
        try:
            # split on the first '=' only, so metadata values may
            # themselves contain '=' (e.g. origin URLs with query strings)
            key, val = part.split('=', 1)
            persistent_id_metadata[key] = val
        except Exception:
            msg = 'Contextual data is badly formatted, form key=val expected'
            raise ValidationError(msg)
    pid_data.append(persistent_id_metadata)
    return PersistentId._make(pid_data)
diff --git a/swh/model/tests/test_identifiers.py b/swh/model/tests/test_identifiers.py
index 01c1153..6658608 100644
--- a/swh/model/tests/test_identifiers.py
+++ b/swh/model/tests/test_identifiers.py
@@ -1,906 +1,919 @@
# Copyright (C) 2015-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import binascii
import datetime
import unittest
from nose.tools import istest
from swh.model import hashutil, identifiers
from swh.model.exceptions import ValidationError
from swh.model.identifiers import SNAPSHOT, RELEASE, REVISION, DIRECTORY
from swh.model.identifiers import CONTENT, PERSISTENT_IDENTIFIER_TYPES
from swh.model.identifiers import PersistentId
class UtilityFunctionsIdentifier(unittest.TestCase):
    """Round-trip and error checks for identifier_to_bytes/str."""

    def setUp(self):
        self.str_id = 'c2e41aae41ac17bd4a650770d6ee77f62e52235b'
        self.bytes_id = binascii.unhexlify(self.str_id)
        self.bad_type_id = object()

    @istest
    def identifier_to_bytes(self):
        # both representations converge on the 20-byte form
        for identifier in (self.str_id, self.bytes_id):
            self.assertEqual(identifiers.identifier_to_bytes(identifier),
                             self.bytes_id)
            # truncated identifiers are rejected with a length error
            with self.assertRaises(ValueError) as cm:
                identifiers.identifier_to_bytes(identifier[:-2])
            self.assertIn('length', str(cm.exception))
        # unsupported types are rejected with a type error
        with self.assertRaises(ValueError) as cm:
            identifiers.identifier_to_bytes(self.bad_type_id)
        self.assertIn('type', str(cm.exception))

    @istest
    def identifier_to_str(self):
        # both representations converge on the 40-char hex form
        for identifier in (self.str_id, self.bytes_id):
            self.assertEqual(identifiers.identifier_to_str(identifier),
                             self.str_id)
            # truncated identifiers are rejected with a length error
            with self.assertRaises(ValueError) as cm:
                identifiers.identifier_to_str(identifier[:-2])
            self.assertIn('length', str(cm.exception))
        # unsupported types are rejected with a type error
        with self.assertRaises(ValueError) as cm:
            identifiers.identifier_to_str(self.bad_type_id)
        self.assertIn('type', str(cm.exception))
class UtilityFunctionsDateOffset(unittest.TestCase):
    """Checks for format_date and format_offset."""

    def setUp(self):
        # expected git encoding -> normalized timestamp dict
        self.dates = {
            b'1448210036': {'seconds': 1448210036,
                            'microseconds': 0},
            b'1448210036.002342': {'seconds': 1448210036,
                                   'microseconds': 2342},
            b'1448210036.12': {'seconds': 1448210036,
                               'microseconds': 120000},
        }
        # non-dict inputs that format_date must reject
        self.broken_dates = [1448210036.12]
        # offset in minutes -> expected [+-]HHMM encoding
        self.offsets = {
            0: b'+0000',
            -630: b'-1030',
            800: b'+1320',
        }

    @istest
    def format_date(self):
        for expected, date in self.dates.items():
            self.assertEqual(identifiers.format_date(date), expected)

    @istest
    def format_date_fail(self):
        for bad_date in self.broken_dates:
            with self.assertRaises(ValueError):
                identifiers.format_date(bad_date)

    @istest
    def format_offset(self):
        for offset, expected in self.offsets.items():
            self.assertEqual(identifiers.format_offset(offset), expected)
class ContentIdentifier(unittest.TestCase):
    """content_identifier must equal the hashes of the raw data."""

    def setUp(self):
        self.content = {
            'status': 'visible',
            'length': 5,
            'data': b'1984\n',
            'ctime': datetime.datetime(2015, 11, 22, 16, 33, 56,
                                       tzinfo=datetime.timezone.utc),
        }
        # reference hashes computed directly over the raw data
        self.content_id = hashutil.hash_data(self.content['data'])

    @istest
    def content_identifier(self):
        self.assertEqual(identifiers.content_identifier(self.content),
                         self.content_id)
class DirectoryIdentifier(unittest.TestCase):
    """directory_identifier against a known git tree hash.

    The fixture mixes files, directories and a revision reference, with
    names chosen to exercise the git sort order (directories sort as if
    suffixed with '/'). The 'id' members are the expected git tree hashes.
    """

    def setUp(self):
        self.directory = {
            'id': 'c2e41aae41ac17bd4a650770d6ee77f62e52235b',
            'entries': [
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'README',
                    'target': '37ec8ea2110c0b7a32fbb0e872f6e7debbf95e21'
                },
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'Rakefile',
                    'target': '3bb0e8592a41ae3185ee32266c860714980dbed7'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'app',
                    'target': '61e6e867f5d7ba3b40540869bc050b0c4fed9e95'
                },
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'1.megabyte',
                    'target': '7c2b2fbdd57d6765cdc9d84c2d7d333f11be7fb3'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'config',
                    'target': '591dfe784a2e9ccc63aaba1cb68a765734310d98'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'public',
                    'target': '9588bf4522c2b4648bfd1c61d175d1f88c1ad4a5'
                },
                {
                    'type': 'file',
                    'perms': 33188,
                    'name': b'development.sqlite3',
                    'target': 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'doc',
                    'target': '154705c6aa1c8ead8c99c7915373e3c44012057f'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'db',
                    'target': '85f157bdc39356b7bc7de9d0099b4ced8b3b382c'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'log',
                    'target': '5e3d3941c51cce73352dff89c805a304ba96fffe'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'script',
                    'target': '1b278423caf176da3f3533592012502aa10f566c'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'test',
                    'target': '035f0437c080bfd8711670b3e8677e686c69c763'
                },
                {
                    'type': 'dir',
                    'perms': 16384,
                    'name': b'vendor',
                    'target': '7c0dc9ad978c1af3f9a4ce061e50f5918bd27138'
                },
                {
                    # a submodule-style revision reference (perms 160000)
                    'type': 'rev',
                    'perms': 57344,
                    'name': b'will_paginate',
                    'target': '3d531e169db92a16a9a8974f0ae6edf52e52659e'
                }
            ],
        }
        # git's well-known empty tree hash
        self.empty_directory = {
            'id': '4b825dc642cb6eb9a060e54bf8d69288fbee4904',
            'entries': [],
        }

    @istest
    def dir_identifier(self):
        self.assertEqual(
            identifiers.directory_identifier(self.directory),
            self.directory['id'])

    @istest
    def dir_identifier_empty_directory(self):
        self.assertEqual(
            identifiers.directory_identifier(self.empty_directory),
            self.empty_directory['id'])
class RevisionIdentifier(unittest.TestCase):
def setUp(self):
linus_tz = datetime.timezone(datetime.timedelta(minutes=-420))
gpgsig = b'''\
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.13 (Darwin)
iQIcBAABAgAGBQJVJcYsAAoJEBiY3kIkQRNJVAUQAJ8/XQIfMqqC5oYeEFfHOPYZ
L7qy46bXHVBa9Qd8zAJ2Dou3IbI2ZoF6/Et89K/UggOycMlt5FKV/9toWyuZv4Po
L682wonoxX99qvVTHo6+wtnmYO7+G0f82h+qHMErxjP+I6gzRNBvRr+SfY7VlGdK
wikMKOMWC5smrScSHITnOq1Ews5pe3N7qDYMzK0XVZmgDoaem4RSWMJs4My/qVLN
e0CqYWq2A22GX7sXl6pjneJYQvcAXUX+CAzp24QnPSb+Q22Guj91TcxLFcHCTDdn
qgqMsEyMiisoglwrCbO+D+1xq9mjN9tNFWP66SQ48mrrHYTBV5sz9eJyDfroJaLP
CWgbDTgq6GzRMehHT3hXfYS5NNatjnhkNISXR7pnVP/obIi/vpWh5ll6Gd8q26z+
a/O41UzOaLTeNI365MWT4/cnXohVLRG7iVJbAbCxoQmEgsYMRc/pBAzWJtLfcB2G
jdTswYL6+MUdL8sB9pZ82D+BP/YAdHe69CyTu1lk9RT2pYtI/kkfjHubXBCYEJSG
+VGllBbYG6idQJpyrOYNRJyrDi9yvDJ2W+S0iQrlZrxzGBVGTB/y65S8C+2WTBcE
lf1Qb5GDsQrZWgD+jtWTywOYHtCBwyCKSAXxSARMbNPeak9WPlcW/Jmu+fUcMe2x
dg1KdHOa34shrKDaOVzW
=od6m
-----END PGP SIGNATURE-----'''
self.revision = {
'id': 'bc0195aad0daa2ad5b0d76cce22b167bc3435590',
'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
'author': {
'name': b'Linus Torvalds',
'email': b'torvalds@linux-foundation.org',
},
'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'committer': {
'name': b'Linus Torvalds',
'email': b'torvalds@linux-foundation.org',
},
'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'message': b'Linux 4.2-rc2\n',
}
self.revision_none_metadata = {
'id': 'bc0195aad0daa2ad5b0d76cce22b167bc3435590',
'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
'author': {
'name': b'Linus Torvalds',
'email': b'torvalds@linux-foundation.org',
},
'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'committer': {
'name': b'Linus Torvalds',
'email': b'torvalds@linux-foundation.org',
},
'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'message': b'Linux 4.2-rc2\n',
'metadata': None,
}
self.synthetic_revision = {
'id': b'\xb2\xa7\xe1&\x04\x92\xe3D\xfa\xb3\xcb\xf9\x1b\xc1<\x91'
b'\xe0T&\xfd',
'author': {
'name': b'Software Heritage',
'email': b'robot@softwareheritage.org',
},
'date': {
'timestamp': {'seconds': 1437047495},
'offset': 0,
'negative_utc': False,
},
'type': 'tar',
'committer': {
'name': b'Software Heritage',
'email': b'robot@softwareheritage.org',
},
'committer_date': 1437047495,
'synthetic': True,
'parents': [None],
'message': b'synthetic revision message\n',
'directory': b'\xd1\x1f\x00\xa6\xa0\xfe\xa6\x05SA\xd2U\x84\xb5\xa9'
b'e\x16\xc0\xd2\xb8',
'metadata': {'original_artifact': [
{'archive_type': 'tar',
'name': 'gcc-5.2.0.tar.bz2',
'sha1_git': '39d281aff934d44b439730057e55b055e206a586',
'sha1': 'fe3f5390949d47054b613edc36c557eb1d51c18e',
'sha256': '5f835b04b5f7dd4f4d2dc96190ec1621b8d89f'
'2dc6f638f9f8bc1b1014ba8cad'}]},
}
# cat commit.txt | git hash-object -t commit --stdin
self.revision_with_extra_headers = {
'id': '010d34f384fa99d047cdd5e2f41e56e5c2feee45',
'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
'author': {
'name': b'Linus Torvalds',
'email': b'torvalds@linux-foundation.org',
'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
},
'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'committer': {
'name': b'Linus Torvalds',
'email': b'torvalds@linux-foundation.org',
'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
},
'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'message': b'Linux 4.2-rc2\n',
'metadata': {
'extra_headers': [
['svn-repo-uuid', '046f1af7-66c2-d61b-5410-ce57b7db7bff'],
['svn-revision', 10],
]
}
}
self.revision_with_gpgsig = {
'id': '44cc742a8ca17b9c279be4cc195a93a6ef7a320e',
'directory': 'b134f9b7dc434f593c0bab696345548b37de0558',
'parents': ['689664ae944b4692724f13b709a4e4de28b54e57',
'c888305e1efbaa252d01b4e5e6b778f865a97514'],
'author': {
'name': b'Jiang Xin',
'email': b'worldhello.net@gmail.com',
'fullname': b'Jiang Xin <worldhello.net@gmail.com>',
},
'date': {
'timestamp': 1428538899,
'offset': 480,
},
'committer': {
'name': b'Jiang Xin',
'email': b'worldhello.net@gmail.com',
},
'committer_date': {
'timestamp': 1428538899,
'offset': 480,
},
'metadata': {
'extra_headers': [
['gpgsig', gpgsig],
],
},
'message': b'''Merge branch 'master' of git://github.com/alexhenrie/git-po
* 'master' of git://github.com/alexhenrie/git-po:
l10n: ca.po: update translation
'''
}
self.revision_no_message = {
'id': '4cfc623c9238fa92c832beed000ce2d003fd8333',
'directory': 'b134f9b7dc434f593c0bab696345548b37de0558',
'parents': ['689664ae944b4692724f13b709a4e4de28b54e57',
'c888305e1efbaa252d01b4e5e6b778f865a97514'],
'author': {
'name': b'Jiang Xin',
'email': b'worldhello.net@gmail.com',
'fullname': b'Jiang Xin <worldhello.net@gmail.com>',
},
'date': {
'timestamp': 1428538899,
'offset': 480,
},
'committer': {
'name': b'Jiang Xin',
'email': b'worldhello.net@gmail.com',
},
'committer_date': {
'timestamp': 1428538899,
'offset': 480,
},
'message': None,
}
self.revision_empty_message = {
'id': '7442cd78bd3b4966921d6a7f7447417b7acb15eb',
'directory': 'b134f9b7dc434f593c0bab696345548b37de0558',
'parents': ['689664ae944b4692724f13b709a4e4de28b54e57',
'c888305e1efbaa252d01b4e5e6b778f865a97514'],
'author': {
'name': b'Jiang Xin',
'email': b'worldhello.net@gmail.com',
'fullname': b'Jiang Xin <worldhello.net@gmail.com>',
},
'date': {
'timestamp': 1428538899,
'offset': 480,
},
'committer': {
'name': b'Jiang Xin',
'email': b'worldhello.net@gmail.com',
},
'committer_date': {
'timestamp': 1428538899,
'offset': 480,
},
'message': b'',
}
self.revision_only_fullname = {
'id': '010d34f384fa99d047cdd5e2f41e56e5c2feee45',
'directory': '85a74718d377195e1efd0843ba4f3260bad4fe07',
'parents': ['01e2d0627a9a6edb24c37db45db5ecb31e9de808'],
'author': {
'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
},
'date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'committer': {
'fullname': b'Linus Torvalds <torvalds@linux-foundation.org>',
},
'committer_date': datetime.datetime(2015, 7, 12, 15, 10, 30,
tzinfo=linus_tz),
'message': b'Linux 4.2-rc2\n',
'metadata': {
'extra_headers': [
['svn-repo-uuid', '046f1af7-66c2-d61b-5410-ce57b7db7bff'],
['svn-revision', 10],
]
}
}
@istest
def revision_identifier(self):
self.assertEqual(
identifiers.revision_identifier(self.revision),
identifiers.identifier_to_str(self.revision['id']),
)
@istest
def revision_identifier_none_metadata(self):
self.assertEqual(
identifiers.revision_identifier(self.revision_none_metadata),
identifiers.identifier_to_str(self.revision_none_metadata['id']),
)
@istest
def revision_identifier_synthetic(self):
self.assertEqual(
identifiers.revision_identifier(self.synthetic_revision),
identifiers.identifier_to_str(self.synthetic_revision['id']),
)
@istest
def revision_identifier_with_extra_headers(self):
self.assertEqual(
identifiers.revision_identifier(
self.revision_with_extra_headers),
identifiers.identifier_to_str(
self.revision_with_extra_headers['id']),
)
@istest
def revision_identifier_with_gpgsig(self):
self.assertEqual(
identifiers.revision_identifier(
self.revision_with_gpgsig),
identifiers.identifier_to_str(
self.revision_with_gpgsig['id']),
)
@istest
def revision_identifier_no_message(self):
self.assertEqual(
identifiers.revision_identifier(
self.revision_no_message),
identifiers.identifier_to_str(
self.revision_no_message['id']),
)
@istest
def revision_identifier_empty_message(self):
self.assertEqual(
identifiers.revision_identifier(
self.revision_empty_message),
identifiers.identifier_to_str(
self.revision_empty_message['id']),
)
@istest
def revision_identifier_only_fullname(self):
self.assertEqual(
identifiers.revision_identifier(
self.revision_only_fullname),
identifiers.identifier_to_str(
self.revision_only_fullname['id']),
)
class ReleaseIdentifier(unittest.TestCase):
    """Checks release_identifier() against releases with known git tag ids.

    Each fixture dict in setUp carries an 'id' key holding the expected
    identifier; the tests recompute the identifier from the remaining
    fields and compare.
    """

    def setUp(self):
        # UTC-0700, the offset used by the historical Linux kernel tags.
        linus_tz = datetime.timezone(datetime.timedelta(minutes=-420))

        # Ordinary signed release: author, date, message all present.
        # NOTE(review): blank lines inside the PGP-signed messages below may
        # have been stripped by the rendering this file passed through;
        # verify the literal bytes against the original fixtures.
        self.release = {
            'id': '2b10839e32c4c476e9d94492756bb1a3e1ec4aa8',
            'target': b't\x1b"R\xa5\xe1Ml`\xa9\x13\xc7z`\x99\xab\xe7:\x85J',
            'target_type': 'revision',
            'name': b'v2.6.14',
            'author': {
                'name': b'Linus Torvalds',
                'email': b'torvalds@g5.osdl.org',
            },
            'date': datetime.datetime(2005, 10, 27, 17, 2, 33,
                                      tzinfo=linus_tz),
            'message': b'''\
Linux 2.6.14 release
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.1 (GNU/Linux)
iD8DBQBDYWq6F3YsRnbiHLsRAmaeAJ9RCez0y8rOBbhSv344h86l/VVcugCeIhO1
wdLOnvj91G4wxYqrvThthbE=
=7VeT
-----END PGP SIGNATURE-----
''',
            'synthetic': False,
        }

        # Release without an 'author' key (and no date either).
        self.release_no_author = {
            'id': b'&y\x1a\x8b\xcf\x0em3\xf4:\xefv\x82\xbd\xb5U#mV\xde',
            'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab',
            'target_type': 'revision',
            'name': b'v2.6.12',
            'message': b'''\
This is the final 2.6.12 release
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.2.4 (GNU/Linux)
iD8DBQBCsykyF3YsRnbiHLsRAvPNAJ482tCZwuxp/bJRz7Q98MHlN83TpACdHr37
o6X/3T+vm8K3bf3driRr34c=
=sBHn
-----END PGP SIGNATURE-----
''',
            'synthetic': False,
        }

        # Release whose message is explicitly None.
        self.release_no_message = {
            'id': 'b6f4f446715f7d9543ef54e41b62982f0db40045',
            'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab',
            'target_type': 'revision',
            'name': b'v2.6.12',
            'author': {
                'name': b'Linus Torvalds',
                'email': b'torvalds@g5.osdl.org',
            },
            'date': datetime.datetime(2005, 10, 27, 17, 2, 33,
                                      tzinfo=linus_tz),
            'message': None,
        }

        # Release with an empty (b'') message — distinct from None above.
        self.release_empty_message = {
            'id': '71a0aea72444d396575dc25ac37fec87ee3c6492',
            'target': '9ee1c939d1cb936b1f98e8d81aeffab57bae46ab',
            'target_type': 'revision',
            'name': b'v2.6.12',
            'author': {
                'name': b'Linus Torvalds',
                'email': b'torvalds@g5.osdl.org',
            },
            'date': datetime.datetime(2005, 10, 27, 17, 2, 33,
                                      tzinfo=linus_tz),
            'message': b'',
        }

        # Offset 0 with negative_utc=True encodes the git '-0000' timezone.
        self.release_negative_utc = {
            'id': '97c8d2573a001f88e72d75f596cf86b12b82fd01',
            'name': b'20081029',
            'target': '54e9abca4c77421e2921f5f156c9fe4a9f7441c7',
            'target_type': 'revision',
            'date': {
                'timestamp': {'seconds': 1225281976},
                'offset': 0,
                'negative_utc': True,
            },
            'author': {
                'name': b'Otavio Salvador',
                'email': b'otavio@debian.org',
                'id': 17640,
            },
            'synthetic': False,
            'message': b'tagging version 20081029\n\nr56558\n',
        }

        # Author name/fullname containing an embedded newline.
        self.release_newline_in_author = {
            'author': {
                'email': b'esycat@gmail.com',
                'fullname': b'Eugene Janusov\n<esycat@gmail.com>',
                'name': b'Eugene Janusov\n',
            },
            'date': {
                'negative_utc': None,
                'offset': 600,
                'timestamp': {
                    'microseconds': 0,
                    'seconds': 1377480558,
                },
            },
            'id': b'\\\x98\xf5Y\xd04\x16-\xe2->\xbe\xb9T3\xe6\xf8\x88R1',
            'message': b'Release of v0.3.2.',
            'name': b'0.3.2',
            'synthetic': False,
            'target': (b'\xc0j\xa3\xd9;x\xa2\x86\\I5\x17'
                       b'\x000\xf8\xc2\xd79o\xd3'),
            'target_type': 'revision',
        }

    @istest
    def release_identifier(self):
        """A fully-populated release hashes to its known id."""
        self.assertEqual(
            identifiers.release_identifier(self.release),
            identifiers.identifier_to_str(self.release['id'])
        )

    @istest
    def release_identifier_no_author(self):
        """A release without an author hashes correctly."""
        self.assertEqual(
            identifiers.release_identifier(self.release_no_author),
            identifiers.identifier_to_str(self.release_no_author['id'])
        )

    @istest
    def release_identifier_no_message(self):
        """A release whose message is None hashes correctly."""
        self.assertEqual(
            identifiers.release_identifier(self.release_no_message),
            identifiers.identifier_to_str(self.release_no_message['id'])
        )

    @istest
    def release_identifier_empty_message(self):
        """A release with an empty message hashes correctly."""
        self.assertEqual(
            identifiers.release_identifier(self.release_empty_message),
            identifiers.identifier_to_str(self.release_empty_message['id'])
        )

    @istest
    def release_identifier_negative_utc(self):
        """The git '-0000' timezone (negative_utc) is preserved."""
        self.assertEqual(
            identifiers.release_identifier(self.release_negative_utc),
            identifiers.identifier_to_str(self.release_negative_utc['id'])
        )

    @istest
    def release_identifier_newline_in_author(self):
        """Newlines embedded in author fields do not break hashing."""
        self.assertEqual(
            identifiers.release_identifier(self.release_newline_in_author),
            identifiers.identifier_to_str(self.release_newline_in_author['id'])
        )
class SnapshotIdentifier(unittest.TestCase):
    """Checks snapshot_identifier() plus the persistent-identifier (SWH PID)
    computation and parsing helpers against known values.

    Fixes applied: resolved the unified-diff markers that had been pasted
    into test_persistent_identifier (keeping the post-diff version, which
    threads a metadata dict through every case), and replaced the
    deprecated ``assertEquals`` alias (removed in Python 3.12) with
    ``assertEqual``.
    """

    def setUp(self):
        super().setUp()

        # Snapshot with no branches at all.
        self.empty = {
            'id': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e',
            'branches': {},
        }

        # A branch name pointing to nothing (dangling branch).
        self.dangling_branch = {
            'id': 'c84502e821eb21ed84e9fd3ec40973abc8b32353',
            'branches': {
                b'HEAD': None,
            },
        }

        # An alias whose target branch does not exist in the snapshot.
        self.unresolved = {
            'id': '84b4548ea486e4b0a7933fa541ff1503a0afe1e0',
            'branches': {
                b'foo': {
                    'target': b'bar',
                    'target_type': 'alias',
                },
            },
        }

        # One branch of every supported target type, plus a dangling one.
        self.all_types = {
            'id': '6e65b86363953b780d92b0a928f3e8fcdd10db36',
            'branches': {
                b'directory': {
                    'target': '1bd0e65f7d2ff14ae994de17a1e7fe65111dcad8',
                    'target_type': 'directory',
                },
                b'content': {
                    'target': 'fe95a46679d128ff167b7c55df5d02356c5a1ae1',
                    'target_type': 'content',
                },
                b'alias': {
                    'target': b'revision',
                    'target_type': 'alias',
                },
                b'revision': {
                    'target': 'aafb16d69fd30ff58afdd69036a26047f3aebdc6',
                    'target_type': 'revision',
                },
                b'release': {
                    'target': '7045404f3d1c54e6473c71bbb716529fbad4be24',
                    'target_type': 'release',
                },
                b'snapshot': {
                    'target': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e',
                    'target_type': 'snapshot',
                },
                b'dangling': None,
            }
        }

    def test_empty_snapshot(self):
        """An empty snapshot hashes to its known id."""
        self.assertEqual(
            identifiers.snapshot_identifier(self.empty),
            identifiers.identifier_to_str(self.empty['id']),
        )

    def test_dangling_branch(self):
        """A dangling branch hashes correctly."""
        self.assertEqual(
            identifiers.snapshot_identifier(self.dangling_branch),
            identifiers.identifier_to_str(self.dangling_branch['id']),
        )

    def test_unresolved(self):
        """An unresolvable alias raises, naming both sides of the link."""
        with self.assertRaisesRegex(ValueError, "b'foo' -> b'bar'"):
            identifiers.snapshot_identifier(self.unresolved)

    def test_unresolved_force(self):
        """ignore_unresolved=True hashes despite the broken alias."""
        self.assertEqual(
            identifiers.snapshot_identifier(
                self.unresolved,
                ignore_unresolved=True,
            ),
            identifiers.identifier_to_str(self.unresolved['id']),
        )

    def test_all_types(self):
        """A snapshot with every target type hashes correctly."""
        self.assertEqual(
            identifiers.snapshot_identifier(self.all_types),
            identifiers.identifier_to_str(self.all_types['id']),
        )

    def test_persistent_identifier(self):
        """persistent_identifier() for every object type, both from a raw
        hash and from an object dict, with and without metadata."""
        _snapshot_id = hashutil.hash_to_bytes(
            'c7c108084bc0bf3d81436bf980b46e98bd338453')
        _release_id = '22ece559cc7cc2364edc5e5593d63ae8bd229f9f'
        _revision_id = '309cf2674ee7a0749978cf8265ab91a60aea0f7d'
        _directory_id = 'd198bc9d7a6bcf6db04f476d29314f157507d505'
        _content_id = '94a9ed024d3859793618152ea559a168bbcbb5e2'
        _snapshot = {'id': _snapshot_id}
        _release = {'id': _release_id}
        _revision = {'id': _revision_id}
        _directory = {'id': _directory_id}
        _content = {'sha1_git': _content_id}
        for full_type, _hash, expected_persistent_id, version, _meta in [
                (SNAPSHOT, _snapshot_id,
                 'swh:1:snp:c7c108084bc0bf3d81436bf980b46e98bd338453',
                 None, {}),
                (RELEASE, _release_id,
                 'swh:2:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f',
                 2, {}),
                (REVISION, _revision_id,
                 'swh:1:rev:309cf2674ee7a0749978cf8265ab91a60aea0f7d',
                 None, {}),
                (DIRECTORY, _directory_id,
                 'swh:1:dir:d198bc9d7a6bcf6db04f476d29314f157507d505',
                 None, {}),
                (CONTENT, _content_id,
                 'swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2',
                 1, {}),
                (SNAPSHOT, _snapshot,
                 'swh:1:snp:c7c108084bc0bf3d81436bf980b46e98bd338453',
                 None, {}),
                (RELEASE, _release,
                 'swh:2:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f',
                 2, {}),
                (REVISION, _revision,
                 'swh:1:rev:309cf2674ee7a0749978cf8265ab91a60aea0f7d',
                 None, {}),
                (DIRECTORY, _directory,
                 'swh:1:dir:d198bc9d7a6bcf6db04f476d29314f157507d505',
                 None, {}),
                (CONTENT, _content,
                 'swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2',
                 1, {}),
                # Contextual metadata appended as ';key=value'.
                (CONTENT, _content,
                 'swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2'
                 ';origin=1',
                 1, {'origin': '1'}),
        ]:
            if version:
                actual_value = identifiers.persistent_identifier(
                    full_type, _hash, version, metadata=_meta)
            else:
                # No explicit version: the default scheme version applies.
                actual_value = identifiers.persistent_identifier(
                    full_type, _hash, metadata=_meta)
            self.assertEqual(actual_value, expected_persistent_id)

    def test_persistent_identifier_wrong_input(self):
        """Bad hashes and unknown object types raise ValidationError."""
        _snapshot_id = 'notahash4bc0bf3d81436bf980b46e98bd338453'
        _snapshot = {'id': _snapshot_id}
        for _type, _hash, _error in [
                (SNAPSHOT, _snapshot_id, 'Unexpected characters'),
                (SNAPSHOT, _snapshot, 'Unexpected characters'),
                ('foo', '', 'Wrong input: Supported types are'),
        ]:
            with self.assertRaisesRegex(ValidationError, _error):
                identifiers.persistent_identifier(_type, _hash)

    def test_parse_persistent_identifier(self):
        """Round-trip parsing of PIDs, without and with contextual data."""
        for pid, _type, _version, _hash in [
                ('swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2',
                 CONTENT, 1, '94a9ed024d3859793618152ea559a168bbcbb5e2'),
                ('swh:1:dir:d198bc9d7a6bcf6db04f476d29314f157507d505',
                 DIRECTORY, 1, 'd198bc9d7a6bcf6db04f476d29314f157507d505'),
                ('swh:1:rev:309cf2674ee7a0749978cf8265ab91a60aea0f7d',
                 REVISION, 1, '309cf2674ee7a0749978cf8265ab91a60aea0f7d'),
                ('swh:1:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f',
                 RELEASE, 1, '22ece559cc7cc2364edc5e5593d63ae8bd229f9f'),
                ('swh:1:snp:c7c108084bc0bf3d81436bf980b46e98bd338453',
                 SNAPSHOT, 1, 'c7c108084bc0bf3d81436bf980b46e98bd338453'),
        ]:
            expected_result = PersistentId(
                namespace='swh',
                scheme_version=_version,
                object_type=_type,
                object_id=_hash,
                metadata={}
            )
            actual_result = identifiers.parse_persistent_identifier(pid)
            self.assertEqual(actual_result, expected_result)

        for pid, _type, _version, _hash, _metadata in [
                ('swh:1:cnt:9c95815d9e9d91b8dae8e05d8bbc696fe19f796b;lines=1-18;origin=https://github.com/python/cpython',  # noqa
                 CONTENT, 1, '9c95815d9e9d91b8dae8e05d8bbc696fe19f796b',
                 {
                     'lines': '1-18',
                     'origin': 'https://github.com/python/cpython'
                 }),
                ('swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;origin=deb://Debian/packages/linuxdoc-tools',  # noqa
                 DIRECTORY, 1, '0b6959356d30f1a4e9b7f6bca59b9a336464c03d',
                 {
                     'origin': 'deb://Debian/packages/linuxdoc-tools'
                 })
        ]:
            expected_result = PersistentId(
                namespace='swh',
                scheme_version=_version,
                object_type=_type,
                object_id=_hash,
                metadata=_metadata
            )
            actual_result = identifiers.parse_persistent_identifier(pid)
            self.assertEqual(actual_result, expected_result)

    def test_parse_persistent_identifier_parsing_error(self):
        """Malformed PIDs raise ValidationError with a specific message."""
        for pid, _error in [
                ('swh:1:cnt',
                 'Wrong format: There should be 4 mandatory values'),
                ('swh:1:',
                 'Wrong format: There should be 4 mandatory values'),
                ('swh:',
                 'Wrong format: There should be 4 mandatory values'),
                ('swh:1:cnt:',
                 'Wrong format: Identifier should be present'),
                ('foo:1:cnt:abc8bc9d7a6bcf6db04f476d29314f157507d505',
                 'Wrong format: Supported namespace is \'swh\''),
                ('swh:2:dir:def8bc9d7a6bcf6db04f476d29314f157507d505',
                 'Wrong format: Supported version is 1'),
                ('swh:1:foo:fed8bc9d7a6bcf6db04f476d29314f157507d505',
                 'Wrong format: Supported types are %s' % (
                     ', '.join(PERSISTENT_IDENTIFIER_TYPES))),
                ('swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;invalid;'
                 'malformed',
                 'Contextual data is badly formatted, form key=val expected'),
                ('swh:1:snp:gh6959356d30f1a4e9b7f6bca59b9a336464c03d',
                 'Wrong format: Identifier should be a valid hash'),
                ('swh:1:snp:foo',
                 'Wrong format: Identifier should be a valid hash')
        ]:
            with self.assertRaisesRegex(
                    ValidationError, _error):
                identifiers.parse_persistent_identifier(pid)

File Metadata

Mime Type
text/x-diff
Expires
Jul 4 2025, 10:08 AM (5 w, 6 h ago)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
3213068

Event Timeline