diff --git a/PKG-INFO b/PKG-INFO index 5de3f06..c27c44f 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,42 +1,42 @@ Metadata-Version: 2.1 Name: swh.model -Version: 6.3.1 +Version: 6.4.0 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-model Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-model/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: cli Provides-Extra: testing-minimal Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-model ========= Implementation of the Data model of the Software Heritage project, used to archive source code artifacts. This module defines the notion of SoftWare Heritage persistent IDentifiers (SWHIDs) and provides tools to compute them: ```sh $ swh-identify fork.c kmod.c sched/deadline.c swh:1:cnt:2e391c754ae730bd2d8520c2ab497c403220c6e3 fork.c swh:1:cnt:0277d1216f80ae1adeed84a686ed34c9b2931fc2 kmod.c swh:1:cnt:57b939c81bce5d06fa587df8915f05affbe22b82 sched/deadline.c $ swh-identify --no-filename /usr/src/linux/kernel/ swh:1:dir:f9f858a48d663b3809c9e2f336412717496202ab ``` diff --git a/swh.model.egg-info/PKG-INFO b/swh.model.egg-info/PKG-INFO index 5de3f06..c27c44f 100644 --- a/swh.model.egg-info/PKG-INFO +++ b/swh.model.egg-info/PKG-INFO @@ -1,42 +1,42 @@ Metadata-Version: 2.1 Name: swh.model -Version: 6.3.1 +Version: 6.4.0 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-model Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-model/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: cli Provides-Extra: testing-minimal Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-model ========= Implementation of the Data model of the Software Heritage project, used to archive source code artifacts. 
This module defines the notion of SoftWare Heritage persistent IDentifiers (SWHIDs) and provides tools to compute them: ```sh $ swh-identify fork.c kmod.c sched/deadline.c swh:1:cnt:2e391c754ae730bd2d8520c2ab497c403220c6e3 fork.c swh:1:cnt:0277d1216f80ae1adeed84a686ed34c9b2931fc2 kmod.c swh:1:cnt:57b939c81bce5d06fa587df8915f05affbe22b82 sched/deadline.c $ swh-identify --no-filename /usr/src/linux/kernel/ swh:1:dir:f9f858a48d663b3809c9e2f336412717496202ab ``` diff --git a/swh/model/git_objects.py b/swh/model/git_objects.py index 41be6f2..cf25837 100644 --- a/swh/model/git_objects.py +++ b/swh/model/git_objects.py @@ -1,648 +1,654 @@ # Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """ Converts SWH model objects to git(-like) objects Most of the functions in this module take as argument an object from :mod:`swh.model.model`, and format it like a git object. They are the inverse functions of those in :mod:`swh.loader.git.converters`, but with extensions, as SWH's model is a superset of Git's: * extensions of existing types (eg. revision/commit and release/tag dates can be expressed with precision up to milliseconds, to support formatting Mercurial objects) * new types, for SWH's specific needs (:class:`swh.model.model.RawExtrinsicMetadata` and :class:`swh.model.model.ExtID`) * support for somewhat corrupted git objects that we need to reproduce This is used for two purposes: * Format manifests that can be hashed to produce :ref:`intrinsic identifiers <persistent-identifiers>` * Write git objects to reproduce git repositories that were ingested in the archive. """ from __future__ import annotations import datetime from functools import lru_cache from typing import Dict, Iterable, List, Optional, Tuple, Union, cast import warnings from . import model from .collections import ImmutableDict from .hashutil import git_object_header, hash_to_bytehex def content_git_object(content: model.Content) -> bytes: """Formats a content as a git blob. A content's identifier is the blob sha1 à la git of the tagged content. """ content = cast(model.Content, content) if content.data is None: raise model.MissingData("Content data is None, cannot format.") return git_object_header("blob", len(content.data)) + content.data def directory_entry_sort_key(entry: model.DirectoryEntry): """The sorting key for tree entries""" if isinstance(entry, dict): # For backward compatibility entry = model.DirectoryEntry.from_dict(entry) if entry.type == "dir": return entry.name + b"/" else: return entry.name @lru_cache() def _perms_to_bytes(perms): """Convert the perms value to its canonical bytes representation""" oc = oct(perms)[2:] return oc.encode("ascii") def escape_newlines(snippet): """Escape the newlines present in snippet according to git rules. New lines in git manifests are escaped by indenting the next line by one space. """ if b"\n" in snippet: return b"\n ".join(snippet.split(b"\n")) else: return snippet def format_date(date: model.Timestamp) -> bytes: """Convert a date object into a UTC timestamp encoded as ascii bytes. Git stores timestamps as an integer number of seconds since the UNIX epoch. However, Software Heritage stores timestamps as an integer number of microseconds (postgres type "datetime with timezone"). Therefore, we print timestamps with no microseconds as integers, and timestamps with microseconds as floating point values.
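For instance (a small doctest-style sketch of the two cases, with hand-picked values):

>>> format_date(model.Timestamp(seconds=1234567890, microseconds=0))
b'1234567890'
>>> format_date(model.Timestamp(seconds=1234567890, microseconds=250000))
b'1234567890.25'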
We elide the trailing zeroes from microsecond values, to "future-proof" our representation if we ever need more precision in timestamps. """ if isinstance(date, dict): # For backward compatibility date = model.Timestamp.from_dict(date) if not date.microseconds: return str(date.seconds).encode() else: float_value = "%d.%06d" % (date.seconds, date.microseconds) return float_value.rstrip("0").encode() def normalize_timestamp(time_representation): """Normalize a time representation for processing by Software Heritage This function supports a numeric timestamp (representing a number of seconds since the UNIX epoch, 1970-01-01 at 00:00 UTC), a :obj:`datetime.datetime` object (with timezone information), or a normalized Software Heritage time representation (idempotency). Args: time_representation: the representation of a timestamp Returns: dict: a normalized dictionary with three keys: - timestamp: a dict with two optional keys: - seconds: the integral number of seconds since the UNIX epoch - microseconds: the integral number of microseconds - offset: the timezone offset as a number of minutes relative to UTC - negative_utc: a boolean representing whether the offset is -0000 when offset = 0. """ if time_representation is None: return None else: return model.TimestampWithTimezone.from_dict(time_representation).to_dict() def directory_git_object(directory: Union[Dict, model.Directory]) -> bytes: """Formats a directory as a git tree. A directory's identifier is the tree sha1 à la git of a directory listing, using the following algorithm, which is equivalent to the git algorithm for trees: 1. Entries of the directory are sorted using the name (or the name with '/' appended for directory entries) as key, in bytes order. 2. For each entry of the directory, the following bytes are output: - the octal representation of the permissions for the entry (stored in the 'perms' member), which is a representation of the entry type: - b'100644' (int 33188) for files - b'100755' (int 33261) for executable files - b'120000' (int 40960) for symbolic links - b'40000' (int 16384) for directories - b'160000' (int 57344) for references to revisions - an ascii space (b'\x20') - the entry's name (as raw bytes), stored in the 'name' member - a null byte (b'\x00') - the 20 byte long identifier of the object pointed at by the entry, stored in the 'target' member: - for files or executable files: their blob sha1_git - for symbolic links: the blob sha1_git of a file containing the link destination - for directories: their intrinsic identifier - for revisions: their intrinsic identifier (Note that there is no separator between entries) """ if isinstance(directory, dict): # For backward compatibility warnings.warn( "directory_git_object's argument should be a swh.model.model.Directory " "object.", DeprecationWarning, stacklevel=2, ) directory = model.Directory.from_dict(directory) directory = cast(model.Directory, directory) components = [] for entry in sorted(directory.entries, key=directory_entry_sort_key): components.extend( [ _perms_to_bytes(entry.perms), b"\x20", entry.name, b"\x00", entry.target, ] ) return format_git_object_from_parts("tree", components) def format_git_object_from_headers( git_type: str, headers: Iterable[Tuple[bytes, bytes]], message: Optional[bytes] = None, ) -> bytes: """Format a git_object comprised of a git header and a manifest, which is itself a sequence of `headers`, and an optional `message`. 
The git_object format, compatible with the git format for tag and commit objects, is as follows: - for each `key`, `value` in `headers`, emit: - the `key`, literally - an ascii space (``\\x20``) - the `value`, with newlines escaped using :func:`escape_newlines`, - an ascii newline (``\\x0a``) - if the `message` is not None, emit: - an ascii newline (``\\x0a``) - the `message`, literally Args: headers: a sequence of key/value headers stored in the manifest; message: an optional message used to trail the manifest. Returns: the formatted git_object as bytes """ entries: List[bytes] = [] for key, value in headers: entries.extend((key, b" ", escape_newlines(value), b"\n")) if message is not None: entries.extend((b"\n", message)) return format_git_object_from_parts(git_type, entries) def format_git_object_from_parts(git_type: str, parts: Iterable[bytes]) -> bytes: """Similar to :func:`format_git_object_from_headers`, but for manifests made of a flat list of entries, instead of key-value + message, ie. trees and snapshots.""" concatenated_parts = b"".join(parts) header = git_object_header(git_type, len(concatenated_parts)) return header + concatenated_parts def format_author_data( author: model.Person, date_offset: Optional[model.TimestampWithTimezone] ) -> bytes: """Format authorship data according to git standards. Git authorship data has two components: - an author specification, usually a name and email, but in practice an arbitrary bytestring - optionally, a timestamp with a UTC offset specification The authorship data is formatted thus:: `name and email`[ `timestamp` `utc_offset`] The timestamp is encoded as a (decimal) number of seconds since the UNIX epoch (1970-01-01 at 00:00 UTC). As an extension to the git format, we support fractional timestamps, using a dot as the separator for the decimal part. The utc offset is a number of minutes encoded as '[+-]HHMM'. Note that some tools can pass a negative offset corresponding to the UTC timezone ('-0000'), which is valid and is encoded as such. Returns: the byte string containing the authorship data """ ret = [author.fullname] if date_offset is not None: date_f = format_date(date_offset.timestamp) ret.extend([b" ", date_f, b" ", date_offset.offset_bytes]) return b"".join(ret) def revision_git_object(revision: Union[Dict, model.Revision]) -> bytes: """Formats a revision as a git commit. The fields used for the revision identifier computation are: - directory - parents - author - author_date - committer - committer_date - extra_headers or metadata -> extra_headers - message A revision's identifier is the 'git'-checksum of a commit manifest constructed as follows (newlines are a single ASCII newline character):: tree <directory identifier> [for each parent in parents] parent <parent identifier> [end for each parents] author <author> <author_date> committer <committer> <committer_date> [for each key, value in extra_headers] <key> <encoded value> [end for each extra_headers] <message> The directory identifier is the ascii representation of its hexadecimal encoding. Author and committer are formatted using the :attr:`Person.fullname` attribute only. Dates are formatted with the :func:`format_author_data` function. Extra headers are an ordered list of [key, value] pairs. Keys are strings and get encoded to utf-8 for identifier computation. Values are either byte strings, unicode strings (that get encoded to utf-8), or integers (that get encoded to their utf-8 decimal representation). Multiline extra header values are escaped by indenting the continuation lines with one ascii space. If the message is None, the manifest ends with the last header.
Else, the message is appended to the headers after an empty line. The checksum of the full manifest is computed using the 'commit' git object type. """ if isinstance(revision, dict): # For backward compatibility warnings.warn( "revision_git_object's argument should be a swh.model.model.Revision " "object.", DeprecationWarning, stacklevel=2, ) revision = model.Revision.from_dict(revision) revision = cast(model.Revision, revision) headers = [(b"tree", hash_to_bytehex(revision.directory))] for parent in revision.parents: if parent: headers.append((b"parent", hash_to_bytehex(parent))) if revision.author is not None: headers.append((b"author", format_author_data(revision.author, revision.date))) if revision.committer is not None: headers.append( ( b"committer", format_author_data(revision.committer, revision.committer_date), ) ) # Handle extra headers metadata = revision.metadata or ImmutableDict() extra_headers = revision.extra_headers or () if not extra_headers and "extra_headers" in metadata: extra_headers = metadata["extra_headers"] headers.extend(extra_headers) return format_git_object_from_headers("commit", headers, revision.message) def target_type_to_git(target_type: model.ObjectType) -> bytes: """Convert a software heritage target type to a git object type""" return { model.ObjectType.CONTENT: b"blob", model.ObjectType.DIRECTORY: b"tree", model.ObjectType.REVISION: b"commit", model.ObjectType.RELEASE: b"tag", model.ObjectType.SNAPSHOT: b"refs", }[target_type] def release_git_object(release: Union[Dict, model.Release]) -> bytes: if isinstance(release, dict): # For backward compatibility warnings.warn( "release_git_object's argument should be a swh.model.model.Release " "object.", DeprecationWarning, stacklevel=2, ) release = model.Release.from_dict(release) release = cast(model.Release, release) headers = [ (b"object", hash_to_bytehex(release.target)), (b"type", target_type_to_git(release.target_type)), (b"tag", release.name), ] if release.author is not None: headers.append((b"tagger", format_author_data(release.author, release.date))) return format_git_object_from_headers("tag", headers, release.message) -def snapshot_git_object(snapshot: Union[Dict, model.Snapshot]) -> bytes: +def snapshot_git_object( + snapshot: Union[Dict, model.Snapshot], *, ignore_unresolved: bool = False +) -> bytes: """Formats a snapshot as a git-like object. Snapshots are a set of named branches, which are pointers to objects at any level of the Software Heritage DAG. As well as pointing to other objects in the Software Heritage DAG, branches can also be *alias*es, in which case their target is the name of another branch in the same snapshot, or *dangling*, in which case the target is unknown (and represented by the ``None`` value). A snapshot identifier is a salted sha1 (using the git hashing algorithm with the ``snapshot`` object type) of a manifest following the algorithm: 1. Branches are sorted using the name as key, in bytes order. 2.
For each branch, the following bytes are output: - the type of the branch target: - ``content``, ``directory``, ``revision``, ``release`` or ``snapshot`` for the corresponding entries in the DAG; - ``alias`` for branches referencing another branch; - ``dangling`` for dangling branches - an ascii space (``\\x20``) - the branch name (as raw bytes) - a null byte (``\\x00``) - the length of the target identifier, as an ascii-encoded decimal number (``20`` for current intrinsic identifiers, ``0`` for dangling branches, the length of the target branch name for branch aliases) - a colon (``:``) - the identifier of the target object pointed at by the branch, stored in the 'target' member: - for contents: their *sha1_git* - for directories, revisions, releases or snapshots: their intrinsic identifier - for branch aliases, the name of the target branch (as raw bytes) - for dangling branches, the empty string Note that, akin to directory manifests, there is no separator between entries. Because of symbolic branches, identifiers are of arbitrary length but are length-encoded to avoid ambiguity. + + Args: + ignore_unresolved: if False (the default), raises an exception when + alias branches point to non-existing branches """ if isinstance(snapshot, dict): # For backward compatibility warnings.warn( "snapshot_git_object's argument should be a swh.model.model.Snapshot " "object.", DeprecationWarning, stacklevel=2, ) snapshot = model.Snapshot.from_dict(snapshot) snapshot = cast(model.Snapshot, snapshot) unresolved = [] lines = [] for name, target in sorted(snapshot.branches.items()): if not target: target_type = b"dangling" target_id = b"" elif target.target_type == model.TargetType.ALIAS: target_type = b"alias" target_id = target.target if target_id not in snapshot.branches or target_id == name: unresolved.append((name, target_id)) else: target_type = target.target_type.value.encode() target_id = target.target lines.extend( [ target_type, b"\x20", name, b"\x00", ("%d:" % len(target_id)).encode(), target_id, ] ) - if unresolved: + if unresolved and not ignore_unresolved: raise ValueError( "Branch aliases unresolved: %s" % ", ".join("%r -> %r" % x for x in unresolved), unresolved, ) return format_git_object_from_parts("snapshot", lines) def raw_extrinsic_metadata_git_object( metadata: Union[Dict, model.RawExtrinsicMetadata] ) -> bytes: """Formats RawExtrinsicMetadata as a git-like object. A raw_extrinsic_metadata identifier is a salted sha1 (using the git hashing algorithm with the ``raw_extrinsic_metadata`` object type) of a manifest following the format:: target $ExtendedSwhid discovery_date $Timestamp authority $StrWithoutSpaces $IRI fetcher $Str $Version format $StrWithoutSpaces origin $IRI <- optional visit $IntInDecimal <- optional snapshot $CoreSwhid <- optional release $CoreSwhid <- optional revision $CoreSwhid <- optional path $Bytes <- optional directory $CoreSwhid <- optional $MetadataBytes $IRI must be RFC 3987 IRIs (so they may contain newlines, that are escaped as described below) $StrWithoutSpaces and $Version are ASCII strings, and may not contain spaces. $Str is a UTF-8 string. $CoreSwhid are core SWHIDs, as defined in :ref:`persistent-identifiers`. $ExtendedSwhid is a core SWHID, with extra types allowed ('ori' for origins and 'emd' for raw extrinsic metadata) $Timestamp is a decimal representation of the rounded-down integer number of seconds since the UNIX epoch (1970-01-01 00:00:00 UTC), with no leading '0' (unless the timestamp value is zero) and no timezone.
It may be negative by prefixing it with a '-', which must not be followed by a '0'. Newlines in $Bytes, $Str, and $IRI are escaped as with other git fields, ie. by adding a space after them. """ if isinstance(metadata, dict): # For backward compatibility warnings.warn( "raw_extrinsic_metadata_git_object's argument should be a " "swh.model.model.RawExtrinsicMetadata object.", DeprecationWarning, stacklevel=2, ) metadata = model.RawExtrinsicMetadata.from_dict(metadata) metadata = cast(model.RawExtrinsicMetadata, metadata) # equivalent to using math.floor(dt.timestamp()) to round down, # as int(dt.timestamp()) rounds toward zero, # which would map two seconds on the 0 timestamp. # # This should never be an issue in practice as Software Heritage didn't # start collecting metadata before 2015. timestamp = ( metadata.discovery_date.astimezone(datetime.timezone.utc) .replace(microsecond=0) .timestamp() ) assert timestamp.is_integer() headers = [ (b"target", str(metadata.target).encode()), (b"discovery_date", str(int(timestamp)).encode("ascii")), ( b"authority", f"{metadata.authority.type.value} {metadata.authority.url}".encode(), ), ( b"fetcher", f"{metadata.fetcher.name} {metadata.fetcher.version}".encode(), ), (b"format", metadata.format.encode()), ] for key in ( "origin", "visit", "snapshot", "release", "revision", "path", "directory", ): if getattr(metadata, key, None) is not None: value: bytes if key == "path": value = getattr(metadata, key) else: value = str(getattr(metadata, key)).encode() headers.append((key.encode("ascii"), value)) return format_git_object_from_headers( "raw_extrinsic_metadata", headers, metadata.metadata ) def extid_git_object(extid: model.ExtID) -> bytes: """Formats an extid as a git-like object. An ExtID identifier is a salted sha1 (using the git hashing algorithm with the ``extid`` object type) of a manifest following the format: ``` extid_type $StrWithoutSpaces [extid_version $Str] extid $Bytes target $CoreSwhid ``` $StrWithoutSpaces is an ASCII string, and may not contain spaces. Newlines in $Bytes are escaped as with other git fields, ie. by adding a space after them. The extid_version line is only generated if the version is non-zero.
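For example, an ExtID mapping a Mercurial nodeid to a revision could produce a manifest like this sketch (the values are hypothetical, and the raw ``extid`` bytes are shown as a placeholder):

```
extid_type hg-nodeid
extid <20 raw bytes of the nodeid>
target swh:1:rev:d81cc0710eb6cf9efd5b920a8453e1e07157b6cd
```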
""" headers = [ (b"extid_type", extid.extid_type.encode("ascii")), ] extid_version = extid.extid_version if extid_version != 0: headers.append((b"extid_version", str(extid_version).encode("ascii"))) headers.extend( [ (b"extid", extid.extid), (b"target", str(extid.target).encode("ascii")), ] ) return format_git_object_from_headers("extid", headers) diff --git a/swh/model/hypothesis_strategies.py b/swh/model/hypothesis_strategies.py index 89fe6de..19a8d89 100644 --- a/swh/model/hypothesis_strategies.py +++ b/swh/model/hypothesis_strategies.py @@ -1,581 +1,593 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import string from typing import Sequence from hypothesis import assume from hypothesis.extra.dateutil import timezones from hypothesis.strategies import ( binary, booleans, builds, characters, composite, datetimes, dictionaries, from_regex, integers, just, lists, none, one_of, sampled_from, sets, text, tuples, ) from .from_disk import DentryPerms from .model import ( BaseContent, Content, Directory, DirectoryEntry, MetadataAuthority, MetadataFetcher, ObjectType, Origin, OriginVisit, OriginVisitStatus, Person, RawExtrinsicMetadata, Release, Revision, RevisionType, SkippedContent, Snapshot, SnapshotBranch, TargetType, Timestamp, TimestampWithTimezone, ) from .swhids import ExtendedObjectType, ExtendedSWHID pgsql_alphabet = characters( blacklist_categories=("Cs",), blacklist_characters=["\u0000"] ) # postgresql does not like these def optional(strategy): return one_of(none(), strategy) def pgsql_text(): return text(alphabet=pgsql_alphabet) def sha1_git(): return binary(min_size=20, max_size=20) def sha1(): return binary(min_size=20, max_size=20) def binaries_without_bytes(blacklist: Sequence[int]): """Like hypothesis.strategies.binary, but takes a sequence of bytes that should not be included.""" return lists(sampled_from([i for i in range(256) if i not in blacklist])).map(bytes) @composite def extended_swhids(draw): object_type = draw(sampled_from(ExtendedObjectType)) object_id = draw(sha1_git()) return ExtendedSWHID(object_type=object_type, object_id=object_id) def aware_datetimes(): # datetimes in Software Heritage are not used for software artifacts # (which may be much older than 2000), but only for objects like scheduler # task runs, and origin visits, which were created by Software Heritage, # so at least in 2015. 
# We're forbidding old datetimes, because until 1956, many timezones had seconds # in their "UTC offsets" (see # ), which is not # encodable in ISO8601; and we need our datetimes to be ISO8601-encodable in the # RPC protocol min_value = datetime.datetime(2000, 1, 1, 0, 0, 0) return datetimes(min_value=min_value, timezones=timezones()) @composite def iris(draw): protocol = draw(sampled_from(["git", "http", "https", "deb"])) domain = draw(from_regex(r"\A([a-z]([a-z0-9é🏛️-]*)\.){1,3}([a-z0-9é])+\Z")) return "%s://%s" % (protocol, domain) @composite def persons_d(draw): fullname = draw(binary()) email = draw(optional(binary())) name = draw(optional(binary())) assume(not (len(fullname) == 32 and email is None and name is None)) return dict(fullname=fullname, name=name, email=email) def persons(**kwargs): return persons_d(**kwargs).map(Person.from_dict) def timestamps_d(**kwargs): max_seconds = datetime.datetime.max.replace( tzinfo=datetime.timezone.utc ).timestamp() min_seconds = datetime.datetime.min.replace( tzinfo=datetime.timezone.utc ).timestamp() + + # in Python 3.9, datetime.datetime.max is 9999-12-31T23:59:59.999999, which + # means its .timestamp() is 253402300799.999999 in UTC. Unfortunately, because of + # floating-point loss of precision, this is rounded up to 253402300800.0, which + # is the timestamp of 10000-01-01T00:00:00 in UTC, which cannot be passed to + # datetime.datetime.fromtimestamp because it overflows. + # To work around this issue, we move max_seconds and min_seconds one second + # closer to the epoch, which is more than enough (actually, subtracting 20ms from + # max_seconds is enough). + max_seconds -= 1 + min_seconds += 1 + defaults = dict( seconds=integers(min_seconds, max_seconds), microseconds=integers(0, 1000000 - 1), ) return builds(dict, **{**defaults, **kwargs}) def timestamps(): return timestamps_d().map(Timestamp.from_dict) @composite def timestamps_with_timezone_d( draw, *, timestamp=timestamps_d(), offset=integers(min_value=-14 * 60, max_value=14 * 60), negative_utc=booleans(), ): timestamp = draw(timestamp) offset = draw(offset) negative_utc = draw(negative_utc) assume(not (negative_utc and offset)) return dict(timestamp=timestamp, offset=offset, negative_utc=negative_utc) timestamps_with_timezone = timestamps_with_timezone_d().map( TimestampWithTimezone.from_dict ) def origins_d(*, url=iris()): return builds(dict, url=url) def origins(**kwargs): return origins_d(**kwargs).map(Origin.from_dict) def origin_visits_d(**kwargs): defaults = dict( visit=integers(1, 1000), origin=iris(), date=aware_datetimes(), type=pgsql_text(), ) return builds(dict, **{**defaults, **kwargs}) def origin_visits(**kwargs): return origin_visits_d(**kwargs).map(OriginVisit.from_dict) def metadata_dicts(): return dictionaries(pgsql_text(), pgsql_text()) def origin_visit_statuses_d(**kwargs): defaults = dict( visit=integers(1, 1000), origin=iris(), type=optional(sampled_from(["git", "svn", "pypi", "debian"])), status=sampled_from( ["created", "ongoing", "full", "partial", "not_found", "failed"] ), date=aware_datetimes(), snapshot=optional(sha1_git()), metadata=optional(metadata_dicts()), ) return builds(dict, **{**defaults, **kwargs}) def origin_visit_statuses(**kwargs): return origin_visit_statuses_d(**kwargs).map(OriginVisitStatus.from_dict) @composite def releases_d(draw, **kwargs): defaults = dict( target_type=sampled_from([x.value for x in ObjectType]), name=binary(), message=optional(binary()), synthetic=booleans(), target=sha1_git(), metadata=optional(revision_metadata()),
raw_manifest=optional(binary()), ) d = draw( one_of( # None author/date: builds(dict, author=none(), date=none(), **{**defaults, **kwargs}), # non-None author/date: builds( dict, date=timestamps_with_timezone_d(), author=persons_d(), **{**defaults, **kwargs}, ), # it is also possible for date to be None but not author, but let's not # overwhelm hypothesis with this edge case ) ) if d["raw_manifest"] is None: del d["raw_manifest"] return d def releases(**kwargs): return releases_d(**kwargs).map(Release.from_dict) revision_metadata = metadata_dicts def extra_headers(): return lists( tuples(binary(min_size=0, max_size=50), binary(min_size=0, max_size=500)) ).map(tuple) @composite def revisions_d(draw, **kwargs): defaults = dict( message=optional(binary()), synthetic=booleans(), parents=tuples(sha1_git()), directory=sha1_git(), type=sampled_from([x.value for x in RevisionType]), metadata=optional(revision_metadata()), extra_headers=extra_headers(), raw_manifest=optional(binary()), ) d = draw( one_of( # None author/committer/date/committer_date builds( dict, author=none(), committer=none(), date=none(), committer_date=none(), **{**defaults, **kwargs}, ), # non-None author/committer/date/committer_date builds( dict, author=persons_d(), committer=persons_d(), date=timestamps_with_timezone_d(), committer_date=timestamps_with_timezone_d(), **{**defaults, **kwargs}, ), # There are many other combinations, but let's not overwhelm hypothesis # with these edge cases ) ) # TODO: metadata['extra_headers'] can have binary keys and values if d["raw_manifest"] is None: del d["raw_manifest"] return d def revisions(**kwargs): return revisions_d(**kwargs).map(Revision.from_dict) def directory_entries_d(**kwargs): defaults = dict( name=binaries_without_bytes(b"/"), target=sha1_git(), ) return one_of( builds( dict, type=just("file"), perms=one_of( integers(min_value=0o100000, max_value=0o100777), # regular file integers(min_value=0o120000, max_value=0o120777), # symlink ), **{**defaults, **kwargs}, ), builds( dict, type=just("dir"), perms=integers( min_value=DentryPerms.directory, max_value=DentryPerms.directory + 0o777, ), **{**defaults, **kwargs}, ), builds( dict, type=just("rev"), perms=integers( min_value=DentryPerms.revision, max_value=DentryPerms.revision + 0o777, ), **{**defaults, **kwargs}, ), ) def directory_entries(**kwargs): return directory_entries_d(**kwargs).map(DirectoryEntry.from_dict) @composite def directories_d(draw, raw_manifest=optional(binary())): d = draw(builds(dict, entries=tuples(directory_entries_d()))) d["raw_manifest"] = draw(raw_manifest) if d["raw_manifest"] is None: del d["raw_manifest"] return d def directories(**kwargs): return directories_d(**kwargs).map(Directory.from_dict) def contents_d(): return one_of(present_contents_d(), skipped_contents_d()) def contents(): return one_of(present_contents(), skipped_contents()) def present_contents_d(**kwargs): defaults = dict( data=binary(max_size=4096), ctime=optional(aware_datetimes()), status=one_of(just("visible"), just("hidden")), ) return builds(dict, **{**defaults, **kwargs}) def present_contents(**kwargs): return present_contents_d(**kwargs).map(lambda d: Content.from_data(**d)) @composite def skipped_contents_d( draw, reason=pgsql_text(), status=just("absent"), ctime=optional(aware_datetimes()) ): result = BaseContent._hash_data(draw(binary(max_size=4096))) result.pop("data") nullify_attrs = draw( sets(sampled_from(["sha1", "sha1_git", "sha256", "blake2s256"])) ) for k in nullify_attrs: result[k] = None result["reason"] = draw(reason)
result["status"] = draw(status) result["ctime"] = draw(ctime) return result def skipped_contents(**kwargs): return skipped_contents_d().map(SkippedContent.from_dict) def branch_names(): return binary(min_size=1) def branch_targets_object_d(): return builds( dict, target=sha1_git(), target_type=sampled_from( [x.value for x in TargetType if x.value not in ("alias",)] ), ) def branch_targets_alias_d(): return builds( dict, target=sha1_git(), target_type=just("alias") ) # TargetType.ALIAS.value)) def branch_targets_d(*, only_objects=False): if only_objects: return branch_targets_object_d() else: return one_of(branch_targets_alias_d(), branch_targets_object_d()) def branch_targets(*, only_objects=False): return builds(SnapshotBranch.from_dict, branch_targets_d(only_objects=only_objects)) @composite def snapshots_d(draw, *, min_size=0, max_size=100, only_objects=False): branches = draw( dictionaries( keys=branch_names(), values=optional(branch_targets_d(only_objects=only_objects)), min_size=min_size, max_size=max_size, ) ) if not only_objects: # Make sure aliases point to actual branches unresolved_aliases = { branch: target["target"] for branch, target in branches.items() if ( target and target["target_type"] == "alias" and target["target"] not in branches ) } for alias_name, alias_target in unresolved_aliases.items(): # Override alias branch with one pointing to a real object # if max_size constraint is reached alias = alias_target if len(branches) < max_size else alias_name branches[alias] = draw(branch_targets_d(only_objects=True)) # Ensure no cycles between aliases while True: try: snapshot = Snapshot.from_dict( { "branches": { name: branch or None for (name, branch) in branches.items() } } ) except ValueError as e: for (source, target) in e.args[1]: branches[source] = draw(branch_targets_d(only_objects=True)) else: break return snapshot.to_dict() def snapshots(*, min_size=0, max_size=100, only_objects=False): return snapshots_d( min_size=min_size, max_size=max_size, only_objects=only_objects ).map(Snapshot.from_dict) def metadata_authorities(url=iris()): return builds(MetadataAuthority, url=url, metadata=just(None)) def metadata_fetchers(**kwargs): defaults = dict( name=text(min_size=1, alphabet=string.printable), version=text( min_size=1, alphabet=string.ascii_letters + string.digits + string.punctuation, ), ) return builds( MetadataFetcher, metadata=just(None), **{**defaults, **kwargs}, ) def raw_extrinsic_metadata(**kwargs): defaults = dict( target=extended_swhids(), discovery_date=aware_datetimes(), authority=metadata_authorities(), fetcher=metadata_fetchers(), format=text(min_size=1, alphabet=string.printable), ) return builds(RawExtrinsicMetadata, **{**defaults, **kwargs}) def raw_extrinsic_metadata_d(**kwargs): return raw_extrinsic_metadata(**kwargs).map(RawExtrinsicMetadata.to_dict) def objects(blacklist_types=("origin_visit_status",), split_content=False): """generates a random couple (type, obj) which obj is an instance of the Model class corresponding to obj_type. `blacklist_types` is a list of obj_type to exclude from the strategy. If `split_content` is True, generates Content and SkippedContent under different obj_type, resp. "content" and "skipped_content". 
""" strategies = [ ("origin", origins), ("origin_visit", origin_visits), ("origin_visit_status", origin_visit_statuses), ("snapshot", snapshots), ("release", releases), ("revision", revisions), ("directory", directories), ("raw_extrinsic_metadata", raw_extrinsic_metadata), ] if split_content: strategies.append(("content", present_contents)) strategies.append(("skipped_content", skipped_contents)) else: strategies.append(("content", contents)) args = [ obj_gen().map(lambda x, obj_type=obj_type: (obj_type, x)) for (obj_type, obj_gen) in strategies if obj_type not in blacklist_types ] return one_of(*args) def object_dicts(blacklist_types=("origin_visit_status",), split_content=False): """generates a random couple (type, dict) which dict is suitable for .from_dict() factory methods. `blacklist_types` is a list of obj_type to exclude from the strategy. If `split_content` is True, generates Content and SkippedContent under different obj_type, resp. "content" and "skipped_content". """ strategies = [ ("origin", origins_d), ("origin_visit", origin_visits_d), ("origin_visit_status", origin_visit_statuses_d), ("snapshot", snapshots_d), ("release", releases_d), ("revision", revisions_d), ("directory", directories_d), ("raw_extrinsic_metadata", raw_extrinsic_metadata_d), ] if split_content: strategies.append(("content", present_contents_d)) strategies.append(("skipped_content", skipped_contents_d)) else: strategies.append(("content", contents_d)) args = [ obj_gen().map(lambda x, obj_type=obj_type: (obj_type, x)) for (obj_type, obj_gen) in strategies if obj_type not in blacklist_types ] return one_of(*args) diff --git a/swh/model/model.py b/swh/model/model.py index 1073cc6..8910db8 100644 --- a/swh/model/model.py +++ b/swh/model/model.py @@ -1,1634 +1,1638 @@ # Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """ Implementation of Software Heritage's data model See :ref:`data-model` for an overview of the data model. The classes defined in this module are immutable `attrs objects `__ and enums. All classes define a ``from_dict`` class method and a ``to_dict`` method to convert between them and msgpack-serializable objects. """ from abc import ABCMeta, abstractmethod import collections import datetime from enum import Enum import hashlib from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, TypeVar, Union import attr from attrs_strict import AttributeTypeError import dateutil.parser import iso8601 from typing_extensions import Final from . 
import git_objects from .collections import ImmutableDict from .hashutil import DEFAULT_ALGORITHMS, MultiHash, hash_to_bytehex, hash_to_hex from .swhids import CoreSWHID from .swhids import ExtendedObjectType as SwhidExtendedObjectType from .swhids import ExtendedSWHID from .swhids import ObjectType as SwhidObjectType class MissingData(Exception): """Raised by `Content.with_data` when it has no way of fetching the data (but not when fetching the data fails).""" pass KeyType = Union[Dict[str, str], Dict[str, bytes], bytes] """The type returned by BaseModel.unique_key().""" SHA1_SIZE = 20 _OFFSET_CHARS = frozenset(b"+-0123456789") # TODO: Limit this to 20 bytes Sha1Git = bytes Sha1 = bytes KT = TypeVar("KT") VT = TypeVar("VT") def hash_repr(h: bytes) -> str: if h is None: return "None" else: return f"hash_to_bytes('{hash_to_hex(h)}')" def freeze_optional_dict( d: Union[None, Dict[KT, VT], ImmutableDict[KT, VT]] # type: ignore ) -> Optional[ImmutableDict[KT, VT]]: if isinstance(d, dict): return ImmutableDict(d) else: return d def dictify(value): "Helper function used by BaseModel.to_dict()" if isinstance(value, BaseModel): return value.to_dict() elif isinstance(value, (CoreSWHID, ExtendedSWHID)): return str(value) elif isinstance(value, Enum): return value.value elif isinstance(value, (dict, ImmutableDict)): return {k: dictify(v) for k, v in value.items()} elif isinstance(value, tuple): return tuple(dictify(v) for v in value) else: return value def _check_type(type_, value): if type_ is object or type_ is Any: return True if type_ is None: return value is None origin = getattr(type_, "__origin__", None) # Non-generic type, check it directly if origin is None: # This is functionally equivalent to using just this: # return isinstance(value, type) # but using type equality before isinstance allows very quick checks # when the exact class is used (which is the overwhelming majority of cases) # while still allowing subclasses to be used. return type(value) == type_ or isinstance(value, type_) # Check the type of the value itself # # For the same reason as above, this condition is functionally equivalent to: # if origin is not Union and not isinstance(value, origin): if origin is not Union and type(value) != origin and not isinstance(value, origin): return False # Then, if it's a container, check its items. if origin is tuple: args = type_.__args__ if len(args) == 2 and args[1] is Ellipsis: # Infinite tuple return all(_check_type(args[0], item) for item in value) else: # Finite tuple if len(args) != len(value): return False return all( _check_type(item_type, item) for (item_type, item) in zip(args, value) ) elif origin is Union: args = type_.__args__ return any(_check_type(variant, value) for variant in args) elif origin is ImmutableDict: (key_type, value_type) = type_.__args__ return all( _check_type(key_type, key) and _check_type(value_type, value) for (key, value) in value.items() ) else: # No need to check dict or list, because they are converted to ImmutableDict # and tuple respectively. raise NotImplementedError(f"Type-checking {type_}") def type_validator(): """Like attrs_strict.type_validator(), but stricter.
It is an attrs validator, which checks that attributes have the specified type, using type equality instead of ``isinstance()``, for improved performance """ def validator(instance, attribute, value): if not _check_type(attribute.type, value): raise AttributeTypeError(value, attribute) return validator ModelType = TypeVar("ModelType", bound="BaseModel") class BaseModel: """Base class for SWH model classes. Provides serialization/deserialization to/from Python dictionaries that are suitable for JSON/msgpack-like formats.""" __slots__ = () def to_dict(self): """Wrapper of `attr.asdict` that can be overridden by subclasses that have special handling of some of the fields.""" return dictify(attr.asdict(self, recurse=False)) @classmethod def from_dict(cls, d): """Takes a dictionary representing a tree of SWH objects, and recursively builds the corresponding objects.""" return cls(**d) def anonymize(self: ModelType) -> Optional[ModelType]: """Returns an anonymized version of the object, if needed. If the object model does not need/support anonymization, returns None. """ return None def unique_key(self) -> KeyType: """Returns a unique key for this object, that can be used for deduplication.""" raise NotImplementedError(f"unique_key for {self}") def check(self) -> None: """Performs internal consistency checks, and raises an error if one fails.""" - attr.validate(self) + # without the type-ignore comment below, attr >= 22.1.0 causes mypy to report: + # Argument 1 has incompatible type "BaseModel"; expected "AttrsInstance" + attr.validate(self) # type: ignore[arg-type] def _compute_hash_from_manifest(manifest: bytes) -> Sha1Git: return hashlib.new("sha1", manifest).digest() class HashableObject(metaclass=ABCMeta): """Mixin to automatically compute object identifier hash when the associated model is instantiated.""" __slots__ = () id: Sha1Git def compute_hash(self) -> bytes: """Derived model classes must implement this to compute the object hash. This method is called by the object initialization if the `id` attribute is set to an empty value. """ return self._compute_hash_from_attributes() @abstractmethod def _compute_hash_from_attributes(self) -> Sha1Git: raise NotImplementedError(f"_compute_hash_from_attributes for {self}") def __attrs_post_init__(self): if not self.id: obj_id = self.compute_hash() object.__setattr__(self, "id", obj_id) def unique_key(self) -> KeyType: return self.id def check(self) -> None: super().check() # type: ignore if self.id != self.compute_hash(): raise ValueError("'id' does not match recomputed hash.") class HashableObjectWithManifest(HashableObject): """Derived class of HashableObject, for objects that may need to store verbatim git objects as ``raw_manifest`` to preserve original hashes.""" __slots__ = () raw_manifest: Optional[bytes] = None """Stores the original content of git objects when they cannot be faithfully represented using only the other attributes. This should only be used as a last resort, and only set in the Git loader, for objects too corrupt to fit the data model.""" def to_dict(self): d = super().to_dict() if d["raw_manifest"] is None: del d["raw_manifest"] return d def compute_hash(self) -> bytes: """Derived model classes must implement this to compute the object hash. This method is called by the object initialization if the `id` attribute is set to an empty value.
""" if self.raw_manifest is None: return super().compute_hash() # calls self._compute_hash_from_attributes() else: return _compute_hash_from_manifest(self.raw_manifest) def check(self) -> None: super().check() if ( self.raw_manifest is not None and self.id == self._compute_hash_from_attributes() ): raise ValueError( f"{self} has a non-none raw_manifest attribute, but does not need it." ) @attr.s(frozen=True, slots=True) class Person(BaseModel): """Represents the author/committer of a revision or release.""" object_type: Final = "person" fullname = attr.ib(type=bytes, validator=type_validator()) name = attr.ib(type=Optional[bytes], validator=type_validator(), eq=False) email = attr.ib(type=Optional[bytes], validator=type_validator(), eq=False) @classmethod def from_fullname(cls, fullname: bytes): """Returns a Person object, by guessing the name and email from the fullname, in the `name ` format. The fullname is left unchanged.""" if fullname is None: raise TypeError("fullname is None.") name: Optional[bytes] email: Optional[bytes] try: open_bracket = fullname.index(b"<") except ValueError: name = fullname email = None else: raw_name = fullname[:open_bracket] raw_email = fullname[open_bracket + 1 :] if not raw_name: name = None else: name = raw_name.strip() try: close_bracket = raw_email.rindex(b">") except ValueError: email = raw_email else: email = raw_email[:close_bracket] return Person( name=name or None, email=email or None, fullname=fullname, ) def anonymize(self) -> "Person": """Returns an anonymized version of the Person object. Anonymization is simply a Person which fullname is the hashed, with unset name or email. """ return Person( fullname=hashlib.sha256(self.fullname).digest(), name=None, email=None, ) @classmethod def from_dict(cls, d): """ If the fullname is missing, construct a fullname using the following heuristics: if the name value is None, we return the email in angle brackets, else, we return the name, a space, and the email in angle brackets. """ if "fullname" not in d: parts = [] if d["name"] is not None: parts.append(d["name"]) if d["email"] is not None: parts.append(b"".join([b"<", d["email"], b">"])) fullname = b" ".join(parts) d = {**d, "fullname": fullname} d = {"name": None, "email": None, **d} return super().from_dict(d) @attr.s(frozen=True, slots=True) class Timestamp(BaseModel): """Represents a naive timestamp from a VCS.""" object_type: Final = "timestamp" seconds = attr.ib(type=int, validator=type_validator()) microseconds = attr.ib(type=int, validator=type_validator()) @seconds.validator def check_seconds(self, attribute, value): """Check that seconds fit in a 64-bits signed integer.""" if not (-(2**63) <= value < 2**63): raise ValueError("Seconds must be a signed 64-bits integer.") @microseconds.validator def check_microseconds(self, attribute, value): """Checks that microseconds are positive and < 1000000.""" if not (0 <= value < 10**6): raise ValueError("Microseconds must be in [0, 1000000[.") @attr.s(frozen=True, slots=True) class TimestampWithTimezone(BaseModel): """Represents a TZ-aware timestamp from a VCS.""" object_type: Final = "timestamp_with_timezone" timestamp = attr.ib(type=Timestamp, validator=type_validator()) offset_bytes = attr.ib(type=bytes, validator=type_validator()) """Raw git representation of the timezone, as an offset from UTC. It should follow this format: ``+HHMM`` or ``-HHMM`` (including ``+0000`` and ``-0000``). 
However, when created from git objects, it must be the exact bytes used in the original objects, so it may differ from this format when they do. """ @classmethod def from_numeric_offset( cls, timestamp: Timestamp, offset: int, negative_utc: bool ) -> "TimestampWithTimezone": """Returns a :class:`TimestampWithTimezone` instance from the old dictionary format (with ``offset`` and ``negative_utc`` instead of ``offset_bytes``). """ negative = offset < 0 or negative_utc (hours, minutes) = divmod(abs(offset), 60) offset_bytes = f"{'-' if negative else '+'}{hours:02}{minutes:02}".encode() tstz = TimestampWithTimezone(timestamp=timestamp, offset_bytes=offset_bytes) assert tstz.offset_minutes() == offset, (tstz.offset_minutes(), offset) return tstz @classmethod def from_dict( cls, time_representation: Union[Dict, datetime.datetime, int] ) -> "TimestampWithTimezone": """Builds a TimestampWithTimezone from any of the formats accepted by :func:`swh.model.normalize_timestamp`.""" # TODO: this accepts way more types than just dicts; find a better # name if isinstance(time_representation, dict): ts = time_representation["timestamp"] if isinstance(ts, dict): seconds = ts.get("seconds", 0) microseconds = ts.get("microseconds", 0) elif isinstance(ts, int): seconds = ts microseconds = 0 else: raise ValueError( f"TimestampWithTimezone.from_dict received non-integer timestamp " f"member {ts!r}" ) timestamp = Timestamp(seconds=seconds, microseconds=microseconds) if "offset_bytes" in time_representation: return cls( timestamp=timestamp, offset_bytes=time_representation["offset_bytes"], ) else: # old format offset = time_representation["offset"] negative_utc = time_representation.get("negative_utc") or False return cls.from_numeric_offset(timestamp, offset, negative_utc) elif isinstance(time_representation, datetime.datetime): # TODO: warn when using from_dict() on a datetime utcoffset = time_representation.utcoffset() time_representation = time_representation.astimezone(datetime.timezone.utc) microseconds = time_representation.microsecond if microseconds: time_representation = time_representation.replace(microsecond=0) seconds = int(time_representation.timestamp()) if utcoffset is None: raise ValueError( f"TimestampWithTimezone.from_dict received datetime without " f"timezone: {time_representation}" ) # utcoffset is an integer number of minutes seconds_offset = utcoffset.total_seconds() offset = int(seconds_offset) // 60 # TODO: warn if remainder is not zero return cls.from_numeric_offset( Timestamp(seconds=seconds, microseconds=microseconds), offset, False ) elif isinstance(time_representation, int): # TODO: warn when using from_dict() on an int seconds = time_representation timestamp = Timestamp(seconds=time_representation, microseconds=0) return cls(timestamp=timestamp, offset_bytes=b"+0000") else: raise ValueError( f"TimestampWithTimezone.from_dict received non-integer timestamp: " f"{time_representation!r}" ) @classmethod def from_datetime(cls, dt: datetime.datetime) -> "TimestampWithTimezone": return cls.from_dict(dt) def to_datetime(self) -> datetime.datetime: """Convert to a datetime (with a timezone set to the recorded fixed UTC offset) Beware that this conversion can be lossy: ``-0000`` and 'weird' offsets cannot be represented. Also note that it may fail due to type overflow.
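For instance, with a +02:00 offset (a small doctest-style sketch):

>>> TimestampWithTimezone(
...     Timestamp(seconds=0, microseconds=0), offset_bytes=b"+0200"
... ).to_datetime()
datetime.datetime(1970, 1, 1, 2, 0, tzinfo=datetime.timezone(datetime.timedelta(seconds=7200)))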
""" timestamp = datetime.datetime.fromtimestamp( self.timestamp.seconds, datetime.timezone(datetime.timedelta(minutes=self.offset_minutes())), ) timestamp = timestamp.replace(microsecond=self.timestamp.microseconds) return timestamp @classmethod def from_iso8601(cls, s): """Builds a TimestampWithTimezone from an ISO8601-formatted string.""" dt = iso8601.parse_date(s) tstz = cls.from_datetime(dt) if dt.tzname() == "-00:00": assert tstz.offset_bytes == b"+0000" tstz = attr.evolve(tstz, offset_bytes=b"-0000") return tstz @staticmethod def _parse_offset_bytes(offset_bytes: bytes) -> int: """Parses an ``offset_bytes`` value (in Git's ``[+-]HHMM`` format), and returns the corresponding numeric values (in number of minutes). Tries to account for some mistakes in the format, to support incorrect Git implementations. >>> TimestampWithTimezone._parse_offset_bytes(b"+0000") 0 >>> TimestampWithTimezone._parse_offset_bytes(b"-0000") 0 >>> TimestampWithTimezone._parse_offset_bytes(b"+0200") 120 >>> TimestampWithTimezone._parse_offset_bytes(b"-0200") -120 >>> TimestampWithTimezone._parse_offset_bytes(b"+200") 120 >>> TimestampWithTimezone._parse_offset_bytes(b"-200") -120 >>> TimestampWithTimezone._parse_offset_bytes(b"+02") 120 >>> TimestampWithTimezone._parse_offset_bytes(b"-02") -120 >>> TimestampWithTimezone._parse_offset_bytes(b"+0010") 10 >>> TimestampWithTimezone._parse_offset_bytes(b"-0010") -10 >>> TimestampWithTimezone._parse_offset_bytes(b"+200000000000000000") 0 >>> TimestampWithTimezone._parse_offset_bytes(b"+0160") # 60 minutes... 0 """ offset_str = offset_bytes.decode() assert offset_str[0] in "+-" sign = int(offset_str[0] + "1") if len(offset_str) <= 3: hours = int(offset_str[1:]) minutes = 0 else: hours = int(offset_str[1:-2]) minutes = int(offset_str[-2:]) offset = sign * (hours * 60 + minutes) if (0 <= minutes <= 59) and (-(2**15) <= offset < 2**15): return offset else: # can't parse it to a reasonable value; give up and pretend it's UTC. return 0 def offset_minutes(self): """Returns the offset, as a number of minutes since UTC. >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"+0000" ... ).offset_minutes() 0 >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"+0200" ... ).offset_minutes() 120 >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"-0200" ... ).offset_minutes() -120 >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"+0530" ... 
).offset_minutes() 330 """ return self._parse_offset_bytes(self.offset_bytes) @attr.s(frozen=True, slots=True) class Origin(HashableObject, BaseModel): """Represents a software source: a VCS and a URL.""" object_type: Final = "origin" url = attr.ib(type=str, validator=type_validator()) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"") def unique_key(self) -> KeyType: return {"url": self.url} def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(self.url.encode("utf-8")) def swhid(self) -> ExtendedSWHID: """Returns a SWHID representing this origin.""" return ExtendedSWHID( object_type=SwhidExtendedObjectType.ORIGIN, object_id=self.id, ) @attr.s(frozen=True, slots=True) class OriginVisit(BaseModel): """Represents an origin visit with a given type at a given point in time, by a SWH loader.""" object_type: Final = "origin_visit" origin = attr.ib(type=str, validator=type_validator()) date = attr.ib(type=datetime.datetime, validator=type_validator()) type = attr.ib(type=str, validator=type_validator()) """Should not be set before calling 'origin_visit_add()'.""" visit = attr.ib(type=Optional[int], validator=type_validator(), default=None) @date.validator def check_date(self, attribute, value): """Checks the date has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("date must be a timezone-aware datetime.") def to_dict(self): """Serializes the date as a string and omits the visit id if it is `None`.""" ov = super().to_dict() if ov["visit"] is None: del ov["visit"] return ov def unique_key(self) -> KeyType: return {"origin": self.origin, "date": str(self.date)} @attr.s(frozen=True, slots=True) class OriginVisitStatus(BaseModel): """Represents a visit update of an origin at a given point in time.""" object_type: Final = "origin_visit_status" origin = attr.ib(type=str, validator=type_validator()) visit = attr.ib(type=int, validator=type_validator()) date = attr.ib(type=datetime.datetime, validator=type_validator()) status = attr.ib( type=str, validator=attr.validators.in_( ["created", "ongoing", "full", "partial", "not_found", "failed"] ), ) snapshot = attr.ib( type=Optional[Sha1Git], validator=type_validator(), repr=hash_repr ) # Type is optional to be able to use it before adding it to the database model type = attr.ib(type=Optional[str], validator=type_validator(), default=None) metadata = attr.ib( type=Optional[ImmutableDict[str, object]], validator=type_validator(), converter=freeze_optional_dict, default=None, ) @date.validator def check_date(self, attribute, value): """Checks the date has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("date must be a timezone-aware datetime.") def unique_key(self) -> KeyType: return {"origin": self.origin, "visit": str(self.visit), "date": str(self.date)} class TargetType(Enum): """The type of content pointed to by a snapshot branch. Usually a revision or an alias.""" CONTENT = "content" DIRECTORY = "directory" REVISION = "revision" RELEASE = "release" SNAPSHOT = "snapshot" ALIAS = "alias" def __repr__(self): return f"TargetType.{self.name}" class ObjectType(Enum): """The type of content pointed to by a release.
Usually a revision""" CONTENT = "content" DIRECTORY = "directory" REVISION = "revision" RELEASE = "release" SNAPSHOT = "snapshot" def __repr__(self): return f"ObjectType.{self.name}" @attr.s(frozen=True, slots=True) class SnapshotBranch(BaseModel): """Represents one of the branches of a snapshot.""" object_type: Final = "snapshot_branch" target = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) target_type = attr.ib(type=TargetType, validator=type_validator()) @target.validator def check_target(self, attribute, value): """Checks the target type is not an alias, checks the target is a valid sha1_git.""" if self.target_type != TargetType.ALIAS and self.target is not None: if len(value) != 20: raise ValueError("Wrong length for bytes identifier: %d" % len(value)) @classmethod def from_dict(cls, d): return cls(target=d["target"], target_type=TargetType(d["target_type"])) @attr.s(frozen=True, slots=True) class Snapshot(HashableObject, BaseModel): """Represents the full state of an origin at a given point in time.""" object_type: Final = "snapshot" branches = attr.ib( type=ImmutableDict[bytes, Optional[SnapshotBranch]], validator=type_validator(), converter=freeze_optional_dict, ) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) def _compute_hash_from_attributes(self) -> bytes: - return _compute_hash_from_manifest(git_objects.snapshot_git_object(self)) + return _compute_hash_from_manifest( + git_objects.snapshot_git_object(self, ignore_unresolved=True) + ) @classmethod def from_dict(cls, d): d = d.copy() return cls( branches=ImmutableDict( (name, SnapshotBranch.from_dict(branch) if branch else None) for (name, branch) in d.pop("branches").items() ), **d, ) def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.SNAPSHOT, object_id=self.id) @attr.s(frozen=True, slots=True) class Release(HashableObjectWithManifest, BaseModel): object_type: Final = "release" name = attr.ib(type=bytes, validator=type_validator()) message = attr.ib(type=Optional[bytes], validator=type_validator()) target = attr.ib(type=Optional[Sha1Git], validator=type_validator(), repr=hash_repr) target_type = attr.ib(type=ObjectType, validator=type_validator()) synthetic = attr.ib(type=bool, validator=type_validator()) author = attr.ib(type=Optional[Person], validator=type_validator(), default=None) date = attr.ib( type=Optional[TimestampWithTimezone], validator=type_validator(), default=None ) metadata = attr.ib( type=Optional[ImmutableDict[str, object]], validator=type_validator(), converter=freeze_optional_dict, default=None, ) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) raw_manifest = attr.ib(type=Optional[bytes], default=None) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.release_git_object(self)) @author.validator def check_author(self, attribute, value): """If the author is `None`, checks the date is `None` too.""" if self.author is None and self.date is not None: raise ValueError("release date must be None if author is None.") def to_dict(self): rel = super().to_dict() if rel["metadata"] is None: del rel["metadata"] return rel @classmethod def from_dict(cls, d): d = d.copy() if d.get("author"): d["author"] = Person.from_dict(d["author"]) if d.get("date"): d["date"] = TimestampWithTimezone.from_dict(d["date"]) return cls(target_type=ObjectType(d.pop("target_type")), **d) def swhid(self) -> CoreSWHID: """Returns a SWHID 
representing this object.""" return CoreSWHID(object_type=SwhidObjectType.RELEASE, object_id=self.id) def anonymize(self) -> "Release": """Returns an anonymized version of the Release object. Anonymization consists in replacing the author with an anonymized Person object. """ author = self.author and self.author.anonymize() return attr.evolve(self, author=author) class RevisionType(Enum): GIT = "git" TAR = "tar" DSC = "dsc" SUBVERSION = "svn" MERCURIAL = "hg" CVS = "cvs" BAZAAR = "bzr" def __repr__(self): return f"RevisionType.{self.name}" def tuplify_extra_headers(value: Iterable): return tuple((k, v) for k, v in value) @attr.s(frozen=True, slots=True) class Revision(HashableObjectWithManifest, BaseModel): object_type: Final = "revision" message = attr.ib(type=Optional[bytes], validator=type_validator()) author = attr.ib(type=Optional[Person], validator=type_validator()) committer = attr.ib(type=Optional[Person], validator=type_validator()) date = attr.ib(type=Optional[TimestampWithTimezone], validator=type_validator()) committer_date = attr.ib( type=Optional[TimestampWithTimezone], validator=type_validator() ) type = attr.ib(type=RevisionType, validator=type_validator()) directory = attr.ib(type=Sha1Git, validator=type_validator(), repr=hash_repr) synthetic = attr.ib(type=bool, validator=type_validator()) metadata = attr.ib( type=Optional[ImmutableDict[str, object]], validator=type_validator(), converter=freeze_optional_dict, default=None, ) parents = attr.ib(type=Tuple[Sha1Git, ...], validator=type_validator(), default=()) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) extra_headers = attr.ib( type=Tuple[Tuple[bytes, bytes], ...], validator=type_validator(), converter=tuplify_extra_headers, default=(), ) raw_manifest = attr.ib(type=Optional[bytes], default=None) def __attrs_post_init__(self): super().__attrs_post_init__() # ensure metadata is a deep copy of whatever was given, and if needed # extract extra_headers from there if self.metadata: metadata = self.metadata if not self.extra_headers and "extra_headers" in metadata: (extra_headers, metadata) = metadata.copy_pop("extra_headers") object.__setattr__( self, "extra_headers", tuplify_extra_headers(extra_headers), ) attr.validate(self) object.__setattr__(self, "metadata", metadata) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.revision_git_object(self)) @author.validator def check_author(self, attribute, value): """If the author is `None`, checks the date is `None` too.""" if self.author is None and self.date is not None: raise ValueError("revision date must be None if author is None.") @committer.validator def check_committer(self, attribute, value): """If the committer is `None`, checks the committer_date is `None` too.""" if self.committer is None and self.committer_date is not None: raise ValueError( "revision committer_date must be None if committer is None." 
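A sketch of the ``anonymize()`` behaviour defined above (the name, message, and target bytes are hypothetical): the author is replaced by an anonymized ``Person``, but since ``attr.evolve`` copies the already-computed ``id``, the identifier itself is untouched.

```python
# Sketch: anonymization swaps the author; attr.evolve keeps the computed id.
from swh.model.model import ObjectType, Person, Release

release = Release(
    name=b"v1.0",
    message=b"hypothetical release\n",
    target=b"\x01" * 20,
    target_type=ObjectType.REVISION,
    synthetic=False,
    author=Person.from_fullname(b"Jane Doe <jane@example.org>"),
)
anonymized = release.anonymize()
assert anonymized.author != release.author
assert anonymized.id == release.id  # id is carried over, not recomputed
```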
) @classmethod def from_dict(cls, d): d = d.copy() date = d.pop("date") if date: date = TimestampWithTimezone.from_dict(date) committer_date = d.pop("committer_date") if committer_date: committer_date = TimestampWithTimezone.from_dict(committer_date) author = d.pop("author") if author: author = Person.from_dict(author) committer = d.pop("committer") if committer: committer = Person.from_dict(committer) return cls( author=author, committer=committer, date=date, committer_date=committer_date, type=RevisionType(d.pop("type")), parents=tuple(d.pop("parents")), # for BW compat **d, ) def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.REVISION, object_id=self.id) def anonymize(self) -> "Revision": """Returns an anonymized version of the Revision object. Anonymization consists in replacing the author and committer with an anonymized Person object. """ return attr.evolve( self, author=None if self.author is None else self.author.anonymize(), committer=None if self.committer is None else self.committer.anonymize(), ) _DIR_ENTRY_TYPES = ["file", "dir", "rev"] @attr.s(frozen=True, slots=True) class DirectoryEntry(BaseModel): object_type: Final = "directory_entry" name = attr.ib(type=bytes, validator=type_validator()) type = attr.ib(type=str, validator=attr.validators.in_(_DIR_ENTRY_TYPES)) target = attr.ib(type=Sha1Git, validator=type_validator(), repr=hash_repr) perms = attr.ib(type=int, validator=type_validator(), converter=int, repr=oct) """Usually one of the values of `swh.model.from_disk.DentryPerms`.""" @name.validator def check_name(self, attribute, value): if b"/" in value: raise ValueError(f"{value!r} is not a valid directory entry name.") @attr.s(frozen=True, slots=True) class Directory(HashableObjectWithManifest, BaseModel): object_type: Final = "directory" entries = attr.ib(type=Tuple[DirectoryEntry, ...], validator=type_validator()) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) raw_manifest = attr.ib(type=Optional[bytes], default=None) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.directory_git_object(self)) @entries.validator def check_entries(self, attribute, value): seen = set() for entry in value: if entry.name in seen: # Cannot use self.swhid() here, self.id may be None raise ValueError( f"swh:1:dir:{hash_to_hex(self.id)} has duplicated entry name: " f"{entry.name!r}" ) seen.add(entry.name) @classmethod def from_dict(cls, d): d = d.copy() return cls( entries=tuple( DirectoryEntry.from_dict(entry) for entry in d.pop("entries") ), **d, ) def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.DIRECTORY, object_id=self.id) @classmethod def from_possibly_duplicated_entries( cls, *, entries: Tuple[DirectoryEntry, ...], id: Sha1Git = b"", raw_manifest: Optional[bytes] = None, ) -> Tuple[bool, "Directory"]: """Constructs a ``Directory`` object from a list of entries that may contain duplicated names. This is required to represent legacy objects that were ingested in the storage database before this check was added. As it is impossible for a ``Directory`` instance to have more than one entry with a given name, this function computes a ``raw_manifest`` and renames one of the entries before constructing the ``Directory``.
Returns: ``(is_corrupt, directory)`` where ``is_corrupt`` is True iff some entry names were indeed duplicated """ # First, try building a Directory object normally without any extra computation, # which works the overwhelming majority of the time: try: return (False, Directory(entries=entries, id=id, raw_manifest=raw_manifest)) except ValueError: pass # If it fails: # 1. compute a raw_manifest if there isn't already one: if raw_manifest is None: # invalid_directory behaves like a Directory object, but without the # duplicated entry check, which allows computing its raw_manifest invalid_directory = type("", (), {})() invalid_directory.entries = entries raw_manifest = git_objects.directory_git_object(invalid_directory) # 2. look for duplicated entries: entries_by_name: Dict[ bytes, Dict[str, List[DirectoryEntry]] ] = collections.defaultdict(lambda: collections.defaultdict(list)) for entry in entries: entries_by_name[entry.name][entry.type].append(entry) # 3. strip duplicates deduplicated_entries = [] for entry_lists in entries_by_name.values(): # We could pick one entry at random to keep the original name; but we try to # "minimize" the impact, by preserving entries of type "rev" first # (because renaming them would likely break git submodules entirely # when this directory is written to disk), # then entries of type "dir" (because renaming them affects the path # of every file in the dir, instead of just one "cnt"). dir_entry_types = ("rev", "dir", "file") assert set(dir_entry_types) == set(_DIR_ENTRY_TYPES) picked_winner = False # when True, all future entries must be renamed for type_ in dir_entry_types: for entry in entry_lists[type_]: if not picked_winner: # this is the "most important" entry according to this # heuristic; it gets to keep its name. deduplicated_entries.append(entry) picked_winner = True else: # the heuristic already found an entry more important than # this one; so this one must be renamed to something. # we pick the beginning of its hash, it should be good enough # to avoid any conflict. new_name = ( entry.name + b"_" + hash_to_bytehex(entry.target)[0:10] ) renamed_entry = attr.evolve(entry, name=new_name) deduplicated_entries.append(renamed_entry) # Finally, return the "fixed" directory dir_ = Directory( entries=tuple(deduplicated_entries), id=id, raw_manifest=raw_manifest ) return (True, dir_) @attr.s(frozen=True, slots=True) class BaseContent(BaseModel): status = attr.ib( type=str, validator=attr.validators.in_(["visible", "hidden", "absent"]) ) @staticmethod def _hash_data(data: bytes): """Hash some data, returning most of the fields of a content object""" d = MultiHash.from_data(data).digest() d["data"] = data d["length"] = len(data) return d @classmethod def from_dict(cls, d, use_subclass=True): if use_subclass: # Chooses a subclass to instantiate instead.
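A sketch of the deduplication heuristic above (entry names and targets are hypothetical): given a "dir" and a "file" entry sharing a name, the "dir" keeps the name and the "file" is renamed using a prefix of its target hash.

```python
# Sketch: "dir" outranks "file" in dir_entry_types, so the file is renamed
# to name + b"_" + first 10 hex bytes of its target.
from swh.model.model import Directory, DirectoryEntry

entries = (
    DirectoryEntry(name=b"foo", type="file", target=b"\x02" * 20, perms=0o100644),
    DirectoryEntry(name=b"foo", type="dir", target=b"\x01" * 20, perms=0o040000),
)
is_corrupt, fixed = Directory.from_possibly_duplicated_entries(entries=entries)
assert is_corrupt
assert {e.name for e in fixed.entries} == {b"foo", b"foo_" + b"02" * 5}
```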
if d["status"] == "absent": return SkippedContent.from_dict(d) else: return Content.from_dict(d) else: return super().from_dict(d) def get_hash(self, hash_name): if hash_name not in DEFAULT_ALGORITHMS: raise ValueError("{} is not a valid hash name.".format(hash_name)) return getattr(self, hash_name) def hashes(self) -> Dict[str, bytes]: """Returns a dictionary {hash_name: hash_value}""" return {algo: getattr(self, algo) for algo in DEFAULT_ALGORITHMS} @attr.s(frozen=True, slots=True) class Content(BaseContent): object_type: Final = "content" sha1 = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) sha1_git = attr.ib(type=Sha1Git, validator=type_validator(), repr=hash_repr) sha256 = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) blake2s256 = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) length = attr.ib(type=int, validator=type_validator()) status = attr.ib( type=str, validator=attr.validators.in_(["visible", "hidden"]), default="visible", ) data = attr.ib(type=Optional[bytes], validator=type_validator(), default=None) ctime = attr.ib( type=Optional[datetime.datetime], validator=type_validator(), default=None, eq=False, ) @length.validator def check_length(self, attribute, value): """Checks the length is positive.""" if value < 0: raise ValueError("Length must be positive.") @ctime.validator def check_ctime(self, attribute, value): """Checks the ctime has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("ctime must be a timezone-aware datetime.") def to_dict(self): content = super().to_dict() if content["data"] is None: del content["data"] if content["ctime"] is None: del content["ctime"] return content @classmethod def from_data(cls, data, status="visible", ctime=None) -> "Content": """Generate a Content from a given `data` byte string. This populates the Content with the hashes and length for the data passed as argument, as well as the data itself. """ d = cls._hash_data(data) d["status"] = status d["ctime"] = ctime return cls(**d) @classmethod def from_dict(cls, d): if isinstance(d.get("ctime"), str): d = d.copy() d["ctime"] = dateutil.parser.parse(d["ctime"]) return super().from_dict(d, use_subclass=False) def with_data(self) -> "Content": """Loads the `data` attribute; meaning that it is guaranteed not to be None after this call. This call is almost a no-op, but subclasses may overload this method to lazy-load data (eg. 
from disk or objstorage).""" if self.data is None: raise MissingData("Content data is None.") return self def unique_key(self) -> KeyType: return self.sha1 # TODO: use a dict of hashes def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.CONTENT, object_id=self.sha1_git) @attr.s(frozen=True, slots=True) class SkippedContent(BaseContent): object_type: Final = "skipped_content" sha1 = attr.ib(type=Optional[bytes], validator=type_validator(), repr=hash_repr) sha1_git = attr.ib( type=Optional[Sha1Git], validator=type_validator(), repr=hash_repr ) sha256 = attr.ib(type=Optional[bytes], validator=type_validator(), repr=hash_repr) blake2s256 = attr.ib( type=Optional[bytes], validator=type_validator(), repr=hash_repr ) length = attr.ib(type=Optional[int], validator=type_validator()) status = attr.ib(type=str, validator=attr.validators.in_(["absent"])) reason = attr.ib(type=Optional[str], validator=type_validator(), default=None) origin = attr.ib(type=Optional[str], validator=type_validator(), default=None) ctime = attr.ib( type=Optional[datetime.datetime], validator=type_validator(), default=None, eq=False, ) @reason.validator def check_reason(self, attribute, value): """Checks a reason is provided, as skipped contents are always absent.""" assert self.reason == value if value is None: raise ValueError("Must provide a reason if content is absent.") @length.validator def check_length(self, attribute, value): """Checks the length is non-negative or -1.""" if value < -1: raise ValueError("Length must be positive or -1.") @ctime.validator def check_ctime(self, attribute, value): """Checks the ctime has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("ctime must be a timezone-aware datetime.") def to_dict(self): content = super().to_dict() if content["origin"] is None: del content["origin"] if content["ctime"] is None: del content["ctime"] return content @classmethod def from_data( cls, data: bytes, reason: str, ctime: Optional[datetime.datetime] = None ) -> "SkippedContent": """Generate a SkippedContent from a given `data` byte string. This populates the SkippedContent with the hashes and length for the data passed as argument. You can use `attr.evolve` on such a generated content to nullify some of its attributes, e.g. for tests.
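A sketch of the ``attr.evolve`` usage the docstring above suggests (the data and reason are hypothetical):

```python
# Sketch: build a SkippedContent from data, then nullify one hash for a test.
import attr

from swh.model.model import SkippedContent

skipped = SkippedContent.from_data(b"secret blob", reason="blocklisted")
partial = attr.evolve(skipped, sha1_git=None)
assert partial.status == "absent" and partial.sha1_git is None
```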
""" d = cls._hash_data(data) del d["data"] d["status"] = "absent" d["reason"] = reason d["ctime"] = ctime return cls(**d) @classmethod def from_dict(cls, d): d2 = d.copy() if d2.pop("data", None) is not None: raise ValueError('SkippedContent has no "data" attribute %r' % d) return super().from_dict(d2, use_subclass=False) def unique_key(self) -> KeyType: return self.hashes() class MetadataAuthorityType(Enum): DEPOSIT_CLIENT = "deposit_client" FORGE = "forge" REGISTRY = "registry" def __repr__(self): return f"MetadataAuthorityType.{self.name}" @attr.s(frozen=True, slots=True) class MetadataAuthority(BaseModel): """Represents an entity that provides metadata about an origin or software artifact.""" object_type: Final = "metadata_authority" type = attr.ib(type=MetadataAuthorityType, validator=type_validator()) url = attr.ib(type=str, validator=type_validator()) metadata = attr.ib( type=Optional[ImmutableDict[str, Any]], default=None, validator=type_validator(), converter=freeze_optional_dict, ) def to_dict(self): d = super().to_dict() if d["metadata"] is None: del d["metadata"] return d @classmethod def from_dict(cls, d): d = { **d, "type": MetadataAuthorityType(d["type"]), } return super().from_dict(d) def unique_key(self) -> KeyType: return {"type": self.type.value, "url": self.url} @attr.s(frozen=True, slots=True) class MetadataFetcher(BaseModel): """Represents a software component used to fetch metadata from a metadata authority, and ingest them into the Software Heritage archive.""" object_type: Final = "metadata_fetcher" name = attr.ib(type=str, validator=type_validator()) version = attr.ib(type=str, validator=type_validator()) metadata = attr.ib( type=Optional[ImmutableDict[str, Any]], default=None, validator=type_validator(), converter=freeze_optional_dict, ) def to_dict(self): d = super().to_dict() if d["metadata"] is None: del d["metadata"] return d def unique_key(self) -> KeyType: return {"name": self.name, "version": self.version} def normalize_discovery_date(value: Any) -> datetime.datetime: if not isinstance(value, datetime.datetime): raise TypeError("discovery_date must be a timezone-aware datetime.") if value.tzinfo is None: raise ValueError("discovery_date must be a timezone-aware datetime.") # Normalize timezone to utc, and truncate microseconds to 0 return value.astimezone(datetime.timezone.utc).replace(microsecond=0) @attr.s(frozen=True, slots=True) class RawExtrinsicMetadata(HashableObject, BaseModel): object_type: Final = "raw_extrinsic_metadata" # target object target = attr.ib(type=ExtendedSWHID, validator=type_validator()) # source discovery_date = attr.ib(type=datetime.datetime, converter=normalize_discovery_date) authority = attr.ib(type=MetadataAuthority, validator=type_validator()) fetcher = attr.ib(type=MetadataFetcher, validator=type_validator()) # the metadata itself format = attr.ib(type=str, validator=type_validator()) metadata = attr.ib(type=bytes, validator=type_validator()) # context origin = attr.ib(type=Optional[str], default=None, validator=type_validator()) visit = attr.ib(type=Optional[int], default=None, validator=type_validator()) snapshot = attr.ib( type=Optional[CoreSWHID], default=None, validator=type_validator() ) release = attr.ib( type=Optional[CoreSWHID], default=None, validator=type_validator() ) revision = attr.ib( type=Optional[CoreSWHID], default=None, validator=type_validator() ) path = attr.ib(type=Optional[bytes], default=None, validator=type_validator()) directory = attr.ib( type=Optional[CoreSWHID], default=None, 
validator=type_validator() ) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest( git_objects.raw_extrinsic_metadata_git_object(self) ) @origin.validator def check_origin(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.SNAPSHOT, SwhidExtendedObjectType.RELEASE, SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'origin' context for " f"{self.target.object_type.name.lower()} object: {value}" ) if value.startswith("swh:"): # Technically this is valid; but: # 1. SWHIDs are URIs, not URLs # 2. if a SWHID gets here, it's very likely to be a mistake # (and we can remove this check if it turns out there is a # legitimate use for it). raise ValueError(f"SWHID used as context origin URL: {value}") @visit.validator def check_visit(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.SNAPSHOT, SwhidExtendedObjectType.RELEASE, SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'visit' context for " f"{self.target.object_type.name.lower()} object: {value}" ) if self.origin is None: raise ValueError("'origin' context must be set if 'visit' is.") if value <= 0: raise ValueError("Nonpositive visit id") @snapshot.validator def check_snapshot(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.RELEASE, SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'snapshot' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.SNAPSHOT, value) @release.validator def check_release(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'release' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.RELEASE, value) @revision.validator def check_revision(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'revision' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.REVISION, value) @path.validator def check_path(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'path' context for " f"{self.target.object_type.name.lower()} object: {value}" ) @directory.validator def check_directory(self, attribute, value): if value is None: return if self.target.object_type not in (SwhidExtendedObjectType.CONTENT,): raise ValueError( f"Unexpected 'directory' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.DIRECTORY, value) def _check_swhid(self, expected_object_type, swhid): if isinstance(swhid, str): raise ValueError(f"Expected SWHID, got a string: {swhid}") if swhid.object_type != expected_object_type: raise ValueError( f"Expected SWHID type 
'{expected_object_type.name.lower()}', " f"got '{swhid.object_type.name.lower()}' in {swhid}" ) def to_dict(self): d = super().to_dict() context_keys = ( "origin", "visit", "snapshot", "release", "revision", "directory", "path", ) for context_key in context_keys: if d[context_key] is None: del d[context_key] return d @classmethod def from_dict(cls, d): d = { **d, "target": ExtendedSWHID.from_string(d["target"]), "authority": MetadataAuthority.from_dict(d["authority"]), "fetcher": MetadataFetcher.from_dict(d["fetcher"]), } swhid_keys = ("snapshot", "release", "revision", "directory") for swhid_key in swhid_keys: if d.get(swhid_key): d[swhid_key] = CoreSWHID.from_string(d[swhid_key]) return super().from_dict(d) def swhid(self) -> ExtendedSWHID: """Returns a SWHID representing this RawExtrinsicMetadata object.""" return ExtendedSWHID( object_type=SwhidExtendedObjectType.RAW_EXTRINSIC_METADATA, object_id=self.id, ) @attr.s(frozen=True, slots=True) class ExtID(HashableObject, BaseModel): object_type: Final = "extid" extid_type = attr.ib(type=str, validator=type_validator()) extid = attr.ib(type=bytes, validator=type_validator()) target = attr.ib(type=CoreSWHID, validator=type_validator()) extid_version = attr.ib(type=int, validator=type_validator(), default=0) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) @classmethod def from_dict(cls, d): return cls( extid=d["extid"], extid_type=d["extid_type"], target=CoreSWHID.from_string(d["target"]), extid_version=d.get("extid_version", 0), ) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.extid_git_object(self)) # Note: we need the type ignore stanza here because mypy cannot figure that all # subclasses of BaseModel do have an object_type attribute, even if BaseModel # itself does not (because these are Final) SWH_MODEL_OBJECT_TYPES: Dict[str, Type[BaseModel]] = { cls.object_type: cls # type: ignore for cls in ( Person, Timestamp, TimestampWithTimezone, Origin, OriginVisit, OriginVisitStatus, Snapshot, SnapshotBranch, Release, Revision, Directory, DirectoryEntry, Content, SkippedContent, MetadataAuthority, MetadataFetcher, RawExtrinsicMetadata, ExtID, ) } diff --git a/swh/model/tests/test_identifiers.py b/swh/model/tests/test_identifiers.py index 793e6d5..d5a0eb3 100644 --- a/swh/model/tests/test_identifiers.py +++ b/swh/model/tests/test_identifiers.py @@ -1,1339 +1,1345 @@ # Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib from typing import Dict import unittest import pytest from swh.model import git_objects, hashutil from swh.model.hashutil import hash_to_bytes as _x from swh.model.model import ( Content, Directory, ExtID, Origin, RawExtrinsicMetadata, Release, Revision, Snapshot, TimestampWithTimezone, ) def remove_id(d: Dict) -> Dict: """Returns a (shallow) copy of a dict with the 'id' key removed.""" d = d.copy() if "id" in d: del d["id"] return d class UtilityFunctionsDateOffset(unittest.TestCase): def setUp(self): self.dates = { b"1448210036": { "seconds": 1448210036, "microseconds": 0, }, b"1448210036.002342": { "seconds": 1448210036, "microseconds": 2342, }, b"1448210036.12": { "seconds": 1448210036, "microseconds": 120000, }, } def test_format_date(self): for date_repr, date in self.dates.items(): 
self.assertEqual(git_objects.format_date(date), date_repr) content_example = { "status": "visible", "length": 5, "data": b"1984\n", "ctime": datetime.datetime(2015, 11, 22, 16, 33, 56, tzinfo=datetime.timezone.utc), } class ContentIdentifier(unittest.TestCase): def setUp(self): self.content_id = hashutil.MultiHash.from_data(content_example["data"]).digest() def test_content_identifier(self): self.assertEqual( Content.from_data(content_example["data"]).hashes(), self.content_id ) directory_example = { "id": _x("d7ed3d2c31d608823be58b1cbe57605310615231"), "entries": [ { "type": "file", "perms": 33188, "name": b"README", "target": _x("37ec8ea2110c0b7a32fbb0e872f6e7debbf95e21"), }, { "type": "file", "perms": 33188, "name": b"Rakefile", "target": _x("3bb0e8592a41ae3185ee32266c860714980dbed7"), }, { "type": "dir", "perms": 16384, "name": b"app", "target": _x("61e6e867f5d7ba3b40540869bc050b0c4fed9e95"), }, { "type": "file", "perms": 33188, "name": b"1.megabyte", "target": _x("7c2b2fbdd57d6765cdc9d84c2d7d333f11be7fb3"), }, { "type": "dir", "perms": 16384, "name": b"config", "target": _x("591dfe784a2e9ccc63aaba1cb68a765734310d98"), }, { "type": "dir", "perms": 16384, "name": b"public", "target": _x("9588bf4522c2b4648bfd1c61d175d1f88c1ad4a5"), }, { "type": "file", "perms": 33188, "name": b"development.sqlite3", "target": _x("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"), }, { "type": "dir", "perms": 16384, "name": b"doc", "target": _x("154705c6aa1c8ead8c99c7915373e3c44012057f"), }, { "type": "dir", "perms": 16384, "name": b"db", "target": _x("85f157bdc39356b7bc7de9d0099b4ced8b3b382c"), }, { "type": "dir", "perms": 16384, "name": b"log", "target": _x("5e3d3941c51cce73352dff89c805a304ba96fffe"), }, { "type": "dir", "perms": 16384, "name": b"script", "target": _x("1b278423caf176da3f3533592012502aa10f566c"), }, { "type": "dir", "perms": 16384, "name": b"test", "target": _x("035f0437c080bfd8711670b3e8677e686c69c763"), }, { "type": "dir", "perms": 16384, "name": b"vendor", "target": _x("7c0dc9ad978c1af3f9a4ce061e50f5918bd27138"), }, { "type": "rev", "perms": 57344, "name": b"will_paginate", "target": _x("3d531e169db92a16a9a8974f0ae6edf52e52659e"), }, # in git order, the dir named "order" should be between the files # named "order." and "order0" { "type": "dir", "perms": 16384, "name": b"order", "target": _x("62cdb7020ff920e5aa642c3d4066950dd1f01f4d"), }, { "type": "file", "perms": 16384, "name": b"order.", "target": _x("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"), }, { "type": "file", "perms": 16384, "name": b"order0", "target": _x("bbe960a25ea311d21d40669e93df2003ba9b90a2"), }, ], } class DirectoryIdentifier(unittest.TestCase): def setUp(self): self.directory = directory_example self.empty_directory = { "id": "4b825dc642cb6eb9a060e54bf8d69288fbee4904", "entries": [], } def test_dir_identifier(self): self.assertEqual(Directory.from_dict(self.directory).id, self.directory["id"]) self.assertEqual( Directory.from_dict(remove_id(self.directory)).id, self.directory["id"], ) def test_dir_identifier_entry_order(self): # Reverse order of entries, check the id is still the same. 
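The reversal test that follows passes because entries are hashed in git's order, not insertion order: ``directory_entry_sort_key`` (from the ``git_objects`` hunk earlier in this diff) compares a directory's name as if suffixed with ``/``, which is why ``directory_example`` places the dir ``order`` between the files ``order.`` and ``order0``. A sketch, with placeholder targets:

```python
# Sketch: b"order." < b"order/" < b"order0" in byte order, so the dir
# named b"order" sorts between the two files.
from swh.model.git_objects import directory_entry_sort_key
from swh.model.model import DirectoryEntry

names = [b"order0", b"order", b"order."]
entries = [
    DirectoryEntry(
        name=name,
        type="dir" if name == b"order" else "file",
        target=b"\x00" * 20,
        perms=0o040000 if name == b"order" else 0o100644,
    )
    for name in names
]
assert [e.name for e in sorted(entries, key=directory_entry_sort_key)] == [
    b"order.", b"order", b"order0",
]
```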
directory = {"entries": reversed(self.directory["entries"])} self.assertEqual( Directory.from_dict(remove_id(directory)).id, self.directory["id"], ) def test_dir_identifier_empty_directory(self): self.assertEqual( Directory.from_dict(remove_id(self.empty_directory)).id, _x(self.empty_directory["id"]), ) linus_tz = datetime.timezone(datetime.timedelta(minutes=-420)) revision_example = { "id": _x("bc0195aad0daa2ad5b0d76cce22b167bc3435590"), "directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "committer_date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, } class RevisionIdentifier(unittest.TestCase): def setUp(self): gpgsig = b"""\ -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.13 (Darwin) iQIcBAABAgAGBQJVJcYsAAoJEBiY3kIkQRNJVAUQAJ8/XQIfMqqC5oYeEFfHOPYZ L7qy46bXHVBa9Qd8zAJ2Dou3IbI2ZoF6/Et89K/UggOycMlt5FKV/9toWyuZv4Po L682wonoxX99qvVTHo6+wtnmYO7+G0f82h+qHMErxjP+I6gzRNBvRr+SfY7VlGdK wikMKOMWC5smrScSHITnOq1Ews5pe3N7qDYMzK0XVZmgDoaem4RSWMJs4My/qVLN e0CqYWq2A22GX7sXl6pjneJYQvcAXUX+CAzp24QnPSb+Q22Guj91TcxLFcHCTDdn qgqMsEyMiisoglwrCbO+D+1xq9mjN9tNFWP66SQ48mrrHYTBV5sz9eJyDfroJaLP CWgbDTgq6GzRMehHT3hXfYS5NNatjnhkNISXR7pnVP/obIi/vpWh5ll6Gd8q26z+ a/O41UzOaLTeNI365MWT4/cnXohVLRG7iVJbAbCxoQmEgsYMRc/pBAzWJtLfcB2G jdTswYL6+MUdL8sB9pZ82D+BP/YAdHe69CyTu1lk9RT2pYtI/kkfjHubXBCYEJSG +VGllBbYG6idQJpyrOYNRJyrDi9yvDJ2W+S0iQrlZrxzGBVGTB/y65S8C+2WTBcE lf1Qb5GDsQrZWgD+jtWTywOYHtCBwyCKSAXxSARMbNPeak9WPlcW/Jmu+fUcMe2x dg1KdHOa34shrKDaOVzW =od6m -----END PGP SIGNATURE-----""" self.revision = revision_example self.revision_none_metadata = { "id": _x("bc0195aad0daa2ad5b0d76cce22b167bc3435590"), "directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", }, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", }, "committer_date": datetime.datetime( 2015, 7, 12, 15, 10, 30, tzinfo=linus_tz ), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, "metadata": None, } self.synthetic_revision = { "id": _x("b2a7e1260492e344fab3cbf91bc13c91e05426fd"), "author": { "name": b"Software Heritage", "email": b"robot@softwareheritage.org", }, "date": { "timestamp": {"seconds": 1437047495}, "offset_bytes": b"+0000", }, "type": "tar", "committer": { "name": b"Software Heritage", "email": b"robot@softwareheritage.org", }, "committer_date": 1437047495, "synthetic": True, "parents": [], "message": b"synthetic revision message\n", "directory": _x("d11f00a6a0fea6055341d25584b5a96516c0d2b8"), "metadata": { "original_artifact": [ { "archive_type": "tar", "name": "gcc-5.2.0.tar.bz2", "sha1_git": "39d281aff934d44b439730057e55b055e206a586", "sha1": "fe3f5390949d47054b613edc36c557eb1d51c18e", "sha256": "5f835b04b5f7dd4f4d2dc96190ec1621b8d89f" "2dc6f638f9f8bc1b1014ba8cad", } ] }, } # cat commit.txt | git hash-object -t commit --stdin self.revision_with_extra_headers = { "id": _x("010d34f384fa99d047cdd5e2f41e56e5c2feee45"), "directory": 
_x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "committer_date": datetime.datetime( 2015, 7, 12, 15, 10, 30, tzinfo=linus_tz ), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, "extra_headers": ( (b"svn-repo-uuid", b"046f1af7-66c2-d61b-5410-ce57b7db7bff"), (b"svn-revision", b"10"), ), } self.revision_with_gpgsig = { "id": _x("44cc742a8ca17b9c279be4cc195a93a6ef7a320e"), "directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"), "parents": [ _x("689664ae944b4692724f13b709a4e4de28b54e57"), _x("c888305e1efbaa252d01b4e5e6b778f865a97514"), ], "author": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", "fullname": b"Jiang Xin ", }, "date": { "timestamp": 1428538899, "offset": 480, }, "committer": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", }, "committer_date": { "timestamp": 1428538899, "offset": 480, }, "extra_headers": ((b"gpgsig", gpgsig),), "message": b"""Merge branch 'master' of git://github.com/alexhenrie/git-po * 'master' of git://github.com/alexhenrie/git-po: l10n: ca.po: update translation """, "type": "git", "synthetic": False, } self.revision_no_message = { "id": _x("4cfc623c9238fa92c832beed000ce2d003fd8333"), "directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"), "parents": [ _x("689664ae944b4692724f13b709a4e4de28b54e57"), _x("c888305e1efbaa252d01b4e5e6b778f865a97514"), ], "author": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", "fullname": b"Jiang Xin ", }, "date": { "timestamp": 1428538899, "offset": 480, }, "committer": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", }, "committer_date": { "timestamp": 1428538899, "offset": 480, }, "message": None, "type": "git", "synthetic": False, } self.revision_empty_message = { "id": _x("7442cd78bd3b4966921d6a7f7447417b7acb15eb"), "directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"), "parents": [ _x("689664ae944b4692724f13b709a4e4de28b54e57"), _x("c888305e1efbaa252d01b4e5e6b778f865a97514"), ], "author": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", "fullname": b"Jiang Xin ", }, "date": { "timestamp": 1428538899, "offset": 480, }, "committer": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", }, "committer_date": { "timestamp": 1428538899, "offset": 480, }, "message": b"", "type": "git", "synthetic": False, } self.revision_only_fullname = { "id": _x("010d34f384fa99d047cdd5e2f41e56e5c2feee45"), "directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": { "fullname": b"Linus Torvalds ", }, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "fullname": b"Linus Torvalds ", }, "committer_date": datetime.datetime( 2015, 7, 12, 15, 10, 30, tzinfo=linus_tz ), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, "extra_headers": ( (b"svn-repo-uuid", b"046f1af7-66c2-d61b-5410-ce57b7db7bff"), (b"svn-revision", b"10"), ), } def test_revision_identifier(self): self.assertEqual( Revision.from_dict(self.revision).id, self.revision["id"], ) self.assertEqual( Revision.from_dict(remove_id(self.revision)).id, self.revision["id"], ) def 
test_revision_identifier_none_metadata(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_none_metadata)).id, self.revision_none_metadata["id"], ) def test_revision_identifier_synthetic(self): self.assertEqual( Revision.from_dict(remove_id(self.synthetic_revision)).id, self.synthetic_revision["id"], ) def test_revision_identifier_with_extra_headers(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_with_extra_headers)).id, self.revision_with_extra_headers["id"], ) def test_revision_identifier_with_gpgsig(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_with_gpgsig)).id, self.revision_with_gpgsig["id"], ) def test_revision_identifier_no_message(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_no_message)).id, self.revision_no_message["id"], ) def test_revision_identifier_empty_message(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_empty_message)).id, self.revision_empty_message["id"], ) def test_revision_identifier_only_fullname(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_only_fullname)).id, self.revision_only_fullname["id"], ) release_example = { "id": _x("2b10839e32c4c476e9d94492756bb1a3e1ec4aa8"), "target": _x("741b2252a5e14d6c60a913c77a6099abe73a854a"), "target_type": "revision", "name": b"v2.6.14", "author": { "name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org", "fullname": b"Linus Torvalds ", }, "date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), "message": b"""\ Linux 2.6.14 release -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.1 (GNU/Linux) iD8DBQBDYWq6F3YsRnbiHLsRAmaeAJ9RCez0y8rOBbhSv344h86l/VVcugCeIhO1 wdLOnvj91G4wxYqrvThthbE= =7VeT -----END PGP SIGNATURE----- """, "synthetic": False, } class ReleaseIdentifier(unittest.TestCase): def setUp(self): linus_tz = datetime.timezone(datetime.timedelta(minutes=-420)) self.release = release_example self.release_no_author = { "id": _x("26791a8bcf0e6d33f43aef7682bdb555236d56de"), "target": _x("9ee1c939d1cb936b1f98e8d81aeffab57bae46ab"), "target_type": "revision", "name": b"v2.6.12", "message": b"""\ This is the final 2.6.12 release -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.2.4 (GNU/Linux) iD8DBQBCsykyF3YsRnbiHLsRAvPNAJ482tCZwuxp/bJRz7Q98MHlN83TpACdHr37 o6X/3T+vm8K3bf3driRr34c= =sBHn -----END PGP SIGNATURE----- """, "synthetic": False, } self.release_no_message = { "id": _x("b6f4f446715f7d9543ef54e41b62982f0db40045"), "target": _x("9ee1c939d1cb936b1f98e8d81aeffab57bae46ab"), "target_type": "revision", "name": b"v2.6.12", "author": { "name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org", }, "date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), "message": None, "synthetic": False, } self.release_empty_message = { "id": _x("71a0aea72444d396575dc25ac37fec87ee3c6492"), "target": _x("9ee1c939d1cb936b1f98e8d81aeffab57bae46ab"), "target_type": "revision", "name": b"v2.6.12", "author": { "name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org", }, "date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), "message": b"", "synthetic": False, } self.release_negative_utc = { "id": _x("97c8d2573a001f88e72d75f596cf86b12b82fd01"), "name": b"20081029", "target": _x("54e9abca4c77421e2921f5f156c9fe4a9f7441c7"), "target_type": "revision", "date": { "timestamp": {"seconds": 1225281976}, "offset_bytes": b"-0000", }, "author": { "name": b"Otavio Salvador", "email": b"otavio@debian.org", }, "synthetic": False, "message": b"tagging version 20081029\n\nr56558\n", } 
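The ``release_negative_utc`` fixture above exists because ``b"-0000"`` and ``b"+0000"`` denote the same instant yet must hash differently, which is why ``TimestampWithTimezone`` preserves the raw offset bytes. A sketch, reusing that fixture's timestamp:

```python
# Sketch: same instant, same parsed offset, but distinct model objects
# (and hence distinct hashed manifests).
from swh.model.model import Timestamp, TimestampWithTimezone

ts = Timestamp(seconds=1225281976, microseconds=0)
negative = TimestampWithTimezone(timestamp=ts, offset_bytes=b"-0000")
positive = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0000")
assert negative.offset_minutes() == positive.offset_minutes() == 0
assert negative != positive
```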
self.release_newline_in_author = { "author": { "email": b"esycat@gmail.com", "fullname": b"Eugene Janusov\n", "name": b"Eugene Janusov\n", }, "date": { "offset_bytes": b"+1000", "timestamp": { "microseconds": 0, "seconds": 1377480558, }, }, "id": _x("5c98f559d034162de22d3ebeb95433e6f8885231"), "message": b"Release of v0.3.2.", "name": b"0.3.2", "synthetic": False, "target": _x("c06aa3d93b78a2865c4935170030f8c2d7396fd3"), "target_type": "revision", } self.release_snapshot_target = dict(self.release) self.release_snapshot_target["target_type"] = "snapshot" self.release_snapshot_target["id"] = _x( "c29c3ddcc6769a04e54dd69d63a6fdcbc566f850" ) def test_release_identifier(self): self.assertEqual( Release.from_dict(self.release).id, self.release["id"], ) self.assertEqual( Release.from_dict(remove_id(self.release)).id, self.release["id"], ) def test_release_identifier_no_author(self): self.assertEqual( Release.from_dict(remove_id(self.release_no_author)).id, self.release_no_author["id"], ) def test_release_identifier_no_message(self): self.assertEqual( Release.from_dict(remove_id(self.release_no_message)).id, self.release_no_message["id"], ) def test_release_identifier_empty_message(self): self.assertEqual( Release.from_dict(remove_id(self.release_empty_message)).id, self.release_empty_message["id"], ) def test_release_identifier_negative_utc(self): self.assertEqual( Release.from_dict(remove_id(self.release_negative_utc)).id, self.release_negative_utc["id"], ) def test_release_identifier_newline_in_author(self): self.assertEqual( Release.from_dict(remove_id(self.release_newline_in_author)).id, self.release_newline_in_author["id"], ) def test_release_identifier_snapshot_target(self): self.assertEqual( Release.from_dict(self.release_snapshot_target).id, self.release_snapshot_target["id"], ) snapshot_example = { "id": _x("6e65b86363953b780d92b0a928f3e8fcdd10db36"), "branches": { b"directory": { "target": _x("1bd0e65f7d2ff14ae994de17a1e7fe65111dcad8"), "target_type": "directory", }, b"content": { "target": _x("fe95a46679d128ff167b7c55df5d02356c5a1ae1"), "target_type": "content", }, b"alias": { "target": b"revision", "target_type": "alias", }, b"revision": { "target": _x("aafb16d69fd30ff58afdd69036a26047f3aebdc6"), "target_type": "revision", }, b"release": { "target": _x("7045404f3d1c54e6473c71bbb716529fbad4be24"), "target_type": "release", }, b"snapshot": { "target": _x("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), "target_type": "snapshot", }, b"dangling": None, }, } class SnapshotIdentifier(unittest.TestCase): def setUp(self): super().setUp() self.empty = { "id": _x("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), "branches": {}, } self.dangling_branch = { "id": _x("c84502e821eb21ed84e9fd3ec40973abc8b32353"), "branches": { b"HEAD": None, }, } self.unresolved = { "id": _x("84b4548ea486e4b0a7933fa541ff1503a0afe1e0"), "branches": { b"foo": { "target": b"bar", "target_type": "alias", }, }, } self.all_types = snapshot_example def test_empty_snapshot(self): self.assertEqual( Snapshot.from_dict(remove_id(self.empty)).id, self.empty["id"], ) def test_dangling_branch(self): self.assertEqual( Snapshot.from_dict(remove_id(self.dangling_branch)).id, self.dangling_branch["id"], ) def test_unresolved(self): + self.assertEqual( + Snapshot.from_dict(remove_id(self.unresolved)).id, self.unresolved["id"] + ) + + def test_git_object_unresolved(self): with self.assertRaisesRegex(ValueError, "b'foo' -> b'bar'"): - Snapshot.from_dict(remove_id(self.unresolved)) + git_objects.snapshot_git_object(self.unresolved) + 
git_objects.snapshot_git_object(self.unresolved, ignore_unresolved=True) def test_all_types(self): self.assertEqual( Snapshot.from_dict(remove_id(self.all_types)).id, self.all_types["id"], ) authority_example = { "type": "forge", "url": "https://forge.softwareheritage.org/", } fetcher_example = { "name": "swh-phabricator-metadata-fetcher", "version": "0.0.1", } metadata_example = { "target": "swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d", "discovery_date": datetime.datetime( 2021, 1, 25, 11, 27, 51, tzinfo=datetime.timezone.utc ), "authority": authority_example, "fetcher": fetcher_example, "format": "json", "metadata": b'{"foo": "bar"}', } class RawExtrinsicMetadataIdentifier(unittest.TestCase): def setUp(self): super().setUp() self.minimal = metadata_example self.maximal = { **self.minimal, "origin": "https://forge.softwareheritage.org/source/swh-model/", "visit": 42, "snapshot": "swh:1:snp:" + "00" * 20, "release": "swh:1:rel:" + "01" * 20, "revision": "swh:1:rev:" + "02" * 20, "path": b"/abc/def", "directory": "swh:1:dir:" + "03" * 20, } def test_minimal(self): git_object = ( b"raw_extrinsic_metadata 210\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date 1611574071\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(self.minimal) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.minimal).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.minimal).id, _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"), ) def test_maximal(self): git_object = ( b"raw_extrinsic_metadata 533\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date 1611574071\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"origin https://forge.softwareheritage.org/source/swh-model/\n" b"visit 42\n" b"snapshot swh:1:snp:0000000000000000000000000000000000000000\n" b"release swh:1:rel:0101010101010101010101010101010101010101\n" b"revision swh:1:rev:0202020202020202020202020202020202020202\n" b"path /abc/def\n" b"directory swh:1:dir:0303030303030303030303030303030303030303\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(self.maximal) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.maximal).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.maximal).id, _x("f96966e1093d15236a31fde07e47d5b1c9428049"), ) def test_nonascii_path(self): metadata = { **self.minimal, "path": b"/ab\nc/d\xf0\x9f\xa4\xb7e\x00f", } git_object = ( b"raw_extrinsic_metadata 231\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date 1611574071\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"path /ab\n" b" c/d\xf0\x9f\xa4\xb7e\x00f\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("7cc83fd1912176510c083f5df43f01b09af4b333"), ) def 
test_timezone_insensitive(self): """Checks the timezone of the datetime.datetime does not affect the hashed git_object.""" utc_plus_one = datetime.timezone(datetime.timedelta(hours=1)) metadata = { **self.minimal, "discovery_date": datetime.datetime( 2021, 1, 25, 12, 27, 51, tzinfo=utc_plus_one, ), } self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(self.minimal) ), git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.minimal).id, RawExtrinsicMetadata.from_dict(metadata).id, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"), ) def test_microsecond_insensitive(self): """Checks the microseconds of the datetime.datetime does not affect the hashed manifest.""" metadata = { **self.minimal, "discovery_date": datetime.datetime( 2021, 1, 25, 11, 27, 51, 123456, tzinfo=datetime.timezone.utc, ), } self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(self.minimal) ), git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.minimal).id, RawExtrinsicMetadata.from_dict(metadata).id, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"), ) def test_noninteger_timezone(self): """Checks the discovery_date is translated to UTC before truncating microseconds""" tz = datetime.timezone(datetime.timedelta(microseconds=-42)) metadata = { **self.minimal, "discovery_date": datetime.datetime( 2021, 1, 25, 11, 27, 50, 1_000_000 - 42, tzinfo=tz, ), } self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(self.minimal) ), git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.minimal).id, RawExtrinsicMetadata.from_dict(metadata).id, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"), ) def test_negative_timestamp(self): metadata = { **self.minimal, "discovery_date": datetime.datetime( 1960, 1, 25, 11, 27, 51, tzinfo=datetime.timezone.utc, ), } git_object = ( b"raw_extrinsic_metadata 210\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date -313504329\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("895d0821a2991dd376ddc303424aceb7c68280f9"), ) def test_epoch(self): metadata = { **self.minimal, "discovery_date": datetime.datetime( 1970, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc, ), } git_object = ( b"raw_extrinsic_metadata 201\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date 0\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), git_object, ) self.assertEqual( 
RawExtrinsicMetadata.from_dict(metadata).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("27a53df54ace35ebd910493cdc70b334d6b7cb88"), ) def test_negative_epoch(self): metadata = { **self.minimal, "discovery_date": datetime.datetime( 1969, 12, 31, 23, 59, 59, 1, tzinfo=datetime.timezone.utc, ), } git_object = ( b"raw_extrinsic_metadata 202\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date -1\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("be7154a8fd49d87f81547ea634d1e2152907d089"), ) origin_example = { "url": "https://github.com/torvalds/linux", } class OriginIdentifier(unittest.TestCase): def test_content_identifier(self): self.assertEqual( Origin.from_dict(origin_example).id, _x("b63a575fe3faab7692c9f38fb09d4bb45651bb0f"), ) # Format: [ # ( # input1, # expected_output1, # ), # ( # input2, # expected_output2, # ), # ... # ] TS_DICTS = [ # with current input dict format (offset_bytes) ( {"timestamp": 12345, "offset_bytes": b"+0000"}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ( {"timestamp": 12345, "offset_bytes": b"-0000"}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"-0000", }, ), ( {"timestamp": 12345, "offset_bytes": b"+0200"}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0200", }, ), ( {"timestamp": 12345, "offset_bytes": b"-0200"}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"-0200", }, ), ( {"timestamp": 12345, "offset_bytes": b"--700"}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"--700", }, ), ( {"timestamp": 12345, "offset_bytes": b"1234567"}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"1234567", }, ), # with old-style input dicts (numeric offset + optional negative_utc): ( {"timestamp": 12345, "offset": 0}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": False}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": False}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": None}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ( {"timestamp": {"seconds": 12345}, "offset": 0, "negative_utc": None}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ( { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset": 0, "negative_utc": None, }, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ( { "timestamp": {"seconds": 12345, "microseconds": 100}, "offset": 0, "negative_utc": None, }, { "timestamp": {"seconds": 12345, "microseconds": 100}, "offset_bytes": b"+0000", }, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": True}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"-0000", }, ), ( {"timestamp": 12345, "offset": 0, 
"negative_utc": None}, { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000", }, ), ] @pytest.mark.parametrize("dict_input,expected", TS_DICTS) def test_normalize_timestamp_dict(dict_input, expected): assert TimestampWithTimezone.from_dict(dict_input).to_dict() == expected TS_DICTS_INVALID_TIMESTAMP = [ {"timestamp": 1.2, "offset": 0}, {"timestamp": "1", "offset": 0}, # these below should really also trigger a ValueError... # {"timestamp": {"seconds": "1"}, "offset": 0}, # {"timestamp": {"seconds": 1.2}, "offset": 0}, # {"timestamp": {"seconds": 1.2}, "offset": 0}, ] @pytest.mark.parametrize("dict_input", TS_DICTS_INVALID_TIMESTAMP) def test_normalize_timestamp_dict_invalid_timestamp(dict_input): with pytest.raises(ValueError, match="non-integer timestamp"): TimestampWithTimezone.from_dict(dict_input) UTC = datetime.timezone.utc TS_TIMEZONES = [ datetime.timezone.min, datetime.timezone(datetime.timedelta(hours=-1)), UTC, datetime.timezone(datetime.timedelta(minutes=+60)), datetime.timezone.max, ] TS_TZ_EXPECTED = [-1439, -60, 0, 60, 1439] TS_TZ_BYTES_EXPECTED = [b"-2359", b"-0100", b"+0000", b"+0100", b"+2359"] TS_DATETIMES = [ datetime.datetime(2020, 2, 27, 14, 39, 19, tzinfo=UTC), datetime.datetime(2120, 12, 31, 23, 59, 59, tzinfo=UTC), datetime.datetime(1610, 5, 14, 15, 43, 0, tzinfo=UTC), ] TS_DT_EXPECTED = [1582814359, 4765132799, -11348929020] @pytest.mark.parametrize("date, seconds", zip(TS_DATETIMES, TS_DT_EXPECTED)) @pytest.mark.parametrize( "tz, offset, offset_bytes", zip(TS_TIMEZONES, TS_TZ_EXPECTED, TS_TZ_BYTES_EXPECTED) ) @pytest.mark.parametrize("microsecond", [0, 1, 10, 100, 1000, 999999]) def test_normalize_timestamp_datetime( date, seconds, tz, offset, offset_bytes, microsecond ): date = date.astimezone(tz).replace(microsecond=microsecond) assert TimestampWithTimezone.from_dict(date).to_dict() == { "timestamp": {"seconds": seconds, "microseconds": microsecond}, "offset_bytes": offset_bytes, } def test_extid_identifier_bwcompat(): extid_dict = { "extid_type": "test-type", "extid": b"extid", "target": "swh:1:dir:" + "00" * 20, } assert ExtID.from_dict(extid_dict).id == _x( "b9295e1931c31e40a7e3e1e967decd1c89426455" ) assert ( ExtID.from_dict({**extid_dict, "extid_version": 0}).id == ExtID.from_dict(extid_dict).id ) assert ( ExtID.from_dict({**extid_dict, "extid_version": 1}).id != ExtID.from_dict(extid_dict).id )