
diff --git a/swh/model/model.py b/swh/model/model.py
index baa51a7..6df310a 100644
--- a/swh/model/model.py
+++ b/swh/model/model.py
@@ -1,1105 +1,1095 @@
# Copyright (C) 2018-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from abc import ABCMeta, abstractmethod
import datetime
from enum import Enum
from hashlib import sha256
from typing import Any, Dict, Iterable, Optional, Tuple, TypeVar, Union
import attr
from attrs_strict import type_validator
import dateutil.parser
import iso8601
from typing_extensions import Final
from .collections import ImmutableDict
from .hashutil import DEFAULT_ALGORITHMS, MultiHash, hash_to_bytes
from .identifiers import (
directory_identifier,
normalize_timestamp,
origin_identifier,
raw_extrinsic_metadata_identifier,
release_identifier,
revision_identifier,
snapshot_identifier,
)
from .identifiers import CoreSWHID
from .identifiers import ExtendedObjectType as SwhidExtendedObjectType
from .identifiers import ExtendedSWHID
from .identifiers import ObjectType as SwhidObjectType
class MissingData(Exception):
"""Raised by `Content.with_data` when it has no way of fetching the
data (but not when fetching the data fails)."""
pass
KeyType = Union[Dict[str, str], Dict[str, bytes], bytes]
"""The type returned by BaseModel.unique_key()."""
SHA1_SIZE = 20
# TODO: Limit this to 20 bytes
Sha1Git = bytes
Sha1 = bytes
KT = TypeVar("KT")
VT = TypeVar("VT")
def freeze_optional_dict(
d: Union[None, Dict[KT, VT], ImmutableDict[KT, VT]] # type: ignore
) -> Optional[ImmutableDict[KT, VT]]:
if isinstance(d, dict):
return ImmutableDict(d)
else:
return d
def dictify(value):
"Helper function used by BaseModel.to_dict()"
if isinstance(value, BaseModel):
return value.to_dict()
elif isinstance(value, (CoreSWHID, ExtendedSWHID)):
return str(value)
elif isinstance(value, Enum):
return value.value
elif isinstance(value, (dict, ImmutableDict)):
return {k: dictify(v) for k, v in value.items()}
elif isinstance(value, tuple):
return tuple(dictify(v) for v in value)
else:
return value
ModelType = TypeVar("ModelType", bound="BaseModel")
class BaseModel:
"""Base class for SWH model classes.
Provides serialization/deserialization to/from Python dictionaries
that are suitable for JSON/msgpack-like formats."""
__slots__ = ()
def to_dict(self):
"""Wrapper of `attr.asdict` that can be overridden by subclasses
that have special handling of some of the fields."""
return dictify(attr.asdict(self, recurse=False))
@classmethod
def from_dict(cls, d):
"""Takes a dictionary representing a tree of SWH objects, and
recursively builds the corresponding objects."""
return cls(**d)
def anonymize(self: ModelType) -> Optional[ModelType]:
"""Returns an anonymized version of the object, if needed.
If the object model does not need/support anonymization, returns None.
"""
return None
def unique_key(self) -> KeyType:
"""Returns a unique key for this object, that can be used for
deduplication."""
raise NotImplementedError(f"unique_key for {self}")
class HashableObject(metaclass=ABCMeta):
"""Mixin to automatically compute object identifier hash when
the associated model is instantiated."""
__slots__ = ()
@abstractmethod
def compute_hash(self) -> bytes:
"""Derived model classes must implement this to compute
the object hash.
This method is called by the object initialization if the `id`
attribute is set to an empty value.
"""
pass
def __attrs_post_init__(self):
if not self.id:
obj_id = self.compute_hash()
object.__setattr__(self, "id", obj_id)
def unique_key(self) -> KeyType:
return self.id # type: ignore
@attr.s(frozen=True, slots=True)
class Person(BaseModel):
"""Represents the author/committer of a revision or release."""
object_type: Final = "person"
fullname = attr.ib(type=bytes, validator=type_validator())
name = attr.ib(type=Optional[bytes], validator=type_validator())
email = attr.ib(type=Optional[bytes], validator=type_validator())
@classmethod
def from_fullname(cls, fullname: bytes):
"""Returns a Person object, by guessing the name and email from the
fullname, in the `name <email>` format.
The fullname is left unchanged."""
if fullname is None:
raise TypeError("fullname is None.")
name: Optional[bytes]
email: Optional[bytes]
try:
open_bracket = fullname.index(b"<")
except ValueError:
name = fullname
email = None
else:
raw_name = fullname[:open_bracket]
raw_email = fullname[open_bracket + 1 :]
if not raw_name:
name = None
else:
name = raw_name.strip()
try:
close_bracket = raw_email.rindex(b">")
except ValueError:
email = raw_email
else:
email = raw_email[:close_bracket]
return Person(name=name or None, email=email or None, fullname=fullname,)
def anonymize(self) -> "Person":
"""Returns an anonymized version of the Person object.
Anonymization is simply a Person whose fullname is the sha256 hash of the
original fullname, with name and email unset.
"""
return Person(fullname=sha256(self.fullname).digest(), name=None, email=None,)
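A minimal sketch of the parsing and anonymization behavior above (values are
illustrative; assumes swh.model is installed):

from swh.model.model import Person

p = Person.from_fullname(b"Ada Lovelace <ada@example.org>")
assert p.name == b"Ada Lovelace"
assert p.email == b"ada@example.org"
assert p.fullname == b"Ada Lovelace <ada@example.org>"  # left unchanged
anon = p.anonymize()
assert anon.name is None and anon.email is None
assert len(anon.fullname) == 32  # sha256 digest of the original fullname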
@attr.s(frozen=True, slots=True)
class Timestamp(BaseModel):
"""Represents a naive timestamp from a VCS."""
object_type: Final = "timestamp"
seconds = attr.ib(type=int, validator=type_validator())
microseconds = attr.ib(type=int, validator=type_validator())
@seconds.validator
def check_seconds(self, attribute, value):
"""Check that seconds fit in a 64-bits signed integer."""
if not (-(2 ** 63) <= value < 2 ** 63):
raise ValueError("Seconds must be a signed 64-bits integer.")
@microseconds.validator
def check_microseconds(self, attribute, value):
"""Checks that microseconds are positive and < 1000000."""
if not (0 <= value < 10 ** 6):
raise ValueError("Microseconds must be in [0, 1000000[.")
@attr.s(frozen=True, slots=True)
class TimestampWithTimezone(BaseModel):
"""Represents a TZ-aware timestamp from a VCS."""
object_type: Final = "timestamp_with_timezone"
timestamp = attr.ib(type=Timestamp, validator=type_validator())
offset = attr.ib(type=int, validator=type_validator())
negative_utc = attr.ib(type=bool, validator=type_validator())
@offset.validator
def check_offset(self, attribute, value):
"""Checks the offset is a 16-bits signed integer (in theory, it
should always be between -14 and +14 hours)."""
if not (-(2 ** 15) <= value < 2 ** 15):
# max 14 hours offset in theory, but you never know what
# you'll find in the wild...
raise ValueError("offset too large: %d minutes" % value)
@negative_utc.validator
def check_negative_utc(self, attribute, value):
if self.offset and value:
raise ValueError("negative_utc can only be True is offset=0")
@classmethod
def from_dict(cls, obj: Union[Dict, datetime.datetime, int]):
"""Builds a TimestampWithTimezone from any of the formats
accepted by :func:`swh.model.normalize_timestamp`."""
# TODO: this accepts way more types than just dicts; find a better
# name
d = normalize_timestamp(obj)
return cls(
timestamp=Timestamp.from_dict(d["timestamp"]),
offset=d["offset"],
negative_utc=d["negative_utc"],
)
@classmethod
def from_datetime(cls, dt: datetime.datetime):
return cls.from_dict(dt)
@classmethod
def from_iso8601(cls, s):
"""Builds a TimestampWithTimezone from an ISO8601-formatted string.
"""
dt = iso8601.parse_date(s)
tstz = cls.from_datetime(dt)
if dt.tzname() == "-00:00":
tstz = attr.evolve(tstz, negative_utc=True)
return tstz
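A usage sketch for the constructors above (illustrative date):

from swh.model.model import TimestampWithTimezone

tstz = TimestampWithTimezone.from_iso8601("2020-02-02T14:00:00+01:00")
assert tstz.timestamp.seconds == 1580648400  # 2020-02-02T13:00:00 UTC
assert tstz.offset == 60  # minutes east of UTC
assert tstz.negative_utc is False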
@attr.s(frozen=True, slots=True)
class Origin(BaseModel):
"""Represents a software source: a VCS and an URL."""
object_type: Final = "origin"
url = attr.ib(type=str, validator=type_validator())
def unique_key(self) -> KeyType:
return {"url": self.url}
def swhid(self) -> ExtendedSWHID:
"""Returns a SWHID representing this origin."""
return ExtendedSWHID(
object_type=SwhidExtendedObjectType.ORIGIN,
object_id=hash_to_bytes(origin_identifier(self.unique_key())),
)
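A sketch of the origin SWHID computation, using the URL from the test suite
further below:

from swh.model.model import Origin

origin = Origin(url="https://github.com/torvalds/linux")
assert str(origin.swhid()) == "swh:1:ori:b63a575fe3faab7692c9f38fb09d4bb45651bb0f"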
@attr.s(frozen=True, slots=True)
class OriginVisit(BaseModel):
"""Represents an origin visit with a given type at a given point in time, by a
SWH loader."""
object_type: Final = "origin_visit"
origin = attr.ib(type=str, validator=type_validator())
date = attr.ib(type=datetime.datetime, validator=type_validator())
type = attr.ib(type=str, validator=type_validator())
"""Should not be set before calling 'origin_visit_add()'."""
visit = attr.ib(type=Optional[int], validator=type_validator(), default=None)
@date.validator
def check_date(self, attribute, value):
"""Checks the date has a timezone."""
if value is not None and value.tzinfo is None:
raise ValueError("date must be a timezone-aware datetime.")
def to_dict(self):
"""Serializes the date as a string and omits the visit id if it is
`None`."""
ov = super().to_dict()
if ov["visit"] is None:
del ov["visit"]
return ov
def unique_key(self) -> KeyType:
return {"origin": self.origin, "date": str(self.date)}
@attr.s(frozen=True, slots=True)
class OriginVisitStatus(BaseModel):
"""Represents a visit update of an origin at a given point in time.
"""
object_type: Final = "origin_visit_status"
origin = attr.ib(type=str, validator=type_validator())
visit = attr.ib(type=int, validator=type_validator())
date = attr.ib(type=datetime.datetime, validator=type_validator())
status = attr.ib(
type=str,
validator=attr.validators.in_(
["created", "ongoing", "full", "partial", "not_found", "failed"]
),
)
snapshot = attr.ib(type=Optional[Sha1Git], validator=type_validator())
# Type is optional to be able to use it before adding it to the database model
type = attr.ib(type=Optional[str], validator=type_validator(), default=None)
metadata = attr.ib(
type=Optional[ImmutableDict[str, object]],
validator=type_validator(),
converter=freeze_optional_dict,
default=None,
)
@date.validator
def check_date(self, attribute, value):
"""Checks the date has a timezone."""
if value is not None and value.tzinfo is None:
raise ValueError("date must be a timezone-aware datetime.")
def unique_key(self) -> KeyType:
return {"origin": self.origin, "visit": str(self.visit), "date": str(self.date)}
class TargetType(Enum):
"""The type of content pointed to by a snapshot branch. Usually a
revision or an alias."""
CONTENT = "content"
DIRECTORY = "directory"
REVISION = "revision"
RELEASE = "release"
SNAPSHOT = "snapshot"
ALIAS = "alias"
class ObjectType(Enum):
"""The type of content pointed to by a release. Usually a revision"""
CONTENT = "content"
DIRECTORY = "directory"
REVISION = "revision"
RELEASE = "release"
SNAPSHOT = "snapshot"
@attr.s(frozen=True, slots=True)
class SnapshotBranch(BaseModel):
"""Represents one of the branches of a snapshot."""
object_type: Final = "snapshot_branch"
target = attr.ib(type=bytes, validator=type_validator())
target_type = attr.ib(type=TargetType, validator=type_validator())
@target.validator
def check_target(self, attribute, value):
"""Checks the target type is not an alias, checks the target is a
valid sha1_git."""
if self.target_type != TargetType.ALIAS and self.target is not None:
if len(value) != 20:
raise ValueError("Wrong length for bytes identifier: %d" % len(value))
@classmethod
def from_dict(cls, d):
return cls(target=d["target"], target_type=TargetType(d["target_type"]))
@attr.s(frozen=True, slots=True)
class Snapshot(HashableObject, BaseModel):
"""Represents the full state of an origin at a given point in time."""
object_type: Final = "snapshot"
branches = attr.ib(
type=ImmutableDict[bytes, Optional[SnapshotBranch]],
validator=type_validator(),
converter=freeze_optional_dict,
)
id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"")
def compute_hash(self) -> bytes:
return hash_to_bytes(snapshot_identifier(self.to_dict()))
@classmethod
def from_dict(cls, d):
d = d.copy()
return cls(
branches=ImmutableDict(
(name, SnapshotBranch.from_dict(branch) if branch else None)
for (name, branch) in d.pop("branches").items()
),
**d,
)
def swhid(self) -> CoreSWHID:
"""Returns a SWHID representing this object."""
return CoreSWHID(object_type=SwhidObjectType.SNAPSHOT, object_id=self.id)
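Since `id` defaults to b"", instantiating a Snapshot computes its intrinsic
hash automatically; a sketch using the well-known empty snapshot (same id as
in the tests below):

from swh.model.model import Snapshot

empty = Snapshot(branches={})
assert empty.id.hex() == "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"
assert str(empty.swhid()) == "swh:1:snp:" + empty.id.hex()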
@attr.s(frozen=True, slots=True)
class Release(HashableObject, BaseModel):
object_type: Final = "release"
name = attr.ib(type=bytes, validator=type_validator())
message = attr.ib(type=Optional[bytes], validator=type_validator())
target = attr.ib(type=Optional[Sha1Git], validator=type_validator())
target_type = attr.ib(type=ObjectType, validator=type_validator())
synthetic = attr.ib(type=bool, validator=type_validator())
author = attr.ib(type=Optional[Person], validator=type_validator(), default=None)
date = attr.ib(
type=Optional[TimestampWithTimezone], validator=type_validator(), default=None
)
metadata = attr.ib(
type=Optional[ImmutableDict[str, object]],
validator=type_validator(),
converter=freeze_optional_dict,
default=None,
)
id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"")
def compute_hash(self) -> bytes:
return hash_to_bytes(release_identifier(self.to_dict()))
@author.validator
def check_author(self, attribute, value):
"""If the author is `None`, checks the date is `None` too."""
if self.author is None and self.date is not None:
raise ValueError("release date must be None if author is None.")
def to_dict(self):
rel = super().to_dict()
if rel["metadata"] is None:
del rel["metadata"]
return rel
@classmethod
def from_dict(cls, d):
d = d.copy()
if d.get("author"):
d["author"] = Person.from_dict(d["author"])
if d.get("date"):
d["date"] = TimestampWithTimezone.from_dict(d["date"])
return cls(target_type=ObjectType(d.pop("target_type")), **d)
def swhid(self) -> CoreSWHID:
"""Returns a SWHID representing this object."""
return CoreSWHID(object_type=SwhidObjectType.RELEASE, object_id=self.id)
def anonymize(self) -> "Release":
"""Returns an anonymized version of the Release object.
Anonymization consists in replacing the author with an anonymized Person object.
"""
author = self.author and self.author.anonymize()
return attr.evolve(self, author=author)
class RevisionType(Enum):
GIT = "git"
TAR = "tar"
DSC = "dsc"
SUBVERSION = "svn"
MERCURIAL = "hg"
def tuplify_extra_headers(value: Iterable):
return tuple((k, v) for k, v in value)
@attr.s(frozen=True, slots=True)
class Revision(HashableObject, BaseModel):
object_type: Final = "revision"
message = attr.ib(type=Optional[bytes], validator=type_validator())
author = attr.ib(type=Person, validator=type_validator())
committer = attr.ib(type=Person, validator=type_validator())
date = attr.ib(type=Optional[TimestampWithTimezone], validator=type_validator())
committer_date = attr.ib(
type=Optional[TimestampWithTimezone], validator=type_validator()
)
type = attr.ib(type=RevisionType, validator=type_validator())
directory = attr.ib(type=Sha1Git, validator=type_validator())
synthetic = attr.ib(type=bool, validator=type_validator())
metadata = attr.ib(
type=Optional[ImmutableDict[str, object]],
validator=type_validator(),
converter=freeze_optional_dict,
default=None,
)
parents = attr.ib(type=Tuple[Sha1Git, ...], validator=type_validator(), default=())
id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"")
extra_headers = attr.ib(
type=Tuple[Tuple[bytes, bytes], ...],
validator=type_validator(),
converter=tuplify_extra_headers,
default=(),
)
def __attrs_post_init__(self):
super().__attrs_post_init__()
# ensure metadata is a deep copy of whatever was given, and if needed
# extract extra_headers from there
if self.metadata:
metadata = self.metadata
if not self.extra_headers and "extra_headers" in metadata:
(extra_headers, metadata) = metadata.copy_pop("extra_headers")
object.__setattr__(
self, "extra_headers", tuplify_extra_headers(extra_headers),
)
attr.validate(self)
object.__setattr__(self, "metadata", metadata)
def compute_hash(self) -> bytes:
return hash_to_bytes(revision_identifier(self.to_dict()))
@classmethod
def from_dict(cls, d):
d = d.copy()
date = d.pop("date")
if date:
date = TimestampWithTimezone.from_dict(date)
committer_date = d.pop("committer_date")
if committer_date:
committer_date = TimestampWithTimezone.from_dict(committer_date)
return cls(
author=Person.from_dict(d.pop("author")),
committer=Person.from_dict(d.pop("committer")),
date=date,
committer_date=committer_date,
type=RevisionType(d.pop("type")),
parents=tuple(d.pop("parents")), # for BW compat
**d,
)
def swhid(self) -> CoreSWHID:
"""Returns a SWHID representing this object."""
return CoreSWHID(object_type=SwhidObjectType.REVISION, object_id=self.id)
def anonymize(self) -> "Revision":
"""Returns an anonymized version of the Revision object.
Anonymization consists in replacing the author and committer with an anonymized
Person object.
"""
return attr.evolve(
self, author=self.author.anonymize(), committer=self.committer.anonymize()
)
@attr.s(frozen=True, slots=True)
class DirectoryEntry(BaseModel):
object_type: Final = "directory_entry"
name = attr.ib(type=bytes, validator=type_validator())
type = attr.ib(type=str, validator=attr.validators.in_(["file", "dir", "rev"]))
target = attr.ib(type=Sha1Git, validator=type_validator())
perms = attr.ib(type=int, validator=type_validator())
"""Usually one of the values of `swh.model.from_disk.DentryPerms`."""
@attr.s(frozen=True, slots=True)
class Directory(HashableObject, BaseModel):
object_type: Final = "directory"
entries = attr.ib(type=Tuple[DirectoryEntry, ...], validator=type_validator())
id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"")
def compute_hash(self) -> bytes:
return hash_to_bytes(directory_identifier(self.to_dict()))
@classmethod
def from_dict(cls, d):
d = d.copy()
return cls(
entries=tuple(
DirectoryEntry.from_dict(entry) for entry in d.pop("entries")
),
**d,
)
def swhid(self) -> CoreSWHID:
"""Returns a SWHID representing this object."""
return CoreSWHID(object_type=SwhidObjectType.DIRECTORY, object_id=self.id)
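A minimal Directory sketch (the entry target is an arbitrary sha1_git borrowed
from the test data below):

from swh.model.model import Directory, DirectoryEntry

entry = DirectoryEntry(
    name=b"README",
    type="file",
    target=bytes.fromhex("37ec8ea2110c0b7a32fbb0e872f6e7debbf95e21"),
    perms=0o100644,  # 33188: regular file
)
d = Directory(entries=(entry,))
assert len(d.id) == 20  # sha1_git computed at instantiation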
@attr.s(frozen=True, slots=True)
class BaseContent(BaseModel):
status = attr.ib(
type=str, validator=attr.validators.in_(["visible", "hidden", "absent"])
)
@staticmethod
def _hash_data(data: bytes):
"""Hash some data, returning most of the fields of a content object"""
d = MultiHash.from_data(data).digest()
d["data"] = data
d["length"] = len(data)
return d
@classmethod
def from_dict(cls, d, use_subclass=True):
if use_subclass:
# Chooses a subclass to instantiate instead.
if d["status"] == "absent":
return SkippedContent.from_dict(d)
else:
return Content.from_dict(d)
else:
return super().from_dict(d)
def get_hash(self, hash_name):
if hash_name not in DEFAULT_ALGORITHMS:
raise ValueError("{} is not a valid hash name.".format(hash_name))
return getattr(self, hash_name)
def hashes(self) -> Dict[str, bytes]:
"""Returns a dictionary {hash_name: hash_value}"""
return {algo: getattr(self, algo) for algo in DEFAULT_ALGORITHMS}
@attr.s(frozen=True, slots=True)
class Content(BaseContent):
object_type: Final = "content"
sha1 = attr.ib(type=bytes, validator=type_validator())
sha1_git = attr.ib(type=Sha1Git, validator=type_validator())
sha256 = attr.ib(type=bytes, validator=type_validator())
blake2s256 = attr.ib(type=bytes, validator=type_validator())
length = attr.ib(type=int, validator=type_validator())
status = attr.ib(
type=str,
validator=attr.validators.in_(["visible", "hidden"]),
default="visible",
)
data = attr.ib(type=Optional[bytes], validator=type_validator(), default=None)
ctime = attr.ib(
type=Optional[datetime.datetime],
validator=type_validator(),
default=None,
eq=False,
)
@length.validator
def check_length(self, attribute, value):
"""Checks the length is positive."""
if value < 0:
raise ValueError("Length must be positive.")
@ctime.validator
def check_ctime(self, attribute, value):
"""Checks the ctime has a timezone."""
if value is not None and value.tzinfo is None:
raise ValueError("ctime must be a timezone-aware datetime.")
def to_dict(self):
content = super().to_dict()
if content["data"] is None:
del content["data"]
if content["ctime"] is None:
del content["ctime"]
return content
@classmethod
def from_data(cls, data, status="visible", ctime=None) -> "Content":
"""Generate a Content from a given `data` byte string.
This populates the Content with the hashes and length for the data
passed as argument, as well as the data itself.
"""
d = cls._hash_data(data)
d["status"] = status
d["ctime"] = ctime
return cls(**d)
@classmethod
def from_dict(cls, d):
if isinstance(d.get("ctime"), str):
d = d.copy()
d["ctime"] = dateutil.parser.parse(d["ctime"])
return super().from_dict(d, use_subclass=False)
def with_data(self) -> "Content":
"""Loads the `data` attribute; meaning that it is guaranteed not to
be None after this call.
This call is almost a no-op, but subclasses may override this method
to lazy-load data (e.g. from disk or objstorage)."""
if self.data is None:
raise MissingData("Content data is None.")
return self
def unique_key(self) -> KeyType:
return self.sha1 # TODO: use a dict of hashes
def swhid(self) -> CoreSWHID:
"""Returns a SWHID representing this object."""
return CoreSWHID(object_type=SwhidObjectType.CONTENT, object_id=self.sha1_git)
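A sketch of `from_data`, which fills in the four default hashes and the length
(same payload as content_example in the tests below):

from swh.model.model import Content

c = Content.from_data(b"1984\n")
assert c.length == 5 and c.status == "visible"
assert set(c.hashes()) == {"sha1", "sha1_git", "sha256", "blake2s256"}
assert c.with_data().data == b"1984\n"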
@attr.s(frozen=True, slots=True)
class SkippedContent(BaseContent):
object_type: Final = "skipped_content"
sha1 = attr.ib(type=Optional[bytes], validator=type_validator())
sha1_git = attr.ib(type=Optional[Sha1Git], validator=type_validator())
sha256 = attr.ib(type=Optional[bytes], validator=type_validator())
blake2s256 = attr.ib(type=Optional[bytes], validator=type_validator())
length = attr.ib(type=Optional[int], validator=type_validator())
status = attr.ib(type=str, validator=attr.validators.in_(["absent"]))
reason = attr.ib(type=Optional[str], validator=type_validator(), default=None)
origin = attr.ib(type=Optional[str], validator=type_validator(), default=None)
ctime = attr.ib(
type=Optional[datetime.datetime],
validator=type_validator(),
default=None,
eq=False,
)
@reason.validator
def check_reason(self, attribute, value):
"""Checks the reason is full if status != absent."""
assert self.reason == value
if value is None:
raise ValueError("Must provide a reason if content is absent.")
@length.validator
def check_length(self, attribute, value):
"""Checks the length is positive or -1."""
if value < -1:
raise ValueError("Length must be positive or -1.")
@ctime.validator
def check_ctime(self, attribute, value):
"""Checks the ctime has a timezone."""
if value is not None and value.tzinfo is None:
raise ValueError("ctime must be a timezone-aware datetime.")
def to_dict(self):
content = super().to_dict()
if content["origin"] is None:
del content["origin"]
if content["ctime"] is None:
del content["ctime"]
return content
@classmethod
def from_data(
cls, data: bytes, reason: str, ctime: Optional[datetime.datetime] = None
) -> "SkippedContent":
"""Generate a SkippedContent from a given `data` byte string.
This populates the SkippedContent with the hashes and length for the
data passed as argument.
You can use `attr.evolve` on such a generated content to nullify some
of its attributes, e.g. for tests.
"""
d = cls._hash_data(data)
del d["data"]
d["status"] = "absent"
d["reason"] = reason
d["ctime"] = ctime
return cls(**d)
@classmethod
def from_dict(cls, d):
d2 = d.copy()
if d2.pop("data", None) is not None:
raise ValueError('SkippedContent has no "data" attribute %r' % d)
return super().from_dict(d2, use_subclass=False)
def unique_key(self) -> KeyType:
return self.hashes()
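The skipped counterpart keeps the hashes but drops the data (hypothetical
reason string):

from swh.model.model import SkippedContent

sc = SkippedContent.from_data(b"some data", reason="too large")
assert sc.status == "absent" and sc.reason == "too large"
assert not hasattr(sc, "data")  # SkippedContent stores no data
assert sc.unique_key() == sc.hashes()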
class MetadataAuthorityType(Enum):
DEPOSIT_CLIENT = "deposit_client"
FORGE = "forge"
REGISTRY = "registry"
@attr.s(frozen=True, slots=True)
class MetadataAuthority(BaseModel):
"""Represents an entity that provides metadata about an origin or
software artifact."""
object_type: Final = "metadata_authority"
type = attr.ib(type=MetadataAuthorityType, validator=type_validator())
url = attr.ib(type=str, validator=type_validator())
metadata = attr.ib(
type=Optional[ImmutableDict[str, Any]],
default=None,
validator=type_validator(),
converter=freeze_optional_dict,
)
def to_dict(self):
d = super().to_dict()
if d["metadata"] is None:
del d["metadata"]
return d
@classmethod
def from_dict(cls, d):
d["type"] = MetadataAuthorityType(d["type"])
return super().from_dict(d)
def unique_key(self) -> KeyType:
return {"type": self.type.value, "url": self.url}
@attr.s(frozen=True, slots=True)
class MetadataFetcher(BaseModel):
"""Represents a software component used to fetch metadata from a metadata
authority, and ingest them into the Software Heritage archive."""
object_type: Final = "metadata_fetcher"
name = attr.ib(type=str, validator=type_validator())
version = attr.ib(type=str, validator=type_validator())
metadata = attr.ib(
type=Optional[ImmutableDict[str, Any]],
default=None,
validator=type_validator(),
converter=freeze_optional_dict,
)
def to_dict(self):
d = super().to_dict()
if d["metadata"] is None:
del d["metadata"]
return d
def unique_key(self) -> KeyType:
return {"name": self.name, "version": self.version}
@attr.s(frozen=True, slots=True)
class RawExtrinsicMetadata(HashableObject, BaseModel):
object_type: Final = "raw_extrinsic_metadata"
# target object
target = attr.ib(type=ExtendedSWHID, validator=type_validator())
# source
discovery_date = attr.ib(type=datetime.datetime, validator=type_validator())
authority = attr.ib(type=MetadataAuthority, validator=type_validator())
fetcher = attr.ib(type=MetadataFetcher, validator=type_validator())
# the metadata itself
format = attr.ib(type=str, validator=type_validator())
metadata = attr.ib(type=bytes, validator=type_validator())
# context
origin = attr.ib(type=Optional[str], default=None, validator=type_validator())
visit = attr.ib(type=Optional[int], default=None, validator=type_validator())
snapshot = attr.ib(
type=Optional[CoreSWHID], default=None, validator=type_validator()
)
release = attr.ib(
type=Optional[CoreSWHID], default=None, validator=type_validator()
)
revision = attr.ib(
type=Optional[CoreSWHID], default=None, validator=type_validator()
)
path = attr.ib(type=Optional[bytes], default=None, validator=type_validator())
directory = attr.ib(
type=Optional[CoreSWHID], default=None, validator=type_validator()
)
id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"")
def compute_hash(self) -> bytes:
return hash_to_bytes(raw_extrinsic_metadata_identifier(self.to_dict()))
@discovery_date.validator
def check_discovery_date(self, attribute, value):
"""Checks the discovery_date has a timezone."""
if value is not None and value.tzinfo is None:
raise ValueError("discovery_date must be a timezone-aware datetime.")
@origin.validator
def check_origin(self, attribute, value):
if value is None:
return
if self.target.object_type not in (
SwhidExtendedObjectType.SNAPSHOT,
SwhidExtendedObjectType.RELEASE,
SwhidExtendedObjectType.REVISION,
SwhidExtendedObjectType.DIRECTORY,
SwhidExtendedObjectType.CONTENT,
):
raise ValueError(
f"Unexpected 'origin' context for "
f"{self.target.object_type.name.lower()} object: {value}"
)
if value.startswith("swh:"):
# Technically this is valid; but:
# 1. SWHIDs are URIs, not URLs
# 2. if a SWHID gets here, it's very likely to be a mistake
# (and we can remove this check if it turns out there is a
# legitimate use for it).
raise ValueError(f"SWHID used as context origin URL: {value}")
@visit.validator
def check_visit(self, attribute, value):
if value is None:
return
if self.target.object_type not in (
SwhidExtendedObjectType.SNAPSHOT,
SwhidExtendedObjectType.RELEASE,
SwhidExtendedObjectType.REVISION,
SwhidExtendedObjectType.DIRECTORY,
SwhidExtendedObjectType.CONTENT,
):
raise ValueError(
f"Unexpected 'visit' context for "
f"{self.target.object_type.name.lower()} object: {value}"
)
if self.origin is None:
raise ValueError("'origin' context must be set if 'visit' is.")
if value <= 0:
raise ValueError("Nonpositive visit id")
@snapshot.validator
def check_snapshot(self, attribute, value):
if value is None:
return
if self.target.object_type not in (
SwhidExtendedObjectType.RELEASE,
SwhidExtendedObjectType.REVISION,
SwhidExtendedObjectType.DIRECTORY,
SwhidExtendedObjectType.CONTENT,
):
raise ValueError(
f"Unexpected 'snapshot' context for "
f"{self.target.object_type.name.lower()} object: {value}"
)
self._check_swhid(SwhidObjectType.SNAPSHOT, value)
@release.validator
def check_release(self, attribute, value):
if value is None:
return
if self.target.object_type not in (
SwhidExtendedObjectType.REVISION,
SwhidExtendedObjectType.DIRECTORY,
SwhidExtendedObjectType.CONTENT,
):
raise ValueError(
f"Unexpected 'release' context for "
f"{self.target.object_type.name.lower()} object: {value}"
)
self._check_swhid(SwhidObjectType.RELEASE, value)
@revision.validator
def check_revision(self, attribute, value):
if value is None:
return
if self.target.object_type not in (
SwhidExtendedObjectType.DIRECTORY,
SwhidExtendedObjectType.CONTENT,
):
raise ValueError(
f"Unexpected 'revision' context for "
f"{self.target.object_type.name.lower()} object: {value}"
)
self._check_swhid(SwhidObjectType.REVISION, value)
@path.validator
def check_path(self, attribute, value):
if value is None:
return
if self.target.object_type not in (
SwhidExtendedObjectType.DIRECTORY,
SwhidExtendedObjectType.CONTENT,
):
raise ValueError(
f"Unexpected 'path' context for "
f"{self.target.object_type.name.lower()} object: {value}"
)
@directory.validator
def check_directory(self, attribute, value):
if value is None:
return
if self.target.object_type not in (SwhidExtendedObjectType.CONTENT,):
raise ValueError(
f"Unexpected 'directory' context for "
f"{self.target.object_type.name.lower()} object: {value}"
)
self._check_swhid(SwhidObjectType.DIRECTORY, value)
def _check_swhid(self, expected_object_type, swhid):
if isinstance(swhid, str):
raise ValueError(f"Expected SWHID, got a string: {swhid}")
if swhid.object_type != expected_object_type:
raise ValueError(
f"Expected SWHID type '{expected_object_type.name.lower()}', "
f"got '{swhid.object_type.name.lower()}' in {swhid}"
)
def to_dict(self):
d = super().to_dict()
context_keys = (
"origin",
"visit",
"snapshot",
"release",
"revision",
"directory",
"path",
)
for context_key in context_keys:
if d[context_key] is None:
del d[context_key]
return d
@classmethod
def from_dict(cls, d):
d = {
**d,
"target": ExtendedSWHID.from_string(d["target"]),
"authority": MetadataAuthority.from_dict(d["authority"]),
"fetcher": MetadataFetcher.from_dict(d["fetcher"]),
}
swhid_keys = ("snapshot", "release", "revision", "directory")
for swhid_key in swhid_keys:
if d.get(swhid_key):
d[swhid_key] = CoreSWHID.from_string(d[swhid_key])
return super().from_dict(d)
-
- def unique_key(self) -> KeyType:
- return {
- "target": str(self.target),
- "authority_type": self.authority.type.value,
- "authority_url": self.authority.url,
- "discovery_date": str(self.discovery_date),
- "fetcher_name": self.fetcher.name,
- "fetcher_version": self.fetcher.version,
- }
diff --git a/swh/model/tests/test_identifiers.py b/swh/model/tests/test_identifiers.py
index a235065..3ba5708 100644
--- a/swh/model/tests/test_identifiers.py
+++ b/swh/model/tests/test_identifiers.py
@@ -1,1737 +1,1736 @@
# Copyright (C) 2015-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import binascii
import datetime
import hashlib
import itertools
from typing import Dict
import unittest
import attr
import pytest
from swh.model import hashutil, identifiers
from swh.model.exceptions import ValidationError
from swh.model.hashutil import hash_to_bytes as _x
from swh.model.identifiers import (
SWHID_QUALIFIERS,
CoreSWHID,
ExtendedObjectType,
ExtendedSWHID,
ObjectType,
QualifiedSWHID,
normalize_timestamp,
)
def remove_id(d: Dict) -> Dict:
"""Returns a (shallow) copy of a dict with the 'id' key removed."""
d = d.copy()
if "id" in d:
del d["id"]
return d
class UtilityFunctionsIdentifier(unittest.TestCase):
def setUp(self):
self.str_id = "c2e41aae41ac17bd4a650770d6ee77f62e52235b"
self.bytes_id = binascii.unhexlify(self.str_id)
self.bad_type_id = object()
def test_identifier_to_bytes(self):
for id in [self.str_id, self.bytes_id]:
self.assertEqual(identifiers.identifier_to_bytes(id), self.bytes_id)
# wrong length
with self.assertRaises(ValueError) as cm:
identifiers.identifier_to_bytes(id[:-2])
self.assertIn("length", str(cm.exception))
with self.assertRaises(ValueError) as cm:
identifiers.identifier_to_bytes(self.bad_type_id)
self.assertIn("type", str(cm.exception))
def test_identifier_to_str(self):
for id in [self.str_id, self.bytes_id]:
self.assertEqual(identifiers.identifier_to_str(id), self.str_id)
# wrong length
with self.assertRaises(ValueError) as cm:
identifiers.identifier_to_str(id[:-2])
self.assertIn("length", str(cm.exception))
with self.assertRaises(ValueError) as cm:
identifiers.identifier_to_str(self.bad_type_id)
self.assertIn("type", str(cm.exception))
class UtilityFunctionsDateOffset(unittest.TestCase):
def setUp(self):
self.dates = {
b"1448210036": {"seconds": 1448210036, "microseconds": 0,},
b"1448210036.002342": {"seconds": 1448210036, "microseconds": 2342,},
b"1448210036.12": {"seconds": 1448210036, "microseconds": 120000,},
}
self.broken_dates = [
1448210036.12,
]
self.offsets = {
0: b"+0000",
-630: b"-1030",
800: b"+1320",
}
def test_format_date(self):
for date_repr, date in self.dates.items():
self.assertEqual(identifiers.format_date(date), date_repr)
def test_format_date_fail(self):
for date in self.broken_dates:
with self.assertRaises(ValueError):
identifiers.format_date(date)
def test_format_offset(self):
for offset, res in self.offsets.items():
self.assertEqual(identifiers.format_offset(offset), res)
content_example = {
"status": "visible",
"length": 5,
"data": b"1984\n",
"ctime": datetime.datetime(2015, 11, 22, 16, 33, 56, tzinfo=datetime.timezone.utc),
}
class ContentIdentifier(unittest.TestCase):
def setUp(self):
self.content_id = hashutil.MultiHash.from_data(content_example["data"]).digest()
def test_content_identifier(self):
self.assertEqual(
identifiers.content_identifier(content_example), self.content_id
)
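The digest used above is a plain dict keyed by algorithm name; a quick sketch:

from swh.model import hashutil

digest = hashutil.MultiHash.from_data(b"1984\n").digest()
assert sorted(digest) == ["blake2s256", "sha1", "sha1_git", "sha256"]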
directory_example = {
"id": "d7ed3d2c31d608823be58b1cbe57605310615231",
"entries": [
{
"type": "file",
"perms": 33188,
"name": b"README",
"target": _x("37ec8ea2110c0b7a32fbb0e872f6e7debbf95e21"),
},
{
"type": "file",
"perms": 33188,
"name": b"Rakefile",
"target": _x("3bb0e8592a41ae3185ee32266c860714980dbed7"),
},
{
"type": "dir",
"perms": 16384,
"name": b"app",
"target": _x("61e6e867f5d7ba3b40540869bc050b0c4fed9e95"),
},
{
"type": "file",
"perms": 33188,
"name": b"1.megabyte",
"target": _x("7c2b2fbdd57d6765cdc9d84c2d7d333f11be7fb3"),
},
{
"type": "dir",
"perms": 16384,
"name": b"config",
"target": _x("591dfe784a2e9ccc63aaba1cb68a765734310d98"),
},
{
"type": "dir",
"perms": 16384,
"name": b"public",
"target": _x("9588bf4522c2b4648bfd1c61d175d1f88c1ad4a5"),
},
{
"type": "file",
"perms": 33188,
"name": b"development.sqlite3",
"target": _x("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"),
},
{
"type": "dir",
"perms": 16384,
"name": b"doc",
"target": _x("154705c6aa1c8ead8c99c7915373e3c44012057f"),
},
{
"type": "dir",
"perms": 16384,
"name": b"db",
"target": _x("85f157bdc39356b7bc7de9d0099b4ced8b3b382c"),
},
{
"type": "dir",
"perms": 16384,
"name": b"log",
"target": _x("5e3d3941c51cce73352dff89c805a304ba96fffe"),
},
{
"type": "dir",
"perms": 16384,
"name": b"script",
"target": _x("1b278423caf176da3f3533592012502aa10f566c"),
},
{
"type": "dir",
"perms": 16384,
"name": b"test",
"target": _x("035f0437c080bfd8711670b3e8677e686c69c763"),
},
{
"type": "dir",
"perms": 16384,
"name": b"vendor",
"target": _x("7c0dc9ad978c1af3f9a4ce061e50f5918bd27138"),
},
{
"type": "rev",
"perms": 57344,
"name": b"will_paginate",
"target": _x("3d531e169db92a16a9a8974f0ae6edf52e52659e"),
},
# in git order, the dir named "order" should be between the files
# named "order." and "order0"
{
"type": "dir",
"perms": 16384,
"name": b"order",
"target": _x("62cdb7020ff920e5aa642c3d4066950dd1f01f4d"),
},
{
"type": "file",
"perms": 16384,
"name": b"order.",
"target": _x("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"),
},
{
"type": "file",
"perms": 16384,
"name": b"order0",
"target": _x("bbe960a25ea311d21d40669e93df2003ba9b90a2"),
},
],
}
dummy_qualifiers = {"origin": "https://example.com", "lines": "42"}
class DirectoryIdentifier(unittest.TestCase):
def setUp(self):
self.directory = directory_example
self.empty_directory = {
"id": "4b825dc642cb6eb9a060e54bf8d69288fbee4904",
"entries": [],
}
def test_dir_identifier(self):
self.assertEqual(
identifiers.directory_identifier(self.directory), self.directory["id"]
)
self.assertEqual(
identifiers.directory_identifier(remove_id(self.directory)),
self.directory["id"],
)
def test_dir_identifier_entry_order(self):
# Reverse order of entries, check the id is still the same.
directory = {"entries": reversed(self.directory["entries"])}
self.assertEqual(
identifiers.directory_identifier(remove_id(directory)), self.directory["id"]
)
def test_dir_identifier_empty_directory(self):
self.assertEqual(
identifiers.directory_identifier(remove_id(self.empty_directory)),
self.empty_directory["id"],
)
linus_tz = datetime.timezone(datetime.timedelta(minutes=-420))
revision_example = {
"id": "bc0195aad0daa2ad5b0d76cce22b167bc3435590",
"directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"),
"parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")],
"author": {
"name": b"Linus Torvalds",
"email": b"torvalds@linux-foundation.org",
"fullname": b"Linus Torvalds <torvalds@linux-foundation.org>",
},
"date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz),
"committer": {
"name": b"Linus Torvalds",
"email": b"torvalds@linux-foundation.org",
"fullname": b"Linus Torvalds <torvalds@linux-foundation.org>",
},
"committer_date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz),
"message": b"Linux 4.2-rc2\n",
"type": "git",
"synthetic": False,
}
class RevisionIdentifier(unittest.TestCase):
def setUp(self):
gpgsig = b"""\
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.13 (Darwin)
iQIcBAABAgAGBQJVJcYsAAoJEBiY3kIkQRNJVAUQAJ8/XQIfMqqC5oYeEFfHOPYZ
L7qy46bXHVBa9Qd8zAJ2Dou3IbI2ZoF6/Et89K/UggOycMlt5FKV/9toWyuZv4Po
L682wonoxX99qvVTHo6+wtnmYO7+G0f82h+qHMErxjP+I6gzRNBvRr+SfY7VlGdK
wikMKOMWC5smrScSHITnOq1Ews5pe3N7qDYMzK0XVZmgDoaem4RSWMJs4My/qVLN
e0CqYWq2A22GX7sXl6pjneJYQvcAXUX+CAzp24QnPSb+Q22Guj91TcxLFcHCTDdn
qgqMsEyMiisoglwrCbO+D+1xq9mjN9tNFWP66SQ48mrrHYTBV5sz9eJyDfroJaLP
CWgbDTgq6GzRMehHT3hXfYS5NNatjnhkNISXR7pnVP/obIi/vpWh5ll6Gd8q26z+
a/O41UzOaLTeNI365MWT4/cnXohVLRG7iVJbAbCxoQmEgsYMRc/pBAzWJtLfcB2G
jdTswYL6+MUdL8sB9pZ82D+BP/YAdHe69CyTu1lk9RT2pYtI/kkfjHubXBCYEJSG
+VGllBbYG6idQJpyrOYNRJyrDi9yvDJ2W+S0iQrlZrxzGBVGTB/y65S8C+2WTBcE
lf1Qb5GDsQrZWgD+jtWTywOYHtCBwyCKSAXxSARMbNPeak9WPlcW/Jmu+fUcMe2x
dg1KdHOa34shrKDaOVzW
=od6m
-----END PGP SIGNATURE-----"""
self.revision = revision_example
self.revision_none_metadata = {
"id": "bc0195aad0daa2ad5b0d76cce22b167bc3435590",
"directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"),
"parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")],
"author": {
"name": b"Linus Torvalds",
"email": b"torvalds@linux-foundation.org",
},
"date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz),
"committer": {
"name": b"Linus Torvalds",
"email": b"torvalds@linux-foundation.org",
},
"committer_date": datetime.datetime(
2015, 7, 12, 15, 10, 30, tzinfo=linus_tz
),
"message": b"Linux 4.2-rc2\n",
"metadata": None,
}
self.synthetic_revision = {
"id": b"\xb2\xa7\xe1&\x04\x92\xe3D\xfa\xb3\xcb\xf9\x1b\xc1<\x91"
b"\xe0T&\xfd",
"author": {
"name": b"Software Heritage",
"email": b"robot@softwareheritage.org",
},
"date": {
"timestamp": {"seconds": 1437047495},
"offset": 0,
"negative_utc": False,
},
"type": "tar",
"committer": {
"name": b"Software Heritage",
"email": b"robot@softwareheritage.org",
},
"committer_date": 1437047495,
"synthetic": True,
"parents": [None],
"message": b"synthetic revision message\n",
"directory": b"\xd1\x1f\x00\xa6\xa0\xfe\xa6\x05SA\xd2U\x84\xb5\xa9"
b"e\x16\xc0\xd2\xb8",
"metadata": {
"original_artifact": [
{
"archive_type": "tar",
"name": "gcc-5.2.0.tar.bz2",
"sha1_git": "39d281aff934d44b439730057e55b055e206a586",
"sha1": "fe3f5390949d47054b613edc36c557eb1d51c18e",
"sha256": "5f835b04b5f7dd4f4d2dc96190ec1621b8d89f"
"2dc6f638f9f8bc1b1014ba8cad",
}
]
},
}
# cat commit.txt | git hash-object -t commit --stdin
self.revision_with_extra_headers = {
"id": "010d34f384fa99d047cdd5e2f41e56e5c2feee45",
"directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"),
"parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")],
"author": {
"name": b"Linus Torvalds",
"email": b"torvalds@linux-foundation.org",
"fullname": b"Linus Torvalds <torvalds@linux-foundation.org>",
},
"date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz),
"committer": {
"name": b"Linus Torvalds",
"email": b"torvalds@linux-foundation.org",
"fullname": b"Linus Torvalds <torvalds@linux-foundation.org>",
},
"committer_date": datetime.datetime(
2015, 7, 12, 15, 10, 30, tzinfo=linus_tz
),
"message": b"Linux 4.2-rc2\n",
"extra_headers": (
(b"svn-repo-uuid", b"046f1af7-66c2-d61b-5410-ce57b7db7bff"),
(b"svn-revision", b"10"),
),
}
self.revision_with_gpgsig = {
"id": "44cc742a8ca17b9c279be4cc195a93a6ef7a320e",
"directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"),
"parents": [
_x("689664ae944b4692724f13b709a4e4de28b54e57"),
_x("c888305e1efbaa252d01b4e5e6b778f865a97514"),
],
"author": {
"name": b"Jiang Xin",
"email": b"worldhello.net@gmail.com",
"fullname": b"Jiang Xin <worldhello.net@gmail.com>",
},
"date": {"timestamp": 1428538899, "offset": 480,},
"committer": {"name": b"Jiang Xin", "email": b"worldhello.net@gmail.com",},
"committer_date": {"timestamp": 1428538899, "offset": 480,},
"extra_headers": ((b"gpgsig", gpgsig),),
"message": b"""Merge branch 'master' of git://github.com/alexhenrie/git-po
* 'master' of git://github.com/alexhenrie/git-po:
l10n: ca.po: update translation
""",
}
self.revision_no_message = {
"id": "4cfc623c9238fa92c832beed000ce2d003fd8333",
"directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"),
"parents": [
_x("689664ae944b4692724f13b709a4e4de28b54e57"),
_x("c888305e1efbaa252d01b4e5e6b778f865a97514"),
],
"author": {
"name": b"Jiang Xin",
"email": b"worldhello.net@gmail.com",
"fullname": b"Jiang Xin <worldhello.net@gmail.com>",
},
"date": {"timestamp": 1428538899, "offset": 480,},
"committer": {"name": b"Jiang Xin", "email": b"worldhello.net@gmail.com",},
"committer_date": {"timestamp": 1428538899, "offset": 480,},
"message": None,
}
self.revision_empty_message = {
"id": "7442cd78bd3b4966921d6a7f7447417b7acb15eb",
"directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"),
"parents": [
_x("689664ae944b4692724f13b709a4e4de28b54e57"),
_x("c888305e1efbaa252d01b4e5e6b778f865a97514"),
],
"author": {
"name": b"Jiang Xin",
"email": b"worldhello.net@gmail.com",
"fullname": b"Jiang Xin <worldhello.net@gmail.com>",
},
"date": {"timestamp": 1428538899, "offset": 480,},
"committer": {"name": b"Jiang Xin", "email": b"worldhello.net@gmail.com",},
"committer_date": {"timestamp": 1428538899, "offset": 480,},
"message": b"",
}
self.revision_only_fullname = {
"id": "010d34f384fa99d047cdd5e2f41e56e5c2feee45",
"directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"),
"parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")],
"author": {"fullname": b"Linus Torvalds <torvalds@linux-foundation.org>",},
"date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz),
"committer": {
"fullname": b"Linus Torvalds <torvalds@linux-foundation.org>",
},
"committer_date": datetime.datetime(
2015, 7, 12, 15, 10, 30, tzinfo=linus_tz
),
"message": b"Linux 4.2-rc2\n",
"extra_headers": (
(b"svn-repo-uuid", b"046f1af7-66c2-d61b-5410-ce57b7db7bff"),
(b"svn-revision", b"10"),
),
}
def test_revision_identifier(self):
self.assertEqual(
identifiers.revision_identifier(self.revision),
identifiers.identifier_to_str(self.revision["id"]),
)
self.assertEqual(
identifiers.revision_identifier(remove_id(self.revision)),
identifiers.identifier_to_str(self.revision["id"]),
)
def test_revision_identifier_none_metadata(self):
self.assertEqual(
identifiers.revision_identifier(remove_id(self.revision_none_metadata)),
identifiers.identifier_to_str(self.revision_none_metadata["id"]),
)
def test_revision_identifier_synthetic(self):
self.assertEqual(
identifiers.revision_identifier(remove_id(self.synthetic_revision)),
identifiers.identifier_to_str(self.synthetic_revision["id"]),
)
def test_revision_identifier_with_extra_headers(self):
self.assertEqual(
identifiers.revision_identifier(
remove_id(self.revision_with_extra_headers)
),
identifiers.identifier_to_str(self.revision_with_extra_headers["id"]),
)
def test_revision_identifier_with_gpgsig(self):
self.assertEqual(
identifiers.revision_identifier(remove_id(self.revision_with_gpgsig)),
identifiers.identifier_to_str(self.revision_with_gpgsig["id"]),
)
def test_revision_identifier_no_message(self):
self.assertEqual(
identifiers.revision_identifier(remove_id(self.revision_no_message)),
identifiers.identifier_to_str(self.revision_no_message["id"]),
)
def test_revision_identifier_empty_message(self):
self.assertEqual(
identifiers.revision_identifier(remove_id(self.revision_empty_message)),
identifiers.identifier_to_str(self.revision_empty_message["id"]),
)
def test_revision_identifier_only_fullname(self):
self.assertEqual(
identifiers.revision_identifier(remove_id(self.revision_only_fullname)),
identifiers.identifier_to_str(self.revision_only_fullname["id"]),
)
release_example = {
"id": "2b10839e32c4c476e9d94492756bb1a3e1ec4aa8",
"target": b't\x1b"R\xa5\xe1Ml`\xa9\x13\xc7z`\x99\xab\xe7:\x85J',
"target_type": "revision",
"name": b"v2.6.14",
"author": {
"name": b"Linus Torvalds",
"email": b"torvalds@g5.osdl.org",
"fullname": b"Linus Torvalds <torvalds@g5.osdl.org>",
},
"date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz),
"message": b"""\
Linux 2.6.14 release
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.1 (GNU/Linux)
iD8DBQBDYWq6F3YsRnbiHLsRAmaeAJ9RCez0y8rOBbhSv344h86l/VVcugCeIhO1
wdLOnvj91G4wxYqrvThthbE=
=7VeT
-----END PGP SIGNATURE-----
""",
"synthetic": False,
}
class ReleaseIdentifier(unittest.TestCase):
def setUp(self):
linus_tz = datetime.timezone(datetime.timedelta(minutes=-420))
self.release = release_example
self.release_no_author = {
"id": b"&y\x1a\x8b\xcf\x0em3\xf4:\xefv\x82\xbd\xb5U#mV\xde",
"target": "9ee1c939d1cb936b1f98e8d81aeffab57bae46ab",
"target_type": "revision",
"name": b"v2.6.12",
"message": b"""\
This is the final 2.6.12 release
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.2.4 (GNU/Linux)
iD8DBQBCsykyF3YsRnbiHLsRAvPNAJ482tCZwuxp/bJRz7Q98MHlN83TpACdHr37
o6X/3T+vm8K3bf3driRr34c=
=sBHn
-----END PGP SIGNATURE-----
""",
"synthetic": False,
}
self.release_no_message = {
"id": "b6f4f446715f7d9543ef54e41b62982f0db40045",
"target": "9ee1c939d1cb936b1f98e8d81aeffab57bae46ab",
"target_type": "revision",
"name": b"v2.6.12",
"author": {"name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org",},
"date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz),
"message": None,
}
self.release_empty_message = {
"id": "71a0aea72444d396575dc25ac37fec87ee3c6492",
"target": "9ee1c939d1cb936b1f98e8d81aeffab57bae46ab",
"target_type": "revision",
"name": b"v2.6.12",
"author": {"name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org",},
"date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz),
"message": b"",
}
self.release_negative_utc = {
"id": "97c8d2573a001f88e72d75f596cf86b12b82fd01",
"name": b"20081029",
"target": "54e9abca4c77421e2921f5f156c9fe4a9f7441c7",
"target_type": "revision",
"date": {
"timestamp": {"seconds": 1225281976},
"offset": 0,
"negative_utc": True,
},
"author": {
"name": b"Otavio Salvador",
"email": b"otavio@debian.org",
"id": 17640,
},
"synthetic": False,
"message": b"tagging version 20081029\n\nr56558\n",
}
self.release_newline_in_author = {
"author": {
"email": b"esycat@gmail.com",
"fullname": b"Eugene Janusov\n<esycat@gmail.com>",
"name": b"Eugene Janusov\n",
},
"date": {
"negative_utc": None,
"offset": 600,
"timestamp": {"microseconds": 0, "seconds": 1377480558,},
},
"id": b"\\\x98\xf5Y\xd04\x16-\xe2->\xbe\xb9T3\xe6\xf8\x88R1",
"message": b"Release of v0.3.2.",
"name": b"0.3.2",
"synthetic": False,
"target": (b"\xc0j\xa3\xd9;x\xa2\x86\\I5\x17" b"\x000\xf8\xc2\xd79o\xd3"),
"target_type": "revision",
}
self.release_snapshot_target = dict(self.release)
self.release_snapshot_target["target_type"] = "snapshot"
self.release_snapshot_target["id"] = "c29c3ddcc6769a04e54dd69d63a6fdcbc566f850"
def test_release_identifier(self):
self.assertEqual(
identifiers.release_identifier(self.release),
identifiers.identifier_to_str(self.release["id"]),
)
self.assertEqual(
identifiers.release_identifier(remove_id(self.release)),
identifiers.identifier_to_str(self.release["id"]),
)
def test_release_identifier_no_author(self):
self.assertEqual(
identifiers.release_identifier(remove_id(self.release_no_author)),
identifiers.identifier_to_str(self.release_no_author["id"]),
)
def test_release_identifier_no_message(self):
self.assertEqual(
identifiers.release_identifier(remove_id(self.release_no_message)),
identifiers.identifier_to_str(self.release_no_message["id"]),
)
def test_release_identifier_empty_message(self):
self.assertEqual(
identifiers.release_identifier(remove_id(self.release_empty_message)),
identifiers.identifier_to_str(self.release_empty_message["id"]),
)
def test_release_identifier_negative_utc(self):
self.assertEqual(
identifiers.release_identifier(remove_id(self.release_negative_utc)),
identifiers.identifier_to_str(self.release_negative_utc["id"]),
)
def test_release_identifier_newline_in_author(self):
self.assertEqual(
identifiers.release_identifier(remove_id(self.release_newline_in_author)),
identifiers.identifier_to_str(self.release_newline_in_author["id"]),
)
def test_release_identifier_snapshot_target(self):
self.assertEqual(
identifiers.release_identifier(self.release_snapshot_target),
identifiers.identifier_to_str(self.release_snapshot_target["id"]),
)
snapshot_example = {
"id": _x("6e65b86363953b780d92b0a928f3e8fcdd10db36"),
"branches": {
b"directory": {
"target": _x("1bd0e65f7d2ff14ae994de17a1e7fe65111dcad8"),
"target_type": "directory",
},
b"content": {
"target": _x("fe95a46679d128ff167b7c55df5d02356c5a1ae1"),
"target_type": "content",
},
b"alias": {"target": b"revision", "target_type": "alias",},
b"revision": {
"target": _x("aafb16d69fd30ff58afdd69036a26047f3aebdc6"),
"target_type": "revision",
},
b"release": {
"target": _x("7045404f3d1c54e6473c71bbb716529fbad4be24"),
"target_type": "release",
},
b"snapshot": {
"target": _x("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"),
"target_type": "snapshot",
},
b"dangling": None,
},
}
class SnapshotIdentifier(unittest.TestCase):
def setUp(self):
super().setUp()
self.empty = {
"id": "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e",
"branches": {},
}
self.dangling_branch = {
"id": "c84502e821eb21ed84e9fd3ec40973abc8b32353",
"branches": {b"HEAD": None,},
}
self.unresolved = {
"id": "84b4548ea486e4b0a7933fa541ff1503a0afe1e0",
"branches": {b"foo": {"target": b"bar", "target_type": "alias",},},
}
self.all_types = snapshot_example
def test_empty_snapshot(self):
self.assertEqual(
identifiers.snapshot_identifier(remove_id(self.empty)),
identifiers.identifier_to_str(self.empty["id"]),
)
def test_dangling_branch(self):
self.assertEqual(
identifiers.snapshot_identifier(remove_id(self.dangling_branch)),
identifiers.identifier_to_str(self.dangling_branch["id"]),
)
def test_unresolved(self):
with self.assertRaisesRegex(ValueError, "b'foo' -> b'bar'"):
identifiers.snapshot_identifier(remove_id(self.unresolved))
def test_unresolved_force(self):
self.assertEqual(
identifiers.snapshot_identifier(
remove_id(self.unresolved), ignore_unresolved=True,
),
identifiers.identifier_to_str(self.unresolved["id"]),
)
def test_all_types(self):
self.assertEqual(
identifiers.snapshot_identifier(remove_id(self.all_types)),
identifiers.identifier_to_str(self.all_types["id"]),
)
+authority_example = {
+ "type": "forge",
+ "url": "https://forge.softwareheritage.org/",
+}
+fetcher_example = {
+ "name": "swh-phabricator-metadata-fetcher",
+ "version": "0.0.1",
+}
+metadata_example = {
+ "target": "swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d",
+ "discovery_date": datetime.datetime(
+ 2021, 1, 25, 11, 27, 51, tzinfo=datetime.timezone.utc
+ ),
+ "authority": authority_example,
+ "fetcher": fetcher_example,
+ "format": "json",
+ "metadata": b'{"foo": "bar"}',
+}
+
+
class RawExtrinsicMetadataIdentifier(unittest.TestCase):
def setUp(self):
super().setUp()
- self.authority = {
- "type": "forge",
- "url": "https://forge.softwareheritage.org/",
- }
- self.fetcher = {
- "name": "swh-phabricator-metadata-fetcher",
- "version": "0.0.1",
- }
-
- self.minimal = {
- "type": "content",
- "target": ExtendedSWHID.from_string(
- "swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d"
- ),
- "discovery_date": datetime.datetime(
- 2021, 1, 25, 11, 27, 51, tzinfo=datetime.timezone.utc
- ),
- "authority": self.authority,
- "fetcher": self.fetcher,
- "format": "json",
- "metadata": b'{"foo": "bar"}',
- }
+ self.minimal = metadata_example
self.maximal = {
**self.minimal,
"origin": "https://forge.softwareheritage.org/source/swh-model/",
"visit": 42,
"snapshot": CoreSWHID.from_string("swh:1:snp:" + "00" * 20),
"release": CoreSWHID.from_string("swh:1:rel:" + "01" * 20),
"revision": CoreSWHID.from_string("swh:1:rev:" + "02" * 20),
"path": b"/abc/def",
"directory": CoreSWHID.from_string("swh:1:dir:" + "03" * 20),
}
def test_minimal(self):
manifest = (
b"raw_extrinsic_metadata 210\0"
b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
b"discovery_date 1611574071\n"
b"authority forge https://forge.softwareheritage.org/\n"
b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
b"format json\n"
b"\n"
b'{"foo": "bar"}'
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(self.minimal),
hashlib.sha1(manifest).hexdigest(),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(self.minimal),
"5c13f20ba336e44549baf3d7b9305b027ec9f43d",
)
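The manifest uses git-style object framing: "<type> <body length>\0<body>",
hashed with SHA1. A sketch of how the expected value above is derived:

import hashlib

body = (
    b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
    b"discovery_date 1611574071\n"
    b"authority forge https://forge.softwareheritage.org/\n"
    b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
    b"format json\n"
    b"\n"
    b'{"foo": "bar"}'
)
manifest = b"raw_extrinsic_metadata %d\x00" % len(body) + body  # len(body) == 210
assert hashlib.sha1(manifest).hexdigest() == "5c13f20ba336e44549baf3d7b9305b027ec9f43d"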
def test_maximal(self):
manifest = (
b"raw_extrinsic_metadata 533\0"
b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
b"discovery_date 1611574071\n"
b"authority forge https://forge.softwareheritage.org/\n"
b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
b"format json\n"
b"origin https://forge.softwareheritage.org/source/swh-model/\n"
b"visit 42\n"
b"snapshot swh:1:snp:0000000000000000000000000000000000000000\n"
b"release swh:1:rel:0101010101010101010101010101010101010101\n"
b"revision swh:1:rev:0202020202020202020202020202020202020202\n"
b"path /abc/def\n"
b"directory swh:1:dir:0303030303030303030303030303030303030303\n"
b"\n"
b'{"foo": "bar"}'
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(self.maximal),
hashlib.sha1(manifest).hexdigest(),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(self.maximal),
"f96966e1093d15236a31fde07e47d5b1c9428049",
)
def test_nonascii_path(self):
metadata = {
**self.minimal,
"path": b"/ab\nc/d\xf0\x9f\xa4\xb7e\x00f",
}
manifest = (
b"raw_extrinsic_metadata 231\0"
b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
b"discovery_date 1611574071\n"
b"authority forge https://forge.softwareheritage.org/\n"
b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
b"format json\n"
b"path /ab\n"
b" c/d\xf0\x9f\xa4\xb7e\x00f\n"
b"\n"
b'{"foo": "bar"}'
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
hashlib.sha1(manifest).hexdigest(),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
"7cc83fd1912176510c083f5df43f01b09af4b333",
)
def test_timezone_insensitive(self):
"""Checks the timezone of the datetime.datetime does not affect the
hashed manifest."""
utc_plus_one = datetime.timezone(datetime.timedelta(hours=1))
metadata = {
**self.minimal,
"discovery_date": datetime.datetime(
2021, 1, 25, 12, 27, 51, tzinfo=utc_plus_one,
),
}
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(self.minimal),
identifiers.raw_extrinsic_metadata_identifier(metadata),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
"5c13f20ba336e44549baf3d7b9305b027ec9f43d",
)
def test_microsecond_insensitive(self):
"""Checks the microseconds of the datetime.datetime does not affect the
hashed manifest."""
metadata = {
**self.minimal,
"discovery_date": datetime.datetime(
2021, 1, 25, 11, 27, 51, 123456, tzinfo=datetime.timezone.utc,
),
}
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(self.minimal),
identifiers.raw_extrinsic_metadata_identifier(metadata),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
"5c13f20ba336e44549baf3d7b9305b027ec9f43d",
)
def test_noninteger_timezone(self):
"""Checks the discovery_date is translated to UTC before truncating
microseconds"""
tz = datetime.timezone(datetime.timedelta(microseconds=-42))
metadata = {
**self.minimal,
"discovery_date": datetime.datetime(
2021, 1, 25, 11, 27, 50, 1_000_000 - 42, tzinfo=tz,
),
}
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(self.minimal),
identifiers.raw_extrinsic_metadata_identifier(metadata),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
"5c13f20ba336e44549baf3d7b9305b027ec9f43d",
)
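# Why this works (illustrative note, not part of the original diff):
# converting to UTC first maps 11:27:50.999958 at an offset of -42us to
# 11:27:51.000000 UTC, i.e.
#     int(dt.astimezone(datetime.timezone.utc).timestamp()) == 1611574071
# which is exactly the minimal manifest's discovery_date.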
def test_negative_timestamp(self):
metadata = {
**self.minimal,
"discovery_date": datetime.datetime(
1960, 1, 25, 11, 27, 51, tzinfo=datetime.timezone.utc,
),
}
manifest = (
b"raw_extrinsic_metadata 210\0"
b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
b"discovery_date -313504329\n"
b"authority forge https://forge.softwareheritage.org/\n"
b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
b"format json\n"
b"\n"
b'{"foo": "bar"}'
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
hashlib.sha1(manifest).hexdigest(),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
"895d0821a2991dd376ddc303424aceb7c68280f9",
)
def test_epoch(self):
metadata = {
**self.minimal,
"discovery_date": datetime.datetime(
1970, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc,
),
}
manifest = (
b"raw_extrinsic_metadata 201\0"
b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
b"discovery_date 0\n"
b"authority forge https://forge.softwareheritage.org/\n"
b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
b"format json\n"
b"\n"
b'{"foo": "bar"}'
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
hashlib.sha1(manifest).hexdigest(),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
"27a53df54ace35ebd910493cdc70b334d6b7cb88",
)
def test_negative_epoch(self):
metadata = {
**self.minimal,
"discovery_date": datetime.datetime(
1969, 12, 31, 23, 59, 59, 1, tzinfo=datetime.timezone.utc,
),
}
manifest = (
b"raw_extrinsic_metadata 202\0"
b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
b"discovery_date -1\n"
b"authority forge https://forge.softwareheritage.org/\n"
b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
b"format json\n"
b"\n"
b'{"foo": "bar"}'
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
hashlib.sha1(manifest).hexdigest(),
)
self.assertEqual(
identifiers.raw_extrinsic_metadata_identifier(metadata),
"be7154a8fd49d87f81547ea634d1e2152907d089",
)
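# Note on the length prefixes above (derived from these manifests, not part
# of the original diff): the decimal after "raw_extrinsic_metadata " counts
# every byte following the NUL, so it varies with the decimal width of
# discovery_date: "1611574071" and "-313504329" (10 chars) give 210,
# "0" gives 201, and "-1" gives 202.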
origin_example = {
"url": "https://github.com/torvalds/linux",
}
class OriginIdentifier(unittest.TestCase):
def test_origin_identifier(self):
self.assertEqual(
identifiers.origin_identifier(origin_example),
"b63a575fe3faab7692c9f38fb09d4bb45651bb0f",
)
TS_DICTS = [
(
{"timestamp": 12345, "offset": 0},
{
"timestamp": {"seconds": 12345, "microseconds": 0},
"offset": 0,
"negative_utc": False,
},
),
(
{"timestamp": 12345, "offset": 0, "negative_utc": False},
{
"timestamp": {"seconds": 12345, "microseconds": 0},
"offset": 0,
"negative_utc": False,
},
),
(
{"timestamp": 12345, "offset": 0, "negative_utc": None},
{
"timestamp": {"seconds": 12345, "microseconds": 0},
"offset": 0,
"negative_utc": False,
},
),
(
{"timestamp": {"seconds": 12345}, "offset": 0, "negative_utc": None},
{
"timestamp": {"seconds": 12345, "microseconds": 0},
"offset": 0,
"negative_utc": False,
},
),
(
{
"timestamp": {"seconds": 12345, "microseconds": 0},
"offset": 0,
"negative_utc": None,
},
{
"timestamp": {"seconds": 12345, "microseconds": 0},
"offset": 0,
"negative_utc": False,
},
),
(
{
"timestamp": {"seconds": 12345, "microseconds": 100},
"offset": 0,
"negative_utc": None,
},
{
"timestamp": {"seconds": 12345, "microseconds": 100},
"offset": 0,
"negative_utc": False,
},
),
(
{"timestamp": 12345, "offset": 0, "negative_utc": True},
{
"timestamp": {"seconds": 12345, "microseconds": 0},
"offset": 0,
"negative_utc": True,
},
),
]
@pytest.mark.parametrize("dict_input,expected", TS_DICTS)
def test_normalize_timestamp_dict(dict_input, expected):
assert normalize_timestamp(dict_input) == expected
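# Sketch (an assumption, not part of the original diff): normalize_timestamp
# also accepts a bare integer, normalizing it like the dict inputs above:
#
#     normalize_timestamp(12345) == {
#         "timestamp": {"seconds": 12345, "microseconds": 0},
#         "offset": 0,
#         "negative_utc": False,
#     }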
TS_DICTS_INVALID_TIMESTAMP = [
{"timestamp": 1.2, "offset": 0},
{"timestamp": "1", "offset": 0},
# these below should really also trigger a ValueError...
# {"timestamp": {"seconds": "1"}, "offset": 0},
# {"timestamp": {"seconds": 1.2}, "offset": 0},
# {"timestamp": {"seconds": 1.2}, "offset": 0},
]
@pytest.mark.parametrize("dict_input", TS_DICTS_INVALID_TIMESTAMP)
def test_normalize_timestamp_dict_invalid_timestamp(dict_input):
with pytest.raises(ValueError, match="non-integer timestamp"):
normalize_timestamp(dict_input)
# SWHIDs that are outright invalid, no matter the context
INVALID_SWHIDS = [
"swh:1:cnt",
"swh:1:",
"swh:",
"swh:1:cnt:",
"foo:1:cnt:abc8bc9d7a6bcf6db04f476d29314f157507d505",
"swh:2:dir:def8bc9d7a6bcf6db04f476d29314f157507d505",
"swh:1:foo:fed8bc9d7a6bcf6db04f476d29314f157507d505",
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;invalid;malformed",
"swh:1:snp:gh6959356d30f1a4e9b7f6bca59b9a336464c03d",
"swh:1:snp:foo",
# wrong qualifier: ori should be origin
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;ori=something;anchor=1;visit=1;path=/", # noqa
# wrong qualifier: anc should be anchor
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;origin=something;anc=1;visit=1;path=/", # noqa
# wrong qualifier: vis should be visit
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;origin=something;anchor=1;vis=1;path=/", # noqa
# wrong qualifier: pa should be path
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;origin=something;anchor=1;visit=1;pa=/", # noqa
# wrong qualifier: line should be lines
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;line=10;origin=something;anchor=1;visit=1;path=/", # noqa
# wrong qualifier value: it contains a space before or after
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;origin= https://some-url", # noqa
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;origin=something;anchor=some-anchor ", # noqa
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;origin=something;anchor=some-anchor ;visit=1", # noqa
# invalid swhid: whitespaces
"swh :1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;ori=something;anchor=1;visit=1;path=/", # noqa
"swh: 1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;ori=something;anchor=1;visit=1;path=/", # noqa
"swh: 1: dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;ori=something;anchor=1;visit=1;path=/", # noqa
"swh:1: dir: 0b6959356d30f1a4e9b7f6bca59b9a336464c03d",
"swh:1: dir: 0b6959356d30f1a4e9b7f6bca59b9a336464c03d; origin=blah",
"swh:1: dir: 0b6959356d30f1a4e9b7f6bca59b9a336464c03d;lines=12",
# other whitespaces
"swh\t:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;lines=12",
"swh:1\n:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;lines=12",
"swh:1:\rdir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;lines=12",
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d\f;lines=12",
"swh:1:dir:0b6959356d30f1a4e9b7f6bca59b9a336464c03d;lines=12\v",
]
SWHID_CLASSES = [CoreSWHID, QualifiedSWHID, ExtendedSWHID]
@pytest.mark.parametrize(
"invalid_swhid,swhid_class", itertools.product(INVALID_SWHIDS, SWHID_CLASSES)
)
def test_swhid_parsing_error(invalid_swhid, swhid_class):
"""Tests SWHID strings that are invalid for all SWHID classes do raise
a ValidationError"""
with pytest.raises(ValidationError):
swhid_class.from_string(invalid_swhid)
# string SWHIDs, and how they should be parsed by each of the classes,
# or None if the class does not support it
HASH = "94a9ed024d3859793618152ea559a168bbcbb5e2"
VALID_SWHIDS = [
(
f"swh:1:cnt:{HASH}",
CoreSWHID(object_type=ObjectType.CONTENT, object_id=_x(HASH),),
QualifiedSWHID(object_type=ObjectType.CONTENT, object_id=_x(HASH),),
ExtendedSWHID(object_type=ExtendedObjectType.CONTENT, object_id=_x(HASH),),
),
(
f"swh:1:dir:{HASH}",
CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=_x(HASH),),
QualifiedSWHID(object_type=ObjectType.DIRECTORY, object_id=_x(HASH),),
ExtendedSWHID(object_type=ExtendedObjectType.DIRECTORY, object_id=_x(HASH),),
),
(
f"swh:1:rev:{HASH}",
CoreSWHID(object_type=ObjectType.REVISION, object_id=_x(HASH),),
QualifiedSWHID(object_type=ObjectType.REVISION, object_id=_x(HASH),),
ExtendedSWHID(object_type=ExtendedObjectType.REVISION, object_id=_x(HASH),),
),
(
f"swh:1:rel:{HASH}",
CoreSWHID(object_type=ObjectType.RELEASE, object_id=_x(HASH),),
QualifiedSWHID(object_type=ObjectType.RELEASE, object_id=_x(HASH),),
ExtendedSWHID(object_type=ExtendedObjectType.RELEASE, object_id=_x(HASH),),
),
(
f"swh:1:snp:{HASH}",
CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=_x(HASH),),
QualifiedSWHID(object_type=ObjectType.SNAPSHOT, object_id=_x(HASH),),
ExtendedSWHID(object_type=ExtendedObjectType.SNAPSHOT, object_id=_x(HASH),),
),
(
f"swh:1:cnt:{HASH};origin=https://github.com/python/cpython;lines=1-18",
None, # CoreSWHID does not allow qualifiers
QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
origin="https://github.com/python/cpython",
lines=(1, 18),
),
None, # Neither does ExtendedSWHID
),
(
f"swh:1:cnt:{HASH};origin=https://github.com/python/cpython;lines=18",
None, # likewise
QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
origin="https://github.com/python/cpython",
lines=(18, None),
),
None, # likewise
),
(
f"swh:1:dir:{HASH};origin=deb://Debian/packages/linuxdoc-tools",
None, # likewise
QualifiedSWHID(
object_type=ObjectType.DIRECTORY,
object_id=_x(HASH),
origin="deb://Debian/packages/linuxdoc-tools",
),
None, # likewise
),
(
f"swh:1:ori:{HASH}",
None, # CoreSWHID does not allow origin pseudo-SWHIDs
None, # Neither does QualifiedSWHID
ExtendedSWHID(object_type=ExtendedObjectType.ORIGIN, object_id=_x(HASH),),
),
(
f"swh:1:emd:{HASH}",
None, # likewise for metadata pseudo-SWHIDs
None, # Neither does QualifiedSWHID
ExtendedSWHID(
object_type=ExtendedObjectType.RAW_EXTRINSIC_METADATA, object_id=_x(HASH),
),
),
(
f"swh:1:emd:{HASH};origin=https://github.com/python/cpython",
None, # CoreSWHID does not allow metadata pseudo-SWHIDs or qualifiers
None, # QualifiedSWHID does not allow metadata pseudo-SWHIDs
None, # ExtendedSWHID does not allow qualifiers
),
]
@pytest.mark.parametrize(
"string,core,qualified,extended",
[
pytest.param(string, core, qualified, extended, id=string)
for (string, core, qualified, extended) in VALID_SWHIDS
],
)
def test_parse_unparse_swhids(string, core, qualified, extended):
"""Tests parsing and serializing valid SWHIDs with the various SWHID classes."""
classes = [CoreSWHID, QualifiedSWHID, ExtendedSWHID]
for (cls, parsed_swhid) in zip(classes, [core, qualified, extended]):
if parsed_swhid is None:
# This class should not accept this SWHID
with pytest.raises(ValidationError):
cls.from_string(string)
else:
# This class should accept it
assert cls.from_string(string) == parsed_swhid
# Also check serialization
assert string == str(parsed_swhid)
@pytest.mark.parametrize(
"core,extended",
[
pytest.param(core, extended, id=string)
for (string, core, qualified, extended) in VALID_SWHIDS
if core is not None
],
)
def test_core_to_extended(core, extended):
assert core.to_extended() == extended
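# Sketch (not part of the original diff; `_core_sketch` is an illustrative
# name): to_extended() keeps the object type and hash, so a CoreSWHID and
# its extended form share the same string representation.
_core_sketch = CoreSWHID.from_string(f"swh:1:rev:{HASH}")
assert str(_core_sketch.to_extended()) == f"swh:1:rev:{HASH}"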
@pytest.mark.parametrize(
"ns,version,type,id,qualifiers",
[
("foo", 1, ObjectType.CONTENT, "abc8bc9d7a6bcf6db04f476d29314f157507d505", {}),
("swh", 2, ObjectType.CONTENT, "def8bc9d7a6bcf6db04f476d29314f157507d505", {}),
("swh", 1, ObjectType.DIRECTORY, "aaaa", {}),
],
)
def test_QualifiedSWHID_validation_error(ns, version, type, id, qualifiers):
with pytest.raises(ValidationError):
QualifiedSWHID(
namespace=ns,
scheme_version=version,
object_type=type,
object_id=_x(id),
**qualifiers,
)
@pytest.mark.parametrize(
"object_type,qualifiers,expected",
[
# No qualifier:
(ObjectType.CONTENT, {}, f"swh:1:cnt:{HASH}"),
# origin:
(ObjectType.CONTENT, {"origin": None}, f"swh:1:cnt:{HASH}"),
(ObjectType.CONTENT, {"origin": 42}, ValueError),
# visit:
(
ObjectType.CONTENT,
{"visit": f"swh:1:snp:{HASH}"},
f"swh:1:cnt:{HASH};visit=swh:1:snp:{HASH}",
),
(
ObjectType.CONTENT,
{"visit": CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=_x(HASH))},
f"swh:1:cnt:{HASH};visit=swh:1:snp:{HASH}",
),
(ObjectType.CONTENT, {"visit": 42}, TypeError),
(ObjectType.CONTENT, {"visit": f"swh:1:rel:{HASH}"}, ValidationError,),
(
ObjectType.CONTENT,
{"visit": CoreSWHID(object_type=ObjectType.RELEASE, object_id=_x(HASH))},
ValidationError,
),
# anchor:
(
ObjectType.CONTENT,
{"anchor": f"swh:1:snp:{HASH}"},
f"swh:1:cnt:{HASH};anchor=swh:1:snp:{HASH}",
),
(
ObjectType.CONTENT,
{"anchor": CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=_x(HASH))},
f"swh:1:cnt:{HASH};anchor=swh:1:snp:{HASH}",
),
(
ObjectType.CONTENT,
{"anchor": f"swh:1:dir:{HASH}"},
f"swh:1:cnt:{HASH};anchor=swh:1:dir:{HASH}",
),
(
ObjectType.CONTENT,
{"anchor": CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=_x(HASH))},
f"swh:1:cnt:{HASH};anchor=swh:1:dir:{HASH}",
),
(ObjectType.CONTENT, {"anchor": 42}, TypeError),
(ObjectType.CONTENT, {"anchor": f"swh:1:cnt:{HASH}"}, ValidationError,),
(
ObjectType.CONTENT,
{"anchor": CoreSWHID(object_type=ObjectType.CONTENT, object_id=_x(HASH))},
ValidationError,
),
# path:
(ObjectType.CONTENT, {"path": b"/foo"}, f"swh:1:cnt:{HASH};path=/foo",),
(
ObjectType.CONTENT,
{"path": b"/foo;bar"},
f"swh:1:cnt:{HASH};path=/foo%3Bbar",
),
(ObjectType.CONTENT, {"path": "/foo"}, f"swh:1:cnt:{HASH};path=/foo",),
(
ObjectType.CONTENT,
{"path": "/foo;bar"},
f"swh:1:cnt:{HASH};path=/foo%3Bbar",
),
(ObjectType.CONTENT, {"path": 42}, Exception),
# lines:
(ObjectType.CONTENT, {"lines": (42, None)}, f"swh:1:cnt:{HASH};lines=42",),
(ObjectType.CONTENT, {"lines": (21, 42)}, f"swh:1:cnt:{HASH};lines=21-42",),
(ObjectType.CONTENT, {"lines": 42}, TypeError,),
(ObjectType.CONTENT, {"lines": (None, 42)}, ValueError,),
(ObjectType.CONTENT, {"lines": ("42", None)}, ValueError,),
],
)
def test_QualifiedSWHID_init(object_type, qualifiers, expected):
"""Tests validation and converters of qualifiers"""
if isinstance(expected, type):
assert issubclass(expected, Exception)
with pytest.raises(expected):
QualifiedSWHID(object_type=object_type, object_id=_x(HASH), **qualifiers)
else:
assert isinstance(expected, str)
swhid = QualifiedSWHID(
object_type=object_type, object_id=_x(HASH), **qualifiers
)
# Check the built object has the right serialization
assert expected == str(swhid)
# Check the internal state of the object is the same as if parsed from a string
assert QualifiedSWHID.from_string(expected) == swhid
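# Usage sketch (mirrors the parametrized cases above; `_swhid_sketch` is an
# illustrative name, not part of the original diff): qualifiers are keyword
# arguments, serialized after the core SWHID.
_swhid_sketch = QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
origin="https://github.com/python/cpython",
lines=(1, 18),
)
assert (
str(_swhid_sketch)
== f"swh:1:cnt:{HASH};origin=https://github.com/python/cpython;lines=1-18"
)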
def test_QualifiedSWHID_hash():
object_id = _x("94a9ed024d3859793618152ea559a168bbcbb5e2")
assert hash(
QualifiedSWHID(object_type=ObjectType.DIRECTORY, object_id=object_id)
) == hash(QualifiedSWHID(object_type=ObjectType.DIRECTORY, object_id=object_id))
assert hash(
QualifiedSWHID(
object_type=ObjectType.DIRECTORY, object_id=object_id, **dummy_qualifiers,
)
) == hash(
QualifiedSWHID(
object_type=ObjectType.DIRECTORY, object_id=object_id, **dummy_qualifiers,
)
)
# Different order of the dictionary, so the underlying order of the tuple in
# ImmutableDict is different.
assert hash(
QualifiedSWHID(
object_type=ObjectType.DIRECTORY,
object_id=object_id,
origin="https://example.com",
lines=(42, None),
)
) == hash(
QualifiedSWHID(
object_type=ObjectType.DIRECTORY,
object_id=object_id,
lines=(42, None),
origin="https://example.com",
)
)
def test_QualifiedSWHID_eq():
object_id = _x("94a9ed024d3859793618152ea559a168bbcbb5e2")
assert QualifiedSWHID(
object_type=ObjectType.DIRECTORY, object_id=object_id
) == QualifiedSWHID(object_type=ObjectType.DIRECTORY, object_id=object_id)
assert QualifiedSWHID(
object_type=ObjectType.DIRECTORY, object_id=object_id, **dummy_qualifiers,
) == QualifiedSWHID(
object_type=ObjectType.DIRECTORY, object_id=object_id, **dummy_qualifiers,
)
QUALIFIED_SWHIDS = [
# origin:
(
f"swh:1:cnt:{HASH};origin=https://github.com/python/cpython",
QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
origin="https://github.com/python/cpython",
),
),
(
f"swh:1:cnt:{HASH};origin=https://example.org/foo%3Bbar%25baz",
QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
origin="https://example.org/foo%3Bbar%25baz",
),
),
# visit:
(
f"swh:1:cnt:{HASH};visit=swh:1:snp:{HASH}",
QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
visit=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=_x(HASH)),
),
),
(f"swh:1:cnt:{HASH};visit=swh:1:rel:{HASH}", None,),
# anchor:
(
f"swh:1:cnt:{HASH};anchor=swh:1:dir:{HASH}",
QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
anchor=CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=_x(HASH)),
),
),
(
f"swh:1:cnt:{HASH};anchor=swh:1:rev:{HASH}",
QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
anchor=CoreSWHID(object_type=ObjectType.REVISION, object_id=_x(HASH)),
),
),
(
f"swh:1:cnt:{HASH};anchor=swh:1:cnt:{HASH}",
None, # 'cnt' is not valid in anchor
),
(
f"swh:1:cnt:{HASH};anchor=swh:1:ori:{HASH}",
None, # 'ori' is not valid in a CoreSWHID
),
# path:
(
f"swh:1:cnt:{HASH};path=/foo",
QualifiedSWHID(
object_type=ObjectType.CONTENT, object_id=_x(HASH), path=b"/foo"
),
),
(
f"swh:1:cnt:{HASH};path=/foo%3Bbar",
QualifiedSWHID(
object_type=ObjectType.CONTENT, object_id=_x(HASH), path=b"/foo;bar"
),
),
(
f"swh:1:cnt:{HASH};path=/foo%25bar",
QualifiedSWHID(
object_type=ObjectType.CONTENT, object_id=_x(HASH), path=b"/foo%bar"
),
),
# lines
(
f"swh:1:cnt:{HASH};lines=1-18",
QualifiedSWHID(
object_type=ObjectType.CONTENT, object_id=_x(HASH), lines=(1, 18),
),
),
(
f"swh:1:cnt:{HASH};lines=18",
QualifiedSWHID(
object_type=ObjectType.CONTENT, object_id=_x(HASH), lines=(18, None),
),
),
(f"swh:1:cnt:{HASH};lines=", None,),
(f"swh:1:cnt:{HASH};lines=aa", None,),
(f"swh:1:cnt:{HASH};lines=18-aa", None,),
]
@pytest.mark.parametrize("string,parsed", QUALIFIED_SWHIDS)
def test_QualifiedSWHID_parse_serialize_qualifiers(string, parsed):
"""Tests parsing and serializing valid SWHIDs with the various SWHID classes."""
if parsed is None:
with pytest.raises(ValidationError):
print(repr(QualifiedSWHID.from_string(string)))
else:
assert QualifiedSWHID.from_string(string) == parsed
assert str(parsed) == string
def test_QualifiedSWHID_serialize_origin():
"""Checks that semicolon in origins are escaped."""
string = f"swh:1:cnt:{HASH};origin=https://example.org/foo%3Bbar%25baz"
swhid = QualifiedSWHID(
object_type=ObjectType.CONTENT,
object_id=_x(HASH),
origin="https://example.org/foo;bar%25baz",
)
assert str(swhid) == string
def test_QualifiedSWHID_attributes():
"""Checks the set of QualifiedSWHID attributes match the SWHID_QUALIFIERS
constant."""
assert set(attr.fields_dict(QualifiedSWHID)) == {
"namespace",
"scheme_version",
"object_type",
"object_id",
*SWHID_QUALIFIERS,
}
@pytest.mark.parametrize(
"ns,version,type,id",
[
("foo", 1, ObjectType.CONTENT, "abc8bc9d7a6bcf6db04f476d29314f157507d505"),
("swh", 2, ObjectType.CONTENT, "def8bc9d7a6bcf6db04f476d29314f157507d505"),
("swh", 1, ObjectType.DIRECTORY, "aaaa"),
],
)
def test_CoreSWHID_validation_error(ns, version, type, id):
with pytest.raises(ValidationError):
CoreSWHID(
namespace=ns, scheme_version=version, object_type=type, object_id=_x(id),
)
def test_CoreSWHID_hash():
object_id = _x("94a9ed024d3859793618152ea559a168bbcbb5e2")
assert hash(
CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=object_id)
) == hash(CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=object_id))
def test_CoreSWHID_eq():
object_id = _x("94a9ed024d3859793618152ea559a168bbcbb5e2")
assert CoreSWHID(
object_type=ObjectType.DIRECTORY, object_id=object_id
) == CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=object_id)
@pytest.mark.parametrize(
"ns,version,type,id",
[
(
"foo",
1,
ExtendedObjectType.CONTENT,
"abc8bc9d7a6bcf6db04f476d29314f157507d505",
),
(
"swh",
2,
ExtendedObjectType.CONTENT,
"def8bc9d7a6bcf6db04f476d29314f157507d505",
),
("swh", 1, ExtendedObjectType.DIRECTORY, "aaaa"),
],
)
def test_ExtendedSWHID_validation_error(ns, version, type, id):
with pytest.raises(ValidationError):
ExtendedSWHID(
namespace=ns, scheme_version=version, object_type=type, object_id=_x(id),
)
def test_ExtendedSWHID_hash():
object_id = _x("94a9ed024d3859793618152ea559a168bbcbb5e2")
assert hash(
ExtendedSWHID(object_type=ExtendedObjectType.DIRECTORY, object_id=object_id)
) == hash(
ExtendedSWHID(object_type=ExtendedObjectType.DIRECTORY, object_id=object_id)
)
def test_ExtendedSWHID_eq():
object_id = _x("94a9ed024d3859793618152ea559a168bbcbb5e2")
assert ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=object_id
) == ExtendedSWHID(object_type=ExtendedObjectType.DIRECTORY, object_id=object_id)
def test_object_types():
"""Checks ExtendedObjectType is a superset of ObjectType"""
for member in ObjectType:
assert getattr(ExtendedObjectType, member.name).value == member.value
diff --git a/swh/model/tests/test_model.py b/swh/model/tests/test_model.py
index 8325bf5..2c84d71 100644
--- a/swh/model/tests/test_model.py
+++ b/swh/model/tests/test_model.py
@@ -1,1116 +1,1121 @@
# Copyright (C) 2019-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import copy
import datetime
import attr
from attrs_strict import AttributeTypeError
from hypothesis import given
from hypothesis.strategies import binary
import pytest
from swh.model.hashutil import MultiHash, hash_to_bytes, hash_to_hex
import swh.model.hypothesis_strategies as strategies
from swh.model.identifiers import (
CoreSWHID,
ExtendedSWHID,
ObjectType,
content_identifier,
directory_identifier,
origin_identifier,
release_identifier,
revision_identifier,
snapshot_identifier,
)
from swh.model.model import (
BaseModel,
Content,
Directory,
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
MissingData,
Origin,
OriginVisit,
OriginVisitStatus,
Person,
RawExtrinsicMetadata,
Release,
Revision,
SkippedContent,
Snapshot,
Timestamp,
TimestampWithTimezone,
)
from swh.model.tests.test_identifiers import (
content_example,
directory_example,
+ metadata_example,
origin_example,
release_example,
revision_example,
snapshot_example,
)
EXAMPLE_HASH = hash_to_bytes("94a9ed024d3859793618152ea559a168bbcbb5e2")
@given(strategies.objects())
def test_todict_inverse_fromdict(objtype_and_obj):
(obj_type, obj) = objtype_and_obj
if obj_type in ("origin", "origin_visit"):
return
obj_as_dict = obj.to_dict()
obj_as_dict_copy = copy.deepcopy(obj_as_dict)
# Check the composition of to_dict and from_dict is the identity
assert obj == type(obj).from_dict(obj_as_dict)
# Check from_dict() does not change the input dict
assert obj_as_dict == obj_as_dict_copy
# Check the composition of from_dict and to_dict is the identity
assert obj_as_dict == type(obj).from_dict(obj_as_dict).to_dict()
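# Concrete round-trip sketch (not part of the original diff; `_origin_sketch`
# is an illustrative name), using the simplest model class:
_origin_sketch = Origin(url="https://example.com/repo")
assert Origin.from_dict(_origin_sketch.to_dict()) == _origin_sketch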
def test_unique_key():
url = "http://example.org/"
date = datetime.datetime.now(tz=datetime.timezone.utc)
id_ = b"42" * 10
assert Origin(url=url).unique_key() == {"url": url}
assert OriginVisit(origin=url, date=date, type="git").unique_key() == {
"origin": url,
"date": str(date),
}
assert OriginVisitStatus(
origin=url, visit=42, date=date, status="created", snapshot=None
).unique_key() == {"origin": url, "visit": "42", "date": str(date),}
assert Snapshot.from_dict({**snapshot_example, "id": id_}).unique_key() == id_
assert Release.from_dict({**release_example, "id": id_}).unique_key() == id_
assert Revision.from_dict({**revision_example, "id": id_}).unique_key() == id_
assert Directory.from_dict({**directory_example, "id": id_}).unique_key() == id_
+ assert (
+ RawExtrinsicMetadata.from_dict({**metadata_example, "id": id_}).unique_key()
+ == id_
+ )
cont = Content.from_data(b"foo")
assert cont.unique_key().hex() == "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"
kwargs = {
**cont.to_dict(),
"reason": "foo",
"status": "absent",
}
del kwargs["data"]
assert SkippedContent(**kwargs).unique_key() == cont.hashes()
# Anonymization
@given(strategies.objects())
def test_anonymization(objtype_and_obj):
(obj_type, obj) = objtype_and_obj
def check_person(p):
if p is not None:
assert p.name is None
assert p.email is None
assert len(p.fullname) == 32
anon_obj = obj.anonymize()
if obj_type == "person":
assert anon_obj is not None
check_person(anon_obj)
elif obj_type == "release":
assert anon_obj is not None
check_person(anon_obj.author)
elif obj_type == "revision":
assert anon_obj is not None
check_person(anon_obj.author)
check_person(anon_obj.committer)
else:
assert anon_obj is None
# Origin, OriginVisit, OriginVisitStatus
@given(strategies.origins())
def test_todict_origins(origin):
obj = origin.to_dict()
assert "type" not in obj
assert type(origin)(url=origin.url) == type(origin).from_dict(obj)
@given(strategies.origin_visits())
def test_todict_origin_visits(origin_visit):
obj = origin_visit.to_dict()
assert origin_visit == type(origin_visit).from_dict(obj)
def test_origin_visit_naive_datetime():
with pytest.raises(ValueError, match="must be a timezone-aware datetime"):
OriginVisit(
origin="http://foo/", date=datetime.datetime.now(), type="git",
)
@given(strategies.origin_visit_statuses())
def test_todict_origin_visit_statuses(origin_visit_status):
obj = origin_visit_status.to_dict()
assert origin_visit_status == type(origin_visit_status).from_dict(obj)
def test_origin_visit_status_naive_datetime():
with pytest.raises(ValueError, match="must be a timezone-aware datetime"):
OriginVisitStatus(
origin="http://foo/",
visit=42,
date=datetime.datetime.now(),
status="ongoing",
snapshot=None,
)
# Timestamp
@given(strategies.timestamps())
def test_timestamps_strategy(timestamp):
attr.validate(timestamp)
def test_timestamp_seconds():
attr.validate(Timestamp(seconds=0, microseconds=0))
with pytest.raises(AttributeTypeError):
Timestamp(seconds="0", microseconds=0)
attr.validate(Timestamp(seconds=2 ** 63 - 1, microseconds=0))
with pytest.raises(ValueError):
Timestamp(seconds=2 ** 63, microseconds=0)
attr.validate(Timestamp(seconds=-(2 ** 63), microseconds=0))
with pytest.raises(ValueError):
Timestamp(seconds=-(2 ** 63) - 1, microseconds=0)
def test_timestamp_microseconds():
attr.validate(Timestamp(seconds=0, microseconds=0))
with pytest.raises(AttributeTypeError):
Timestamp(seconds=0, microseconds="0")
attr.validate(Timestamp(seconds=0, microseconds=10 ** 6 - 1))
with pytest.raises(ValueError):
Timestamp(seconds=0, microseconds=10 ** 6)
with pytest.raises(ValueError):
Timestamp(seconds=0, microseconds=-1)
def test_timestamp_from_dict():
assert Timestamp.from_dict({"seconds": 10, "microseconds": 5})
with pytest.raises(AttributeTypeError):
Timestamp.from_dict({"seconds": "10", "microseconds": 5})
with pytest.raises(AttributeTypeError):
Timestamp.from_dict({"seconds": 10, "microseconds": "5"})
with pytest.raises(ValueError):
Timestamp.from_dict({"seconds": 0, "microseconds": -1})
Timestamp.from_dict({"seconds": 0, "microseconds": 10 ** 6 - 1})
with pytest.raises(ValueError):
Timestamp.from_dict({"seconds": 0, "microseconds": 10 ** 6})
# TimestampWithTimezone
def test_timestampwithtimezone():
ts = Timestamp(seconds=0, microseconds=0)
tstz = TimestampWithTimezone(timestamp=ts, offset=0, negative_utc=False)
attr.validate(tstz)
assert tstz.negative_utc is False
attr.validate(TimestampWithTimezone(timestamp=ts, offset=10, negative_utc=False))
attr.validate(TimestampWithTimezone(timestamp=ts, offset=-10, negative_utc=False))
tstz = TimestampWithTimezone(timestamp=ts, offset=0, negative_utc=True)
attr.validate(tstz)
assert tstz.negative_utc is True
with pytest.raises(AttributeTypeError):
TimestampWithTimezone(
timestamp=datetime.datetime.now(), offset=0, negative_utc=False
)
with pytest.raises(AttributeTypeError):
TimestampWithTimezone(timestamp=ts, offset="0", negative_utc=False)
with pytest.raises(AttributeTypeError):
TimestampWithTimezone(timestamp=ts, offset=1.0, negative_utc=False)
with pytest.raises(AttributeTypeError):
TimestampWithTimezone(timestamp=ts, offset=1, negative_utc=0)
with pytest.raises(ValueError):
TimestampWithTimezone(timestamp=ts, offset=1, negative_utc=True)
with pytest.raises(ValueError):
TimestampWithTimezone(timestamp=ts, offset=-1, negative_utc=True)
def test_timestampwithtimezone_from_datetime():
tz = datetime.timezone(datetime.timedelta(minutes=+60))
date = datetime.datetime(2020, 2, 27, 14, 39, 19, tzinfo=tz)
tstz = TimestampWithTimezone.from_datetime(date)
assert tstz == TimestampWithTimezone(
timestamp=Timestamp(seconds=1582810759, microseconds=0,),
offset=60,
negative_utc=False,
)
def test_timestampwithtimezone_from_naive_datetime():
date = datetime.datetime(2020, 2, 27, 14, 39, 19)
with pytest.raises(ValueError, match="datetime without timezone"):
TimestampWithTimezone.from_datetime(date)
def test_timestampwithtimezone_from_iso8601():
date = "2020-02-27 14:39:19.123456+0100"
tstz = TimestampWithTimezone.from_iso8601(date)
assert tstz == TimestampWithTimezone(
timestamp=Timestamp(seconds=1582810759, microseconds=123456,),
offset=60,
negative_utc=False,
)
def test_timestampwithtimezone_from_iso8601_negative_utc():
date = "2020-02-27 13:39:19-0000"
tstz = TimestampWithTimezone.from_iso8601(date)
assert tstz == TimestampWithTimezone(
timestamp=Timestamp(seconds=1582810759, microseconds=0,),
offset=0,
negative_utc=True,
)
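# Context note (an assumption, not part of the original diff): git dates may
# carry a "-0000" offset, which differs from "+0000"; negative_utc=True
# records that distinction so object identifiers can be recomputed
# byte-for-byte:
_neg_utc_sketch = TimestampWithTimezone.from_iso8601("2020-02-27 13:39:19-0000")
assert _neg_utc_sketch.offset == 0 and _neg_utc_sketch.negative_utc is True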
def test_person_from_fullname():
"""The author should have name, email and fullname filled.
"""
actual_person = Person.from_fullname(b"tony <ynot@dagobah>")
assert actual_person == Person(
fullname=b"tony <ynot@dagobah>", name=b"tony", email=b"ynot@dagobah",
)
def test_person_from_fullname_no_email():
"""The author and fullname should be the same as the input (author).
"""
actual_person = Person.from_fullname(b"tony")
assert actual_person == Person(fullname=b"tony", name=b"tony", email=None,)
def test_person_from_fullname_empty_person():
"""Empty person has only its fullname filled with the empty
byte-string.
"""
actual_person = Person.from_fullname(b"")
assert actual_person == Person(fullname=b"", name=None, email=None,)
def test_git_author_line_to_author():
# edge case out of the way
with pytest.raises(TypeError):
Person.from_fullname(None)
tests = {
b"a <b@c.com>": Person(name=b"a", email=b"b@c.com", fullname=b"a <b@c.com>",),
b"<foo@bar.com>": Person(
name=None, email=b"foo@bar.com", fullname=b"<foo@bar.com>",
),
b"malformed <email": Person(
name=b"malformed", email=b"email", fullname=b"malformed <email"
),
b'malformed <"<br"@ckets>': Person(
name=b"malformed",
email=b'"<br"@ckets',
fullname=b'malformed <"<br"@ckets>',
),
b"trailing <sp@c.e> ": Person(
name=b"trailing", email=b"sp@c.e", fullname=b"trailing <sp@c.e> ",
),
b"no<sp@c.e>": Person(name=b"no", email=b"sp@c.e", fullname=b"no<sp@c.e>",),
b" more <sp@c.es>": Person(
name=b"more", email=b"sp@c.es", fullname=b" more <sp@c.es>",
),
b" <>": Person(name=None, email=None, fullname=b" <>",),
}
for person in sorted(tests):
expected_person = tests[person]
assert expected_person == Person.from_fullname(person)
# Content
def test_content_get_hash():
hashes = dict(sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux")
c = Content(length=42, status="visible", **hashes)
for (hash_name, hash_) in hashes.items():
assert c.get_hash(hash_name) == hash_
def test_content_hashes():
hashes = dict(sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux")
c = Content(length=42, status="visible", **hashes)
assert c.hashes() == hashes
def test_content_data():
c = Content(
length=42,
status="visible",
data=b"foo",
sha1=b"foo",
sha1_git=b"bar",
sha256=b"baz",
blake2s256=b"qux",
)
assert c.with_data() == c
def test_content_data_missing():
c = Content(
length=42,
status="visible",
sha1=b"foo",
sha1_git=b"bar",
sha256=b"baz",
blake2s256=b"qux",
)
with pytest.raises(MissingData):
c.with_data()
@given(strategies.present_contents_d())
def test_content_from_dict(content_d):
c = Content.from_data(**content_d)
assert c
assert c.ctime == content_d["ctime"]
content_d2 = c.to_dict()
c2 = Content.from_dict(content_d2)
assert c2.ctime == c.ctime
def test_content_from_dict_str_ctime():
# test with ctime as a string
n = datetime.datetime(2020, 5, 6, 12, 34, tzinfo=datetime.timezone.utc)
content_d = {
"ctime": n.isoformat(),
"data": b"",
"length": 0,
"sha1": b"\x00",
"sha256": b"\x00",
"sha1_git": b"\x00",
"blake2s256": b"\x00",
}
c = Content.from_dict(content_d)
assert c.ctime == n
def test_content_from_dict_str_naive_ctime():
# test with ctime as a naive string (no timezone)
n = datetime.datetime(2020, 5, 6, 12, 34)
content_d = {
"ctime": n.isoformat(),
"data": b"",
"length": 0,
"sha1": b"\x00",
"sha256": b"\x00",
"sha1_git": b"\x00",
"blake2s256": b"\x00",
}
with pytest.raises(ValueError, match="must be a timezone-aware datetime."):
Content.from_dict(content_d)
@given(binary(max_size=4096))
def test_content_from_data(data):
c = Content.from_data(data)
assert c.data == data
assert c.length == len(data)
assert c.status == "visible"
for key, value in MultiHash.from_data(data).digest().items():
assert getattr(c, key) == value
@given(binary(max_size=4096))
def test_hidden_content_from_data(data):
c = Content.from_data(data, status="hidden")
assert c.data == data
assert c.length == len(data)
assert c.status == "hidden"
for key, value in MultiHash.from_data(data).digest().items():
assert getattr(c, key) == value
def test_content_naive_datetime():
c = Content.from_data(b"foo")
with pytest.raises(ValueError, match="must be a timezone-aware datetime"):
Content(
**c.to_dict(), ctime=datetime.datetime.now(),
)
# SkippedContent
@given(binary(max_size=4096))
def test_skipped_content_from_data(data):
c = SkippedContent.from_data(data, reason="reason")
assert c.reason == "reason"
assert c.length == len(data)
assert c.status == "absent"
for key, value in MultiHash.from_data(data).digest().items():
assert getattr(c, key) == value
@given(strategies.skipped_contents_d())
def test_skipped_content_origin_is_str(skipped_content_d):
assert SkippedContent.from_dict(skipped_content_d)
skipped_content_d["origin"] = "http://path/to/origin"
assert SkippedContent.from_dict(skipped_content_d)
skipped_content_d["origin"] = Origin(url="http://path/to/origin")
with pytest.raises(ValueError, match="origin"):
SkippedContent.from_dict(skipped_content_d)
def test_skipped_content_naive_datetime():
c = SkippedContent.from_data(b"foo", reason="reason")
with pytest.raises(ValueError, match="must be a timezone-aware datetime"):
SkippedContent(
**c.to_dict(), ctime=datetime.datetime.now(),
)
# Revision
def test_revision_extra_headers_no_headers():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev = Revision.from_dict(rev_dict)
rev_dict = attr.asdict(rev, recurse=False)
rev_model = Revision(**rev_dict)
assert rev_model.metadata is None
assert rev_model.extra_headers == ()
rev_dict["metadata"] = {
"something": "somewhere",
"some other thing": "stranger",
}
rev_model = Revision(**rev_dict)
assert rev_model.metadata == rev_dict["metadata"]
assert rev_model.extra_headers == ()
def test_revision_extra_headers_with_headers():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev = Revision.from_dict(rev_dict)
rev_dict = attr.asdict(rev, recurse=False)
rev_dict["metadata"] = {
"something": "somewhere",
"some other thing": "stranger",
}
extra_headers = (
(b"header1", b"value1"),
(b"header2", b"42"),
(b"header3", b"should I?\x00"),
(b"header1", b"again"),
)
rev_dict["extra_headers"] = extra_headers
rev_model = Revision(**rev_dict)
assert "extra_headers" not in rev_model.metadata
assert rev_model.extra_headers == extra_headers
def test_revision_extra_headers_in_metadata():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev = Revision.from_dict(rev_dict)
rev_dict = attr.asdict(rev, recurse=False)
rev_dict["metadata"] = {
"something": "somewhere",
"some other thing": "stranger",
}
extra_headers = (
(b"header1", b"value1"),
(b"header2", b"42"),
(b"header3", b"should I?\x00"),
(b"header1", b"again"),
)
# check the bw-compat init hook does the job,
# i.e. when extra_headers are given in the metadata field
rev_dict["metadata"]["extra_headers"] = extra_headers
rev_model = Revision(**rev_dict)
assert "extra_headers" not in rev_model.metadata
assert rev_model.extra_headers == extra_headers
def test_revision_extra_headers_as_lists():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev = Revision.from_dict(rev_dict)
rev_dict = attr.asdict(rev, recurse=False)
rev_dict["metadata"] = {}
extra_headers = (
(b"header1", b"value1"),
(b"header2", b"42"),
(b"header3", b"should I?\x00"),
(b"header1", b"again"),
)
# check the Revision.extra_headers converter turns the lists into tuples
rev_dict["extra_headers"] = [list(x) for x in extra_headers]
rev_model = Revision(**rev_dict)
assert "extra_headers" not in rev_model.metadata
assert rev_model.extra_headers == extra_headers
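# Design-note sketch (not part of the original diff): extra_headers is a
# tuple of (key, value) byte pairs rather than a dict because, as in raw git
# headers, keys may repeat and their order is significant:
_hdrs_sketch = ((b"header1", b"value1"), (b"header1", b"again"))
assert [v for (k, v) in _hdrs_sketch if k == b"header1"] == [b"value1", b"again"]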
def test_revision_extra_headers_type_error():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev = Revision.from_dict(rev_dict)
orig_rev_dict = attr.asdict(rev, recurse=False)
orig_rev_dict["metadata"] = {
"something": "somewhere",
"some other thing": "stranger",
}
extra_headers = (
("header1", b"value1"),
(b"header2", 42),
("header1", "again"),
)
# check headers one at a time
# if given as extra_header
for extra_header in extra_headers:
rev_dict = copy.deepcopy(orig_rev_dict)
rev_dict["extra_headers"] = (extra_header,)
with pytest.raises(AttributeTypeError):
Revision(**rev_dict)
# if given as metadata
for extra_header in extra_headers:
rev_dict = copy.deepcopy(orig_rev_dict)
rev_dict["metadata"]["extra_headers"] = (extra_header,)
with pytest.raises(AttributeTypeError):
Revision(**rev_dict)
def test_revision_extra_headers_from_dict():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev_model = Revision.from_dict(rev_dict)
assert rev_model.metadata is None
assert rev_model.extra_headers == ()
rev_dict["metadata"] = {
"something": "somewhere",
"some other thing": "stranger",
}
rev_model = Revision.from_dict(rev_dict)
assert rev_model.metadata == rev_dict["metadata"]
assert rev_model.extra_headers == ()
extra_headers = (
(b"header1", b"value1"),
(b"header2", b"42"),
(b"header3", b"should I?\nmaybe\x00\xff"),
(b"header1", b"again"),
)
rev_dict["extra_headers"] = extra_headers
rev_model = Revision.from_dict(rev_dict)
assert "extra_headers" not in rev_model.metadata
assert rev_model.extra_headers == extra_headers
def test_revision_extra_headers_in_metadata_from_dict():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev_dict["metadata"] = {
"something": "somewhere",
"some other thing": "stranger",
}
extra_headers = (
(b"header1", b"value1"),
(b"header2", b"42"),
(b"header3", b"should I?\nmaybe\x00\xff"),
(b"header1", b"again"),
)
# check the bw-compat init hook does the job
rev_dict["metadata"]["extra_headers"] = extra_headers
rev_model = Revision.from_dict(rev_dict)
assert "extra_headers" not in rev_model.metadata
assert rev_model.extra_headers == extra_headers
def test_revision_extra_headers_as_lists_from_dict():
rev_dict = revision_example.copy()
rev_dict.pop("id")
rev_model = Revision.from_dict(rev_dict)
rev_dict["metadata"] = {
"something": "somewhere",
"some other thing": "stranger",
}
extra_headers = (
(b"header1", b"value1"),
(b"header2", b"42"),
(b"header3", b"should I?\nmaybe\x00\xff"),
(b"header1", b"again"),
)
# check Revision.extra_headers converter does the job
rev_dict["extra_headers"] = [list(x) for x in extra_headers]
rev_model = Revision.from_dict(rev_dict)
assert "extra_headers" not in rev_model.metadata
assert rev_model.extra_headers == extra_headers
# ID computation
def test_content_model_id_computation():
cnt_dict = content_example.copy()
cnt_id_str = hash_to_hex(content_identifier(cnt_dict)["sha1_git"])
cnt_model = Content.from_data(cnt_dict["data"])
assert str(cnt_model.swhid()) == "swh:1:cnt:" + cnt_id_str
def test_directory_model_id_computation():
dir_dict = directory_example.copy()
del dir_dict["id"]
dir_id_str = directory_identifier(dir_dict)
dir_id = hash_to_bytes(dir_id_str)
dir_model = Directory.from_dict(dir_dict)
assert dir_model.id == dir_id
assert str(dir_model.swhid()) == "swh:1:dir:" + dir_id_str
def test_revision_model_id_computation():
rev_dict = revision_example.copy()
del rev_dict["id"]
rev_id_str = revision_identifier(rev_dict)
rev_id = hash_to_bytes(rev_id_str)
rev_model = Revision.from_dict(rev_dict)
assert rev_model.id == rev_id
assert str(rev_model.swhid()) == "swh:1:rev:" + rev_id_str
def test_revision_model_id_computation_with_no_date():
"""We can have revision with date to None
"""
rev_dict = revision_example.copy()
rev_dict["date"] = None
rev_dict["committer_date"] = None
del rev_dict["id"]
rev_id = hash_to_bytes(revision_identifier(rev_dict))
rev_model = Revision.from_dict(rev_dict)
assert rev_model.date is None
assert rev_model.committer_date is None
assert rev_model.id == rev_id
def test_release_model_id_computation():
rel_dict = release_example.copy()
del rel_dict["id"]
rel_id_str = release_identifier(rel_dict)
rel_id = hash_to_bytes(rel_id_str)
rel_model = Release.from_dict(rel_dict)
assert isinstance(rel_model.date, TimestampWithTimezone)
assert rel_model.id == rel_id
assert str(rel_model.swhid()) == "swh:1:rel:" + rel_id_str
def test_snapshot_model_id_computation():
snp_dict = snapshot_example.copy()
del snp_dict["id"]
snp_id_str = snapshot_identifier(snp_dict)
snp_id = hash_to_bytes(snp_id_str)
snp_model = Snapshot.from_dict(snp_dict)
assert snp_model.id == snp_id
assert str(snp_model.swhid()) == "swh:1:snp:" + snp_id_str
def test_origin_model_id_computation():
ori_dict = origin_example.copy()
ori_id_str = origin_identifier(ori_dict)
ori_model = Origin.from_dict(ori_dict)
assert str(ori_model.swhid()) == "swh:1:ori:" + ori_id_str
@given(strategies.objects(split_content=True))
def test_object_type(objtype_and_obj):
obj_type, obj = objtype_and_obj
assert obj_type == obj.object_type
def test_object_type_is_final():
object_types = set()
def check_final(cls):
if hasattr(cls, "object_type"):
assert cls.object_type not in object_types
object_types.add(cls.object_type)
if cls.__subclasses__():
assert not hasattr(cls, "object_type")
for subcls in cls.__subclasses__():
check_final(subcls)
check_final(BaseModel)
_metadata_authority = MetadataAuthority(
type=MetadataAuthorityType.FORGE, url="https://forge.softwareheritage.org",
)
_metadata_fetcher = MetadataFetcher(name="test-fetcher", version="0.0.1",)
_content_swhid = ExtendedSWHID.from_string(
"swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2"
)
_origin_url = "https://forge.softwareheritage.org/source/swh-model.git"
_origin_swhid = ExtendedSWHID.from_string(
"swh:1:ori:94a9ed024d3859793618152ea559a168bbcbb5e2"
)
_dummy_qualifiers = {"origin": "https://example.com", "lines": "42"}
_common_metadata_fields = dict(
discovery_date=datetime.datetime(
2021, 1, 29, 13, 57, 9, tzinfo=datetime.timezone.utc
),
authority=_metadata_authority,
fetcher=_metadata_fetcher,
format="json",
metadata=b'{"origin": "https://example.com", "lines": "42"}',
)
def test_metadata_valid():
"""Checks valid RawExtrinsicMetadata objects don't raise an error."""
# Simplest case
RawExtrinsicMetadata(target=_origin_swhid, **_common_metadata_fields)
# Object with an SWHID
RawExtrinsicMetadata(
target=_content_swhid, **_common_metadata_fields,
)
def test_metadata_to_dict():
"""Checks valid RawExtrinsicMetadata objects don't raise an error."""
common_fields = {
"authority": {"type": "forge", "url": "https://forge.softwareheritage.org"},
"fetcher": {"name": "test-fetcher", "version": "0.0.1",},
"discovery_date": _common_metadata_fields["discovery_date"],
"format": "json",
"metadata": b'{"origin": "https://example.com", "lines": "42"}',
}
m = RawExtrinsicMetadata(target=_origin_swhid, **_common_metadata_fields,)
assert m.to_dict() == {
"target": str(_origin_swhid),
"id": b"@j\xc9\x01\xbc\x1e#p*\xf3q9\xa7u\x97\x00\x14\x02xa",
**common_fields,
}
assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m
m = RawExtrinsicMetadata(target=_content_swhid, **_common_metadata_fields,)
assert m.to_dict() == {
"target": "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
"id": b"\xbc\xa3U\xddf\x19U\xc5\xd2\xd7\xdfK\xd7c\x1f\xa8\xfeh\x992",
**common_fields,
}
assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m
hash_hex = "6162" * 10
hash_bin = b"ab" * 10
m = RawExtrinsicMetadata(
target=_content_swhid,
**_common_metadata_fields,
origin="https://example.org/",
snapshot=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=hash_bin),
release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=hash_bin),
revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=hash_bin),
path=b"/foo/bar",
directory=CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=hash_bin),
)
assert m.to_dict() == {
"target": "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
"id": b"\x14l\xb0\x1f\xb9\xc0{)\xc7\x0f\xbd\xc0*,YZ\xf5C\xab\xfc",
**common_fields,
"origin": "https://example.org/",
"snapshot": f"swh:1:snp:{hash_hex}",
"release": f"swh:1:rel:{hash_hex}",
"revision": f"swh:1:rev:{hash_hex}",
"path": b"/foo/bar",
"directory": f"swh:1:dir:{hash_hex}",
}
assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m
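# Note (consistent with the raw_extrinsic_metadata manifests tested earlier
# in test_identifiers.py): the "id" bytes above are intrinsic, i.e. the sha1
# digest, as raw bytes, of the object's "raw_extrinsic_metadata" manifest;
# that is why the id changes whenever a context field such as origin, path
# or snapshot is added.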
def test_metadata_invalid_target():
"""Checks various invalid values for the 'target' field."""
# SWHID passed as string instead of SWHID
with pytest.raises(ValueError, match="target must be.*ExtendedSWHID"):
RawExtrinsicMetadata(
target="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
**_common_metadata_fields,
)
def test_metadata_naive_datetime():
with pytest.raises(ValueError, match="must be a timezone-aware datetime"):
RawExtrinsicMetadata(
target=_origin_swhid,
**{**_common_metadata_fields, "discovery_date": datetime.datetime.now()},
)
def test_metadata_validate_context_origin():
"""Checks validation of RawExtrinsicMetadata.origin."""
# Origins can't have an 'origin' context
with pytest.raises(
ValueError, match="Unexpected 'origin' context for origin object"
):
RawExtrinsicMetadata(
target=_origin_swhid, origin=_origin_url, **_common_metadata_fields,
)
# but all other types can
RawExtrinsicMetadata(
target=_content_swhid, origin=_origin_url, **_common_metadata_fields,
)
# SWHIDs aren't valid origin URLs
with pytest.raises(ValueError, match="SWHID used as context origin URL"):
RawExtrinsicMetadata(
target=_content_swhid,
origin="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
**_common_metadata_fields,
)
def test_metadata_validate_context_visit():
"""Checks validation of RawExtrinsicMetadata.visit."""
# Origins can't have a 'visit' context
with pytest.raises(
ValueError, match="Unexpected 'visit' context for origin object"
):
RawExtrinsicMetadata(
target=_origin_swhid, visit=42, **_common_metadata_fields,
)
# but all other types can
RawExtrinsicMetadata(
target=_content_swhid, origin=_origin_url, visit=42, **_common_metadata_fields,
)
# Missing 'origin'
with pytest.raises(ValueError, match="'origin' context must be set if 'visit' is"):
RawExtrinsicMetadata(
target=_content_swhid, visit=42, **_common_metadata_fields,
)
# visit id must be positive
with pytest.raises(ValueError, match="Nonpositive visit id"):
RawExtrinsicMetadata(
target=_content_swhid,
origin=_origin_url,
visit=-42,
**_common_metadata_fields,
)
def test_metadata_validate_context_snapshot():
"""Checks validation of RawExtrinsicMetadata.snapshot."""
# Origins can't have a 'snapshot' context
with pytest.raises(
ValueError, match="Unexpected 'snapshot' context for origin object"
):
RawExtrinsicMetadata(
target=_origin_swhid,
snapshot=CoreSWHID(
object_type=ObjectType.SNAPSHOT, object_id=EXAMPLE_HASH,
),
**_common_metadata_fields,
)
# but content can
RawExtrinsicMetadata(
target=_content_swhid,
snapshot=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=EXAMPLE_HASH),
**_common_metadata_fields,
)
# SWHID type doesn't match the expected type of this context key
with pytest.raises(
ValueError, match="Expected SWHID type 'snapshot', got 'content'"
):
RawExtrinsicMetadata(
target=_content_swhid,
snapshot=CoreSWHID(object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH,),
**_common_metadata_fields,
)
def test_metadata_validate_context_release():
"""Checks validation of RawExtrinsicMetadata.release."""
# Origins can't have a 'release' context
with pytest.raises(
ValueError, match="Unexpected 'release' context for origin object"
):
RawExtrinsicMetadata(
target=_origin_swhid,
release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=EXAMPLE_HASH,),
**_common_metadata_fields,
)
# but content can
RawExtrinsicMetadata(
target=_content_swhid,
release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=EXAMPLE_HASH),
**_common_metadata_fields,
)
# SWHID type doesn't match the expected type of this context key
with pytest.raises(
ValueError, match="Expected SWHID type 'release', got 'content'"
):
RawExtrinsicMetadata(
target=_content_swhid,
release=CoreSWHID(object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH,),
**_common_metadata_fields,
)
def test_metadata_validate_context_revision():
"""Checks validation of RawExtrinsicMetadata.revision."""
# Origins can't have a 'revision' context
with pytest.raises(
ValueError, match="Unexpected 'revision' context for origin object"
):
RawExtrinsicMetadata(
target=_origin_swhid,
revision=CoreSWHID(
object_type=ObjectType.REVISION, object_id=EXAMPLE_HASH,
),
**_common_metadata_fields,
)
# but content can
RawExtrinsicMetadata(
target=_content_swhid,
revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=EXAMPLE_HASH),
**_common_metadata_fields,
)
# SWHID type doesn't match the expected type of this context key
with pytest.raises(
ValueError, match="Expected SWHID type 'revision', got 'content'"
):
RawExtrinsicMetadata(
target=_content_swhid,
revision=CoreSWHID(object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH,),
**_common_metadata_fields,
)
def test_metadata_validate_context_path():
"""Checks validation of RawExtrinsicMetadata.path."""
# Origins can't have a 'path' context
with pytest.raises(ValueError, match="Unexpected 'path' context for origin object"):
RawExtrinsicMetadata(
target=_origin_swhid, path=b"/foo/bar", **_common_metadata_fields,
)
# but content can
RawExtrinsicMetadata(
target=_content_swhid, path=b"/foo/bar", **_common_metadata_fields,
)
def test_metadata_validate_context_directory():
"""Checks validation of RawExtrinsicMetadata.directory."""
# Origins can't have a 'directory' context
with pytest.raises(
ValueError, match="Unexpected 'directory' context for origin object"
):
RawExtrinsicMetadata(
target=_origin_swhid,
directory=CoreSWHID(
object_type=ObjectType.DIRECTORY, object_id=EXAMPLE_HASH,
),
**_common_metadata_fields,
)
# but content can
RawExtrinsicMetadata(
target=_content_swhid,
directory=CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=EXAMPLE_HASH,),
**_common_metadata_fields,
)
# SWHID type doesn't match the expected type of this context key
with pytest.raises(
ValueError, match="Expected SWHID type 'directory', got 'content'"
):
RawExtrinsicMetadata(
target=_content_swhid,
directory=CoreSWHID(
object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH,
),
**_common_metadata_fields,
)
