diff --git a/swh/loader/package/debian/loader.py b/swh/loader/package/debian/loader.py index 823c4fb..015d70b 100644 --- a/swh/loader/package/debian/loader.py +++ b/swh/loader/package/debian/loader.py @@ -1,500 +1,500 @@ # Copyright (C) 2017-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import email.utils import logging from os import path import re import subprocess from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple import attr from dateutil.parser import parse as parse_date from debian.changelog import Changelog from debian.deb822 import Dsc from swh.loader.package.loader import BasePackageInfo, PackageLoader, PartialExtID from swh.loader.package.utils import download, release_name from swh.model.hashutil import hash_to_bytes from swh.model.model import ( Person, Revision, RevisionType, Sha1Git, TimestampWithTimezone, ) from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) UPLOADERS_SPLIT = re.compile(r"(?<=\>)\s*,\s*") EXTID_TYPE = "dsc-sha256" class DscCountError(ValueError): """Raised when an unexpected number of .dsc files is seen""" pass @attr.s class DebianFileMetadata: md5sum = attr.ib(type=str) name = attr.ib(type=str) """Filename""" sha256 = attr.ib(type=str) size = attr.ib(type=int) uri = attr.ib(type=str) """URL of this specific file""" @attr.s class DebianPackageChangelog: person = attr.ib(type=Dict[str, str]) """A dict with fields like, model.Person, except they are str instead of bytes, and 'email' is optional.""" date = attr.ib(type=str) """Date of the changelog entry.""" history = attr.ib(type=List[Tuple[str, str]]) """List of tuples (package_name, version)""" @attr.s class DebianPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) files = attr.ib(type=Dict[str, DebianFileMetadata]) """Metadata of the files (.deb, .dsc, ...) of the package.""" name = attr.ib(type=str) version = attr.ib(type=str) @classmethod def from_metadata(cls, a_metadata: Dict[str, Any], url: str) -> "DebianPackageInfo": return cls( url=url, filename=None, raw_info=a_metadata, files={ file_name: DebianFileMetadata(**file_metadata) for (file_name, file_metadata) in a_metadata.get("files", {}).items() }, name=a_metadata["name"], version=a_metadata["version"], ) def extid(self) -> Optional[PartialExtID]: dsc_files = [ file for (name, file) in self.files.items() if name.endswith(".dsc") ] if len(dsc_files) != 1: raise DscCountError( f"Expected exactly one .dsc file for package {self.name}, " f"got {len(dsc_files)}" ) return (EXTID_TYPE, hash_to_bytes(dsc_files[0].sha256)) @attr.s class IntrinsicPackageMetadata: """Metadata extracted from a package's .dsc file.""" name = attr.ib(type=str) version = attr.ib(type=str) changelog = attr.ib(type=DebianPackageChangelog) maintainers = attr.ib(type=List[Dict[str, str]]) """A list of dicts with fields like, model.Person, except they are str instead of bytes, and 'email' is optional.""" class DebianLoader(PackageLoader[DebianPackageInfo]): """Load debian origins into swh archive. """ visit_type = "deb" def __init__( self, storage: StorageInterface, url: str, date: str, packages: Mapping[str, Any], max_content_size: Optional[int] = None, ): """Debian Loader implementation. Args: url: Origin url (e.g. 
deb://Debian/packages/cicero) date: Ignored packages: versioned packages and associated artifacts, example:: { 'stretch/contrib/0.7.2-3': { 'name': 'cicero', 'version': '0.7.2-3' 'files': { 'cicero_0.7.2-3.diff.gz': { 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', 'name': 'cicero_0.7.2-3.diff.gz', 'sha256': 'f039c9642fe15c75bed5254315e2a29f...', 'size': 3964, 'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz', }, 'cicero_0.7.2-3.dsc': { 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', 'name': 'cicero_0.7.2-3.dsc', 'sha256': '35b7f1048010c67adfd8d70e4961aefb...', 'size': 1864, 'uri': 'http://d.d.o/cicero_0.7.2-3.dsc', }, 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbab...', 'size': 96527, 'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz', } }, }, # ... } """ super().__init__(storage=storage, url=url, max_content_size=max_content_size) self.packages = packages def get_versions(self) -> Sequence[str]: """Returns the keys of the packages input (e.g. stretch/contrib/0.7.2-3, etc...) """ return list(self.packages.keys()) def get_package_info(self, version: str) -> Iterator[Tuple[str, DebianPackageInfo]]: meta = self.packages[version] p_info = DebianPackageInfo.from_metadata(meta, url=self.url) yield release_name(version), p_info def known_artifact_to_extid(self, known_artifact: Dict) -> Optional[PartialExtID]: sha256 = _artifact_to_dsc_sha256(known_artifact, url=self.url) if sha256 is None: return None return (EXTID_TYPE, hash_to_bytes(sha256)) - def resolve_revision_from( + def resolve_revision_from_artifacts( self, known_artifacts: Dict, p_info: DebianPackageInfo, ) -> Optional[bytes]: try: - return super().resolve_revision_from(known_artifacts, p_info) + return super().resolve_revision_from_artifacts(known_artifacts, p_info) except DscCountError: # known_artifacts are corrupted, ignore them instead of crashing return None def download_package( self, p_info: DebianPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Contrary to other package loaders (1 package, 1 artifact), `p_info.files` represents the package's datafiles set to fetch: - .orig.tar.gz - .dsc - .diff.gz This is delegated to the `download_package` function. 
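A rough sketch of the resulting flow, assuming ``p_info`` was built with
        ``DebianPackageInfo.from_metadata`` from a package mapping like the one
        shown in the class docstring above::

            all_hashes = download_package(p_info, tmpdir)
            # e.g. {"cicero_0.7.2-3.dsc": {"checksums": {...},
            #       "filename": "cicero_0.7.2-3.dsc", "length": 1864,
            #       "url": "http://..."}, ...}
            dl_artifacts = [(tmpdir, hashes) for hashes in all_hashes.values()]
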
""" all_hashes = download_package(p_info, tmpdir) logger.debug("all_hashes: %s", all_hashes) res = [] for hashes in all_hashes.values(): res.append((tmpdir, hashes)) logger.debug("res: %s", res) return res def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: logger.debug("dl_artifacts: %s", dl_artifacts) return extract_package(dl_artifacts, dest=dest) def build_revision( self, p_info: DebianPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: dsc_url, dsc_name = dsc_information(p_info) if not dsc_name: raise ValueError("dsc name for url %s should not be None" % dsc_url) dsc_path = path.join(path.dirname(uncompressed_path), dsc_name) intrinsic_metadata = get_intrinsic_package_metadata( p_info, dsc_path, uncompressed_path ) logger.debug("intrinsic_metadata: %s", intrinsic_metadata) logger.debug("p_info: %s", p_info) msg = "Synthetic revision for Debian source package %s version %s" % ( p_info.name, p_info.version, ) author = prepare_person(intrinsic_metadata.changelog.person) date = TimestampWithTimezone.from_iso8601(intrinsic_metadata.changelog.date) # inspired from swh.loader.debian.converters.package_metadata_to_revision # noqa return Revision( type=RevisionType.DSC, message=msg.encode("utf-8"), author=author, date=date, committer=author, committer_date=date, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {"tool": "dsc", "raw": attr.asdict(intrinsic_metadata),}, "extrinsic": { "provider": dsc_url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) def _artifact_to_dsc_sha256(known_artifacts: Dict, url: str) -> Optional[str]: extrinsic = known_artifacts.get("extrinsic") if not extrinsic: return None known_p_info = DebianPackageInfo.from_metadata(extrinsic["raw"], url=url) dsc = [file for (name, file) in known_p_info.files.items() if name.endswith(".dsc")] if len(dsc) != 1: raise DscCountError( f"Expected exactly one known .dsc file for package {known_p_info.name}, " f"got {len(dsc)}" ) return dsc[0].sha256 def uid_to_person(uid: str) -> Dict[str, str]: """Convert an uid to a person suitable for insertion. Args: uid: an uid of the form "Name " Returns: a dictionary with the following keys: - name: the name associated to the uid - email: the mail associated to the uid - fullname: the actual uid input """ logger.debug("uid: %s", uid) ret = { "name": "", "email": "", "fullname": uid, } name, mail = email.utils.parseaddr(uid) if name and email: ret["name"] = name ret["email"] = mail else: ret["name"] = uid return ret def prepare_person(person: Mapping[str, str]) -> Person: """Prepare person for swh serialization... Args: A person dict Returns: A person ready for storage """ return Person.from_dict( {key: value.encode("utf-8") for (key, value) in person.items()} ) def download_package(p_info: DebianPackageInfo, tmpdir: Any) -> Mapping[str, Any]: """Fetch a source package in a temporary directory and check the checksums for all files. 
Args: p_info: Information on a package tmpdir: Where to download and extract the files to ingest Returns: Dict of swh hashes per filename key """ all_hashes = {} for filename, fileinfo in p_info.files.items(): uri = fileinfo.uri logger.debug("fileinfo: %s", fileinfo) extrinsic_hashes = {"sha256": fileinfo.sha256} logger.debug("extrinsic_hashes(%s): %s", filename, extrinsic_hashes) filepath, hashes = download( uri, dest=tmpdir, filename=filename, hashes=extrinsic_hashes ) all_hashes[filename] = hashes logger.debug("all_hashes: %s", all_hashes) return all_hashes def dsc_information(p_info: DebianPackageInfo) -> Tuple[Optional[str], Optional[str]]: """Retrieve dsc information from a package. Args: p_info: Package metadata information Returns: Tuple of dsc file's uri, dsc's full disk path """ dsc_name = None dsc_url = None for filename, fileinfo in p_info.files.items(): if filename.endswith(".dsc"): if dsc_name: raise DscCountError( "Package %s_%s references several dsc files." % (p_info.name, p_info.version) ) dsc_url = fileinfo.uri dsc_name = filename return dsc_url, dsc_name def extract_package(dl_artifacts: List[Tuple[str, Mapping]], dest: str) -> str: """Extract a Debian source package to a given directory. Note that after extraction the target directory will be the root of the extracted package, rather than containing it. Args: package: package information dictionary dest: directory where the package files are stored Returns: Package extraction directory """ a_path = dl_artifacts[0][0] logger.debug("dl_artifacts: %s", dl_artifacts) for _, hashes in dl_artifacts: logger.debug("hashes: %s", hashes) filename = hashes["filename"] if filename.endswith(".dsc"): dsc_name = filename break dsc_path = path.join(a_path, dsc_name) destdir = path.join(dest, "extracted") logfile = path.join(dest, "extract.log") logger.debug( "extract Debian source package %s in %s" % (dsc_path, destdir), extra={"swh_type": "deb_extract", "swh_dsc": dsc_path, "swh_destdir": destdir,}, ) cmd = [ "dpkg-source", "--no-copy", "--no-check", "--ignore-bad-version", "-x", dsc_path, destdir, ] try: with open(logfile, "w") as stdout: subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logdata = open(logfile, "r").read() raise ValueError( "dpkg-source exited with code %s: %s" % (e.returncode, logdata) ) from None return destdir def get_intrinsic_package_metadata( p_info: DebianPackageInfo, dsc_path: str, extracted_path: str ) -> IntrinsicPackageMetadata: """Get the package metadata from the source package at dsc_path, extracted in extracted_path. 
Args: p_info: the package information dsc_path: path to the package's dsc file extracted_path: the path where the package got extracted Returns: dict: a dictionary with the following keys: - history: list of (package_name, package_version) tuples parsed from the package changelog """ with open(dsc_path, "rb") as dsc: parsed_dsc = Dsc(dsc) # Parse the changelog to retrieve the rest of the package information changelog_path = path.join(extracted_path, "debian/changelog") with open(changelog_path, "rb") as changelog_file: try: parsed_changelog = Changelog(changelog_file) except UnicodeDecodeError: logger.warning( "Unknown encoding for changelog %s," " falling back to iso" % changelog_path, extra={ "swh_type": "deb_changelog_encoding", "swh_name": p_info.name, "swh_version": str(p_info.version), "swh_changelog": changelog_path, }, ) # need to reset as Changelog scrolls to the end of the file changelog_file.seek(0) parsed_changelog = Changelog(changelog_file, encoding="iso-8859-15") history: List[Tuple[str, str]] = [] for block in parsed_changelog: assert block.package is not None history.append((block.package, str(block.version))) changelog = DebianPackageChangelog( person=uid_to_person(parsed_changelog.author), date=parse_date(parsed_changelog.date).isoformat(), history=history[1:], ) maintainers = [ uid_to_person(parsed_dsc["Maintainer"]), ] maintainers.extend( uid_to_person(person) for person in UPLOADERS_SPLIT.split(parsed_dsc.get("Uploaders", "")) ) return IntrinsicPackageMetadata( name=p_info.name, version=str(p_info.version), changelog=changelog, maintainers=maintainers, ) diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py index 2d5d9a4..e17b9fa 100644 --- a/swh/loader/package/debian/tests/test_debian.py +++ b/swh/loader/package/debian/tests/test_debian.py @@ -1,554 +1,562 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging from os import path import random import pytest from swh.loader.package.debian.loader import ( DebianLoader, DebianPackageChangelog, DebianPackageInfo, IntrinsicPackageMetadata, download_package, dsc_information, extract_package, get_intrinsic_package_metadata, prepare_person, uid_to_person, ) from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import Person, Snapshot, SnapshotBranch, TargetType logger = logging.getLogger(__name__) URL = "deb://Debian/packages/cicero" PACKAGE_FILES = { "name": "cicero", "version": "0.7.2-3", "files": { "cicero_0.7.2-3.diff.gz": { "md5sum": "a93661b6a48db48d59ba7d26796fc9ce", "name": "cicero_0.7.2-3.diff.gz", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa "size": 3964, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz", # noqa }, "cicero_0.7.2-3.dsc": { "md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a", "name": "cicero_0.7.2-3.dsc", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa "size": 1864, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": "cicero_0.7.2.orig.tar.gz", "sha256": 
"63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_FILES2 = { "name": "cicero", "version": "0.7.2-4", "files": { "cicero_0.7.2-4.diff.gz": { "md5sum": "1e7e6fc4a59d57c98082a3af78145734", "name": "cicero_0.7.2-4.diff.gz", "sha256": "2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01", # noqa "size": 4038, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz", # noqa }, "cicero_0.7.2-4.dsc": { "md5sum": "1a6c8855a73b4282bb31d15518f18cde", "name": "cicero_0.7.2-4.dsc", "sha256": "913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b", # noqa "size": 1881, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": "cicero_0.7.2.orig.tar.gz", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, } PACKAGES_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, "buster/contrib/0.7.2-4": PACKAGE_FILES2, } def test_debian_first_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ loader = DebianLoader( swh_storage, URL, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGE_PER_VERSION, ) actual_load_status = loader.load() expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches(swh_storage, URL, status="full", type="deb") stats = get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, # all artifacts under 1 revision "skipped_content": 0, "snapshot": 1, } == stats expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), ) }, ) # different than the previous loader as no release is done check_snapshot(expected_snapshot, swh_storage) def test_debian_first_visit_then_another_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a debian project ends up with 1 snapshot """ loader = DebianLoader( swh_storage, URL, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGE_PER_VERSION, ) actual_load_status = loader.load() expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches(swh_storage, URL, status="full", type="deb") stats = get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, # all artifacts under 1 revision "skipped_content": 0, "snapshot": 1, } == stats expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), ) }, ) # different than the previous loader as no release is done check_snapshot(expected_snapshot, swh_storage) # No change in between load 
actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" assert_last_visit_matches(swh_storage, URL, status="full", type="deb") stats2 = get_stats(swh_storage) assert { "content": 42 + 0, "directory": 2 + 0, "origin": 1, "origin_visit": 1 + 1, # a new visit occurred "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, # same snapshot across 2 visits } == stats2 urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("http://deb.debian.org") ] # visited each package artifact twice across 2 visits assert len(urls) == len(set(urls)) def test_debian_uid_to_person(): uid = "Someone Name " actual_person = uid_to_person(uid) assert actual_person == { "name": "Someone Name", "email": "someone@orga.org", "fullname": uid, } def test_debian_prepare_person(): actual_author = prepare_person( { "name": "Someone Name", "email": "someone@orga.org", "fullname": "Someone Name ", } ) assert actual_author == Person( name=b"Someone Name", email=b"someone@orga.org", fullname=b"Someone Name ", ) def test_debian_download_package(datadir, tmpdir, requests_mock_datadir): tmpdir = str(tmpdir) # py3.5 work around (LocalPath issue) p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) all_hashes = download_package(p_info, tmpdir) assert all_hashes == { "cicero_0.7.2-3.diff.gz": { "checksums": { "sha1": "0815282053f21601b0ec4adf7a8fe47eace3c0bc", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa }, "filename": "cicero_0.7.2-3.diff.gz", "length": 3964, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2-3.diff.gz" ), }, "cicero_0.7.2-3.dsc": { "checksums": { "sha1": "abbec4e8efbbc80278236e1dd136831eac08accd", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa }, "filename": "cicero_0.7.2-3.dsc", "length": 1864, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc" ), }, "cicero_0.7.2.orig.tar.gz": { "checksums": { "sha1": "a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa }, "filename": "cicero_0.7.2.orig.tar.gz", "length": 96527, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2.orig.tar.gz" ), }, } def test_debian_dsc_information_ok(): fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) dsc_url, dsc_name = dsc_information(p_info) assert dsc_url == PACKAGE_FILES["files"][fname]["uri"] assert dsc_name == PACKAGE_FILES["files"][fname]["name"] def test_debian_dsc_information_not_found(): fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) p_info.files.pop(fname) dsc_url, dsc_name = dsc_information(p_info) assert dsc_url is None assert dsc_name is None def test_debian_dsc_information_too_many_dsc_entries(): # craft an extra dsc file fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) data = p_info.files[fname] fname2 = fname.replace("cicero", "ciceroo") p_info.files[fname2] = data with pytest.raises( ValueError, match="Package %s_%s references several dsc" % (PACKAGE_FILES["name"], PACKAGE_FILES["version"]), ): dsc_information(p_info) def test_debian_get_intrinsic_package_metadata( requests_mock_datadir, datadir, tmp_path ): tmp_path = str(tmp_path) # py3.5 compat. 
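    # The steps below mirror the loader's own pipeline: download_package(),
    # dsc_information() to locate the .dsc, extract_package() (dpkg-source -x),
    # then get_intrinsic_package_metadata() on the extracted tree.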
p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) logger.debug("p_info: %s", p_info) # download the packages all_hashes = download_package(p_info, tmp_path) # Retrieve information from package _, dsc_name = dsc_information(p_info) dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()] # Extract information from package extracted_path = extract_package(dl_artifacts, tmp_path) # Retrieve information on package dsc_path = path.join(path.dirname(extracted_path), dsc_name) actual_package_info = get_intrinsic_package_metadata( p_info, dsc_path, extracted_path ) logger.debug("actual_package_info: %s", actual_package_info) assert actual_package_info == IntrinsicPackageMetadata( changelog=DebianPackageChangelog( date="2014-10-19T16:52:35+02:00", history=[ ("cicero", "0.7.2-2"), ("cicero", "0.7.2-1"), ("cicero", "0.7-1"), ], person={ "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ), maintainers=[ { "email": "debian-accessibility@lists.debian.org", "fullname": "Debian Accessibility Team " "", "name": "Debian Accessibility Team", }, { "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ], name="cicero", version="0.7.2-3", ) def test_debian_multiple_packages(swh_storage, requests_mock_datadir): loader = DebianLoader( swh_storage, URL, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGES_PER_VERSION, ) actual_load_status = loader.load() expected_snapshot_id = "defc19021187f3727293121fcf6c5c82cb923604" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches(swh_storage, URL, status="full", type="deb") expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), ), b"releases/buster/contrib/0.7.2-4": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("8224139c274c984147ef4b09aa0e462c55a10bd3"), ), }, ) check_snapshot(expected_snapshot, swh_storage) -def test_debian_resolve_revision_from_edge_cases(): +def test_debian_resolve_revision_from_artifacts_edge_cases(): """Solving revision with empty data will result in unknown revision """ loader = DebianLoader(None, None, None, None) empty_artifact = { "name": PACKAGE_FILES["name"], "version": PACKAGE_FILES["version"], } for package_artifacts in [empty_artifact, PACKAGE_FILES]: p_info = DebianPackageInfo.from_metadata(package_artifacts, url=URL) - actual_revision = loader.resolve_revision_from({}, p_info) + actual_revision = loader.resolve_revision_from_artifacts({}, p_info) assert actual_revision is None for known_artifacts in [{}, PACKAGE_FILES]: - actual_revision = loader.resolve_revision_from( + actual_revision = loader.resolve_revision_from_artifacts( known_artifacts, DebianPackageInfo.from_metadata(empty_artifact, url=URL) ) assert actual_revision is None known_package_artifacts = { b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07": { "extrinsic": { # empty }, # ... 
removed the unnecessary intermediary data } } - assert not loader.resolve_revision_from( + assert not loader.resolve_revision_from_artifacts( known_package_artifacts, DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) ) -def test_debian_resolve_revision_from_edge_cases_hit_and_miss(): +def test_debian_resolve_revision_from_artifacts_edge_cases_hit_and_miss(): """Solving revision with inconsistent data will result in unknown revision """ loader = DebianLoader(None, None, None, None) artifact_metadata = PACKAGE_FILES2 p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL) expected_revision_id = ( b"(\x08\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xff\x85\x85O\xfe\xcf\x07" # noqa ) known_package_artifacts = { expected_revision_id: { "extrinsic": {"raw": PACKAGE_FILES,}, # ... removed the unnecessary intermediary data } } - actual_revision = loader.resolve_revision_from(known_package_artifacts, p_info) + actual_revision = loader.resolve_revision_from_artifacts( + known_package_artifacts, p_info + ) assert actual_revision is None -def test_debian_resolve_revision_from(): +def test_debian_resolve_revision_from_artifacts(): """Solving revision with consistent data will solve the revision """ loader = DebianLoader(None, None, None, None) artifact_metadata = PACKAGE_FILES p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL) expected_revision_id = ( b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07" # noqa ) files = artifact_metadata["files"] # shuffling dict's keys keys = list(files.keys()) random.shuffle(keys) package_files = { "name": PACKAGE_FILES["name"], "version": PACKAGE_FILES["version"], "files": {k: files[k] for k in keys}, } known_package_artifacts = { expected_revision_id: { "extrinsic": {"raw": package_files,}, # ... removed the unnecessary intermediary data } } - actual_revision = loader.resolve_revision_from(known_package_artifacts, p_info) + actual_revision = loader.resolve_revision_from_artifacts( + known_package_artifacts, p_info + ) assert actual_revision == expected_revision_id -def test_debian_resolve_revision_from_corrupt_known_artifact(): +def test_debian_resolve_revision_from_artifacts_corrupt_known_artifact(): """To many or not enough .dsc files in the known_artifacts dict""" loader = DebianLoader(None, None, None, None) artifact_metadata = PACKAGE_FILES p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL) expected_revision_id = ( b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07" ) files = dict(artifact_metadata["files"]) package_files = { "name": PACKAGE_FILES["name"], "version": PACKAGE_FILES["version"], "files": files, } known_package_artifacts = { expected_revision_id: { "extrinsic": {"raw": package_files,}, # ... 
removed the unnecessary intermediary data } } # Too many .dsc files["another.dsc"] = files["cicero_0.7.2-3.dsc"] - assert loader.resolve_revision_from(known_package_artifacts, p_info) is None + assert ( + loader.resolve_revision_from_artifacts(known_package_artifacts, p_info) is None + ) # Not enough .dsc del files["another.dsc"] del files["cicero_0.7.2-3.dsc"] - assert loader.resolve_revision_from(known_package_artifacts, p_info) is None + assert ( + loader.resolve_revision_from_artifacts(known_package_artifacts, p_info) is None + ) -def test_debian_resolve_revision_from_corrupt_new_artifact(): +def test_debian_resolve_revision_from_artifacts_corrupt_new_artifact(): loader = DebianLoader(None, None, None, None) artifact_metadata = PACKAGE_FILES files = PACKAGE_FILES["files"] files = {**files, "another.dsc": files["cicero_0.7.2-3.dsc"]} artifact_metadata = {**PACKAGE_FILES, "files": files} # Too many .dsc files["another.dsc"] = files["cicero_0.7.2-3.dsc"] p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL) - assert loader.resolve_revision_from(PACKAGE_FILES, p_info) is None + assert loader.resolve_revision_from_artifacts(PACKAGE_FILES, p_info) is None # Not enough .dsc del files["another.dsc"] del files["cicero_0.7.2-3.dsc"] p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL) - assert loader.resolve_revision_from(PACKAGE_FILES, p_info) is None + assert loader.resolve_revision_from_artifacts(PACKAGE_FILES, p_info) is None diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index 31e3f40..66702a6 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,885 +1,885 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib from itertools import islice import json import logging import os import string import sys import tempfile from typing import ( Any, Dict, Generic, Iterable, Iterator, List, Mapping, Optional, Sequence, Tuple, TypeVar, ) import attr import sentry_sdk from swh.core.tarball import uncompress from swh.loader.core.loader import BaseLoader from swh.loader.exception import NotFound from swh.loader.package.utils import download from swh.model import from_disk from swh.model.collections import ImmutableDict from swh.model.hashutil import hash_to_hex from swh.model.identifiers import ( CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType, ) from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, Origin, OriginVisit, OriginVisitStatus, RawExtrinsicMetadata, Revision, Sha1Git, Snapshot, TargetType, ) from swh.storage.algos.snapshot import snapshot_get_latest from swh.storage.interface import StorageInterface from swh.storage.utils import now logger = logging.getLogger(__name__) SWH_METADATA_AUTHORITY = MetadataAuthority( type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/", metadata={}, ) """Metadata authority for extrinsic metadata generated by Software Heritage. Used for metadata on "original artifacts", ie. 
length, filename, and checksums of downloaded archive files.""" PartialExtID = Tuple[str, bytes] """The ``extid_type`` and ``extid`` fields of an :ref:py:`ExtID` object.""" @attr.s class RawExtrinsicMetadataCore: """Contains the core of the metadata extracted by a loader, that will be used to build a full RawExtrinsicMetadata object by adding object identifier, context, and provenance information.""" format = attr.ib(type=str) metadata = attr.ib(type=bytes) discovery_date = attr.ib(type=Optional[datetime.datetime], default=None) """Defaults to the visit date.""" @attr.s class BasePackageInfo: """Compute the primary key for a dict using the id_keys as primary key composite. Args: d: A dict entry to compute the primary key on id_keys: Sequence of keys to use as primary key Returns: The identity for that dict entry """ url = attr.ib(type=str) filename = attr.ib(type=Optional[str]) MANIFEST_FORMAT: Optional[string.Template] = None """If not None, used by the default extid() implementation to format a manifest, before hashing it to produce an ExtID.""" EXTID_TYPE: str = "package-manifest-sha256" # The following attribute has kw_only=True in order to allow subclasses # to add attributes. Without kw_only, attributes without default values cannot # go after attributes with default values. # See directory_extrinsic_metadata = attr.ib( type=List[RawExtrinsicMetadataCore], default=[], kw_only=True, ) # TODO: add support for metadata for directories and contents def extid(self) -> Optional[PartialExtID]: """Returns a unique intrinsic identifier of this package info, or None if this package info is not 'deduplicatable' (meaning that we will always load it, instead of checking the ExtID storage to see if we already did)""" if self.MANIFEST_FORMAT is None: return None else: manifest = self.MANIFEST_FORMAT.substitute( {k: str(v) for (k, v) in attr.asdict(self).items()} ) return (self.EXTID_TYPE, hashlib.sha256(manifest.encode()).digest()) TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo) class PackageLoader(BaseLoader, Generic[TPackageInfo]): # Origin visit type (str) set by the loader visit_type = "" visit_date: datetime.datetime def __init__( self, storage: StorageInterface, url: str, max_content_size: Optional[int] = None, ): """Loader's constructor. This raises exception if the minimal required configuration is missing (cf. fn:`check` method). Args: storage: Storage instance url: Origin url to load data from """ super().__init__(storage=storage, max_content_size=max_content_size) self.url = url self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Raises: `class:swh.loader.exception.NotFound` error when failing to read the published package versions. Returns: Sequence of published versions """ return [] def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]: """Given a release version of a package, retrieve the associated package information for such version. Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_revision( self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: """Build the revision from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. 
Args: p_info: Package information uncompressed_path: Artifact uncompressed path on disk Returns: Revision object """ raise NotImplementedError("build_revision") def get_default_version(self) -> str: """Retrieve the latest release version if any. Returns: Latest version """ return "" def last_snapshot(self) -> Optional[Snapshot]: """Retrieve the last snapshot out of the last visit. """ return snapshot_get_latest(self.storage, self.url) def known_artifacts( self, snapshot: Optional[Snapshot] ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]: """Retrieve the known releases/artifact for the origin. Args snapshot: snapshot for the visit Returns: Dict of keys revision id (bytes), values a metadata Dict. """ if not snapshot: return {} # retrieve only revisions (e.g the alias we do not want here) revs = [ rev.target for rev in snapshot.branches.values() if rev and rev.target_type == TargetType.REVISION ] known_revisions = self.storage.revision_get(revs) return { revision.id: revision.metadata for revision in known_revisions if revision } def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]: return p_info.extid() def known_artifact_to_extid(self, known_artifact: Dict) -> Optional[PartialExtID]: """Returns a unique intrinsic identifier of a downloaded artifact, used to check if a new artifact is the same.""" return None - def resolve_revision_from( - self, known_artifacts: Dict, p_info: TPackageInfo, - ) -> Optional[bytes]: - """Resolve the revision from a snapshot and an artifact metadata dict. + def resolve_revision_from_artifacts( + self, known_artifacts: Dict[Sha1Git, Any], p_info: TPackageInfo, + ) -> Optional[Sha1Git]: + """Resolve the revision from known artifact metadata and a package info object. If the artifact has already been downloaded, this will return the existing revision targeting that uncompressed artifact directory. Otherwise, this returns None. Args: - snapshot: Snapshot + known_artifacts: dict from revision ids to revision metadata p_info: Package information Returns: None or revision identifier """ if not known_artifacts: # No known artifact, no need to compute the artifact's extid return None new_extid = self.new_packageinfo_to_extid(p_info) if new_extid is None: # This loader does not support deduplication, at least not for this # artifact. return None for rev_id, known_artifact in known_artifacts.items(): known_extid = self.known_artifact_to_extid(known_artifact) if new_extid == known_extid: return rev_id return None def download_package( self, p_info: TPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in in the tmpdir folder. Default implementation expects the artifacts package info to be about one artifact per package. Note that most implementation have 1 artifact per package. But some implementation have multiple artifacts per package (debian), some have none, the package is the artifact (gnu). Args: artifacts_package_info: Information on the package artifacts to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ return [download(p_info.url, dest=tmpdir, filename=p_info.filename)] def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: """Uncompress the artifact(s) in the destination folder dest. Optionally, this could need to use the p_info dict for some more information (debian). 
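        For instance, the Debian loader overrides this method to delegate to
        ``extract_package`` (which runs ``dpkg-source -x``), roughly::

            def uncompress(self, dl_artifacts, dest):
                return extract_package(dl_artifacts, dest=dest)
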
""" uncompressed_path = os.path.join(dest, "src") for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: """Return an extra dict of branches that are used to update the set of branches. """ return {} def finalize_visit( self, *, snapshot: Optional[Snapshot], visit: OriginVisit, status_visit: str, status_load: str, failed_branches: List[str], ) -> Dict[str, Any]: """Finalize the visit: - flush eventual unflushed data to storage - update origin visit's status - return the task's status """ self.storage.flush() snapshot_id: Optional[bytes] = None if snapshot and snapshot.id: # to prevent the snapshot.id to b"" snapshot_id = snapshot.id assert visit.visit visit_status = OriginVisitStatus( origin=self.url, visit=visit.visit, type=self.visit_type, date=now(), status=status_visit, snapshot=snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) result: Dict[str, Any] = { "status": status_load, } if snapshot_id: result["snapshot_id"] = hash_to_hex(snapshot_id) if failed_branches: logger.warning("%d failed branches", len(failed_branches)) for i, urls in enumerate(islice(failed_branches, 50)): prefix_url = "Failed branches: " if i == 0 else "" logger.warning("%s%s", prefix_url, urls) return result def load(self) -> Dict: """Load for a specific origin the associated contents. for each package version of the origin 1. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 2. Extract the downloaded files By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 3. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 4. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 5. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 6. 
Generate and load the snapshot for the visit Using the revisions/releases collected at step 5., and the branch information from step 0., generate a snapshot and load it into the Software Heritage archive """ status_load = "uneventful" # either: eventful, uneventful, failed status_visit = "full" # see swh.model.model.OriginVisitStatus snapshot = None failed_branches: List[str] = [] # Prepare origin and origin_visit origin = Origin(url=self.url) try: self.storage.origin_add([origin]) visit = list( self.storage.origin_visit_add( [ OriginVisit( origin=self.url, date=self.visit_date, type=self.visit_type, ) ] ) )[0] except Exception as e: logger.exception("Failed to initialize origin_visit for %s", self.url) sentry_sdk.capture_exception(e) return {"status": "failed"} try: last_snapshot = self.last_snapshot() logger.debug("last snapshot: %s", last_snapshot) known_artifacts = self.known_artifacts(last_snapshot) logger.debug("known artifacts: %s", known_artifacts) except Exception as e: logger.exception("Failed to get previous state for %s", self.url) sentry_sdk.capture_exception(e) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", ) load_exceptions: List[Exception] = [] try: versions = self.get_versions() except NotFound: return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="not_found", status_load="failed", ) except Exception: return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", ) packages_info = [ (version, branch_name, p_info) for version in versions for (branch_name, p_info) in self.get_package_info(version) ] tmp_revisions: Dict[str, List[Tuple[str, Sha1Git]]] = { version: [] for version in versions } for (version, branch_name, p_info) in packages_info: logger.debug("package_info: %s", p_info) - revision_id = self.resolve_revision_from(known_artifacts, p_info) + revision_id = self.resolve_revision_from_artifacts(known_artifacts, p_info) if revision_id is None: try: res = self._load_revision(p_info, origin) if res: (revision_id, directory_id) = res assert revision_id assert directory_id self._load_extrinsic_directory_metadata( p_info, revision_id, directory_id ) self.storage.flush() status_load = "eventful" except Exception as e: self.storage.clear_buffers() load_exceptions.append(e) sentry_sdk.capture_exception(e) logger.exception( "Failed loading branch %s for %s", branch_name, self.url ) failed_branches.append(branch_name) continue if revision_id is None: continue tmp_revisions[version].append((branch_name, revision_id)) if load_exceptions: status_visit = "partial" if not tmp_revisions: # We could not load any revisions; fail completely return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", ) try: # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() logger.debug("default version: %s", default_version) # Retrieve extra branches extra_branches = self.extra_branches() logger.debug("extra branches: %s", extra_branches) snapshot = self._load_snapshot( default_version, tmp_revisions, extra_branches ) self.storage.flush() except Exception as e: logger.exception("Failed to build snapshot for origin %s", self.url) sentry_sdk.capture_exception(e) status_visit = "failed" status_load = "failed" if snapshot: try: metadata_objects = 
self.build_extrinsic_snapshot_metadata(snapshot.id) self._load_metadata_objects(metadata_objects) except Exception as e: logger.exception( "Failed to load extrinsic snapshot metadata for %s", self.url ) sentry_sdk.capture_exception(e) status_visit = "partial" status_load = "failed" try: metadata_objects = self.build_extrinsic_origin_metadata() self._load_metadata_objects(metadata_objects) except Exception as e: logger.exception( "Failed to load extrinsic origin metadata for %s", self.url ) sentry_sdk.capture_exception(e) status_visit = "partial" status_load = "failed" return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit=status_visit, status_load=status_load, ) def _load_directory( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str ) -> Tuple[str, from_disk.Directory]: uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir) logger.debug("uncompressed_path: %s", uncompressed_path) directory = from_disk.Directory.from_disk( path=uncompressed_path.encode("utf-8"), max_content_length=self.max_content_size, ) contents, skipped_contents, directories = from_disk.iter_directory(directory) logger.debug("Number of skipped contents: %s", len(skipped_contents)) self.storage.skipped_content_add(skipped_contents) logger.debug("Number of contents: %s", len(contents)) self.storage.content_add(contents) logger.debug("Number of directories: %s", len(directories)) self.storage.directory_add(directories) return (uncompressed_path, directory) def _load_revision( self, p_info: TPackageInfo, origin ) -> Optional[Tuple[Sha1Git, Sha1Git]]: """Does all the loading of a revision itself: * downloads a package and uncompresses it * loads it from disk * adds contents, directories, and revision to self.storage * returns (revision_id, directory_id) Raises exception when unable to download or uncompress artifacts """ with tempfile.TemporaryDirectory() as tmpdir: dl_artifacts = self.download_package(p_info, tmpdir) (uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir) # FIXME: This should be release. cf. D409 revision = self.build_revision( p_info, uncompressed_path, directory=directory.hash ) if not revision: # Some artifacts are missing intrinsic metadata # skipping those return None metadata = [metadata for (filepath, metadata) in dl_artifacts] extra_metadata: Tuple[str, Any] = ( "original_artifact", metadata, ) if revision.metadata is not None: full_metadata = list(revision.metadata.items()) + [extra_metadata] else: full_metadata = [extra_metadata] # TODO: don't add these extrinsic metadata to the revision. revision = attr.evolve(revision, metadata=ImmutableDict(full_metadata)) original_artifact_metadata = RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory ), discovery_date=self.visit_date, authority=SWH_METADATA_AUTHORITY, fetcher=self.get_metadata_fetcher(), format="original-artifacts-json", metadata=json.dumps(metadata).encode(), origin=self.url, revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=revision.id), ) self._load_metadata_objects([original_artifact_metadata]) logger.debug("Revision: %s", revision) self.storage.revision_add([revision]) assert directory.hash return (revision.id, directory.hash) def _load_snapshot( self, default_version: str, revisions: Dict[str, List[Tuple[str, bytes]]], extra_branches: Dict[bytes, Mapping[str, Any]], ) -> Optional[Snapshot]: """Build snapshot out of the current revisions stored and extra branches. 
Then load it in the storage. """ logger.debug("revisions: %s", revisions) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_revisions in revisions.items(): if version == default_version and len(branch_name_revisions) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_revisions[0] # except for some corner case (deposit) if branch_name != "HEAD": branches[b"HEAD"] = { "target_type": "alias", "target": branch_name.encode("utf-8"), } for branch_name, target in branch_name_revisions: branches[branch_name.encode("utf-8")] = { "target_type": "revision", "target": target, } # Deal with extra-branches for name, branch_target in extra_branches.items(): if name in branches: logger.error("Extra branch '%s' has been ignored", name) else: branches[name] = branch_target snapshot_data = {"branches": branches} logger.debug("snapshot: %s", snapshot_data) snapshot = Snapshot.from_dict(snapshot_data) logger.debug("snapshot: %s", snapshot) self.storage.snapshot_add([snapshot]) return snapshot def get_loader_name(self) -> str: """Returns a fully qualified name of this loader.""" return f"{self.__class__.__module__}.{self.__class__.__name__}" def get_loader_version(self) -> str: """Returns the version of the current loader.""" module_name = self.__class__.__module__ or "" module_name_parts = module_name.split(".") # Iterate rootward through the package hierarchy until we find a parent of this # loader's module with a __version__ attribute. for prefix_size in range(len(module_name_parts), 0, -1): package_name = ".".join(module_name_parts[0:prefix_size]) module = sys.modules[package_name] if hasattr(module, "__version__"): return module.__version__ # type: ignore # If this loader's class has no parent package with a __version__, # it should implement it itself. raise NotImplementedError( f"Could not dynamically find the version of {self.get_loader_name()}." ) def get_metadata_fetcher(self) -> MetadataFetcher: """Returns a MetadataFetcher instance representing this package loader; which is used to for adding provenance information to extracted extrinsic metadata, if any.""" return MetadataFetcher( name=self.get_loader_name(), version=self.get_loader_version(), metadata={}, ) def get_metadata_authority(self) -> MetadataAuthority: """For package loaders that get extrinsic metadata, returns the authority the metadata are coming from. """ raise NotImplementedError("get_metadata_authority") def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_origin_metadata.""" return [] def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_origin_metadata.""" metadata_items = self.get_extrinsic_origin_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. 
return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=Origin(self.url).swhid(), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, ) ) return metadata_objects def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_snapshot_metadata.""" return [] def build_extrinsic_snapshot_metadata( self, snapshot_id: Sha1Git ) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_snapshot_metadata.""" metadata_items = self.get_extrinsic_snapshot_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.url, ) ) return metadata_objects def build_extrinsic_directory_metadata( self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git, ) -> List[RawExtrinsicMetadata]: if not p_info.directory_extrinsic_metadata: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in p_info.directory_extrinsic_metadata: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.url, revision=CoreSWHID( object_type=ObjectType.REVISION, object_id=revision_id ), ) ) return metadata_objects def _load_extrinsic_directory_metadata( self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git, ) -> None: metadata_objects = self.build_extrinsic_directory_metadata( p_info, revision_id, directory_id ) self._load_metadata_objects(metadata_objects) def _load_metadata_objects( self, metadata_objects: List[RawExtrinsicMetadata] ) -> None: if not metadata_objects: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. 
return self._create_authorities(mo.authority for mo in metadata_objects) self._create_fetchers(mo.fetcher for mo in metadata_objects) self.storage.raw_extrinsic_metadata_add(metadata_objects) def _create_authorities(self, authorities: Iterable[MetadataAuthority]) -> None: deduplicated_authorities = { (authority.type, authority.url): authority for authority in authorities } if authorities: self.storage.metadata_authority_add(list(deduplicated_authorities.values())) def _create_fetchers(self, fetchers: Iterable[MetadataFetcher]) -> None: deduplicated_fetchers = { (fetcher.name, fetcher.version): fetcher for fetcher in fetchers } if fetchers: self.storage.metadata_fetcher_add(list(deduplicated_fetchers.values())) diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py index 3cc27ea..9b84b39 100644 --- a/swh/loader/package/nixguix/tests/test_nixguix.py +++ b/swh/loader/package/nixguix/tests/test_nixguix.py @@ -1,706 +1,706 @@ # Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os from typing import Dict, List, Optional, Tuple from unittest.mock import patch import attr import pytest from swh.loader.package import __version__ from swh.loader.package.archive.loader import ArchiveLoader from swh.loader.package.nixguix.loader import ( NixGuixLoader, NixGuixPackageInfo, clean_sources, make_pattern_unsupported_file_extension, parse_sources, retrieve_sources, ) from swh.loader.package.utils import download from swh.loader.tests import assert_last_visit_matches from swh.loader.tests import check_snapshot as check_snapshot_full from swh.loader.tests import get_stats from swh.model.hashutil import hash_to_bytes from swh.model.identifiers import ExtendedObjectType, ExtendedSWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.storage.algos.origin import origin_get_latest_visit_status from swh.storage.algos.snapshot import snapshot_get_all_branches from swh.storage.exc import HashCollision from swh.storage.interface import PagedResult, StorageInterface sources_url = "https://nix-community.github.io/nixpkgs-swh/sources.json" @pytest.fixture def raw_sources(datadir) -> bytes: with open( os.path.join( datadir, "https_nix-community.github.io", "nixpkgs-swh_sources.json" ), "rb", ) as f: return f.read() SNAPSHOT1 = Snapshot( id=hash_to_bytes("0c5881c74283793ebe9a09a105a9381e41380383"), branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"), target_type=TargetType.REVISION, ), b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch( target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"), target_type=TargetType.REVISION, ), }, ) def check_snapshot(snapshot: Snapshot, storage: StorageInterface): # The `evaluation` branch is allowed to be unresolvable. It's possible at current # nixguix visit time, it is not yet visited (the git loader is in charge of its # visit for now). For more details, check the # swh.loader.package.nixguix.NixGuixLoader.extra_branches docstring. 
check_snapshot_full( snapshot, storage, allowed_empty=[(TargetType.REVISION, b"evaluation")] ) assert isinstance(snapshot, Snapshot) # then ensure the snapshot revisions are structurally as expected revision_ids = [] for name, branch in snapshot.branches.items(): if name == b"evaluation": continue # skipping that particular branch (cf. previous comment) if branch.target_type == TargetType.REVISION: revision_ids.append(branch.target) revisions = storage.revision_get(revision_ids) for rev in revisions: assert rev is not None metadata = rev.metadata assert metadata is not None raw = metadata["extrinsic"]["raw"] assert "url" in raw assert "integrity" in raw def test_retrieve_sources(swh_storage, requests_mock_datadir): j = parse_sources(retrieve_sources(sources_url)) assert "sources" in j.keys() assert len(j["sources"]) == 2 def test_nixguix_url_not_found(swh_storage, requests_mock_datadir): """When failing to read from the url, the visit is marked as not_found. Here the sources url does not exist, so requests_mock_datadir returns a 404. Resulting in a NotFound raised within the package loader's main loop. This results in the task with status failed and a visit_status with status "not_found". """ unknown_url = "https://non-existing-url/" loader = NixGuixLoader(swh_storage, unknown_url) # during the retrieval step load_status = loader.load() assert load_status == {"status": "failed"} assert_last_visit_matches( swh_storage, unknown_url, status="not_found", type="nixguix", snapshot=None ) assert len(requests_mock_datadir.request_history) == 1 assert requests_mock_datadir.request_history[0].url == unknown_url def test_nixguix_url_with_decoding_error(swh_storage, requests_mock_datadir): """Other errors during communication with the url, the visit is marked as failed requests_mock_datadir will intercept the requests to sources_url. Since the file exists, returns a 200 with the requested content of the query. As file.txt is no json, fails do decode and raises a JSONDecodeError. In effect failing the visit. 
""" sources_url = "https://example.com/file.txt" loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() assert load_status == {"status": "failed"} assert_last_visit_matches( swh_storage, sources_url, status="failed", type="nixguix", snapshot=None ) assert len(requests_mock_datadir.request_history) == 1 assert requests_mock_datadir.request_history[0].url == sources_url def test_clean_sources_invalid_schema(swh_storage, requests_mock_datadir): sources = {} with pytest.raises(ValueError, match="sources structure invalid, missing: .*"): clean_sources(sources) def test_clean_sources_invalid_version(swh_storage, requests_mock_datadir): for version_ok in [1, "1"]: # Check those versions are fine clean_sources({"version": version_ok, "sources": [], "revision": "my-revision"}) for version_ko in [0, "0", 2, "2"]: # Check version != 1 raise an error with pytest.raises( ValueError, match="sources structure version .* is not supported" ): clean_sources( {"version": version_ko, "sources": [], "revision": "my-revision"} ) def test_clean_sources_invalid_sources(swh_storage, requests_mock_datadir): valid_sources = [ # 1 valid source {"type": "url", "urls": ["my-url.tar.gz"], "integrity": "my-integrity"}, ] sources = { "version": 1, "sources": valid_sources + [ # integrity is missing {"type": "url", "urls": ["my-url.tgz"],}, # urls is not a list {"type": "url", "urls": "my-url.zip", "integrity": "my-integrity"}, # type is not url {"type": "git", "urls": ["my-url.zip"], "integrity": "my-integrity"}, # missing fields which got double-checked nonetheless... {"integrity": "my-integrity"}, ], "revision": "my-revision", } clean = clean_sources(sources) assert len(clean["sources"]) == len(valid_sources) def test_make_pattern_unsupported_file_extension(): unsupported_extensions = ["el", "c", "txt"] supported_extensions = ["Z", "7z"] # for test actual_unsupported_pattern = make_pattern_unsupported_file_extension( unsupported_extensions ) for supported_ext in supported_extensions: assert supported_ext not in unsupported_extensions supported_filepath = f"anything.{supported_ext}" actual_match = actual_unsupported_pattern.match(supported_filepath) assert not actual_match for unsupported_ext in unsupported_extensions: unsupported_filepath = f"something.{unsupported_ext}" actual_match = actual_unsupported_pattern.match(unsupported_filepath) assert actual_match def test_clean_sources_unsupported_artifacts(swh_storage, requests_mock_datadir): unsupported_file_extensions = [ "iso", "whl", "gem", "pom", "msi", "pod", "png", "rock", "ttf", "jar", "c", "el", "rpm", "diff", "patch", ] supported_sources = [ { "type": "url", "urls": [f"https://server.org/my-url.{ext}"], "integrity": "my-integrity", } for ext in [ "known-unknown-but-ok", # this is fine as well with the current approach "zip", "tar.gz", "tgz", "tar.bz2", "tbz", "tbz2", "tar.xz", "tar", "zip", "7z", "Z", ] ] unsupported_sources = [ { "type": "url", "urls": [f"https://server.org/my-url.{ext}"], "integrity": "my-integrity", } for ext in unsupported_file_extensions ] sources = { "version": 1, "sources": supported_sources + unsupported_sources, "revision": "my-revision", } clean = clean_sources(sources, unsupported_file_extensions) assert len(clean["sources"]) == len(supported_sources) def test_loader_one_visit(swh_storage, requests_mock_datadir, raw_sources): loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res["status"] == "eventful" stats = get_stats(swh_storage) assert { "content": 1, "directory": 3, "origin": 1, 
"origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == stats # The visit is partial because urls pointing to non tarball file # are not handled yet assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix" ) visit_status = origin_get_latest_visit_status(swh_storage, sources_url) snapshot_swhid = ExtendedSWHID( object_type=ExtendedObjectType.SNAPSHOT, object_id=visit_status.snapshot ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url=sources_url, ) expected_metadata = [ RawExtrinsicMetadata( target=snapshot_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.nixguix.loader.NixGuixLoader", version=__version__, ), discovery_date=loader.visit_date, format="nixguix-sources-json", metadata=raw_sources, origin=sources_url, ) ] assert swh_storage.raw_extrinsic_metadata_get( snapshot_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) def test_uncompress_failure(swh_storage, requests_mock_datadir): """Non tarball files are currently not supported and the uncompress function fails on such kind of files. However, even in this case of failure (because of the url https://example.com/file.txt), a snapshot and a visit has to be created (with a status partial since all files are not archived). """ loader = NixGuixLoader(swh_storage, sources_url) loader_status = loader.load() sources = loader.supported_sources()["sources"] urls = [s["urls"][0] for s in sources] assert "https://example.com/file.txt" in urls assert loader_status["status"] == "eventful" # The visit is partial because urls pointing to non tarball files # are not handled yet assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix" ) def test_loader_incremental(swh_storage, requests_mock_datadir): """Ensure a second visit do not download artifact already downloaded by the previous visit. """ loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() loader.load() assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()} assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) check_snapshot(SNAPSHOT1, storage=swh_storage) urls = [ m.url for m in requests_mock_datadir.request_history if m.url == ("https://github.com/owner-1/repository-1/revision-1.tgz") ] # The artifact # 'https://github.com/owner-1/repository-1/revision-1.tgz' is only # visited one time assert len(urls) == 1 def test_loader_two_visits(swh_storage, requests_mock_datadir_visits): """To ensure there is only one origin, but two visits, two revisions and two snapshots are created. The first visit creates a snapshot containing one tarball. The second visit creates a snapshot containing the same tarball and another tarball. 
""" loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()} assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) check_snapshot(SNAPSHOT1, storage=swh_storage) stats = get_stats(swh_storage) assert { "content": 1, "directory": 3, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == stats loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() expected_snapshot_id_hex = "b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97" expected_snapshot_id = hash_to_bytes(expected_snapshot_id_hex) assert load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id_hex, } assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=expected_snapshot_id, ) # This ensures visits are incremental. Indeed, if we request a # second time an url, because of the requests_mock_datadir_visits # fixture, the file has to end with `_visit1`. expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("602140776b2ce6c9159bcf52ada73a297c063d5e"), target_type=TargetType.REVISION, ), b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch( target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"), target_type=TargetType.REVISION, ), b"https://github.com/owner-2/repository-1/revision-1.tgz": SnapshotBranch( target=hash_to_bytes("85e0bad74e33e390aaeb74f139853ae3863ee544"), target_type=TargetType.REVISION, ), }, ) check_snapshot(expected_snapshot, storage=swh_storage) stats = get_stats(swh_storage) assert { "content": 2, "directory": 5, "origin": 1, "origin_visit": 2, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 2, } == stats -def test_resolve_revision_from(swh_storage, requests_mock_datadir, datadir): +def test_resolve_revision_from_artifacts(swh_storage, requests_mock_datadir, datadir): loader = NixGuixLoader(swh_storage, sources_url) known_artifacts = { "id1": {"extrinsic": {"raw": {"url": "url1", "integrity": "integrity1"}}}, "id2": {"extrinsic": {"raw": {"url": "url2", "integrity": "integrity2"}}}, } p_info = NixGuixPackageInfo.from_metadata( {"url": "url1", "integrity": "integrity1"} ) - assert loader.resolve_revision_from(known_artifacts, p_info) == "id1" + assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) == "id1" p_info = NixGuixPackageInfo.from_metadata( {"url": "url3", "integrity": "integrity3"} ) - assert loader.resolve_revision_from(known_artifacts, p_info) == None # noqa + assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) is None def test_evaluation_branch(swh_storage, requests_mock_datadir): loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res["status"] == "eventful" assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) check_snapshot(SNAPSHOT1, storage=swh_storage) def test_eoferror(swh_storage, requests_mock_datadir): """Load a truncated archive which is invalid to make the uncompress function raising the exception EOFError. We then check if a snapshot is created, meaning this error is well managed. 
""" sources = ( "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa ) loader = NixGuixLoader(swh_storage, sources) loader.load() expected_snapshot = Snapshot( id=hash_to_bytes("4257fa2350168c6bfec726a06452ea27a2c0cb33"), branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"), target_type=TargetType.REVISION, ), }, ) check_snapshot(expected_snapshot, storage=swh_storage) def fake_download( url: str, dest: str, hashes: Dict = {}, filename: Optional[str] = None, auth: Optional[Tuple[str, str]] = None, ) -> Tuple[str, Dict]: """Fake download which raises HashCollision (for the sake of test simpliciy, let's accept that makes sense) For tests purpose only. """ if url == "https://example.com/file.txt": # instead of failing because it's a file not dealt with by the nix guix # loader, make it raise a hash collision raise HashCollision("sha1", "f92d74e3874587aaf443d1db961d4e26dde13e9c", []) return download(url, dest, hashes, filename, auth) def test_raise_exception(swh_storage, requests_mock_datadir, mocker): mock_download = mocker.patch("swh.loader.package.loader.download") mock_download.side_effect = fake_download loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res == { "status": "eventful", "snapshot_id": SNAPSHOT1.id.hex(), } check_snapshot(SNAPSHOT1, storage=swh_storage) assert len(mock_download.mock_calls) == 2 # The visit is partial because some artifact downloads failed assert_last_visit_matches( swh_storage, sources_url, status="partial", type="nixguix" ) def test_load_nixguix_one_common_artifact_from_other_loader( swh_storage, datadir, requests_mock_datadir_visits, caplog ): """Misformatted revision should be caught and logged, then loading continues """ caplog.set_level(logging.ERROR, "swh.loader.package.nixguix.loader") # 1. first ingest with for example the archive loader gnu_url = "https://ftp.gnu.org/gnu/8sync/" release = "0.1.0" artifact_url = f"https://ftp.gnu.org/gnu/8sync/8sync-{release}.tar.gz" gnu_artifacts = [ { "time": 944729610, "url": artifact_url, "length": 221837, "filename": f"8sync-{release}.tar.gz", "version": release, } ] archive_loader = ArchiveLoader(swh_storage, url=gnu_url, artifacts=gnu_artifacts) actual_load_status = archive_loader.load() expected_snapshot_id = "c419397fd912039825ebdbea378bc6283f006bf5" assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] == expected_snapshot_id # noqa assert_last_visit_matches( archive_loader.storage, gnu_url, status="full", type="tar" ) gnu_snapshot: Snapshot = snapshot_get_all_branches( archive_loader.storage, hash_to_bytes(expected_snapshot_id) ) first_revision = gnu_snapshot.branches[f"releases/{release}".encode("utf-8")] # 2. 
Then ingest with the nixguix loader which lists the same artifact within its # sources.json # ensure test setup is ok data_sources = os.path.join( datadir, "https_nix-community.github.io", "nixpkgs-swh_sources_special.json" ) all_sources = json.loads(open(data_sources).read()) found = False for source in all_sources["sources"]: if source["urls"][0] == artifact_url: found = True assert ( found is True ), f"test setup error: {artifact_url} must be in {data_sources}" # first visit with a snapshot, ok sources_url = "https://nix-community.github.io/nixpkgs-swh/sources_special.json" loader = NixGuixLoader(swh_storage, sources_url) actual_load_status2 = loader.load() assert actual_load_status2["status"] == "eventful" assert_last_visit_matches(swh_storage, sources_url, status="full", type="nixguix") snapshot_id = actual_load_status2["snapshot_id"] snapshot = snapshot_get_all_branches(swh_storage, hash_to_bytes(snapshot_id)) assert snapshot # 3. Then ingest again with the nixguix loader, with a different snapshot # and different source # simulate a snapshot already seen with a revision with the wrong metadata structure # This revision should be skipped, thus making the artifact being ingested again. with patch( "swh.loader.package.loader.PackageLoader.last_snapshot" ) as last_snapshot: # mutate the snapshot to target a revision with the wrong metadata structure # snapshot["branches"][artifact_url.encode("utf-8")] = first_revision old_revision = swh_storage.revision_get([first_revision.target])[0] # assert that revision is not in the right format assert old_revision.metadata["extrinsic"]["raw"].get("integrity", {}) == {} # mutate snapshot to create a clash snapshot = attr.evolve( snapshot, branches={ **snapshot.branches, artifact_url.encode("utf-8"): SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes(old_revision.id), ), }, ) # modify snapshot to actually change revision metadata structure so we simulate # a revision written by somebody else (structure different) last_snapshot.return_value = snapshot loader = NixGuixLoader(swh_storage, sources_url) actual_load_status3 = loader.load() assert last_snapshot.called assert actual_load_status3["status"] == "eventful" assert_last_visit_matches( swh_storage, sources_url, status="full", type="nixguix" ) new_snapshot_id = "32ff641e510aceefc3a6d0dcbf208b2854d2e965" assert actual_load_status3["snapshot_id"] == new_snapshot_id last_snapshot = snapshot_get_all_branches( swh_storage, hash_to_bytes(new_snapshot_id) ) new_revision_branch = last_snapshot.branches[artifact_url.encode("utf-8")] assert new_revision_branch.target_type == TargetType.REVISION new_revision = swh_storage.revision_get([new_revision_branch.target])[0] # the new revision has the correct structure, so it got ingested alright by the # new run assert new_revision.metadata["extrinsic"]["raw"]["integrity"] is not None actual_detections: List[Dict] = [] for record in caplog.records: logtext = record.getMessage() if "Unexpected metadata revision structure detected:" in logtext: actual_detections.append(record.args["context"]) expected_detections = [ {"reason": "'integrity'", "known_artifact": old_revision.metadata,}, {"reason": "'integrity'", "known_artifact": old_revision.metadata,}, ] # as many calls as there are sources listed in the sources.json assert len(expected_detections) == len(all_sources["sources"]) assert actual_detections == expected_detections diff --git a/swh/loader/package/tests/test_loader.py b/swh/loader/package/tests/test_loader.py index f56de08..28eb318 100644 
--- a/swh/loader/package/tests/test_loader.py
+++ b/swh/loader/package/tests/test_loader.py
@@ -1,136 +1,136 @@
# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import hashlib
import string
from unittest.mock import MagicMock

import attr
import pytest

from swh.loader.package.loader import BasePackageInfo, PackageLoader


class FakeStorage:
    def origin_add(self, origins):
        raise ValueError("We refuse to add an origin")

    def origin_visit_get_latest(self, origin):
        return None


class FakeStorage2(FakeStorage):
    def origin_add(self, origins):
        pass

    def origin_visit_add(self, visits):
        raise ValueError("We refuse to add an origin visit")


def test_loader_origin_visit_failure(swh_storage):
    """Failure to add an origin or an origin visit should fail immediately.

    """
    loader = PackageLoader(swh_storage, "some-url")
    loader.storage = FakeStorage()

    actual_load_status = loader.load()
    assert actual_load_status == {"status": "failed"}

    loader.storage = FakeStorage2()

    actual_load_status2 = loader.load()
    assert actual_load_status2 == {"status": "failed"}


-def test_resolve_revision_from():
+def test_resolve_revision_from_artifacts():
    loader = PackageLoader(None, None)
    loader.known_artifact_to_extid = MagicMock(
        wraps=lambda known_artifact: known_artifact["key"].encode()
    )

    known_artifacts = {
        b"a" * 40: {"key": "extid-of-aaaa"},
        b"b" * 40: {"key": "extid-of-bbbb"},
    }

    p_info = MagicMock()

    # No known artifact -> it would be useless to compute the extid
-    assert loader.resolve_revision_from({}, p_info) is None
+    assert loader.resolve_revision_from_artifacts({}, p_info) is None
    p_info.extid.assert_not_called()
    loader.known_artifact_to_extid.assert_not_called()

    p_info.extid.reset_mock()

    # Some artifacts, but the PackageInfo does not support extids
    p_info.extid.return_value = None
-    assert loader.resolve_revision_from(known_artifacts, p_info) is None
+    assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) is None
    p_info.extid.assert_called_once()
    loader.known_artifact_to_extid.assert_not_called()

    p_info.extid.reset_mock()

    # Some artifacts, and the PackageInfo is not one of them (ie. cache miss)
    p_info.extid.return_value = b"extid-of-cccc"
-    assert loader.resolve_revision_from(known_artifacts, p_info) is None
+    assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) is None
    p_info.extid.assert_called_once()
    loader.known_artifact_to_extid.assert_any_call({"key": "extid-of-aaaa"})
    loader.known_artifact_to_extid.assert_any_call({"key": "extid-of-bbbb"})

    p_info.extid.reset_mock()
    loader.known_artifact_to_extid.reset_mock()

    # Some artifacts, and the PackageInfo is one of them (ie.
cache hit) p_info.extid.return_value = b"extid-of-aaaa" - assert loader.resolve_revision_from(known_artifacts, p_info) == b"a" * 40 + assert loader.resolve_revision_from_artifacts(known_artifacts, p_info) == b"a" * 40 p_info.extid.assert_called_once() loader.known_artifact_to_extid.assert_called_once_with({"key": "extid-of-aaaa"}) def test_manifest_extid(): """Compute primary key should return the right identity """ @attr.s class TestPackageInfo(BasePackageInfo): a = attr.ib() b = attr.ib() length = attr.ib() filename = attr.ib() version = attr.ib() MANIFEST_FORMAT = string.Template("$a $b") p_info = TestPackageInfo( url="http://example.org/", a=1, b=2, length=221837, filename="8sync-0.1.0.tar.gz", version="0.1.0", ) actual_id = p_info.extid() assert actual_id == ("package-manifest-sha256", hashlib.sha256(b"1 2").digest()) def test_no_env_swh_config_filename_raise(monkeypatch): """No SWH_CONFIG_FILENAME environment variable makes package loader init raise """ class DummyPackageLoader(PackageLoader): """A dummy package loader for test purpose""" pass monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False) with pytest.raises( AssertionError, match="SWH_CONFIG_FILENAME environment variable is undefined" ): DummyPackageLoader.from_configfile(url="some-url")
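
The tests above exercise the renamed resolve_revision_from_artifacts hook. What follows is a minimal standalone sketch of the lookup they describe, assuming p_info.extid() and loader.known_artifact_to_extid() behave as they are mocked above; it is an illustration of the expected behaviour, not the library's implementation.

from typing import Any, Dict, Optional


def resolve_revision_from_artifacts_sketch(
    loader: Any, known_artifacts: Dict[bytes, Dict], p_info: Any
) -> Optional[bytes]:
    """Illustrative only: return the revision id of a previously loaded
    artifact whose extid matches the incoming package, or None."""
    if not known_artifacts:
        # No known artifacts: computing the extid would be useless.
        return None
    extid = p_info.extid()
    if extid is None:
        # This package format does not support extids.
        return None
    for revision_id, known_artifact in known_artifacts.items():
        # known_artifact_to_extid may return None for corrupted entries,
        # which then simply never match.
        if loader.known_artifact_to_extid(known_artifact) == extid:
            return revision_id
    return None

Called as resolve_revision_from_artifacts_sketch(loader, known_artifacts, p_info), this reproduces the four cases asserted in test_resolve_revision_from_artifacts: no candidates, no extid support, cache miss, and cache hit.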