diff --git a/CONTRIBUTORS b/CONTRIBUTORS index bcf34f8..f638976 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -1,2 +1,3 @@ Antoine Eiche Léo Andrès +Franck Bret diff --git a/PKG-INFO b/PKG-INFO index bc910d0..f91bfd7 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,56 +1,56 @@ Metadata-Version: 2.1 Name: swh.loader.core -Version: 3.3.0 +Version: 3.4.0 Summary: Software Heritage Base Loader Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-core Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core provides low-level loading utilities and helpers used by :term:`loaders <loader>`. The main entry points are classes: - :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) - :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations. This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found. diff --git a/docs/package-loader-specifications.rst b/docs/package-loader-specifications.rst index caad4ac..1d734b8 100644 --- a/docs/package-loader-specifications.rst +++ b/docs/package-loader-specifications.rst @@ -1,133 +1,133 @@ .. _package-loader-specifications: Package loader specifications ============================= Release fields -------------- Here is an overview of the fields (+ internal version name + branch name) used by each package loader, after D6616: ..
list-table:: Fields used by each package loader :header-rows: 1 * - Loader - internal version - branch name - name - message - synthetic - author - date - Notes * - archive - passed as arg - ``release_name(version)`` - =version - "Synthetic release for archive at {p_info.url}\n" - true - "" - passed as arg - * - cran - ``metadata.get("Version", passed as arg)`` - ``release_name(version)`` - =version - standard message - true - ``metadata.get("Maintainer", "")`` - ``metadata.get("Date")`` - metadata is intrinsic * - crates - - passed as arg + - ``p_info.version`` - ``release_name(version, filename)`` - =version - Synthetic release for Crate source package {p_info.name} version {p_info.version} {description} - true - from intrinsic metadata - from extrinsic metadata - ``i_metadata`` for intrinsic metadata, ``e_metadata`` for extrinsic metadata * - debian - =``version`` - ``release_name(version)`` - =``i_version`` - standard message (using ``i_version``) - true - ``metadata.changelog.person`` - ``metadata.changelog.date`` - metadata is intrinsic. Old revisions have ``dsc`` as type. ``i_version`` is the intrinsic version (eg. ``0.7.2-3``) while ``version`` contains the debian suite name (eg. ``stretch/contrib/0.7.2-3``) and is passed as arg * - deposit - HEAD - only HEAD - HEAD - "{client}: Deposit {id} in collection {collection}\n" - true - original author - ``codemeta:dateCreated`` from SWORD XML - revisions had parents * - maven-loader - passed as arg - HEAD - ``release_name(version)`` - "Synthetic release for archive at {p_info.url}\n" - true - "" - passed as arg - Only one artefact per url (jar/zip src) * - nixguix - URL - URL - URL - None - true - "" - None - it's the URL of the artifact referenced by the derivation * - npm - ``metadata["version"]`` - ``release_name(version)`` - =version - standard message - true - from int metadata or "" - from ext metadata or None - * - opam - as given by opam - "{opam_package}.{version}" - =version - standard message - true - from metadata - None - "{self.opam_package}.{version}" matches the version names used by opam's backend. metadata is extrinsic * - pypi - ``metadata["version"]`` - ``release_name(version)`` or ``release_name(version, filename)`` - =version - ``metadata['comment_text']`` or standard message - true - from int metadata or "" - from ext metadata or None - metadata is intrinsic using this function:: def release_name(version: str, filename: Optional[str] = None) -> str: if filename: return "releases/%s/%s" % (version, filename) return "releases/%s" % version and "standard message" being:: msg = ( f"Synthetic release for {PACKAGE_MANAGER} source package {name} " f"version {version}\n" ) The ``target_type`` field is always ``dir``, and the target is the id of a directory loaded by unpacking a tarball/zip file/...
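As a quick illustration of the branch naming scheme above, the ``release_name`` helper can be exercised directly (a minimal sketch reusing the definition quoted above; the sample version and filename are illustrative):

.. code-block:: python

    from typing import Optional

    def release_name(version: str, filename: Optional[str] = None) -> str:
        if filename:
            return "releases/%s/%s" % (version, filename)
        return "releases/%s" % version

    # branch name from a version alone (e.g. pypi without a filename)
    assert release_name("0.4.0") == "releases/0.4.0"
    # branch name from a version plus artifact filename (e.g. crates)
    assert release_name("0.4.0", "micro-timer-0.4.0.crate") == (
        "releases/0.4.0/micro-timer-0.4.0.crate"
    )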
diff --git a/swh.loader.core.egg-info/PKG-INFO b/swh.loader.core.egg-info/PKG-INFO index bc910d0..f91bfd7 100644 --- a/swh.loader.core.egg-info/PKG-INFO +++ b/swh.loader.core.egg-info/PKG-INFO @@ -1,56 +1,56 @@ Metadata-Version: 2.1 Name: swh.loader.core -Version: 3.3.0 +Version: 3.4.0 Summary: Software Heritage Base Loader Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-core Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core provides low-level loading utilities and helpers used by :term:`loaders <loader>`. The main entry points are classes: - :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) - :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations.
This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found. diff --git a/swh/loader/core/loader.py b/swh/loader/core/loader.py index b8adfa5..d73611c 100644 --- a/swh/loader/core/loader.py +++ b/swh/loader/core/loader.py @@ -1,528 +1,606 @@ # Copyright (C) 2015-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib import logging import os -from typing import Any, Dict, Iterable, List, Optional +import time +from typing import Any, ContextManager, Dict, Iterable, List, Optional, Union import sentry_sdk from swh.core.config import load_from_envvar +from swh.core.statsd import Statsd from swh.loader.core.metadata_fetchers import CredentialsType, get_fetchers_for_lister from swh.loader.exception import NotFound from swh.model.model import ( BaseContent, Content, Directory, Origin, OriginVisit, OriginVisitStatus, RawExtrinsicMetadata, Release, Revision, Sha1Git, SkippedContent, Snapshot, ) from swh.storage import get_storage from swh.storage.interface import StorageInterface from swh.storage.utils import now DEFAULT_CONFIG: Dict[str, Any] = { "max_content_size": 100 * 1024 * 1024, } class BaseLoader: """Base class for (D)VCS loaders (e.g Svn, Git, Mercurial, ...) or PackageLoader (e.g PyPI, Npm, CRAN, ...) A loader retrieves origin information (git/mercurial/svn repositories, pypi/npm/... package artifacts), ingests the contents/directories/revisions/releases/snapshot read from those artifacts and sends them to the archive through the storage backend. The main entry point for the loader is the :func:`load` function. Two static methods (:func:`from_config`, :func:`from_configfile`) centralize and ease loader instantiation from either a configuration dict or a configuration file. Some class examples: - :class:`SvnLoader` - :class:`GitLoader` - :class:`PyPILoader` - :class:`NpmLoader` Args: lister_name: Name of the lister which triggered this load. If provided, the loader will try to use the forge's API to retrieve extrinsic metadata lister_instance_name: Name of the lister instance which triggered this load. Must be None iff lister_name is, but it may be the empty string for listers with a single instance. """ visit_type: str origin: Origin loaded_snapshot_id: Optional[Sha1Git] parent_origins: Optional[List[Origin]] """If the given origin is a "forge fork" (ie.
created with the "Fork" button of GitHub-like forges), :meth:`build_extrinsic_origin_metadata` sets this to a list of origins it was forked from; closest parent first.""" def __init__( self, storage: StorageInterface, origin_url: str, logging_class: Optional[str] = None, save_data_path: Optional[str] = None, max_content_size: Optional[int] = None, lister_name: Optional[str] = None, lister_instance_name: Optional[str] = None, metadata_fetcher_credentials: CredentialsType = None, ): if lister_name == "": raise ValueError("lister_name must not be the empty string") if lister_name is None and lister_instance_name is not None: raise ValueError( f"lister_name is None but lister_instance_name is {lister_instance_name!r}" ) if lister_name is not None and lister_instance_name is None: raise ValueError( f"lister_instance_name is None but lister_name is {lister_name!r}" ) self.storage = storage self.origin = Origin(url=origin_url) self.max_content_size = int(max_content_size) if max_content_size else None self.lister_name = lister_name self.lister_instance_name = lister_instance_name self.metadata_fetcher_credentials = metadata_fetcher_credentials or {} if logging_class is None: logging_class = "%s.%s" % ( self.__class__.__module__, self.__class__.__name__, ) self.log = logging.getLogger(logging_class) _log = logging.getLogger("requests.packages.urllib3.connectionpool") _log.setLevel(logging.WARN) # possibly overridden in self.prepare method self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) self.loaded_snapshot_id = None if save_data_path: path = save_data_path os.stat(path) if not os.access(path, os.R_OK | os.W_OK): raise PermissionError("Permission denied: %r" % path) self.save_data_path = save_data_path self.parent_origins = None + self.statsd = Statsd( + namespace="swh_loader", constant_tags={"visit_type": self.visit_type} + ) + @classmethod def from_config(cls, storage: Dict[str, Any], **config: Any): """Instantiate a loader from a configuration dict. This is basically a backwards-compatibility shim for the CLI. Args: storage: instantiation config for the storage config: the configuration dict for the loader, with the following keys: - credentials (optional): credentials list for the scheduler - any other kwargs passed to the loader. Returns: the instantiated loader """ # Drop the legacy config keys which aren't used for this generation of loader. for legacy_key in ("storage", "celery"): config.pop(legacy_key, None) # Instantiate the storage storage_instance = get_storage(**storage) return cls(storage=storage_instance, **config) @classmethod def from_configfile(cls, **kwargs: Any): """Instantiate a loader from the configuration loaded from the SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their value is not None. 
Args: kwargs: kwargs passed to the loader instantiation """ config = dict(load_from_envvar(DEFAULT_CONFIG)) config.update({k: v for k, v in kwargs.items() if v is not None}) return cls.from_config(**config) def save_data(self) -> None: """Save the data associated with the current load""" raise NotImplementedError def get_save_data_path(self) -> str: """The path to which we archive the loader's raw data""" if not hasattr(self, "__save_data_path"): year = str(self.visit_date.year) assert self.origin url = self.origin.url.encode("utf-8") origin_url_hash = hashlib.sha1(url).hexdigest() path = "%s/sha1:%s/%s/%s" % ( self.save_data_path, origin_url_hash[0:2], origin_url_hash, year, ) os.makedirs(path, exist_ok=True) self.__save_data_path = path return self.__save_data_path def flush(self) -> None: """Flush any potential buffered data not sent to swh-storage.""" self.storage.flush() def cleanup(self) -> None: """Last step executed by the loader.""" raise NotImplementedError def _store_origin_visit(self) -> None: """Store origin and visit references. Sets the self.visit references.""" assert self.origin self.storage.origin_add([self.origin]) assert isinstance(self.visit_type, str) self.visit = list( self.storage.origin_visit_add( [ OriginVisit( origin=self.origin.url, date=self.visit_date, type=self.visit_type, ) ] ) )[0] def prepare(self) -> None: """Second step executed by the loader to prepare some state needed by the loader. Raises NotFound exception if the origin to ingest is not found. """ raise NotImplementedError def get_origin(self) -> Origin: """Get the origin that is currently being loaded. self.origin should be set in :func:`prepare` Returns: dict: an origin ready to be sent to storage by :func:`origin_add`. """ assert self.origin return self.origin def fetch_data(self) -> bool: """Fetch the data from the source the loader is currently loading (ex: git/hg/svn/... repository). Returns: a value that is interpreted as a boolean. If True, fetch_data needs to be called again to complete loading. """ raise NotImplementedError def store_data(self): """Store fetched data in the database. Should call the :func:`maybe_load_xyz` methods, which handle the bundles sent to storage, rather than sending directly. """ raise NotImplementedError def load_status(self) -> Dict[str, str]: """Detailed loading status. Defaults to logging an eventful load. Returns: a dictionary that is eventually passed back as the task's result to the scheduler, allowing tuning of the task recurrence mechanism. """ return { "status": "eventful", } def post_load(self, success: bool = True) -> None: """Permit the loader to do some additional actions according to status after the loading is done. The flag success indicates the loading's status. Defaults to doing nothing. It is up to the implementer of this method to make sure this does not break. Args: success (bool): the success status of the loading """ pass def visit_status(self) -> str: """Detailed visit status. Defaults to logging a full visit. """ return "full" def pre_cleanup(self) -> None: """As a first step, will try and check for dangling data to clean up. This should do its best to avoid raising issues.
""" pass def load(self) -> Dict[str, str]: r"""Loading logic for the loader to follow: - Store the actual ``origin_visit`` to storage - Call :meth:`prepare` to prepare any eventual state - Call :meth:`get_origin` to get the origin we work with and store - while True: - Call :meth:`fetch_data` to fetch the data to store - Call :meth:`store_data` to store the data - Call :meth:`cleanup` to clean up any eventual state put in place in :meth:`prepare` method. """ try: - self.pre_cleanup() + with self.statsd_timed("pre_cleanup"): + self.pre_cleanup() except Exception: msg = "Cleaning up dangling data failed! Continue loading." self.log.warning(msg) + sentry_sdk.capture_exception() self._store_origin_visit() assert ( self.visit.visit ), "The method `_store_origin_visit` should set the visit (OriginVisit)" self.log.info( "Load origin '%s' with type '%s'", self.origin.url, self.visit.type ) try: - metadata = self.build_extrinsic_origin_metadata() + with self.statsd_timed("build_extrinsic_origin_metadata"): + metadata = self.build_extrinsic_origin_metadata() self.load_metadata_objects(metadata) except Exception as e: sentry_sdk.capture_exception(e) # Do not fail the whole task if this is the only failure self.log.exception( "Failure while loading extrinsic origin metadata.", extra={ "swh_task_args": [], "swh_task_kwargs": { "origin": self.origin.url, "lister_name": self.lister_name, "lister_instance_name": self.lister_instance_name, }, }, ) + total_time_fetch_data = 0.0 + total_time_store_data = 0.0 + try: - self.prepare() + with self.statsd_timed("prepare"): + self.prepare() while True: + t1 = time.monotonic() more_data_to_fetch = self.fetch_data() + t2 = time.monotonic() + total_time_fetch_data += t2 - t1 self.store_data() + t3 = time.monotonic() + total_time_store_data += t3 - t2 if not more_data_to_fetch: break + self.statsd_timing("fetch_data", total_time_fetch_data * 1000.0) + self.statsd_timing("store_data", total_time_store_data * 1000.0) + + status = self.visit_status() visit_status = OriginVisitStatus( origin=self.origin.url, visit=self.visit.visit, type=self.visit_type, date=now(), - status=self.visit_status(), + status=status, snapshot=self.loaded_snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) - self.post_load() + success = True + with self.statsd_timed( + "post_load", tags={"success": success, "status": status} + ): + self.post_load() except Exception as e: + success = False if isinstance(e, NotFound): status = "not_found" task_status = "uneventful" else: status = "partial" if self.loaded_snapshot_id else "failed" task_status = "failed" self.log.exception( "Loading failure, updating to `%s` status", status, extra={ "swh_task_args": [], "swh_task_kwargs": { "origin": self.origin.url, "lister_name": self.lister_name, "lister_instance_name": self.lister_instance_name, }, }, ) + sentry_sdk.capture_exception() visit_status = OriginVisitStatus( origin=self.origin.url, visit=self.visit.visit, type=self.visit_type, date=now(), status=status, snapshot=self.loaded_snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) - self.post_load(success=False) + with self.statsd_timed( + "post_load", tags={"success": success, "status": status} + ): + self.post_load(success=success) return {"status": task_status} finally: - self.flush() - self.cleanup() + with self.statsd_timed( + "flush", tags={"success": success, "status": status} + ): + self.flush() + with self.statsd_timed( + "cleanup", tags={"success": success, "status": status} + ): + self.cleanup() return 
self.load_status() def load_metadata_objects( self, metadata_objects: List[RawExtrinsicMetadata] ) -> None: if not metadata_objects: return authorities = {mo.authority for mo in metadata_objects} self.storage.metadata_authority_add(list(authorities)) fetchers = {mo.fetcher for mo in metadata_objects} self.storage.metadata_fetcher_add(list(fetchers)) self.storage.raw_extrinsic_metadata_add(metadata_objects) def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using the metadata fetchers returned by :func:`get_fetchers_for_lister`.""" if self.lister_name is None: self.log.debug("lister_name not provided, skipping extrinsic origin metadata") return [] assert ( self.lister_instance_name is not None ), "lister_instance_name is None, but lister_name is not" metadata = [] - for cls in get_fetchers_for_lister(self.lister_name): + + fetcher_classes = get_fetchers_for_lister(self.lister_name) + + self.statsd_average("metadata_fetchers", len(fetcher_classes)) + + for cls in fetcher_classes: metadata_fetcher = cls( origin=self.origin, lister_name=self.lister_name, lister_instance_name=self.lister_instance_name, credentials=self.metadata_fetcher_credentials, ) - metadata.extend(metadata_fetcher.get_origin_metadata()) + with self.statsd_timed( + "fetch_one_metadata", tags={"fetcher": cls.FETCHER_NAME} + ): + metadata.extend(metadata_fetcher.get_origin_metadata()) if self.parent_origins is None: self.parent_origins = metadata_fetcher.get_parent_origins() + self.statsd_average( + "metadata_parent_origins", + len(self.parent_origins), + tags={"fetcher": cls.FETCHER_NAME}, + ) + self.statsd_average("metadata_objects", len(metadata)) return metadata + def statsd_timed(self, name: str, tags: Dict[str, Any] = {}) -> ContextManager: + """ + Wrapper for :meth:`swh.core.statsd.Statsd.timed`, which uses the standard + metric name and tags for loaders. + """ + return self.statsd.timed( + "operation_duration_seconds", tags={"operation": name, **tags} + ) + + def statsd_timing(self, name: str, value: float, tags: Dict[str, Any] = {}) -> None: + """ + Wrapper for :meth:`swh.core.statsd.Statsd.timing`, which uses the standard + metric name and tags for loaders. + """ + self.statsd.timing( + "operation_duration_seconds", value, tags={"operation": name, **tags} + ) + + def statsd_average( + self, name: str, value: Union[int, float], tags: Dict[str, Any] = {} + ) -> None: + """Increments both ``{name}_sum`` (by the ``value``) and ``{name}_count`` + (by ``1``), allowing Prometheus to compute the average ``value`` over + time.""" + self.statsd.increment(f"{name}_sum", value, tags=tags) + self.statsd.increment(f"{name}_count", tags=tags) + class DVCSLoader(BaseLoader): """This base class is a pattern for dvcs loaders (e.g. git, mercurial). Those loaders are able to load all the data in one go. For example, the :class:`BulkUpdater` loader defined in swh-loader-git. For other, stateful loaders (e.g. :class:`SWHSvnLoader`), inherit directly from :class:`BaseLoader`.
""" def cleanup(self) -> None: """Clean up an eventual state installed for computations.""" pass def has_contents(self) -> bool: """Checks whether we need to load contents""" return True def get_contents(self) -> Iterable[BaseContent]: """Get the contents that need to be loaded""" raise NotImplementedError def has_directories(self) -> bool: """Checks whether we need to load directories""" return True def get_directories(self) -> Iterable[Directory]: """Get the directories that need to be loaded""" raise NotImplementedError def has_revisions(self) -> bool: """Checks whether we need to load revisions""" return True def get_revisions(self) -> Iterable[Revision]: """Get the revisions that need to be loaded""" raise NotImplementedError def has_releases(self) -> bool: """Checks whether we need to load releases""" return True def get_releases(self) -> Iterable[Release]: """Get the releases that need to be loaded""" raise NotImplementedError def get_snapshot(self) -> Snapshot: """Get the snapshot that needs to be loaded""" raise NotImplementedError def eventful(self) -> bool: """Whether the load was eventful""" raise NotImplementedError def store_data(self) -> None: assert self.origin if self.save_data_path: self.save_data() if self.has_contents(): for obj in self.get_contents(): if isinstance(obj, Content): self.storage.content_add([obj]) elif isinstance(obj, SkippedContent): self.storage.skipped_content_add([obj]) else: raise TypeError(f"Unexpected content type: {obj}") if self.has_directories(): for directory in self.get_directories(): self.storage.directory_add([directory]) if self.has_revisions(): for revision in self.get_revisions(): self.storage.revision_add([revision]) if self.has_releases(): for release in self.get_releases(): self.storage.release_add([release]) snapshot = self.get_snapshot() self.storage.snapshot_add([snapshot]) self.flush() self.loaded_snapshot_id = snapshot.id diff --git a/swh/loader/core/metadata_fetchers.py b/swh/loader/core/metadata_fetchers.py index ae25be4..c03dd6a 100644 --- a/swh/loader/core/metadata_fetchers.py +++ b/swh/loader/core/metadata_fetchers.py @@ -1,50 +1,51 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import functools from typing import Dict, List, Optional, Set, Type import pkg_resources from typing_extensions import Protocol, runtime_checkable from swh.model.model import Origin, RawExtrinsicMetadata CredentialsType = Optional[Dict[str, Dict[str, List[Dict[str, str]]]]] @runtime_checkable class MetadataFetcherProtocol(Protocol): """Interface provided by :class:`swh.loader.metadata.base.BaseMetadataFetcher` to loaders, via setuptools entrypoints.""" SUPPORTED_LISTERS: Set[str] + FETCHER_NAME: str def __init__( self, origin: Origin, credentials: CredentialsType, lister_name: str, lister_instance_name: str, ): ... def get_origin_metadata(self) -> List[RawExtrinsicMetadata]: ... def get_parent_origins(self) -> List[Origin]: ... 
@functools.lru_cache() def _fetchers() -> List[Type[MetadataFetcherProtocol]]: classes = [] for entry_point in pkg_resources.iter_entry_points("swh.loader.metadata"): classes.append(entry_point.load()) return classes def get_fetchers_for_lister(lister_name: str) -> List[Type[MetadataFetcherProtocol]]: return [cls for cls in _fetchers() if lister_name in cls.SUPPORTED_LISTERS] diff --git a/swh/loader/core/tests/test_loader.py b/swh/loader/core/tests/test_loader.py index e8c20d0..ff3a359 100644 --- a/swh/loader/core/tests/test_loader.py +++ b/swh/loader/core/tests/test_loader.py @@ -1,361 +1,461 @@ # Copyright (C) 2018-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib import logging -from unittest.mock import MagicMock +import time +from unittest.mock import MagicMock, call import pytest from swh.loader.core.loader import BaseLoader, DVCSLoader from swh.loader.core.metadata_fetchers import MetadataFetcherProtocol from swh.loader.exception import NotFound from swh.loader.tests import assert_last_visit_matches from swh.model.hashutil import hash_to_bytes from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, Origin, RawExtrinsicMetadata, Snapshot, ) import swh.storage.exc ORIGIN = Origin(url="some-url") PARENT_ORIGIN = Origin(url="base-origin-url") METADATA_AUTHORITY = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="http://example.org/" ) REMD = RawExtrinsicMetadata( target=ORIGIN.swhid(), discovery_date=datetime.datetime.now(tz=datetime.timezone.utc), authority=METADATA_AUTHORITY, fetcher=MetadataFetcher( name="test fetcher", version="0.0.1", ), format="test-format", metadata=b'{"foo": "bar"}', ) class DummyLoader: """Base Loader to overload and simplify the base class (technical: to avoid repetition in other *Loader classes)""" visit_type = "git" def __init__(self, storage, *args, **kwargs): super().__init__(storage, ORIGIN.url, *args, **kwargs) def cleanup(self): pass def prepare(self, *args, **kwargs): pass def fetch_data(self): pass def get_snapshot_id(self): return None class DummyDVCSLoader(DummyLoader, DVCSLoader): """DVCS Loader that does nothing in regards to DAG objects.""" def get_contents(self): return [] def get_directories(self): return [] def get_revisions(self): return [] def get_releases(self): return [] def get_snapshot(self): return Snapshot(branches={}) def eventful(self): return False class DummyBaseLoader(DummyLoader, BaseLoader): """Buffered loader will send new data when threshold is reached""" def store_data(self): pass class DummyMetadataFetcher: - SUPPORTED_LISTERS = {"fake-lister"} + SUPPORTED_LISTERS = {"fake-forge"} + FETCHER_NAME = "fake-forge" def __init__(self, origin, credentials, lister_name, lister_instance_name): pass def get_origin_metadata(self): return [REMD] def get_parent_origins(self): return [] class DummyMetadataFetcherWithFork: - SUPPORTED_LISTERS = {"fake-lister"} + SUPPORTED_LISTERS = {"fake-forge"} + FETCHER_NAME = "fake-forge" def __init__(self, origin, credentials, lister_name, lister_instance_name): pass def get_origin_metadata(self): return [REMD] def get_parent_origins(self): return [PARENT_ORIGIN] def test_types(): assert isinstance( DummyMetadataFetcher(None, None, None, None), MetadataFetcherProtocol ) assert isinstance( DummyMetadataFetcherWithFork(None, None, None, None), 
MetadataFetcherProtocol ) def test_base_loader(swh_storage): loader = DummyBaseLoader(swh_storage) result = loader.load() assert result == {"status": "eventful"} def test_base_loader_with_config(swh_storage): loader = DummyBaseLoader(swh_storage, "logger-name") result = loader.load() assert result == {"status": "eventful"} def test_base_loader_with_known_lister_name(swh_storage, mocker): fetcher_cls = MagicMock(wraps=DummyMetadataFetcher) fetcher_cls.SUPPORTED_LISTERS = DummyMetadataFetcher.SUPPORTED_LISTERS + fetcher_cls.FETCHER_NAME = "fake-forge" mocker.patch( "swh.loader.core.metadata_fetchers._fetchers", return_value=[fetcher_cls] ) loader = DummyBaseLoader( - swh_storage, lister_name="fake-lister", lister_instance_name="" + swh_storage, lister_name="fake-forge", lister_instance_name="" ) + statsd_report = mocker.patch.object(loader.statsd, "_report") result = loader.load() assert result == {"status": "eventful"} fetcher_cls.assert_called_once() fetcher_cls.assert_called_once_with( origin=ORIGIN, credentials={}, - lister_name="fake-lister", + lister_name="fake-forge", lister_instance_name="", ) assert swh_storage.raw_extrinsic_metadata_get( ORIGIN.swhid(), METADATA_AUTHORITY ).results == [REMD] assert loader.parent_origins == [] + assert [ + call("metadata_fetchers_sum", "c", 1, {}, 1), + call("metadata_fetchers_count", "c", 1, {}, 1), + call("metadata_parent_origins_sum", "c", 0, {"fetcher": "fake-forge"}, 1), + call("metadata_parent_origins_count", "c", 1, {"fetcher": "fake-forge"}, 1), + call("metadata_objects_sum", "c", 1, {}, 1), + call("metadata_objects_count", "c", 1, {}, 1), + ] == [c for c in statsd_report.mock_calls if "metadata_" in c[1][0]] + assert loader.statsd.namespace == "swh_loader" + assert loader.statsd.constant_tags == {"visit_type": "git"} + def test_base_loader_with_unknown_lister_name(swh_storage, mocker): fetcher_cls = MagicMock(wraps=DummyMetadataFetcher) fetcher_cls.SUPPORTED_LISTERS = DummyMetadataFetcher.SUPPORTED_LISTERS mocker.patch( "swh.loader.core.metadata_fetchers._fetchers", return_value=[fetcher_cls] ) loader = DummyBaseLoader( swh_storage, lister_name="other-lister", lister_instance_name="" ) result = loader.load() assert result == {"status": "eventful"} fetcher_cls.assert_not_called() with pytest.raises(swh.storage.exc.StorageArgumentException): swh_storage.raw_extrinsic_metadata_get(ORIGIN.swhid(), METADATA_AUTHORITY) def test_base_loader_forked_origin(swh_storage, mocker): fetcher_cls = MagicMock(wraps=DummyMetadataFetcherWithFork) fetcher_cls.SUPPORTED_LISTERS = DummyMetadataFetcherWithFork.SUPPORTED_LISTERS + fetcher_cls.FETCHER_NAME = "fake-forge" mocker.patch( "swh.loader.core.metadata_fetchers._fetchers", return_value=[fetcher_cls] ) loader = DummyBaseLoader( - swh_storage, lister_name="fake-lister", lister_instance_name="" + swh_storage, lister_name="fake-forge", lister_instance_name="" ) + statsd_report = mocker.patch.object(loader.statsd, "_report") result = loader.load() assert result == {"status": "eventful"} fetcher_cls.assert_called_once() fetcher_cls.assert_called_once_with( origin=ORIGIN, credentials={}, - lister_name="fake-lister", + lister_name="fake-forge", lister_instance_name="", ) assert swh_storage.raw_extrinsic_metadata_get( ORIGIN.swhid(), METADATA_AUTHORITY ).results == [REMD] assert loader.parent_origins == [PARENT_ORIGIN] + assert [ + call("metadata_fetchers_sum", "c", 1, {}, 1), + call("metadata_fetchers_count", "c", 1, {}, 1), + call("metadata_parent_origins_sum", "c", 1, {"fetcher": "fake-forge"}, 1), + 
call("metadata_parent_origins_count", "c", 1, {"fetcher": "fake-forge"}, 1), + call("metadata_objects_sum", "c", 1, {}, 1), + call("metadata_objects_count", "c", 1, {}, 1), + ] == [c for c in statsd_report.mock_calls if "metadata_" in c[1][0]] + assert loader.statsd.namespace == "swh_loader" + assert loader.statsd.constant_tags == {"visit_type": "git"} + def test_dvcs_loader(swh_storage): loader = DummyDVCSLoader(swh_storage) result = loader.load() assert result == {"status": "eventful"} def test_dvcs_loader_with_config(swh_storage): loader = DummyDVCSLoader(swh_storage, "another-logger") result = loader.load() assert result == {"status": "eventful"} def test_loader_logger_default_name(swh_storage): loader = DummyBaseLoader(swh_storage) assert isinstance(loader.log, logging.Logger) assert loader.log.name == "swh.loader.core.tests.test_loader.DummyBaseLoader" loader = DummyDVCSLoader(swh_storage) assert isinstance(loader.log, logging.Logger) assert loader.log.name == "swh.loader.core.tests.test_loader.DummyDVCSLoader" def test_loader_logger_with_name(swh_storage): loader = DummyBaseLoader(swh_storage, "some.logger.name") assert isinstance(loader.log, logging.Logger) assert loader.log.name == "some.logger.name" def test_loader_save_data_path(swh_storage, tmp_path): loader = DummyBaseLoader(swh_storage, "some.logger.name.1", save_data_path=tmp_path) url = "http://bitbucket.org/something" loader.origin = Origin(url=url) loader.visit_date = datetime.datetime(year=2019, month=10, day=1) hash_url = hashlib.sha1(url.encode("utf-8")).hexdigest() expected_save_path = "%s/sha1:%s/%s/2019" % (str(tmp_path), hash_url[0:2], hash_url) save_path = loader.get_save_data_path() assert save_path == expected_save_path def _check_load_failure(caplog, loader, exc_class, exc_text, status="partial"): """Check whether a failed load properly logged its exception, and that the snapshot didn't get referenced in storage""" assert isinstance(loader, DVCSLoader) # was implicit so far for record in caplog.records: if record.levelname != "ERROR": continue assert "Loading failure" in record.message assert record.exc_info exc = record.exc_info[1] assert isinstance(exc, exc_class) assert exc_text in exc.args[0] # Check that the get_snapshot operation would have succeeded assert loader.get_snapshot() is not None # And confirm that the visit doesn't reference a snapshot visit = assert_last_visit_matches(loader.storage, ORIGIN.url, status) if status != "partial": assert visit.snapshot is None # But that the snapshot didn't get loaded assert loader.loaded_snapshot_id is None +@pytest.mark.parametrize("success", [True, False]) +def test_loader_timings(swh_storage, mocker, success): + current_time = time.time() + mocker.patch("time.monotonic", side_effect=lambda: current_time) + mocker.patch("swh.core.statsd.monotonic", side_effect=lambda: current_time) + + runtimes = { + "pre_cleanup": 2.0, + "build_extrinsic_origin_metadata": 3.0, + "prepare": 5.0, + "fetch_data": 7.0, + "store_data": 11.0, + "post_load": 13.0, + "flush": 17.0, + "cleanup": 23.0, + } + + class TimedLoader(BaseLoader): + visit_type = "my-visit-type" + + def __getattribute__(self, method_name): + if method_name == "visit_status" and not success: + + def crashy(): + raise Exception("oh no") + + return crashy + + if method_name not in runtimes: + return super().__getattribute__(method_name) + + def meth(*args, **kwargs): + nonlocal current_time + current_time += runtimes[method_name] + + return meth + + loader = TimedLoader(swh_storage, 
origin_url="http://example.org/hello.git") + statsd_report = mocker.patch.object(loader.statsd, "_report") + loader.load() + + if success: + expected_tags = { + "post_load": {"success": True, "status": "full"}, + "flush": {"success": True, "status": "full"}, + "cleanup": {"success": True, "status": "full"}, + } + else: + expected_tags = { + "post_load": {"success": False, "status": "failed"}, + "flush": {"success": False, "status": "failed"}, + "cleanup": {"success": False, "status": "failed"}, + } + + # note that this is a list equality, so order of entries in 'runtimes' matters. + # This is not perfect, but call() objects are not hashable so it's simpler this way, + # even if not perfect. + assert statsd_report.mock_calls == [ + call( + "operation_duration_seconds", + "ms", + value * 1000, + {"operation": key, **expected_tags.get(key, {})}, + 1, + ) + for (key, value) in runtimes.items() + ] + assert loader.statsd.namespace == "swh_loader" + assert loader.statsd.constant_tags == {"visit_type": "my-visit-type"} + + class DummyDVCSLoaderExc(DummyDVCSLoader): """A loader which raises an exception when loading some contents""" def get_contents(self): raise RuntimeError("Failed to get contents!") def test_dvcs_loader_exc_partial_visit(swh_storage, caplog): logger_name = "dvcsloaderexc" caplog.set_level(logging.ERROR, logger=logger_name) loader = DummyDVCSLoaderExc(swh_storage, logging_class=logger_name) # fake the loading ending up in a snapshot loader.loaded_snapshot_id = hash_to_bytes( "9e4dd2b40d1b46b70917c0949aa2195c823a648e" ) result = loader.load() # loading failed assert result == {"status": "failed"} # still resulted in a partial visit with a snapshot (somehow) _check_load_failure( caplog, loader, RuntimeError, "Failed to get contents!", ) class BrokenStorageProxy: def __init__(self, storage): self.storage = storage def __getattr__(self, attr): return getattr(self.storage, attr) def snapshot_add(self, snapshots): raise RuntimeError("Failed to add snapshot!") class DummyDVCSLoaderStorageExc(DummyDVCSLoader): """A loader which raises an exception when loading some contents""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.storage = BrokenStorageProxy(self.storage) def test_dvcs_loader_storage_exc_failed_visit(swh_storage, caplog): logger_name = "dvcsloaderexc" caplog.set_level(logging.ERROR, logger=logger_name) loader = DummyDVCSLoaderStorageExc(swh_storage, logging_class=logger_name) result = loader.load() assert result == {"status": "failed"} _check_load_failure( caplog, loader, RuntimeError, "Failed to add snapshot!", status="failed" ) class DummyDVCSLoaderNotFound(DummyDVCSLoader, BaseLoader): """A loader which raises a not_found exception during the prepare method call""" def prepare(*args, **kwargs): raise NotFound("Unknown origin!") def load_status(self): return { "status": "uneventful", } def test_loader_not_found(swh_storage, caplog): loader = DummyDVCSLoaderNotFound(swh_storage) result = loader.load() assert result == {"status": "uneventful"} _check_load_failure(caplog, loader, NotFound, "Unknown origin!", status="not_found") diff --git a/swh/loader/package/crates/loader.py b/swh/loader/package/crates/loader.py index 4b59015..c61d126 100644 --- a/swh/loader/package/crates/loader.py +++ b/swh/loader/package/crates/loader.py @@ -1,355 +1,354 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See 
top-level LICENSE file for more information from distutils.version import StrictVersion import json from pathlib import Path from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple from urllib.parse import urlparse import attr import toml from typing_extensions import TypedDict from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import api_info, cached_method, release_name from swh.model.model import ObjectType, Person, Release, Sha1Git, TimestampWithTimezone from swh.storage.interface import StorageInterface class ExtrinsicPackageMetadata(TypedDict): """Data structure for package extrinsic metadata pulled from http api endpoint. We set only the keys we need according to what is available when querying https://crates.io/api/v1/crates/<name>, where `name` is the name of the crate - package. - - Json response example for https://crates.io/api/v1/crates/hg-core : - - .. literalinclude:: ../../swh/loader/package/crates/tests/data/https_crates.io/api_v1_crates_hg-core + package (see JSON response example at https://crates.io/api/v1/crates/hg-core). Usage example: .. code-block:: python e_metadata = ExtrinsicPackageMetadata(**self.info()) """ # noqa categories: List[Dict[Any, Any]] """Related categories""" crate: Dict[Any, Any] """Crate project information""" keywords: List[Any] """Keywords""" versions: List[Dict[Any, Any]] """A list of released versions for a crate""" class ExtrinsicVersionPackageMetadata(TypedDict): """Data structure for specific package version extrinsic metadata, pulled from http api endpoint. Similar to `ExtrinsicPackageMetadata` in its usage, but we flatten the data related to a specific version. """ crate: str """The package name""" crate_size: int """The package size""" created_at: str """First released at""" downloads: str """Number of downloads""" license: str """Package license""" num: str """Package version""" published_by: Dict[Any, Any] """Publishers information""" updated_at: str """Last update""" yanked: bool """Is that version yanked? (yanked means release-level deprecation)""" class IntrinsicPackageMetadata(TypedDict): """Data structure for specific package version intrinsic metadata. Data is extracted from the crate package's .toml file. Then the data of the 'package' entry is flattened. Cargo.toml file content example: ..
code-block:: toml [package] name = "hg-core" version = "0.0.1" authors = ["Georges Racinet "] description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)" homepage = "https://mercurial-scm.org" license = "GPL-2.0-or-later" repository = "https://www.mercurial-scm.org/repo/hg" [lib] name = "hg" [dev-dependencies.rand] version = "~0.6" [dev-dependencies.rand_pcg] version = "~0.1" :param toml: toml object """ name: str """The package name""" version: str """Package version""" authors: List[str] """Authors""" description: str """Package and release description""" homepage: str """Homepage of the project""" license: str """Package license""" repository: str """Source code repository""" @attr.s class CratesPackageInfo(BasePackageInfo): name = attr.ib(type=str) """Name of the package""" version = attr.ib(type=str) """Current version""" e_metadata: Dict[str, Any] = attr.ib(factory=ExtrinsicPackageMetadata) """Extrinsic package metadata, common to all versions""" e_metadata_version: Dict[str, Any] = attr.ib( factory=ExtrinsicVersionPackageMetadata ) """Extrinsic package metadata specific to a version""" i_metadata: Dict[str, Any] = attr.ib(factory=IntrinsicPackageMetadata) """Intrinsic metadata of the current package version""" def extract_intrinsic_metadata(dir_path: Path) -> Dict[str, Any]: """Extract intrinsic metadata from Cargo.toml file at dir_path. Each crate archive has a Cargo.toml at the root of the archive. Args: dir_path: A directory on disk where a Cargo.toml must be present Returns: A dict mapping from the toml parser """ return toml.load(dir_path / "Cargo.toml") def extract_author(p_info: CratesPackageInfo) -> Person: """Extract package author from intrinsic metadata and return it as a `Person` model. Args: p_info: CratesPackageInfo that should contain i_metadata entries Returns: Only one author (Person) of the package. Currently limited by an internal detail of the swh stack (see T3887). """ authors = p_info.i_metadata["authors"] fullname = authors[0] # TODO: here we have a list of authors, see T3887 return Person.from_fullname(fullname.encode()) def extract_description(p_info: CratesPackageInfo) -> str: """Extract package description from intrinsic metadata and return it as a string. Args: p_info: CratesPackageInfo that should contain i_metadata entries Returns: Package description from metadata. """ return p_info.i_metadata["description"] class CratesLoader(PackageLoader[CratesPackageInfo]): """Load Crates package origins into swh archive.""" visit_type = "crates" def __init__( self, storage: StorageInterface, url: str, - package_name: str, - version: str, - checksum: Optional[str] = None, + artifacts: List[Dict[str, Any]], max_content_size: Optional[int] = None, ): """Constructor Args: - url : str - Origin url (e.g. - https://static.crates.io/crates/<package_name>/<package_name>-<version>.crate) + url: + Origin url (e.g. https://crates.io/api/v1/crates/<package_name>) - package_name : str - Crate package name + artifacts: + A list of dicts listing all existing released versions for a + package (usually set with the crates lister's `extra_loader_arguments`). + Each entry is a dict that should have an `url` + (where to download the package's specific version) and a `version` entry.
- version : str - Crate package version - checksum : str, optional - Checksum for the package file to download - """ + Example:: + + [ + { + "version": <version>, + "url": "https://static.crates.io/crates/<package_name>/<package_name>-<version>.crate", + } + ] + """ # noqa super().__init__(storage=storage, url=url, max_content_size=max_content_size) - self.name = package_name - self.provider_url = f"https://crates.io/api/v1/crates/{self.name}" - # Check consistency of name, version, url - filename = urlparse(url).path.split("/")[-1] - assert f"{self.name}-{version}.crate" == filename + self.url = url + self.artifacts: Dict[str, Dict] = { + artifact["version"]: artifact for artifact in artifacts + } @cached_method def _raw_info(self) -> bytes: - """Get crate metadata (fetched from http api endpoint set as self.provider_url) + """Get crate metadata (fetched from http api endpoint set as self.url) Returns: Content response as bytes. Content response is a json document. """ - return api_info(self.provider_url) + return api_info(self.url) @cached_method def info(self) -> Dict: """Parse http api json response and return the crate metadata information as a Dict.""" return json.loads(self._raw_info()) def get_versions(self) -> Sequence[str]: """Get all released versions of a crate Returns: A sequence of versions Example:: ["0.1.1", "0.10.2"] """ - versions = [version["num"] for version in self.info()["versions"]] + versions = list(self.artifacts.keys()) versions.sort(key=StrictVersion) return versions def get_default_version(self) -> str: """Get the newest release version of a crate Returns: A string representing a version Example:: "0.1.2" """ - return self.info()["crate"]["newest_version"] + return self.get_versions()[-1] def get_package_info(self, version: str) -> Iterator[Tuple[str, CratesPackageInfo]]: """Get release name and package information from version Args: version: crate version (e.g: "0.1.0") Returns: Iterator of tuple (release_name, p_info) """ - filename = f"{self.name}-{version}.crate" - url = f"https://static.crates.io/crates/{self.name}/{self.name}-{version}.crate" + artifact = self.artifacts[version] + filename = artifact["filename"] + package_name = urlparse(self.url).path.split("/")[-1] + url = artifact["url"] # Get extrinsic metadata from http api - - # Raw crate info e_metadata = ExtrinsicPackageMetadata(**self.info()) # type: ignore[misc] + # Extract crate info for current version (One .crate file for a given version) (crate_version,) = [ crate for crate in e_metadata["versions"] if crate["num"] == version ] e_metadata_version = ExtrinsicVersionPackageMetadata( # type: ignore[misc] **crate_version ) p_info = CratesPackageInfo( - name=self.name, + name=package_name, filename=filename, url=url, version=version, e_metadata=e_metadata, e_metadata_version=e_metadata_version, ) yield release_name(version, filename), p_info def build_release( self, p_info: CratesPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: # Extract intrinsic metadata from dir_path/Cargo.toml name = p_info.name version = p_info.version dir_path = Path(uncompressed_path, f"{name}-{version}") i_metadata_raw = extract_intrinsic_metadata(dir_path) # Get only corresponding key of IntrinsicPackageMetadata i_metadata_keys = [k for k in IntrinsicPackageMetadata.__annotations__.keys()] # We use data only from "package" entry - i_metadata = { k: v for k, v in i_metadata_raw["package"].items() if k in i_metadata_keys } p_info.i_metadata = IntrinsicPackageMetadata(**i_metadata) # type: ignore[misc] author = extract_author(p_info)
description = extract_description(p_info) message = ( f"Synthetic release for Crate source package {p_info.name} " f"version {p_info.version}\n" f"{description}\n" ) # The only way to get a value for updated_at is through extrinsic metadata updated_at = p_info.e_metadata_version.get("updated_at") return Release( name=version.encode(), author=author, date=TimestampWithTimezone.from_iso8601(updated_at), message=message.encode(), target_type=ObjectType.DIRECTORY, target=directory, synthetic=True, ) diff --git a/swh/loader/package/crates/tasks.py b/swh/loader/package/crates/tasks.py index 2d774c9..9385263 100644 --- a/swh/loader/package/crates/tasks.py +++ b/swh/loader/package/crates/tasks.py @@ -1,16 +1,14 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.crates.loader import CratesLoader @shared_task(name=__name__ + ".LoadCrates") -def load_crates(*, url=None, package_name: str, version: str, checksum=None): +def load_crates(*, url=None, artifacts: list): """Load Rust crate package""" - return CratesLoader.from_configfile( - url=url, package_name=package_name, version=version, checksum=checksum - ).load() + return CratesLoader.from_configfile(url=url, artifacts=artifacts).load() diff --git a/swh/loader/package/crates/tests/test_crates.py b/swh/loader/package/crates/tests/test_crates.py index e577462..8d7f750 100644 --- a/swh/loader/package/crates/tests/test_crates.py +++ b/swh/loader/package/crates/tests/test_crates.py @@ -1,214 +1,287 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +import pytest + from swh.loader.package.crates.loader import CratesLoader from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( ObjectType, Person, Release, Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone, ) CRATES_EXTRA = [ { - "name": "hg-core", - "version": "0.0.1", - "url": "https://static.crates.io/crates/hg-core/hg-core-0.0.1.crate", - "checksum": "7fe168efadebadb9da6a329fdc027036e233b662285730cad27220e11e53c384", + "url": "https://crates.io/api/v1/crates/hg-core", + "artifacts": [ + { + "checksums": { + "sha256": "48a45b46c2a8c38348adb1205b13c3c5eb0174e0c0fec52cc88e9fb1de14c54d", # noqa: B950 + }, + "filename": "hg-core-0.0.1.crate", + "url": "https://static.crates.io/crates/hg-core/hg-core-0.0.1.crate", + "version": "0.0.1", + }, + ], }, { - "name": "micro-timer", - "version": "0.4.0", - "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.4.0.crate", - "checksum": "5de32cb59a062672560d6f0842c4aa7714727457b9fe2daf8987d995a176a405", + "url": "https://crates.io/api/v1/crates/micro-timer", + "artifacts": [ + { + "checksums": { + "sha256": "69ad8fd116f8af0298ae4e83e587b1600af12709022471e25581c3aeb1da77ce", # noqa: B950 + }, + "filename": "micro-timer-0.1.0.crate", + "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.1.0.crate", + "version": "0.1.0", + }, + { + "checksums": { + "sha256": "7b3f65fe0e109daad8d47e1938c9b5f9353efacd86bbe7ff013f84ae7ca758bf", # noqa: B950 + }, + "filename": "micro-timer-0.1.1.crate", + "url": 
"https://static.crates.io/crates/micro-timer/micro-timer-0.1.1.crate", + "version": "0.1.1", + }, + { + "checksums": { + "sha256": "16439fea388f712c1df7737ceb8f784d407844624b4796faf1e1bf8bbaa97445", # noqa: B950 + }, + "filename": "micro-timer-0.1.2.crate", + "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.1.2.crate", + "version": "0.1.2", + }, + { + "checksums": { + "sha256": "336b4c0f071d16674747faa4643d742cc096fec2bf8cf01bb1a98d984bedcaf1", # noqa: B950 + }, + "filename": "micro-timer-0.2.0.crate", + "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.2.0.crate", + "version": "0.2.0", + }, + { + "checksums": { + "sha256": "987429cd6162a80ed5ff44fc790f5090b1c6d617ac73a2e272965ed91201d79b", # noqa: B950 + }, + "filename": "micro-timer-0.2.1.crate", + "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.2.1.crate", + "version": "0.2.1", + }, + { + "checksums": { + "sha256": "25b31d6cb9112984323d05d7a353f272ae5d7a307074f9ab9b25c00121b8c947", # noqa: B950 + }, + "filename": "micro-timer-0.3.0.crate", + "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.3.0.crate", + "version": "0.3.0", + }, + { + "checksums": { + "sha256": "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c", # noqa: B950 + }, + "filename": "micro-timer-0.3.1.crate", + "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.3.1.crate", + "version": "0.3.1", + }, + { + "checksums": { + "sha256": "5de32cb59a062672560d6f0842c4aa7714727457b9fe2daf8987d995a176a405", # noqa: B950 + }, + "filename": "micro-timer-0.4.0.crate", + "url": "https://static.crates.io/crates/micro-timer/micro-timer-0.4.0.crate", + "version": "0.4.0", + }, + ], }, ] def test_get_versions(requests_mock_datadir, swh_storage): loader = CratesLoader( swh_storage, url=CRATES_EXTRA[1]["url"], - package_name=CRATES_EXTRA[1]["name"], - version=CRATES_EXTRA[1]["version"], + artifacts=CRATES_EXTRA[1]["artifacts"], ) assert loader.get_versions() == [ "0.1.0", "0.1.1", "0.1.2", "0.2.0", "0.2.1", "0.3.0", "0.3.1", "0.4.0", ] def test_get_default_version(requests_mock_datadir, swh_storage): loader = CratesLoader( swh_storage, url=CRATES_EXTRA[1]["url"], - package_name=CRATES_EXTRA[1]["name"], - version=CRATES_EXTRA[1]["version"], + artifacts=CRATES_EXTRA[1]["artifacts"], ) assert loader.get_default_version() == "0.4.0" -def test_crate_origin_not_found(swh_storage, requests_mock_datadir): - url = "https://nowhere-to-run/nowhere-to-hide-0.0.1.crate" +def test_crate_invalid_origin_archive_not_found(swh_storage, requests_mock_datadir): + url = "https://nowhere-to-run/nowhere-to-hide" loader = CratesLoader( swh_storage, url, - package_name="nowhere-to-hide", - version="0.0.1", + artifacts=[ + { + "filename": "nowhere-to-hide-0.0.1.crate", + "url": "https://nowhere-to-run/nowhere-to-hide-0.0.1.crate", + "version": "0.0.1", + }, + ], ) - assert loader.load() == {"status": "failed"} - - assert_last_visit_matches( - swh_storage, url, status="not_found", type="crates", snapshot=None - ) + with pytest.raises(Exception): + assert loader.load() == {"status": "failed"} + assert_last_visit_matches( + swh_storage, url, status="not_found", type="crates", snapshot=None + ) def test_crates_loader_load_one_version(datadir, requests_mock_datadir, swh_storage): loader = CratesLoader( swh_storage, url=CRATES_EXTRA[0]["url"], - package_name=CRATES_EXTRA[0]["name"], - version=CRATES_EXTRA[0]["version"], + artifacts=CRATES_EXTRA[0]["artifacts"], ) actual_load_status = loader.load() assert 
actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_snapshot_id = "353cd6858c88ee8210432ea1098993c2e9966561" expected_release_id = "d578833534017430f1b93eb741620899620c2505" assert expected_snapshot_id == actual_load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(actual_load_status["snapshot_id"]), branches={ b"releases/0.0.1/hg-core-0.0.1.crate": SnapshotBranch( target=hash_to_bytes(expected_release_id), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/0.0.1/hg-core-0.0.1.crate", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 1, "directory": 2, "origin": 1, "origin_visit": 1, "release": 1, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert swh_storage.release_get([hash_to_bytes(expected_release_id)])[0] == Release( name=b"0.0.1", message=b"Synthetic release for Crate source package hg-core version " b"0.0.1\nMercurial pure Rust core library, with no assumption " b"on Python bindings (FFI)\n", target=hash_to_bytes("674c3b0b54628d55b93a79dc7adf304efc01b371"), target_type=ObjectType.DIRECTORY, synthetic=True, author=Person.from_fullname(b"Georges Racinet "), date=TimestampWithTimezone.from_iso8601("2019-04-16T18:48:11.404457+00:00"), id=hash_to_bytes(expected_release_id), ) def test_crates_loader_load_n_versions(datadir, requests_mock_datadir, swh_storage): url = CRATES_EXTRA[1]["url"] loader = CratesLoader( swh_storage, - url=url, - package_name=CRATES_EXTRA[1]["name"], - version=CRATES_EXTRA[1]["version"], - checksum=CRATES_EXTRA[1]["checksum"], + url=CRATES_EXTRA[1]["url"], + artifacts=CRATES_EXTRA[1]["artifacts"], ) + actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_snapshot_id = "016cbbe3bb78424c35b898015a2d80d79359e2ad" assert expected_snapshot_id == actual_load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/0.4.0/micro-timer-0.4.0.crate": SnapshotBranch( target=hash_to_bytes("3237c1174c4ccfa8e934d1bfd8d80b3a89760e39"), target_type=TargetType.RELEASE, ), b"releases/0.3.1/micro-timer-0.3.1.crate": SnapshotBranch( target=hash_to_bytes("8b727a280051cdb90468ede2746e176e6fdf355f"), target_type=TargetType.RELEASE, ), b"releases/0.3.0/micro-timer-0.3.0.crate": SnapshotBranch( target=hash_to_bytes("f45ec236ae50fb37e924a3d2cc093e72b6cbf1cd"), target_type=TargetType.RELEASE, ), b"releases/0.2.1/micro-timer-0.2.1.crate": SnapshotBranch( target=hash_to_bytes("50a60a2c3696df7cd1b623bd7dbea2c89b994e42"), target_type=TargetType.RELEASE, ), b"releases/0.2.0/micro-timer-0.2.0.crate": SnapshotBranch( target=hash_to_bytes("f0592dc0ae05399d872017d0260c45b875cb590e"), target_type=TargetType.RELEASE, ), b"releases/0.1.2/micro-timer-0.1.2.crate": SnapshotBranch( target=hash_to_bytes("9220d7823fc40ab44e3ae3227522e7de672fad3e"), target_type=TargetType.RELEASE, ), b"releases/0.1.1/micro-timer-0.1.1.crate": SnapshotBranch( target=hash_to_bytes("38529b7e355f79fdce31a3ba891e146174e10237"), target_type=TargetType.RELEASE, ), b"releases/0.1.0/micro-timer-0.1.0.crate": SnapshotBranch( target=hash_to_bytes("5e5e6120af55b65c577e09331df54e70fad5e8b0"), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/0.4.0/micro-timer-0.4.0.crate", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats 
= get_stats(swh_storage) assert { "content": 8, "directory": 16, "origin": 1, "origin_visit": 1, "release": 8, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert_last_visit_matches( swh_storage, url, status="full", type="crates", snapshot=expected_snapshot.id, ) diff --git a/swh/loader/package/crates/tests/test_tasks.py b/swh/loader/package/crates/tests/test_tasks.py index b38412f..b60e2d4 100644 --- a/swh/loader/package/crates/tests/test_tasks.py +++ b/swh/loader/package/crates/tests/test_tasks.py @@ -1,25 +1,24 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_tasks_crates_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): mock_load = mocker.patch("swh.loader.package.crates.loader.CratesLoader.load") mock_load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.crates.tasks.LoadCrates", kwargs=dict( - url="some-url/crates/some-package/some-package-0.0.1.crate", - package_name="some-package", - version="0.0.1", + url="some-url/api/v1/crates/some-package", + artifacts=[{"version": "0.0.1", "url": "some-package-0.0.1.crate"}], ), ) assert res res.wait() assert res.successful() assert mock_load.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py index f489c92..e53a753 100644 --- a/swh/loader/package/deposit/loader.py +++ b/swh/loader/package/deposit/loader.py @@ -1,376 +1,378 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from datetime import timezone import json import logging from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union import attr import requests +import sentry_sdk from swh.core.config import load_from_envvar from swh.loader.core.loader import DEFAULT_CONFIG from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import cached_method, download from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, ObjectType, Person, Release, Sha1Git, TimestampWithTimezone, ) from swh.storage.algos.snapshot import snapshot_get_all_branches from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) def now() -> datetime.datetime: return datetime.datetime.now(tz=timezone.utc) @attr.s class DepositPackageInfo(BasePackageInfo): filename = attr.ib(type=str) # instead of Optional[str] author_date = attr.ib(type=datetime.datetime) """codemeta:dateCreated if any, deposit completed_date otherwise""" commit_date = attr.ib(type=datetime.datetime) """codemeta:datePublished if any, deposit completed_date otherwise""" client = attr.ib(type=str) id = attr.ib(type=int) """Internal ID of the deposit in the deposit DB""" collection = attr.ib(type=str) """The collection in the deposit; see SWORD specification.""" author = attr.ib(type=Person) committer = attr.ib(type=Person) release_notes = attr.ib(type=Optional[str]) @classmethod def from_metadata( cls, metadata: Dict[str, Any], url: 
str, filename: str, version: str ) -> "DepositPackageInfo": # Note: # `date` and `committer_date` are always transmitted by the deposit read api # which computes the values itself. The loader needs to use those to create the # release. raw_metadata: str = metadata["raw_metadata"] depo = metadata["deposit"] return cls( url=url, filename=filename, version=version, author_date=depo["author_date"], commit_date=depo["committer_date"], client=depo["client"], id=depo["id"], collection=depo["collection"], author=parse_author(depo["author"]), committer=parse_author(depo["committer"]), release_notes=depo["release_notes"], directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( discovery_date=now(), metadata=raw_metadata.encode(), format="sword-v2-atom-codemeta-v2", ) ], ) def extid(self) -> None: # For now, we don't try to deduplicate deposits. There is little point anyway, # as it only happens when the exact same tarball was deposited twice. return None class DepositLoader(PackageLoader[DepositPackageInfo]): """Load a deposited artifact into swh archive.""" visit_type = "deposit" def __init__( self, storage: StorageInterface, url: str, deposit_id: str, deposit_client: "ApiClient", default_filename: str = "archive.tar", **kwargs: Any, ): """Constructor Args: url: Origin url to associate the artifacts/metadata to deposit_id: Deposit identity deposit_client: Deposit api client """ super().__init__(storage=storage, url=url, **kwargs) self.deposit_id = deposit_id self.client = deposit_client self.default_filename = default_filename @classmethod def from_configfile(cls, **kwargs: Any): """Instantiate a loader from the configuration loaded from the SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their value is not None. Args: kwargs: kwargs passed to the loader instantiation """ config = dict(load_from_envvar(DEFAULT_CONFIG)) config.update({k: v for k, v in kwargs.items() if v is not None}) deposit_client = ApiClient(**config.pop("deposit")) return cls.from_config(deposit_client=deposit_client, **config)
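A hedged sketch of the configuration that ``from_configfile`` consumes: the code above only implies a ``deposit`` block whose ``url`` and ``auth`` entries are passed to ``ApiClient``, so the concrete values below are invented placeholders::

    # Hypothetical SWH_CONFIG_FILENAME content, shown as a Python dict.
    # Only the "deposit"/"url"/"auth" shape is implied by from_configfile
    # and ApiClient.__init__; every value here is a placeholder.
    config = {
        "deposit": {
            "url": "https://deposit.example.org/1/private",  # placeholder URL
            "auth": {"username": "loader", "password": "secret"},
        },
    }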
def get_versions(self) -> Sequence[str]: # only 1 branch 'HEAD' with no alias since we only have 1 snapshot # branch return ["HEAD"] def get_metadata_authority(self) -> MetadataAuthority: provider = self.metadata()["provider"] assert provider["provider_type"] == MetadataAuthorityType.DEPOSIT_CLIENT.value return MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url=provider["provider_url"], metadata={ "name": provider["provider_name"], **(provider["metadata"] or {}), }, ) def get_metadata_fetcher(self) -> MetadataFetcher: tool = self.metadata()["tool"] return MetadataFetcher( name=tool["name"], version=tool["version"], metadata=tool["configuration"], ) def get_package_info( self, version: str ) -> Iterator[Tuple[str, DepositPackageInfo]]: p_info = DepositPackageInfo.from_metadata( self.metadata(), url=self.origin.url, filename=self.default_filename, version=version, ) yield "HEAD", p_info def download_package( self, p_info: DepositPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Override to allow use of the dedicated deposit client""" return [self.client.archive_get(self.deposit_id, tmpdir, p_info.filename)] def build_release( self, p_info: DepositPackageInfo, uncompressed_path: str, directory: Sha1Git, ) -> Optional[Release]: message = ( f"{p_info.client}: Deposit {p_info.id} in collection {p_info.collection}" ) if p_info.release_notes: message += "\n\n" + p_info.release_notes if not message.endswith("\n"): message += "\n" return Release( name=p_info.version.encode(), message=message.encode(), author=p_info.author, date=TimestampWithTimezone.from_dict(p_info.author_date), target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: metadata = self.metadata() raw_metadata: str = metadata["raw_metadata"] origin_metadata = json.dumps( { "metadata": [raw_metadata], "provider": metadata["provider"], "tool": metadata["tool"], } ).encode() return [ RawExtrinsicMetadataCore( discovery_date=now(), metadata=raw_metadata.encode(), format="sword-v2-atom-codemeta-v2", ), RawExtrinsicMetadataCore( discovery_date=now(), metadata=origin_metadata, format="original-artifacts-json", ), ] @cached_method def metadata(self): """Returns metadata from the deposit server""" return self.client.metadata_get(self.deposit_id) def load(self) -> Dict: # First make sure the deposit is known on the deposit's RPC server # prior to triggering a loading try: self.metadata() except ValueError: - logger.error(f"Unknown deposit {self.deposit_id}, ignoring") + logger.exception(f"Unknown deposit {self.deposit_id}") + sentry_sdk.capture_exception() return {"status": "failed"} # Then usual loading return super().load() def finalize_visit( self, status_visit: str, errors: Optional[List[str]] = None, **kwargs ) -> Dict[str, Any]: r = super().finalize_visit(status_visit=status_visit, **kwargs) success = status_visit == "full" # Update deposit status try: if not success: self.client.status_update( self.deposit_id, status="failed", errors=errors, ) return r snapshot_id = hash_to_bytes(r["snapshot_id"]) snapshot = snapshot_get_all_branches(self.storage, snapshot_id) if not snapshot: return r branches = snapshot.branches logger.debug("branches: %s", branches) if not branches: return r rel_id = branches[b"HEAD"].target release = self.storage.release_get([rel_id])[0] if not release: return r # update the deposit's status to success with its # release-id and directory-id self.client.status_update( self.deposit_id, status="done", release_id=hash_to_hex(rel_id), directory_id=hash_to_hex(release.target), snapshot_id=r["snapshot_id"], origin_url=self.origin.url, ) except Exception: logger.exception("Problem when trying to update the deposit's status") + sentry_sdk.capture_exception() return {"status": "failed"} return r def parse_author(author) -> Person: """See prior fixme""" return Person( fullname=author["fullname"].encode("utf-8"), name=author["name"].encode("utf-8"), email=author["email"].encode("utf-8"), ) class ApiClient: """Private Deposit Api client""" def __init__(self, url, auth: Optional[Mapping[str, str]]): self.base_url = url.rstrip("/") self.auth = None if not auth else (auth["username"], auth["password"]) def do(self, method: str, url: str, *args, **kwargs): """Internal method to deal with requests, possibly with basic http authentication.
Args: method (str): supported http methods as in get/post/put Returns: The request's execution output """ method_fn = getattr(requests, method) if self.auth: kwargs["auth"] = self.auth return method_fn(url, *args, **kwargs) def archive_get( self, deposit_id: Union[int, str], tmpdir: str, filename: str ) -> Tuple[str, Dict]: """Retrieve deposit's archive artifact locally""" url = f"{self.base_url}/{deposit_id}/raw/" return download(url, dest=tmpdir, filename=filename, auth=self.auth) def metadata_url(self, deposit_id: Union[int, str]) -> str: return f"{self.base_url}/{deposit_id}/meta/" def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]: """Retrieve deposit's metadata artifact as json""" url = self.metadata_url(deposit_id) r = self.do("get", url) if r.ok: return r.json() msg = f"Problem when retrieving deposit metadata at {url}" - logger.error(msg) raise ValueError(msg) def status_update( self, deposit_id: Union[int, str], status: str, errors: Optional[List[str]] = None, release_id: Optional[str] = None, directory_id: Optional[str] = None, snapshot_id: Optional[str] = None, origin_url: Optional[str] = None, ): """Update the deposit's information, including its status and the persistent identifiers resulting from the loading. """ url = f"{self.base_url}/{deposit_id}/update/" payload: Dict[str, Any] = {"status": status} if release_id: payload["release_id"] = release_id if directory_id: payload["directory_id"] = directory_id if snapshot_id: payload["snapshot_id"] = snapshot_id if origin_url: payload["origin_url"] = origin_url if errors: payload["status_detail"] = {"loading": errors} self.do("put", url, json=payload)
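For orientation, a minimal sketch of the round-trip ``DepositLoader`` performs with this client on a successful visit; the deposit id, URL, and identifiers below are placeholders, not values from any real deposit::

    # Sketch only: mirrors metadata_get/archive_get/status_update as used
    # by DepositLoader.load() and finalize_visit() above.
    client = ApiClient("https://deposit.example.org/1/private", auth=None)
    metadata = client.metadata_get(42)  # raises ValueError if the deposit is unknown
    path, info = client.archive_get(42, "/tmp/deposit", "archive.tar")
    client.status_update(
        42,
        status="done",
        release_id="0" * 40,  # placeholder hex ids, cf. finalize_visit()
        directory_id="0" * 40,
        snapshot_id="0" * 40,
        origin_url="https://example.org/some-origin",
    )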
diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index ed9bdc1..000ad98 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,1075 +1,1086 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib from itertools import islice import json import logging import os import string import sys import tempfile from typing import ( Any, Dict, Generic, Iterator, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, ) import attr from requests.exceptions import ContentDecodingError import sentry_sdk from swh.core.tarball import uncompress from swh.loader.core.loader import BaseLoader from swh.loader.exception import NotFound from swh.loader.package.utils import download from swh.model import from_disk from swh.model.hashutil import hash_to_hex from swh.model.model import ( ExtID, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, ) from swh.model.model import ( Origin, OriginVisit, OriginVisitStatus, RawExtrinsicMetadata, Release, Revision, Sha1Git, Snapshot, ) from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.algos.snapshot import snapshot_get_latest from swh.storage.interface import StorageInterface from swh.storage.utils import now logger = logging.getLogger(__name__) SWH_METADATA_AUTHORITY = MetadataAuthority( type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/", metadata={}, ) """Metadata authority for extrinsic metadata generated by Software Heritage. Used for metadata on "original artifacts", ie. length, filename, and checksums of downloaded archive files.""" PartialExtID = Tuple[str, int, bytes] """The ``extid_type`` and ``extid`` fields of an :class:`ExtID` object.""" @attr.s class RawExtrinsicMetadataCore: """Contains the core of the metadata extracted by a loader, that will be used to build a full RawExtrinsicMetadata object by adding object identifier, context, and provenance information.""" format = attr.ib(type=str) metadata = attr.ib(type=bytes) discovery_date = attr.ib(type=Optional[datetime.datetime], default=None) """Defaults to the visit date.""" @attr.s class BasePackageInfo: """Holds the information a package loader needs about one downloadable artifact of a package version: its url, optional filename, and version name/number, plus the extrinsic metadata to attach to the loaded directory.""" url = attr.ib(type=str) filename = attr.ib(type=Optional[str]) version = attr.ib(type=str) """Version name/number.""" MANIFEST_FORMAT: Optional[string.Template] = None """If not None, used by the default extid() implementation to format a manifest, before hashing it to produce an ExtID.""" EXTID_TYPE: str = "package-manifest-sha256" EXTID_VERSION: int = 0 # The following attribute has kw_only=True in order to allow subclasses # to add attributes. Without kw_only, attributes without default values cannot # go after attributes with default values. # See directory_extrinsic_metadata = attr.ib( type=List[RawExtrinsicMetadataCore], default=[], kw_only=True, ) """:term:`extrinsic metadata` collected by the loader, that will be attached to the loaded directory and added to the Metadata storage.""" # TODO: add support for metadata for releases and contents def extid(self) -> Optional[PartialExtID]: """Returns a unique intrinsic identifier of this package info, or None if this package info is not 'deduplicatable' (meaning that we will always load it, instead of checking the ExtID storage to see if we already did)""" if self.MANIFEST_FORMAT is None: return None else: manifest = self.MANIFEST_FORMAT.substitute( {k: str(v) for (k, v) in attr.asdict(self).items()} ) return ( self.EXTID_TYPE, self.EXTID_VERSION, hashlib.sha256(manifest.encode()).digest(), )
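To make the default ``extid()`` mechanism concrete, here is a hedged sketch of a subclass relying on ``MANIFEST_FORMAT``; the class name and the manifest field choice are invented for the example::

    import string

    import attr

    @attr.s
    class ExamplePackageInfo(BasePackageInfo):
        # Hypothetical choice: deduplicate on "<url> <version>".
        MANIFEST_FORMAT = string.Template("$url $version")

    p_info = ExamplePackageInfo(
        url="https://example.org/pkg-1.0.tar.gz",
        filename="pkg-1.0.tar.gz",
        version="1.0",
    )
    # extid() substitutes the manifest "https://example.org/pkg-1.0.tar.gz 1.0"
    # and returns ("package-manifest-sha256", 0, sha256(manifest).digest()).
    extid_type, extid_version, extid = p_info.extid()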
TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo) class PackageLoader(BaseLoader, Generic[TPackageInfo]): def __init__(self, storage: StorageInterface, url: str, **kwargs: Any): """Loader's constructor. This raises an exception if the minimal required configuration is missing (cf. :func:`check` method). Args: storage: Storage instance url: Origin url to load data from """ super().__init__(storage=storage, origin_url=url, **kwargs) def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Raises: :class:`swh.loader.exception.NotFound` error when failing to read the published package versions. Returns: Sequence of published versions """ return [] def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]: """Given a release version of a package, retrieve the associated package information for that version. Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_release( self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: """Build the release from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. Args: p_info: Package information uncompressed_path: Artifact uncompressed path on disk """ raise NotImplementedError("build_release") def get_default_version(self) -> str: """Retrieve the latest release version if any. Returns: Latest version """ return "" def last_snapshot(self) -> Optional[Snapshot]: """Retrieve the last snapshot out of the last visit.""" return snapshot_get_latest(self.storage, self.origin.url) def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]: return p_info.extid() def _get_known_extids( self, packages_info: List[TPackageInfo] ) -> Dict[PartialExtID, List[CoreSWHID]]: """Computes the ExtIDs of the new PackageInfo objects, searches for those already loaded in the archive, and returns the known ones.""" # Compute the ExtIDs of all the new packages, grouped by extid type new_extids: Dict[Tuple[str, int], List[bytes]] = {} for p_info in packages_info: res = p_info.extid() if res is not None: (extid_type, extid_version, extid_extid) = res new_extids.setdefault((extid_type, extid_version), []).append( extid_extid ) # For each extid type, call extid_get_from_extid() with all the extids of # that type, and store them in the '(type, extid) -> target' map. known_extids: Dict[PartialExtID, List[CoreSWHID]] = {} for ((extid_type, extid_version), extids) in new_extids.items(): for extid in self.storage.extid_get_from_extid( extid_type, extids, version=extid_version ): if extid is not None: key = (extid.extid_type, extid_version, extid.extid) known_extids.setdefault(key, []).append(extid.target) return known_extids def resolve_object_from_extids( self, known_extids: Dict[PartialExtID, List[CoreSWHID]], p_info: TPackageInfo, whitelist: Set[Sha1Git], ) -> Optional[CoreSWHID]: """Resolve the revision/release from known ExtIDs and a package info object. If the artifact has already been downloaded, this will return the existing release (or revision) targeting that uncompressed artifact directory. Otherwise, this returns None. Args: known_extids: Dict built from a list of ExtID, with the target as value p_info: Package information whitelist: Any ExtID with target not in this set is filtered out Returns: None or release/revision SWHID """ new_extid = p_info.extid() if new_extid is None: return None extid_targets = set() for extid_target in known_extids.get(new_extid, []): if extid_target.object_id not in whitelist: # There is a known ExtID for this package, but its target is not # in the snapshot. # This can happen for three reasons: # # 1. a loader crashed after writing the ExtID, but before writing # the snapshot # 2. some other loader loaded the same artifact, but produced # a different revision, causing an additional ExtID object # to be written. We will probably find this loader's ExtID # in a future iteration of this loop. # Note that for now, this is impossible, as each loader has a # completely different extid_type, but this is an implementation # detail of each loader. # 3. we took a snapshot, then the package disappeared, # then we took another snapshot, and the package reappeared # # In case of 1, we must actually load the package now, # so let's do it. # TODO: detect when we are in case 3 using release_missing # or revision_missing instead of the snapshot. continue elif extid_target.object_type in (ObjectType.RELEASE, ObjectType.REVISION): extid_targets.add(extid_target) else: # Note that this case should never be reached unless there is a # collision between a revision hash and some non-revision object's # hash, but better safe than sorry.
logger.warning( "%s is in the whitelist, but is not a revision/release.", hash_to_hex(extid_target.object_id), ) if extid_targets: # This is a known package version, as we have an extid to reference it. # Let's return one of them. # If there is a release extid, return it. release_extid_targets = { extid_target for extid_target in extid_targets if extid_target.object_type == ObjectType.RELEASE } # Exclude missing targets missing_releases = { CoreSWHID(object_type=ObjectType.RELEASE, object_id=id_) for id_ in self.storage.release_missing( [swhid.object_id for swhid in release_extid_targets] ) } if missing_releases: - logger.error( - "Found ExtIDs pointing to missing releases: %s", missing_releases - ) + + err_message = "Found ExtIDs pointing to missing releases" + + logger.error(err_message + ": %s", missing_releases) + + with sentry_sdk.push_scope() as scope: + scope.set_extra( + "missing_releases", [str(x) for x in missing_releases] + ) + sentry_sdk.capture_message(err_message, "error") + release_extid_targets -= missing_releases extid_target2 = self.select_extid_target(p_info, release_extid_targets) if extid_target2: return extid_target2 # If there is no release extid (ie. if the package was only loaded with # older versions of this loader, which produced revision objects instead # of releases), return a revision extid when possible. revision_extid_targets = { extid_target for extid_target in extid_targets if extid_target.object_type == ObjectType.REVISION } if revision_extid_targets: assert len(extid_targets) == 1, extid_targets extid_target = list(extid_targets)[0] return extid_target # No target found (this is probably a new package version) return None def select_extid_target( self, p_info: TPackageInfo, extid_targets: Set[CoreSWHID] ) -> Optional[CoreSWHID]: """Given a list of release extid targets, chooses one appropriate for the given package info. Package loaders should implement this if their ExtIDs may map to multiple releases, so they can fetch releases from the storage and inspect their fields to select the right one for this ``p_info``. """ if extid_targets: # The base package loader does not have the domain-specific knowledge # to select the right release -> crash if there is more than one. assert len(extid_targets) == 1, extid_targets return list(extid_targets)[0] return None def download_package( self, p_info: TPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in the tmpdir folder. Default implementation expects the artifacts package info to be about one artifact per package. Note that most implementations have one artifact per package, but some have multiple artifacts per package (debian), and some have none at all, where the package is the artifact (gnu). Args: p_info: Information on the package artifacts to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ try: return [download(p_info.url, dest=tmpdir, filename=p_info.filename)] except ContentDecodingError: # package might be erroneously marked as gzip compressed while it is not, # try to download its raw bytes again without attempting to uncompress # the input stream return [ download( p_info.url, dest=tmpdir, filename=p_info.filename, extra_request_headers={"Accept-Encoding": "identity"}, ) ]
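For loaders whose ExtIDs may map to several releases, an override of ``select_extid_target`` in a loader subclass could disambiguate using storage data. A sketch under the assumption that release names match version strings; that matching policy is invented for the example, not what any particular loader does::

    def select_extid_target(
        self, p_info: TPackageInfo, extid_targets: Set[CoreSWHID]
    ) -> Optional[CoreSWHID]:
        # Illustrative policy: keep the candidate whose release name
        # matches the version currently being processed.
        candidates = list(extid_targets)
        releases = self.storage.release_get(
            [target.object_id for target in candidates]
        )
        for target, release in zip(candidates, releases):
            if release is not None and release.name == p_info.version.encode():
                return target
        return None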
def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: """Uncompress the artifact(s) in the destination folder dest. Optionally, this may need to use the p_info dict for some more information (debian). """ uncompressed_path = os.path.join(dest, "src") for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: """Return an extra dict of branches that are used to update the set of branches. """ return {} def finalize_visit( self, *, snapshot: Optional[Snapshot], visit: OriginVisit, status_visit: str, status_load: str, failed_branches: List[str], errors: Optional[List[str]] = None, ) -> Dict[str, Any]: """Finalize the visit: - flush any unflushed data to storage - update the origin visit's status - return the task's status """ self.storage.flush() snapshot_id: Optional[bytes] = None if snapshot and snapshot.id: # to prevent snapshot.id from being b"" snapshot_id = snapshot.id assert visit.visit visit_status = OriginVisitStatus( origin=self.origin.url, visit=visit.visit, type=self.visit_type, date=now(), status=status_visit, snapshot=snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) result: Dict[str, Any] = { "status": status_load, } if snapshot_id: result["snapshot_id"] = hash_to_hex(snapshot_id) if failed_branches: logger.warning("%d failed branches", len(failed_branches)) for i, urls in enumerate(islice(failed_branches, 50)): prefix_url = "Failed branches: " if i == 0 else "" logger.warning("%s%s", prefix_url, urls) return result def load(self) -> Dict: """Load the associated contents for a specific origin. 1. Get the list of versions in an origin. 2. Get the snapshot from the previous run of the loader, and filter out versions that were already loaded, if their :term:`extids ` match Then, for each remaining version in the origin 3. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 4. Extract the downloaded files. By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 5. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 6. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 7. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 8.
Generate and load the snapshot for the visit Using the revisions/releases collected at step 7., and the branch information from step 2., generate a snapshot and load it into the Software Heritage archive """ status_load = "uneventful" # either: eventful, uneventful, failed status_visit = "full" # see swh.model.model.OriginVisitStatus snapshot = None failed_branches: List[str] = [] # Prepare origin and origin_visit origin = Origin(url=self.origin.url) try: self.storage.origin_add([origin]) visit = list( self.storage.origin_visit_add( [ OriginVisit( origin=self.origin.url, date=self.visit_date, type=self.visit_type, ) ] ) )[0] except Exception as e: logger.exception( "Failed to initialize origin_visit for %s", self.origin.url ) sentry_sdk.capture_exception(e) return {"status": "failed"} # Get the previous snapshot for this origin. It is then used to see which # of the package's versions are already loaded in the archive. try: last_snapshot = self.last_snapshot() logger.debug("last snapshot: %s", last_snapshot) except Exception as e: logger.exception("Failed to get previous state for %s", self.origin.url) sentry_sdk.capture_exception(e) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=[str(e)], ) load_exceptions: List[Exception] = [] # Get the list of all version names try: versions = self.get_versions() except NotFound as e: return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="not_found", status_load="failed", errors=[str(e)], ) except Exception as e: + sentry_sdk.capture_exception(e) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=[str(e)], ) # Get the metadata of each version's package packages_info: List[Tuple[str, TPackageInfo]] = [ (branch_name, p_info) for version in versions for (branch_name, p_info) in self.get_package_info(version) ] # Compute the ExtID of each of these packages known_extids = self._get_known_extids([p_info for (_, p_info) in packages_info]) if last_snapshot is None: last_snapshot_targets: Set[Sha1Git] = set() else: last_snapshot_targets = { branch.target for branch in last_snapshot.branches.values() } new_extids: Set[ExtID] = set() tmp_releases: Dict[str, List[Tuple[str, Sha1Git]]] = { version: [] for version in versions } errors = [] for (branch_name, p_info) in packages_info: logger.debug("package_info: %s", p_info) # Check if the package was already loaded, using its ExtID swhid = self.resolve_object_from_extids( known_extids, p_info, last_snapshot_targets ) if swhid is not None and swhid.object_type == ObjectType.REVISION: # This package was already loaded, but by an older version # of this loader, which produced revisions instead of releases. # Let's fetch the revision's data, and "upgrade" it into a release. (rev,) = self.storage.revision_get([swhid.object_id]) if not rev: logger.error( "Failed to upgrade branch %s from revision to " "release, %s is missing from the storage. " "Falling back to re-loading from the origin.", branch_name, swhid, ) else: rev = None if swhid is None or (swhid.object_type == ObjectType.REVISION and not rev): # No matching revision or release found in the last snapshot, load it. 
release_id = None try: res = self._load_release(p_info, origin) if res: (release_id, directory_id) = res assert release_id assert directory_id self._load_extrinsic_directory_metadata( p_info, release_id, directory_id ) self.storage.flush() status_load = "eventful" except Exception as e: self.storage.clear_buffers() load_exceptions.append(e) sentry_sdk.capture_exception(e) error = f"Failed to load branch {branch_name} for {self.origin.url}" logger.exception(error) failed_branches.append(branch_name) errors.append(f"{error}: {e}") continue if release_id is None: continue add_extid = True elif swhid.object_type == ObjectType.REVISION: # If 'rev' was None, the previous block would have run. assert rev is not None rel = rev2rel(rev, p_info.version) self.storage.release_add([rel]) logger.debug("Upgraded %s to %s", swhid, rel.swhid()) release_id = rel.id # Create a new extid for this package, so the next run of this loader # will be able to find the new release, and use it (instead of the # old revision) add_extid = True elif swhid.object_type == ObjectType.RELEASE: # This package was already loaded, nothing to do. release_id = swhid.object_id add_extid = False else: assert False, f"Unexpected object type: {swhid}" assert release_id is not None if add_extid: partial_extid = p_info.extid() if partial_extid is not None: (extid_type, extid_version, extid) = partial_extid release_swhid = CoreSWHID( object_type=ObjectType.RELEASE, object_id=release_id ) new_extids.add( ExtID( extid_type=extid_type, extid_version=extid_version, extid=extid, target=release_swhid, ) ) tmp_releases[p_info.version].append((branch_name, release_id)) if load_exceptions: status_visit = "partial" if not tmp_releases: # We could not load any releases; fail completely return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=errors, ) try: # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() logger.debug("default version: %s", default_version) # Retrieve extra branches extra_branches = self.extra_branches() logger.debug("extra branches: %s", extra_branches) snapshot = self._load_snapshot( default_version, tmp_releases, extra_branches ) self.storage.flush() except Exception as e: error = f"Failed to build snapshot for origin {self.origin.url}" logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) status_visit = "failed" status_load = "failed" if snapshot: try: metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id) self.load_metadata_objects(metadata_objects) except Exception as e: error = ( f"Failed to load extrinsic snapshot metadata for {self.origin.url}" ) logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) status_visit = "partial" status_load = "failed" try: metadata_objects = self.build_extrinsic_origin_metadata() self.load_metadata_objects(metadata_objects) except Exception as e: error = f"Failed to load extrinsic origin metadata for {self.origin.url}" logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) status_visit = "partial" status_load = "failed" if status_load != "failed": self._load_extids(new_extids) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit=status_visit, status_load=status_load, errors=errors, ) def _load_directory( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str ) -> Tuple[str, 
from_disk.Directory]: uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir) logger.debug("uncompressed_path: %s", uncompressed_path) directory = from_disk.Directory.from_disk( path=uncompressed_path.encode("utf-8"), max_content_length=self.max_content_size, ) contents, skipped_contents, directories = from_disk.iter_directory(directory) logger.debug("Number of skipped contents: %s", len(skipped_contents)) self.storage.skipped_content_add(skipped_contents) logger.debug("Number of contents: %s", len(contents)) self.storage.content_add(contents) logger.debug("Number of directories: %s", len(directories)) self.storage.directory_add(directories) return (uncompressed_path, directory) def _load_release( self, p_info: TPackageInfo, origin ) -> Optional[Tuple[Sha1Git, Sha1Git]]: """Does all the loading of a release itself: * downloads a package and uncompresses it * loads it from disk * adds contents, directories, and release to self.storage * returns (release_id, directory_id) Raises exception when unable to download or uncompress artifacts """ with tempfile.TemporaryDirectory() as tmpdir: dl_artifacts = self.download_package(p_info, tmpdir) (uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir) # FIXME: This should be release. cf. D409 release = self.build_release( p_info, uncompressed_path, directory=directory.hash ) if not release: # Some artifacts are missing intrinsic metadata # skipping those return None metadata = [metadata for (filepath, metadata) in dl_artifacts] assert release.target is not None, release assert release.target_type == ModelObjectType.DIRECTORY, release metadata_target = ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) original_artifact_metadata = RawExtrinsicMetadata( target=metadata_target, discovery_date=self.visit_date, authority=SWH_METADATA_AUTHORITY, fetcher=self.get_metadata_fetcher(), format="original-artifacts-json", metadata=json.dumps(metadata).encode(), origin=self.origin.url, release=release.swhid(), ) self.load_metadata_objects([original_artifact_metadata]) logger.debug("Release: %s", release) self.storage.release_add([release]) assert directory.hash return (release.id, directory.hash) def _load_snapshot( self, default_version: str, releases: Dict[str, List[Tuple[str, bytes]]], extra_branches: Dict[bytes, Mapping[str, Any]], ) -> Optional[Snapshot]: """Build snapshot out of the current releases stored and extra branches. Then load it in the storage. 
""" logger.debug("releases: %s", releases) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_releases in releases.items(): if version == default_version and len(branch_name_releases) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_releases[0] # except for some corner case (deposit) if branch_name != "HEAD": branches[b"HEAD"] = { "target_type": "alias", "target": branch_name.encode("utf-8"), } for branch_name, target in branch_name_releases: branches[branch_name.encode("utf-8")] = { "target_type": "release", "target": target, } # Deal with extra-branches for name, branch_target in extra_branches.items(): if name in branches: - logger.error("Extra branch '%s' has been ignored", name) + error_message = f"Extra branch '{name!r}' has been ignored" + logger.error(error_message) + sentry_sdk.capture_message(error_message, "error") else: branches[name] = branch_target snapshot_data = {"branches": branches} logger.debug("snapshot: %s", snapshot_data) snapshot = Snapshot.from_dict(snapshot_data) logger.debug("snapshot: %s", snapshot) self.storage.snapshot_add([snapshot]) return snapshot def get_loader_name(self) -> str: """Returns a fully qualified name of this loader.""" return f"{self.__class__.__module__}.{self.__class__.__name__}" def get_loader_version(self) -> str: """Returns the version of the current loader.""" module_name = self.__class__.__module__ or "" module_name_parts = module_name.split(".") # Iterate rootward through the package hierarchy until we find a parent of this # loader's module with a __version__ attribute. for prefix_size in range(len(module_name_parts), 0, -1): package_name = ".".join(module_name_parts[0:prefix_size]) module = sys.modules[package_name] if hasattr(module, "__version__"): return module.__version__ # If this loader's class has no parent package with a __version__, # it should implement it itself. raise NotImplementedError( f"Could not dynamically find the version of {self.get_loader_name()}." ) def get_metadata_fetcher(self) -> MetadataFetcher: """Returns a MetadataFetcher instance representing this package loader; which is used to for adding provenance information to extracted extrinsic metadata, if any.""" return MetadataFetcher( name=self.get_loader_name(), version=self.get_loader_version(), metadata={}, ) def get_metadata_authority(self) -> MetadataAuthority: """For package loaders that get extrinsic metadata, returns the authority the metadata are coming from. """ raise NotImplementedError("get_metadata_authority") def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_origin_metadata.""" return [] def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_origin_metadata.""" metadata_items = self.get_extrinsic_origin_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. 
return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=self.origin.swhid(), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, ) ) return metadata_objects def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_snapshot_metadata.""" return [] def build_extrinsic_snapshot_metadata( self, snapshot_id: Sha1Git ) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_snapshot_metadata.""" metadata_items = self.get_extrinsic_snapshot_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.origin.url, ) ) return metadata_objects def build_extrinsic_directory_metadata( self, p_info: TPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> List[RawExtrinsicMetadata]: if not p_info.directory_extrinsic_metadata: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in p_info.directory_extrinsic_metadata: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.origin.url, release=CoreSWHID( object_type=ObjectType.RELEASE, object_id=release_id ), ) ) return metadata_objects def _load_extrinsic_directory_metadata( self, p_info: TPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> None: metadata_objects = self.build_extrinsic_directory_metadata( p_info, release_id, directory_id ) self.load_metadata_objects(metadata_objects) def _load_extids(self, extids: Set[ExtID]) -> None: if not extids: return try: self.storage.extid_add(list(extids)) except Exception as e: logger.exception("Failed to load new ExtIDs for %s", self.origin.url) sentry_sdk.capture_exception(e) # No big deal, it just means the next visit will load the same versions # again. 
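Putting the hooks together, a minimal self-contained subclass might look as follows; the ``artifacts`` structure, the naive version ordering, and the message wording are assumptions made for this sketch, not an API prescribed by ``PackageLoader``::

    from typing import Any, Dict, Iterator, Sequence, Tuple

    from swh.loader.package.loader import BasePackageInfo, PackageLoader
    from swh.loader.package.utils import EMPTY_AUTHOR, release_name
    from swh.model.model import ObjectType, Release, Sha1Git

    class ExampleLoader(PackageLoader[BasePackageInfo]):
        visit_type = "example"  # hypothetical visit type

        def __init__(self, storage, url: str, artifacts: Dict[str, Any], **kwargs):
            super().__init__(storage=storage, url=url, **kwargs)
            # assumed shape: {"1.0": {"url": ..., "filename": ...}, ...}
            self.artifacts = artifacts

        def get_versions(self) -> Sequence[str]:
            return list(self.artifacts)

        def get_default_version(self) -> str:
            return max(self.artifacts)  # naive lexicographic choice

        def get_package_info(
            self, version: str
        ) -> Iterator[Tuple[str, BasePackageInfo]]:
            a = self.artifacts[version]
            p_info = BasePackageInfo(
                url=a["url"], filename=a["filename"], version=version
            )
            yield release_name(version), p_info

        def build_release(
            self, p_info: BasePackageInfo, uncompressed_path: str, directory: Sha1Git
        ) -> Release:
            msg = f"Synthetic release for example package version {p_info.version}\n"
            return Release(
                name=p_info.version.encode(),
                message=msg.encode(),
                author=EMPTY_AUTHOR,
                date=None,
                target=directory,
                target_type=ObjectType.DIRECTORY,
                synthetic=True,
            )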
def rev2rel(rev: Revision, version: str) -> Release: """Converts a revision to a release.""" message = rev.message if message and not message.endswith(b"\n"): message += b"\n" return Release( name=version.encode(), message=message, target=rev.directory, target_type=ModelObjectType.DIRECTORY, synthetic=rev.synthetic, author=rev.author, date=rev.date, ) diff --git a/swh/loader/package/maven/loader.py b/swh/loader/package/maven/loader.py index e2d9d05..140a703 100644 --- a/swh/loader/package/maven/loader.py +++ b/swh/loader/package/maven/loader.py @@ -1,206 +1,206 @@ # Copyright (C) 2021-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from __future__ import annotations from datetime import datetime, timezone import json import logging from os import path import string from typing import Any, Iterator, List, Optional, Sequence, Tuple import attr import iso8601 import requests from typing_extensions import TypedDict from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import EMPTY_AUTHOR, release_name from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, ObjectType, RawExtrinsicMetadata, Release, Sha1Git, TimestampWithTimezone, ) from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) class ArtifactDict(TypedDict): """Data about a Maven artifact, passed by the Maven Lister.""" time: str """the time of the last update of the jar file on the server as an iso8601 date string """ url: str """the artifact url to retrieve filename""" filename: Optional[str] """optionally, the file's name""" gid: str """artifact's groupId""" aid: str """artifact's artifactId""" version: str """artifact's version""" base_url: str """root URL of the Maven instance""" @attr.s class MavenPackageInfo(BasePackageInfo): time = attr.ib(type=datetime) """Timestamp of the last update of the jar file on the server.""" gid = attr.ib(type=str) """Group ID of the maven artifact""" aid = attr.ib(type=str) """Artifact ID of the maven artifact""" version = attr.ib(type=str) """Version of the maven artifact""" base_url = attr.ib(type=str) """Root URL of the Maven instance""" # default format for maven artifacts MANIFEST_FORMAT = string.Template("$gid $aid $version $url $time") EXTID_TYPE = "maven-jar" EXTID_VERSION = 0 @classmethod def from_metadata(cls, a_metadata: ArtifactDict) -> MavenPackageInfo: time = iso8601.parse_date(a_metadata["time"]).astimezone(tz=timezone.utc) url = a_metadata["url"] return cls( url=url, filename=a_metadata.get("filename") or path.split(url)[-1], time=time, gid=a_metadata["gid"], aid=a_metadata["aid"], version=a_metadata["version"], base_url=a_metadata["base_url"], directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( format="maven-json", metadata=json.dumps(a_metadata).encode(), ), ], ) class MavenLoader(PackageLoader[MavenPackageInfo]): """Load source code jar origin's artifact files into swh archive""" visit_type = "maven" def __init__( self, storage: StorageInterface, url: str, artifacts: Sequence[ArtifactDict], **kwargs: Any, ): """Loader constructor. For now, this is the lister's task output. There is one, and only one, artefact (jar or zip) per version, as guaranteed by the Maven coordinates system.
Args: url: Origin url artifacts: List of single artifact information """ super().__init__(storage=storage, url=url, **kwargs) self.artifacts = artifacts # assume order is enforced in the lister self.version_artifact = { jar["version"]: jar for jar in artifacts if jar["version"] } if artifacts: base_urls = {jar["base_url"] for jar in artifacts} try: (self.base_url,) = base_urls except ValueError: raise ValueError( "Artifacts originate from more than one Maven instance: " + ", ".join(base_urls) ) from None else: # There is no artifact, so self.metadata_authority won't be called, # so self.base_url won't be accessed. pass def get_versions(self) -> Sequence[str]: return list(self.version_artifact) def get_default_version(self) -> str: - # Default version is the last item - return self.artifacts[-1]["version"] + # Default version is the one of the most recent artifact + return max(self.artifacts, key=lambda a: a["time"])["version"] def get_metadata_authority(self): return MetadataAuthority(type=MetadataAuthorityType.FORGE, url=self.base_url) def build_extrinsic_directory_metadata( self, p_info: MavenPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> List[RawExtrinsicMetadata]: # Rebuild POM URL. pom_url = path.dirname(p_info.url) pom_url = f"{pom_url}/{p_info.aid}-{p_info.version}.pom" r = requests.get(pom_url, allow_redirects=True) if r.status_code == 200: metadata_pom = r.content else: metadata_pom = b"" p_info.directory_extrinsic_metadata.append( RawExtrinsicMetadataCore( format="maven-pom", metadata=metadata_pom, ) ) return super().build_extrinsic_directory_metadata( p_info=p_info, release_id=release_id, directory_id=directory_id, ) def get_package_info(self, version: str) -> Iterator[Tuple[str, MavenPackageInfo]]: a_metadata = self.version_artifact[version] rel_name = release_name(a_metadata["version"]) yield rel_name, MavenPackageInfo.from_metadata(a_metadata) def build_release( self, p_info: MavenPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: msg = f"Synthetic release for archive at {p_info.url}\n".encode("utf-8") normalized_time = TimestampWithTimezone.from_datetime(p_info.time) return Release( name=p_info.version.encode(), message=msg, date=normalized_time, author=EMPTY_AUTHOR, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) diff --git a/swh/loader/package/opam/loader.py b/swh/loader/package/opam/loader.py index c34b50d..d192695 100644 --- a/swh/loader/package/opam/loader.py +++ b/swh/loader/package/opam/loader.py @@ -1,265 +1,265 @@ # Copyright (C) 2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import io import os from subprocess import PIPE, Popen, call from typing import Any, Iterator, List, Optional, Tuple import attr from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import cached_method from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, ObjectType, Person, Release, Sha1Git, ) from swh.storage.interface import StorageInterface @attr.s class OpamPackageInfo(BasePackageInfo): author = attr.ib(type=Person) committer = attr.ib(type=Person) def opam_read( cmd: List[str], init_error_msg_if_any: Optional[str] = None ) -> Optional[str]: """This executes an opam command and returns the first line of the output. 
Args: cmd: Opam command to execute as a list of strings init_error_msg_if_any: Error message to raise in case a problem occurs during initialization Raises: ValueError with the init_error_msg_if_any content in case stdout is not consumable and the variable is provided with a non-empty value. Returns: the first line of the executed command output """ with Popen(cmd, stdout=PIPE) as proc: if proc.stdout is not None: for line in io.TextIOWrapper(proc.stdout): # care only for the first line output result (mostly blank separated # values, callers will deal with the parsing of the line) return line elif init_error_msg_if_any: raise ValueError(init_error_msg_if_any) return None class OpamLoader(PackageLoader[OpamPackageInfo]): """Load all versions of a given package in a given opam repository. The state of the opam repository is stored in a directory called an opam root. This folder is a requisite for the opam binary to actually list information on packages. When initialize_opam_root is False (the default for production workers), the opam root must already have been configured outside of the loading process. If not, an error is raised, thus failing the loading. For standalone workers, initialize_opam_root must be set to True, so the ingestion can take care of installing the required opam root properly. The remaining ingestion uses the opam binary to give the versions of the given package. Then, for each version, the loader uses the opam binary to list the tarball url to fetch and ingest. """ visit_type = "opam" def __init__( self, storage: StorageInterface, url: str, opam_root: str, opam_instance: str, opam_url: str, opam_package: str, initialize_opam_root: bool = False, **kwargs: Any, ): super().__init__(storage=storage, url=url, **kwargs) self.opam_root = opam_root self.opam_instance = opam_instance self.opam_url = opam_url self.opam_package = opam_package self.initialize_opam_root = initialize_opam_root def get_package_dir(self) -> str: return ( f"{self.opam_root}/repo/{self.opam_instance}/packages/{self.opam_package}" ) def get_package_name(self, version: str) -> str: return f"{self.opam_package}.{version}" def get_package_file(self, version: str) -> str: return f"{self.get_package_dir()}/{self.get_package_name(version)}/opam" def get_metadata_authority(self): return MetadataAuthority(type=MetadataAuthorityType.FORGE, url=self.opam_url) @cached_method def _compute_versions(self) -> List[str]: """Compute the versions using opam internals Raises: ValueError in case the lister is not able to determine the list of versions Returns: The list of versions for the package """ # TODO: use `opam show` instead of this workaround when it supports the `--repo` # flag package_dir = self.get_package_dir() if not os.path.exists(package_dir): raise ValueError( f"can't get versions for package {self.opam_package} " f"(at url {self.origin.url})." ) versions = [ ".".join(version.split(".")[1:]) for version in os.listdir(package_dir) ] if not versions: raise ValueError( f"can't get versions for package {self.opam_package} " f"(at url {self.origin.url})" ) versions.sort() return versions def get_versions(self) -> List[str]: """First initialize the opam root directory if needed, then start listing the package versions. Raises: ValueError in case the lister is not able to determine the list of versions or if the opam root directory is invalid.
""" if self.initialize_opam_root: # for standalone loader (e.g docker), loader must initialize the opam root # folder call( [ "opam", "init", "--reinit", "--bare", "--no-setup", "--root", self.opam_root, self.opam_instance, self.opam_url, ] ) else: # for standard/production loaders, no need to initialize the opam root # folder. It must be present though so check for it, if not present, raise if not os.path.isfile(os.path.join(self.opam_root, "config")): # so if not correctly setup, raise immediately raise ValueError("Invalid opam root") return self._compute_versions() def get_default_version(self) -> str: """Return the most recent version of the package as default.""" return self._compute_versions()[-1] def _opam_show_args(self, version: str): package_file = self.get_package_file(version) return [ "opam", "show", "--color", "never", "--safe", "--normalise", "--root", self.opam_root, "--file", package_file, ] def get_enclosed_single_line_field(self, field, version) -> Optional[str]: result = opam_read(self._opam_show_args(version) + ["--field", field]) # Sanitize the result if any (remove trailing \n and enclosing ") return result.strip().strip('"') if result else None def get_package_info(self, version: str) -> Iterator[Tuple[str, OpamPackageInfo]]: url = self.get_enclosed_single_line_field("url.src:", version) if url is None: raise ValueError( - f"can't get field url.src: for version {version} of package {self.opam_package}" - f" (at url {self.origin.url}) from `opam show`" + f"can't get field url.src: for version {version} of package" + f" {self.opam_package} (at url {self.origin.url}) from `opam show`" ) authors_field = self.get_enclosed_single_line_field("authors:", version) fullname = b"" if authors_field is None else str.encode(authors_field) author = Person.from_fullname(fullname) maintainer_field = self.get_enclosed_single_line_field("maintainer:", version) fullname = b"" if maintainer_field is None else str.encode(maintainer_field) committer = Person.from_fullname(fullname) with Popen(self._opam_show_args(version) + ["--raw"], stdout=PIPE) as proc: assert proc.stdout is not None metadata = proc.stdout.read() yield self.get_package_name(version), OpamPackageInfo( url=url, filename=None, author=author, committer=committer, version=version, directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( metadata=metadata, format="opam-package-definition", ) ], ) def build_release( self, p_info: OpamPackageInfo, uncompressed_path: str, directory: Sha1Git, ) -> Optional[Release]: msg = ( f"Synthetic release for OPAM source package {self.opam_package} " f"version {p_info.version}\n" ) return Release( name=p_info.version.encode(), author=p_info.author, message=msg.encode(), date=None, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) diff --git a/swh/loader/package/tests/test_loader.py b/swh/loader/package/tests/test_loader.py index 18a770c..ccf47c3 100644 --- a/swh/loader/package/tests/test_loader.py +++ b/swh/loader/package/tests/test_loader.py @@ -1,520 +1,520 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib import logging import string from unittest.mock import Mock, call, patch import attr import pytest from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.model.model import ( Origin, OriginVisit, OriginVisitStatus, 
Person, Release, Revision, RevisionType, Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone, ) from swh.model.model import ExtID from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ObjectType from swh.storage import get_storage from swh.storage.algos.snapshot import snapshot_get_latest class FakeStorage: def origin_add(self, origins): raise ValueError("We refuse to add an origin") def origin_visit_get_latest(self, origin): return None class FakeStorage2(FakeStorage): def origin_add(self, origins): pass def origin_visit_add(self, visits): raise ValueError("We refuse to add an origin visit") class StubPackageInfo(BasePackageInfo): pass class StubPackageLoader(PackageLoader[StubPackageInfo]): visit_type = "stub" def get_versions(self): return ["v1.0", "v2.0", "v3.0", "v4.0"] def get_package_info(self, version): p_info = StubPackageInfo( "http://example.org", f"example-{version}.tar", version=version ) extid_type = "extid-type1" if version in ("v1.0", "v2.0") else "extid-type2" # Versions 1.0 and 2.0 have an extid of a given type, v3.0 has an extid # of a different type patch.object( p_info, "extid", return_value=(extid_type, 0, f"extid-of-{version}".encode()), autospec=True, ).start() yield (f"branch-{version}", p_info) def _load_release(self, p_info, origin): return None def test_loader_origin_visit_failure(swh_storage): """Failure to add an origin or origin visit should fail immediately""" - loader = PackageLoader(swh_storage, "some-url") + loader = StubPackageLoader(swh_storage, "some-url") loader.storage = FakeStorage() actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} loader.storage = FakeStorage2() actual_load_status2 = loader.load() assert actual_load_status2 == {"status": "failed"} def test_resolve_object_from_extids() -> None: storage = get_storage("memory") target = b"\x01" * 20 rel1 = Release( name=b"aaaa", message=b"aaaa", target=target, target_type=ModelObjectType.DIRECTORY, synthetic=False, ) rel2 = Release( name=b"bbbb", message=b"bbbb", target=target, target_type=ModelObjectType.DIRECTORY, synthetic=False, ) storage.release_add([rel1, rel2]) - loader = PackageLoader(storage, "http://example.org/") # type: ignore + loader = StubPackageLoader(storage, "http://example.org/") p_info = Mock(wraps=BasePackageInfo(None, None, None)) # type: ignore # The PackageInfo does not support extids p_info.extid.return_value = None known_extids = {("extid-type", 0, b"extid-of-aaaa"): [rel1.swhid()]} whitelist = {b"unused"} assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None # Some known extid, and the PackageInfo is not one of them (ie. cache miss) p_info.extid.return_value = ("extid-type", 0, b"extid-of-cccc") assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None # Some known extid, and the PackageInfo is one of them (ie. cache hit), # but the target release was not in the previous snapshot p_info.extid.return_value = ("extid-type", 0, b"extid-of-aaaa") assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None # Some known extid, and the PackageInfo is one of them (ie.
def test_resolve_object_from_extids() -> None:
    storage = get_storage("memory")
    target = b"\x01" * 20
    rel1 = Release(
        name=b"aaaa",
        message=b"aaaa",
        target=target,
        target_type=ModelObjectType.DIRECTORY,
        synthetic=False,
    )
    rel2 = Release(
        name=b"bbbb",
        message=b"bbbb",
        target=target,
        target_type=ModelObjectType.DIRECTORY,
        synthetic=False,
    )
    storage.release_add([rel1, rel2])

-    loader = PackageLoader(storage, "http://example.org/")  # type: ignore
+    loader = StubPackageLoader(storage, "http://example.org/")

    p_info = Mock(wraps=BasePackageInfo(None, None, None))  # type: ignore

    # The PackageInfo does not support extids
    p_info.extid.return_value = None
    known_extids = {("extid-type", 0, b"extid-of-aaaa"): [rel1.swhid()]}
    whitelist = {b"unused"}
    assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None

    # Some known extids, and the PackageInfo is not one of them (i.e. cache miss)
    p_info.extid.return_value = ("extid-type", 0, b"extid-of-cccc")
    assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None

    # Some known extids, and the PackageInfo is one of them (i.e. cache hit),
    # but the target release was not in the previous snapshot
    p_info.extid.return_value = ("extid-type", 0, b"extid-of-aaaa")
    assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None

    # Some known extids, and the PackageInfo is one of them (i.e. cache hit),
    # and the target release was in the previous snapshot
    whitelist = {rel1.id}
    assert (
        loader.resolve_object_from_extids(known_extids, p_info, whitelist)
        == rel1.swhid()
    )

    # Same as before, but there is more than one extid, and only one is an allowed
    # release
    whitelist = {rel1.id}
    known_extids = {("extid-type", 0, b"extid-of-aaaa"): [rel2.swhid(), rel1.swhid()]}
    assert (
        loader.resolve_object_from_extids(known_extids, p_info, whitelist)
        == rel1.swhid()
    )


def test_resolve_object_from_extids_missing_target() -> None:
    storage = get_storage("memory")

    target = b"\x01" * 20
    rel = Release(
        name=b"aaaa",
        message=b"aaaa",
        target=target,
        target_type=ModelObjectType.DIRECTORY,
        synthetic=False,
    )

-    loader = PackageLoader(storage, "http://example.org/")  # type: ignore
+    loader = StubPackageLoader(storage, "http://example.org/")

    p_info = Mock(wraps=BasePackageInfo(None, None, None))  # type: ignore

    known_extids = {("extid-type", 0, b"extid-of-aaaa"): [rel.swhid()]}
    p_info.extid.return_value = ("extid-type", 0, b"extid-of-aaaa")
    whitelist = {rel.id}

    # The targeted release is missing from the storage
    assert loader.resolve_object_from_extids(known_extids, p_info, whitelist) is None

    storage.release_add([rel])

    # The targeted release now exists
    assert (
        loader.resolve_object_from_extids(known_extids, p_info, whitelist)
        == rel.swhid()
    )


def test_load_get_known_extids() -> None:
    """Checks PackageLoader.load() fetches known extids efficiently"""
    storage = Mock(wraps=get_storage("memory"))

    loader = StubPackageLoader(storage, "http://example.org")

    loader.load()

    # Calls should be grouped by extid type
    storage.extid_get_from_extid.assert_has_calls(
        [
            call("extid-type1", [b"extid-of-v1.0", b"extid-of-v2.0"], version=0),
            call("extid-type2", [b"extid-of-v3.0", b"extid-of-v4.0"], version=0),
        ],
        any_order=True,
    )
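# Hedged sketch, an assumption about the loader's internals rather than its
# actual code, matching what test_load_get_known_extids asserts above: extid
# lookups are batched so storage is queried once per extid type rather than
# once per version. The helper name is hypothetical.
from collections import defaultdict

def _sketch_group_extids(extids):
    by_type = defaultdict(list)
    for extid_type, extid_version, extid in extids:
        by_type[extid_type].append(extid)
    # each group then maps to a single call:
    # storage.extid_get_from_extid(extid_type, by_type[extid_type], version=0)
    return by_type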
def test_load_extids() -> None:
    """Checks PackageLoader.load() skips iff it should, and writes (only)
    the new ExtIDs"""
    storage = get_storage("memory")

    dir_swhid = CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=b"e" * 20)

    rels = [
        Release(
            name=f"v{i}.0".encode(),
            message=b"blah\n",
            target=dir_swhid.object_id,
            target_type=ModelObjectType.DIRECTORY,
            synthetic=True,
        )
        for i in (1, 2, 3, 4)
    ]
    storage.release_add(rels[0:3])

    origin = "http://example.org"
    rel1_swhid = rels[0].swhid()
    rel2_swhid = rels[1].swhid()
    rel3_swhid = rels[2].swhid()
    rel4_swhid = rels[3].swhid()

    # Results of a previous load
    storage.extid_add(
        [
            ExtID("extid-type1", b"extid-of-v1.0", rel1_swhid),
            ExtID("extid-type2", b"extid-of-v2.0", rel2_swhid),
        ]
    )
    last_snapshot = Snapshot(
        branches={
            b"v1.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel1_swhid.object_id
            ),
            b"v2.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel2_swhid.object_id
            ),
            b"v3.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel3_swhid.object_id
            ),
        }
    )
    storage.snapshot_add([last_snapshot])
    date = datetime.datetime.now(tz=datetime.timezone.utc)
    storage.origin_add([Origin(url=origin)])
    storage.origin_visit_add(
        [OriginVisit(origin="http://example.org", visit=1, date=date, type="tar")]
    )
    storage.origin_visit_status_add(
        [
            OriginVisitStatus(
                origin=origin,
                visit=1,
                status="full",
                date=date,
                snapshot=last_snapshot.id,
            )
        ]
    )

    loader = StubPackageLoader(storage, "http://example.org")
    patch.object(
        loader,
        "_load_release",
        return_value=(rel4_swhid.object_id, dir_swhid.object_id),
        autospec=True,
    ).start()

    loader.load()

    assert loader._load_release.mock_calls == [  # type: ignore
        # v1.0: not loaded because its (extid_type, extid, rel) is already
        #       in the storage.
        # v2.0: loaded, because there is already a similar extid, but of a
        #       different type
        call(
            StubPackageInfo(origin, "example-v2.0.tar", "v2.0"),
            Origin(url=origin),
        ),
        # v3.0: loaded despite having an (extid_type, extid) in storage, because
        #       the target of the extid is not in the previous snapshot
        call(
            StubPackageInfo(origin, "example-v3.0.tar", "v3.0"),
            Origin(url=origin),
        ),
        # v4.0: loaded, because its extid is not in the storage yet
        call(
            StubPackageInfo(origin, "example-v4.0.tar", "v4.0"),
            Origin(url=origin),
        ),
    ]

    # Then check the snapshot has all the branches.
    # Versions 2.0 to 4.0 all point to rel4_swhid (instead of the value in the last
    # snapshot), because they had to be loaded (mismatched extid), and the mocked
    # _load_release always returns rel4_swhid.
    snapshot = Snapshot(
        branches={
            b"branch-v1.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel1_swhid.object_id
            ),
            b"branch-v2.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel4_swhid.object_id
            ),
            b"branch-v3.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel4_swhid.object_id
            ),
            b"branch-v4.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel4_swhid.object_id
            ),
        }
    )
    assert snapshot_get_latest(storage, origin) == snapshot

    extids = storage.extid_get_from_target(
        ObjectType.RELEASE,
        [
            rel1_swhid.object_id,
            rel2_swhid.object_id,
            rel3_swhid.object_id,
            rel4_swhid.object_id,
        ],
    )

    assert set(extids) == {
        # What we inserted at the beginning of the test:
        ExtID("extid-type1", b"extid-of-v1.0", rel1_swhid),
        ExtID("extid-type2", b"extid-of-v2.0", rel2_swhid),
        # Added by the loader:
        ExtID("extid-type1", b"extid-of-v2.0", rel4_swhid),
        ExtID("extid-type2", b"extid-of-v3.0", rel4_swhid),
        ExtID("extid-type2", b"extid-of-v4.0", rel4_swhid),
    }
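# Hedged sketch, an assumption about the loader's internals that is consistent
# with the v3.0 scenario above but not taken from the diff: the set of release
# ids a cached extid is allowed to resolve to is derived from the previous
# snapshot's release branches. The helper name is hypothetical.
def _sketch_extid_whitelist(last_snapshot):
    return {
        branch.target
        for branch in last_snapshot.branches.values()
        if branch is not None and branch.target_type == TargetType.RELEASE
    }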
def test_load_upgrade_from_revision_extids(caplog):
    """Tests that, when loading incrementally based on a snapshot made by an old
    version of the loader, the loader will convert revisions to releases
    and add them to the storage.

    Also checks that, if an extid exists pointing to a non-existent revision
    (which should never happen, but you never know...), the release is loaded from
    scratch."""

    storage = get_storage("memory")

    origin = "http://example.org"
    dir1_swhid = CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=b"d" * 20)
    dir2_swhid = CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=b"e" * 20)

    date = TimestampWithTimezone.from_datetime(
        datetime.datetime.now(tz=datetime.timezone.utc)
    )
    person = Person.from_fullname(b"Jane Doe <jdoe@example.org>")

    rev1 = Revision(
        message=b"blah",
        author=person,
        date=date,
        committer=person,
        committer_date=date,
        directory=dir1_swhid.object_id,
        type=RevisionType.TAR,
        synthetic=True,
    )
    rel1 = Release(
        name=b"v1.0",
        message=b"blah\n",
        author=person,
        date=date,
        target=dir1_swhid.object_id,
        target_type=ModelObjectType.DIRECTORY,
        synthetic=True,
    )
    rev1_swhid = rev1.swhid()
    rel1_swhid = rel1.swhid()
    rev2_swhid = CoreSWHID(object_type=ObjectType.REVISION, object_id=b"b" * 20)
    rel2_swhid = CoreSWHID(object_type=ObjectType.RELEASE, object_id=b"c" * 20)

    # Results of a previous load
    storage.extid_add(
        [
            ExtID("extid-type1", b"extid-of-v1.0", rev1_swhid, 0),
            ExtID("extid-type1", b"extid-of-v2.0", rev2_swhid, 0),
        ]
    )
    storage.revision_add([rev1])
    last_snapshot = Snapshot(
        branches={
            b"v1.0": SnapshotBranch(
                target_type=TargetType.REVISION, target=rev1_swhid.object_id
            ),
            b"v2.0": SnapshotBranch(
                target_type=TargetType.REVISION, target=rev2_swhid.object_id
            ),
        }
    )
    storage.snapshot_add([last_snapshot])
    date = datetime.datetime.now(tz=datetime.timezone.utc)
    storage.origin_add([Origin(url=origin)])
    storage.origin_visit_add(
        [OriginVisit(origin="http://example.org", visit=1, date=date, type="tar")]
    )
    storage.origin_visit_status_add(
        [
            OriginVisitStatus(
                origin=origin,
                visit=1,
                status="full",
                date=date,
                snapshot=last_snapshot.id,
            )
        ]
    )

    loader = StubPackageLoader(storage, "http://example.org")
    patch.object(
        loader,
        "_load_release",
        return_value=(rel2_swhid.object_id, dir2_swhid.object_id),
        autospec=True,
    ).start()
    patch.object(
        loader,
        "get_versions",
        return_value=["v1.0", "v2.0", "v3.0"],
        autospec=True,
    ).start()

    caplog.set_level(logging.ERROR)

    loader.load()

    assert len(caplog.records) == 1
    (record,) = caplog.records
    assert record.levelname == "ERROR"
    assert "Failed to upgrade branch branch-v2.0" in record.message

    assert loader._load_release.mock_calls == [
        # v1.0: not loaded because there is already a revision matching it
        # v2.0: loaded, as the revision is missing from the storage even though there
        #       is an extid
        call(StubPackageInfo(origin, "example-v2.0.tar", "v2.0"), Origin(url=origin)),
        # v3.0: loaded (did not exist yet)
        call(StubPackageInfo(origin, "example-v3.0.tar", "v3.0"), Origin(url=origin)),
    ]

    snapshot = Snapshot(
        branches={
            b"branch-v1.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel1_swhid.object_id
            ),
            b"branch-v2.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel2_swhid.object_id
            ),
            b"branch-v3.0": SnapshotBranch(
                target_type=TargetType.RELEASE, target=rel2_swhid.object_id
            ),
        }
    )
    assert snapshot_get_latest(storage, origin) == snapshot

    extids = storage.extid_get_from_target(
        ObjectType.RELEASE,
        [
            rel1_swhid.object_id,
            rel2_swhid.object_id,
        ],
    )

    assert set(extids) == {
        ExtID("extid-type1", b"extid-of-v1.0", rel1_swhid),
        ExtID("extid-type1", b"extid-of-v2.0", rel2_swhid),
        ExtID("extid-type2", b"extid-of-v3.0", rel2_swhid),
    }
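# Hedged sketch, an assumption rather than the loader's actual upgrade code:
# how a revision written by an old loader version could be converted into the
# release the new loader expects, keeping the author, date and target
# directory. The helper name is hypothetical.
def _sketch_release_from_revision(rev: Revision, name: bytes) -> Release:
    return Release(
        name=name,
        message=rev.message,
        author=rev.author,
        date=rev.date,
        target=rev.directory,
        target_type=ModelObjectType.DIRECTORY,
        synthetic=True,
    )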
def test_manifest_extid():
    """Computing the primary key should return the right identity"""

    @attr.s
    class TestPackageInfo(BasePackageInfo):
        a = attr.ib()
        b = attr.ib()
        length = attr.ib()
        filename = attr.ib()

        MANIFEST_FORMAT = string.Template("$a $b")

    p_info = TestPackageInfo(
        url="http://example.org/",
        a=1,
        b=2,
        length=221837,
        filename="8sync-0.1.0.tar.gz",
        version="0.1.0",
    )

    actual_id = p_info.extid()
    assert actual_id == ("package-manifest-sha256", 0, hashlib.sha256(b"1 2").digest())


def test_no_env_swh_config_filename_raise(monkeypatch):
    """Package loader initialization should raise when the SWH_CONFIG_FILENAME
    environment variable is undefined"""

    class DummyPackageLoader(PackageLoader):
        """A dummy package loader for test purposes"""

        pass

    monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False)

    with pytest.raises(
        AssertionError, match="SWH_CONFIG_FILENAME environment variable is undefined"
    ):
        DummyPackageLoader.from_configfile(url="some-url")
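# Hedged sketch mirroring the assertion in test_manifest_extid above: the
# manifest-based extid is the sha256 of the MANIFEST_FORMAT template rendered
# with the package info's fields. The helper name is hypothetical.
def _sketch_manifest_extid():
    manifest = string.Template("$a $b").substitute({"a": 1, "b": 2})
    return ("package-manifest-sha256", 0, hashlib.sha256(manifest.encode()).digest())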