diff --git a/swh/loader/cli.py b/swh/loader/cli.py index 4fc20c8..f23f7ff 100644 --- a/swh/loader/cli.py +++ b/swh/loader/cli.py @@ -1,107 +1,130 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information # WARNING: do not import unnecessary things here to keep cli startup time under # control import logging from typing import Any import click import pkg_resources from swh.core.cli import CONTEXT_SETTINGS from swh.core.cli import swh as swh_cli_group logger = logging.getLogger(__name__) LOADERS = { entry_point.name.split(".", 1)[1]: entry_point for entry_point in pkg_resources.iter_entry_points("swh.workers") if entry_point.name.split(".", 1)[0] == "loader" } SUPPORTED_LOADERS = sorted(list(LOADERS)) def get_loader(name: str, **kwargs) -> Any: """Given a loader name, instantiate it. Args: name: Loader's name kwargs: Configuration dict (url...) 
Returns: An instantiated loader """ if name not in LOADERS: raise ValueError( "Invalid loader %s: only supported loaders are %s" % (name, SUPPORTED_LOADERS) ) registry_entry = LOADERS[name].load()() logger.debug(f"registry: {registry_entry}") loader_cls = registry_entry["loader"] logger.debug(f"loader class: {loader_cls}") - return loader_cls(**kwargs) + return loader_cls.from_config(**kwargs) @swh_cli_group.group(name="loader", context_settings=CONTEXT_SETTINGS) +@click.option( + "--config-file", + "-C", + default=None, + type=click.Path(exists=True, dir_okay=False,), + help="Configuration file.", +) @click.pass_context -def loader(ctx): +def loader(ctx, config_file): """Loader cli tools """ - pass + from os import environ + + from swh.core.config import read + + ctx.ensure_object(dict) + logger.debug("ctx: %s", ctx) + + if not config_file: + config_file = environ.get("SWH_CONFIG_FILENAME") + + ctx.obj["config"] = read(config_file) + logger.debug("config_file: %s", config_file) + logger.debug("config: ", ctx.obj["config"]) @loader.command(name="run", context_settings=CONTEXT_SETTINGS) @click.argument("type", type=click.Choice(SUPPORTED_LOADERS)) @click.argument("url") @click.argument("options", nargs=-1) @click.pass_context def run(ctx, type, url, options): """Ingest with loader the origin located at """ import iso8601 from swh.scheduler.cli.utils import parse_options + conf = ctx.obj.get("config", {}) + if "storage" not in conf: + raise ValueError("Missing storage configuration key") + (_, kw) = parse_options(options) logger.debug(f"kw: {kw}") visit_date = kw.get("visit_date") if visit_date and isinstance(visit_date, str): visit_date = iso8601.parse_date(visit_date) kw["visit_date"] = visit_date - loader = get_loader(type, url=url, **kw) + loader = get_loader(type, url=url, storage=conf["storage"], **kw) result = loader.load() click.echo(result) @loader.command(name="list", context_settings=CONTEXT_SETTINGS) @click.argument("type", default="all", 
type=click.Choice(["all"] + SUPPORTED_LOADERS)) @click.pass_context def list(ctx, type): """List supported loaders and optionally their arguments""" import inspect if type == "all": loaders = ", ".join(SUPPORTED_LOADERS) click.echo(f"Supported loaders: {loaders}") else: registry_entry = LOADERS[type].load()() loader_cls = registry_entry["loader"] doc = inspect.getdoc(loader_cls).strip() # Hack to get the signature of the class even though it subclasses # Generic, which reimplements __new__. # See signature = inspect.signature(loader_cls.__init__) signature_str = str(signature).replace("self, ", "") click.echo(f"Loader: {doc}\nsignature: {signature_str}") diff --git a/swh/loader/core/loader.py b/swh/loader/core/loader.py index 09204a9..95f441f 100644 --- a/swh/loader/core/loader.py +++ b/swh/loader/core/loader.py @@ -1,449 +1,494 @@ # Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -from abc import ABCMeta, abstractmethod import datetime import hashlib import logging import os from typing import Any, Dict, Iterable, Optional from swh.core.config import load_from_envvar from swh.loader.exception import NotFound from swh.model.model import ( BaseContent, Content, Directory, Origin, OriginVisit, OriginVisitStatus, Release, Revision, Sha1Git, SkippedContent, Snapshot, ) from swh.storage import get_storage +from swh.storage.interface import StorageInterface from swh.storage.utils import now DEFAULT_CONFIG: Dict[str, Any] = { "max_content_size": 100 * 1024 * 1024, - "save_data": False, - "save_data_path": "", - "storage": {"cls": "memory"}, } -class BaseLoader(metaclass=ABCMeta): - """Mixin base class for loader. +class Loader: + """The base class for a Software Heritage Loader. 
- To use this class, you must: + A loader retrieves origin information (git/mercurial/svn repositories, pypi/npm/... + package artifacts), ingests the contents/directories/revisions/releases/snapshot to + the storage backend. + + For now, this just exposes 2 static methods (from_config, from_configfile) to + centralize and ease the loader instantiation. + + Args: + storage: the instance of the Storage being used to register the + origin information + + """ + + def __init__( + self, storage: StorageInterface, max_content_size: Optional[int] = None, + ): + self.storage = storage + self.max_content_size = int(max_content_size) if max_content_size else None + + @classmethod + def from_config(cls, storage: Dict[str, Any], **config: Any): + """Instantiate a loader from a configuration dict. + + This is basically a backwards-compatibility shim for the CLI. + + Args: + storage: instantiation config for the storage + config: the configuration dict for the loader, with the following keys: + - credentials (optional): credentials list for the scheduler + - any other kwargs passed to the loader. + + Returns: + the instantiated loader + """ + # Drop the legacy config keys which aren't used for this generation of loader. + for legacy_key in ("storage", "celery"): + config.pop(legacy_key, None) + + # Instantiate the storage + storage_instance = get_storage(**storage) + return cls(storage=storage_instance, **config) + + @classmethod + def from_configfile(cls, **kwargs: Any): + """Instantiate a loader from the configuration loaded from the + SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their + value is not None. + + Args: + kwargs: kwargs passed to the loader instantiation + + """ + config = dict(load_from_envvar(DEFAULT_CONFIG)) + config.update({k: v for k, v in kwargs.items() if v is not None}) + return cls.from_config(**config) + + +class BaseLoader(Loader): + """Mixin base class for (D)VCS loaders (e.g svn, git, mercurial, ...). 
+ + To define such loaders, you must: - inherit from this class - - and implement the @abstractmethod methods: + - and implement following methods: - :func:`prepare`: First step executed by the loader to prepare some state needed by the `func`:load method. - :func:`get_origin`: Retrieve the origin that is currently being loaded. - :func:`fetch_data`: Fetch the data is actually the method to implement to compute data to inject in swh (through the store_data method) - :func:`store_data`: Store data fetched. - :func:`visit_status`: Explicit status of the visit ('partial' or 'full') - :func:`load_status`: Explicit status of the loading, for use by the scheduler (eventful/uneventful/temporary failure/permanent failure). - :func:`cleanup`: Last step executed by the loader. The entry point for the resulting loader is :func:`load`. You can take a look at some example classes: - :class:`SvnLoader` """ def __init__( self, + storage: StorageInterface, logging_class: Optional[str] = None, - config: Optional[Dict[str, Any]] = None, + save_data_path: Optional[str] = None, + max_content_size: Optional[int] = None, ): - if config: - self.config = config - else: - self.config = load_from_envvar(DEFAULT_CONFIG) - - self.storage = get_storage(**self.config["storage"]) + super().__init__(storage=storage, max_content_size=max_content_size) if logging_class is None: logging_class = "%s.%s" % ( self.__class__.__module__, self.__class__.__name__, ) self.log = logging.getLogger(logging_class) _log = logging.getLogger("requests.packages.urllib3.connectionpool") _log.setLevel(logging.WARN) - self.max_content_size = self.config["max_content_size"] - # possibly overridden in self.prepare method self.visit_date: Optional[datetime.datetime] = None - self.origin: Optional[Origin] = None if not hasattr(self, "visit_type"): self.visit_type: Optional[str] = None self.origin_metadata: Dict[str, Any] = {} - self.loaded_snapshot_id: Optional[Sha1Git] = None - # Make sure the config is sane - save_data 
= self.config.get("save_data") - if save_data: - path = self.config["save_data_path"] + if save_data_path: + path = save_data_path os.stat(path) if not os.access(path, os.R_OK | os.W_OK): raise PermissionError("Permission denied: %r" % path) + self.save_data_path = save_data_path + def save_data(self) -> None: """Save the data associated to the current load""" raise NotImplementedError def get_save_data_path(self) -> str: """The path to which we archive the loader's raw data""" if not hasattr(self, "__save_data_path"): year = str(self.visit_date.year) # type: ignore assert self.origin url = self.origin.url.encode("utf-8") origin_url_hash = hashlib.sha1(url).hexdigest() path = "%s/sha1:%s/%s/%s" % ( - self.config["save_data_path"], + self.save_data_path, origin_url_hash[0:2], origin_url_hash, year, ) os.makedirs(path, exist_ok=True) self.__save_data_path = path return self.__save_data_path def flush(self) -> None: """Flush any potential buffered data not sent to swh-storage. """ self.storage.flush() - @abstractmethod def cleanup(self) -> None: """Last step executed by the loader. """ - pass + raise NotImplementedError - @abstractmethod def prepare_origin_visit(self, *args, **kwargs) -> None: """First step executed by the loader to prepare origin and visit references. Set/update self.origin, and optionally self.origin_url, self.visit_date. """ - pass + raise NotImplementedError def _store_origin_visit(self) -> None: """Store origin and visit references. Sets the self.visit references. 
""" assert self.origin self.storage.origin_add([self.origin]) if not self.visit_date: # now as default visit_date if not provided self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) assert isinstance(self.visit_date, datetime.datetime) assert isinstance(self.visit_type, str) self.visit = list( self.storage.origin_visit_add( [ OriginVisit( origin=self.origin.url, date=self.visit_date, type=self.visit_type, ) ] ) )[0] - @abstractmethod def prepare(self, *args, **kwargs) -> None: """Second step executed by the loader to prepare some state needed by the loader. Raises NotFound exception if the origin to ingest is not found. """ - pass + raise NotImplementedError def get_origin(self) -> Origin: """Get the origin that is currently being loaded. self.origin should be set in :func:`prepare_origin` Returns: dict: an origin ready to be sent to storage by :func:`origin_add`. """ assert self.origin return self.origin - @abstractmethod def fetch_data(self) -> bool: """Fetch the data from the source the loader is currently loading (ex: git/hg/svn/... repository). Returns: a value that is interpreted as a boolean. If True, fetch_data needs to be called again to complete loading. """ - pass + raise NotImplementedError - @abstractmethod def store_data(self): """Store fetched data in the database. Should call the :func:`maybe_load_xyz` methods, which handle the bundles sent to storage, rather than send directly. """ - pass + raise NotImplementedError def store_metadata(self) -> None: """Store fetched metadata in the database. For more information, see implementation in :class:`DepositLoader`. """ pass def load_status(self) -> Dict[str, str]: """Detailed loading status. Defaults to logging an eventful load. Returns: a dictionary that is eventually passed back as the task's result to the scheduler, allowing tuning of the task recurrence mechanism. 
""" return { "status": "eventful", } def post_load(self, success: bool = True) -> None: """Permit the loader to do some additional actions according to status after the loading is done. The flag success indicates the loading's status. Defaults to doing nothing. This is up to the implementer of this method to make sure this does not break. Args: success (bool): the success status of the loading """ pass def visit_status(self) -> str: """Detailed visit status. Defaults to logging a full visit. """ return "full" def pre_cleanup(self) -> None: """As a first step, will try and check for dangling data to cleanup. This should do its best to avoid raising issues. """ pass def load(self, *args, **kwargs) -> Dict[str, str]: r"""Loading logic for the loader to follow: - 1. Call :meth:`prepare_origin_visit` to prepare the origin and visit we will associate loading data to - 2. Store the actual ``origin_visit`` to storage - 3. Call :meth:`prepare` to prepare any eventual state - 4. Call :meth:`get_origin` to get the origin we work with and store - while True: - 5. Call :meth:`fetch_data` to fetch the data to store - 6. Call :meth:`store_data` to store the data - 7. Call :meth:`cleanup` to clean up any eventual state put in place in :meth:`prepare` method. """ try: self.pre_cleanup() except Exception: msg = "Cleaning up dangling data failed! Continue loading." 
self.log.warning(msg) self.prepare_origin_visit(*args, **kwargs) self._store_origin_visit() assert ( self.origin ), "The method `prepare_origin_visit` call should set the origin (Origin)" assert ( self.visit.visit ), "The method `_store_origin_visit` should set the visit (OriginVisit)" self.log.info( "Load origin '%s' with type '%s'", self.origin.url, self.visit.type ) try: self.prepare(*args, **kwargs) while True: more_data_to_fetch = self.fetch_data() self.store_data() if not more_data_to_fetch: break self.store_metadata() visit_status = OriginVisitStatus( origin=self.origin.url, visit=self.visit.visit, type=self.visit_type, date=now(), status=self.visit_status(), snapshot=self.loaded_snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) self.post_load() except Exception as e: if isinstance(e, NotFound): status = "not_found" task_status = "uneventful" else: status = "partial" if self.loaded_snapshot_id else "failed" task_status = "failed" self.log.exception( "Loading failure, updating to `%s` status", status, extra={"swh_task_args": args, "swh_task_kwargs": kwargs,}, ) visit_status = OriginVisitStatus( origin=self.origin.url, visit=self.visit.visit, type=self.visit_type, date=now(), status=status, snapshot=self.loaded_snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) self.post_load(success=False) return {"status": task_status} finally: self.flush() self.cleanup() return self.load_status() class DVCSLoader(BaseLoader): """This base class is a pattern for dvcs loaders (e.g. git, mercurial). Those loaders are able to load all the data in one go. For example, the loader defined in swh-loader-git :class:`BulkUpdater`. For other loaders (stateful one, (e.g :class:`SWHSvnLoader`), inherit directly from :class:`BaseLoader`. 
""" def cleanup(self) -> None: """Clean up an eventual state installed for computations.""" pass def has_contents(self) -> bool: """Checks whether we need to load contents""" return True def get_contents(self) -> Iterable[BaseContent]: """Get the contents that need to be loaded""" raise NotImplementedError def has_directories(self) -> bool: """Checks whether we need to load directories""" return True def get_directories(self) -> Iterable[Directory]: """Get the directories that need to be loaded""" raise NotImplementedError def has_revisions(self) -> bool: """Checks whether we need to load revisions""" return True def get_revisions(self) -> Iterable[Revision]: """Get the revisions that need to be loaded""" raise NotImplementedError def has_releases(self) -> bool: """Checks whether we need to load releases""" return True def get_releases(self) -> Iterable[Release]: """Get the releases that need to be loaded""" raise NotImplementedError def get_snapshot(self) -> Snapshot: """Get the snapshot that needs to be loaded""" raise NotImplementedError def eventful(self) -> bool: """Whether the load was eventful""" raise NotImplementedError def store_data(self) -> None: assert self.origin - if self.config.get("save_data"): + if self.save_data_path: self.save_data() if self.has_contents(): for obj in self.get_contents(): if isinstance(obj, Content): self.storage.content_add([obj]) elif isinstance(obj, SkippedContent): self.storage.skipped_content_add([obj]) else: raise TypeError(f"Unexpected content type: {obj}") if self.has_directories(): for directory in self.get_directories(): self.storage.directory_add([directory]) if self.has_revisions(): for revision in self.get_revisions(): self.storage.revision_add([revision]) if self.has_releases(): for release in self.get_releases(): self.storage.release_add([release]) snapshot = self.get_snapshot() self.storage.snapshot_add([snapshot]) self.flush() self.loaded_snapshot_id = snapshot.id diff --git 
a/swh/loader/core/tests/test_loader.py b/swh/loader/core/tests/test_loader.py index 80600b9..2ac2636 100644 --- a/swh/loader/core/tests/test_loader.py +++ b/swh/loader/core/tests/test_loader.py @@ -1,240 +1,237 @@ # Copyright (C) 2018-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib import logging -from swh.loader.core.loader import DEFAULT_CONFIG, BaseLoader, DVCSLoader +from swh.loader.core.loader import BaseLoader, DVCSLoader from swh.loader.exception import NotFound from swh.loader.tests import assert_last_visit_matches from swh.model.hashutil import hash_to_bytes from swh.model.model import Origin, OriginVisit, Snapshot ORIGIN = Origin(url="some-url") class DummyLoader: """Base Loader to overload and simplify the base class (technical: to avoid repetition in other *Loader classes)""" def cleanup(self): pass def prepare(self, *args, **kwargs): pass def fetch_data(self): pass def get_snapshot_id(self): return None def prepare_origin_visit(self, *args, **kwargs): self.origin = ORIGIN self.origin_url = ORIGIN.url self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) self.visit_type = "git" self.storage.origin_add([ORIGIN]) visit = OriginVisit( origin=self.origin_url, date=self.visit_date, type=self.visit_type, ) self.visit = self.storage.origin_visit_add([visit])[0] class DummyDVCSLoader(DummyLoader, DVCSLoader): """DVCS Loader that does nothing in regards to DAG objects. 
""" def get_contents(self): return [] def get_directories(self): return [] def get_revisions(self): return [] def get_releases(self): return [] def get_snapshot(self): return Snapshot(branches={}) def eventful(self): return False class DummyBaseLoader(DummyLoader, BaseLoader): """Buffered loader will send new data when threshold is reached """ def store_data(self): pass -def test_base_loader(swh_config): - loader = DummyBaseLoader() +def test_base_loader(swh_storage): + loader = DummyBaseLoader(swh_storage) result = loader.load() assert result == {"status": "eventful"} -def test_base_loader_with_config(swh_config): - loader = DummyBaseLoader("logger-name", DEFAULT_CONFIG) +def test_base_loader_with_config(swh_storage): + loader = DummyBaseLoader(swh_storage, "logger-name") result = loader.load() assert result == {"status": "eventful"} -def test_dvcs_loader(swh_config): - loader = DummyDVCSLoader() +def test_dvcs_loader(swh_storage): + loader = DummyDVCSLoader(swh_storage) result = loader.load() assert result == {"status": "eventful"} -def test_dvcs_loader_with_config(swh_config): - loader = DummyDVCSLoader("another-logger", DEFAULT_CONFIG) +def test_dvcs_loader_with_config(swh_storage): + loader = DummyDVCSLoader(swh_storage, "another-logger") result = loader.load() assert result == {"status": "eventful"} -def test_loader_logger_default_name(swh_config): - loader = DummyBaseLoader() +def test_loader_logger_default_name(swh_storage): + loader = DummyBaseLoader(swh_storage) assert isinstance(loader.log, logging.Logger) assert loader.log.name == "swh.loader.core.tests.test_loader.DummyBaseLoader" - loader = DummyDVCSLoader() + loader = DummyDVCSLoader(swh_storage) assert isinstance(loader.log, logging.Logger) assert loader.log.name == "swh.loader.core.tests.test_loader.DummyDVCSLoader" -def test_loader_logger_with_name(swh_config): - loader = DummyBaseLoader("some.logger.name") +def test_loader_logger_with_name(swh_storage): + loader = DummyBaseLoader(swh_storage, 
"some.logger.name") assert isinstance(loader.log, logging.Logger) assert loader.log.name == "some.logger.name" -def test_loader_save_data_path(swh_config, tmp_path): - loader = DummyBaseLoader("some.logger.name.1") +def test_loader_save_data_path(swh_storage, tmp_path): + loader = DummyBaseLoader(swh_storage, "some.logger.name.1", save_data_path=tmp_path) url = "http://bitbucket.org/something" loader.origin = Origin(url=url) loader.visit_date = datetime.datetime(year=2019, month=10, day=1) - loader.config = { - "save_data_path": tmp_path, - } hash_url = hashlib.sha1(url.encode("utf-8")).hexdigest() expected_save_path = "%s/sha1:%s/%s/2019" % (str(tmp_path), hash_url[0:2], hash_url) save_path = loader.get_save_data_path() assert save_path == expected_save_path def _check_load_failure(caplog, loader, exc_class, exc_text, status="partial"): """Check whether a failed load properly logged its exception, and that the snapshot didn't get referenced in storage""" assert isinstance(loader, DVCSLoader) # was implicit so far for record in caplog.records: if record.levelname != "ERROR": continue assert "Loading failure" in record.message assert record.exc_info exc = record.exc_info[1] assert isinstance(exc, exc_class) assert exc_text in exc.args[0] # Check that the get_snapshot operation would have succeeded assert loader.get_snapshot() is not None # And confirm that the visit doesn't reference a snapshot visit = assert_last_visit_matches(loader.storage, ORIGIN.url, status) if status != "partial": assert visit.snapshot is None # But that the snapshot didn't get loaded assert loader.loaded_snapshot_id is None class DummyDVCSLoaderExc(DummyDVCSLoader): """A loader which raises an exception when loading some contents""" def get_contents(self): raise RuntimeError("Failed to get contents!") -def test_dvcs_loader_exc_partial_visit(swh_config, caplog): +def test_dvcs_loader_exc_partial_visit(swh_storage, caplog): logger_name = "dvcsloaderexc" caplog.set_level(logging.ERROR, 
logger=logger_name) - loader = DummyDVCSLoaderExc(logging_class=logger_name) + loader = DummyDVCSLoaderExc(swh_storage, logging_class=logger_name) # fake the loading ending up in a snapshot loader.loaded_snapshot_id = hash_to_bytes( "9e4dd2b40d1b46b70917c0949aa2195c823a648e" ) result = loader.load() # loading failed assert result == {"status": "failed"} # still resulted in a partial visit with a snapshot (somehow) _check_load_failure( caplog, loader, RuntimeError, "Failed to get contents!", ) class BrokenStorageProxy: def __init__(self, storage): self.storage = storage def __getattr__(self, attr): return getattr(self.storage, attr) def snapshot_add(self, snapshots): raise RuntimeError("Failed to add snapshot!") class DummyDVCSLoaderStorageExc(DummyDVCSLoader): """A loader which raises an exception when loading some contents""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.storage = BrokenStorageProxy(self.storage) -def test_dvcs_loader_storage_exc_failed_visit(swh_config, caplog): +def test_dvcs_loader_storage_exc_failed_visit(swh_storage, caplog): logger_name = "dvcsloaderexc" caplog.set_level(logging.ERROR, logger=logger_name) - loader = DummyDVCSLoaderStorageExc(logging_class=logger_name) + loader = DummyDVCSLoaderStorageExc(swh_storage, logging_class=logger_name) result = loader.load() assert result == {"status": "failed"} _check_load_failure( caplog, loader, RuntimeError, "Failed to add snapshot!", status="failed" ) class DummyDVCSLoaderNotFound(DummyDVCSLoader, BaseLoader): """A loader which raises a not_found exception during the prepare method call """ def prepare(*args, **kwargs): raise NotFound("Unknown origin!") def load_status(self): return { "status": "uneventful", } -def test_loader_not_found(swh_config, caplog): - loader = DummyDVCSLoaderNotFound() +def test_loader_not_found(swh_storage, caplog): + loader = DummyDVCSLoaderNotFound(swh_storage) result = loader.load() assert result == {"status": "uneventful"} 
_check_load_failure(caplog, loader, NotFound, "Unknown origin!", status="not_found") diff --git a/swh/loader/package/archive/loader.py b/swh/loader/package/archive/loader.py index 26dcb42..82d2cc5 100644 --- a/swh/loader/package/archive/loader.py +++ b/swh/loader/package/archive/loader.py @@ -1,171 +1,174 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import logging from os import path from typing import Any, Dict, Iterator, Optional, Sequence, Tuple, Union import attr import iso8601 from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import release_name from swh.model.model import ( Person, Revision, RevisionType, Sha1Git, TimestampWithTimezone, ) +from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) SWH_PERSON = Person( name=b"Software Heritage", fullname=b"Software Heritage", email=b"robot@softwareheritage.org", ) REVISION_MESSAGE = b"swh-loader-package: synthetic revision message" @attr.s class ArchivePackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) length = attr.ib(type=int) """Size of the archive file""" time = attr.ib(type=Union[str, datetime.datetime]) """Timestamp of the archive file on the server""" version = attr.ib(type=str) # default keys for gnu ID_KEYS = ["time", "url", "length", "version"] def artifact_identity(self, id_keys=None): if id_keys is None: id_keys = self.ID_KEYS # TODO: use parsed attributes instead of self.raw_info return [self.raw_info.get(k) for k in id_keys] @classmethod def from_metadata(cls, a_metadata: Dict[str, Any]) -> "ArchivePackageInfo": url = a_metadata["url"] filename = a_metadata.get("filename") return cls( url=url, filename=filename if filename 
else path.split(url)[-1], raw_info=a_metadata, length=a_metadata["length"], time=a_metadata["time"], version=a_metadata["version"], ) class ArchiveLoader(PackageLoader[ArchivePackageInfo]): """Load archive origin's artifact files into swh archive """ visit_type = "tar" def __init__( self, + storage: StorageInterface, url: str, artifacts: Sequence[Dict[str, Any]], identity_artifact_keys: Optional[Sequence[str]] = None, + max_content_size: Optional[int] = None, ): """Loader constructor. For now, this is the lister's task output. Args: url: Origin url artifacts: List of artifact information with keys: - **time**: last modification time as either isoformat date string or timestamp - **url**: the artifact url to retrieve filename - **filename**: optionally, the file's name - **version**: artifact's version - **length**: artifact's length identity_artifact_keys: Optional List of keys forming the "identity" of an artifact """ - super().__init__(url=url) + super().__init__(storage=storage, url=url, max_content_size=max_content_size) self.artifacts = artifacts # assume order is enforced in the lister self.identity_artifact_keys = identity_artifact_keys def get_versions(self) -> Sequence[str]: versions = [] for archive in self.artifacts: v = archive.get("version") if v: versions.append(v) return versions def get_default_version(self) -> str: # It's the most recent, so for this loader, it's the last one return self.artifacts[-1]["version"] def get_package_info( self, version: str ) -> Iterator[Tuple[str, ArchivePackageInfo]]: for a_metadata in self.artifacts: p_info = ArchivePackageInfo.from_metadata(a_metadata) if version == p_info.version: # FIXME: this code assumes we have only 1 artifact per # versioned package yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Dict, p_info: ArchivePackageInfo ) -> Optional[bytes]: identity = p_info.artifact_identity(id_keys=self.identity_artifact_keys) for rev_id, known_artifact in 
known_artifacts.items(): logging.debug("known_artifact: %s", known_artifact) reference_artifact = known_artifact["extrinsic"]["raw"] reference_artifact_info = ArchivePackageInfo.from_metadata( reference_artifact ) known_identity = reference_artifact_info.artifact_identity( id_keys=self.identity_artifact_keys ) if identity == known_identity: return rev_id return None def build_revision( self, p_info: ArchivePackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: time = p_info.time # assume it's a timestamp if isinstance(time, str): # otherwise, assume it's a parsable date parsed_time = iso8601.parse_date(time) else: parsed_time = time normalized_time = TimestampWithTimezone.from_datetime(parsed_time) return Revision( type=RevisionType.TAR, message=REVISION_MESSAGE, date=normalized_time, author=SWH_PERSON, committer=SWH_PERSON, committer_date=normalized_time, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {}, "extrinsic": { "provider": self.url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) diff --git a/swh/loader/package/archive/tasks.py b/swh/loader/package/archive/tasks.py index 1f1d2f5..4ac17f1 100644 --- a/swh/loader/package/archive/tasks.py +++ b/swh/loader/package/archive/tasks.py @@ -1,14 +1,15 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.archive.loader import ArchiveLoader @shared_task(name=__name__ + ".LoadArchive") def load_archive_files(*, url=None, artifacts=None): """Load archive's artifacts (e.g gnu, etc...)""" - return ArchiveLoader(url, artifacts).load() + loader = ArchiveLoader.from_configfile(url=url, artifacts=artifacts) + return loader.load() diff --git 
a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py index 4bfcf78..9c88fe6 100644 --- a/swh/loader/package/archive/tests/test_archive.py +++ b/swh/loader/package/archive/tests/test_archive.py @@ -1,370 +1,374 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import attr from swh.loader.package.archive.loader import ArchiveLoader, ArchivePackageInfo from swh.loader.package.tests.common import check_metadata_paths from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import Snapshot, SnapshotBranch, TargetType URL = "https://ftp.gnu.org/gnu/8sync/" GNU_ARTIFACTS = [ { "time": 944729610, "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz", "length": 221837, "filename": "8sync-0.1.0.tar.gz", "version": "0.1.0", } ] _expected_new_contents_first_visit = [ "e9258d81faf5881a2f96a77ba609396f82cb97ad", "1170cf105b04b7e2822a0e09d2acf71da7b9a130", "fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac", "0057bec9b5422aff9256af240b177ac0e3ac2608", "2b8d0d0b43a1078fc708930c8ddc2956a86c566e", "27de3b3bc6545d2a797aeeb4657c0e215a0c2e55", "2e6db43f5cd764e677f416ff0d0c78c7a82ef19b", "ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62", "edeb33282b2bffa0e608e9d2fd960fd08093c0ea", "d64e64d4c73679323f8d4cde2643331ba6c20af9", "7a756602914be889c0a2d3952c710144b3e64cb0", "84fb589b554fcb7f32b806951dcf19518d67b08f", "8624bcdae55baeef00cd11d5dfcfa60f68710a02", "e08441aeab02704cfbd435d6445f7c072f8f524e", "f67935bc3a83a67259cda4b2d43373bd56703844", "809788434b433eb2e3cfabd5d591c9a659d5e3d8", "7d7c6c8c5ebaeff879f61f37083a3854184f6c41", "b99fec102eb24bffd53ab61fc30d59e810f116a2", 
"7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68", "f0c97052e567948adf03e641301e9983c478ccff", "7fb724242e2b62b85ca64190c31dcae5303e19b3", "4f9709e64a9134fe8aefb36fd827b84d8b617ab5", "7350628ccf194c2c3afba4ac588c33e3f3ac778d", "0bb892d9391aa706dc2c3b1906567df43cbe06a2", "49d4c0ce1a16601f1e265d446b6c5ea6b512f27c", "6b5cc594ac466351450f7f64a0b79fdaf4435ad3", "3046e5d1f70297e2a507b98224b6222c9688d610", "1572607d456d7f633bc6065a2b3048496d679a31", ] _expected_new_directories_first_visit = [ "daabc65ec75d487b1335ffc101c0ac11c803f8fc", "263be23b4a8101d3ad0d9831319a3e0f2b065f36", "7f6e63ba6eb3e2236f65892cd822041f1a01dd5c", "4db0a3ecbc976083e2dac01a62f93729698429a3", "dfef1c80e1098dd5deda664bb44a9ab1f738af13", "eca971d346ea54d95a6e19d5051f900237fafdaa", "3aebc29ed1fccc4a6f2f2010fb8e57882406b528", ] _expected_new_revisions_first_visit = { "44183488c0774ce3c957fa19ba695cf18a4a42b3": ( "3aebc29ed1fccc4a6f2f2010fb8e57882406b528" ) } -def visit_with_no_artifact_found(swh_config, requests_mock_datadir): +def test_archive_visit_with_no_artifact_found(swh_storage, requests_mock_datadir): url = URL unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz" loader = ArchiveLoader( + swh_storage, url, artifacts=[ { "time": 944729610, "url": unknown_artifact_url, # unknown artifact "length": 221837, "filename": "8sync-0.1.0.tar.gz", "version": "0.1.0", } ], ) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert actual_load_status["snapshot_id"] is not None - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats - assert_last_visit_matches(loader.storage, url, status="partial", type="tar") + assert_last_visit_matches(swh_storage, url, status="partial", type="tar") -def test_check_revision_metadata_structure(swh_config, requests_mock_datadir): - loader = ArchiveLoader(url=URL, 
artifacts=GNU_ARTIFACTS) +def test_archive_check_revision_metadata_structure(swh_storage, requests_mock_datadir): + loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None - assert_last_visit_matches(loader.storage, URL, status="full", type="tar") + assert_last_visit_matches(swh_storage, URL, status="full", type="tar") expected_revision_id = hash_to_bytes("44183488c0774ce3c957fa19ba695cf18a4a42b3") - revision = loader.storage.revision_get([expected_revision_id])[0] + revision = swh_storage.revision_get([expected_revision_id])[0] assert revision is not None check_metadata_paths( revision.metadata, paths=[ ("intrinsic", dict), ("extrinsic.provider", str), ("extrinsic.when", str), ("extrinsic.raw", dict), ("original_artifact", list), ], ) for original_artifact in revision.metadata["original_artifact"]: check_metadata_paths( original_artifact, paths=[("filename", str), ("length", int), ("checksums", dict),], ) -def test_visit_with_release_artifact_no_prior_visit(swh_config, requests_mock_datadir): +def test_archive_visit_with_release_artifact_no_prior_visit( + swh_storage, requests_mock_datadir +): """With no prior visit, load a gnu project ends up with 1 snapshot """ - loader = ArchiveLoader(url=URL, artifacts=GNU_ARTIFACTS) + loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" expected_snapshot_first_visit_id = hash_to_bytes( "c419397fd912039825ebdbea378bc6283f006bf5" ) assert ( hash_to_bytes(actual_load_status["snapshot_id"]) == expected_snapshot_first_visit_id ) - assert_last_visit_matches(loader.storage, URL, status="full", type="tar") + assert_last_visit_matches(swh_storage, URL, status="full", type="tar") - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 
len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "release": 0, "revision": len(_expected_new_revisions_first_visit), "skipped_content": 0, "snapshot": 1, } == stats expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit) - assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] + assert list(swh_storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit) - assert list(loader.storage.directory_missing(expected_dirs)) == [] + assert list(swh_storage.directory_missing(expected_dirs)) == [] expected_revs = map(hash_to_bytes, _expected_new_revisions_first_visit) - assert list(loader.storage.revision_missing(expected_revs)) == [] + assert list(swh_storage.revision_missing(expected_revs)) == [] expected_snapshot = Snapshot( id=expected_snapshot_first_visit_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.1.0", ), b"releases/0.1.0": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("44183488c0774ce3c957fa19ba695cf18a4a42b3"), ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) -def test_2_visits_without_change(swh_config, requests_mock_datadir): +def test_archive_2_visits_without_change(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ url = URL - loader = ArchiveLoader(url, artifacts=GNU_ARTIFACTS) + loader = ArchiveLoader(swh_storage, url, artifacts=GNU_ARTIFACTS) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None - assert_last_visit_matches(loader.storage, url, status="full", type="tar") + assert_last_visit_matches(swh_storage, url, status="full", type="tar") actual_load_status2 = loader.load() assert 
actual_load_status2["status"] == "uneventful" assert actual_load_status2["snapshot_id"] is not None assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"] - assert_last_visit_matches(loader.storage, url, status="full", type="tar") + assert_last_visit_matches(swh_storage, url, status="full", type="tar") urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 -def test_2_visits_with_new_artifact(swh_config, requests_mock_datadir): +def test_archive_2_visits_with_new_artifact(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ url = URL artifact1 = GNU_ARTIFACTS[0] - loader = ArchiveLoader(url, [artifact1]) + loader = ArchiveLoader(swh_storage, url, [artifact1]) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None - assert_last_visit_matches(loader.storage, url, status="full", type="tar") + assert_last_visit_matches(swh_storage, url, status="full", type="tar") - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "release": 0, "revision": len(_expected_new_revisions_first_visit), "skipped_content": 0, "snapshot": 1, } == stats urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 artifact2 = { "time": 1480991830, "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz", "length": 238466, "filename": "8sync-0.2.0.tar.gz", "version": "0.2.0", } - loader2 = ArchiveLoader(url, [artifact1, artifact2]) - # implementation detail: share the storage in between visits - loader2.storage = loader.storage - stats2 = get_stats(loader2.storage) + loader2 = ArchiveLoader(swh_storage, url, [artifact1, artifact2]) + 
stats2 = get_stats(swh_storage) assert stats == stats2 # ensure we share the storage actual_load_status2 = loader2.load() assert actual_load_status2["status"] == "eventful" assert actual_load_status2["snapshot_id"] is not None - stats2 = get_stats(loader.storage) + stats2 = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit) + 14, "directory": len(_expected_new_directories_first_visit) + 8, "origin": 1, "origin_visit": 1 + 1, "release": 0, "revision": len(_expected_new_revisions_first_visit) + 1, "skipped_content": 0, "snapshot": 1 + 1, } == stats2 - assert_last_visit_matches(loader.storage, url, status="full", type="tar") + assert_last_visit_matches(swh_storage, url, status="full", type="tar") urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] # 1 artifact (2nd time no modification) + 1 new artifact assert len(urls) == 2 -def test_2_visits_without_change_not_gnu(swh_config, requests_mock_datadir): +def test_archive_2_visits_without_change_not_gnu(swh_storage, requests_mock_datadir): """Load a project archive (not gnu) ends up with 1 snapshot """ url = "https://something.else.org/8sync/" artifacts = [ # this is not a gnu artifact { "time": "1999-12-09T09:53:30+00:00", # it's also not a timestamp "sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4", # noqa # keep a gnu artifact reference to avoid adding other test files "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz", "length": 238466, "filename": "8sync-0.2.0.tar.gz", "version": "0.2.0", } ] # Here the loader defines the id_keys to use for existence in the snapshot # It's not the default archive loader which loader = ArchiveLoader( - url, artifacts=artifacts, identity_artifact_keys=["sha256", "length", "url"] + swh_storage, + url, + artifacts=artifacts, + identity_artifact_keys=["sha256", "length", "url"], ) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" 
assert actual_load_status["snapshot_id"] is not None - assert_last_visit_matches(loader.storage, url, status="full", type="tar") + assert_last_visit_matches(swh_storage, url, status="full", type="tar") actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" assert actual_load_status2["snapshot_id"] == actual_load_status["snapshot_id"] - assert_last_visit_matches(loader.storage, url, status="full", type="tar") + assert_last_visit_matches(swh_storage, url, status="full", type="tar") urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 -def test_artifact_identity(): +def test_archive_artifact_identity(): """Compute primary key should return the right identity """ @attr.s class TestPackageInfo(ArchivePackageInfo): a = attr.ib() b = attr.ib() metadata = GNU_ARTIFACTS[0] p_info = TestPackageInfo( raw_info={**metadata, "a": 1, "b": 2}, a=1, b=2, **metadata, ) for id_keys, expected_id in [ (["a", "b"], [1, 2]), ([], []), (["a", "key-that-does-not-exist"], [1, None]), ( None, [ metadata["time"], metadata["url"], metadata["length"], metadata["version"], ], ), ]: actual_id = p_info.artifact_identity(id_keys=id_keys) assert actual_id == expected_id diff --git a/swh/loader/package/archive/tests/test_tasks.py b/swh/loader/package/archive/tests/test_tasks.py index 2edf325..8029eaf 100644 --- a/swh/loader/package/archive/tests/test_tasks.py +++ b/swh/loader/package/archive/tests/test_tasks.py @@ -1,21 +1,21 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -def test_archive_loader( +def test_tasks_archive_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): - mock_loader = 
mocker.patch("swh.loader.package.archive.loader.ArchiveLoader.load") - mock_loader.return_value = {"status": "eventful"} + mock_load = mocker.patch("swh.loader.package.archive.loader.ArchiveLoader.load") + mock_load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.archive.tasks.LoadArchive", - kwargs={"url": "some-url", "artifacts": []}, + kwargs=dict(url="https://gnu.org/", artifacts=[]), ) assert res res.wait() assert res.successful() - + assert mock_load.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/cran/loader.py b/swh/loader/package/cran/loader.py index 2325735..4af0e8f 100644 --- a/swh/loader/package/cran/loader.py +++ b/swh/loader/package/cran/loader.py @@ -1,202 +1,209 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from datetime import timezone import logging import os from os import path import re from typing import Any, Dict, Iterator, List, Mapping, Optional, Tuple import attr import dateutil.parser from debian.deb822 import Deb822 from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import release_name from swh.model.model import ( Person, Revision, RevisionType, Sha1Git, TimestampWithTimezone, ) +from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) DATE_PATTERN = re.compile(r"^(?P\d{4})-(?P\d{2})$") @attr.s class CRANPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) version = attr.ib(type=str) ID_KEYS = ["url", "version"] @classmethod def from_metadata(cls, a_metadata: Dict[str, Any]) -> "CRANPackageInfo": url = a_metadata["url"] return CRANPackageInfo( url=url, 
filename=path.basename(url), raw_info=a_metadata, version=a_metadata["version"], ) class CRANLoader(PackageLoader[CRANPackageInfo]): visit_type = "cran" - def __init__(self, url: str, artifacts: List[Dict]): + def __init__( + self, + storage: StorageInterface, + url: str, + artifacts: List[Dict], + max_content_size: Optional[int] = None, + ): """Loader constructor. Args: url: Origin url to retrieve cran artifact(s) from artifacts: List of associated artifact for the origin url """ - super().__init__(url=url) + super().__init__(storage=storage, url=url, max_content_size=max_content_size) # explicit what we consider the artifact identity self.artifacts = artifacts def get_versions(self) -> List[str]: versions = [] for artifact in self.artifacts: versions.append(artifact["version"]) return versions def get_default_version(self) -> str: return self.artifacts[-1]["version"] def get_package_info(self, version: str) -> Iterator[Tuple[str, CRANPackageInfo]]: for a_metadata in self.artifacts: p_info = CRANPackageInfo.from_metadata(a_metadata) if version == p_info.version: yield release_name(version), p_info def resolve_revision_from( self, known_artifacts: Mapping[bytes, Mapping], p_info: CRANPackageInfo, ) -> Optional[bytes]: """Given known_artifacts per revision, try to determine the revision for artifact_metadata """ new_identity = p_info.artifact_identity() for rev_id, known_artifact_meta in known_artifacts.items(): logging.debug("known_artifact_meta: %s", known_artifact_meta) known_artifact = known_artifact_meta["extrinsic"]["raw"] known_identity = CRANPackageInfo.from_metadata( known_artifact ).artifact_identity() if new_identity == known_identity: return rev_id return None def build_revision( self, p_info: CRANPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: # a_metadata is empty metadata = extract_intrinsic_metadata(uncompressed_path) date = parse_date(metadata.get("Date")) author = Person.from_fullname(metadata.get("Maintainer", 
"").encode()) version = metadata.get("Version", p_info.version) return Revision( message=version.encode("utf-8"), type=RevisionType.TAR, date=date, author=author, committer=author, committer_date=date, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {"tool": "DESCRIPTION", "raw": metadata,}, "extrinsic": { "provider": self.url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) def parse_debian_control(filepath: str) -> Dict[str, Any]: """Parse debian control at filepath""" metadata: Dict = {} logger.debug("Debian control file %s", filepath) for paragraph in Deb822.iter_paragraphs(open(filepath, "rb")): logger.debug("paragraph: %s", paragraph) metadata.update(**paragraph) logger.debug("metadata parsed: %s", metadata) return metadata def extract_intrinsic_metadata(dir_path: str) -> Dict[str, Any]: """Given an uncompressed path holding the DESCRIPTION file, returns a DESCRIPTION parsed structure as a dict. Cran origins describes their intrinsic metadata within a DESCRIPTION file at the root tree of a tarball. This DESCRIPTION uses a simple file format called DCF, the Debian control format. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. 
Returns: the DESCRIPTION parsed structure as a dict (or empty dict if missing) """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] description_path = os.path.join(dir_path, project_dirname, "DESCRIPTION") if not os.path.exists(description_path): return {} return parse_debian_control(description_path) def parse_date(date: Optional[str]) -> Optional[TimestampWithTimezone]: """Parse a date into a datetime """ assert not date or isinstance(date, str) dt: Optional[datetime.datetime] = None if not date: return None try: specific_date = DATE_PATTERN.match(date) if specific_date: year = int(specific_date.group("year")) month = int(specific_date.group("month")) dt = datetime.datetime(year, month, 1) else: dt = dateutil.parser.parse(date) if not dt.tzinfo: # up for discussion the timezone needs to be set or # normalize_timestamp is not happy: ValueError: normalize_timestamp # received datetime without timezone: 2001-06-08 00:00:00 dt = dt.replace(tzinfo=timezone.utc) except Exception as e: logger.warning("Fail to parse date %s. 
Reason: %s", date, e) if dt: return TimestampWithTimezone.from_datetime(dt) else: return None diff --git a/swh/loader/package/cran/tasks.py b/swh/loader/package/cran/tasks.py index 5afe0aa..ac5c53a 100644 --- a/swh/loader/package/cran/tasks.py +++ b/swh/loader/package/cran/tasks.py @@ -1,14 +1,14 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.cran.loader import CRANLoader @shared_task(name=__name__ + ".LoadCRAN") def load_cran(url=None, artifacts=[]): """Load CRAN's artifacts""" - return CRANLoader(url, artifacts).load() + return CRANLoader.from_configfile(url=url, artifacts=artifacts).load() diff --git a/swh/loader/package/cran/tests/test_cran.py b/swh/loader/package/cran/tests/test_cran.py index cde9964..1b2d28d 100644 --- a/swh/loader/package/cran/tests/test_cran.py +++ b/swh/loader/package/cran/tests/test_cran.py @@ -1,362 +1,364 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime, timezone import os from os import path from unittest.mock import patch from dateutil.tz import tzlocal import pytest from swh.core.tarball import uncompress from swh.loader.package.cran.loader import ( CRANLoader, extract_intrinsic_metadata, parse_date, parse_debian_control, ) from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone SNAPSHOT = Snapshot( 
id=hash_to_bytes("920adcccc78aaeedd3cfa4459dd900d8c3431a21"), branches={ b"HEAD": SnapshotBranch( target=b"releases/2.22-6", target_type=TargetType.ALIAS ), b"releases/2.22-6": SnapshotBranch( target=hash_to_bytes("42bdb16facd5140424359c8ce89a28ecfa1ce603"), target_type=TargetType.REVISION, ), }, ) def test_cran_parse_date(): data = [ # parsable, some have debatable results though ("2001-June-08", datetime(2001, 6, 8, 0, 0, tzinfo=timezone.utc)), ( "Tue Dec 27 15:06:08 PST 2011", datetime(2011, 12, 27, 15, 6, 8, tzinfo=timezone.utc), ), ("8-14-2013", datetime(2013, 8, 14, 0, 0, tzinfo=timezone.utc)), ("2011-01", datetime(2011, 1, 1, 0, 0, tzinfo=timezone.utc)), ("201109", datetime(2009, 11, 20, 0, 0, tzinfo=timezone.utc)), ("04-12-2014", datetime(2014, 4, 12, 0, 0, tzinfo=timezone.utc)), ( "2018-08-24, 10:40:10", datetime(2018, 8, 24, 10, 40, 10, tzinfo=timezone.utc), ), ("2013-October-16", datetime(2013, 10, 16, 0, 0, tzinfo=timezone.utc)), ("Aug 23, 2013", datetime(2013, 8, 23, 0, 0, tzinfo=timezone.utc)), ("27-11-2014", datetime(2014, 11, 27, 0, 0, tzinfo=timezone.utc)), ("2019-09-26,", datetime(2019, 9, 26, 0, 0, tzinfo=timezone.utc)), ("9/25/2014", datetime(2014, 9, 25, 0, 0, tzinfo=timezone.utc)), ( "Fri Jun 27 17:23:53 2014", datetime(2014, 6, 27, 17, 23, 53, tzinfo=timezone.utc), ), ("28-04-2014", datetime(2014, 4, 28, 0, 0, tzinfo=timezone.utc)), ("04-14-2014", datetime(2014, 4, 14, 0, 0, tzinfo=timezone.utc)), ( "2019-05-08 14:17:31 UTC", datetime(2019, 5, 8, 14, 17, 31, tzinfo=timezone.utc), ), ( "Wed May 21 13:50:39 CEST 2014", datetime(2014, 5, 21, 13, 50, 39, tzinfo=tzlocal()), ), ( "2018-04-10 00:01:04 KST", datetime(2018, 4, 10, 0, 1, 4, tzinfo=timezone.utc), ), ("2019-08-25 10:45", datetime(2019, 8, 25, 10, 45, tzinfo=timezone.utc)), ("March 9, 2015", datetime(2015, 3, 9, 0, 0, tzinfo=timezone.utc)), ("Aug. 
18, 2012", datetime(2012, 8, 18, 0, 0, tzinfo=timezone.utc)), ("2014-Dec-17", datetime(2014, 12, 17, 0, 0, tzinfo=timezone.utc)), ("March 01, 2013", datetime(2013, 3, 1, 0, 0, tzinfo=timezone.utc)), ("2017-04-08.", datetime(2017, 4, 8, 0, 0, tzinfo=timezone.utc)), ("2014-Apr-22", datetime(2014, 4, 22, 0, 0, tzinfo=timezone.utc)), ( "Mon Jan 12 19:54:04 2015", datetime(2015, 1, 12, 19, 54, 4, tzinfo=timezone.utc), ), ("May 22, 2014", datetime(2014, 5, 22, 0, 0, tzinfo=timezone.utc)), ( "2014-08-12 09:55:10 EDT", datetime(2014, 8, 12, 9, 55, 10, tzinfo=timezone.utc), ), # unparsable ("Fabruary 21, 2012", None), ('2019-05-28"', None), ("2017-03-01 today", None), ("2016-11-0110.1093/icesjms/fsw182", None), ("2019-07-010", None), ("2015-02.23", None), ("20013-12-30", None), ("2016-08-017", None), ("2019-02-07l", None), ("2018-05-010", None), ("2019-09-27 KST", None), ("$Date$", None), ("2019-09-27 KST", None), ("2019-06-22 $Date$", None), ("$Date: 2013-01-18 12:49:03 -0600 (Fri, 18 Jan 2013) $", None), ("2015-7-013", None), ("2018-05-023", None), ("Check NEWS file for changes: news(package='simSummary')", None), ] for date, expected_date in data: actual_tstz = parse_date(date) if expected_date is None: assert actual_tstz is None, date else: expected_tstz = TimestampWithTimezone.from_datetime(expected_date) assert actual_tstz == expected_tstz, date @pytest.mark.fs -def test_extract_intrinsic_metadata(tmp_path, datadir): +def test_cran_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) # sample url # https://cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz # noqa archive_path = path.join( datadir, "https_cran.r-project.org", "src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz", ) uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { "Package": 
"KernSmooth", "Priority": "recommended", "Version": "2.22-6", "Date": "2001-June-08", "Title": "Functions for kernel smoothing for Wand & Jones (1995)", "Author": "S original by Matt Wand.\n\tR port by Brian Ripley .", # noqa "Maintainer": "Brian Ripley ", "Description": 'functions for kernel smoothing (and density estimation)\n corresponding to the book: \n Wand, M.P. and Jones, M.C. (1995) "Kernel Smoothing".', # noqa "License": "Unlimited use and distribution (see LICENCE).", "URL": "http://www.biostat.harvard.edu/~mwand", } assert actual_metadata == expected_metadata @pytest.mark.fs -def test_extract_intrinsic_metadata_failures(tmp_path): +def test_cran_extract_intrinsic_metadata_failures(tmp_path): """Parsing inexistent path/archive/PKG-INFO yield None""" # inexistent first level path assert extract_intrinsic_metadata("/something-inexistent") == {} # inexistent second level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # inexistent PKG-INFO within second level path existing_path_no_pkginfo = str(tmp_path / "something") os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} -def test_cran_one_visit(swh_config, requests_mock_datadir): +def test_cran_one_visit(swh_storage, requests_mock_datadir): version = "2.22-6" base_url = "https://cran.r-project.org" origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html" artifact_url = ( f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa ) loader = CRANLoader( - origin_url, artifacts=[{"url": artifact_url, "version": version,}] + swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version,}] ) actual_load_status = loader.load() assert actual_load_status == { "status": "eventful", "snapshot_id": SNAPSHOT.id.hex(), } - check_snapshot(SNAPSHOT, loader.storage) + check_snapshot(SNAPSHOT, swh_storage) - assert_last_visit_matches(loader.storage, origin_url, status="full", type="cran") + 
assert_last_visit_matches(swh_storage, origin_url, status="full", type="cran") - visit_stats = get_stats(loader.storage) + visit_stats = get_stats(swh_storage) assert { "content": 33, "directory": 7, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == visit_stats urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith(base_url) ] # visited each artifact once across 2 visits assert len(urls) == 1 -def test_cran_2_visits_same_origin(swh_config, requests_mock_datadir): +def test_cran_2_visits_same_origin(swh_storage, requests_mock_datadir): """Multiple visits on the same origin, only 1 archive fetch""" version = "2.22-6" base_url = "https://cran.r-project.org" origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html" artifact_url = ( f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa ) loader = CRANLoader( - origin_url, artifacts=[{"url": artifact_url, "version": version}] + swh_storage, origin_url, artifacts=[{"url": artifact_url, "version": version}] ) # first visit actual_load_status = loader.load() expected_snapshot_id = "920adcccc78aaeedd3cfa4459dd900d8c3431a21" assert actual_load_status == { "status": "eventful", "snapshot_id": SNAPSHOT.id.hex(), } - check_snapshot(SNAPSHOT, loader.storage) + check_snapshot(SNAPSHOT, swh_storage) - assert_last_visit_matches(loader.storage, origin_url, status="full", type="cran") + assert_last_visit_matches(swh_storage, origin_url, status="full", type="cran") - visit_stats = get_stats(loader.storage) + visit_stats = get_stats(swh_storage) assert { "content": 33, "directory": 7, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == visit_stats # second visit actual_load_status2 = loader.load() assert actual_load_status2 == { "status": "uneventful", "snapshot_id": expected_snapshot_id, } - assert_last_visit_matches(loader.storage, origin_url, status="full", 
type="cran") + assert_last_visit_matches(swh_storage, origin_url, status="full", type="cran") - visit_stats2 = get_stats(loader.storage) + visit_stats2 = get_stats(swh_storage) visit_stats["origin_visit"] += 1 assert visit_stats2 == visit_stats, "same stats as 1st visit, +1 visit" urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith(base_url) ] assert len(urls) == 1, "visited one time artifact url (across 2 visits)" -def test_parse_debian_control(datadir): +def test_cran_parse_debian_control(datadir): description_file = os.path.join(datadir, "description", "acepack") actual_metadata = parse_debian_control(description_file) assert actual_metadata == { "Package": "acepack", "Maintainer": "Shawn Garbett", "Version": "1.4.1", "Author": "Phil Spector, Jerome Friedman, Robert Tibshirani...", "Description": "Two nonparametric methods for multiple regression...", "Title": "ACE & AVAS 4 Selecting Multiple Regression Transformations", "License": "MIT + file LICENSE", "Suggests": "testthat", "Packaged": "2016-10-28 15:38:59 UTC; garbetsp", "Repository": "CRAN", "Date/Publication": "2016-10-29 00:11:52", "NeedsCompilation": "yes", } -def test_parse_debian_control_unicode_issue(datadir): +def test_cran_parse_debian_control_unicode_issue(datadir): # iso-8859-1 caused failure, now fixed description_file = os.path.join(datadir, "description", "KnownBR") actual_metadata = parse_debian_control(description_file) assert actual_metadata == { "Package": "KnowBR", "Version": "2.0", "Title": """Discriminating Well Surveyed Spatial Units from Exhaustive Biodiversity Databases""", "Author": "Cástor Guisande González and Jorge M. 
Lobo", "Maintainer": "Cástor Guisande González ", "Description": "It uses species accumulation curves and diverse estimators...", "License": "GPL (>= 2)", "Encoding": "latin1", "Depends": "R (>= 3.0), fossil, mgcv, plotrix, sp, vegan", "Suggests": "raster, rgbif", "NeedsCompilation": "no", "Packaged": "2019-01-30 13:27:29 UTC; castor", "Repository": "CRAN", "Date/Publication": "2019-01-31 20:53:50 UTC", } @pytest.mark.parametrize( "method_name", ["build_extrinsic_snapshot_metadata", "build_extrinsic_origin_metadata",], ) def test_cran_fail_to_build_or_load_extrinsic_metadata( - method_name, swh_config, requests_mock_datadir + method_name, swh_storage, requests_mock_datadir ): """problem during loading: {visit: failed, status: failed, no snapshot} """ version = "2.22-6" base_url = "https://cran.r-project.org" origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html" artifact_url = ( f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa ) full_method_name = f"swh.loader.package.cran.loader.CRANLoader.{method_name}" with patch( full_method_name, side_effect=ValueError("Fake to fail to build or load extrinsic metadata"), ): loader = CRANLoader( - origin_url, artifacts=[{"url": artifact_url, "version": version}] + swh_storage, + origin_url, + artifacts=[{"url": artifact_url, "version": version}], ) actual_load_status = loader.load() assert actual_load_status == { "status": "failed", "snapshot_id": SNAPSHOT.id.hex(), } - visit_stats = get_stats(loader.storage) + visit_stats = get_stats(swh_storage) assert { "content": 33, "directory": 7, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == visit_stats assert_last_visit_matches( - loader.storage, origin_url, status="partial", type="cran" + swh_storage, origin_url, status="partial", type="cran" ) diff --git a/swh/loader/package/cran/tests/test_tasks.py b/swh/loader/package/cran/tests/test_tasks.py index b54c729..f944f0c 100644 --- 
a/swh/loader/package/cran/tests/test_tasks.py +++ b/swh/loader/package/cran/tests/test_tasks.py @@ -1,24 +1,23 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -def test_cran_loader( +def test_tasks_cran_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): - mock_loader = mocker.patch("swh.loader.package.cran.loader.CRANLoader.load") - mock_loader.return_value = {"status": "eventful"} + mock_load = mocker.patch("swh.loader.package.cran.loader.CRANLoader.load") + mock_load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.cran.tasks.LoadCRAN", - kwargs={ - "url": "some-url", - "artifacts": {"version": "1.2.3", "url": "artifact-url"}, - }, + kwargs=dict( + url="some-url", artifacts=[{"version": "1.2.3", "url": "artifact-url"}], + ), ) assert res res.wait() assert res.successful() - + assert mock_load.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/debian/loader.py b/swh/loader/package/debian/loader.py index 40999fd..90bac6d 100644 --- a/swh/loader/package/debian/loader.py +++ b/swh/loader/package/debian/loader.py @@ -1,485 +1,493 @@ -# Copyright (C) 2017-2019 The Software Heritage developers +# Copyright (C) 2017-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import email.utils import logging from os import path import re import subprocess from typing import ( Any, Dict, FrozenSet, Iterator, List, Mapping, Optional, Sequence, Tuple, ) import attr from dateutil.parser import parse as parse_date from 
debian.changelog import Changelog from debian.deb822 import Dsc from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import download, release_name from swh.model.model import ( Person, Revision, RevisionType, Sha1Git, TimestampWithTimezone, ) +from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) UPLOADERS_SPLIT = re.compile(r"(?<=\>)\s*,\s*") @attr.s class DebianFileMetadata: md5sum = attr.ib(type=str) name = attr.ib(type=str) """Filename""" sha256 = attr.ib(type=str) size = attr.ib(type=int) uri = attr.ib(type=str) """URL of this specific file""" @attr.s class DebianPackageChangelog: person = attr.ib(type=Dict[str, str]) """A dict with fields like, model.Person, except they are str instead of bytes, and 'email' is optional.""" date = attr.ib(type=str) """Date of the changelog entry.""" history = attr.ib(type=List[Tuple[str, str]]) """List of tuples (package_name, version)""" @attr.s class DebianPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) files = attr.ib(type=Dict[str, DebianFileMetadata]) """Metadata of the files (.deb, .dsc, ...) 
of the package.""" name = attr.ib(type=str) version = attr.ib(type=str) @classmethod def from_metadata(cls, a_metadata: Dict[str, Any], url: str) -> "DebianPackageInfo": return cls( url=url, filename=None, raw_info=a_metadata, files={ file_name: DebianFileMetadata(**file_metadata) for (file_name, file_metadata) in a_metadata.get("files", {}).items() }, name=a_metadata["name"], version=a_metadata["version"], ) @attr.s class IntrinsicPackageMetadata: """Metadata extracted from a package's .dsc file.""" name = attr.ib(type=str) version = attr.ib(type=str) changelog = attr.ib(type=DebianPackageChangelog) maintainers = attr.ib(type=List[Dict[str, str]]) """A list of dicts with fields like, model.Person, except they are str instead of bytes, and 'email' is optional.""" class DebianLoader(PackageLoader[DebianPackageInfo]): """Load debian origins into swh archive. """ visit_type = "deb" - def __init__(self, url: str, date: str, packages: Mapping[str, Any]): + def __init__( + self, + storage: StorageInterface, + url: str, + date: str, + packages: Mapping[str, Any], + max_content_size: Optional[int] = None, + ): """Debian Loader implementation. Args: url: Origin url (e.g. 
deb://Debian/packages/cicero) date: Ignored packages: versioned packages and associated artifacts, example:: { 'stretch/contrib/0.7.2-3': { 'name': 'cicero', 'version': '0.7.2-3' 'files': { 'cicero_0.7.2-3.diff.gz': { 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', 'name': 'cicero_0.7.2-3.diff.gz', 'sha256': 'f039c9642fe15c75bed5254315e2a29f...', 'size': 3964, 'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz', }, 'cicero_0.7.2-3.dsc': { 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', 'name': 'cicero_0.7.2-3.dsc', 'sha256': '35b7f1048010c67adfd8d70e4961aefb...', 'size': 1864, 'uri': 'http://d.d.o/cicero_0.7.2-3.dsc', }, 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbab...', 'size': 96527, 'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz', } }, }, # ... } """ - super().__init__(url=url) + super().__init__(storage=storage, url=url, max_content_size=max_content_size) self.packages = packages def get_versions(self) -> Sequence[str]: """Returns the keys of the packages input (e.g. stretch/contrib/0.7.2-3, etc...) """ return list(self.packages.keys()) def get_package_info(self, version: str) -> Iterator[Tuple[str, DebianPackageInfo]]: meta = self.packages[version] p_info = DebianPackageInfo.from_metadata(meta, url=self.url) yield release_name(version), p_info def resolve_revision_from( self, known_package_artifacts: Mapping, p_info: DebianPackageInfo ) -> Optional[bytes]: return resolve_revision_from(known_package_artifacts, p_info) def download_package( self, p_info: DebianPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Contrary to other package loaders (1 package, 1 artifact), `p_info.files` represents the package's datafiles set to fetch: - .orig.tar.gz - .dsc - .diff.gz This is delegated to the `download_package` function. 
""" all_hashes = download_package(p_info, tmpdir) logger.debug("all_hashes: %s", all_hashes) res = [] for hashes in all_hashes.values(): res.append((tmpdir, hashes)) logger.debug("res: %s", res) return res def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: logger.debug("dl_artifacts: %s", dl_artifacts) return extract_package(dl_artifacts, dest=dest) def build_revision( self, p_info: DebianPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: dsc_url, dsc_name = dsc_information(p_info) if not dsc_name: raise ValueError("dsc name for url %s should not be None" % dsc_url) dsc_path = path.join(path.dirname(uncompressed_path), dsc_name) intrinsic_metadata = get_intrinsic_package_metadata( p_info, dsc_path, uncompressed_path ) logger.debug("intrinsic_metadata: %s", intrinsic_metadata) logger.debug("p_info: %s", p_info) msg = "Synthetic revision for Debian source package %s version %s" % ( p_info.name, p_info.version, ) author = prepare_person(intrinsic_metadata.changelog.person) date = TimestampWithTimezone.from_iso8601(intrinsic_metadata.changelog.date) # inspired from swh.loader.debian.converters.package_metadata_to_revision # noqa return Revision( type=RevisionType.DSC, message=msg.encode("utf-8"), author=author, date=date, committer=author, committer_date=date, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {"tool": "dsc", "raw": attr.asdict(intrinsic_metadata),}, "extrinsic": { "provider": dsc_url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) def resolve_revision_from( known_package_artifacts: Mapping, p_info: DebianPackageInfo ) -> Optional[bytes]: """Given known package artifacts (resolved from the snapshot of previous visit) and the new artifact to fetch, try to solve the corresponding revision. 
""" artifacts_to_fetch = p_info.files if not artifacts_to_fetch: return None def to_set(data: DebianPackageInfo) -> FrozenSet[Tuple[str, str, int]]: return frozenset( (name, meta.sha256, meta.size) for name, meta in data.files.items() ) # what we want to avoid downloading back if we have them already set_new_artifacts = to_set(p_info) known_artifacts_revision_id = {} for rev_id, known_artifacts in known_package_artifacts.items(): extrinsic = known_artifacts.get("extrinsic") if not extrinsic: continue s = to_set(DebianPackageInfo.from_metadata(extrinsic["raw"], url=p_info.url)) known_artifacts_revision_id[s] = rev_id return known_artifacts_revision_id.get(set_new_artifacts) def uid_to_person(uid: str) -> Dict[str, str]: """Convert an uid to a person suitable for insertion. Args: uid: an uid of the form "Name " Returns: a dictionary with the following keys: - name: the name associated to the uid - email: the mail associated to the uid - fullname: the actual uid input """ logger.debug("uid: %s", uid) ret = { "name": "", "email": "", "fullname": uid, } name, mail = email.utils.parseaddr(uid) if name and email: ret["name"] = name ret["email"] = mail else: ret["name"] = uid return ret def prepare_person(person: Mapping[str, str]) -> Person: """Prepare person for swh serialization... Args: A person dict Returns: A person ready for storage """ return Person.from_dict( {key: value.encode("utf-8") for (key, value) in person.items()} ) def download_package(p_info: DebianPackageInfo, tmpdir: Any) -> Mapping[str, Any]: """Fetch a source package in a temporary directory and check the checksums for all files. 
Args: p_info: Information on a package tmpdir: Where to download and extract the files to ingest Returns: Dict of swh hashes per filename key """ all_hashes = {} for filename, fileinfo in p_info.files.items(): uri = fileinfo.uri logger.debug("fileinfo: %s", fileinfo) extrinsic_hashes = {"sha256": fileinfo.sha256} logger.debug("extrinsic_hashes(%s): %s", filename, extrinsic_hashes) filepath, hashes = download( uri, dest=tmpdir, filename=filename, hashes=extrinsic_hashes ) all_hashes[filename] = hashes logger.debug("all_hashes: %s", all_hashes) return all_hashes def dsc_information(p_info: DebianPackageInfo) -> Tuple[Optional[str], Optional[str]]: """Retrieve dsc information from a package. Args: p_info: Package metadata information Returns: Tuple of dsc file's uri, dsc's full disk path """ dsc_name = None dsc_url = None for filename, fileinfo in p_info.files.items(): if filename.endswith(".dsc"): if dsc_name: raise ValueError( "Package %s_%s references several dsc files." % (p_info.name, p_info.version) ) dsc_url = fileinfo.uri dsc_name = filename return dsc_url, dsc_name def extract_package(dl_artifacts: List[Tuple[str, Mapping]], dest: str) -> str: """Extract a Debian source package to a given directory. Note that after extraction the target directory will be the root of the extracted package, rather than containing it. 
Args: package: package information dictionary dest: directory where the package files are stored Returns: Package extraction directory """ a_path = dl_artifacts[0][0] logger.debug("dl_artifacts: %s", dl_artifacts) for _, hashes in dl_artifacts: logger.debug("hashes: %s", hashes) filename = hashes["filename"] if filename.endswith(".dsc"): dsc_name = filename break dsc_path = path.join(a_path, dsc_name) destdir = path.join(dest, "extracted") logfile = path.join(dest, "extract.log") logger.debug( "extract Debian source package %s in %s" % (dsc_path, destdir), extra={"swh_type": "deb_extract", "swh_dsc": dsc_path, "swh_destdir": destdir,}, ) cmd = [ "dpkg-source", "--no-copy", "--no-check", "--ignore-bad-version", "-x", dsc_path, destdir, ] try: with open(logfile, "w") as stdout: subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logdata = open(logfile, "r").read() raise ValueError( "dpkg-source exited with code %s: %s" % (e.returncode, logdata) ) from None return destdir def get_intrinsic_package_metadata( p_info: DebianPackageInfo, dsc_path: str, extracted_path: str ) -> IntrinsicPackageMetadata: """Get the package metadata from the source package at dsc_path, extracted in extracted_path. 
Args: p_info: the package information dsc_path: path to the package's dsc file extracted_path: the path where the package got extracted Returns: dict: a dictionary with the following keys: - history: list of (package_name, package_version) tuples parsed from the package changelog """ with open(dsc_path, "rb") as dsc: parsed_dsc = Dsc(dsc) # Parse the changelog to retrieve the rest of the package information changelog_path = path.join(extracted_path, "debian/changelog") with open(changelog_path, "rb") as changelog_file: try: parsed_changelog = Changelog(changelog_file) except UnicodeDecodeError: logger.warning( "Unknown encoding for changelog %s," " falling back to iso" % changelog_path, extra={ "swh_type": "deb_changelog_encoding", "swh_name": p_info.name, "swh_version": str(p_info.version), "swh_changelog": changelog_path, }, ) # need to reset as Changelog scrolls to the end of the file changelog_file.seek(0) parsed_changelog = Changelog(changelog_file, encoding="iso-8859-15") history: List[Tuple[str, str]] = [] for block in parsed_changelog: assert block.package is not None history.append((block.package, str(block.version))) changelog = DebianPackageChangelog( person=uid_to_person(parsed_changelog.author), date=parse_date(parsed_changelog.date).isoformat(), history=history[1:], ) maintainers = [ uid_to_person(parsed_dsc["Maintainer"]), ] maintainers.extend( uid_to_person(person) for person in UPLOADERS_SPLIT.split(parsed_dsc.get("Uploaders", "")) ) return IntrinsicPackageMetadata( name=p_info.name, version=str(p_info.version), changelog=changelog, maintainers=maintainers, ) diff --git a/swh/loader/package/debian/tasks.py b/swh/loader/package/debian/tasks.py index 1b09e22..e57bcec 100644 --- a/swh/loader/package/debian/tasks.py +++ b/swh/loader/package/debian/tasks.py @@ -1,14 +1,15 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any 
later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.debian.loader import DebianLoader @shared_task(name=__name__ + ".LoadDebian") def load_deb_package(*, url, date, packages): """Load Debian package""" - return DebianLoader(url, date, packages).load() + loader = DebianLoader.from_configfile(url=url, date=date, packages=packages) + return loader.load() diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py index 28515fc..373a904 100644 --- a/swh/loader/package/debian/tests/test_debian.py +++ b/swh/loader/package/debian/tests/test_debian.py @@ -1,488 +1,499 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging from os import path import random import pytest from swh.loader.package.debian.loader import ( DebianLoader, DebianPackageChangelog, DebianPackageInfo, IntrinsicPackageMetadata, download_package, dsc_information, extract_package, get_intrinsic_package_metadata, prepare_person, resolve_revision_from, uid_to_person, ) from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import Person, Snapshot, SnapshotBranch, TargetType logger = logging.getLogger(__name__) URL = "deb://Debian/packages/cicero" PACKAGE_FILES = { "name": "cicero", "version": "0.7.2-3", "files": { "cicero_0.7.2-3.diff.gz": { "md5sum": "a93661b6a48db48d59ba7d26796fc9ce", "name": "cicero_0.7.2-3.diff.gz", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa "size": 3964, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz", # noqa }, "cicero_0.7.2-3.dsc": { 
"md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a", "name": "cicero_0.7.2-3.dsc", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa "size": 1864, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": "cicero_0.7.2.orig.tar.gz", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_FILES2 = { "name": "cicero", "version": "0.7.2-4", "files": { "cicero_0.7.2-4.diff.gz": { "md5sum": "1e7e6fc4a59d57c98082a3af78145734", "name": "cicero_0.7.2-4.diff.gz", "sha256": "2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01", # noqa "size": 4038, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz", # noqa }, "cicero_0.7.2-4.dsc": { "md5sum": "1a6c8855a73b4282bb31d15518f18cde", "name": "cicero_0.7.2-4.dsc", "sha256": "913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b", # noqa "size": 1881, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": "cicero_0.7.2.orig.tar.gz", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, } PACKAGES_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, "buster/contrib/0.7.2-4": PACKAGE_FILES2, } -def test_debian_first_visit(swh_config, requests_mock_datadir): +def test_debian_first_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ loader = DebianLoader( - url=URL, 
date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGE_PER_VERSION, + swh_storage, + URL, + date="2019-10-12T05:58:09.165557+00:00", + packages=PACKAGE_PER_VERSION, ) actual_load_status = loader.load() expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } - assert_last_visit_matches(loader.storage, URL, status="full", type="deb") + assert_last_visit_matches(swh_storage, URL, status="full", type="deb") - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, # all artifacts under 1 revision "skipped_content": 0, "snapshot": 1, } == stats expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), ) }, ) # different than the previous loader as no release is done - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) -def test_debian_first_visit_then_another_visit(swh_config, requests_mock_datadir): +def test_debian_first_visit_then_another_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a debian project ends up with 1 snapshot """ loader = DebianLoader( - url=URL, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGE_PER_VERSION + swh_storage, + URL, + date="2019-10-12T05:58:09.165557+00:00", + packages=PACKAGE_PER_VERSION, ) actual_load_status = loader.load() expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } - assert_last_visit_matches(loader.storage, URL, status="full", type="deb") + assert_last_visit_matches(swh_storage, URL, status="full", type="deb") - stats = get_stats(loader.storage) + stats = 
get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, # all artifacts under 1 revision "skipped_content": 0, "snapshot": 1, } == stats expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), ) }, ) # different than the previous loader as no release is done - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) # No change in between load actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" - assert_last_visit_matches(loader.storage, URL, status="full", type="deb") + assert_last_visit_matches(swh_storage, URL, status="full", type="deb") - stats2 = get_stats(loader.storage) + stats2 = get_stats(swh_storage) assert { "content": 42 + 0, "directory": 2 + 0, "origin": 1, "origin_visit": 1 + 1, # a new visit occurred "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, # same snapshot across 2 visits } == stats2 urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("http://deb.debian.org") ] # visited each package artifact twice across 2 visits assert len(urls) == len(set(urls)) -def test_uid_to_person(): +def test_debian_uid_to_person(): uid = "Someone Name " actual_person = uid_to_person(uid) assert actual_person == { "name": "Someone Name", "email": "someone@orga.org", "fullname": uid, } -def test_prepare_person(): +def test_debian_prepare_person(): actual_author = prepare_person( { "name": "Someone Name", "email": "someone@orga.org", "fullname": "Someone Name ", } ) assert actual_author == Person( name=b"Someone Name", email=b"someone@orga.org", fullname=b"Someone Name ", ) -def test_download_package(datadir, tmpdir, requests_mock_datadir): +def test_debian_download_package(datadir, tmpdir, 
requests_mock_datadir): tmpdir = str(tmpdir) # py3.5 work around (LocalPath issue) p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) all_hashes = download_package(p_info, tmpdir) assert all_hashes == { "cicero_0.7.2-3.diff.gz": { "checksums": { "sha1": "0815282053f21601b0ec4adf7a8fe47eace3c0bc", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa }, "filename": "cicero_0.7.2-3.diff.gz", "length": 3964, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2-3.diff.gz" ), }, "cicero_0.7.2-3.dsc": { "checksums": { "sha1": "abbec4e8efbbc80278236e1dd136831eac08accd", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa }, "filename": "cicero_0.7.2-3.dsc", "length": 1864, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc" ), }, "cicero_0.7.2.orig.tar.gz": { "checksums": { "sha1": "a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa }, "filename": "cicero_0.7.2.orig.tar.gz", "length": 96527, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2.orig.tar.gz" ), }, } -def test_dsc_information_ok(): +def test_debian_dsc_information_ok(): fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) dsc_url, dsc_name = dsc_information(p_info) assert dsc_url == PACKAGE_FILES["files"][fname]["uri"] assert dsc_name == PACKAGE_FILES["files"][fname]["name"] -def test_dsc_information_not_found(): +def test_debian_dsc_information_not_found(): fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) p_info.files.pop(fname) dsc_url, dsc_name = dsc_information(p_info) assert dsc_url is None assert dsc_name is None -def test_dsc_information_too_many_dsc_entries(): +def test_debian_dsc_information_too_many_dsc_entries(): # craft an extra dsc file fname = "cicero_0.7.2-3.dsc" p_info = 
DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) data = p_info.files[fname] fname2 = fname.replace("cicero", "ciceroo") p_info.files[fname2] = data with pytest.raises( ValueError, match="Package %s_%s references several dsc" % (PACKAGE_FILES["name"], PACKAGE_FILES["version"]), ): dsc_information(p_info) -def test_get_intrinsic_package_metadata(requests_mock_datadir, datadir, tmp_path): +def test_debian_get_intrinsic_package_metadata( + requests_mock_datadir, datadir, tmp_path +): tmp_path = str(tmp_path) # py3.5 compat. p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) logger.debug("p_info: %s", p_info) # download the packages all_hashes = download_package(p_info, tmp_path) # Retrieve information from package _, dsc_name = dsc_information(p_info) dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()] # Extract information from package extracted_path = extract_package(dl_artifacts, tmp_path) # Retrieve information on package dsc_path = path.join(path.dirname(extracted_path), dsc_name) actual_package_info = get_intrinsic_package_metadata( p_info, dsc_path, extracted_path ) logger.debug("actual_package_info: %s", actual_package_info) assert actual_package_info == IntrinsicPackageMetadata( changelog=DebianPackageChangelog( date="2014-10-19T16:52:35+02:00", history=[ ("cicero", "0.7.2-2"), ("cicero", "0.7.2-1"), ("cicero", "0.7-1"), ], person={ "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ), maintainers=[ { "email": "debian-accessibility@lists.debian.org", "fullname": "Debian Accessibility Team " "", "name": "Debian Accessibility Team", }, { "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ], name="cicero", version="0.7.2-3", ) -def test_debian_multiple_packages(swh_config, requests_mock_datadir): +def test_debian_multiple_packages(swh_storage, requests_mock_datadir): loader = DebianLoader( - url=URL, 
date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGES_PER_VERSION + swh_storage, + URL, + date="2019-10-12T05:58:09.165557+00:00", + packages=PACKAGES_PER_VERSION, ) actual_load_status = loader.load() expected_snapshot_id = "defc19021187f3727293121fcf6c5c82cb923604" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } - assert_last_visit_matches(loader.storage, URL, status="full", type="deb") + assert_last_visit_matches(swh_storage, URL, status="full", type="deb") expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("2807f5b3f84368b4889a9ae827fe85854ffecf07"), ), b"releases/buster/contrib/0.7.2-4": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("8224139c274c984147ef4b09aa0e462c55a10bd3"), ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) -def test_resolve_revision_from_edge_cases(): +def test_debian_resolve_revision_from_edge_cases(): """Solving revision with empty data will result in unknown revision """ empty_artifact = { "name": PACKAGE_FILES["name"], "version": PACKAGE_FILES["version"], } for package_artifacts in [empty_artifact, PACKAGE_FILES]: p_info = DebianPackageInfo.from_metadata(package_artifacts, url=URL) actual_revision = resolve_revision_from({}, p_info) assert actual_revision is None for known_artifacts in [{}, PACKAGE_FILES]: actual_revision = resolve_revision_from( known_artifacts, DebianPackageInfo.from_metadata(empty_artifact, url=URL) ) assert actual_revision is None known_package_artifacts = { b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07": { "extrinsic": { # empty }, # ... 
removed the unnecessary intermediary data } } assert not resolve_revision_from( known_package_artifacts, DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL) ) -def test_resolve_revision_from_edge_cases_hit_and_miss(): +def test_debian_resolve_revision_from_edge_cases_hit_and_miss(): """Solving revision with inconsistent data will result in unknown revision """ artifact_metadata = PACKAGE_FILES2 p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL) expected_revision_id = ( b"(\x08\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xff\x85\x85O\xfe\xcf\x07" # noqa ) known_package_artifacts = { expected_revision_id: { "extrinsic": {"raw": PACKAGE_FILES,}, # ... removed the unnecessary intermediary data } } actual_revision = resolve_revision_from(known_package_artifacts, p_info) assert actual_revision is None -def test_resolve_revision_from(): +def test_debian_resolve_revision_from(): """Solving revision with consistent data will solve the revision """ artifact_metadata = PACKAGE_FILES p_info = DebianPackageInfo.from_metadata(artifact_metadata, url=URL) expected_revision_id = ( b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07" # noqa ) files = artifact_metadata["files"] # shuffling dict's keys keys = list(files.keys()) random.shuffle(keys) package_files = { "name": PACKAGE_FILES["name"], "version": PACKAGE_FILES["version"], "files": {k: files[k] for k in keys}, } known_package_artifacts = { expected_revision_id: { "extrinsic": {"raw": package_files,}, # ... 
removed the unnecessary intermediary data } } actual_revision = resolve_revision_from(known_package_artifacts, p_info) assert actual_revision == expected_revision_id diff --git a/swh/loader/package/debian/tests/test_tasks.py b/swh/loader/package/debian/tests/test_tasks.py index 62b39af..2668cb5 100644 --- a/swh/loader/package/debian/tests/test_tasks.py +++ b/swh/loader/package/debian/tests/test_tasks.py @@ -1,21 +1,21 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -def test_debian_loader( +def test_tasks_debian_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): - mock_loader = mocker.patch("swh.loader.package.debian.loader.DebianLoader.load") - mock_loader.return_value = {"status": "eventful"} + mock_load = mocker.patch("swh.loader.package.debian.loader.DebianLoader.load") + mock_load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.debian.tasks.LoadDebian", - kwargs={"url": "some-url", "date": "some-date", "packages": {}}, + kwargs=dict(url="some-url", date="some-date", packages={}), ) assert res res.wait() assert res.successful() - + assert mock_load.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py index a198191..f8d5646 100644 --- a/swh/loader/package/deposit/loader.py +++ b/swh/loader/package/deposit/loader.py @@ -1,353 +1,378 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more 
information import datetime from datetime import timezone import json import logging from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union import attr import requests +from swh.core.config import load_from_envvar +from swh.loader.core.loader import DEFAULT_CONFIG from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import cached_method, download from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, Person, Revision, RevisionType, Sha1Git, TimestampWithTimezone, ) from swh.storage.algos.snapshot import snapshot_get_all_branches +from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) def now() -> datetime.datetime: return datetime.datetime.now(tz=timezone.utc) @attr.s class DepositPackageInfo(BasePackageInfo): filename = attr.ib(type=str) # instead of Optional[str] raw_info = attr.ib(type=Dict[str, Any]) author_date = attr.ib(type=datetime.datetime) """codemeta:dateCreated if any, deposit completed_date otherwise""" commit_date = attr.ib(type=datetime.datetime) """codemeta:datePublished if any, deposit completed_date otherwise""" client = attr.ib(type=str) id = attr.ib(type=int) """Internal ID of the deposit in the deposit DB""" collection = attr.ib(type=str) """The collection in the deposit; see SWORD specification.""" author = attr.ib(type=Person) committer = attr.ib(type=Person) revision_parents = attr.ib(type=Tuple[Sha1Git, ...]) """Revisions created from previous deposits, that will be used as parents of the revision created for this deposit.""" @classmethod def from_metadata( cls, metadata: Dict[str, Any], url: str, filename: str ) -> "DepositPackageInfo": # Note: # `date` and `committer_date` are always transmitted by the deposit read api # which computes itself the values. 
The loader needs to use those to create the # revision. all_metadata_raw: List[str] = metadata["metadata_raw"] raw_info = { "origin": metadata["origin"], "origin_metadata": { "metadata": metadata["metadata_dict"], "provider": metadata["provider"], "tool": metadata["tool"], }, } depo = metadata["deposit"] return cls( url=url, filename=filename, author_date=depo["author_date"], commit_date=depo["committer_date"], client=depo["client"], id=depo["id"], collection=depo["collection"], author=parse_author(depo["author"]), committer=parse_author(depo["committer"]), revision_parents=tuple(hash_to_bytes(p) for p in depo["revision_parents"]), raw_info=raw_info, directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( discovery_date=now(), metadata=raw_metadata.encode(), format="sword-v2-atom-codemeta-v2", ) for raw_metadata in all_metadata_raw ], ) class DepositLoader(PackageLoader[DepositPackageInfo]): """Load a deposited artifact into swh archive. """ visit_type = "deposit" - def __init__(self, url: str, deposit_id: str): + def __init__( + self, + storage: StorageInterface, + url: str, + deposit_id: str, + deposit_client: "ApiClient", + max_content_size: Optional[int] = None, + ): """Constructor Args: url: Origin url to associate the artifacts/metadata to deposit_id: Deposit identity + deposit_client: Deposit api client """ - super().__init__(url=url) + super().__init__(storage=storage, url=url, max_content_size=max_content_size) - config_deposit = self.config["deposit"] self.deposit_id = deposit_id - self.client = ApiClient(url=config_deposit["url"], auth=config_deposit["auth"]) + self.client = deposit_client + + @classmethod + def from_configfile(cls, **kwargs: Any): + """Instantiate a loader from the configuration loaded from the + SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their + value is not None. 
+ + Args: + kwargs: kwargs passed to the loader instantiation + + """ + config = dict(load_from_envvar(DEFAULT_CONFIG)) + config.update({k: v for k, v in kwargs.items() if v is not None}) + deposit_client = ApiClient(**config.pop("deposit")) + return cls.from_config(deposit_client=deposit_client, **config) def get_versions(self) -> Sequence[str]: # only 1 branch 'HEAD' with no alias since we only have 1 snapshot # branch return ["HEAD"] def get_metadata_authority(self) -> MetadataAuthority: provider = self.metadata()["provider"] assert provider["provider_type"] == MetadataAuthorityType.DEPOSIT_CLIENT.value return MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url=provider["provider_url"], metadata={ "name": provider["provider_name"], **(provider["metadata"] or {}), }, ) def get_metadata_fetcher(self) -> MetadataFetcher: tool = self.metadata()["tool"] return MetadataFetcher( name=tool["name"], version=tool["version"], metadata=tool["configuration"], ) def get_package_info( self, version: str ) -> Iterator[Tuple[str, DepositPackageInfo]]: p_info = DepositPackageInfo.from_metadata( self.metadata(), url=self.url, filename="archive.zip", ) yield "HEAD", p_info def download_package( self, p_info: DepositPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Override to allow use of the dedicated deposit client """ return [self.client.archive_get(self.deposit_id, tmpdir, p_info.filename)] def build_revision( self, p_info: DepositPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: message = ( f"{p_info.client}: Deposit {p_info.id} in collection {p_info.collection}" ).encode("utf-8") return Revision( type=RevisionType.TAR, message=message, author=p_info.author, date=TimestampWithTimezone.from_dict(p_info.author_date), committer=p_info.committer, committer_date=TimestampWithTimezone.from_dict(p_info.commit_date), parents=p_info.revision_parents, directory=directory, synthetic=True, metadata={ "extrinsic": { "provider": 
self.client.metadata_url(self.deposit_id), "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: metadata = self.metadata() all_metadata_raw: List[str] = metadata["metadata_raw"] origin_metadata = json.dumps( { "metadata": all_metadata_raw, "provider": metadata["provider"], "tool": metadata["tool"], } ).encode() return [ RawExtrinsicMetadataCore( discovery_date=now(), metadata=raw_meta.encode(), format="sword-v2-atom-codemeta-v2", ) for raw_meta in all_metadata_raw ] + [ RawExtrinsicMetadataCore( discovery_date=now(), metadata=origin_metadata, format="original-artifacts-json", ) ] @cached_method def metadata(self): """Returns metadata from the deposit server""" return self.client.metadata_get(self.deposit_id) def load(self) -> Dict: # First making sure the deposit is known on the deposit's RPC server # prior to trigger a loading try: self.metadata() except ValueError: logger.error(f"Unknown deposit {self.deposit_id}, ignoring") return {"status": "failed"} # Then usual loading r = super().load() success = r["status"] != "failed" # Update deposit status try: if not success: self.client.status_update(self.deposit_id, status="failed") return r snapshot_id = hash_to_bytes(r["snapshot_id"]) snapshot = snapshot_get_all_branches(self.storage, snapshot_id) if not snapshot: return r branches = snapshot.branches logger.debug("branches: %s", branches) if not branches: return r rev_id = branches[b"HEAD"].target revision = self.storage.revision_get([rev_id])[0] if not revision: return r # update the deposit's status to success with its # revision-id and directory-id self.client.status_update( self.deposit_id, status="done", revision_id=hash_to_hex(rev_id), directory_id=hash_to_hex(revision.directory), snapshot_id=r["snapshot_id"], origin_url=self.url, ) except Exception: logger.exception("Problem when trying to update the deposit's status") return {"status": "failed"} return r def 
def parse_author(author) -> "Person":
    """Build a model Person from a deposit author mapping.

    ``author`` must provide "fullname", "name" and "email" keys; each
    value is encoded to UTF-8 bytes as the model requires.
    """
    encoded = {key: author[key].encode("utf-8") for key in ("fullname", "name", "email")}
    return Person(**encoded)


class ApiClient:
    """Private Deposit Api client

    Thin HTTP wrapper over the deposit server's private API, with optional
    basic authentication.
    """

    def __init__(self, url, auth: Optional[Mapping[str, str]]):
        # Normalize the base url so endpoint f-strings can always add "/".
        self.base_url = url.rstrip("/")
        if auth:
            self.auth = (auth["username"], auth["password"])
        else:
            self.auth = None

    def do(self, method: str, url: str, *args, **kwargs):
        """Internal method to deal with requests, possibly with basic http
           authentication.

        Args:
            method (str): supported http methods as in get/post/put

        Returns:
            The request's execution output

        """
        if self.auth:
            kwargs["auth"] = self.auth
        return getattr(requests, method)(url, *args, **kwargs)

    def archive_get(
        self, deposit_id: Union[int, str], tmpdir: str, filename: str
    ) -> Tuple[str, Dict]:
        """Retrieve deposit's archive artifact locally

        """
        return download(
            f"{self.base_url}/{deposit_id}/raw/",
            dest=tmpdir,
            filename=filename,
            auth=self.auth,
        )

    def metadata_url(self, deposit_id: Union[int, str]) -> str:
        """Return the deposit server endpoint serving this deposit's metadata."""
        return f"{self.base_url}/{deposit_id}/meta/"

    def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]:
        """Retrieve deposit's metadata artifact as json

        Raises:
            ValueError: when the deposit server does not answer 2xx
        """
        url = self.metadata_url(deposit_id)
        response = self.do("get", url)
        if not response.ok:
            msg = f"Problem when retrieving deposit metadata at {url}"
            logger.error(msg)
            raise ValueError(msg)
        return response.json()

    def status_update(
        self,
        deposit_id: Union[int, str],
        status: str,
        revision_id: Optional[str] = None,
        directory_id: Optional[str] = None,
        snapshot_id: Optional[str] = None,
        origin_url: Optional[str] = None,
    ):
        """Update deposit's information including status, and persistent
           identifiers result of the loading.

        """
        payload = {"status": status}
        # Only send the identifiers that were actually computed.
        for key, value in (
            ("revision_id", revision_id),
            ("directory_id", directory_id),
            ("snapshot_id", snapshot_id),
            ("origin_url", origin_url),
        ):
            if value:
                payload[key] = value
        self.do("put", f"{self.base_url}/{deposit_id}/update/", json=payload)
"deposit": { "url": "https://deposit.softwareheritage.org/1/private", "auth": {"username": "user", "password": "pass",}, }, } ) return config + + +@pytest.fixture +def deposit_client(swh_loader_config): + return ApiClient(**swh_loader_config["deposit"]) diff --git a/swh/loader/package/deposit/tests/test_deposit.py b/swh/loader/package/deposit/tests/test_deposit.py index c87c01c..f03cd3a 100644 --- a/swh/loader/package/deposit/tests/test_deposit.py +++ b/swh/loader/package/deposit/tests/test_deposit.py @@ -1,433 +1,450 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import re from typing import List import attr import pytest from swh.core.pytest_plugin import requests_mock_datadir_factory -from swh.loader.package.deposit.loader import DepositLoader +from swh.loader.package.deposit.loader import ApiClient, DepositLoader from swh.loader.package.loader import now from swh.loader.package.tests.common import check_metadata_paths from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.identifiers import SWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) DEPOSIT_URL = "https://deposit.softwareheritage.org/1/private" @pytest.fixture def requests_mock_datadir(requests_mock_datadir): """Enhance default mock data to mock put requests as the loader does some internal update queries there. 
""" requests_mock_datadir.put(re.compile("https")) return requests_mock_datadir -def test_deposit_init_ok(swh_config, swh_loader_config): +def test_deposit_init_ok(swh_storage, deposit_client, swh_loader_config): url = "some-url" deposit_id = 999 - loader = DepositLoader(url, deposit_id) # Something that does not exist + loader = DepositLoader( + swh_storage, url, deposit_id, deposit_client + ) # Something that does not exist assert loader.url == url assert loader.client is not None assert loader.client.base_url == swh_loader_config["deposit"]["url"] -def test_deposit_loading_unknown_deposit(swh_config, requests_mock_datadir): +def test_deposit_from_configfile(swh_config): + """Ensure the deposit instantiation is ok + + """ + loader = DepositLoader.from_configfile(url="some-url", deposit_id="666") + + assert isinstance(loader.client, ApiClient) + + +def test_deposit_loading_unknown_deposit( + swh_storage, deposit_client, requests_mock_datadir +): """Loading an unknown deposit should fail no origin, no visit, no snapshot """ # private api url form: 'https://deposit.s.o/1/private/hal/666/raw/' url = "some-url" unknown_deposit_id = 667 - loader = DepositLoader(url, unknown_deposit_id) # does not exist + loader = DepositLoader( + swh_storage, url, unknown_deposit_id, deposit_client + ) # does not exist actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} stats = get_stats(loader.storage) assert { "content": 0, "directory": 0, "origin": 0, "origin_visit": 0, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 0, } == stats requests_mock_datadir_missing_one = requests_mock_datadir_factory( ignore_urls=[f"{DEPOSIT_URL}/666/raw/",] ) def test_deposit_loading_failure_to_retrieve_1_artifact( - swh_config, requests_mock_datadir_missing_one + swh_storage, deposit_client, requests_mock_datadir_missing_one ): """Deposit with missing artifact ends up with an uneventful/partial visit """ # private api url form: 
'https://deposit.s.o/1/private/hal/666/raw/' url = "some-url-2" deposit_id = 666 - loader = DepositLoader(url, deposit_id) + loader = DepositLoader(swh_storage, url, deposit_id, deposit_client) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(loader.storage, url, status="partial", type="deposit") stats = get_stats(loader.storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats -def test_revision_metadata_structure(swh_config, requests_mock_datadir): +def test_deposit_revision_metadata_structure( + swh_storage, deposit_client, requests_mock_datadir +): url = "https://hal-test.archives-ouvertes.fr/some-external-id" deposit_id = 666 - loader = DepositLoader(url, deposit_id) + loader = DepositLoader(swh_storage, url, deposit_id, deposit_client) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_revision_id = hash_to_bytes("637318680351f5d78856d13264faebbd91efe9bb") revision = loader.storage.revision_get([expected_revision_id])[0] assert revision is not None check_metadata_paths( revision.metadata, paths=[ ("extrinsic.provider", str), ("extrinsic.when", str), ("extrinsic.raw", dict), ("original_artifact", list), ], ) # Only 2 top-level keys now assert set(revision.metadata.keys()) == {"extrinsic", "original_artifact"} for original_artifact in revision.metadata["original_artifact"]: check_metadata_paths( original_artifact, paths=[("filename", str), ("length", int), ("checksums", dict),], ) -def test_deposit_loading_ok(swh_config, requests_mock_datadir): +def test_deposit_loading_ok(swh_storage, deposit_client, requests_mock_datadir): url = "https://hal-test.archives-ouvertes.fr/some-external-id" deposit_id = 666 - loader = DepositLoader(url, deposit_id) + loader = 
DepositLoader(swh_storage, url, deposit_id, deposit_client) actual_load_status = loader.load() expected_snapshot_id = "b2b327b33dc85818bd23c3ccda8b7e675a66ecbd" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches(loader.storage, url, status="full", type="deposit") stats = get_stats(loader.storage) assert { "content": 303, "directory": 12, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == stats revision_id_hex = "637318680351f5d78856d13264faebbd91efe9bb" revision_id = hash_to_bytes(revision_id_hex) expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"HEAD": SnapshotBranch( target=revision_id, target_type=TargetType.REVISION, ), }, ) check_snapshot(expected_snapshot, storage=loader.storage) revision = loader.storage.revision_get([revision_id])[0] assert revision is not None # check metadata fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",) authority = MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url="https://hal-test.archives-ouvertes.fr/", ) # Check origin metadata orig_meta = loader.storage.raw_extrinsic_metadata_get( MetadataTargetType.ORIGIN, url, authority ) assert orig_meta.next_page_token is None raw_meta = loader.client.metadata_get(deposit_id) all_metadata_raw: List[str] = raw_meta["metadata_raw"] # 2 raw metadata xml + 1 json dict assert len(orig_meta.results) == len(all_metadata_raw) + 1 orig_meta0 = orig_meta.results[0] assert orig_meta0.authority == authority assert orig_meta0.fetcher == fetcher # Check directory metadata directory_id = hash_to_hex(revision.directory) directory_swhid = SWHID(object_type="directory", object_id=directory_id) actual_dir_meta = loader.storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, directory_swhid, authority ) assert actual_dir_meta.next_page_token is None assert len(actual_dir_meta.results) == len(all_metadata_raw) for dir_meta 
in actual_dir_meta.results: assert dir_meta.authority == authority assert dir_meta.fetcher == fetcher assert dir_meta.metadata.decode() in all_metadata_raw # Retrieve the information for deposit status update query to the deposit urls = [ m for m in requests_mock_datadir.request_history if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/" ] assert len(urls) == 1 update_query = urls[0] body = update_query.json() expected_body = { "status": "done", "revision_id": revision_id_hex, "directory_id": hash_to_hex(revision.directory), "snapshot_id": expected_snapshot_id, "origin_url": url, } assert body == expected_body -def test_deposit_loading_ok_2(swh_config, requests_mock_datadir): +def test_deposit_loading_ok_2(swh_storage, deposit_client, requests_mock_datadir): """Field dates should be se appropriately """ external_id = "some-external-id" url = f"https://hal-test.archives-ouvertes.fr/{external_id}" deposit_id = 777 - loader = DepositLoader(url, deposit_id) + loader = DepositLoader(swh_storage, url, deposit_id, deposit_client) actual_load_status = loader.load() expected_snapshot_id = "3e68440fdd7c81d283f8f3aebb6f0c8657864192" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches(loader.storage, url, status="full", type="deposit") revision_id = "564d18943d71be80d0d73b43a77cfb205bcde96c" expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"HEAD": SnapshotBranch( target=hash_to_bytes(revision_id), target_type=TargetType.REVISION ) }, ) check_snapshot(expected_snapshot, storage=loader.storage) raw_meta = loader.client.metadata_get(deposit_id) # Ensure the date fields are set appropriately in the revision # Retrieve the revision revision = loader.storage.revision_get([hash_to_bytes(revision_id)])[0] assert revision assert revision.date.to_dict() == raw_meta["deposit"]["author_date"] assert revision.committer_date.to_dict() == raw_meta["deposit"]["committer_date"] read_api = 
f"{DEPOSIT_URL}/{deposit_id}/meta/" provider = { "provider_name": "hal", "provider_type": "deposit_client", "provider_url": "https://hal-test.archives-ouvertes.fr/", "metadata": None, } tool = { "name": "swh-deposit", "version": "0.0.1", "configuration": {"sword_version": "2"}, } assert revision.metadata == { "extrinsic": { "provider": read_api, "raw": { "origin": {"type": "deposit", "url": url,}, "origin_metadata": { "metadata": raw_meta["metadata_dict"], "provider": provider, "tool": tool, }, }, "when": revision.metadata["extrinsic"]["when"], # dynamic }, "original_artifact": [ { "checksums": { "sha1": "f8c63d7c890a7453498e6cf9fef215d85ec6801d", "sha256": "474bf646aeeff6d945eb752b1a9f8a40f3d81a88909ee7bd2d08cc822aa361e6", # noqa }, "filename": "archive.zip", "length": 956830, "url": "https://deposit.softwareheritage.org/1/private/777/raw/", } ], } fetcher = MetadataFetcher(name="swh-deposit", version="0.0.1",) authority = MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url="https://hal-test.archives-ouvertes.fr/", ) # Check the origin metadata swh side origin_extrinsic_metadata = loader.storage.raw_extrinsic_metadata_get( MetadataTargetType.ORIGIN, url, authority ) assert origin_extrinsic_metadata.next_page_token is None all_metadata_raw: List[str] = raw_meta["metadata_raw"] # 1 raw metadata xml + 1 json dict assert len(origin_extrinsic_metadata.results) == len(all_metadata_raw) + 1 expected_metadata = [] for idx, raw_meta in enumerate(all_metadata_raw): origin_meta = origin_extrinsic_metadata.results[idx] expected_metadata.append( RawExtrinsicMetadata( type=MetadataTargetType.ORIGIN, target=url, discovery_date=origin_meta.discovery_date, metadata=raw_meta.encode(), format="sword-v2-atom-codemeta-v2", authority=authority, fetcher=fetcher, ) ) origin_metadata = { "metadata": all_metadata_raw, "provider": provider, "tool": tool, } expected_metadata.append( RawExtrinsicMetadata( type=MetadataTargetType.ORIGIN, target=url, 
discovery_date=origin_extrinsic_metadata.results[-1].discovery_date, metadata=json.dumps(origin_metadata).encode(), format="original-artifacts-json", authority=authority, fetcher=fetcher, ) ) assert len(origin_extrinsic_metadata.results) == len(expected_metadata) for orig_meta in origin_extrinsic_metadata.results: assert orig_meta in expected_metadata # Check the revision metadata swh side directory_id = hash_to_hex(revision.directory) directory_swhid = SWHID(object_type="directory", object_id=directory_id) actual_directory_metadata = loader.storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, directory_swhid, authority ) assert actual_directory_metadata.next_page_token is None assert len(actual_directory_metadata.results) == len(all_metadata_raw) revision_swhid = SWHID(object_type="revision", object_id=revision_id) dir_metadata_template = RawExtrinsicMetadata( type=MetadataTargetType.DIRECTORY, target=directory_swhid, format="sword-v2-atom-codemeta-v2", authority=authority, fetcher=fetcher, origin=url, revision=revision_swhid, # to satisfy the constructor discovery_date=now(), metadata=b"", ) expected_directory_metadata = [] for idx, raw_meta in enumerate(all_metadata_raw): dir_metadata = actual_directory_metadata.results[idx] expected_directory_metadata.append( attr.evolve( dir_metadata_template, discovery_date=dir_metadata.discovery_date, metadata=raw_meta.encode(), ) ) assert actual_directory_metadata.results == expected_directory_metadata # Retrieve the information for deposit status update query to the deposit urls = [ m for m in requests_mock_datadir.request_history if m.url == f"{DEPOSIT_URL}/{deposit_id}/update/" ] assert len(urls) == 1 update_query = urls[0] body = update_query.json() expected_body = { "status": "done", "revision_id": revision_id, "directory_id": hash_to_hex(revision.directory), "snapshot_id": expected_snapshot_id, "origin_url": url, } assert body == expected_body diff --git a/swh/loader/package/deposit/tests/test_tasks.py 
b/swh/loader/package/deposit/tests/test_tasks.py index 0e2b739..248b88b 100644 --- a/swh/loader/package/deposit/tests/test_tasks.py +++ b/swh/loader/package/deposit/tests/test_tasks.py @@ -1,21 +1,24 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -def test_deposit_loader( +def test_tasks_deposit_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): - mock_loader = mocker.patch("swh.loader.package.deposit.loader.DepositLoader.load") - mock_loader.return_value = {"status": "eventful"} + mock_loader = mocker.patch( + "swh.loader.package.deposit.loader.DepositLoader.from_configfile" + ) + mock_loader.return_value = mock_loader + mock_loader.load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.deposit.tasks.LoadDeposit", - kwargs={"url": "some-url", "deposit_id": "some-d-id",}, + kwargs=dict(url="some-url", deposit_id="some-d-id",), ) assert res res.wait() assert res.successful() - + assert mock_loader.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index bb93a3b..a3eaafc 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,819 +1,803 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from itertools import islice import json import logging import os import sys import tempfile from typing import ( Any, Dict, Generic, Iterable, Iterator, List, Mapping, Optional, Sequence, Tuple, TypeVar, ) import attr import 
@attr.s
class BasePackageInfo:
    """Per-version information about one package artifact to load.

    Carries the artifact's download ``url`` and optional ``filename``, plus
    extrinsic metadata records to attach to the directory produced from it.
    (The previous docstring described an unrelated dict primary-key helper
    and did not match this class.)
    """

    # Download URL of the artifact.
    url = attr.ib(type=str)
    # Target filename for the download; None lets it be derived from the URL.
    filename = attr.ib(type=Optional[str])

    # The following attribute has kw_only=True in order to allow subclasses
    # to add attributes. Without kw_only, attributes without default values
    # cannot go after attributes with default values.
    #
    # Use attr.Factory(list), not a literal []: attrs does not copy mutable
    # defaults, so `default=[]` would make every instance share one list and
    # metadata appended for one package would leak into all others.
    directory_extrinsic_metadata = attr.ib(
        type=List[RawExtrinsicMetadataCore], default=attr.Factory(list), kw_only=True,
    )

    # TODO: add support for metadata for directories and contents

    @property
    def ID_KEYS(self):
        """Attribute names forming the artifact identity; subclasses must
        define it, accessing it on the base class raises."""
        raise NotImplementedError(f"{self.__class__.__name__} is missing ID_KEYS")

    def artifact_identity(self):
        """Return this artifact's identity: the values of the attributes
        named by ``ID_KEYS``, in order."""
        return [getattr(self, k) for k in self.ID_KEYS]


TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo)
- - """ - if "storage" not in self.config: - raise ValueError("Misconfiguration, at least the storage key should be set") def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Raises: `class:swh.loader.exception.NotFound` error when failing to read the published package versions. Returns: Sequence of published versions """ return [] def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]: """Given a release version of a package, retrieve the associated package information for such version. Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_revision( self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: """Build the revision from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. Args: p_info: Package information uncompressed_path: Artifact uncompressed path on disk Returns: Revision object """ raise NotImplementedError("build_revision") def get_default_version(self) -> str: """Retrieve the latest release version if any. Returns: Latest version """ return "" def last_snapshot(self) -> Optional[Snapshot]: """Retrieve the last snapshot out of the last visit. """ return snapshot_get_latest(self.storage, self.url) def known_artifacts( self, snapshot: Optional[Snapshot] ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]: """Retrieve the known releases/artifact for the origin. Args snapshot: snapshot for the visit Returns: Dict of keys revision id (bytes), values a metadata Dict. 
""" if not snapshot: return {} # retrieve only revisions (e.g the alias we do not want here) revs = [ rev.target for rev in snapshot.branches.values() if rev and rev.target_type == TargetType.REVISION ] known_revisions = self.storage.revision_get(revs) return { revision.id: revision.metadata for revision in known_revisions if revision } def resolve_revision_from( self, known_artifacts: Dict, p_info: TPackageInfo, ) -> Optional[bytes]: """Resolve the revision from a snapshot and an artifact metadata dict. If the artifact has already been downloaded, this will return the existing revision targeting that uncompressed artifact directory. Otherwise, this returns None. Args: snapshot: Snapshot p_info: Package information Returns: None or revision identifier """ return None def download_package( self, p_info: TPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in in the tmpdir folder. Default implementation expects the artifacts package info to be about one artifact per package. Note that most implementation have 1 artifact per package. But some implementation have multiple artifacts per package (debian), some have none, the package is the artifact (gnu). Args: artifacts_package_info: Information on the package artifacts to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ return [download(p_info.url, dest=tmpdir, filename=p_info.filename)] def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: """Uncompress the artifact(s) in the destination folder dest. Optionally, this could need to use the p_info dict for some more information (debian). 
""" uncompressed_path = os.path.join(dest, "src") for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: """Return an extra dict of branches that are used to update the set of branches. """ return {} def load(self) -> Dict: """Load for a specific origin the associated contents. for each package version of the origin 1. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 2. Extract the downloaded files By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 3. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 4. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 5. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 6. 
           Generate and load the snapshot for the visit

           Using the revisions/releases collected at step 5., and the branch
           information from step 0., generate a snapshot and load it into the
           Software Heritage archive

        """
        status_load = "uneventful"  # either: eventful, uneventful, failed
        status_visit = "full"  # see swh.model.model.OriginVisitStatus
        tmp_revisions = {}  # type: Dict[str, List]
        snapshot = None
        failed_branches: List[str] = []

        def finalize_visit() -> Dict[str, Any]:
            """Finalize the visit:

            - flush eventual unflushed data to storage
            - update origin visit's status
            - return the task's status

            """
            self.storage.flush()

            snapshot_id: Optional[bytes] = None
            if snapshot and snapshot.id:  # to prevent the snapshot.id to b""
                snapshot_id = snapshot.id
            assert visit.visit
            visit_status = OriginVisitStatus(
                origin=self.url,
                visit=visit.visit,
                type=self.visit_type,
                date=now(),
                status=status_visit,
                snapshot=snapshot_id,
            )
            self.storage.origin_visit_status_add([visit_status])
            result: Dict[str, Any] = {
                "status": status_load,
            }
            if snapshot_id:
                result["snapshot_id"] = hash_to_hex(snapshot_id)
            if failed_branches:
                logger.warning("%d failed branches", len(failed_branches))
                # Only log the first 50 failures to keep log volume bounded.
                for i, urls in enumerate(islice(failed_branches, 50)):
                    prefix_url = "Failed branches: " if i == 0 else ""
                    logger.warning("%s%s", prefix_url, urls)

            return result

        # Prepare origin and origin_visit
        origin = Origin(url=self.url)
        try:
            self.storage.origin_add([origin])
            visit = list(
                self.storage.origin_visit_add(
                    [
                        OriginVisit(
                            origin=self.url,
                            date=self.visit_date,
                            type=self.visit_type,
                        )
                    ]
                )
            )[0]
        except Exception as e:
            logger.exception("Failed to initialize origin_visit for %s", self.url)
            sentry_sdk.capture_exception(e)
            # No visit was recorded, so there is no visit status to finalize.
            return {"status": "failed"}

        try:
            last_snapshot = self.last_snapshot()
            logger.debug("last snapshot: %s", last_snapshot)
            known_artifacts = self.known_artifacts(last_snapshot)
            logger.debug("known artifacts: %s", known_artifacts)
        except Exception as e:
            logger.exception("Failed to get previous state for %s", self.url)
            sentry_sdk.capture_exception(e)
            status_visit = "failed"
            status_load = "failed"
            return finalize_visit()

        load_exceptions: List[Exception] = []

        try:
            versions = self.get_versions()
        except NotFound:
            # Upstream origin does not exist (anymore): distinct visit status.
            status_visit = "not_found"
            status_load = "failed"
            return finalize_visit()
        except Exception:
            status_visit = "failed"
            status_load = "failed"
            return finalize_visit()

        for version in versions:
            logger.debug("version: %s", version)
            tmp_revisions[version] = []
            # `p_` stands for `package_`
            for branch_name, p_info in self.get_package_info(version):
                logger.debug("package_info: %s", p_info)
                # Incremental step: reuse an existing revision when possible.
                revision_id = self.resolve_revision_from(known_artifacts, p_info)
                if revision_id is None:
                    try:
                        res = self._load_revision(p_info, origin)
                        if res:
                            (revision_id, directory_id) = res
                            assert revision_id
                            assert directory_id
                            self._load_extrinsic_directory_metadata(
                                p_info, revision_id, directory_id
                            )
                        self.storage.flush()
                        status_load = "eventful"
                    except Exception as e:
                        # NOTE(review): clear_buffers presumably drops objects
                        # buffered for this failed branch so they are not
                        # flushed later — confirm against the storage proxy.
                        self.storage.clear_buffers()
                        load_exceptions.append(e)
                        sentry_sdk.capture_exception(e)
                        logger.exception(
                            "Failed loading branch %s for %s", branch_name, self.url
                        )
                        failed_branches.append(branch_name)
                        continue

                if revision_id is None:
                    continue

                tmp_revisions[version].append((branch_name, revision_id))

        if load_exceptions:
            status_visit = "partial"

        if not tmp_revisions:
            # We could not load any revisions; fail completely
            status_visit = "failed"
            status_load = "failed"
            return finalize_visit()

        try:
            # Retrieve the default release version (the "latest" one)
            default_version = self.get_default_version()
            logger.debug("default version: %s", default_version)
            # Retrieve extra branches
            extra_branches = self.extra_branches()
            logger.debug("extra branches: %s", extra_branches)

            snapshot = self._load_snapshot(
                default_version, tmp_revisions, extra_branches
            )
            self.storage.flush()
        except Exception as e:
            logger.exception("Failed to build snapshot for origin %s", self.url)
            sentry_sdk.capture_exception(e)
            status_visit = "failed"
            status_load = "failed"

        if snapshot:
            try:
                metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id)
                self._load_metadata_objects(metadata_objects)
            except Exception as e:
                logger.exception(
                    "Failed to load extrinsic snapshot metadata for %s", self.url
                )
                sentry_sdk.capture_exception(e)
                # Metadata failures degrade the visit but do not undo the
                # already-loaded snapshot.
                status_visit = "partial"
                status_load = "failed"

        try:
            metadata_objects = self.build_extrinsic_origin_metadata()
            self._load_metadata_objects(metadata_objects)
        except Exception as e:
            logger.exception(
                "Failed to load extrinsic origin metadata for %s", self.url
            )
            sentry_sdk.capture_exception(e)
            status_visit = "partial"
            status_load = "failed"

        return finalize_visit()

    def _load_directory(
        self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str
    ) -> Tuple[str, from_disk.Directory]:
        # Uncompress the downloaded artifact(s), model the resulting on-disk
        # tree with swh.model.from_disk, and persist contents/directories.
        uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir)
        logger.debug("uncompressed_path: %s", uncompressed_path)

        directory = from_disk.Directory.from_disk(
            path=uncompressed_path.encode("utf-8"),
            max_content_length=self.max_content_size,
        )

        contents, skipped_contents, directories = from_disk.iter_directory(directory)

        logger.debug("Number of skipped contents: %s", len(skipped_contents))
        self.storage.skipped_content_add(skipped_contents)
        logger.debug("Number of contents: %s", len(contents))
        self.storage.content_add(contents)

        logger.debug("Number of directories: %s", len(directories))
        self.storage.directory_add(directories)

        return (uncompressed_path, directory)

    def _load_revision(
        self, p_info: TPackageInfo, origin
    ) -> Optional[Tuple[Sha1Git, Sha1Git]]:
        """Does all the loading of a revision itself:

        * downloads a package and uncompresses it
        * loads it from disk
        * adds contents, directories, and revision to self.storage
        * returns (revision_id, directory_id)

        Raises
            exception when unable to download or uncompress artifacts

        """
        with tempfile.TemporaryDirectory() as tmpdir:
            dl_artifacts = self.download_package(p_info, tmpdir)

            (uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir)

            # FIXME: This should be release. cf. D409
            revision = self.build_revision(
                p_info, uncompressed_path, directory=directory.hash
            )
            if not revision:
                # Some artifacts are missing intrinsic metadata
                # skipping those
                return None

            metadata = [metadata for (filepath, metadata) in dl_artifacts]

            extra_metadata: Tuple[str, Any] = (
                "original_artifact",
                metadata,
            )
            if revision.metadata is not None:
                full_metadata = list(revision.metadata.items()) + [extra_metadata]
            else:
                full_metadata = [extra_metadata]

            # TODO: don't add these extrinsic metadata to the revision.
            revision = attr.evolve(revision, metadata=ImmutableDict(full_metadata))

            # Also record the original-artifact info as raw extrinsic metadata
            # on the directory, anchored to this revision.
            original_artifact_metadata = RawExtrinsicMetadata(
                type=MetadataTargetType.DIRECTORY,
                target=SWHID(object_type="directory", object_id=revision.directory),
                discovery_date=self.visit_date,
                authority=SWH_METADATA_AUTHORITY,
                fetcher=self.get_metadata_fetcher(),
                format="original-artifacts-json",
                metadata=json.dumps(metadata).encode(),
                origin=self.url,
                revision=SWHID(object_type="revision", object_id=revision.id),
            )
            self._load_metadata_objects([original_artifact_metadata])

            logger.debug("Revision: %s", revision)

            self.storage.revision_add([revision])
            assert directory.hash
            return (revision.id, directory.hash)

    def _load_snapshot(
        self,
        default_version: str,
        revisions: Dict[str, List[Tuple[str, bytes]]],
        extra_branches: Dict[bytes, Mapping[str, Any]],
    ) -> Optional[Snapshot]:
        """Build snapshot out of the current revisions stored and extra branches.
        Then load it in the storage.
""" logger.debug("revisions: %s", revisions) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_revisions in revisions.items(): if version == default_version and len(branch_name_revisions) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_revisions[0] # except for some corner case (deposit) if branch_name != "HEAD": branches[b"HEAD"] = { "target_type": "alias", "target": branch_name.encode("utf-8"), } for branch_name, target in branch_name_revisions: branches[branch_name.encode("utf-8")] = { "target_type": "revision", "target": target, } # Deal with extra-branches for name, branch_target in extra_branches.items(): if name in branches: logger.error("Extra branch '%s' has been ignored", name) else: branches[name] = branch_target snapshot_data = {"branches": branches} logger.debug("snapshot: %s", snapshot_data) snapshot = Snapshot.from_dict(snapshot_data) logger.debug("snapshot: %s", snapshot) self.storage.snapshot_add([snapshot]) return snapshot def get_loader_name(self) -> str: """Returns a fully qualified name of this loader.""" return f"{self.__class__.__module__}.{self.__class__.__name__}" def get_loader_version(self) -> str: """Returns the version of the current loader.""" module_name = self.__class__.__module__ or "" module_name_parts = module_name.split(".") # Iterate rootward through the package hierarchy until we find a parent of this # loader's module with a __version__ attribute. for prefix_size in range(len(module_name_parts), 0, -1): package_name = ".".join(module_name_parts[0:prefix_size]) module = sys.modules[package_name] if hasattr(module, "__version__"): return module.__version__ # type: ignore # If this loader's class has no parent package with a __version__, # it should implement it itself. raise NotImplementedError( f"Could not dynamically find the version of {self.get_loader_name()}." 
        )

    def get_metadata_fetcher(self) -> MetadataFetcher:
        """Returns a MetadataFetcher instance representing this package loader;
        which is used to for adding provenance information to extracted
        extrinsic metadata, if any."""
        return MetadataFetcher(
            name=self.get_loader_name(), version=self.get_loader_version(), metadata={},
        )

    def get_metadata_authority(self) -> MetadataAuthority:
        """For package loaders that get extrinsic metadata, returns the authority
        the metadata are coming from.

        Must be overridden by subclasses that emit metadata items (the base
        implementation raises NotImplementedError).
        """
        raise NotImplementedError("get_metadata_authority")

    def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
        """Returns metadata items, used by build_extrinsic_origin_metadata."""
        return []

    def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]:
        """Builds a list of full RawExtrinsicMetadata objects, using
        metadata returned by get_extrinsic_origin_metadata."""
        metadata_items = self.get_extrinsic_origin_metadata()
        if not metadata_items:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
            return []

        authority = self.get_metadata_authority()
        fetcher = self.get_metadata_fetcher()

        metadata_objects = []
        for item in metadata_items:
            metadata_objects.append(
                RawExtrinsicMetadata(
                    # Origin metadata targets the raw origin URL (a string),
                    # unlike snapshot/directory metadata which target a SWHID.
                    type=MetadataTargetType.ORIGIN,
                    target=self.url,
                    discovery_date=item.discovery_date or self.visit_date,
                    authority=authority,
                    fetcher=fetcher,
                    format=item.format,
                    metadata=item.metadata,
                )
            )

        return metadata_objects

    def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]:
        """Returns metadata items, used by build_extrinsic_snapshot_metadata."""
        return []

    def build_extrinsic_snapshot_metadata(
        self, snapshot_id: Sha1Git
    ) -> List[RawExtrinsicMetadata]:
        """Builds a list of full RawExtrinsicMetadata objects, using
        metadata returned by get_extrinsic_snapshot_metadata."""
        metadata_items = self.get_extrinsic_snapshot_metadata()
        if not metadata_items:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
            return []

        authority = self.get_metadata_authority()
        fetcher = self.get_metadata_fetcher()

        metadata_objects = []
        for item in metadata_items:
            metadata_objects.append(
                RawExtrinsicMetadata(
                    type=MetadataTargetType.SNAPSHOT,
                    target=SWHID(object_type="snapshot", object_id=snapshot_id),
                    discovery_date=item.discovery_date or self.visit_date,
                    authority=authority,
                    fetcher=fetcher,
                    format=item.format,
                    metadata=item.metadata,
                    origin=self.url,
                )
            )

        return metadata_objects

    def build_extrinsic_directory_metadata(
        self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git,
    ) -> List[RawExtrinsicMetadata]:
        """Builds RawExtrinsicMetadata objects targeting the directory, from
        the metadata items carried by the package info."""
        if not p_info.directory_extrinsic_metadata:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
            return []

        authority = self.get_metadata_authority()
        fetcher = self.get_metadata_fetcher()

        metadata_objects = []
        for item in p_info.directory_extrinsic_metadata:
            metadata_objects.append(
                RawExtrinsicMetadata(
                    type=MetadataTargetType.DIRECTORY,
                    target=SWHID(object_type="directory", object_id=directory_id),
                    discovery_date=item.discovery_date or self.visit_date,
                    authority=authority,
                    fetcher=fetcher,
                    format=item.format,
                    metadata=item.metadata,
                    origin=self.url,
                    revision=SWHID(
                        object_type="revision", object_id=hash_to_hex(revision_id)
                    ),
                )
            )

        return metadata_objects

    def _load_extrinsic_directory_metadata(
        self, p_info: TPackageInfo, revision_id: Sha1Git, directory_id: Sha1Git,
    ) -> None:
        # Build + store in one step; no-op when the package carries no metadata.
        metadata_objects = self.build_extrinsic_directory_metadata(
            p_info, revision_id, directory_id
        )
        self._load_metadata_objects(metadata_objects)

    def _load_metadata_objects(
        self, metadata_objects: List[RawExtrinsicMetadata]
    ) -> None:
        if not metadata_objects:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
return self._create_authorities(mo.authority for mo in metadata_objects) self._create_fetchers(mo.fetcher for mo in metadata_objects) self.storage.raw_extrinsic_metadata_add(metadata_objects) def _create_authorities(self, authorities: Iterable[MetadataAuthority]) -> None: deduplicated_authorities = { (authority.type, authority.url): authority for authority in authorities } if authorities: self.storage.metadata_authority_add(list(deduplicated_authorities.values())) def _create_fetchers(self, fetchers: Iterable[MetadataFetcher]) -> None: deduplicated_fetchers = { (fetcher.name, fetcher.version): fetcher for fetcher in fetchers } if fetchers: self.storage.metadata_fetcher_add(list(deduplicated_fetchers.values())) diff --git a/swh/loader/package/nixguix/loader.py b/swh/loader/package/nixguix/loader.py index b09e63d..2725d9a 100644 --- a/swh/loader/package/nixguix/loader.py +++ b/swh/loader/package/nixguix/loader.py @@ -1,327 +1,335 @@ # Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import json import logging import re from typing import Any, Dict, Iterator, List, Mapping, Optional, Tuple import attr from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import EMPTY_AUTHOR, api_info, cached_method from swh.model import hashutil from swh.model.collections import ImmutableDict from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, Revision, RevisionType, Sha1Git, Snapshot, TargetType, ) +from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) @attr.s class NixGuixPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) integrity = attr.ib(type=str) """Hash of the archive, formatted as in the Subresource Integrity 
specification.""" @classmethod def from_metadata(cls, metadata: Dict[str, Any]) -> "NixGuixPackageInfo": return cls( url=metadata["url"], filename=None, integrity=metadata["integrity"], raw_info=metadata, ) class NixGuixLoader(PackageLoader[NixGuixPackageInfo]): """Load sources from a sources.json file. This loader is used to load sources used by functional package manager (eg. Nix and Guix). """ visit_type = "nixguix" - def __init__(self, url): - super().__init__(url=url) + def __init__( + self, + storage: StorageInterface, + url: str, + unsupported_file_extensions: List[str] = [], + ): + super().__init__(storage=storage, url=url) self.provider_url = url + self.unsupported_file_extensions = unsupported_file_extensions # Note: this could be renamed get_artifacts in the PackageLoader # base class. @cached_method def raw_sources(self): return retrieve_sources(self.url) @cached_method def supported_sources(self): raw_sources = self.raw_sources() - unsupported_file_extensions = self.config.get("unsupported_file_extensions", []) - return clean_sources(parse_sources(raw_sources), unsupported_file_extensions) + return clean_sources( + parse_sources(raw_sources), self.unsupported_file_extensions + ) @cached_method def integrity_by_url(self) -> Dict[str, Any]: sources = self.supported_sources() return {s["urls"][0]: s["integrity"] for s in sources["sources"]} def get_versions(self) -> List[str]: """The first mirror of the mirror list is used as branch name in the snapshot. """ return list(self.integrity_by_url().keys()) def get_metadata_authority(self): return MetadataAuthority( type=MetadataAuthorityType.FORGE, url=self.url, metadata={}, ) def get_extrinsic_snapshot_metadata(self): return [ RawExtrinsicMetadataCore( format="nixguix-sources-json", metadata=self.raw_sources(), ), ] # Note: this could be renamed get_artifact_info in the PackageLoader # base class. 
    def get_package_info(self, url) -> Iterator[Tuple[str, NixGuixPackageInfo]]:
        # TODO: try all mirrors and not only the first one. A source
        # can be fetched from several urls, called mirrors. We
        # currently only use the first one, but if the first one
        # fails, we should try the second one and so on.
        integrity = self.integrity_by_url()[url]
        p_info = NixGuixPackageInfo.from_metadata({"url": url, "integrity": integrity})
        yield url, p_info

    def known_artifacts(
        self, snapshot: Optional[Snapshot]
    ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]:
        """Almost same implementation as the default one except it filters out
        the extra "evaluation" branch which does not have the right metadata
        structure.

        """
        if not snapshot:
            return {}

        # Skip evaluation revision which has no metadata
        revs = [
            rev.target
            for branch_name, rev in snapshot.branches.items()
            if (
                rev
                and rev.target_type == TargetType.REVISION
                and branch_name != b"evaluation"
            )
        ]
        known_revisions = self.storage.revision_get(revs)

        ret = {}
        for revision in known_revisions:
            if not revision:  # revision_get can return None
                continue
            ret[revision.id] = revision.metadata
        return ret

    def resolve_revision_from(
        self, known_artifacts: Dict, p_info: NixGuixPackageInfo,
    ) -> Optional[bytes]:
        # A previously-loaded revision matches iff its recorded integrity hash
        # equals the integrity of the artifact we are about to load.
        for rev_id, known_artifact in known_artifacts.items():
            try:
                known_integrity = known_artifact["extrinsic"]["raw"]["integrity"]
            except KeyError as e:
                logger.exception(
                    "Unexpected metadata revision structure detected: %(context)s",
                    {
                        "context": {
                            "revision": hashutil.hash_to_hex(rev_id),
                            "reason": str(e),
                            "known_artifact": known_artifact,
                        }
                    },
                )
                # metadata field for the revision is not as expected by the loader
                # nixguix. We consider this not the right revision and continue
                # checking the other revisions
                continue
            else:
                if p_info.integrity == known_integrity:
                    return rev_id
        return None

    def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
        """We add a branch to the snapshot called 'evaluation' pointing to the
        revision used to generate the sources.json file. This revision
        is specified in the sources.json file itself. For the nixpkgs
        origin, this revision is coming from the
        github.com/nixos/nixpkgs repository.

        Note this repository is not loaded explicitly. So, this pointer
        can target a nonexistent revision for a time. However, the github
        and gnu loaders are supposed to load this revision and should
        create the revision pointed by this branch.

        This branch can be used to identify the snapshot associated to
        a Nix/Guix evaluation.

        """
        # The revision used to create the sources.json file. For Nix,
        # this revision belongs to the github.com/nixos/nixpkgs
        # repository
        revision = self.supported_sources()["revision"]
        return {
            b"evaluation": {
                "target_type": "revision",
                "target": hashutil.hash_to_bytes(revision),
            }
        }

    def build_revision(
        self, p_info: NixGuixPackageInfo, uncompressed_path: str, directory: Sha1Git
    ) -> Optional[Revision]:
        """Build a synthetic revision for the artifact: no author/date
        information is available, only the extrinsic provenance metadata."""
        return Revision(
            type=RevisionType.TAR,
            message=b"",
            author=EMPTY_AUTHOR,
            date=None,
            committer=EMPTY_AUTHOR,
            committer_date=None,
            parents=(),
            directory=directory,
            synthetic=True,
            metadata={
                "extrinsic": {
                    "provider": self.provider_url,
                    "when": self.visit_date.isoformat(),
                    "raw": p_info.raw_info,
                },
            },
        )


def retrieve_sources(url: str) -> bytes:
    """Retrieve sources. Potentially raise NotFound error."""
    return api_info(url, allow_redirects=True)


def parse_sources(raw_sources: bytes) -> Dict[str, Any]:
    """Decode a raw sources.json payload into a dict."""
    return json.loads(raw_sources.decode("utf-8"))


def make_pattern_unsupported_file_extension(unsupported_file_extensions: List[str],):
    """Make a regexp pattern for unsupported file extension out of a list
    of unsupported archive extension list.
""" return re.compile( rf".*\.({'|'.join(map(re.escape, unsupported_file_extensions))})$", re.DOTALL ) def clean_sources( sources: Dict[str, Any], unsupported_file_extensions=[] ) -> Dict[str, Any]: """Validate and clean the sources structure. First, ensure all top level keys are present. Then, walk the sources list and remove sources that do not contain required keys. Filter out source entries whose: - required keys are missing - source type is not supported - urls attribute type is not a list - extension is known not to be supported by the loader Raises: ValueError if: - a required top level key is missing - top-level version is not 1 Returns: source Dict cleaned up """ pattern_unsupported_file = make_pattern_unsupported_file_extension( unsupported_file_extensions ) # Required top level keys required_keys = ["version", "revision", "sources"] missing_keys = [] for required_key in required_keys: if required_key not in sources: missing_keys.append(required_key) if missing_keys != []: raise ValueError( f"sources structure invalid, missing: {','.join(missing_keys)}" ) # Only the version 1 is currently supported version = int(sources["version"]) if version != 1: raise ValueError( f"The sources structure version '{sources['version']}' is not supported" ) # If a source doesn't contain required attributes, this source is # skipped but others could still be archived. 
verified_sources = [] for source in sources["sources"]: valid = True required_keys = ["urls", "integrity", "type"] for required_key in required_keys: if required_key not in source: logger.info( f"Skip source '{source}' because key '{required_key}' is missing", ) valid = False if valid and source["type"] != "url": logger.info( f"Skip source '{source}' because the type {source['type']} " "is not supported", ) valid = False if valid and not isinstance(source["urls"], list): logger.info( f"Skip source {source} because the urls attribute is not a list" ) valid = False if valid and len(source["urls"]) > 0: # Filter out unsupported archives supported_sources: List[str] = [] for source_url in source["urls"]: if pattern_unsupported_file.match(source_url): logger.info(f"Skip unsupported artifact url {source_url}") continue supported_sources.append(source_url) if len(supported_sources) == 0: logger.info( f"Skip source {source} because urls only reference " "unsupported artifacts. Unsupported " f"artifacts so far: {pattern_unsupported_file}" ) continue new_source = copy.deepcopy(source) new_source["urls"] = supported_sources verified_sources.append(new_source) sources["sources"] = verified_sources return sources diff --git a/swh/loader/package/nixguix/tasks.py b/swh/loader/package/nixguix/tasks.py index c6f60de..39ddf48 100644 --- a/swh/loader/package/nixguix/tasks.py +++ b/swh/loader/package/nixguix/tasks.py @@ -1,14 +1,14 @@ -# Copyright (C) 2020 The Software Heritage developers +# Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.nixguix.loader import NixGuixLoader @shared_task(name=__name__ + ".LoadNixguix") def load_nixguix(*, url=None): """Load functional (e.g. 
guix/nix) package""" - return NixGuixLoader(url).load() + return NixGuixLoader.from_configfile(url=url).load() diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py index 92b8a5f..240e509 100644 --- a/swh/loader/package/nixguix/tests/test_nixguix.py +++ b/swh/loader/package/nixguix/tests/test_nixguix.py @@ -1,709 +1,707 @@ # Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os from typing import Dict, Optional, Tuple from unittest.mock import patch import attr import pytest from swh.loader.package import __version__ from swh.loader.package.archive.loader import ArchiveLoader from swh.loader.package.nixguix.loader import ( NixGuixLoader, NixGuixPackageInfo, clean_sources, make_pattern_unsupported_file_extension, parse_sources, retrieve_sources, ) from swh.loader.package.utils import download from swh.loader.tests import assert_last_visit_matches from swh.loader.tests import check_snapshot as check_snapshot_full from swh.loader.tests import get_stats from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.identifiers import SWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.storage.algos.origin import origin_get_latest_visit_status from swh.storage.algos.snapshot import snapshot_get_all_branches from swh.storage.exc import HashCollision from swh.storage.interface import PagedResult, StorageInterface sources_url = "https://nix-community.github.io/nixpkgs-swh/sources.json" @pytest.fixture def raw_sources(datadir) -> bytes: with open( os.path.join( datadir, "https_nix-community.github.io", "nixpkgs-swh_sources.json" ), "rb", ) as f: 
return f.read() SNAPSHOT1 = Snapshot( id=hash_to_bytes("0c5881c74283793ebe9a09a105a9381e41380383"), branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"), target_type=TargetType.REVISION, ), b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch( target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"), target_type=TargetType.REVISION, ), }, ) def check_snapshot(snapshot: Snapshot, storage: StorageInterface): # The `evaluation` branch is allowed to be unresolvable. It's possible at current # nixguix visit time, it is not yet visited (the git loader is in charge of its # visit for now). For more details, check the # swh.loader.package.nixguix.NixGuixLoader.extra_branches docstring. check_snapshot_full( snapshot, storage, allowed_empty=[(TargetType.REVISION, b"evaluation")] ) assert isinstance(snapshot, Snapshot) # then ensure the snapshot revisions are structurally as expected revision_ids = [] for name, branch in snapshot.branches.items(): if name == b"evaluation": continue # skipping that particular branch (cf. previous comment) if branch.target_type == TargetType.REVISION: revision_ids.append(branch.target) revisions = storage.revision_get(revision_ids) for rev in revisions: assert rev is not None metadata = rev.metadata assert metadata is not None raw = metadata["extrinsic"]["raw"] assert "url" in raw assert "integrity" in raw -def test_retrieve_sources(swh_config, requests_mock_datadir): +def test_retrieve_sources(swh_storage, requests_mock_datadir): j = parse_sources(retrieve_sources(sources_url)) assert "sources" in j.keys() assert len(j["sources"]) == 2 -def test_nixguix_url_not_found(swh_config, requests_mock_datadir): +def test_nixguix_url_not_found(swh_storage, requests_mock_datadir): """When failing to read from the url, the visit is marked as not_found. Here the sources url does not exist, so requests_mock_datadir returns a 404. 
Resulting in a NotFound raised within the package loader's main loop. This results in the task with status failed and a visit_status with status "not_found". """ unknown_url = "https://non-existing-url/" - loader = NixGuixLoader(unknown_url) + loader = NixGuixLoader(swh_storage, unknown_url) # during the retrieval step load_status = loader.load() assert load_status == {"status": "failed"} assert_last_visit_matches( - loader.storage, unknown_url, status="not_found", type="nixguix", snapshot=None + swh_storage, unknown_url, status="not_found", type="nixguix", snapshot=None ) assert len(requests_mock_datadir.request_history) == 1 assert requests_mock_datadir.request_history[0].url == unknown_url -def test_nixguix_url_with_decoding_error(swh_config, requests_mock_datadir): +def test_nixguix_url_with_decoding_error(swh_storage, requests_mock_datadir): """Other errors during communication with the url, the visit is marked as failed requests_mock_datadir will intercept the requests to sources_url. Since the file exists, returns a 200 with the requested content of the query. As file.txt is no json, fails do decode and raises a JSONDecodeError. In effect failing the visit. 
""" sources_url = "https://example.com/file.txt" - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() assert load_status == {"status": "failed"} assert_last_visit_matches( - loader.storage, sources_url, status="failed", type="nixguix", snapshot=None + swh_storage, sources_url, status="failed", type="nixguix", snapshot=None ) assert len(requests_mock_datadir.request_history) == 1 assert requests_mock_datadir.request_history[0].url == sources_url -def test_clean_sources_invalid_schema(swh_config, requests_mock_datadir): +def test_clean_sources_invalid_schema(swh_storage, requests_mock_datadir): sources = {} with pytest.raises(ValueError, match="sources structure invalid, missing: .*"): clean_sources(sources) -def test_clean_sources_invalid_version(swh_config, requests_mock_datadir): +def test_clean_sources_invalid_version(swh_storage, requests_mock_datadir): for version_ok in [1, "1"]: # Check those versions are fine clean_sources({"version": version_ok, "sources": [], "revision": "my-revision"}) for version_ko in [0, "0", 2, "2"]: # Check version != 1 raise an error with pytest.raises( ValueError, match="sources structure version .* is not supported" ): clean_sources( {"version": version_ko, "sources": [], "revision": "my-revision"} ) -def test_clean_sources_invalid_sources(swh_config, requests_mock_datadir): +def test_clean_sources_invalid_sources(swh_storage, requests_mock_datadir): valid_sources = [ # 1 valid source {"type": "url", "urls": ["my-url.tar.gz"], "integrity": "my-integrity"}, ] sources = { "version": 1, "sources": valid_sources + [ # integrity is missing {"type": "url", "urls": ["my-url.tgz"],}, # urls is not a list {"type": "url", "urls": "my-url.zip", "integrity": "my-integrity"}, # type is not url {"type": "git", "urls": ["my-url.zip"], "integrity": "my-integrity"}, # missing fields which got double-checked nonetheless... 
{"integrity": "my-integrity"}, ], "revision": "my-revision", } clean = clean_sources(sources) assert len(clean["sources"]) == len(valid_sources) def test_make_pattern_unsupported_file_extension(): unsupported_extensions = ["el", "c", "txt"] supported_extensions = ["Z", "7z"] # for test actual_unsupported_pattern = make_pattern_unsupported_file_extension( unsupported_extensions ) for supported_ext in supported_extensions: assert supported_ext not in unsupported_extensions supported_filepath = f"anything.{supported_ext}" actual_match = actual_unsupported_pattern.match(supported_filepath) assert not actual_match for unsupported_ext in unsupported_extensions: unsupported_filepath = f"something.{unsupported_ext}" actual_match = actual_unsupported_pattern.match(unsupported_filepath) assert actual_match -def test_clean_sources_unsupported_artifacts(swh_config, requests_mock_datadir): +def test_clean_sources_unsupported_artifacts(swh_storage, requests_mock_datadir): unsupported_file_extensions = [ "iso", "whl", "gem", "pom", "msi", "pod", "png", "rock", "ttf", "jar", "c", "el", "rpm", "diff", "patch", ] supported_sources = [ { "type": "url", "urls": [f"https://server.org/my-url.{ext}"], "integrity": "my-integrity", } for ext in [ "known-unknown-but-ok", # this is fine as well with the current approach "zip", "tar.gz", "tgz", "tar.bz2", "tbz", "tbz2", "tar.xz", "tar", "zip", "7z", "Z", ] ] unsupported_sources = [ { "type": "url", "urls": [f"https://server.org/my-url.{ext}"], "integrity": "my-integrity", } for ext in unsupported_file_extensions ] sources = { "version": 1, "sources": supported_sources + unsupported_sources, "revision": "my-revision", } clean = clean_sources(sources, unsupported_file_extensions) assert len(clean["sources"]) == len(supported_sources) -def test_loader_one_visit(swh_config, requests_mock_datadir, raw_sources): - loader = NixGuixLoader(sources_url) +def test_loader_one_visit(swh_storage, requests_mock_datadir, raw_sources): + loader = 
NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res["status"] == "eventful" - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 1, "directory": 3, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == stats # The visit is partial because urls pointing to non tarball file # are not handled yet assert_last_visit_matches( - loader.storage, sources_url, status="partial", type="nixguix" + swh_storage, sources_url, status="partial", type="nixguix" ) - visit_status = origin_get_latest_visit_status(loader.storage, sources_url) + visit_status = origin_get_latest_visit_status(swh_storage, sources_url) snapshot_swhid = SWHID( object_type="snapshot", object_id=hash_to_hex(visit_status.snapshot) ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url=sources_url, ) expected_metadata = [ RawExtrinsicMetadata( type=MetadataTargetType.SNAPSHOT, target=snapshot_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.nixguix.loader.NixGuixLoader", version=__version__, ), discovery_date=loader.visit_date, format="nixguix-sources-json", metadata=raw_sources, origin=sources_url, ) ] - assert loader.storage.raw_extrinsic_metadata_get( + assert swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.SNAPSHOT, snapshot_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) -def test_uncompress_failure(swh_config, requests_mock_datadir): +def test_uncompress_failure(swh_storage, requests_mock_datadir): """Non tarball files are currently not supported and the uncompress function fails on such kind of files. However, even in this case of failure (because of the url https://example.com/file.txt), a snapshot and a visit has to be created (with a status partial since all files are not archived). 
""" - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) loader_status = loader.load() sources = loader.supported_sources()["sources"] urls = [s["urls"][0] for s in sources] assert "https://example.com/file.txt" in urls assert loader_status["status"] == "eventful" # The visit is partial because urls pointing to non tarball files # are not handled yet assert_last_visit_matches( - loader.storage, sources_url, status="partial", type="nixguix" + swh_storage, sources_url, status="partial", type="nixguix" ) -def test_loader_incremental(swh_config, requests_mock_datadir): +def test_loader_incremental(swh_storage, requests_mock_datadir): """Ensure a second visit do not download artifact already downloaded by the previous visit. """ - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() loader.load() assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()} assert_last_visit_matches( - loader.storage, + swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) - check_snapshot(SNAPSHOT1, storage=loader.storage) + check_snapshot(SNAPSHOT1, storage=swh_storage) urls = [ m.url for m in requests_mock_datadir.request_history if m.url == ("https://github.com/owner-1/repository-1/revision-1.tgz") ] # The artifact # 'https://github.com/owner-1/repository-1/revision-1.tgz' is only # visited one time assert len(urls) == 1 -def test_loader_two_visits(swh_config, requests_mock_datadir_visits): +def test_loader_two_visits(swh_storage, requests_mock_datadir_visits): """To ensure there is only one origin, but two visits, two revisions and two snapshots are created. The first visit creates a snapshot containing one tarball. The second visit creates a snapshot containing the same tarball and another tarball. 
""" - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() assert load_status == {"status": "eventful", "snapshot_id": SNAPSHOT1.id.hex()} assert_last_visit_matches( - loader.storage, + swh_storage, sources_url, status="partial", type="nixguix", snapshot=SNAPSHOT1.id, ) - check_snapshot(SNAPSHOT1, storage=loader.storage) + check_snapshot(SNAPSHOT1, storage=swh_storage) - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 1, "directory": 3, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == stats - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) load_status = loader.load() expected_snapshot_id_hex = "b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97" expected_snapshot_id = hash_to_bytes(expected_snapshot_id_hex) assert load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id_hex, } assert_last_visit_matches( - loader.storage, + swh_storage, sources_url, status="partial", type="nixguix", snapshot=expected_snapshot_id, ) # This ensures visits are incremental. Indeed, if we request a # second time an url, because of the requests_mock_datadir_visits # fixture, the file has to end with `_visit1`. 
expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("602140776b2ce6c9159bcf52ada73a297c063d5e"), target_type=TargetType.REVISION, ), b"https://github.com/owner-1/repository-1/revision-1.tgz": SnapshotBranch( target=hash_to_bytes("488ad4e7b8e2511258725063cf43a2b897c503b4"), target_type=TargetType.REVISION, ), b"https://github.com/owner-2/repository-1/revision-1.tgz": SnapshotBranch( target=hash_to_bytes("85e0bad74e33e390aaeb74f139853ae3863ee544"), target_type=TargetType.REVISION, ), }, ) - check_snapshot(expected_snapshot, storage=loader.storage) + check_snapshot(expected_snapshot, storage=swh_storage) - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 2, "directory": 5, "origin": 1, "origin_visit": 2, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 2, } == stats -def test_resolve_revision_from(swh_config, requests_mock_datadir, datadir): - loader = NixGuixLoader(sources_url) +def test_resolve_revision_from(swh_storage, requests_mock_datadir, datadir): + loader = NixGuixLoader(swh_storage, sources_url) known_artifacts = { "id1": {"extrinsic": {"raw": {"url": "url1", "integrity": "integrity1"}}}, "id2": {"extrinsic": {"raw": {"url": "url2", "integrity": "integrity2"}}}, } p_info = NixGuixPackageInfo.from_metadata( {"url": "url1", "integrity": "integrity1"} ) assert loader.resolve_revision_from(known_artifacts, p_info) == "id1" p_info = NixGuixPackageInfo.from_metadata( {"url": "url3", "integrity": "integrity3"} ) assert loader.resolve_revision_from(known_artifacts, p_info) == None # noqa -def test_evaluation_branch(swh_config, requests_mock_datadir): - loader = NixGuixLoader(sources_url) +def test_evaluation_branch(swh_storage, requests_mock_datadir): + loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res["status"] == "eventful" assert_last_visit_matches( - loader.storage, + swh_storage, sources_url, status="partial", 
type="nixguix", snapshot=SNAPSHOT1.id, ) - check_snapshot(SNAPSHOT1, storage=loader.storage) + check_snapshot(SNAPSHOT1, storage=swh_storage) -def test_eoferror(swh_config, requests_mock_datadir): +def test_eoferror(swh_storage, requests_mock_datadir): """Load a truncated archive which is invalid to make the uncompress function raising the exception EOFError. We then check if a snapshot is created, meaning this error is well managed. """ sources = ( "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa ) - loader = NixGuixLoader(sources) + loader = NixGuixLoader(swh_storage, sources) loader.load() expected_snapshot = Snapshot( id=hash_to_bytes("4257fa2350168c6bfec726a06452ea27a2c0cb33"), branches={ b"evaluation": SnapshotBranch( target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"), target_type=TargetType.REVISION, ), }, ) - check_snapshot(expected_snapshot, storage=loader.storage) + check_snapshot(expected_snapshot, storage=swh_storage) def fake_download( url: str, dest: str, hashes: Dict = {}, filename: Optional[str] = None, auth: Optional[Tuple[str, str]] = None, ) -> Tuple[str, Dict]: """Fake download which raises HashCollision (for the sake of test simpliciy, let's accept that makes sense) For tests purpose only. 
""" if url == "https://example.com/file.txt": # instead of failing because it's a file not dealt with by the nix guix # loader, make it raise a hash collision raise HashCollision("sha1", "f92d74e3874587aaf443d1db961d4e26dde13e9c", []) return download(url, dest, hashes, filename, auth) -def test_raise_exception(swh_config, requests_mock_datadir, mocker): +def test_raise_exception(swh_storage, requests_mock_datadir, mocker): mock_download = mocker.patch("swh.loader.package.loader.download") mock_download.side_effect = fake_download - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) res = loader.load() assert res == { "status": "eventful", "snapshot_id": SNAPSHOT1.id.hex(), } - check_snapshot(SNAPSHOT1, storage=loader.storage) + check_snapshot(SNAPSHOT1, storage=swh_storage) assert len(mock_download.mock_calls) == 2 # The visit is partial because some artifact downloads failed assert_last_visit_matches( - loader.storage, sources_url, status="partial", type="nixguix" + swh_storage, sources_url, status="partial", type="nixguix" ) def test_load_nixguix_one_common_artifact_from_other_loader( - swh_config, datadir, requests_mock_datadir_visits, caplog + swh_storage, datadir, requests_mock_datadir_visits, caplog ): """Misformatted revision should be caught and logged, then loading continues """ caplog.set_level(logging.ERROR, "swh.loader.package.nixguix.loader") # 1. 
first ingest with for example the archive loader gnu_url = "https://ftp.gnu.org/gnu/8sync/" release = "0.1.0" artifact_url = f"https://ftp.gnu.org/gnu/8sync/8sync-{release}.tar.gz" gnu_artifacts = [ { "time": 944729610, "url": artifact_url, "length": 221837, "filename": f"8sync-{release}.tar.gz", "version": release, } ] - archive_loader = ArchiveLoader(url=gnu_url, artifacts=gnu_artifacts) + archive_loader = ArchiveLoader(swh_storage, url=gnu_url, artifacts=gnu_artifacts) actual_load_status = archive_loader.load() expected_snapshot_id = "c419397fd912039825ebdbea378bc6283f006bf5" assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] == expected_snapshot_id # noqa assert_last_visit_matches( archive_loader.storage, gnu_url, status="full", type="tar" ) gnu_snapshot: Snapshot = snapshot_get_all_branches( archive_loader.storage, hash_to_bytes(expected_snapshot_id) ) first_revision = gnu_snapshot.branches[f"releases/{release}".encode("utf-8")] # 2. Then ingest with the nixguix loader which lists the same artifact within its # sources.json # ensure test setup is ok data_sources = os.path.join( datadir, "https_nix-community.github.io", "nixpkgs-swh_sources_special.json" ) all_sources = json.loads(open(data_sources).read()) found = False for source in all_sources["sources"]: if source["urls"][0] == artifact_url: found = True assert ( found is True ), f"test setup error: {artifact_url} must be in {data_sources}" # first visit with a snapshot, ok sources_url = "https://nix-community.github.io/nixpkgs-swh/sources_special.json" - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) actual_load_status2 = loader.load() assert actual_load_status2["status"] == "eventful" - assert_last_visit_matches( - loader.storage, sources_url, status="full", type="nixguix" - ) + assert_last_visit_matches(swh_storage, sources_url, status="full", type="nixguix") snapshot_id = actual_load_status2["snapshot_id"] - snapshot = 
snapshot_get_all_branches(loader.storage, hash_to_bytes(snapshot_id)) + snapshot = snapshot_get_all_branches(swh_storage, hash_to_bytes(snapshot_id)) assert snapshot # simulate a snapshot already seen with a revision with the wrong metadata structure # This revision should be skipped, thus making the artifact being ingested again. with patch( "swh.loader.package.loader.PackageLoader.last_snapshot" ) as last_snapshot: # mutate the snapshot to target a revision with the wrong metadata structure # snapshot["branches"][artifact_url.encode("utf-8")] = first_revision - old_revision = loader.storage.revision_get([first_revision.target])[0] + old_revision = swh_storage.revision_get([first_revision.target])[0] # assert that revision is not in the right format assert old_revision.metadata["extrinsic"]["raw"].get("integrity", {}) == {} # mutate snapshot to create a clash snapshot = attr.evolve( snapshot, branches={ **snapshot.branches, artifact_url.encode("utf-8"): SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes(old_revision.id), ), }, ) # modify snapshot to actually change revision metadata structure so we simulate # a revision written by somebody else (structure different) last_snapshot.return_value = snapshot - loader = NixGuixLoader(sources_url) + loader = NixGuixLoader(swh_storage, sources_url) actual_load_status3 = loader.load() assert last_snapshot.called assert actual_load_status3["status"] == "eventful" assert_last_visit_matches( - loader.storage, sources_url, status="full", type="nixguix" + swh_storage, sources_url, status="full", type="nixguix" ) new_snapshot_id = "32ff641e510aceefc3a6d0dcbf208b2854d2e965" assert actual_load_status3["snapshot_id"] == new_snapshot_id last_snapshot = snapshot_get_all_branches( - loader.storage, hash_to_bytes(new_snapshot_id) + swh_storage, hash_to_bytes(new_snapshot_id) ) new_revision_branch = last_snapshot.branches[artifact_url.encode("utf-8")] assert new_revision_branch.target_type == TargetType.REVISION - 
new_revision = loader.storage.revision_get([new_revision_branch.target])[0] + new_revision = swh_storage.revision_get([new_revision_branch.target])[0] # the new revision has the correct structure, so it got ingested alright by the # new run assert new_revision.metadata["extrinsic"]["raw"]["integrity"] is not None nb_detections = 0 actual_detection: Dict for record in caplog.records: logtext = record.getMessage() if "Unexpected metadata revision structure detected:" in logtext: nb_detections += 1 actual_detection = record.args["context"] assert actual_detection # as many calls as there are sources listed in the sources.json assert nb_detections == len(all_sources["sources"]) assert actual_detection == { "revision": hash_to_hex(old_revision.id), "reason": "'integrity'", "known_artifact": old_revision.metadata, } diff --git a/swh/loader/package/nixguix/tests/test_tasks.py b/swh/loader/package/nixguix/tests/test_tasks.py index b519c70..edb06e2 100644 --- a/swh/loader/package/nixguix/tests/test_tasks.py +++ b/swh/loader/package/nixguix/tests/test_tasks.py @@ -1,29 +1,23 @@ -# Copyright (C) 2020 The Software Heritage developers +# Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -import json - -def test_nixguix_loader( +def test_tasks_nixguix_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): - mock_loader = mocker.patch("swh.loader.package.nixguix.loader.NixGuixLoader.load") - mock_loader.return_value = {"status": "eventful"} - - mock_retrieve_sources = mocker.patch( - "swh.loader.package.nixguix.loader.retrieve_sources" + mock_loader = mocker.patch( + "swh.loader.package.nixguix.loader.NixGuixLoader.from_configfile" ) - mock_retrieve_sources.return_value = json.dumps( - {"version": 1, "sources": [], "revision": "some-revision",} - 
).encode() + mock_loader.return_value = mock_loader + mock_loader.load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.nixguix.tasks.LoadNixguix", kwargs=dict(url="some-url") ) assert res res.wait() assert res.successful() - + assert mock_loader.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/npm/loader.py b/swh/loader/package/npm/loader.py index 5f5387a..a860cdf 100644 --- a/swh/loader/package/npm/loader.py +++ b/swh/loader/package/npm/loader.py @@ -1,339 +1,345 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from codecs import BOM_UTF8 import json import logging import os from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union from urllib.parse import quote import attr import chardet from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import api_info, cached_method, release_name from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, Person, Revision, RevisionType, Sha1Git, TimestampWithTimezone, ) +from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) EMPTY_PERSON = Person(fullname=b"", name=None, email=None) @attr.s class NpmPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) date = attr.ib(type=Optional[str]) shasum = attr.ib(type=str) """sha1 checksum""" version = attr.ib(type=str) @classmethod def from_metadata( cls, project_metadata: Dict[str, Any], version: str ) -> "NpmPackageInfo": package_metadata = project_metadata["versions"][version] url = package_metadata["dist"]["tarball"] # No date available in intrinsic metadata: retrieve it from the API # metadata, using the version number that the API claims 
this package # has. extrinsic_version = package_metadata["version"] if "time" in project_metadata: date = project_metadata["time"][extrinsic_version] elif "mtime" in package_metadata: date = package_metadata["mtime"] else: date = None return cls( url=url, filename=os.path.basename(url), date=date, shasum=package_metadata["dist"]["shasum"], version=extrinsic_version, raw_info=package_metadata, directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( format="replicate-npm-package-json", metadata=json.dumps(package_metadata).encode(), ) ], ) class NpmLoader(PackageLoader[NpmPackageInfo]): """Load npm origin's artifact releases into swh archive. """ visit_type = "npm" - def __init__(self, url: str): + def __init__( + self, + storage: StorageInterface, + url: str, + max_content_size: Optional[int] = None, + ): """Constructor Args str: origin url (e.g. https://www.npmjs.com/package/) """ - super().__init__(url=url) + super().__init__(storage=storage, url=url, max_content_size=max_content_size) package_name = url.split("https://www.npmjs.com/package/")[1] safe_name = quote(package_name, safe="") self.provider_url = f"https://replicate.npmjs.com/{safe_name}/" self._info: Dict[str, Any] = {} self._versions = None @cached_method def _raw_info(self) -> bytes: return api_info(self.provider_url) @cached_method def info(self) -> Dict: """Return the project metadata information (fetched from npm registry) """ return json.loads(self._raw_info()) def get_versions(self) -> Sequence[str]: return sorted(list(self.info()["versions"].keys())) def get_default_version(self) -> str: return self.info()["dist-tags"].get("latest", "") def get_metadata_authority(self): return MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://npmjs.com/", metadata={}, ) def get_package_info(self, version: str) -> Iterator[Tuple[str, NpmPackageInfo]]: p_info = NpmPackageInfo.from_metadata( project_metadata=self.info(), version=version ) yield release_name(version), p_info def 
resolve_revision_from( self, known_artifacts: Dict, p_info: NpmPackageInfo ) -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, p_info) def build_revision( self, p_info: NpmPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None author = extract_npm_package_author(i_metadata) message = i_metadata["version"].encode("ascii") if p_info.date is None: url = p_info.url artifact_name = os.path.basename(url) raise ValueError( "Origin %s: Cannot determine upload time for artifact %s." % (p_info.url, artifact_name) ) date = TimestampWithTimezone.from_iso8601(p_info.date) # FIXME: this is to remain bug-compatible with earlier versions: date = attr.evolve(date, timestamp=attr.evolve(date.timestamp, microseconds=0)) r = Revision( type=RevisionType.TAR, message=message, author=author, date=date, committer=author, committer_date=date, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {"tool": "package.json", "raw": i_metadata,}, "extrinsic": { "provider": self.provider_url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) return r def artifact_to_revision_id( known_artifacts: Dict, p_info: NpmPackageInfo ) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats: - old format sample:: { 'package_source': { 'sha1': '05181c12cd8c22035dd31155656826b85745da37', } } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... 
} """ shasum = p_info.shasum for rev_id, known_artifact in known_artifacts.items(): known_original_artifact = known_artifact.get("original_artifact") if not known_original_artifact: # previous loader-npm version kept original artifact elsewhere known_original_artifact = known_artifact.get("package_source") if not known_original_artifact: continue original_hash = known_original_artifact["sha1"] else: assert isinstance(known_original_artifact, list) original_hash = known_original_artifact[0]["checksums"]["sha1"] if shasum == original_hash: return rev_id return None def _author_str(author_data: Union[Dict, List, str]) -> str: """Parse author from package.json author fields """ if isinstance(author_data, dict): author_str = "" name = author_data.get("name") if name is not None: if isinstance(name, str): author_str += name elif isinstance(name, list): author_str += _author_str(name[0]) if len(name) > 0 else "" email = author_data.get("email") if email is not None: author_str += f" <{email}>" result = author_str elif isinstance(author_data, list): result = _author_str(author_data[0]) if len(author_data) > 0 else "" else: result = author_data return result def extract_npm_package_author(package_json: Dict[str, Any]) -> Person: """ Extract package author from a ``package.json`` file content and return it in swh format. Args: package_json: Dict holding the content of parsed ``package.json`` file Returns: Person """ for author_key in ("author", "authors"): if author_key in package_json: author_data = package_json[author_key] if author_data is None: return EMPTY_PERSON author_str = _author_str(author_data) return Person.from_fullname(author_str.encode()) return EMPTY_PERSON def _lstrip_bom(s, bom=BOM_UTF8): if s.startswith(bom): return s[len(bom) :] else: return s def load_json(json_bytes): """ Try to load JSON from bytes and return a dictionary. First try to decode from utf-8. If the decoding failed, try to detect the encoding and decode again with replace error handling. 
If JSON is malformed, an empty dictionary will be returned. Args: json_bytes (bytes): binary content of a JSON file Returns: dict: JSON data loaded in a dictionary """ json_data = {} try: json_str = _lstrip_bom(json_bytes).decode("utf-8") except UnicodeDecodeError: encoding = chardet.detect(json_bytes)["encoding"] if encoding: json_str = json_bytes.decode(encoding, "replace") try: json_data = json.loads(json_str) except json.decoder.JSONDecodeError: pass return json_data def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from npm. Returns: the pkginfo parsed structure as a dict if any or None if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) == 0: return {} project_dirname = lst[0] package_json_path = os.path.join(dir_path, project_dirname, "package.json") if not os.path.exists(package_json_path): return {} with open(package_json_path, "rb") as package_json_file: package_json_bytes = package_json_file.read() return load_json(package_json_bytes) diff --git a/swh/loader/package/npm/tasks.py b/swh/loader/package/npm/tasks.py index d796a23..43fc0bf 100644 --- a/swh/loader/package/npm/tasks.py +++ b/swh/loader/package/npm/tasks.py @@ -1,14 +1,14 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.npm.loader import 
NpmLoader @shared_task(name=__name__ + ".LoadNpm") def load_npm(*, url: str): """Load Npm package""" - return NpmLoader(url).load() + return NpmLoader.from_configfile(url=url).load() diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py index 13e2bc1..baa43a3 100644 --- a/swh/loader/package/npm/tests/test_npm.py +++ b/swh/loader/package/npm/tests/test_npm.py @@ -1,714 +1,711 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os import pytest from swh.loader.package import __version__ from swh.loader.package.npm.loader import ( NpmLoader, _author_str, artifact_to_revision_id, extract_npm_package_author, ) from swh.loader.package.tests.common import check_metadata_paths from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.identifiers import SWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, Person, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.storage.interface import PagedResult @pytest.fixture def org_api_info(datadir) -> bytes: with open(os.path.join(datadir, "https_replicate.npmjs.com", "org"), "rb",) as f: return f.read() def test_npm_author_str(): for author, expected_author in [ ("author", "author"), ( ["Al from quantum leap", "hal from 2001 space odyssey"], "Al from quantum leap", ), ([], ""), ({"name": "groot", "email": "groot@galaxy.org",}, "groot "), ({"name": "somebody",}, "somebody"), ({"email": "no@one.org"}, " "), # note first elt is an extra blank ({"name": "no one", "email": None,}, "no one"), ({"email": None,}, ""), ({"name": None}, ""), ({"name": None, "email": None,}, ""), ({}, ""), (None, None), ({"name": []}, 
"",), ( {"name": ["Susan McSween", "William H. Bonney", "Doc Scurlock",]}, "Susan McSween", ), (None, None), ]: assert _author_str(author) == expected_author -def test_extract_npm_package_author(datadir): +def test_npm_extract_npm_package_author(datadir): package_metadata_filepath = os.path.join( datadir, "https_replicate.npmjs.com", "org_visit1" ) with open(package_metadata_filepath) as json_file: package_metadata = json.load(json_file) extract_npm_package_author(package_metadata["versions"]["0.0.2"]) == Person( fullname=b"mooz ", name=b"mooz", email=b"stillpedant@gmail.com", ) assert extract_npm_package_author(package_metadata["versions"]["0.0.3"]) == Person( fullname=b"Masafumi Oyamada ", name=b"Masafumi Oyamada", email=b"stillpedant@gmail.com", ) package_json = json.loads( """ { "name": "highlightjs-line-numbers.js", "version": "2.7.0", "description": "Highlight.js line numbers plugin.", "main": "src/highlightjs-line-numbers.js", "dependencies": {}, "devDependencies": { "gulp": "^4.0.0", "gulp-rename": "^1.4.0", "gulp-replace": "^0.6.1", "gulp-uglify": "^1.2.0" }, "repository": { "type": "git", "url": "https://github.com/wcoder/highlightjs-line-numbers.js.git" }, "author": "Yauheni Pakala ", "license": "MIT", "bugs": { "url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues" }, "homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"Yauheni Pakala ", name=b"Yauheni Pakala", email=b"evgeniy.pakalo@gmail.com", ) package_json = json.loads( """ { "name": "3-way-diff", "version": "0.0.1", "description": "3-way diffing of JavaScript objects", "main": "index.js", "authors": [ { "name": "Shawn Walsh", "url": "https://github.com/shawnpwalsh" }, { "name": "Markham F Rollins IV", "url": "https://github.com/mrollinsiv" } ], "keywords": [ "3-way diff", "3 way diff", "three-way diff", "three way diff" ], "devDependencies": { "babel-core": "^6.20.0", "babel-preset-es2015": 
"^6.18.0", "mocha": "^3.0.2" }, "dependencies": { "lodash": "^4.15.0" } }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"Shawn Walsh", name=b"Shawn Walsh", email=None ) package_json = json.loads( """ { "name": "yfe-ynpm", "version": "1.0.0", "homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm", "repository": { "type": "git", "url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git" }, "author": [ "fengmk2 (https://fengmk2.com)", "xufuzi (https://7993.org)" ], "license": "MIT" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"fengmk2 (https://fengmk2.com)", name=b"fengmk2", email=b"fengmk2@gmail.com", ) package_json = json.loads( """ { "name": "umi-plugin-whale", "version": "0.0.8", "description": "Internal contract component", "authors": { "name": "xiaohuoni", "email": "448627663@qq.com" }, "repository": "alitajs/whale", "devDependencies": { "np": "^3.0.4", "umi-tools": "*" }, "license": "MIT" }""" ) assert extract_npm_package_author(package_json) == Person( fullname=b"xiaohuoni <448627663@qq.com>", name=b"xiaohuoni", email=b"448627663@qq.com", ) package_json_no_authors = json.loads( """{ "authors": null, "license": "MIT" }""" ) assert extract_npm_package_author(package_json_no_authors) == Person( fullname=b"", name=None, email=None ) def normalize_hashes(hashes): if isinstance(hashes, str): return hash_to_bytes(hashes) if isinstance(hashes, list): return [hash_to_bytes(x) for x in hashes] return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()} _expected_new_contents_first_visit = normalize_hashes( [ "4ce3058e16ab3d7e077f65aabf855c34895bf17c", "858c3ceee84c8311adc808f8cdb30d233ddc9d18", "0fa33b4f5a4e0496da6843a38ff1af8b61541996", "85a410f8ef8eb8920f2c384a9555566ad4a2e21b", "9163ac8025923d5a45aaac482262893955c9b37b", "692cf623b8dd2c5df2c2998fd95ae4ec99882fb4", "18c03aac6d3e910efb20039c15d70ab5e0297101", "41265c42446aac17ca769e67d1704f99e5a1394d", "783ff33f5882813dca9239452c4a7cadd4dba778", 
"b029cfb85107aee4590c2434a3329bfcf36f8fa1", "112d1900b4c2e3e9351050d1b542c9744f9793f3", "5439bbc4bd9a996f1a38244e6892b71850bc98fd", "d83097a2f994b503185adf4e719d154123150159", "d0939b4898e83090ee55fd9d8a60e312cfadfbaf", "b3523a26f7147e4af40d9d462adaae6d49eda13e", "cd065fb435d6fb204a8871bcd623d0d0e673088c", "2854a40855ad839a54f4b08f5cff0cf52fca4399", "b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe", "0f73d56e1cf480bded8a1ecf20ec6fc53c574713", "0d9882b2dfafdce31f4e77fe307d41a44a74cefe", "585fc5caab9ead178a327d3660d35851db713df1", "e8cd41a48d79101977e3036a87aeb1aac730686f", "5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7", "9c3cc2763bf9e9e37067d3607302c4776502df98", "3649a68410e354c83cd4a38b66bd314de4c8f5c9", "e96ed0c091de1ebdf587104eaf63400d1974a1fe", "078ca03d2f99e4e6eab16f7b75fbb7afb699c86c", "38de737da99514de6559ff163c988198bc91367a", ] ) _expected_new_directories_first_visit = normalize_hashes( [ "3370d20d6f96dc1c9e50f083e2134881db110f4f", "42753c0c2ab00c4501b552ac4671c68f3cf5aece", "d7895533ef5edbcffdea3f057d9fef3a1ef845ce", "80579be563e2ef3e385226fe7a3f079b377f142c", "3b0ddc6a9e58b4b53c222da4e27b280b6cda591c", "bcad03ce58ac136f26f000990fc9064e559fe1c0", "5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca", "e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd", "584b5b4b6cf7f038095e820b99386a9c232de931", "184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a", "bb5f4ee143c970367eb409f2e4c1104898048b9d", "1b95491047add1103db0dfdfa84a9735dcb11e88", "a00c6de13471a2d66e64aca140ddb21ef5521e62", "5ce6c1cd5cda2d546db513aaad8c72a44c7771e2", "c337091e349b6ac10d38a49cdf8c2401ef9bb0f2", "202fafcd7c0f8230e89d5496ad7f44ab12b807bf", "775cc516543be86c15c1dc172f49c0d4e6e78235", "ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e", ] ) _expected_new_revisions_first_visit = normalize_hashes( { "d8a1c7474d2956ac598a19f0f27d52f7015f117e": ( "42753c0c2ab00c4501b552ac4671c68f3cf5aece" ), "5f9eb78af37ffd12949f235e86fac04898f9f72a": ( "3370d20d6f96dc1c9e50f083e2134881db110f4f" ), "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a": ( 
"d7895533ef5edbcffdea3f057d9fef3a1ef845ce" ), } ) def package_url(package): return "https://www.npmjs.com/package/%s" % package def package_metadata_url(package): return "https://replicate.npmjs.com/%s/" % package -def test_revision_metadata_structure(swh_config, requests_mock_datadir): +def test_npm_revision_metadata_structure(swh_storage, requests_mock_datadir): package = "org" - loader = NpmLoader(package_url(package)) + loader = NpmLoader(swh_storage, package_url(package)) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_revision_id = hash_to_bytes("d8a1c7474d2956ac598a19f0f27d52f7015f117e") - revision = loader.storage.revision_get([expected_revision_id])[0] + revision = swh_storage.revision_get([expected_revision_id])[0] assert revision is not None check_metadata_paths( revision.metadata, paths=[ ("intrinsic.tool", str), ("intrinsic.raw", dict), ("extrinsic.provider", str), ("extrinsic.when", str), ("extrinsic.raw", dict), ("original_artifact", list), ], ) for original_artifact in revision.metadata["original_artifact"]: check_metadata_paths( original_artifact, paths=[("filename", str), ("length", int), ("checksums", dict),], ) -def test_npm_loader_first_visit(swh_config, requests_mock_datadir, org_api_info): +def test_npm_loader_first_visit(swh_storage, requests_mock_datadir, org_api_info): package = "org" url = package_url(package) - loader = NpmLoader(url) + loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( - loader.storage, url, status="full", type="npm", snapshot=expected_snapshot_id + swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert 
{ "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "release": 0, "revision": len(_expected_new_revisions_first_visit), "skipped_content": 0, "snapshot": 1, } == stats - contents = loader.storage.content_get(_expected_new_contents_first_visit) + contents = swh_storage.content_get(_expected_new_contents_first_visit) count = sum(0 if content is None else 1 for content in contents) assert count == len(_expected_new_contents_first_visit) assert ( - list(loader.storage.directory_missing(_expected_new_directories_first_visit)) - == [] + list(swh_storage.directory_missing(_expected_new_directories_first_visit)) == [] ) - assert ( - list(loader.storage.revision_missing(_expected_new_revisions_first_visit)) == [] - ) + assert list(swh_storage.revision_missing(_expected_new_revisions_first_visit)) == [] versions = [ ("0.0.2", "d8a1c7474d2956ac598a19f0f27d52f7015f117e"), ("0.0.3", "5f9eb78af37ffd12949f235e86fac04898f9f72a"), ("0.0.4", "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a"), ] expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target=b"releases/0.0.4", target_type=TargetType.ALIAS ), **{ b"releases/" + version_name.encode(): SnapshotBranch( target=hash_to_bytes(version_id), target_type=TargetType.REVISION, ) for (version_name, version_id) in versions }, }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://npmjs.com/", ) for (version_name, revision_id) in versions: - revision = loader.storage.revision_get([hash_to_bytes(revision_id)])[0] + revision = swh_storage.revision_get([hash_to_bytes(revision_id)])[0] directory_id = revision.directory directory_swhid = SWHID(object_type="directory", object_id=directory_id,) revision_swhid = SWHID(object_type="revision", object_id=revision_id,) expected_metadata = [ 
RawExtrinsicMetadata( type=MetadataTargetType.DIRECTORY, target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.npm.loader.NpmLoader", version=__version__, ), discovery_date=loader.visit_date, format="replicate-npm-package-json", metadata=json.dumps( json.loads(org_api_info)["versions"][version_name] ).encode(), origin="https://www.npmjs.com/package/org", revision=revision_swhid, ) ] - assert loader.storage.raw_extrinsic_metadata_get( + assert swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, directory_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) -def test_npm_loader_incremental_visit(swh_config, requests_mock_datadir_visits): +def test_npm_loader_incremental_visit(swh_storage, requests_mock_datadir_visits): package = "org" url = package_url(package) - loader = NpmLoader(url) + loader = NpmLoader(swh_storage, url) expected_snapshot_id = hash_to_bytes("d0587e1195aed5a8800411a008f2f2d627f18e2d") actual_load_status = loader.load() assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( - loader.storage, url, status="full", type="npm", snapshot=expected_snapshot_id + swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "release": 0, "revision": len(_expected_new_revisions_first_visit), "skipped_content": 0, "snapshot": 1, } == stats # reset loader internal state del loader._cached_info del loader._cached__raw_info actual_load_status2 = loader.load() assert actual_load_status2["status"] == "eventful" snap_id2 = actual_load_status2["snapshot_id"] assert snap_id2 is not None assert snap_id2 != actual_load_status["snapshot_id"] - 
assert_last_visit_matches(loader.storage, url, status="full", type="npm") + assert_last_visit_matches(swh_storage, url, status="full", type="npm") - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { # 3 new releases artifacts "content": len(_expected_new_contents_first_visit) + 14, "directory": len(_expected_new_directories_first_visit) + 15, "origin": 1, "origin_visit": 2, "release": 0, "revision": len(_expected_new_revisions_first_visit) + 3, "skipped_content": 0, "snapshot": 2, } == stats urls = [ m.url for m in requests_mock_datadir_visits.request_history if m.url.startswith("https://registry.npmjs.org") ] assert len(urls) == len(set(urls)) # we visited each artifact once across all visits @pytest.mark.usefixtures("requests_mock_datadir") -def test_npm_loader_version_divergence(swh_config): +def test_npm_loader_version_divergence(swh_storage): package = "@aller_shared" url = package_url(package) - loader = NpmLoader(url) + loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( - loader.storage, url, status="full", type="npm", snapshot=expected_snapshot_id + swh_storage, url, status="full", type="npm", snapshot=expected_snapshot_id ) - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { # 1 new releases artifacts "content": 534, "directory": 153, "origin": 1, "origin_visit": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 1, } == stats expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.1.0" ), b"releases/0.1.0": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"), ), b"releases/0.1.1-alpha.14": SnapshotBranch(
target_type=TargetType.REVISION, target=hash_to_bytes("05181c12cd8c22035dd31155656826b85745da37"), ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) def test_npm_artifact_to_revision_id_none(): """Current loader version should stop soon if nothing can be found """ class artifact_metadata: shasum = "05181c12cd8c22035dd31155656826b85745da37" known_artifacts = { "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": {}, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_npm_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ class artifact_metadata: shasum = "05181c12cd8c22035dd31155656826b85745da37" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "package_source": {"sha1": "something-wrong"} }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "package_source": {"sha1": "05181c12cd8c22035dd31155656826b85745da37",} }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "845673bfe8cbd31b1eaf757745a964137e6f9116" ) def test_npm_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ class artifact_metadata: shasum = "05181c12cd8c22035dd31155656826b85745da37" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "original_artifact": [ {"checksums": {"sha1": "05181c12cd8c22035dd31155656826b85745da37"},} ], }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "original_artifact": [{"checksums": {"sha1": "something-wrong"},}], }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92" ) -def test_npm_artifact_with_no_intrinsic_metadata(swh_config, requests_mock_datadir): +def test_npm_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir): """Skip artifact with 
no intrinsic metadata during ingestion """ package = "nativescript-telerik-analytics" url = package_url(package) - loader = NpmLoader(url) + loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot( id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, ) assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot.id.hex(), } - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) assert_last_visit_matches( - loader.storage, url, status="full", type="npm", snapshot=expected_snapshot.id + swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id ) -def test_npm_artifact_with_no_upload_time(swh_config, requests_mock_datadir): +def test_npm_artifact_with_no_upload_time(swh_storage, requests_mock_datadir): """With no time upload, artifact is skipped """ package = "jammit-no-time" url = package_url(package) - loader = NpmLoader(url) + loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot( id=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), branches={}, ) assert actual_load_status == { "status": "uneventful", "snapshot_id": expected_snapshot.id.hex(), } - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) assert_last_visit_matches( - loader.storage, url, status="partial", type="npm", snapshot=expected_snapshot.id + swh_storage, url, status="partial", type="npm", snapshot=expected_snapshot.id ) -def test_npm_artifact_use_mtime_if_no_time(swh_config, requests_mock_datadir): +def test_npm_artifact_use_mtime_if_no_time(swh_storage, requests_mock_datadir): """With no upload time, the artifact mtime is used instead """ package = "jammit-express" url = package_url(package) - loader =
NpmLoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("d6e08e19159f77983242877c373c75222d5ae9dd") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } # artifact is used expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.0.1" ), b"releases/0.0.1": SnapshotBranch( target_type=TargetType.REVISION, target=hash_to_bytes("9e4dd2b40d1b46b70917c0949aa2195c823a648e"), ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) assert_last_visit_matches( - loader.storage, url, status="full", type="npm", snapshot=expected_snapshot.id + swh_storage, url, status="full", type="npm", snapshot=expected_snapshot.id ) -def test_npm_no_artifact(swh_config, requests_mock_datadir): +def test_npm_no_artifact(swh_storage, requests_mock_datadir): """If no artifacts at all is found for origin, the visit fails completely """ package = "catify" url = package_url(package) - loader = NpmLoader(url) + loader = NpmLoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status == { "status": "failed", } - assert_last_visit_matches(loader.storage, url, status="failed", type="npm") + assert_last_visit_matches(swh_storage, url, status="failed", type="npm") -def test_npm_origin_not_found(swh_config, requests_mock_datadir): +def test_npm_origin_not_found(swh_storage, requests_mock_datadir): url = package_url("non-existent-url") - loader = NpmLoader(url) + loader = NpmLoader(swh_storage, url) assert loader.load() == {"status": "failed"} assert_last_visit_matches( - loader.storage, url, status="not_found", type="npm", snapshot=None + swh_storage, url, status="not_found", type="npm", snapshot=None ) diff --git a/swh/loader/package/npm/tests/test_tasks.py b/swh/loader/package/npm/tests/test_tasks.py index c0b3a5f..4cdbb36 100644 --- 
a/swh/loader/package/npm/tests/test_tasks.py +++ b/swh/loader/package/npm/tests/test_tasks.py @@ -1,21 +1,21 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -def test_npm_loader( +def test_tasks_npm_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): - mock_loader = mocker.patch("swh.loader.package.npm.loader.NpmLoader.load") - mock_loader.return_value = {"status": "eventful"} + mock_load = mocker.patch("swh.loader.package.npm.loader.NpmLoader.load") + mock_load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.npm.tasks.LoadNpm", kwargs=dict(url="https://www.npmjs.com/package/some-package"), ) assert res res.wait() assert res.successful() - + assert mock_load.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py index 6936c36..942547f 100644 --- a/swh/loader/package/pypi/loader.py +++ b/swh/loader/package/pypi/loader.py @@ -1,284 +1,290 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os from typing import Any, Dict, Iterator, Optional, Sequence, Tuple from urllib.parse import urlparse import attr from pkginfo import UnpackedSDist from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import EMPTY_AUTHOR, api_info, cached_method, release_name from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, Person, Revision, 
RevisionType, Sha1Git, TimestampWithTimezone, ) +from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) @attr.s class PyPIPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) comment_text = attr.ib(type=Optional[str]) sha256 = attr.ib(type=str) upload_time = attr.ib(type=str) @classmethod def from_metadata(cls, metadata: Dict[str, Any]) -> "PyPIPackageInfo": return cls( url=metadata["url"], filename=metadata["filename"], raw_info=metadata, comment_text=metadata.get("comment_text"), sha256=metadata["digests"]["sha256"], upload_time=metadata["upload_time"], directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( format="pypi-project-json", metadata=json.dumps(metadata).encode(), ) ], ) class PyPILoader(PackageLoader[PyPIPackageInfo]): """Load pypi origin's artifact releases into swh archive. """ visit_type = "pypi" - def __init__(self, url): - super().__init__(url=url) + def __init__( + self, + storage: StorageInterface, + url: str, + max_content_size: Optional[int] = None, + ): + super().__init__(storage=storage, url=url, max_content_size=max_content_size) self.provider_url = pypi_api_url(self.url) @cached_method def _raw_info(self) -> bytes: return api_info(self.provider_url) @cached_method def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ return json.loads(self._raw_info()) def get_versions(self) -> Sequence[str]: return self.info()["releases"].keys() def get_default_version(self) -> str: return self.info()["info"]["version"] def get_metadata_authority(self): p_url = urlparse(self.url) return MetadataAuthority( type=MetadataAuthorityType.FORGE, url=f"{p_url.scheme}://{p_url.netloc}/", metadata={}, ) def get_package_info(self, version: str) -> Iterator[Tuple[str, PyPIPackageInfo]]: res = [] for meta in self.info()["releases"][version]: if meta["packagetype"] != "sdist": continue p_info = PyPIPackageInfo.from_metadata(meta) res.append((version, p_info)) if len(res) 
== 1: version, p_info = res[0] yield release_name(version), p_info else: for version, p_info in res: yield release_name(version, p_info.filename), p_info def resolve_revision_from( self, known_artifacts: Dict, p_info: PyPIPackageInfo ) -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, p_info) def build_revision( self, p_info: PyPIPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None # from intrinsic metadata name = i_metadata["version"] _author = author(i_metadata) # from extrinsic metadata message = p_info.comment_text or "" message = "%s: %s" % (name, message) if message else name date = TimestampWithTimezone.from_iso8601(p_info.upload_time) return Revision( type=RevisionType.TAR, message=message.encode("utf-8"), author=_author, date=date, committer=_author, committer_date=date, parents=(), directory=directory, synthetic=True, metadata={ "intrinsic": {"tool": "PKG-INFO", "raw": i_metadata,}, "extrinsic": { "provider": self.provider_url, "when": self.visit_date.isoformat(), "raw": p_info.raw_info, }, }, ) def artifact_to_revision_id( known_artifacts: Dict, p_info: PyPIPackageInfo ) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats (column metadata in 'revision') - old format sample:: { 'original_artifact': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, ... } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... 
} """ sha256 = p_info.sha256 for rev_id, known_artifact in known_artifacts.items(): original_artifact = known_artifact["original_artifact"] if isinstance(original_artifact, dict): # previous loader-pypi version stored metadata as dict original_sha256 = original_artifact["sha256"] if sha256 == original_sha256: return rev_id continue # new pypi loader actually store metadata dict differently... assert isinstance(original_artifact, list) # current loader-pypi stores metadata as list of dict for original_artifact in known_artifact["original_artifact"]: if sha256 == original_artifact["checksums"]["sha256"]: return rev_id return None def pypi_api_url(url: str) -> str: """Compute api url from a project url Args: url (str): PyPI instance's url (e.g: https://pypi.org/project/requests) This deals with correctly transforming the project's api url (e.g https://pypi.org/pypi/requests/json) Returns: api url """ p_url = urlparse(url) project_name = p_url.path.rstrip("/").split("/")[-1] url = "%s://%s/pypi/%s/json" % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any or None if none was present. 
""" # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] pkginfo_path = os.path.join(dir_path, project_dirname, "PKG-INFO") if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ raw.pop("filename") # this gets added with the ondisk location return raw def author(data: Dict) -> Person: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: a swh-model Person representing the author. """ name = data.get("author") email = data.get("author_email") fullname = None # type: Optional[str] if email: fullname = "%s <%s>" % (name, email) else: fullname = name if not fullname: return EMPTY_AUTHOR if name is not None: name = name.encode("utf-8") if email is not None: email = email.encode("utf-8") return Person(fullname=fullname.encode("utf-8"), name=name, email=email) diff --git a/swh/loader/package/pypi/tasks.py b/swh/loader/package/pypi/tasks.py index 933f7d2..45a60c3 100644 --- a/swh/loader/package/pypi/tasks.py +++ b/swh/loader/package/pypi/tasks.py @@ -1,14 +1,14 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.pypi.loader import PyPILoader @shared_task(name=__name__ + ".LoadPyPI") def load_pypi(*, url=None): """Load PyPI package""" - return PyPILoader(url).load() + return PyPILoader.from_configfile(url=url).load() diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py index b55df1f..836f0c5 100644 ---
a/swh/loader/package/pypi/tests/test_pypi.py +++ b/swh/loader/package/pypi/tests/test_pypi.py @@ -1,917 +1,899 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -import copy import json import os from os import path from unittest.mock import patch import pytest -import yaml from swh.core.pytest_plugin import requests_mock_datadir_factory from swh.core.tarball import uncompress from swh.loader.package import __version__ from swh.loader.package.pypi.loader import ( PyPILoader, artifact_to_revision_id, author, extract_intrinsic_metadata, pypi_api_url, ) from swh.loader.package.tests.common import check_metadata_paths from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.identifiers import SWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, Person, RawExtrinsicMetadata, Snapshot, SnapshotBranch, TargetType, ) from swh.storage.interface import PagedResult @pytest.fixture def _0805nexter_api_info(datadir) -> bytes: with open( os.path.join(datadir, "https_pypi.org", "pypi_0805nexter_json"), "rb", ) as f: return f.read() -def test_author_basic(): +def test_pypi_author_basic(): data = { "author": "i-am-groot", "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( fullname=b"i-am-groot ", name=b"i-am-groot", email=b"iam@groot.org", ) assert actual_author == expected_author -def test_author_empty_email(): +def test_pypi_author_empty_email(): data = { "author": "i-am-groot", "author_email": "", } actual_author = author(data) expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",) assert actual_author == expected_author -def test_author_empty_name(): +def 
test_pypi_author_empty_name(): data = { "author": "", "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( fullname=b" <iam@groot.org>", name=b"", email=b"iam@groot.org", ) assert actual_author == expected_author -def test_author_malformed(): +def test_pypi_author_malformed(): data = { "author": "['pierre', 'paul', 'jacques']", "author_email": None, } actual_author = author(data) expected_author = Person( fullname=b"['pierre', 'paul', 'jacques']", name=b"['pierre', 'paul', 'jacques']", email=None, ) assert actual_author == expected_author -def test_author_malformed_2(): +def test_pypi_author_malformed_2(): data = { "author": "[marie, jeanne]", "author_email": "[marie@some, jeanne@thing]", } actual_author = author(data) expected_author = Person( fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>", name=b"[marie, jeanne]", email=b"[marie@some, jeanne@thing]", ) assert actual_author == expected_author -def test_author_malformed_3(): +def test_pypi_author_malformed_3(): data = { "author": "[marie, jeanne, pierre]", "author_email": "[marie@somewhere.org, jeanne@somewhere.org]", } actual_author = author(data) expected_author = Person( fullname=( b"[marie, jeanne, pierre] " b"<[marie@somewhere.org, jeanne@somewhere.org]>" ), name=b"[marie, jeanne, pierre]", email=b"[marie@somewhere.org, jeanne@somewhere.org]", ) assert actual_author == expected_author # configuration error # -def test_badly_configured_loader_raise(tmp_path, swh_loader_config, monkeypatch): - """Badly configured loader should raise""" - wrong_config = copy.deepcopy(swh_loader_config) - wrong_config.pop("storage") - - conf_path = os.path.join(str(tmp_path), "loader.yml") - with open(conf_path, "w") as f: - f.write(yaml.dump(wrong_config)) - monkeypatch.setenv("SWH_CONFIG_FILENAME", conf_path) - - with pytest.raises(ValueError, match="Misconfiguration"): - PyPILoader(url="some-url") - - def test_pypi_api_url(): """Compute pypi api url from the pypi project url should be ok""" url =
pypi_api_url("https://pypi.org/project/requests") assert url == "https://pypi.org/pypi/requests/json" def test_pypi_api_url_with_slash(): """Compute pypi api url from the pypi project url should be ok""" url = pypi_api_url("https://pypi.org/project/requests/") assert url == "https://pypi.org/pypi/requests/json" @pytest.mark.fs -def test_extract_intrinsic_metadata(tmp_path, datadir): +def test_pypi_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) archive_path = path.join( datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip" ) uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { "metadata_version": "1.0", "name": "0805nexter", "version": "1.1.0", "summary": "a simple printer of nested lest", "home_page": "http://www.hp.com", "author": "hgtkpython", "author_email": "2868989685@qq.com", "platforms": ["UNKNOWN"], } assert actual_metadata == expected_metadata @pytest.mark.fs -def test_extract_intrinsic_metadata_failures(tmp_path): +def test_pypi_extract_intrinsic_metadata_failures(tmp_path): """Parsing inexistent path/archive/PKG-INFO yield None""" tmp_path = str(tmp_path) # py3.5 work around (PosixPath issue) # inexistent first level path assert extract_intrinsic_metadata("/something-inexistent") == {} # inexistent second level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # inexistent PKG-INFO within second level path existing_path_no_pkginfo = path.join(tmp_path, "something") os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} # LOADER SCENARIO # # "edge" cases (for the same origin) # # no release artifact: # {visit full, status: uneventful, no contents, etc...} requests_mock_datadir_missing_all = requests_mock_datadir_factory( ignore_urls=[ 
"https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa "https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip", # noqa ] ) -def test_no_release_artifact(swh_config, requests_mock_datadir_missing_all): +def test_pypi_no_release_artifact(swh_storage, requests_mock_datadir_missing_all): """Load a pypi project with all artifacts missing ends up with no snapshot """ url = "https://pypi.org/project/0805nexter" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert actual_load_status["snapshot_id"] is not None - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats - assert_last_visit_matches(loader.storage, url, status="partial", type="pypi") + assert_last_visit_matches(swh_storage, url, status="partial", type="pypi") -def test_pypi_fail__load_snapshot(swh_config, requests_mock_datadir): +def test_pypi_fail__load_snapshot(swh_storage, requests_mock_datadir): """problem during loading: {visit: failed, status: failed, no snapshot} """ url = "https://pypi.org/project/0805nexter" with patch( "swh.loader.package.pypi.loader.PyPILoader._load_snapshot", side_effect=ValueError("Fake problem to fail visit"), ): - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} stats = get_stats(loader.storage) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 0, } == stats - assert_last_visit_matches(loader.storage, url, status="failed", type="pypi") + assert_last_visit_matches(swh_storage, url, 
status="failed", type="pypi") # problem during loading: # {visit: partial, status: uneventful, no snapshot} -def test_release_with_traceback(swh_config, requests_mock_datadir): +def test_pypi_release_with_traceback(swh_storage, requests_mock_datadir): url = "https://pypi.org/project/0805nexter" with patch( "swh.loader.package.pypi.loader.PyPILoader.last_snapshot", side_effect=ValueError("Fake problem to fail the visit"), ): - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 0, } == stats - assert_last_visit_matches(loader.storage, url, status="failed", type="pypi") + assert_last_visit_matches(swh_storage, url, status="failed", type="pypi") # problem during loading: failure early enough in between swh contents... # some contents (contents, directories, etc...) 
have been written in storage # {visit: partial, status: eventful, no snapshot} # problem during loading: failure late enough we can have snapshots (some # revisions are written in storage already) # {visit: partial, status: eventful, snapshot} # "normal" cases (for the same origin) # requests_mock_datadir_missing_one = requests_mock_datadir_factory( ignore_urls=[ "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa ] ) # some missing release artifacts: # {visit partial, status: eventful, 1 snapshot} -def test_revision_metadata_structure( - swh_config, requests_mock_datadir, _0805nexter_api_info +def test_pypi_revision_metadata_structure( + swh_storage, requests_mock_datadir, _0805nexter_api_info ): url = "https://pypi.org/project/0805nexter" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21") - revision = loader.storage.revision_get([expected_revision_id])[0] + revision = swh_storage.revision_get([expected_revision_id])[0] assert revision is not None check_metadata_paths( revision.metadata, paths=[ ("intrinsic.tool", str), ("intrinsic.raw", dict), ("extrinsic.provider", str), ("extrinsic.when", str), ("extrinsic.raw", dict), ("original_artifact", list), ], ) for original_artifact in revision.metadata["original_artifact"]: check_metadata_paths( original_artifact, paths=[("filename", str), ("length", int), ("checksums", dict),], ) revision_swhid = SWHID( object_type="revision", object_id=hash_to_hex(expected_revision_id) ) directory_swhid = SWHID( object_type="directory", object_id=hash_to_hex(revision.directory) ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://pypi.org/", ) expected_metadata = [ 
RawExtrinsicMetadata( type=MetadataTargetType.DIRECTORY, target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.pypi.loader.PyPILoader", version=__version__, ), discovery_date=loader.visit_date, format="pypi-project-json", metadata=json.dumps( json.loads(_0805nexter_api_info)["releases"]["1.2.0"][0] ).encode(), origin=url, revision=revision_swhid, ) ] - assert loader.storage.raw_extrinsic_metadata_get( + assert swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, directory_swhid, metadata_authority, ) == PagedResult(next_page_token=None, results=expected_metadata,) -def test_visit_with_missing_artifact(swh_config, requests_mock_datadir_missing_one): +def test_pypi_visit_with_missing_artifact( + swh_storage, requests_mock_datadir_missing_one +): """Load a pypi project with some missing artifacts ends up with 1 snapshot """ url = "https://pypi.org/project/0805nexter" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("dd0e4201a232b1c104433741dbf45895b8ac9355") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 3, "directory": 2, "origin": 1, "origin_visit": 1, "release": 0, "revision": 1, "skipped_content": 0, "snapshot": 1, } == stats expected_contents = map( hash_to_bytes, [ "405859113963cb7a797642b45f171d6360425d16", "e5686aa568fdb1d19d7f1329267082fe40482d31", "83ecf6ec1114fd260ca7a833a2d165e71258c338", ], ) - assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] + assert list(swh_storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map( hash_to_bytes, [ "b178b66bd22383d5f16f4f5c923d39ca798861b4", "c3a58f8b57433a4b56caaa5033ae2e0931405338", ], ) - assert list(loader.storage.directory_missing(expected_dirs)) == [] + assert 
list(swh_storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( "b178b66bd22383d5f16f4f5c923d39ca798861b4" ), # noqa } - assert list(loader.storage.revision_missing(expected_revs)) == [] + assert list(swh_storage.revision_missing(expected_revs)) == [] expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/1.2.0": SnapshotBranch( target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), target_type=TargetType.REVISION, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", target_type=TargetType.ALIAS, ), }, ) - check_snapshot(expected_snapshot, storage=loader.storage) + check_snapshot(expected_snapshot, storage=swh_storage) assert_last_visit_matches( - loader.storage, - url, - status="partial", - type="pypi", - snapshot=expected_snapshot_id, + swh_storage, url, status="partial", type="pypi", snapshot=expected_snapshot_id, ) -def test_visit_with_1_release_artifact(swh_config, requests_mock_datadir): +def test_pypi_visit_with_1_release_artifact(swh_storage, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ url = "https://pypi.org/project/0805nexter" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 1, } == stats expected_contents = map( hash_to_bytes, [ "a61e24cdfdab3bb7817f6be85d37a3e666b34566", "938c33483285fd8ad57f15497f538320df82aeb8", "a27576d60e08c94a05006d2e6d540c0fdb5f38c8", "405859113963cb7a797642b45f171d6360425d16", 
"e5686aa568fdb1d19d7f1329267082fe40482d31", "83ecf6ec1114fd260ca7a833a2d165e71258c338", ], ) - assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] + assert list(swh_storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map( hash_to_bytes, [ "05219ba38bc542d4345d5638af1ed56c7d43ca7d", "cf019eb456cf6f78d8c4674596f1c9a97ece8f44", "b178b66bd22383d5f16f4f5c923d39ca798861b4", "c3a58f8b57433a4b56caaa5033ae2e0931405338", ], ) - assert list(loader.storage.directory_missing(expected_dirs)) == [] + assert list(swh_storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes( "05219ba38bc542d4345d5638af1ed56c7d43ca7d" ), # noqa hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( "b178b66bd22383d5f16f4f5c923d39ca798861b4" ), # noqa } - assert list(loader.storage.revision_missing(expected_revs)) == [] + assert list(swh_storage.revision_missing(expected_revs)) == [] expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"releases/1.1.0": SnapshotBranch( target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), target_type=TargetType.REVISION, ), b"releases/1.2.0": SnapshotBranch( target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), target_type=TargetType.REVISION, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", target_type=TargetType.ALIAS, ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) assert_last_visit_matches( - loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot_id + swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id ) -def test_multiple_visits_with_no_change(swh_config, requests_mock_datadir): +def test_pypi_multiple_visits_with_no_change(swh_storage, requests_mock_datadir): """Multiple visits with no changes results in 1 same snapshot """ url = 
"https://pypi.org/project/0805nexter" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a") assert actual_load_status == { "status": "eventful", "snapshot_id": snapshot_id.hex(), } assert_last_visit_matches( - loader.storage, url, status="full", type="pypi", snapshot=snapshot_id + swh_storage, url, status="full", type="pypi", snapshot=snapshot_id ) - stats = get_stats(loader.storage) + stats = get_stats(swh_storage) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 1, } == stats expected_snapshot = Snapshot( id=snapshot_id, branches={ b"releases/1.1.0": SnapshotBranch( target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), target_type=TargetType.REVISION, ), b"releases/1.2.0": SnapshotBranch( target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), target_type=TargetType.REVISION, ), b"HEAD": SnapshotBranch( target=b"releases/1.2.0", target_type=TargetType.ALIAS, ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) actual_load_status2 = loader.load() assert actual_load_status2 == { "status": "uneventful", "snapshot_id": actual_load_status2["snapshot_id"], } visit_status2 = assert_last_visit_matches( - loader.storage, url, status="full", type="pypi" + swh_storage, url, status="full", type="pypi" ) - stats2 = get_stats(loader.storage) + stats2 = get_stats(swh_storage) expected_stats2 = stats.copy() expected_stats2["origin_visit"] = 1 + 1 assert expected_stats2 == stats2 # same snapshot assert visit_status2.snapshot == snapshot_id -def test_incremental_visit(swh_config, requests_mock_datadir_visits): +def test_pypi_incremental_visit(swh_storage, requests_mock_datadir_visits): """With prior visit, 2nd load will result with a different snapshot """ url = "https://pypi.org/project/0805nexter" - 
loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) visit1_actual_load_status = loader.load() - visit1_stats = get_stats(loader.storage) + visit1_stats = get_stats(swh_storage) expected_snapshot_id = hash_to_bytes("ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a") assert visit1_actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( - loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot_id + swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id ) assert { "content": 6, "directory": 4, "origin": 1, "origin_visit": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 1, } == visit1_stats # Reset internal state del loader._cached__raw_info del loader._cached_info visit2_actual_load_status = loader.load() - visit2_stats = get_stats(loader.storage) + visit2_stats = get_stats(swh_storage) assert visit2_actual_load_status["status"] == "eventful", visit2_actual_load_status expected_snapshot_id2 = hash_to_bytes("2e5149a7b0725d18231a37b342e9b7c4e121f283") assert visit2_actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id2.hex(), } assert_last_visit_matches( - loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2 + swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot_id2 ) assert { "content": 6 + 1, # 1 more content "directory": 4 + 2, # 2 more directories "origin": 1, "origin_visit": 1 + 1, "release": 0, "revision": 2 + 1, # 1 more revision "skipped_content": 0, "snapshot": 1 + 1, # 1 more snapshot } == visit2_stats expected_contents = map( hash_to_bytes, [ "a61e24cdfdab3bb7817f6be85d37a3e666b34566", "938c33483285fd8ad57f15497f538320df82aeb8", "a27576d60e08c94a05006d2e6d540c0fdb5f38c8", "405859113963cb7a797642b45f171d6360425d16", "e5686aa568fdb1d19d7f1329267082fe40482d31", "83ecf6ec1114fd260ca7a833a2d165e71258c338", "92689fa2b7fb4d4fc6fb195bf73a50c87c030639", ], ) - assert 
list(loader.storage.content_missing_per_sha1(expected_contents)) == [] + assert list(swh_storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map( hash_to_bytes, [ "05219ba38bc542d4345d5638af1ed56c7d43ca7d", "cf019eb456cf6f78d8c4674596f1c9a97ece8f44", "b178b66bd22383d5f16f4f5c923d39ca798861b4", "c3a58f8b57433a4b56caaa5033ae2e0931405338", "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a", "52604d46843b898f5a43208045d09fcf8731631b", ], ) - assert list(loader.storage.directory_missing(expected_dirs)) == [] + assert list(swh_storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes( "05219ba38bc542d4345d5638af1ed56c7d43ca7d" ), # noqa hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( "b178b66bd22383d5f16f4f5c923d39ca798861b4" ), # noqa hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"): hash_to_bytes( "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a" ), # noqa } - assert list(loader.storage.revision_missing(expected_revs)) == [] + assert list(swh_storage.revision_missing(expected_revs)) == [] expected_snapshot = Snapshot( id=expected_snapshot_id2, branches={ b"releases/1.1.0": SnapshotBranch( target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), target_type=TargetType.REVISION, ), b"releases/1.2.0": SnapshotBranch( target=hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"), target_type=TargetType.REVISION, ), b"releases/1.3.0": SnapshotBranch( target=hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"), target_type=TargetType.REVISION, ), b"HEAD": SnapshotBranch( target=b"releases/1.3.0", target_type=TargetType.ALIAS, ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) assert_last_visit_matches( - loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot.id + swh_storage, url, status="full", type="pypi", 
snapshot=expected_snapshot.id ) urls = [ m.url for m in requests_mock_datadir_visits.request_history if m.url.startswith("https://files.pythonhosted.org") ] # visited each artifact once across 2 visits assert len(urls) == len(set(urls)) # release artifact, no new artifact # {visit full, status uneventful, same snapshot as before} # release artifact, old artifact with different checksums # {visit full, status full, new snapshot with shared history and some new # different history} # release with multiple sdist artifacts per pypi "version" # snapshot branch output is different -def test_visit_1_release_with_2_artifacts(swh_config, requests_mock_datadir): +def test_pypi_visit_1_release_with_2_artifacts(swh_storage, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ url = "https://pypi.org/project/nexter" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"releases/1.1.0/nexter-1.1.0.zip": SnapshotBranch( target=hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"), target_type=TargetType.REVISION, ), b"releases/1.1.0/nexter-1.1.0.tar.gz": SnapshotBranch( target=hash_to_bytes("0bf88f5760cca7665d0af4d6575d9301134fe11a"), target_type=TargetType.REVISION, ), }, ) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) assert_last_visit_matches( - loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot.id + swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id ) def test_pypi_artifact_to_revision_id_none(): """Current loader version should stop soon if nothing can be found """ class artifact_metadata: sha256 = 
"6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec" assert artifact_to_revision_id({}, artifact_metadata) is None known_artifacts = { "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": { "original_artifact": {"sha256": "something-irrelevant",}, }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_pypi_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ class artifact_metadata: sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "original_artifact": {"sha256": "something-wrong",}, }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "original_artifact": { "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa }, }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "845673bfe8cbd31b1eaf757745a964137e6f9116" ) def test_pypi_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ class artifact_metadata: sha256 = "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec" known_artifacts = { hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { "original_artifact": [ { "checksums": { "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa }, } ], }, hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { "original_artifact": [{"checksums": {"sha256": "something-wrong"},}], }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92" ) -def test_pypi_artifact_with_no_intrinsic_metadata(swh_config, requests_mock_datadir): +def test_pypi_artifact_with_no_intrinsic_metadata(swh_storage, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ url = 
"https://pypi.org/project/upymenu" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } # no branch as one artifact without any intrinsic metadata expected_snapshot = Snapshot(id=expected_snapshot_id, branches={}) - check_snapshot(expected_snapshot, loader.storage) + check_snapshot(expected_snapshot, swh_storage) assert_last_visit_matches( - loader.storage, url, status="full", type="pypi", snapshot=expected_snapshot.id + swh_storage, url, status="full", type="pypi", snapshot=expected_snapshot.id ) -def test_pypi_origin_not_found(swh_config, requests_mock_datadir): +def test_pypi_origin_not_found(swh_storage, requests_mock_datadir): url = "https://pypi.org/project/unknown" - loader = PyPILoader(url) + loader = PyPILoader(swh_storage, url) assert loader.load() == {"status": "failed"} assert_last_visit_matches( - loader.storage, url, status="not_found", type="pypi", snapshot=None + swh_storage, url, status="not_found", type="pypi", snapshot=None ) diff --git a/swh/loader/package/pypi/tests/test_tasks.py b/swh/loader/package/pypi/tests/test_tasks.py index 19886b5..35387b0 100644 --- a/swh/loader/package/pypi/tests/test_tasks.py +++ b/swh/loader/package/pypi/tests/test_tasks.py @@ -1,20 +1,20 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -def test_pypi_loader( +def test_tasks_pypi_loader( mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config ): - mock_loader = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load") - mock_loader.return_value = {"status": 
"eventful"} + mock_load = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load") + mock_load.return_value = {"status": "eventful"} res = swh_scheduler_celery_app.send_task( "swh.loader.package.pypi.tasks.LoadPyPI", kwargs=dict(url="some-url") ) assert res res.wait() assert res.successful() - + assert mock_load.called assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/tests/test_loader.py b/swh/loader/package/tests/test_loader.py index e8aedfa..ac9ee79 100644 --- a/swh/loader/package/tests/test_loader.py +++ b/swh/loader/package/tests/test_loader.py @@ -1,87 +1,87 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import attr import pytest from swh.loader.package.loader import BasePackageInfo, PackageLoader class FakeStorage: def origin_add(self, origins): raise ValueError("We refuse to add an origin") def origin_visit_get_latest(self, origin): return None class FakeStorage2(FakeStorage): def origin_add(self, origins): pass def origin_visit_add(self, visits): raise ValueError("We refuse to add an origin visit") -def test_loader_origin_visit_failure(swh_config): +def test_loader_origin_visit_failure(swh_storage): """Failure to add origin or origin visit should failed immediately """ - loader = PackageLoader("some-url") + loader = PackageLoader(swh_storage, "some-url") loader.storage = FakeStorage() actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} loader.storage = FakeStorage2() actual_load_status2 = loader.load() assert actual_load_status2 == {"status": "failed"} def test_artifact_identity(): """Compute primary key should return the right identity """ @attr.s class TestPackageInfo(BasePackageInfo): a = attr.ib() b = attr.ib() length = 
attr.ib() filename = attr.ib() version = attr.ib() ID_KEYS = ["a", "b"] p_info = TestPackageInfo( url="http://example.org/", a=1, b=2, length=221837, filename="8sync-0.1.0.tar.gz", version="0.1.0", ) actual_id = p_info.artifact_identity() assert actual_id == [1, 2] def test_no_env_swh_config_filename_raise(monkeypatch): """No SWH_CONFIG_FILENAME environment variable makes package loader init raise """ class DummyPackageLoader(PackageLoader): """A dummy package loader for test purpose""" pass monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False) with pytest.raises( AssertionError, match="SWH_CONFIG_FILENAME environment variable is undefined" ): - DummyPackageLoader(url="some-url") + DummyPackageLoader.from_configfile(url="some-url") diff --git a/swh/loader/package/tests/test_loader_metadata.py b/swh/loader/package/tests/test_loader_metadata.py index 3ddc849..411c40e 100644 --- a/swh/loader/package/tests/test_loader_metadata.py +++ b/swh/loader/package/tests/test_loader_metadata.py @@ -1,247 +1,228 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from typing import Iterator, List, Sequence, Tuple import attr from swh.loader.package import __version__ from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.model.hashutil import hash_to_bytes from swh.model.identifiers import SWHID from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MetadataTargetType, Person, RawExtrinsicMetadata, Revision, RevisionType, Sha1Git, ) -from swh.storage import get_storage EMPTY_SNAPSHOT_ID = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e" FULL_SNAPSHOT_ID = "4a9b608c9f01860a627237dd2409d1d50ec4b054" AUTHORITY = 
MetadataAuthority( type=MetadataAuthorityType.FORGE, url="http://example.org/", ) ORIGIN_URL = "http://example.org/archive.tgz" REVISION_ID = hash_to_bytes("8ff44f081d43176474b267de5451f2c2e88089d0") REVISION_SWHID = SWHID(object_type="revision", object_id=REVISION_ID) DIRECTORY_ID = hash_to_bytes("aa" * 20) DIRECTORY_SWHID = SWHID(object_type="directory", object_id=DIRECTORY_ID) FETCHER = MetadataFetcher( name="swh.loader.package.tests.test_loader_metadata.MetadataTestLoader", version=__version__, ) DISCOVERY_DATE = datetime.datetime.now(tz=datetime.timezone.utc) DIRECTORY_METADATA = [ RawExtrinsicMetadata( type=MetadataTargetType.DIRECTORY, target=DIRECTORY_SWHID, discovery_date=DISCOVERY_DATE, authority=AUTHORITY, fetcher=FETCHER, format="test-format1", metadata=b"foo bar", origin=ORIGIN_URL, revision=REVISION_SWHID, ), RawExtrinsicMetadata( type=MetadataTargetType.DIRECTORY, target=DIRECTORY_SWHID, discovery_date=DISCOVERY_DATE + datetime.timedelta(seconds=1), authority=AUTHORITY, fetcher=FETCHER, format="test-format2", metadata=b"bar baz", origin=ORIGIN_URL, revision=REVISION_SWHID, ), ] ORIGIN_METADATA = [ RawExtrinsicMetadata( type=MetadataTargetType.ORIGIN, target=ORIGIN_URL, discovery_date=datetime.datetime.now(tz=datetime.timezone.utc), authority=AUTHORITY, fetcher=FETCHER, format="test-format3", metadata=b"baz qux", ), ] class MetadataTestLoader(PackageLoader[BasePackageInfo]): def get_versions(self) -> Sequence[str]: return ["v1.0.0"] def _load_directory(self, dl_artifacts, tmpdir): class directory: hash = DIRECTORY_ID return (None, directory) # just enough for _load_revision to work def download_package(self, p_info: BasePackageInfo, tmpdir: str): return [("path", {"artifact_key": "value", "length": 0})] def build_revision( self, p_info: BasePackageInfo, uncompressed_path: str, directory: Sha1Git ): return Revision( id=REVISION_ID, message=b"", author=Person.from_fullname(b""), committer=Person.from_fullname(b""), date=None, committer_date=None, 
type=RevisionType.TAR, directory=DIRECTORY_ID, synthetic=False, ) def get_metadata_authority(self): return attr.evolve(AUTHORITY, metadata={}) def get_package_info(self, version: str) -> Iterator[Tuple[str, BasePackageInfo]]: m0 = DIRECTORY_METADATA[0] m1 = DIRECTORY_METADATA[1] p_info = BasePackageInfo( url=ORIGIN_URL, filename="archive.tgz", directory_extrinsic_metadata=[ RawExtrinsicMetadataCore(m0.format, m0.metadata, m0.discovery_date), RawExtrinsicMetadataCore(m1.format, m1.metadata, m1.discovery_date), ], ) yield (version, p_info) def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: m = ORIGIN_METADATA[0] return [RawExtrinsicMetadataCore(m.format, m.metadata, m.discovery_date)] -def test_load_artifact_metadata(swh_config, caplog): - storage = get_storage("memory") - - loader = MetadataTestLoader(ORIGIN_URL) - loader.storage = storage +def test_load_artifact_metadata(swh_storage, caplog): + loader = MetadataTestLoader(swh_storage, ORIGIN_URL) load_status = loader.load() assert load_status == { "status": "eventful", "snapshot_id": FULL_SNAPSHOT_ID, } authority = MetadataAuthority( type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/", ) - result = storage.raw_extrinsic_metadata_get( + result = swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, authority, ) assert result.next_page_token is None assert len(result.results) == 1 assert result.results[0] == RawExtrinsicMetadata( type=MetadataTargetType.DIRECTORY, target=DIRECTORY_SWHID, discovery_date=result.results[0].discovery_date, authority=authority, fetcher=FETCHER, format="original-artifacts-json", metadata=b'[{"artifact_key": "value", "length": 0}]', origin=ORIGIN_URL, revision=REVISION_SWHID, ) -def test_load_metadata(swh_config, caplog): - storage = get_storage("memory") - - loader = MetadataTestLoader(ORIGIN_URL) - loader.storage = storage +def test_load_metadata(swh_storage, caplog): + loader = MetadataTestLoader(swh_storage, 
ORIGIN_URL) load_status = loader.load() assert load_status == { "status": "eventful", "snapshot_id": FULL_SNAPSHOT_ID, } - result = storage.raw_extrinsic_metadata_get( + result = swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, AUTHORITY, ) assert result.next_page_token is None assert result.results == DIRECTORY_METADATA - result = storage.raw_extrinsic_metadata_get( + result = swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.ORIGIN, ORIGIN_URL, AUTHORITY, ) assert result.next_page_token is None assert result.results == ORIGIN_METADATA assert caplog.text == "" -def test_existing_authority(swh_config, caplog): - storage = get_storage("memory") - - loader = MetadataTestLoader(ORIGIN_URL) - loader.storage = storage - loader.config["create_authorities"] = False - - storage.metadata_authority_add([attr.evolve(AUTHORITY, metadata={})]) +def test_existing_authority(swh_storage, caplog): + loader = MetadataTestLoader(swh_storage, ORIGIN_URL) load_status = loader.load() assert load_status == { "status": "eventful", "snapshot_id": FULL_SNAPSHOT_ID, } - result = storage.raw_extrinsic_metadata_get( + result = swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, AUTHORITY, ) assert result.next_page_token is None assert result.results == DIRECTORY_METADATA assert caplog.text == "" -def test_existing_fetcher(swh_config, caplog): - storage = get_storage("memory") - - loader = MetadataTestLoader(ORIGIN_URL) - loader.storage = storage - loader.config["create_fetchers"] = False - - storage.metadata_fetcher_add([attr.evolve(FETCHER, metadata={})]) +def test_existing_fetcher(swh_storage, caplog): + loader = MetadataTestLoader(swh_storage, ORIGIN_URL) load_status = loader.load() assert load_status == { "status": "eventful", "snapshot_id": FULL_SNAPSHOT_ID, } - result = storage.raw_extrinsic_metadata_get( + result = swh_storage.raw_extrinsic_metadata_get( MetadataTargetType.DIRECTORY, DIRECTORY_SWHID, 
AUTHORITY, ) assert result.next_page_token is None assert result.results == DIRECTORY_METADATA assert caplog.text == "" diff --git a/swh/loader/pytest_plugin.py b/swh/loader/pytest_plugin.py index 5626038..500740c 100644 --- a/swh/loader/pytest_plugin.py +++ b/swh/loader/pytest_plugin.py @@ -1,48 +1,54 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from typing import Any, Dict import pytest import yaml @pytest.fixture -def swh_loader_config(swh_storage_postgresql) -> Dict[str, Any]: +def swh_storage_backend_config(swh_storage_postgresql) -> Dict[str, Any]: return { + "cls": "retry", "storage": { - "cls": "pipeline", - "steps": [ - {"cls": "retry"}, - {"cls": "filter"}, - {"cls": "buffer"}, - { + "cls": "filter", + "storage": { + "cls": "buffer", + "storage": { "cls": "local", "db": swh_storage_postgresql.dsn, "objstorage": {"cls": "memory"}, }, - ], + }, }, } @pytest.fixture -def swh_config(swh_loader_config, monkeypatch, tmp_path): +def swh_loader_config(swh_storage_backend_config) -> Dict[str, Any]: + return { + "storage": swh_storage_backend_config, + } + + +@pytest.fixture +def swh_config(swh_loader_config, monkeypatch, tmp_path) -> str: conffile = os.path.join(str(tmp_path), "loader.yml") with open(conffile, "w") as f: f.write(yaml.dump(swh_loader_config)) - monkeypatch.setenv("SWH_CONFIG_FILENAME", conffile) + monkeypatch.setenv("SWH_CONFIG_FILENAME", conffile) return conffile @pytest.fixture(autouse=True, scope="session") def swh_proxy(): """Automatically inject this fixture in all tests to ensure no outside connection takes place. 
""" os.environ["http_proxy"] = "http://localhost:999" os.environ["https_proxy"] = "http://localhost:999" diff --git a/swh/loader/tests/conftest.py b/swh/loader/tests/conftest.py index be4f594..6639b5d 100644 --- a/swh/loader/tests/conftest.py +++ b/swh/loader/tests/conftest.py @@ -1,19 +1,19 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Dict import pytest @pytest.fixture def swh_loader_config() -> Dict[str, Any]: return { - "storage": {"cls": "pipeline", "steps": [{"cls": "memory",},],}, + "storage": {"cls": "memory",}, "deposit": { "url": "https://deposit.softwareheritage.org/1/private", "auth": {"username": "user", "password": "pass",}, }, } diff --git a/swh/loader/tests/test_cli.py b/swh/loader/tests/test_cli.py index b24da2a..b584deb 100644 --- a/swh/loader/tests/test_cli.py +++ b/swh/loader/tests/test_cli.py @@ -1,131 +1,148 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime +import os from click.formatting import HelpFormatter from click.testing import CliRunner import pytest +import yaml -from swh.loader.cli import SUPPORTED_LOADERS, get_loader, list, run +from swh.loader.cli import SUPPORTED_LOADERS, get_loader +from swh.loader.cli import loader as loader_cli from swh.loader.package.loader import PackageLoader def test_get_loader_wrong_input(swh_config): """Unsupported loader should raise """ loader_type = "unknown" assert loader_type not in SUPPORTED_LOADERS with pytest.raises(ValueError, 
match="Invalid loader"): get_loader(loader_type, url="db-url") -def test_get_loader(swh_config): +def test_get_loader(swh_loader_config): """Instantiating a supported loader should be ok """ loader_input = { - "archive": {"url": "some-url", "artifacts": [],}, + "archive": {"url": "some-url", "artifacts": []}, "debian": {"url": "some-url", "date": "something", "packages": [],}, - "deposit": {"url": "some-url", "deposit_id": 1,}, "npm": {"url": "https://www.npmjs.com/package/onepackage",}, "pypi": {"url": "some-url",}, } for loader_type, kwargs in loader_input.items(): + kwargs["storage"] = swh_loader_config["storage"] loader = get_loader(loader_type, **kwargs) assert isinstance(loader, PackageLoader) def _write_usage(command, args, max_width=80): hf = HelpFormatter(width=max_width) hf.write_usage(command, args) return hf.getvalue()[:-1] def test_run_help(swh_config): """Help message should be ok """ runner = CliRunner() - result = runner.invoke(run, ["-h"]) + + result = runner.invoke(loader_cli, ["run", "-h"]) assert result.exit_code == 0 usage_prefix = _write_usage( - "run", f"[OPTIONS] [{'|'.join(SUPPORTED_LOADERS)}] URL [OPTIONS]..." + "loader", f"run [OPTIONS] [{'|'.join(SUPPORTED_LOADERS)}]\n" ) - expected_help_msg = f"""{usage_prefix} + assert result.output.startswith(usage_prefix) - Ingest with loader the origin located at -Options: - -h, --help Show this message and exit. 
-""" - assert result.output.startswith(expected_help_msg) +def test_run_with_configuration_failure(tmp_path): + """Triggering a load should fail since configuration is incomplete + + """ + runner = CliRunner() + + conf_path = os.path.join(str(tmp_path), "cli.yml") + with open(conf_path, "w") as f: + f.write(yaml.dump({})) + + with pytest.raises(ValueError, match="Missing storage"): + runner.invoke( + loader_cli, ["-C", conf_path, "run", "pypi", "url=https://some-url",], + catch_exceptions=False + ) def test_run_pypi(mocker, swh_config): """Triggering a load should be ok """ - mock_loader = mocker.patch("swh.loader.package.pypi.loader.PyPILoader") + mock_loader = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load") runner = CliRunner() - result = runner.invoke(run, ["pypi", "https://some-url"]) + result = runner.invoke( + loader_cli, ["-C", swh_config, "run", "pypi", "url=https://some-url",] + ) assert result.exit_code == 0 - mock_loader.assert_called_once_with(url="https://some-url") # constructor + mock_loader.assert_called_once_with() def test_run_with_visit_date(mocker, swh_config): """iso visit_date parameter should be parsed as datetime """ mock_loader = mocker.patch("swh.loader.cli.get_loader") runner = CliRunner() input_date = "2016-05-03 15:16:32+00" result = runner.invoke( - run, ["npm", "https://some-url", f"visit_date='{input_date}'"] + loader_cli, ["run", "npm", "https://some-url", f"visit_date='{input_date}'"] ) assert result.exit_code == 0 expected_parsed_date = datetime.datetime( 2016, 5, 3, 15, 16, 32, tzinfo=datetime.timezone.utc ) mock_loader.assert_called_once_with( - "npm", url="https://some-url", visit_date=expected_parsed_date + "npm", + storage={"cls": "memory"}, + url="https://some-url", + visit_date=expected_parsed_date, ) def test_list_help(mocker, swh_config): """Triggering a load should be ok """ runner = CliRunner() - result = runner.invoke(list, ["--help"]) + result = runner.invoke(loader_cli, ["list", "--help"]) assert 
result.exit_code == 0 - usage_prefix = _write_usage( - "list", f"[OPTIONS] [[{'|'.join(['all'] + SUPPORTED_LOADERS)}]]" - ) + usage_prefix = _write_usage("loader", "list [OPTIONS]\n") expected_help_msg = f"""{usage_prefix} + [[{'|'.join(['all'] + SUPPORTED_LOADERS)}]] List supported loaders and optionally their arguments Options: -h, --help Show this message and exit. """ assert result.output.startswith(expected_help_msg) def test_list_help_npm(mocker, swh_config): """Triggering a load should be ok """ runner = CliRunner() - result = runner.invoke(list, ["npm"]) + result = runner.invoke(loader_cli, ["list", "npm"]) assert result.exit_code == 0 expected_help_msg = """ Loader: Load npm origin's artifact releases into swh archive. -signature: (url: str) """ assert result.output.startswith(expected_help_msg[1:]) diff --git a/swh/loader/tests/test_init.py b/swh/loader/tests/test_init.py index 0f83780..6c93bd1 100644 --- a/swh/loader/tests/test_init.py +++ b/swh/loader/tests/test_init.py @@ -1,507 +1,516 @@ -# Copyright (C) 2020 The Software Heritage developers +# Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import os import subprocess import attr import pytest from swh.loader.tests import ( InconsistentAliasBranchError, InexistentObjectsError, assert_last_visit_matches, check_snapshot, encode_target, prepare_repository_from_archive, ) from swh.model.from_disk import DentryPerms from swh.model.hashutil import hash_to_bytes from swh.model.model import ( Content, Directory, DirectoryEntry, ObjectType, OriginVisit, OriginVisitStatus, Person, Release, Revision, RevisionType, Snapshot, SnapshotBranch, TargetType, Timestamp, TimestampWithTimezone, ) hash_hex = "43e45d56f88993aae6a0198013efa80716fd8920" ORIGIN_VISIT = OriginVisit( origin="some-url", visit=1, 
date=datetime.datetime.now(tz=datetime.timezone.utc), type="archive", ) ORIGIN_VISIT_STATUS = OriginVisitStatus( origin="some-url", visit=1, type="archive", date=datetime.datetime.now(tz=datetime.timezone.utc), status="full", snapshot=hash_to_bytes("d81cc0710eb6cf9efd5b920a8453e1e07157b6cd"), metadata=None, ) CONTENT = Content( data=b"42\n", length=3, sha1=hash_to_bytes("34973274ccef6ab4dfaaf86599792fa9c3fe4689"), sha1_git=hash_to_bytes("d81cc0710eb6cf9efd5b920a8453e1e07157b6cd"), sha256=hash_to_bytes( "673650f936cb3b0a2f93ce09d81be10748b1b203c19e8176b4eefc1964a0cf3a" ), blake2s256=hash_to_bytes( "d5fe1939576527e42cfd76a9455a2432fe7f56669564577dd93c4280e76d661d" ), status="visible", ) DIRECTORY = Directory( id=hash_to_bytes("34f335a750111ca0a8b64d8034faec9eedc396be"), entries=tuple( [ DirectoryEntry( name=b"foo", type="file", target=CONTENT.sha1_git, perms=DentryPerms.content, ) ] ), ) REVISION = Revision( id=hash_to_bytes("066b1b62dbfa033362092af468bf6cfabec230e7"), message=b"hello", author=Person( name=b"Nicolas Dandrimont", email=b"nicolas@example.com", fullname=b"Nicolas Dandrimont ", ), date=TimestampWithTimezone( timestamp=Timestamp(seconds=1234567890, microseconds=0), offset=120, negative_utc=False, ), committer=Person( name=b"St\xc3fano Zacchiroli", email=b"stefano@example.com", fullname=b"St\xc3fano Zacchiroli ", ), committer_date=TimestampWithTimezone( timestamp=Timestamp(seconds=1123456789, microseconds=0), offset=0, negative_utc=True, ), parents=(), type=RevisionType.GIT, directory=DIRECTORY.id, metadata={ "checksums": {"sha1": "tarball-sha1", "sha256": "tarball-sha256",}, "signed-off-by": "some-dude", }, extra_headers=( (b"gpgsig", b"test123"), (b"mergetag", b"foo\\bar"), (b"mergetag", b"\x22\xaf\x89\x80\x01\x00"), ), synthetic=True, ) RELEASE = Release( id=hash_to_bytes("3e9050196aa288264f2a9d279d6abab8b158448b"), name=b"v0.0.2", author=Person( name=b"tony", email=b"tony@ardumont.fr", fullname=b"tony ", ), date=TimestampWithTimezone( 
timestamp=Timestamp(seconds=1634336813, microseconds=0), offset=0, negative_utc=False, ), target=REVISION.id, target_type=ObjectType.REVISION, message=b"yet another synthetic release", synthetic=True, ) SNAPSHOT = Snapshot( id=hash_to_bytes("2498dbf535f882bc7f9a18fb16c9ad27fda7bab7"), branches={ b"release/0.1.0": SnapshotBranch( target=RELEASE.id, target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch(target=REVISION.id, target_type=TargetType.REVISION,), b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,), b"evaluation": SnapshotBranch( # branch dedicated to not exist in storage target=hash_to_bytes("cc4e04c26672dd74e5fd0fecb78b435fb55368f7"), target_type=TargetType.REVISION, ), }, ) +@pytest.fixture +def swh_storage_backend_config(swh_storage_postgresql): + return { + "cls": "local", + "db": swh_storage_postgresql.dsn, + "objstorage": {"cls": "memory"}, + } + + @pytest.fixture def mock_storage(mocker): mock_storage = mocker.patch("swh.loader.tests.origin_get_latest_visit_status") mock_storage.return_value = ORIGIN_VISIT_STATUS return mock_storage def test_assert_last_visit_matches_raise(mock_storage, mocker): """Not finding origin visit_and_statu should raise """ # overwrite so we raise because we do not find the right visit mock_storage.return_value = None with pytest.raises(AssertionError, match="Origin url has no visits"): assert_last_visit_matches(mock_storage, "url", status="full") assert mock_storage.called is True def test_assert_last_visit_matches_wrong_status(mock_storage, mocker): """Wrong visit detected should raise AssertionError """ expected_status = "partial" assert ORIGIN_VISIT_STATUS.status != expected_status with pytest.raises(AssertionError, match="Visit_status has status"): assert_last_visit_matches(mock_storage, "url", status=expected_status) assert mock_storage.called is True def test_assert_last_visit_matches_wrong_type(mock_storage, mocker): """Wrong visit detected should raise AssertionError """ expected_type = 
"git" assert ORIGIN_VISIT.type != expected_type with pytest.raises(AssertionError, match="Visit has type"): assert_last_visit_matches( mock_storage, "url", status=ORIGIN_VISIT_STATUS.status, type=expected_type, # mismatched type will raise ) assert mock_storage.called is True def test_assert_last_visit_matches_wrong_snapshot(mock_storage, mocker): """Wrong visit detected should raise AssertionError """ expected_snapshot_id = hash_to_bytes("e92cc0710eb6cf9efd5b920a8453e1e07157b6cd") assert ORIGIN_VISIT_STATUS.snapshot != expected_snapshot_id with pytest.raises(AssertionError, match="Visit_status points to snapshot"): assert_last_visit_matches( mock_storage, "url", status=ORIGIN_VISIT_STATUS.status, snapshot=expected_snapshot_id, # mismatched snapshot will raise ) assert mock_storage.called is True def test_assert_last_visit_matches(mock_storage, mocker): """Correct visit detected should return the visit_status """ visit_type = ORIGIN_VISIT.type visit_status = ORIGIN_VISIT_STATUS.status visit_snapshot = ORIGIN_VISIT_STATUS.snapshot actual_visit_status = assert_last_visit_matches( mock_storage, "url", type=visit_type, status=visit_status, snapshot=visit_snapshot, ) assert actual_visit_status == ORIGIN_VISIT_STATUS assert mock_storage.called is True def test_prepare_repository_from_archive_failure(): # does not deal with inexistent archive so raise assert os.path.exists("unknown-archive") is False with pytest.raises(subprocess.CalledProcessError, match="exit status 2"): prepare_repository_from_archive("unknown-archive") def test_prepare_repository_from_archive(datadir, tmp_path): archive_name = "0805nexter-1.1.0" archive_path = os.path.join(str(datadir), f"{archive_name}.tar.gz") assert os.path.exists(archive_path) is True tmp_path = str(tmp_path) # deals with path string repo_url = prepare_repository_from_archive( archive_path, filename=archive_name, tmp_path=tmp_path ) expected_uncompressed_archive_path = os.path.join(tmp_path, archive_name) assert repo_url == 
f"file://{expected_uncompressed_archive_path}" assert os.path.exists(expected_uncompressed_archive_path) def test_prepare_repository_from_archive_no_filename(datadir, tmp_path): archive_name = "0805nexter-1.1.0" archive_path = os.path.join(str(datadir), f"{archive_name}.tar.gz") assert os.path.exists(archive_path) is True # deals with path as posix path (for tmp_path) repo_url = prepare_repository_from_archive(archive_path, tmp_path=tmp_path) tmp_path = str(tmp_path) expected_uncompressed_archive_path = os.path.join(tmp_path, archive_name) expected_repo_url = os.path.join(tmp_path, f"{archive_name}.tar.gz") assert repo_url == f"file://{expected_repo_url}" # passing along the filename does not influence the on-disk extraction # just the repo-url computation assert os.path.exists(expected_uncompressed_archive_path) def test_encode_target(): assert encode_target(None) is None for target_alias in ["something", b"something"]: target = { "target_type": "alias", "target": target_alias, } actual_alias_encode_target = encode_target(target) assert actual_alias_encode_target == { "target_type": "alias", "target": b"something", } for hash_ in [hash_hex, hash_to_bytes(hash_hex)]: target = {"target_type": "revision", "target": hash_} actual_encode_target = encode_target(target) assert actual_encode_target == { "target_type": "revision", "target": hash_to_bytes(hash_hex), } def test_check_snapshot(swh_storage): """Everything should be fine when snapshot is found and the snapshot reference up to the revision exist in the storage. 
""" # Create a consistent snapshot arborescence tree in storage found = False for entry in DIRECTORY.entries: if entry.target == CONTENT.sha1_git: found = True break assert found is True assert REVISION.directory == DIRECTORY.id assert RELEASE.target == REVISION.id for branch, target in SNAPSHOT.branches.items(): if branch == b"alias": assert target.target in SNAPSHOT.branches elif branch == b"evaluation": # this one does not exist and we are safelisting its check below continue else: assert target.target in [REVISION.id, RELEASE.id] swh_storage.content_add([CONTENT]) swh_storage.directory_add([DIRECTORY]) swh_storage.revision_add([REVISION]) swh_storage.release_add([RELEASE]) s = swh_storage.snapshot_add([SNAPSHOT]) assert s == { "snapshot:add": 1, } # all should be fine! check_snapshot( SNAPSHOT, swh_storage, allowed_empty=[(TargetType.REVISION, b"evaluation")] ) def test_check_snapshot_failures(swh_storage): """Failure scenarios: 0. snapshot parameter is not a snapshot 1. snapshot id is correct but branches mismatched 2. snapshot id is not correct, it's not found in the storage 3. snapshot reference an alias which does not exist 4. snapshot is found in storage, targeted revision does not exist 5. snapshot is found in storage, targeted revision exists but the directory the revision targets does not exist 6. snapshot is found in storage, target revision exists, targeted directory by the revision exist. Content targeted by the directory does not exist. 7. snapshot is found in storage, targeted release does not exist """ snap_id_hex = "2498dbf535f882bc7f9a18fb16c9ad27fda7bab7" snapshot = Snapshot( id=hash_to_bytes(snap_id_hex), branches={ b"master": SnapshotBranch( target=hash_to_bytes(hash_hex), target_type=TargetType.REVISION, ), }, ) s = swh_storage.snapshot_add([snapshot]) assert s == { "snapshot:add": 1, } unexpected_snapshot = Snapshot( branches={ b"tip": SnapshotBranch( # wrong branch target=hash_to_bytes(hash_hex), target_type=TargetType.RELEASE ) }, ) # 0. 
not a Snapshot object, raise! with pytest.raises(AssertionError, match="variable 'snapshot' must be a snapshot"): check_snapshot(ORIGIN_VISIT, swh_storage) # 1. snapshot id is correct but branches mismatched with pytest.raises(AssertionError): # sadly debian build raises only assertion check_snapshot(attr.evolve(unexpected_snapshot, id=snapshot.id), swh_storage) # 2. snapshot id is not correct, it's not found in the storage wrong_snap_id = hash_to_bytes("999666f535f882bc7f9a18fb16c9ad27fda7bab7") with pytest.raises(AssertionError, match="is not found"): check_snapshot(attr.evolve(unexpected_snapshot, id=wrong_snap_id), swh_storage) # 3. snapshot references an inexistent alias snapshot0 = Snapshot( id=hash_to_bytes("123666f535f882bc7f9a18fb16c9ad27fda7bab7"), branches={ b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,), }, ) swh_storage.snapshot_add([snapshot0]) with pytest.raises(InconsistentAliasBranchError, match="Alias branch HEAD"): check_snapshot(snapshot0, swh_storage) # 4. snapshot is found in storage, targeted revision does not exist rev_not_found = list(swh_storage.revision_missing([REVISION.id])) assert len(rev_not_found) == 1 snapshot1 = Snapshot( id=hash_to_bytes("456666f535f882bc7f9a18fb16c9ad27fda7bab7"), branches={ b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,), b"HEAD": SnapshotBranch( target=REVISION.id, target_type=TargetType.REVISION, ), }, ) swh_storage.snapshot_add([snapshot1]) with pytest.raises(InexistentObjectsError, match="Branch/Revision"): check_snapshot(snapshot1, swh_storage) # 5. 
snapshot is found in storage, targeted revision exists but the directory the # revision targets does not exist swh_storage.revision_add([REVISION]) dir_not_found = list(swh_storage.directory_missing([REVISION.directory])) assert len(dir_not_found) == 1 snapshot2 = Snapshot( id=hash_to_bytes("987123f535f882bc7f9a18fb16c9ad27fda7bab7"), branches={ b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,), b"HEAD": SnapshotBranch( target=REVISION.id, target_type=TargetType.REVISION, ), }, ) swh_storage.snapshot_add([snapshot2]) with pytest.raises(InexistentObjectsError, match="Missing directories"): check_snapshot(snapshot2, swh_storage) assert DIRECTORY.id == REVISION.directory swh_storage.directory_add([DIRECTORY]) # 6. snapshot is found in storage, target revision exists, targeted directory by the # revision exist. Content targeted by the directory does not exist. assert DIRECTORY.entries[0].target == CONTENT.sha1_git not_found = list(swh_storage.content_missing_per_sha1_git([CONTENT.sha1_git])) assert len(not_found) == 1 swh_storage.directory_add([DIRECTORY]) snapshot3 = Snapshot( id=hash_to_bytes("091456f535f882bc7f9a18fb16c9ad27fda7bab7"), branches={ b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,), b"HEAD": SnapshotBranch( target=REVISION.id, target_type=TargetType.REVISION, ), }, ) swh_storage.snapshot_add([snapshot3]) with pytest.raises(InexistentObjectsError, match="Missing content(s)"): check_snapshot(snapshot3, swh_storage) # 7. 
snapshot is found in storage, targeted release does not exist # release targets the revisions which exists assert RELEASE.target == REVISION.id snapshot4 = Snapshot( id=hash_to_bytes("789666f535f882bc7f9a18fb16c9ad27fda7bab7"), branches={ b"alias": SnapshotBranch(target=b"HEAD", target_type=TargetType.ALIAS,), b"HEAD": SnapshotBranch( target=REVISION.id, target_type=TargetType.REVISION, ), b"release/0.1.0": SnapshotBranch( target=RELEASE.id, target_type=TargetType.RELEASE, ), }, ) swh_storage.snapshot_add([snapshot4]) with pytest.raises(InexistentObjectsError, match="Branch/Release"): check_snapshot(snapshot4, swh_storage)