diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 58e32ff..1471b55 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,47 +1,41 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.4.0 hooks: - id: trailing-whitespace - id: flake8 - id: check-json - id: check-yaml - repo: https://github.com/codespell-project/codespell rev: v1.16.0 hooks: - id: codespell exclude: ^(swh/loader/package/.*[/]+tests/data/.*)$ - repo: local hooks: - id: mypy name: mypy entry: mypy args: [swh] pass_filenames: false language: system types: [python] +- repo: https://github.com/python/black + rev: 19.10b0 + hooks: + - id: black + # unfortunately, we are far from being able to enable this... # - repo: https://github.com/PyCQA/pydocstyle.git # rev: 4.0.0 # hooks: # - id: pydocstyle # name: pydocstyle # description: pydocstyle is a static analysis tool for checking compliance with Python docstring conventions. # entry: pydocstyle --convention=google # language: python # types: [python] -# black requires py3.6+ -#- repo: https://github.com/python/black -# rev: 19.3b0 -# hooks: -# - id: black -# language_version: python3 -#- repo: https://github.com/asottile/blacken-docs -# rev: v1.0.0-1 -# hooks: -# - id: blacken-docs -# additional_dependencies: [black==19.3b0] diff --git a/conftest.py b/conftest.py index 23c9f57..2c0fba0 100644 --- a/conftest.py +++ b/conftest.py @@ -1,76 +1,70 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import pytest import yaml from typing import Any, Dict -from swh.storage.tests.conftest import * # noqa +from swh.storage.tests.conftest import * # noqa from swh.scheduler.tests.conftest import * # noqa @pytest.fixture def swh_loader_config(swh_storage_postgresql) -> Dict[str, Any]: return { - 'storage': { - 'cls': 'pipeline', - 'steps': [ - {'cls': 'retry'}, - {'cls': 'filter'}, - {'cls': 'buffer'}, + "storage": { + "cls": "pipeline", + "steps": [ + {"cls": "retry"}, + {"cls": "filter"}, + {"cls": "buffer"}, { - 'cls': 'local', - 'args': { - 'db': swh_storage_postgresql.dsn, - 'objstorage': { - 'cls': 'memory', - 'args': {} - }, - } - } - ] + "cls": "local", + "args": { + "db": swh_storage_postgresql.dsn, + "objstorage": {"cls": "memory", "args": {}}, + }, + }, + ], }, - 'deposit': { - 'url': 'https://deposit.softwareheritage.org/1/private', - 'auth': { - 'username': 'user', - 'password': 'pass', - } + "deposit": { + "url": "https://deposit.softwareheritage.org/1/private", + "auth": {"username": "user", "password": "pass",}, }, } @pytest.fixture def swh_config(swh_loader_config, monkeypatch, tmp_path): - conffile = os.path.join(str(tmp_path), 'loader.yml') - with open(conffile, 'w') as f: + conffile = os.path.join(str(tmp_path), "loader.yml") + with open(conffile, "w") as f: f.write(yaml.dump(swh_loader_config)) - monkeypatch.setenv('SWH_CONFIG_FILENAME', conffile) + monkeypatch.setenv("SWH_CONFIG_FILENAME", conffile) return conffile -@pytest.fixture(autouse=True, scope='session') +@pytest.fixture(autouse=True, scope="session") def swh_proxy(): """Automatically inject this fixture in all tests to ensure no outside connection takes place. 
""" - os.environ['http_proxy'] = 'http://localhost:999' - os.environ['https_proxy'] = 'http://localhost:999' + os.environ["http_proxy"] = "http://localhost:999" + os.environ["https_proxy"] = "http://localhost:999" -@pytest.fixture(scope='session') # type: ignore # expected redefinition +@pytest.fixture(scope="session") # type: ignore # expected redefinition def celery_includes(): return [ - 'swh.loader.package.archive.tasks', - 'swh.loader.package.cran.tasks', - 'swh.loader.package.debian.tasks', - 'swh.loader.package.deposit.tasks', - 'swh.loader.package.npm.tasks', - 'swh.loader.package.pypi.tasks', - 'swh.loader.package.nixguix.tasks', + "swh.loader.package.archive.tasks", + "swh.loader.package.cran.tasks", + "swh.loader.package.debian.tasks", + "swh.loader.package.deposit.tasks", + "swh.loader.package.npm.tasks", + "swh.loader.package.pypi.tasks", + "swh.loader.package.nixguix.tasks", ] diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..8d79b7e --- /dev/null +++ b/setup.cfg @@ -0,0 +1,6 @@ +[flake8] +# E203: whitespaces before ':' +# E231: missing whitespace after ',' +# W503: line break before binary operator +ignore = E203,E231,W503 +max-line-length = 88 diff --git a/setup.py b/setup.py index 620202a..ccacdd6 100755 --- a/setup.py +++ b/setup.py @@ -1,77 +1,77 @@ #!/usr/bin/env python3 # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from setuptools import setup, find_packages from os import path from io import open here = path.abspath(path.dirname(__file__)) # Get the long description from the README file -with open(path.join(here, 'README.md'), encoding='utf-8') as f: +with open(path.join(here, "README.md"), encoding="utf-8") as f: long_description = f.read() def parse_requirements(name=None): if name: - reqf = 'requirements-%s.txt' % name + reqf = "requirements-%s.txt" % name else: - reqf = 'requirements.txt' + reqf = "requirements.txt" requirements = [] if not path.exists(reqf): return requirements with open(reqf) as f: for line in f.readlines(): line = line.strip() - if not line or line.startswith('#'): + if not line or line.startswith("#"): continue requirements.append(line) return requirements setup( - name='swh.loader.core', - description='Software Heritage Base Loader', + name="swh.loader.core", + description="Software Heritage Base Loader", long_description=long_description, - long_description_content_type='text/markdown', - author='Software Heritage developers', - author_email='swh-devel@inria.fr', - url='https://forge.softwareheritage.org/diffusion/DLDBASE', + long_description_content_type="text/markdown", + author="Software Heritage developers", + author_email="swh-devel@inria.fr", + url="https://forge.softwareheritage.org/diffusion/DLDBASE", packages=find_packages(), # packages's modules - scripts=[], # scripts to package - install_requires=parse_requirements() + parse_requirements('swh'), - setup_requires=['vcversioner'], - extras_require={'testing': parse_requirements('test')}, + scripts=[], # scripts to package + install_requires=parse_requirements() + parse_requirements("swh"), + setup_requires=["vcversioner"], + extras_require={"testing": parse_requirements("test")}, vcversioner={}, include_package_data=True, - entry_points=''' + entry_points=""" [swh.workers] loader.archive=swh.loader.package.archive:register 
loader.cran=swh.loader.package.cran:register loader.debian=swh.loader.package.debian:register loader.deposit=swh.loader.package.deposit:register loader.nixguix=swh.loader.package.nixguix:register loader.npm=swh.loader.package.npm:register loader.pypi=swh.loader.package.pypi:register [swh.cli.subcommands] loader=swh.loader.cli:loader - ''', + """, classifiers=[ "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", "Operating System :: OS Independent", "Development Status :: 5 - Production/Stable", ], project_urls={ - 'Bug Reports': 'https://forge.softwareheritage.org/maniphest', - 'Funding': 'https://www.softwareheritage.org/donate', - 'Source': 'https://forge.softwareheritage.org/source/swh-loader-core', + "Bug Reports": "https://forge.softwareheritage.org/maniphest", + "Funding": "https://www.softwareheritage.org/donate", + "Source": "https://forge.softwareheritage.org/source/swh-loader-core", }, ) diff --git a/swh/loader/cli.py b/swh/loader/cli.py index 12a0e6b..a2b341d 100644 --- a/swh/loader/cli.py +++ b/swh/loader/cli.py @@ -1,90 +1,91 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import inspect import logging import click import pkg_resources from typing import Any from swh.core.cli import CONTEXT_SETTINGS from swh.scheduler.cli.utils import parse_options logger = logging.getLogger(__name__) -LOADERS = {entry_point.name.split('.', 1)[1]: entry_point - for entry_point in pkg_resources.iter_entry_points('swh.workers') - if entry_point.name.split('.', 1)[0] == 'loader'} +LOADERS = { + entry_point.name.split(".", 1)[1]: entry_point + for entry_point in pkg_resources.iter_entry_points("swh.workers") + if entry_point.name.split(".", 1)[0] == "loader" +} SUPPORTED_LOADERS = list(LOADERS) def get_loader(name: str, **kwargs) -> Any: """Given a loader name, instantiate it. Args: name: Loader's name kwargs: Configuration dict (url...) 
Returns: An instantiated loader """ if name not in LOADERS: raise ValueError( - 'Invalid loader %s: only supported loaders are %s' % - (name, SUPPORTED_LOADERS)) + "Invalid loader %s: only supported loaders are %s" + % (name, SUPPORTED_LOADERS) + ) registry_entry = LOADERS[name].load()() - logger.debug(f'registry: {registry_entry}') - loader_cls = registry_entry['loader'] - logger.debug(f'loader class: {loader_cls}') + logger.debug(f"registry: {registry_entry}") + loader_cls = registry_entry["loader"] + logger.debug(f"loader class: {loader_cls}") return loader_cls(**kwargs) -@click.group(name='loader', context_settings=CONTEXT_SETTINGS) +@click.group(name="loader", context_settings=CONTEXT_SETTINGS) @click.pass_context def loader(ctx): """Loader cli tools """ pass -@loader.command(name='run', context_settings=CONTEXT_SETTINGS) -@click.argument('type', - type=click.Choice(SUPPORTED_LOADERS)) -@click.argument('url') -@click.argument('options', nargs=-1) +@loader.command(name="run", context_settings=CONTEXT_SETTINGS) +@click.argument("type", type=click.Choice(SUPPORTED_LOADERS)) +@click.argument("url") +@click.argument("options", nargs=-1) @click.pass_context def run(ctx, type, url, options): """Ingest with loader the origin located at """ (_, kw) = parse_options(options) - logger.debug(f'kw: {kw}') + logger.debug(f"kw: {kw}") loader = get_loader(type, url=url, **kw) result = loader.load() click.echo(result) -@loader.command(name='list', context_settings=CONTEXT_SETTINGS) -@click.argument('type', default='all', - type=click.Choice(['all'] + SUPPORTED_LOADERS)) +@loader.command(name="list", context_settings=CONTEXT_SETTINGS) +@click.argument("type", default="all", type=click.Choice(["all"] + SUPPORTED_LOADERS)) @click.pass_context def list(ctx, type): """List supported loaders and optionally their arguments""" - if type == 'all': - loaders = ', '.join(SUPPORTED_LOADERS) - click.echo(f'Supported loaders: {loaders}') + if type == "all": + loaders = ", ".join(SUPPORTED_LOADERS) + click.echo(f"Supported loaders: {loaders}") else: registry_entry = LOADERS[type].load()() - loader_cls = registry_entry['loader'] + loader_cls = registry_entry["loader"] doc = inspect.getdoc(loader_cls).strip() signature = inspect.signature(loader_cls) click.echo(f"Loader: {doc}\nsignature: {signature}") diff --git a/swh/loader/core/converters.py b/swh/loader/core/converters.py index a738ec2..e9d0f12 100644 --- a/swh/loader/core/converters.py +++ b/swh/loader/core/converters.py @@ -1,77 +1,81 @@ # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """Convert objects to dictionaries suitable for swh.storage""" import logging from typing import Dict, Iterable, List, Optional, Tuple from swh.model.hashutil import hash_to_hex from swh.model.model import BaseContent, Content, SkippedContent logger = logging.getLogger(__name__) def prepare_contents( - contents: Iterable[Dict], max_content_size: Optional[int] = None, - origin_url: Optional[str] = None) -> Tuple[ - List[Dict], List[Dict]]: + contents: Iterable[Dict], + max_content_size: Optional[int] = None, + origin_url: Optional[str] = None, +) -> Tuple[List[Dict], List[Dict]]: """Prepare contents for storage from a list of contents Returns tuple of content iterable, skipped content iterable """ present_contents: List[Dict] = [] skipped_contents: List[Dict] = [] for _content in 
contents: content = content_for_storage( - _content, max_content_size=max_content_size, origin_url=origin_url) + _content, max_content_size=max_content_size, origin_url=origin_url + ) if isinstance(content, SkippedContent): skipped_contents.append(content.to_dict()) else: present_contents.append(content.to_dict()) return present_contents, skipped_contents def content_for_storage( - content: Dict, max_content_size: Optional[int] = None, - origin_url: Optional[str] = None) -> BaseContent: + content: Dict, + max_content_size: Optional[int] = None, + origin_url: Optional[str] = None, +) -> BaseContent: """Prepare content to be ready for storage Note: - 'data' is returned only if max_content_size is not reached. Returns: content with added data (or reason for being missing) """ ret = content.copy() - ret.pop('perms', None) - - if max_content_size and ret['length'] > max_content_size: - logger.info('Skipping content %s, too large (%s > %s)' % - (hash_to_hex(content['sha1_git']), - ret['length'], - max_content_size)) - ret.pop('data', None) - ret.update({'status': 'absent', - 'reason': 'Content too large', - 'origin': origin_url}) + ret.pop("perms", None) + + if max_content_size and ret["length"] > max_content_size: + logger.info( + "Skipping content %s, too large (%s > %s)" + % (hash_to_hex(content["sha1_git"]), ret["length"], max_content_size) + ) + ret.pop("data", None) + ret.update( + {"status": "absent", "reason": "Content too large", "origin": origin_url} + ) return SkippedContent.from_dict(ret) - if 'data' not in ret: - with open(ret['path'], 'rb') as f: - ret['data'] = f.read() + if "data" not in ret: + with open(ret["path"], "rb") as f: + ret["data"] = f.read() # Extra keys added by swh.model.from_disk, that are not accepted # by swh-storage - ret.pop('path', None) + ret.pop("path", None) - ret['status'] = 'visible' + ret["status"] = "visible" return Content.from_dict(ret) diff --git a/swh/loader/core/loader.py b/swh/loader/core/loader.py index fd9e1eb..6012608 100644 --- a/swh/loader/core/loader.py +++ b/swh/loader/core/loader.py @@ -1,418 +1,431 @@ # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib import logging import os from abc import ABCMeta, abstractmethod from typing import Any, Dict, Iterable, Optional, Tuple, Union from swh.core import config from swh.model.model import ( - BaseContent, Content, SkippedContent, Directory, Origin, Revision, - Release, Sha1Git, Snapshot + BaseContent, + Content, + SkippedContent, + Directory, + Origin, + Revision, + Release, + Sha1Git, + Snapshot, ) from swh.storage import get_storage class BaseLoader(config.SWHConfig, metaclass=ABCMeta): """Mixin base class for loader. To use this class, you must: - inherit from this class - and implement the @abstractmethod methods: - :func:`prepare`: First step executed by the loader to prepare some state needed by the `func`:load method. - :func:`get_origin`: Retrieve the origin that is currently being loaded. - :func:`fetch_data`: Fetch the data is actually the method to implement to compute data to inject in swh (through the store_data method) - :func:`store_data`: Store data fetched. 
- :func:`visit_status`: Explicit status of the visit ('partial' or 'full') - :func:`load_status`: Explicit status of the loading, for use by the scheduler (eventful/uneventful/temporary failure/permanent failure). - :func:`cleanup`: Last step executed by the loader. The entry point for the resulting loader is :func:`load`. You can take a look at some example classes: - :class:`BaseSvnLoader` """ + CONFIG_BASE_FILENAME = None # type: Optional[str] DEFAULT_CONFIG = { - 'storage': ('dict', { - 'cls': 'remote', - 'args': { - 'url': 'http://localhost:5002/', - } - }), - - 'max_content_size': ('int', 100 * 1024 * 1024), - 'save_data': ('bool', False), - 'save_data_path': ('str', ''), - + "storage": ( + "dict", + {"cls": "remote", "args": {"url": "http://localhost:5002/",}}, + ), + "max_content_size": ("int", 100 * 1024 * 1024), + "save_data": ("bool", False), + "save_data_path": ("str", ""), } # type: Dict[str, Tuple[str, Any]] ADDITIONAL_CONFIG = {} # type: Dict[str, Tuple[str, Any]] - def __init__(self, logging_class: Optional[str] = None, - config: Dict[str, Any] = {}): + def __init__( + self, logging_class: Optional[str] = None, config: Dict[str, Any] = {} + ): if config: self.config = config else: self.config = self.parse_config_file( - additional_configs=[self.ADDITIONAL_CONFIG]) + additional_configs=[self.ADDITIONAL_CONFIG] + ) - self.storage = get_storage(**self.config['storage']) + self.storage = get_storage(**self.config["storage"]) if logging_class is None: - logging_class = '%s.%s' % (self.__class__.__module__, - self.__class__.__name__) + logging_class = "%s.%s" % ( + self.__class__.__module__, + self.__class__.__name__, + ) self.log = logging.getLogger(logging_class) - _log = logging.getLogger('requests.packages.urllib3.connectionpool') + _log = logging.getLogger("requests.packages.urllib3.connectionpool") _log.setLevel(logging.WARN) - self.max_content_size = self.config['max_content_size'] + self.max_content_size = self.config["max_content_size"] # possibly overridden in self.prepare method self.visit_date: Optional[Union[str, datetime.datetime]] = None self.origin: Optional[Origin] = None - if not hasattr(self, 'visit_type'): + if not hasattr(self, "visit_type"): self.visit_type: Optional[str] = None self.origin_metadata: Dict[str, Any] = {} # Make sure the config is sane - save_data = self.config.get('save_data') + save_data = self.config.get("save_data") if save_data: - path = self.config['save_data_path'] + path = self.config["save_data_path"] os.stat(path) if not os.access(path, os.R_OK | os.W_OK): raise PermissionError("Permission denied: %r" % path) def save_data(self) -> None: """Save the data associated to the current load""" raise NotImplementedError def get_save_data_path(self) -> str: """The path to which we archive the loader's raw data""" - if not hasattr(self, '__save_data_path'): + if not hasattr(self, "__save_data_path"): year = str(self.visit_date.year) # type: ignore assert self.origin - url = self.origin.url.encode('utf-8') + url = self.origin.url.encode("utf-8") origin_url_hash = hashlib.sha1(url).hexdigest() - path = '%s/sha1:%s/%s/%s' % ( - self.config['save_data_path'], + path = "%s/sha1:%s/%s/%s" % ( + self.config["save_data_path"], origin_url_hash[0:2], origin_url_hash, year, ) os.makedirs(path, exist_ok=True) self.__save_data_path = path return self.__save_data_path def flush(self) -> None: """Flush any potential dangling data not sent to swh-storage. Bypass the maybe_load_* methods which awaits threshold reached signal. 
We actually want to store those as we are done loading. """ - if hasattr(self.storage, 'flush'): + if hasattr(self.storage, "flush"): self.storage.flush() @abstractmethod def cleanup(self) -> None: """Last step executed by the loader. """ pass @abstractmethod def prepare_origin_visit(self, *args, **kwargs) -> None: """First step executed by the loader to prepare origin and visit references. Set/update self.origin, and optionally self.origin_url, self.visit_date. """ pass def _store_origin_visit(self) -> None: """Store origin and visit references. Sets the self.visit references. """ assert self.origin self.storage.origin_add_one(self.origin) if not self.visit_date: # now as default visit_date if not provided self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) self.visit = self.storage.origin_visit_add( - self.origin.url, self.visit_date, self.visit_type) + self.origin.url, self.visit_date, self.visit_type + ) @abstractmethod def prepare(self, *args, **kwargs) -> None: """Second step executed by the loader to prepare some state needed by the loader. """ pass def get_origin(self) -> Origin: """Get the origin that is currently being loaded. self.origin should be set in :func:`prepare_origin` Returns: dict: an origin ready to be sent to storage by :func:`origin_add_one`. """ assert self.origin return self.origin @abstractmethod def fetch_data(self) -> bool: """Fetch the data from the source the loader is currently loading (ex: git/hg/svn/... repository). Returns: a value that is interpreted as a boolean. If True, fetch_data needs to be called again to complete loading. """ pass @abstractmethod def store_data(self): """Store fetched data in the database. Should call the :func:`maybe_load_xyz` methods, which handle the bundles sent to storage, rather than send directly. """ pass def store_metadata(self) -> None: """Store fetched metadata in the database. For more information, see implementation in :class:`DepositLoader`. """ pass def load_status(self) -> Dict[str, str]: """Detailed loading status. Defaults to logging an eventful load. Returns: a dictionary that is eventually passed back as the task's result to the scheduler, allowing tuning of the task recurrence mechanism. """ return { - 'status': 'eventful', + "status": "eventful", } def post_load(self, success: bool = True) -> None: """Permit the loader to do some additional actions according to status after the loading is done. The flag success indicates the loading's status. Defaults to doing nothing. This is up to the implementer of this method to make sure this does not break. Args: success (bool): the success status of the loading """ pass def visit_status(self) -> str: """Detailed visit status. Defaults to logging a full visit. """ - return 'full' + return "full" def get_snapshot_id(self) -> Optional[Sha1Git]: """Get the snapshot id that needs to be loaded""" raise NotImplementedError def pre_cleanup(self) -> None: """As a first step, will try and check for dangling data to cleanup. This should do its best to avoid raising issues. """ pass def load(self, *args, **kwargs) -> Dict[str, str]: r"""Loading logic for the loader to follow: - 1. Call :meth:`prepare_origin_visit` to prepare the origin and visit we will associate loading data to - 2. Store the actual ``origin_visit`` to storage - 3. Call :meth:`prepare` to prepare any eventual state - 4. Call :meth:`get_origin` to get the origin we work with and store - while True: - 5. Call :meth:`fetch_data` to fetch the data to store - 6. 
Call :meth:`store_data` to store the data - 7. Call :meth:`cleanup` to clean up any eventual state put in place in :meth:`prepare` method. """ try: self.pre_cleanup() except Exception: - msg = 'Cleaning up dangling data failed! Continue loading.' + msg = "Cleaning up dangling data failed! Continue loading." self.log.warning(msg) self.prepare_origin_visit(*args, **kwargs) self._store_origin_visit() assert self.origin try: self.prepare(*args, **kwargs) while True: more_data_to_fetch = self.fetch_data() self.store_data() if not more_data_to_fetch: break self.store_metadata() self.storage.origin_visit_update( - self.origin.url, self.visit.visit, self.visit_status(), - snapshot=self.get_snapshot_id() + self.origin.url, + self.visit.visit, + self.visit_status(), + snapshot=self.get_snapshot_id(), ) self.post_load() except Exception: - self.log.exception('Loading failure, updating to `partial` status', - extra={ - 'swh_task_args': args, - 'swh_task_kwargs': kwargs, - }) + self.log.exception( + "Loading failure, updating to `partial` status", + extra={"swh_task_args": args, "swh_task_kwargs": kwargs,}, + ) self.storage.origin_visit_update( - self.origin.url, self.visit.visit, 'partial', - snapshot=self.get_snapshot_id() + self.origin.url, + self.visit.visit, + "partial", + snapshot=self.get_snapshot_id(), ) self.post_load(success=False) - return {'status': 'failed'} + return {"status": "failed"} finally: self.flush() self.cleanup() return self.load_status() class DVCSLoader(BaseLoader): """This base class is a pattern for dvcs loaders (e.g. git, mercurial). Those loaders are able to load all the data in one go. For example, the loader defined in swh-loader-git :class:`BulkUpdater`. For other loaders (stateful one, (e.g :class:`SWHSvnLoader`), inherit directly from :class:`BaseLoader`. 
""" + ADDITIONAL_CONFIG = {} # type: Dict[str, Tuple[str, Any]] def cleanup(self) -> None: """Clean up an eventual state installed for computations.""" pass def has_contents(self) -> bool: """Checks whether we need to load contents""" return True def get_contents(self) -> Iterable[BaseContent]: """Get the contents that need to be loaded""" raise NotImplementedError def has_directories(self) -> bool: """Checks whether we need to load directories""" return True def get_directories(self) -> Iterable[Directory]: """Get the directories that need to be loaded""" raise NotImplementedError def has_revisions(self) -> bool: """Checks whether we need to load revisions""" return True def get_revisions(self) -> Iterable[Revision]: """Get the revisions that need to be loaded""" raise NotImplementedError def has_releases(self) -> bool: """Checks whether we need to load releases""" return True def get_releases(self) -> Iterable[Release]: """Get the releases that need to be loaded""" raise NotImplementedError def get_snapshot(self) -> Snapshot: """Get the snapshot that needs to be loaded""" raise NotImplementedError def get_snapshot_id(self) -> Optional[Sha1Git]: snapshot = self.get_snapshot() return snapshot.id if snapshot else None def eventful(self) -> bool: """Whether the load was eventful""" raise NotImplementedError def store_data(self) -> None: assert self.origin - if self.config['save_data']: + if self.config["save_data"]: self.save_data() if self.has_contents(): contents = [] skipped_contents = [] for obj in self.get_contents(): if isinstance(obj, Content): contents.append(obj) elif isinstance(obj, SkippedContent): skipped_contents.append(obj) else: - raise TypeError(f'Unexpected content type: {obj}') + raise TypeError(f"Unexpected content type: {obj}") self.storage.skipped_content_add(skipped_contents) self.storage.content_add(contents) if self.has_directories(): self.storage.directory_add(self.get_directories()) if self.has_revisions(): self.storage.revision_add(self.get_revisions()) if self.has_releases(): self.storage.release_add(self.get_releases()) self.flush() # to ensure the snapshot targets existing objects snapshot = self.get_snapshot() self.storage.snapshot_add([snapshot]) self.flush() diff --git a/swh/loader/core/tests/__init__.py b/swh/loader/core/tests/__init__.py index a2e0d6a..bf2e584 100644 --- a/swh/loader/core/tests/__init__.py +++ b/swh/loader/core/tests/__init__.py @@ -1,218 +1,234 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import pytest import shutil import subprocess import tempfile from unittest import TestCase from swh.model import hashutil from swh.model.hashutil import hash_to_bytes class BaseLoaderStorageTest: def _assertCountEqual(self, type, expected_length, msg=None): """Check typed 'type' state to have the same expected length. 
""" self.storage.refresh_stat_counters() - self.assertEqual(self.storage.stat_counters()[type], - expected_length, msg=msg) + self.assertEqual(self.storage.stat_counters()[type], expected_length, msg=msg) def assertCountContents(self, len_expected_contents, msg=None): - self._assertCountEqual('content', len_expected_contents, msg=msg) + self._assertCountEqual("content", len_expected_contents, msg=msg) def assertCountDirectories(self, len_expected_directories, msg=None): - self._assertCountEqual('directory', len_expected_directories, - msg=msg) + self._assertCountEqual("directory", len_expected_directories, msg=msg) def assertCountReleases(self, len_expected_releases, msg=None): - self._assertCountEqual('release', len_expected_releases, msg=msg) + self._assertCountEqual("release", len_expected_releases, msg=msg) def assertCountRevisions(self, len_expected_revisions, msg=None): - self._assertCountEqual('revision', len_expected_revisions, msg=msg) + self._assertCountEqual("revision", len_expected_revisions, msg=msg) def assertCountSnapshots(self, len_expected_snapshot, msg=None): - self._assertCountEqual('snapshot', len_expected_snapshot, msg=msg) + self._assertCountEqual("snapshot", len_expected_snapshot, msg=msg) def assertContentsContain(self, expected_contents): """Check the provided content are a subset of the stored ones. Args: expected_contents ([sha1]): List of content ids""" - missing = list(self.storage.content_missing( - {'sha1': hash_to_bytes(content_hash)} - for content_hash in expected_contents)) + missing = list( + self.storage.content_missing( + {"sha1": hash_to_bytes(content_hash)} + for content_hash in expected_contents + ) + ) self.assertEqual(missing, []) def assertDirectoriesContain(self, expected_directories): """Check the provided directories are a subset of the stored ones. Args: expected_directories ([sha1]): List of directory ids.""" - missing = list(self.storage.directory_missing( - hash_to_bytes(dir_) for dir_ in expected_directories)) + missing = list( + self.storage.directory_missing( + hash_to_bytes(dir_) for dir_ in expected_directories + ) + ) self.assertEqual(missing, []) def assertReleasesContain(self, expected_releases): """Check the provided releases are a subset of the stored ones. Args: releases (list): list of swh releases' identifiers. """ - missing = list(self.storage.release_missing( - hash_to_bytes(rel) for rel in expected_releases)) + missing = list( + self.storage.release_missing( + hash_to_bytes(rel) for rel in expected_releases + ) + ) self.assertEqual(missing, []) def assertRevisionsContain(self, expected_revisions): """Check the provided revisions are a subset of the stored ones. Expects self.loader to be instantiated and ready to be inspected (meaning the loading took place). Args: expected_revisions (dict): Dict with key revision id, value the targeted directory id. 
""" - revs = list(self.storage.revision_get( - hashutil.hash_to_bytes(rev_id) for rev_id in expected_revisions)) + revs = list( + self.storage.revision_get( + hashutil.hash_to_bytes(rev_id) for rev_id in expected_revisions + ) + ) self.assertNotIn(None, revs) self.assertEqual( - {rev['id']: rev['directory'] for rev in revs}, - {hash_to_bytes(rev_id): hash_to_bytes(rev_dir) - for (rev_id, rev_dir) in expected_revisions.items()}) + {rev["id"]: rev["directory"] for rev in revs}, + { + hash_to_bytes(rev_id): hash_to_bytes(rev_dir) + for (rev_id, rev_dir) in expected_revisions.items() + }, + ) def assertSnapshotEqual(self, expected_snapshot, expected_branches=[]): """Check for snapshot match. Provide the hashes as hexadecimal, the conversion is done within the method. Args: expected_snapshot (str/dict): Either the snapshot identifier or the full snapshot expected_branches (dict): expected branches or nothing is the full snapshot is provided """ if isinstance(expected_snapshot, dict) and not expected_branches: - expected_snapshot_id = expected_snapshot['id'] - expected_branches = expected_snapshot['branches'] + expected_snapshot_id = expected_snapshot["id"] + expected_branches = expected_snapshot["branches"] else: expected_snapshot_id = expected_snapshot snap = self.storage.snapshot_get(hash_to_bytes(expected_snapshot_id)) self.assertIsNotNone(snap) def decode_target(target): if not target: return target - target_type = target['target_type'] + target_type = target["target_type"] - if target_type == 'alias': - decoded_target = target['target'].decode('utf-8') + if target_type == "alias": + decoded_target = target["target"].decode("utf-8") else: - decoded_target = hashutil.hash_to_hex(target['target']) + decoded_target = hashutil.hash_to_hex(target["target"]) - return { - 'target': decoded_target, - 'target_type': target_type - } + return {"target": decoded_target, "target_type": target_type} branches = { - branch.decode('utf-8'): decode_target(target) - for branch, target in snap['branches'].items() + branch.decode("utf-8"): decode_target(target) + for branch, target in snap["branches"].items() } self.assertEqual(expected_branches, branches) - def assertOriginMetadataContains(self, origin_url, - expected_origin_metadata): + def assertOriginMetadataContains(self, origin_url, expected_origin_metadata): """Check the storage contains this metadata for the given origin. Args: origin_url (str): URL of the origin expected_origin_metadata (dict): Extrinsic metadata of the origin """ - origin = self.storage.origin_get({'url': origin_url}) - results = self.storage.origin_metadata_get_by(origin['url']) + origin = self.storage.origin_get({"url": origin_url}) + results = self.storage.origin_metadata_get_by(origin["url"]) self.assertEqual(len(results), 1, results) result = results[0] - self.assertEqual(result['metadata'], expected_origin_metadata) + self.assertEqual(result["metadata"], expected_origin_metadata) @pytest.mark.fs class BaseLoaderTest(TestCase, BaseLoaderStorageTest): """Mixin base loader test class. This allows to uncompress archives (mercurial, svn, git, ... repositories) into a temporary folder so that the loader under test can work with this. When setUp() is done, the following variables are defined: - self.repo_url: can be used as an origin_url for example - self.destination_path: can be used as a path to ingest the repository. Args: archive_name (str): Name of the archive holding the repository (folder, repository, dump, etc...) 
start_path (str): (mandatory) Path from where starting to look for resources filename (Optional[str]): Name of the filename/folder once the archive is uncompressed. When the filename is not provided, the archive name is used as a derivative. This is used both for the self.repo_url and self.destination_path computation (this one only when provided) resources_path (str): Folder name to look for archive prefix_tmp_folder_name (str): Prefix name to name the temporary folder uncompress_archive (bool): Uncompress the archive passed as parameters (default to True). It so happens we could avoid doing anything to the tarball. """ - def setUp(self, archive_name, *, start_path, filename=None, - resources_path='resources', prefix_tmp_folder_name='', - uncompress_archive=True): + + def setUp( + self, + archive_name, + *, + start_path, + filename=None, + resources_path="resources", + prefix_tmp_folder_name="", + uncompress_archive=True + ): super().setUp() repo_path = os.path.join(start_path, resources_path, archive_name) if not uncompress_archive: # In that case, simply sets the archive's path self.destination_path = repo_path self.tmp_root_path = None - self.repo_url = 'file://' + repo_path + self.repo_url = "file://" + repo_path return - tmp_root_path = tempfile.mkdtemp( - prefix=prefix_tmp_folder_name, suffix='-tests') + tmp_root_path = tempfile.mkdtemp(prefix=prefix_tmp_folder_name, suffix="-tests") # uncompress folder/repositories/dump for the loader to ingest - subprocess.check_output(['tar', 'xf', repo_path, '-C', tmp_root_path]) + subprocess.check_output(["tar", "xf", repo_path, "-C", tmp_root_path]) # build the origin url (or some derivative form) _fname = filename if filename else os.path.basename(archive_name) - self.repo_url = 'file://' + tmp_root_path + '/' + _fname + self.repo_url = "file://" + tmp_root_path + "/" + _fname # where is the data to ingest? if filename: # archive holds one folder with name self.destination_path = os.path.join(tmp_root_path, filename) else: self.destination_path = tmp_root_path self.tmp_root_path = tmp_root_path def tearDown(self): """Clean up temporary working directory """ if self.tmp_root_path and os.path.exists(self.tmp_root_path): shutil.rmtree(self.tmp_root_path) diff --git a/swh/loader/core/tests/test_converters.py b/swh/loader/core/tests/test_converters.py index 1adab49..8ef0a96 100644 --- a/swh/loader/core/tests/test_converters.py +++ b/swh/loader/core/tests/test_converters.py @@ -1,112 +1,111 @@ # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import tempfile from swh.loader.core import converters from swh.model import from_disk from swh.model.model import Content, SkippedContent def tmpfile_with_content(fromdir, contentfile): """Create a temporary file with content contentfile in directory fromdir. 
""" tmpfilepath = tempfile.mktemp( - suffix='.swh', - prefix='tmp-file-for-test', - dir=str(fromdir)) + suffix=".swh", prefix="tmp-file-for-test", dir=str(fromdir) + ) - with open(tmpfilepath, 'wb') as f: + with open(tmpfilepath, "wb") as f: f.write(contentfile) return tmpfilepath def test_content_for_storage_path(tmpdir): # given - data = b'temp file for testing content storage conversion' + data = b"temp file for testing content storage conversion" tmpfile = tmpfile_with_content(tmpdir, data) obj = from_disk.Content.from_file(path=os.fsdecode(tmpfile)).get_data() expected_content = obj.copy() - expected_content['data'] = data - expected_content['status'] = 'visible' - del expected_content['path'] - del expected_content['perms'] + expected_content["data"] = data + expected_content["status"] = "visible" + del expected_content["path"] + del expected_content["perms"] expected_content = Content.from_dict(expected_content) # when content = converters.content_for_storage(obj) # then assert content == expected_content def test_content_for_storage_data(tmpdir): # given - data = b'temp file for testing content storage conversion' + data = b"temp file for testing content storage conversion" obj = from_disk.Content.from_bytes(data=data, mode=0o100644).get_data() - del obj['perms'] + del obj["perms"] expected_content = obj.copy() - expected_content['status'] = 'visible' + expected_content["status"] = "visible" expected_content = Content.from_dict(expected_content) # when content = converters.content_for_storage(obj) # then assert content == expected_content def test_content_for_storage_too_long(tmpdir): # given - data = b'temp file for testing content storage conversion' + data = b"temp file for testing content storage conversion" obj = from_disk.Content.from_bytes(data=data, mode=0o100644).get_data() - del obj['perms'] + del obj["perms"] expected_content = obj.copy() - expected_content.pop('data') - expected_content['status'] = 'absent' - expected_content['origin'] = 'http://example.org/' - expected_content['reason'] = 'Content too large' + expected_content.pop("data") + expected_content["status"] = "absent" + expected_content["origin"] = "http://example.org/" + expected_content["reason"] = "Content too large" expected_content = SkippedContent.from_dict(expected_content) # when content = converters.content_for_storage( - obj, max_content_size=len(data) - 1, - origin_url=expected_content.origin, + obj, max_content_size=len(data) - 1, origin_url=expected_content.origin, ) # then assert content == expected_content def test_prepare_contents(tmpdir): contents = [] - data_fine = b'tmp file fine' + data_fine = b"tmp file fine" max_size = len(data_fine) - for data in [b'tmp file with too much data', data_fine]: + for data in [b"tmp file with too much data", data_fine]: obj = from_disk.Content.from_bytes(data=data, mode=0o100644).get_data() - del obj['perms'] + del obj["perms"] contents.append(obj) actual_contents, actual_skipped_contents = converters.prepare_contents( - contents, max_content_size=max_size, origin_url='some-origin') + contents, max_content_size=max_size, origin_url="some-origin" + ) assert len(actual_contents) == 1 assert len(actual_skipped_contents) == 1 actual_content = actual_contents[0] - assert 'reason' not in actual_content - assert actual_content['status'] == 'visible' + assert "reason" not in actual_content + assert actual_content["status"] == "visible" actual_skipped_content = actual_skipped_contents[0] - assert actual_skipped_content['reason'] == 'Content too large' - assert 
actual_skipped_content['status'] == 'absent' - assert actual_skipped_content['origin'] == 'some-origin' + assert actual_skipped_content["reason"] == "Content too large" + assert actual_skipped_content["status"] == "absent" + assert actual_skipped_content["origin"] == "some-origin" diff --git a/swh/loader/core/tests/test_loader.py b/swh/loader/core/tests/test_loader.py index 4761f98..f8f8c82 100644 --- a/swh/loader/core/tests/test_loader.py +++ b/swh/loader/core/tests/test_loader.py @@ -1,149 +1,131 @@ # Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib import logging import pytest from swh.model.model import Origin from swh.loader.core.loader import BaseLoader, DVCSLoader class DummyLoader: def cleanup(self): pass def prepare(self, *args, **kwargs): pass def fetch_data(self): pass def store_data(self): pass def get_snapshot_id(self): return None def prepare_origin_visit(self, *args, **kwargs): - origin = Origin(url='some-url') + origin = Origin(url="some-url") self.origin = origin self.origin_url = origin.url self.visit_date = datetime.datetime.utcnow() - self.visit_type = 'git' + self.visit_type = "git" origin_url = self.storage.origin_add_one(origin) self.visit = self.storage.origin_visit_add( - origin_url, self.visit_date, self.visit_type) + origin_url, self.visit_date, self.visit_type + ) class DummyDVCSLoader(DummyLoader, DVCSLoader): """Unbuffered loader will send directly to storage new data """ + def parse_config_file(self, *args, **kwargs): return { - 'max_content_size': 100 * 1024 * 1024, - 'storage': { - 'cls': 'pipeline', - 'steps': [ - { - 'cls': 'retry', - }, - { - 'cls': 'filter', - }, - { - 'cls': 'memory', - }, - ] + "max_content_size": 100 * 1024 * 1024, + "storage": { + "cls": "pipeline", + "steps": [{"cls": "retry",}, {"cls": "filter",}, {"cls": "memory",},], }, } class DummyBaseLoader(DummyLoader, BaseLoader): """Buffered loader will send new data when threshold is reached """ + def parse_config_file(self, *args, **kwargs): return { - 'max_content_size': 100 * 1024 * 1024, - 'storage': { - 'cls': 'pipeline', - 'steps': [ - { - 'cls': 'retry', - }, + "max_content_size": 100 * 1024 * 1024, + "storage": { + "cls": "pipeline", + "steps": [ + {"cls": "retry",}, + {"cls": "filter",}, { - 'cls': 'filter', - }, - { - 'cls': 'buffer', - 'min_batch_size': { - 'content': 2, - 'content_bytes': 8, - 'directory': 2, - 'revision': 2, - 'release': 2, + "cls": "buffer", + "min_batch_size": { + "content": 2, + "content_bytes": 8, + "directory": 2, + "revision": 2, + "release": 2, }, }, - { - 'cls': 'memory', - }, - ] + {"cls": "memory",}, + ], }, } def test_base_loader(): loader = DummyBaseLoader() result = loader.load() - assert result == {'status': 'eventful'} + assert result == {"status": "eventful"} def test_dvcs_loader(): loader = DummyDVCSLoader() result = loader.load() - assert result == {'status': 'eventful'} + assert result == {"status": "eventful"} def test_loader_logger_default_name(): loader = DummyBaseLoader() assert isinstance(loader.log, logging.Logger) - assert loader.log.name == \ - 'swh.loader.core.tests.test_loader.DummyBaseLoader' + assert loader.log.name == "swh.loader.core.tests.test_loader.DummyBaseLoader" loader = DummyDVCSLoader() assert isinstance(loader.log, logging.Logger) - assert loader.log.name == \ - 
'swh.loader.core.tests.test_loader.DummyDVCSLoader' + assert loader.log.name == "swh.loader.core.tests.test_loader.DummyDVCSLoader" def test_loader_logger_with_name(): - loader = DummyBaseLoader('some.logger.name') + loader = DummyBaseLoader("some.logger.name") assert isinstance(loader.log, logging.Logger) - assert loader.log.name == \ - 'some.logger.name' + assert loader.log.name == "some.logger.name" @pytest.mark.fs def test_loader_save_data_path(tmp_path): - loader = DummyBaseLoader('some.logger.name.1') - url = 'http://bitbucket.org/something' + loader = DummyBaseLoader("some.logger.name.1") + url = "http://bitbucket.org/something" loader.origin = Origin(url=url) loader.visit_date = datetime.datetime(year=2019, month=10, day=1) loader.config = { - 'save_data_path': tmp_path, + "save_data_path": tmp_path, } - hash_url = hashlib.sha1(url.encode('utf-8')).hexdigest() - expected_save_path = '%s/sha1:%s/%s/2019' % ( - str(tmp_path), hash_url[0:2], hash_url - ) + hash_url = hashlib.sha1(url.encode("utf-8")).hexdigest() + expected_save_path = "%s/sha1:%s/%s/2019" % (str(tmp_path), hash_url[0:2], hash_url) save_path = loader.get_save_data_path() assert save_path == expected_save_path diff --git a/swh/loader/core/tests/test_tests.py b/swh/loader/core/tests/test_tests.py index b138b37..6bc946c 100644 --- a/swh/loader/core/tests/test_tests.py +++ b/swh/loader/core/tests/test_tests.py @@ -1,87 +1,91 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os.path from os.path import join import pytest import re import tarfile import tempfile from unittest import TestCase from . 
import BaseLoaderStorageTest from swh.loader.core.tests import BaseLoaderTest class TestsTest(BaseLoaderStorageTest, TestCase): """Test the helpers provided to other loaders' tests.""" def _build_archive(self, fd): - with tarfile.open(mode='w', fileobj=fd) as tf: + with tarfile.open(mode="w", fileobj=fd) as tf: with tempfile.TemporaryDirectory() as src_dir: - with open(join(src_dir, 'hello.txt'), 'a') as src_file: - src_file.write('world\n') - tf.add(src_dir, arcname='test_dir') + with open(join(src_dir, "hello.txt"), "a") as src_file: + src_file.write("world\n") + tf.add(src_dir, arcname="test_dir") def _build_workdir(self, workdir): - os.mkdir(join(workdir, 'resources')) - tarball_path = join(workdir, 'resources', 'test_archive.tar') - with open(tarball_path, 'a+b') as tar_fd: + os.mkdir(join(workdir, "resources")) + tarball_path = join(workdir, "resources", "test_archive.tar") + with open(tarball_path, "a+b") as tar_fd: self._build_archive(tar_fd) @pytest.mark.fs def test_uncompress_setup_auto_name(self): loader_test = BaseLoaderTest() with tempfile.TemporaryDirectory() as workdir: self._build_workdir(workdir) - loader_test.setUp('test_archive.tar', start_path=workdir) + loader_test.setUp("test_archive.tar", start_path=workdir) - self.assertTrue(re.match('^file://.*-tests/test_archive.tar$', - loader_test.repo_url), - msg=loader_test.repo_url) + self.assertTrue( + re.match("^file://.*-tests/test_archive.tar$", loader_test.repo_url), + msg=loader_test.repo_url, + ) self.assertTrue(os.path.isdir(loader_test.destination_path)) - self.assertTrue(os.path.isdir(join(loader_test.destination_path, - 'test_dir'))) - self.assertTrue(os.path.isfile(join(loader_test.destination_path, - 'test_dir', - 'hello.txt'))) + self.assertTrue(os.path.isdir(join(loader_test.destination_path, "test_dir"))) + self.assertTrue( + os.path.isfile(join(loader_test.destination_path, "test_dir", "hello.txt")) + ) loader_test.tearDown() self.assertFalse(os.path.isdir(loader_test.destination_path)) @pytest.mark.fs def test_uncompress_setup_provided_name(self): loader_test = BaseLoaderTest() with tempfile.TemporaryDirectory() as workdir: self._build_workdir(workdir) - loader_test.setUp('test_archive.tar', start_path=workdir, - filename='test_dir') + loader_test.setUp( + "test_archive.tar", start_path=workdir, filename="test_dir" + ) - self.assertTrue(re.match('^file://.*-tests/test_dir$', - loader_test.repo_url), - msg=loader_test.repo_url) + self.assertTrue( + re.match("^file://.*-tests/test_dir$", loader_test.repo_url), + msg=loader_test.repo_url, + ) self.assertTrue(os.path.isdir(loader_test.destination_path)) - self.assertTrue(os.path.isfile(join(loader_test.destination_path, - 'hello.txt'))) + self.assertTrue(os.path.isfile(join(loader_test.destination_path, "hello.txt"))) loader_test.tearDown() self.assertFalse(os.path.isdir(loader_test.destination_path)) @pytest.mark.fs def test_setup_no_uncompress(self): loader_test = BaseLoaderTest() with tempfile.TemporaryDirectory() as workdir: self._build_workdir(workdir) - loader_test.setUp('test_archive.tar', start_path=workdir, - uncompress_archive=False) + loader_test.setUp( + "test_archive.tar", start_path=workdir, uncompress_archive=False + ) - self.assertEqual('file://' + workdir + '/resources/test_archive.tar', - loader_test.repo_url) - self.assertEqual(workdir + '/resources/test_archive.tar', - loader_test.destination_path) + self.assertEqual( + "file://" + workdir + "/resources/test_archive.tar", loader_test.repo_url + ) + self.assertEqual( + workdir + 
"/resources/test_archive.tar", loader_test.destination_path + ) diff --git a/swh/loader/core/tests/test_utils.py b/swh/loader/core/tests/test_utils.py index f0f89ca..029c8ce 100644 --- a/swh/loader/core/tests/test_utils.py +++ b/swh/loader/core/tests/test_utils.py @@ -1,102 +1,96 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from unittest.mock import patch from swh.loader.core.utils import clean_dangling_folders def prepare_arborescence_from(tmpdir, folder_names): """Prepare arborescence tree with folders Args: tmpdir (Either[LocalPath, str]): Root temporary directory folder_names (List[str]): List of folder names Returns: List of folders """ dangling_folders = [] for dname in folder_names: d = str(tmpdir / dname) os.mkdir(d) dangling_folders.append(d) return str(tmpdir), dangling_folders def assert_dirs(actual_dirs, expected_dirs): """Assert that the directory actual and expected match """ for d in actual_dirs: assert d in expected_dirs assert len(actual_dirs) == len(expected_dirs) def test_clean_dangling_folders_0(tmpdir): """Folder does not exist, do nothing""" - r = clean_dangling_folders('/path/does/not/exist', 'unused-pattern') + r = clean_dangling_folders("/path/does/not/exist", "unused-pattern") assert r is None -@patch('swh.loader.core.utils.psutil.pid_exists', return_value=False) +@patch("swh.loader.core.utils.psutil.pid_exists", return_value=False) def test_clean_dangling_folders_1(mock_pid_exists, tmpdir): """Folder which matches pattern with dead pid are cleaned up """ - rootpath, dangling = prepare_arborescence_from(tmpdir, [ - 'something', - 'swh.loader.svn-4321.noisynoise', - ]) + rootpath, dangling = prepare_arborescence_from( + tmpdir, ["something", "swh.loader.svn-4321.noisynoise",] + ) - clean_dangling_folders(rootpath, 'swh.loader.svn') + clean_dangling_folders(rootpath, "swh.loader.svn") actual_dirs = os.listdir(rootpath) mock_pid_exists.assert_called_once_with(4321) - assert_dirs(actual_dirs, ['something']) + assert_dirs(actual_dirs, ["something"]) -@patch('swh.loader.core.utils.psutil.pid_exists', return_value=True) +@patch("swh.loader.core.utils.psutil.pid_exists", return_value=True) def test_clean_dangling_folders_2(mock_pid_exists, tmpdir): """Folder which matches pattern with live pid are skipped """ - rootpath, dangling = prepare_arborescence_from(tmpdir, [ - 'something', - 'swh.loader.hg-1234.noisynoise', - ]) + rootpath, dangling = prepare_arborescence_from( + tmpdir, ["something", "swh.loader.hg-1234.noisynoise",] + ) - clean_dangling_folders(rootpath, 'swh.loader.hg') + clean_dangling_folders(rootpath, "swh.loader.hg") actual_dirs = os.listdir(rootpath) mock_pid_exists.assert_called_once_with(1234) - assert_dirs(actual_dirs, [ - 'something', 'swh.loader.hg-1234.noisynoise', - ]) + assert_dirs(actual_dirs, ["something", "swh.loader.hg-1234.noisynoise",]) -@patch('swh.loader.core.utils.psutil.pid_exists', - return_value=False) -@patch('swh.loader.core.utils.shutil.rmtree', - side_effect=ValueError('Could not remove for reasons')) +@patch("swh.loader.core.utils.psutil.pid_exists", return_value=False) +@patch( + "swh.loader.core.utils.shutil.rmtree", + side_effect=ValueError("Could not remove for reasons"), +) def test_clean_dangling_folders_3(mock_rmtree, mock_pid_exists, tmpdir): """Error in trying to clean dangling folders are skipped """ - path1 = 
'thingy' - path2 = 'swh.loader.git-1468.noisy' - rootpath, dangling = prepare_arborescence_from(tmpdir, [ - path1, - path2, - ]) - - clean_dangling_folders(rootpath, 'swh.loader.git') + path1 = "thingy" + path2 = "swh.loader.git-1468.noisy" + rootpath, dangling = prepare_arborescence_from(tmpdir, [path1, path2,]) + + clean_dangling_folders(rootpath, "swh.loader.git") actual_dirs = os.listdir(rootpath) mock_pid_exists.assert_called_once_with(1468) mock_rmtree.assert_called_once_with(os.path.join(rootpath, path2)) assert_dirs(actual_dirs, [path2, path1]) diff --git a/swh/loader/core/utils.py b/swh/loader/core/utils.py index f1f43cc..ce9e0c6 100644 --- a/swh/loader/core/utils.py +++ b/swh/loader/core/utils.py @@ -1,47 +1,47 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import shutil import psutil def clean_dangling_folders(dirpath, pattern_check, log=None): """Clean up potential dangling temporary working folder rooted at `dirpath`. Those folders must match a dedicated pattern and not belonging to a live pid. Args: dirpath (str): Path to check for dangling files pattern_check (str): A dedicated pattern to check on first level directory (e.g `swh.loader.mercurial.`, `swh.loader.svn.`) log (Logger): Optional logger """ if not os.path.exists(dirpath): return for filename in os.listdir(dirpath): try: # pattern: `swh.loader.svn-pid.{noise}` - if pattern_check not in filename or \ - '-' not in filename: # silently ignore unknown patterns + if ( + pattern_check not in filename or "-" not in filename + ): # silently ignore unknown patterns continue - _, pid = filename.split('-') - pid = int(pid.split('.')[0]) + _, pid = filename.split("-") + pid = int(pid.split(".")[0]) if psutil.pid_exists(pid): if log: - log.debug('PID %s is live, skipping' % pid) + log.debug("PID %s is live, skipping" % pid) continue path_to_cleanup = os.path.join(dirpath, filename) # could be removed concurrently, so check before removal if os.path.exists(path_to_cleanup): shutil.rmtree(path_to_cleanup) except Exception as e: if log: - msg = 'Fail to clean dangling path %s: %s' % ( - path_to_cleanup, e) + msg = "Fail to clean dangling path %s: %s" % (path_to_cleanup, e) log.warn(msg) diff --git a/swh/loader/package/__init__.py b/swh/loader/package/__init__.py index ee2d113..3579fb2 100644 --- a/swh/loader/package/__init__.py +++ b/swh/loader/package/__init__.py @@ -1,23 +1,19 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Dict import pkg_resources try: - __version__ = pkg_resources.get_distribution('swh.loader.core').version + __version__ = pkg_resources.get_distribution("swh.loader.core").version except pkg_resources.DistributionNotFound: - __version__ = 'devel' + __version__ = "devel" DEFAULT_PARAMS: Dict[str, Any] = { - 'headers': { - 'User-Agent': 'Software Heritage Loader (%s)' % ( - __version__ - ) - } + "headers": {"User-Agent": "Software Heritage Loader (%s)" % (__version__)} } diff --git a/swh/loader/package/archive/__init__.py b/swh/loader/package/archive/__init__.py index bb824e7..61ea19b 100644 --- a/swh/loader/package/archive/__init__.py +++ b/swh/loader/package/archive/__init__.py @@ 
-1,16 +1,17 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: """Register the current worker module's definition""" from .loader import ArchiveLoader + return { - 'task_modules': [f'{__name__}.tasks'], - 'loader': ArchiveLoader, + "task_modules": [f"{__name__}.tasks"], + "loader": ArchiveLoader, } diff --git a/swh/loader/package/archive/loader.py b/swh/loader/package/archive/loader.py index cca2035..9c3ea48 100644 --- a/swh/loader/package/archive/loader.py +++ b/swh/loader/package/archive/loader.py @@ -1,131 +1,143 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import iso8601 import logging from os import path from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import release_name, artifact_identity from swh.model.model import ( - Sha1Git, Person, TimestampWithTimezone, Revision, RevisionType, + Sha1Git, + Person, + TimestampWithTimezone, + Revision, + RevisionType, ) logger = logging.getLogger(__name__) SWH_PERSON = Person( - name=b'Software Heritage', - fullname=b'Software Heritage', - email=b'robot@softwareheritage.org' + name=b"Software Heritage", + fullname=b"Software Heritage", + email=b"robot@softwareheritage.org", ) -REVISION_MESSAGE = b'swh-loader-package: synthetic revision message' +REVISION_MESSAGE = b"swh-loader-package: synthetic revision message" class ArchiveLoader(PackageLoader): """Load archive origin's artifact files into swh archive """ - visit_type = 'tar' - def __init__(self, url: str, artifacts: Sequence[Mapping[str, Any]], - identity_artifact_keys: Optional[Sequence[str]] = None): + visit_type = "tar" + + def __init__( + self, + url: str, + artifacts: Sequence[Mapping[str, Any]], + identity_artifact_keys: Optional[Sequence[str]] = None, + ): """Loader constructor. For now, this is the lister's task output. 
Args: url: Origin url artifacts: List of artifact information with keys: - **time**: last modification time as either isoformat date string or timestamp - **url**: the artifact url to retrieve filename - **artifact's filename version**: artifact's version length - **length**: artifact's length identity_artifact_keys: Optional List of keys forming the "identity" of an artifact """ super().__init__(url=url) self.artifacts = artifacts # assume order is enforced in the lister if not identity_artifact_keys: # default keys for gnu - identity_artifact_keys = ['time', 'url', 'length', 'version'] + identity_artifact_keys = ["time", "url", "length", "version"] self.identity_artifact_keys = identity_artifact_keys def get_versions(self) -> Sequence[str]: versions = [] for archive in self.artifacts: - v = archive.get('version') + v = archive.get("version") if v: versions.append(v) return versions def get_default_version(self) -> str: # It's the most recent, so for this loader, it's the last one - return self.artifacts[-1]['version'] + return self.artifacts[-1]["version"] - def get_package_info(self, version: str) -> Generator[ - Tuple[str, Mapping[str, Any]], None, None]: + def get_package_info( + self, version: str + ) -> Generator[Tuple[str, Mapping[str, Any]], None, None]: for a_metadata in self.artifacts: - url = a_metadata['url'] - package_version = a_metadata['version'] + url = a_metadata["url"] + package_version = a_metadata["version"] if version == package_version: - filename = a_metadata.get('filename') + filename = a_metadata.get("filename") p_info = { - 'url': url, - 'filename': filename if filename else path.split(url)[-1], - 'raw': a_metadata, + "url": url, + "filename": filename if filename else path.split(url)[-1], + "raw": a_metadata, } # FIXME: this code assumes we have only 1 artifact per # versioned package yield release_name(version), p_info def resolve_revision_from( - self, known_artifacts: Dict, artifact_metadata: Dict) \ - -> Optional[bytes]: + self, known_artifacts: Dict, artifact_metadata: Dict + ) -> Optional[bytes]: identity = artifact_identity( - artifact_metadata, id_keys=self.identity_artifact_keys) + artifact_metadata, id_keys=self.identity_artifact_keys + ) for rev_id, known_artifact in known_artifacts.items(): - logging.debug('known_artifact: %s', known_artifact) - reference_artifact = known_artifact['extrinsic']['raw'] + logging.debug("known_artifact: %s", known_artifact) + reference_artifact = known_artifact["extrinsic"]["raw"] known_identity = artifact_identity( - reference_artifact, id_keys=self.identity_artifact_keys) + reference_artifact, id_keys=self.identity_artifact_keys + ) if identity == known_identity: return rev_id return None def build_revision( - self, a_metadata: Mapping[str, Any], uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: - time = a_metadata['time'] # assume it's a timestamp + self, a_metadata: Mapping[str, Any], uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: + time = a_metadata["time"] # assume it's a timestamp if isinstance(time, str): # otherwise, assume it's a parsable date time = iso8601.parse_date(time) normalized_time = TimestampWithTimezone.from_datetime(time) return Revision( type=RevisionType.TAR, message=REVISION_MESSAGE, date=normalized_time, author=SWH_PERSON, committer=SWH_PERSON, committer_date=normalized_time, parents=[], directory=directory, synthetic=True, metadata={ - 'intrinsic': {}, - 'extrinsic': { - 'provider': self.url, - 'when': self.visit_date.isoformat(), - 'raw': a_metadata, 
+ "intrinsic": {}, + "extrinsic": { + "provider": self.url, + "when": self.visit_date.isoformat(), + "raw": a_metadata, }, }, ) diff --git a/swh/loader/package/archive/tasks.py b/swh/loader/package/archive/tasks.py index 4313326..a9cb9e4 100644 --- a/swh/loader/package/archive/tasks.py +++ b/swh/loader/package/archive/tasks.py @@ -1,16 +1,16 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.archive.loader import ArchiveLoader -@shared_task(name=__name__ + '.LoadArchive') -def load_archive_files( - *, url=None, artifacts=None, identity_artifact_keys=None): +@shared_task(name=__name__ + ".LoadArchive") +def load_archive_files(*, url=None, artifacts=None, identity_artifact_keys=None): """Load archive's artifacts (e.g gnu, etc...)""" - return ArchiveLoader(url, artifacts, - identity_artifact_keys=identity_artifact_keys).load() + return ArchiveLoader( + url, artifacts, identity_artifact_keys=identity_artifact_keys + ).load() diff --git a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py index 8e207c4..bc5a137 100644 --- a/swh/loader/package/archive/tests/test_archive.py +++ b/swh/loader/package/archive/tests/test_archive.py @@ -1,345 +1,353 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.model.hashutil import hash_to_bytes from swh.loader.package.archive.loader import ArchiveLoader from swh.loader.package.tests.common import ( - check_snapshot, check_metadata_paths, get_stats + check_snapshot, + check_metadata_paths, + get_stats, ) -URL = 'https://ftp.gnu.org/gnu/8sync/' +URL = "https://ftp.gnu.org/gnu/8sync/" GNU_ARTIFACTS = [ { - 'time': 944729610, - 'url': 'https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz', - 'length': 221837, - 'filename': '8sync-0.1.0.tar.gz', - 'version': '0.1.0', + "time": 944729610, + "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz", + "length": 221837, + "filename": "8sync-0.1.0.tar.gz", + "version": "0.1.0", } ] _expected_new_contents_first_visit = [ - 'e9258d81faf5881a2f96a77ba609396f82cb97ad', - '1170cf105b04b7e2822a0e09d2acf71da7b9a130', - 'fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac', - '0057bec9b5422aff9256af240b177ac0e3ac2608', - '2b8d0d0b43a1078fc708930c8ddc2956a86c566e', - '27de3b3bc6545d2a797aeeb4657c0e215a0c2e55', - '2e6db43f5cd764e677f416ff0d0c78c7a82ef19b', - 'ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62', - 'edeb33282b2bffa0e608e9d2fd960fd08093c0ea', - 'd64e64d4c73679323f8d4cde2643331ba6c20af9', - '7a756602914be889c0a2d3952c710144b3e64cb0', - '84fb589b554fcb7f32b806951dcf19518d67b08f', - '8624bcdae55baeef00cd11d5dfcfa60f68710a02', - 'e08441aeab02704cfbd435d6445f7c072f8f524e', - 'f67935bc3a83a67259cda4b2d43373bd56703844', - '809788434b433eb2e3cfabd5d591c9a659d5e3d8', - '7d7c6c8c5ebaeff879f61f37083a3854184f6c41', - 'b99fec102eb24bffd53ab61fc30d59e810f116a2', - '7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68', - 'f0c97052e567948adf03e641301e9983c478ccff', - '7fb724242e2b62b85ca64190c31dcae5303e19b3', - '4f9709e64a9134fe8aefb36fd827b84d8b617ab5', - '7350628ccf194c2c3afba4ac588c33e3f3ac778d', - '0bb892d9391aa706dc2c3b1906567df43cbe06a2', - 
'49d4c0ce1a16601f1e265d446b6c5ea6b512f27c', - '6b5cc594ac466351450f7f64a0b79fdaf4435ad3', - '3046e5d1f70297e2a507b98224b6222c9688d610', - '1572607d456d7f633bc6065a2b3048496d679a31', + "e9258d81faf5881a2f96a77ba609396f82cb97ad", + "1170cf105b04b7e2822a0e09d2acf71da7b9a130", + "fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac", + "0057bec9b5422aff9256af240b177ac0e3ac2608", + "2b8d0d0b43a1078fc708930c8ddc2956a86c566e", + "27de3b3bc6545d2a797aeeb4657c0e215a0c2e55", + "2e6db43f5cd764e677f416ff0d0c78c7a82ef19b", + "ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62", + "edeb33282b2bffa0e608e9d2fd960fd08093c0ea", + "d64e64d4c73679323f8d4cde2643331ba6c20af9", + "7a756602914be889c0a2d3952c710144b3e64cb0", + "84fb589b554fcb7f32b806951dcf19518d67b08f", + "8624bcdae55baeef00cd11d5dfcfa60f68710a02", + "e08441aeab02704cfbd435d6445f7c072f8f524e", + "f67935bc3a83a67259cda4b2d43373bd56703844", + "809788434b433eb2e3cfabd5d591c9a659d5e3d8", + "7d7c6c8c5ebaeff879f61f37083a3854184f6c41", + "b99fec102eb24bffd53ab61fc30d59e810f116a2", + "7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68", + "f0c97052e567948adf03e641301e9983c478ccff", + "7fb724242e2b62b85ca64190c31dcae5303e19b3", + "4f9709e64a9134fe8aefb36fd827b84d8b617ab5", + "7350628ccf194c2c3afba4ac588c33e3f3ac778d", + "0bb892d9391aa706dc2c3b1906567df43cbe06a2", + "49d4c0ce1a16601f1e265d446b6c5ea6b512f27c", + "6b5cc594ac466351450f7f64a0b79fdaf4435ad3", + "3046e5d1f70297e2a507b98224b6222c9688d610", + "1572607d456d7f633bc6065a2b3048496d679a31", ] _expected_new_directories_first_visit = [ - 'daabc65ec75d487b1335ffc101c0ac11c803f8fc', - '263be23b4a8101d3ad0d9831319a3e0f2b065f36', - '7f6e63ba6eb3e2236f65892cd822041f1a01dd5c', - '4db0a3ecbc976083e2dac01a62f93729698429a3', - 'dfef1c80e1098dd5deda664bb44a9ab1f738af13', - 'eca971d346ea54d95a6e19d5051f900237fafdaa', - '3aebc29ed1fccc4a6f2f2010fb8e57882406b528', + "daabc65ec75d487b1335ffc101c0ac11c803f8fc", + "263be23b4a8101d3ad0d9831319a3e0f2b065f36", + "7f6e63ba6eb3e2236f65892cd822041f1a01dd5c", + "4db0a3ecbc976083e2dac01a62f93729698429a3", + "dfef1c80e1098dd5deda664bb44a9ab1f738af13", + "eca971d346ea54d95a6e19d5051f900237fafdaa", + "3aebc29ed1fccc4a6f2f2010fb8e57882406b528", ] _expected_new_revisions_first_visit = { - '44183488c0774ce3c957fa19ba695cf18a4a42b3': - '3aebc29ed1fccc4a6f2f2010fb8e57882406b528' + "44183488c0774ce3c957fa19ba695cf18a4a42b3": ( + "3aebc29ed1fccc4a6f2f2010fb8e57882406b528" + ) } _expected_branches_first_visit = { - 'HEAD': { - 'target_type': 'alias', - 'target': 'releases/0.1.0', - }, - 'releases/0.1.0': { - 'target_type': 'revision', - 'target': '44183488c0774ce3c957fa19ba695cf18a4a42b3', + "HEAD": {"target_type": "alias", "target": "releases/0.1.0",}, + "releases/0.1.0": { + "target_type": "revision", + "target": "44183488c0774ce3c957fa19ba695cf18a4a42b3", }, } # hash is different then before as we changed the snapshot # gnu used to use `release/` (singular) instead of plural -_expected_new_snapshot_first_visit_id = 'c419397fd912039825ebdbea378bc6283f006bf5' # noqa +_expected_new_snapshot_first_visit_id = ( + "c419397fd912039825ebdbea378bc6283f006bf5" # noqa +) def visit_with_no_artifact_found(swh_config, requests_mock_datadir): url = URL - unknown_artifact_url = 'https://ftp.g.o/unknown/8sync-0.1.0.tar.gz' - loader = ArchiveLoader(url, artifacts=[ - { - 'time': 944729610, - 'url': unknown_artifact_url, # unknown artifact - 'length': 221837, - 'filename': '8sync-0.1.0.tar.gz', - 'version': '0.1.0', - } - ]) + unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz" + loader = ArchiveLoader( + url, + 
artifacts=[ + { + "time": 944729610, + "url": unknown_artifact_url, # unknown artifact + "length": 221837, + "filename": "8sync-0.1.0.tar.gz", + "version": "0.1.0", + } + ], + ) actual_load_status = loader.load() - assert actual_load_status['status'] == 'uneventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "uneventful" + assert actual_load_status["snapshot_id"] is not None stats = get_stats(loader.storage) assert { - 'content': 0, - 'directory': 0, - 'origin': 1, - 'origin_visit': 1, - 'person': 0, - 'release': 0, - 'revision': 0, - 'skipped_content': 0, - 'snapshot': 1, + "content": 0, + "directory": 0, + "origin": 1, + "origin_visit": 1, + "person": 0, + "release": 0, + "revision": 0, + "skipped_content": 0, + "snapshot": 1, } == stats origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'partial' - assert origin_visit['type'] == 'tar' + assert origin_visit["status"] == "partial" + assert origin_visit["type"] == "tar" def test_check_revision_metadata_structure(swh_config, requests_mock_datadir): loader = ArchiveLoader(url=URL, artifacts=GNU_ARTIFACTS) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None - expected_revision_id = hash_to_bytes( - '44183488c0774ce3c957fa19ba695cf18a4a42b3') + expected_revision_id = hash_to_bytes("44183488c0774ce3c957fa19ba695cf18a4a42b3") revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None - check_metadata_paths(revision['metadata'], paths=[ - ('intrinsic', dict), - ('extrinsic.provider', str), - ('extrinsic.when', str), - ('extrinsic.raw', dict), - ('original_artifact', list), - ]) - - for original_artifact in revision['metadata']['original_artifact']: - check_metadata_paths(original_artifact, paths=[ - ('filename', str), - ('length', int), - ('checksums', dict), - ]) - - -def test_visit_with_release_artifact_no_prior_visit( - swh_config, requests_mock_datadir): + check_metadata_paths( + revision["metadata"], + paths=[ + ("intrinsic", dict), + ("extrinsic.provider", str), + ("extrinsic.when", str), + ("extrinsic.raw", dict), + ("original_artifact", list), + ], + ) + + for original_artifact in revision["metadata"]["original_artifact"]: + check_metadata_paths( + original_artifact, + paths=[("filename", str), ("length", int), ("checksums", dict),], + ) + + +def test_visit_with_release_artifact_no_prior_visit(swh_config, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ loader = ArchiveLoader(url=URL, artifacts=GNU_ARTIFACTS) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] == _expected_new_snapshot_first_visit_id # noqa + assert actual_load_status["status"] == "eventful" + assert ( + actual_load_status["snapshot_id"] == _expected_new_snapshot_first_visit_id + ) # noqa stats = get_stats(loader.storage) assert { - 'content': len(_expected_new_contents_first_visit), - 'directory': len(_expected_new_directories_first_visit), - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': len(_expected_new_revisions_first_visit), - 'skipped_content': 0, - 'snapshot': 1 + "content": len(_expected_new_contents_first_visit), + "directory": len(_expected_new_directories_first_visit), + 
"origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": len(_expected_new_revisions_first_visit), + "skipped_content": 0, + "snapshot": 1, } == stats expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit) - assert list(loader.storage.content_missing_per_sha1(expected_contents)) \ - == [] + assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit) assert list(loader.storage.directory_missing(expected_dirs)) == [] expected_revs = map(hash_to_bytes, _expected_new_revisions_first_visit) assert list(loader.storage.revision_missing(expected_revs)) == [] expected_snapshot = { - 'id': _expected_new_snapshot_first_visit_id, - 'branches': _expected_branches_first_visit, + "id": _expected_new_snapshot_first_visit_id, + "branches": _expected_branches_first_visit, } check_snapshot(expected_snapshot, loader.storage) def test_2_visits_without_change(swh_config, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ url = URL loader = ArchiveLoader(url, artifacts=GNU_ARTIFACTS) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'tar' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "tar" actual_load_status2 = loader.load() - assert actual_load_status2['status'] == 'uneventful' - assert actual_load_status2['snapshot_id'] is not None + assert actual_load_status2["status"] == "uneventful" + assert actual_load_status2["snapshot_id"] is not None - assert actual_load_status['snapshot_id'] == actual_load_status2[ - 'snapshot_id'] + assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"] origin_visit2 = loader.storage.origin_visit_get_latest(url) - assert origin_visit2['status'] == 'full' - assert origin_visit2['type'] == 'tar' + assert origin_visit2["status"] == "full" + assert origin_visit2["type"] == "tar" urls = [ - m.url for m in requests_mock_datadir.request_history - if m.url.startswith('https://ftp.gnu.org') + m.url + for m in requests_mock_datadir.request_history + if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 def test_2_visits_with_new_artifact(swh_config, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ url = URL artifact1 = GNU_ARTIFACTS[0] loader = ArchiveLoader(url, [artifact1]) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'tar' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "tar" stats = get_stats(loader.storage) assert { - 'content': len(_expected_new_contents_first_visit), - 'directory': len(_expected_new_directories_first_visit), - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': len(_expected_new_revisions_first_visit), - 'skipped_content': 0, - 'snapshot': 1 + "content": 
len(_expected_new_contents_first_visit), + "directory": len(_expected_new_directories_first_visit), + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": len(_expected_new_revisions_first_visit), + "skipped_content": 0, + "snapshot": 1, } == stats urls = [ - m.url for m in requests_mock_datadir.request_history - if m.url.startswith('https://ftp.gnu.org') + m.url + for m in requests_mock_datadir.request_history + if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 artifact2 = { - 'time': 1480991830, - 'url': 'https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz', - 'length': 238466, - 'filename': '8sync-0.2.0.tar.gz', - 'version': '0.2.0', + "time": 1480991830, + "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz", + "length": 238466, + "filename": "8sync-0.2.0.tar.gz", + "version": "0.2.0", } loader2 = ArchiveLoader(url, [artifact1, artifact2]) # implementation detail: share the storage in between visits loader2.storage = loader.storage stats2 = get_stats(loader2.storage) assert stats == stats2 # ensure we share the storage actual_load_status2 = loader2.load() - assert actual_load_status2['status'] == 'eventful' - assert actual_load_status2['snapshot_id'] is not None + assert actual_load_status2["status"] == "eventful" + assert actual_load_status2["snapshot_id"] is not None stats2 = get_stats(loader.storage) assert { - 'content': len(_expected_new_contents_first_visit) + 14, - 'directory': len(_expected_new_directories_first_visit) + 8, - 'origin': 1, - 'origin_visit': 1 + 1, - 'person': 1, - 'release': 0, - 'revision': len(_expected_new_revisions_first_visit) + 1, - 'skipped_content': 0, - 'snapshot': 1 + 1, + "content": len(_expected_new_contents_first_visit) + 14, + "directory": len(_expected_new_directories_first_visit) + 8, + "origin": 1, + "origin_visit": 1 + 1, + "person": 1, + "release": 0, + "revision": len(_expected_new_revisions_first_visit) + 1, + "skipped_content": 0, + "snapshot": 1 + 1, } == stats2 origin_visit2 = loader.storage.origin_visit_get_latest(url) - assert origin_visit2['status'] == 'full' - assert origin_visit2['type'] == 'tar' + assert origin_visit2["status"] == "full" + assert origin_visit2["type"] == "tar" urls = [ - m.url for m in requests_mock_datadir.request_history - if m.url.startswith('https://ftp.gnu.org') + m.url + for m in requests_mock_datadir.request_history + if m.url.startswith("https://ftp.gnu.org") ] # 1 artifact (2nd time no modification) + 1 new artifact assert len(urls) == 2 def test_2_visits_without_change_not_gnu(swh_config, requests_mock_datadir): """Load a project archive (not gnu) ends up with 1 snapshot """ - url = 'https://something.else.org/8sync/' + url = "https://something.else.org/8sync/" artifacts = [ # this is not a gnu artifact { - 'time': '1999-12-09T09:53:30+00:00', # it's also not a timestamp - 'sha256': 'd5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4', # noqa + "time": "1999-12-09T09:53:30+00:00", # it's also not a timestamp + "sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4", # noqa # keep a gnu artifact reference to avoid adding other test files - 'url': 'https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz', - 'length': 238466, - 'filename': '8sync-0.2.0.tar.gz', - 'version': '0.2.0', + "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz", + "length": 238466, + "filename": "8sync-0.2.0.tar.gz", + "version": "0.2.0", } ] # Here the loader defines the id_keys to use for existence in the snapshot # It's not the default archive loader 
which loader = ArchiveLoader( - url, artifacts=artifacts, identity_artifact_keys=[ - 'sha256', 'length', 'url']) + url, artifacts=artifacts, identity_artifact_keys=["sha256", "length", "url"] + ) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'tar' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "tar" actual_load_status2 = loader.load() - assert actual_load_status2['status'] == 'uneventful' - assert actual_load_status2['snapshot_id'] == actual_load_status[ - 'snapshot_id'] + assert actual_load_status2["status"] == "uneventful" + assert actual_load_status2["snapshot_id"] == actual_load_status["snapshot_id"] origin_visit2 = loader.storage.origin_visit_get_latest(url) - assert origin_visit2['status'] == 'full' - assert origin_visit2['type'] == 'tar' + assert origin_visit2["status"] == "full" + assert origin_visit2["type"] == "tar" urls = [ - m.url for m in requests_mock_datadir.request_history - if m.url.startswith('https://ftp.gnu.org') + m.url + for m in requests_mock_datadir.request_history + if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 diff --git a/swh/loader/package/archive/tests/test_tasks.py b/swh/loader/package/archive/tests/test_tasks.py index cb56c57..09fc47a 100644 --- a/swh/loader/package/archive/tests/test_tasks.py +++ b/swh/loader/package/archive/tests/test_tasks.py @@ -1,22 +1,19 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_archive_loader(mocker, swh_app, celery_session_worker, swh_config): - mock_loader = mocker.patch( - 'swh.loader.package.archive.loader.ArchiveLoader.load') - mock_loader.return_value = {'status': 'eventful'} + mock_loader = mocker.patch("swh.loader.package.archive.loader.ArchiveLoader.load") + mock_loader.return_value = {"status": "eventful"} res = swh_app.send_task( - 'swh.loader.package.archive.tasks.LoadArchive', - kwargs={ - 'url': 'some-url', - 'artifacts': [] - }) + "swh.loader.package.archive.tasks.LoadArchive", + kwargs={"url": "some-url", "artifacts": []}, + ) assert res res.wait() assert res.successful() - assert res.result == {'status': 'eventful'} + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/cran/__init__.py b/swh/loader/package/cran/__init__.py index cb8bd97..884db19 100644 --- a/swh/loader/package/cran/__init__.py +++ b/swh/loader/package/cran/__init__.py @@ -1,16 +1,17 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: """Register the current worker module's definition""" from .loader import CRANLoader + return { - 'task_modules': [f'{__name__}.tasks'], - 'loader': CRANLoader, + "task_modules": [f"{__name__}.tasks"], + "loader": CRANLoader, } diff --git a/swh/loader/package/cran/loader.py b/swh/loader/package/cran/loader.py index 10eb4e2..c3c29e0 100644 --- 
a/swh/loader/package/cran/loader.py +++ b/swh/loader/package/cran/loader.py @@ -1,194 +1,194 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import dateutil.parser import datetime import os import logging import re from datetime import timezone from os import path from typing import Any, Generator, Dict, List, Mapping, Optional, Tuple from debian.deb822 import Deb822 from swh.loader.package.loader import PackageLoader -from swh.loader.package.utils import ( - release_name, artifact_identity -) +from swh.loader.package.utils import release_name, artifact_identity from swh.model.model import ( - Person, TimestampWithTimezone, Sha1Git, Revision, RevisionType, + Person, + TimestampWithTimezone, + Sha1Git, + Revision, + RevisionType, ) logger = logging.getLogger(__name__) -DATE_PATTERN = re.compile(r'^(?P<year>\d{4})-(?P<month>\d{2})$') +DATE_PATTERN = re.compile(r"^(?P<year>\d{4})-(?P<month>\d{2})$") class CRANLoader(PackageLoader): - visit_type = 'cran' + visit_type = "cran" def __init__(self, url: str, artifacts: List[Dict]): """Loader constructor. Args: url: Origin url to retrieve cran artifact(s) from artifacts: List of associated artifact for the origin url """ super().__init__(url=url) # explicit what we consider the artifact identity - self.id_keys = ['url', 'version'] + self.id_keys = ["url", "version"] self.artifacts = artifacts def get_versions(self) -> List[str]: versions = [] for artifact in self.artifacts: - versions.append(artifact['version']) + versions.append(artifact["version"]) return versions def get_default_version(self) -> str: - return self.artifacts[-1]['version'] + return self.artifacts[-1]["version"] - def get_package_info(self, version: str) -> Generator[ - Tuple[str, Dict[str, Any]], None, None]: + def get_package_info( + self, version: str + ) -> Generator[Tuple[str, Dict[str, Any]], None, None]: for a_metadata in self.artifacts: - url = a_metadata['url'] - package_version = a_metadata['version'] + url = a_metadata["url"] + package_version = a_metadata["version"] if version == package_version: p_info = { - 'url': url, - 'filename': path.basename(url), - 'raw': a_metadata, + "url": url, + "filename": path.basename(url), + "raw": a_metadata, } yield release_name(version), p_info def resolve_revision_from( - self, known_artifacts: Mapping[bytes, Mapping], - artifact_metadata: Mapping[str, Any]) \ - -> Optional[bytes]: + self, + known_artifacts: Mapping[bytes, Mapping], + artifact_metadata: Mapping[str, Any], + ) -> Optional[bytes]: """Given known_artifacts per revision, try to determine the revision for artifact_metadata """ new_identity = artifact_identity(artifact_metadata, self.id_keys) for rev_id, known_artifact_meta in known_artifacts.items(): - logging.debug('known_artifact_meta: %s', known_artifact_meta) - known_artifact = known_artifact_meta['extrinsic']['raw'] + logging.debug("known_artifact_meta: %s", known_artifact_meta) + known_artifact = known_artifact_meta["extrinsic"]["raw"] known_identity = artifact_identity(known_artifact, self.id_keys) if new_identity == known_identity: return rev_id return None def build_revision( - self, a_metadata: Mapping[str, Any], - uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: + self, a_metadata: Mapping[str, Any], uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: # a_metadata is empty metadata =
extract_intrinsic_metadata(uncompressed_path) - date = parse_date(metadata.get('Date')) - author = Person.from_fullname(metadata.get('Maintainer', '').encode()) - version = metadata.get('Version', a_metadata['version']) + date = parse_date(metadata.get("Date")) + author = Person.from_fullname(metadata.get("Maintainer", "").encode()) + version = metadata.get("Version", a_metadata["version"]) return Revision( - message=version.encode('utf-8'), + message=version.encode("utf-8"), type=RevisionType.TAR, date=date, author=author, committer=author, committer_date=date, parents=[], directory=directory, synthetic=True, metadata={ - 'intrinsic': { - 'tool': 'DESCRIPTION', - 'raw': metadata, - }, - 'extrinsic': { - 'provider': self.url, - 'when': self.visit_date.isoformat(), - 'raw': a_metadata, + "intrinsic": {"tool": "DESCRIPTION", "raw": metadata,}, + "extrinsic": { + "provider": self.url, + "when": self.visit_date.isoformat(), + "raw": a_metadata, }, }, ) def parse_debian_control(filepath: str) -> Dict[str, Any]: """Parse debian control at filepath""" metadata: Dict = {} - logger.debug('Debian control file %s', filepath) - for paragraph in Deb822.iter_paragraphs(open(filepath, 'rb')): - logger.debug('paragraph: %s', paragraph) + logger.debug("Debian control file %s", filepath) + for paragraph in Deb822.iter_paragraphs(open(filepath, "rb")): + logger.debug("paragraph: %s", paragraph) metadata.update(**paragraph) - logger.debug('metadata parsed: %s', metadata) + logger.debug("metadata parsed: %s", metadata) return metadata def extract_intrinsic_metadata(dir_path: str) -> Dict[str, Any]: """Given an uncompressed path holding the DESCRIPTION file, returns a DESCRIPTION parsed structure as a dict. Cran origins describes their intrinsic metadata within a DESCRIPTION file at the root tree of a tarball. This DESCRIPTION uses a simple file format called DCF, the Debian control format. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the DESCRIPTION parsed structure as a dict (or empty dict if missing) """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] - description_path = os.path.join(dir_path, project_dirname, 'DESCRIPTION') + description_path = os.path.join(dir_path, project_dirname, "DESCRIPTION") if not os.path.exists(description_path): return {} return parse_debian_control(description_path) def parse_date(date: Optional[str]) -> Optional[TimestampWithTimezone]: """Parse a date into a datetime """ assert not date or isinstance(date, str) dt: Optional[datetime.datetime] = None if not date: return None try: specific_date = DATE_PATTERN.match(date) if specific_date: - year = int(specific_date.group('year')) - month = int(specific_date.group('month')) + year = int(specific_date.group("year")) + month = int(specific_date.group("month")) dt = datetime.datetime(year, month, 1) else: dt = dateutil.parser.parse(date) if not dt.tzinfo: # up for discussion the timezone needs to be set or # normalize_timestamp is not happy: ValueError: normalize_timestamp # received datetime without timezone: 2001-06-08 00:00:00 dt = dt.replace(tzinfo=timezone.utc) except Exception as e: - logger.warning('Fail to parse date %s. Reason: %s', (date, e)) + logger.warning("Fail to parse date %s. 
Reason: %s", (date, e)) if dt: return TimestampWithTimezone.from_datetime(dt) else: return None diff --git a/swh/loader/package/cran/tasks.py b/swh/loader/package/cran/tasks.py index cd6111a..5afe0aa 100644 --- a/swh/loader/package/cran/tasks.py +++ b/swh/loader/package/cran/tasks.py @@ -1,14 +1,14 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.cran.loader import CRANLoader -@shared_task(name=__name__ + '.LoadCRAN') +@shared_task(name=__name__ + ".LoadCRAN") def load_cran(url=None, artifacts=[]): """Load CRAN's artifacts""" return CRANLoader(url, artifacts).load() diff --git a/swh/loader/package/cran/tests/test_cran.py b/swh/loader/package/cran/tests/test_cran.py index 730a3a3..2ba5dc8 100644 --- a/swh/loader/package/cran/tests/test_cran.py +++ b/swh/loader/package/cran/tests/test_cran.py @@ -1,330 +1,328 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import pytest from datetime import datetime, timezone from dateutil.tz import tzlocal from os import path from swh.loader.package.cran.loader import ( - extract_intrinsic_metadata, CRANLoader, parse_date, - parse_debian_control + extract_intrinsic_metadata, + CRANLoader, + parse_date, + parse_debian_control, ) from swh.core.tarball import uncompress from swh.model.model import TimestampWithTimezone -from swh.loader.package.tests.common import ( - check_snapshot, get_stats -) +from swh.loader.package.tests.common import check_snapshot, get_stats def test_cran_parse_date(): data = [ # parsable, some have debatable results though - ('2001-June-08', - datetime(2001, 6, 8, 0, 0, tzinfo=timezone.utc)), - ('Tue Dec 27 15:06:08 PST 2011', - datetime(2011, 12, 27, 15, 6, 8, tzinfo=timezone.utc)), - ('8-14-2013', - datetime(2013, 8, 14, 0, 0, tzinfo=timezone.utc)), - ('2011-01', - datetime(2011, 1, 1, 0, 0, tzinfo=timezone.utc)), - ('201109', - datetime(2009, 11, 20, 0, 0, tzinfo=timezone.utc)), - ('04-12-2014', - datetime(2014, 4, 12, 0, 0, tzinfo=timezone.utc)), - ('2018-08-24, 10:40:10', - datetime(2018, 8, 24, 10, 40, 10, tzinfo=timezone.utc)), - ('2013-October-16', - datetime(2013, 10, 16, 0, 0, tzinfo=timezone.utc)), - ('Aug 23, 2013', - datetime(2013, 8, 23, 0, 0, tzinfo=timezone.utc)), - ('27-11-2014', - datetime(2014, 11, 27, 0, 0, tzinfo=timezone.utc)), - ('2019-09-26,', - datetime(2019, 9, 26, 0, 0, tzinfo=timezone.utc)), - ('9/25/2014', - datetime(2014, 9, 25, 0, 0, tzinfo=timezone.utc)), - ('Fri Jun 27 17:23:53 2014', - datetime(2014, 6, 27, 17, 23, 53, tzinfo=timezone.utc)), - ('28-04-2014', - datetime(2014, 4, 28, 0, 0, tzinfo=timezone.utc)), - ('04-14-2014', - datetime(2014, 4, 14, 0, 0, tzinfo=timezone.utc)), - ('2019-05-08 14:17:31 UTC', - datetime(2019, 5, 8, 14, 17, 31, tzinfo=timezone.utc)), - ('Wed May 21 13:50:39 CEST 2014', - datetime(2014, 5, 21, 13, 50, 39, tzinfo=tzlocal())), - ('2018-04-10 00:01:04 KST', - datetime(2018, 4, 10, 0, 1, 4, tzinfo=timezone.utc)), - ('2019-08-25 10:45', - datetime(2019, 8, 25, 10, 45, tzinfo=timezone.utc)), - ('March 9, 2015', - datetime(2015, 3, 9, 0, 0, tzinfo=timezone.utc)), - ('Aug. 
18, 2012', - datetime(2012, 8, 18, 0, 0, tzinfo=timezone.utc)), - ('2014-Dec-17', - datetime(2014, 12, 17, 0, 0, tzinfo=timezone.utc)), - ('March 01, 2013', - datetime(2013, 3, 1, 0, 0, tzinfo=timezone.utc)), - ('2017-04-08.', - datetime(2017, 4, 8, 0, 0, tzinfo=timezone.utc)), - ('2014-Apr-22', - datetime(2014, 4, 22, 0, 0, tzinfo=timezone.utc)), - ('Mon Jan 12 19:54:04 2015', - datetime(2015, 1, 12, 19, 54, 4, tzinfo=timezone.utc)), - ('May 22, 2014', - datetime(2014, 5, 22, 0, 0, tzinfo=timezone.utc)), - ('2014-08-12 09:55:10 EDT', - datetime(2014, 8, 12, 9, 55, 10, tzinfo=timezone.utc)), + ("2001-June-08", datetime(2001, 6, 8, 0, 0, tzinfo=timezone.utc)), + ( + "Tue Dec 27 15:06:08 PST 2011", + datetime(2011, 12, 27, 15, 6, 8, tzinfo=timezone.utc), + ), + ("8-14-2013", datetime(2013, 8, 14, 0, 0, tzinfo=timezone.utc)), + ("2011-01", datetime(2011, 1, 1, 0, 0, tzinfo=timezone.utc)), + ("201109", datetime(2009, 11, 20, 0, 0, tzinfo=timezone.utc)), + ("04-12-2014", datetime(2014, 4, 12, 0, 0, tzinfo=timezone.utc)), + ( + "2018-08-24, 10:40:10", + datetime(2018, 8, 24, 10, 40, 10, tzinfo=timezone.utc), + ), + ("2013-October-16", datetime(2013, 10, 16, 0, 0, tzinfo=timezone.utc)), + ("Aug 23, 2013", datetime(2013, 8, 23, 0, 0, tzinfo=timezone.utc)), + ("27-11-2014", datetime(2014, 11, 27, 0, 0, tzinfo=timezone.utc)), + ("2019-09-26,", datetime(2019, 9, 26, 0, 0, tzinfo=timezone.utc)), + ("9/25/2014", datetime(2014, 9, 25, 0, 0, tzinfo=timezone.utc)), + ( + "Fri Jun 27 17:23:53 2014", + datetime(2014, 6, 27, 17, 23, 53, tzinfo=timezone.utc), + ), + ("28-04-2014", datetime(2014, 4, 28, 0, 0, tzinfo=timezone.utc)), + ("04-14-2014", datetime(2014, 4, 14, 0, 0, tzinfo=timezone.utc)), + ( + "2019-05-08 14:17:31 UTC", + datetime(2019, 5, 8, 14, 17, 31, tzinfo=timezone.utc), + ), + ( + "Wed May 21 13:50:39 CEST 2014", + datetime(2014, 5, 21, 13, 50, 39, tzinfo=tzlocal()), + ), + ( + "2018-04-10 00:01:04 KST", + datetime(2018, 4, 10, 0, 1, 4, tzinfo=timezone.utc), + ), + ("2019-08-25 10:45", datetime(2019, 8, 25, 10, 45, tzinfo=timezone.utc)), + ("March 9, 2015", datetime(2015, 3, 9, 0, 0, tzinfo=timezone.utc)), + ("Aug. 
18, 2012", datetime(2012, 8, 18, 0, 0, tzinfo=timezone.utc)), + ("2014-Dec-17", datetime(2014, 12, 17, 0, 0, tzinfo=timezone.utc)), + ("March 01, 2013", datetime(2013, 3, 1, 0, 0, tzinfo=timezone.utc)), + ("2017-04-08.", datetime(2017, 4, 8, 0, 0, tzinfo=timezone.utc)), + ("2014-Apr-22", datetime(2014, 4, 22, 0, 0, tzinfo=timezone.utc)), + ( + "Mon Jan 12 19:54:04 2015", + datetime(2015, 1, 12, 19, 54, 4, tzinfo=timezone.utc), + ), + ("May 22, 2014", datetime(2014, 5, 22, 0, 0, tzinfo=timezone.utc)), + ( + "2014-08-12 09:55:10 EDT", + datetime(2014, 8, 12, 9, 55, 10, tzinfo=timezone.utc), + ), # unparsable - ('Fabruary 21, 2012', None), + ("Fabruary 21, 2012", None), ('2019-05-28"', None), - ('2017-03-01 today', None), - ('2016-11-0110.1093/icesjms/fsw182', None), - ('2019-07-010', None), - ('2015-02.23', None), - ('20013-12-30', None), - ('2016-08-017', None), - ('2019-02-07l', None), - ('2018-05-010', None), - ('2019-09-27 KST', None), - ('$Date$', None), - ('2019-09-27 KST', None), - ('2019-06-22 $Date$', None), - ('$Date: 2013-01-18 12:49:03 -0600 (Fri, 18 Jan 2013) $', None), - ('2015-7-013', None), - ('2018-05-023', None), - ("Check NEWS file for changes: news(package='simSummary')", None) + ("2017-03-01 today", None), + ("2016-11-0110.1093/icesjms/fsw182", None), + ("2019-07-010", None), + ("2015-02.23", None), + ("20013-12-30", None), + ("2016-08-017", None), + ("2019-02-07l", None), + ("2018-05-010", None), + ("2019-09-27 KST", None), + ("$Date$", None), + ("2019-09-27 KST", None), + ("2019-06-22 $Date$", None), + ("$Date: 2013-01-18 12:49:03 -0600 (Fri, 18 Jan 2013) $", None), + ("2015-7-013", None), + ("2018-05-023", None), + ("Check NEWS file for changes: news(package='simSummary')", None), ] for date, expected_date in data: actual_tstz = parse_date(date) if expected_date is None: assert actual_tstz is None, date else: expected_tstz = TimestampWithTimezone.from_datetime(expected_date) assert actual_tstz == expected_tstz, date @pytest.mark.fs def test_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) # sample url # https://cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz # noqa archive_path = path.join( - datadir, 'https_cran.r-project.org', - 'src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz') + datadir, + "https_cran.r-project.org", + "src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz", + ) uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { - 'Package': 'KernSmooth', - 'Priority': 'recommended', - 'Version': '2.22-6', - 'Date': '2001-June-08', - 'Title': 'Functions for kernel smoothing for Wand & Jones (1995)', - 'Author': 'S original by Matt Wand.\n\tR port by Brian Ripley .', # noqa - 'Maintainer': 'Brian Ripley ', - 'Description': 'functions for kernel smoothing (and density estimation)\n corresponding to the book: \n Wand, M.P. and Jones, M.C. 
(1995) "Kernel Smoothing".', # noqa - 'License': 'Unlimited use and distribution (see LICENCE).', - 'URL': 'http://www.biostat.harvard.edu/~mwand' + "Package": "KernSmooth", + "Priority": "recommended", + "Version": "2.22-6", + "Date": "2001-June-08", + "Title": "Functions for kernel smoothing for Wand & Jones (1995)", + "Author": "S original by Matt Wand.\n\tR port by Brian Ripley .", # noqa + "Maintainer": "Brian Ripley ", + "Description": 'functions for kernel smoothing (and density estimation)\n corresponding to the book: \n Wand, M.P. and Jones, M.C. (1995) "Kernel Smoothing".', # noqa + "License": "Unlimited use and distribution (see LICENCE).", + "URL": "http://www.biostat.harvard.edu/~mwand", } assert actual_metadata == expected_metadata @pytest.mark.fs def test_extract_intrinsic_metadata_failures(tmp_path): """Parsing inexistent path/archive/PKG-INFO yield None""" # inexistent first level path - assert extract_intrinsic_metadata('/something-inexistent') == {} + assert extract_intrinsic_metadata("/something-inexistent") == {} # inexistent second level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # inexistent PKG-INFO within second level path - existing_path_no_pkginfo = str(tmp_path / 'something') + existing_path_no_pkginfo = str(tmp_path / "something") os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} def test_cran_one_visit(swh_config, requests_mock_datadir): - version = '2.22-6' - base_url = 'https://cran.r-project.org' - origin_url = f'{base_url}/Packages/Recommended_KernSmooth/index.html' - artifact_url = f'{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz' # noqa - loader = CRANLoader(origin_url, artifacts=[{ - 'url': artifact_url, - 'version': version, - }]) + version = "2.22-6" + base_url = "https://cran.r-project.org" + origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html" + artifact_url = ( + f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa + ) + loader = CRANLoader( + origin_url, artifacts=[{"url": artifact_url, "version": version,}] + ) actual_load_status = loader.load() - expected_snapshot_id = '920adcccc78aaeedd3cfa4459dd900d8c3431a21' + expected_snapshot_id = "920adcccc78aaeedd3cfa4459dd900d8c3431a21" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': { - 'HEAD': {'target': f'releases/{version}', 'target_type': 'alias'}, - f'releases/{version}': { - 'target': '42bdb16facd5140424359c8ce89a28ecfa1ce603', - 'target_type': 'revision' - } - } + "id": expected_snapshot_id, + "branches": { + "HEAD": {"target": f"releases/{version}", "target_type": "alias"}, + f"releases/{version}": { + "target": "42bdb16facd5140424359c8ce89a28ecfa1ce603", + "target_type": "revision", + }, + }, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(origin_url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'cran' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "cran" visit_stats = get_stats(loader.storage) assert { - 'content': 33, - 'directory': 7, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 1, - 'skipped_content': 0, - 'snapshot': 1 + "content": 33, + "directory": 7, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + 
"revision": 1, + "skipped_content": 0, + "snapshot": 1, } == visit_stats urls = [ - m.url for m in requests_mock_datadir.request_history + m.url + for m in requests_mock_datadir.request_history if m.url.startswith(base_url) ] # visited each artifact once across 2 visits assert len(urls) == 1 -def test_cran_2_visits_same_origin( - swh_config, requests_mock_datadir): +def test_cran_2_visits_same_origin(swh_config, requests_mock_datadir): """Multiple visits on the same origin, only 1 archive fetch""" - version = '2.22-6' - base_url = 'https://cran.r-project.org' - origin_url = f'{base_url}/Packages/Recommended_KernSmooth/index.html' - artifact_url = f'{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz' # noqa - loader = CRANLoader(origin_url, artifacts=[{ - 'url': artifact_url, - 'version': version - }]) + version = "2.22-6" + base_url = "https://cran.r-project.org" + origin_url = f"{base_url}/Packages/Recommended_KernSmooth/index.html" + artifact_url = ( + f"{base_url}/src_contrib_1.4.0_Recommended_KernSmooth_{version}.tar.gz" # noqa + ) + loader = CRANLoader( + origin_url, artifacts=[{"url": artifact_url, "version": version}] + ) # first visit actual_load_status = loader.load() - expected_snapshot_id = '920adcccc78aaeedd3cfa4459dd900d8c3431a21' + expected_snapshot_id = "920adcccc78aaeedd3cfa4459dd900d8c3431a21" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': { - 'HEAD': {'target': f'releases/{version}', 'target_type': 'alias'}, - f'releases/{version}': { - 'target': '42bdb16facd5140424359c8ce89a28ecfa1ce603', - 'target_type': 'revision' - } - } + "id": expected_snapshot_id, + "branches": { + "HEAD": {"target": f"releases/{version}", "target_type": "alias"}, + f"releases/{version}": { + "target": "42bdb16facd5140424359c8ce89a28ecfa1ce603", + "target_type": "revision", + }, + }, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(origin_url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'cran' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "cran" visit_stats = get_stats(loader.storage) assert { - 'content': 33, - 'directory': 7, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 1, - 'skipped_content': 0, - 'snapshot': 1 + "content": 33, + "directory": 7, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 1, + "skipped_content": 0, + "snapshot": 1, } == visit_stats # second visit actual_load_status2 = loader.load() assert actual_load_status2 == { - 'status': 'uneventful', - 'snapshot_id': expected_snapshot_id + "status": "uneventful", + "snapshot_id": expected_snapshot_id, } origin_visit2 = loader.storage.origin_visit_get_latest(origin_url) - assert origin_visit2['status'] == 'full' - assert origin_visit2['type'] == 'cran' + assert origin_visit2["status"] == "full" + assert origin_visit2["type"] == "cran" visit_stats2 = get_stats(loader.storage) - visit_stats['origin_visit'] += 1 - assert visit_stats2 == visit_stats, 'same stats as 1st visit, +1 visit' + visit_stats["origin_visit"] += 1 + assert visit_stats2 == visit_stats, "same stats as 1st visit, +1 visit" urls = [ - m.url for m in requests_mock_datadir.request_history + m.url + for m in requests_mock_datadir.request_history if m.url.startswith(base_url) ] - assert len(urls) == 1, 
'visited one time artifact url (across 2 visits)' + assert len(urls) == 1, "visited one time artifact url (across 2 visits)" def test_parse_debian_control(datadir): - description_file = os.path.join(datadir, 'description', 'acepack') + description_file = os.path.join(datadir, "description", "acepack") actual_metadata = parse_debian_control(description_file) assert actual_metadata == { - 'Package': 'acepack', - 'Maintainer': 'Shawn Garbett', - 'Version': '1.4.1', - 'Author': 'Phil Spector, Jerome Friedman, Robert Tibshirani...', - 'Description': 'Two nonparametric methods for multiple regression...', - 'Title': 'ACE & AVAS 4 Selecting Multiple Regression Transformations', - 'License': 'MIT + file LICENSE', - 'Suggests': 'testthat', - 'Packaged': '2016-10-28 15:38:59 UTC; garbetsp', - 'Repository': 'CRAN', - 'Date/Publication': '2016-10-29 00:11:52', - 'NeedsCompilation': 'yes' + "Package": "acepack", + "Maintainer": "Shawn Garbett", + "Version": "1.4.1", + "Author": "Phil Spector, Jerome Friedman, Robert Tibshirani...", + "Description": "Two nonparametric methods for multiple regression...", + "Title": "ACE & AVAS 4 Selecting Multiple Regression Transformations", + "License": "MIT + file LICENSE", + "Suggests": "testthat", + "Packaged": "2016-10-28 15:38:59 UTC; garbetsp", + "Repository": "CRAN", + "Date/Publication": "2016-10-29 00:11:52", + "NeedsCompilation": "yes", } def test_parse_debian_control_unicode_issue(datadir): # iso-8859-1 caused failure, now fixed - description_file = os.path.join( - datadir, 'description', 'KnownBR' - ) + description_file = os.path.join(datadir, "description", "KnownBR") actual_metadata = parse_debian_control(description_file) assert actual_metadata == { - 'Package': 'KnowBR', - 'Version': '2.0', - 'Title': '''Discriminating Well Surveyed Spatial Units from Exhaustive - Biodiversity Databases''', - 'Author': 'Cástor Guisande González and Jorge M. Lobo', - 'Maintainer': 'Cástor Guisande González ', - 'Description': - 'It uses species accumulation curves and diverse estimators...', - 'License': 'GPL (>= 2)', - 'Encoding': 'latin1', - 'Depends': 'R (>= 3.0), fossil, mgcv, plotrix, sp, vegan', - 'Suggests': 'raster, rgbif', - 'NeedsCompilation': 'no', - 'Packaged': '2019-01-30 13:27:29 UTC; castor', - 'Repository': 'CRAN', - 'Date/Publication': '2019-01-31 20:53:50 UTC' + "Package": "KnowBR", + "Version": "2.0", + "Title": """Discriminating Well Surveyed Spatial Units from Exhaustive + Biodiversity Databases""", + "Author": "Cástor Guisande González and Jorge M. 
Lobo", + "Maintainer": "Cástor Guisande González ", + "Description": "It uses species accumulation curves and diverse estimators...", + "License": "GPL (>= 2)", + "Encoding": "latin1", + "Depends": "R (>= 3.0), fossil, mgcv, plotrix, sp, vegan", + "Suggests": "raster, rgbif", + "NeedsCompilation": "no", + "Packaged": "2019-01-30 13:27:29 UTC; castor", + "Repository": "CRAN", + "Date/Publication": "2019-01-31 20:53:50 UTC", } diff --git a/swh/loader/package/cran/tests/test_tasks.py b/swh/loader/package/cran/tests/test_tasks.py index 28edb9a..aa4ea00 100644 --- a/swh/loader/package/cran/tests/test_tasks.py +++ b/swh/loader/package/cran/tests/test_tasks.py @@ -1,26 +1,22 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_cran_loader(mocker, swh_app, celery_session_worker, swh_config): - mock_loader = mocker.patch( - 'swh.loader.package.cran.loader.CRANLoader.load') - mock_loader.return_value = {'status': 'eventful'} + mock_loader = mocker.patch("swh.loader.package.cran.loader.CRANLoader.load") + mock_loader.return_value = {"status": "eventful"} res = swh_app.send_task( - 'swh.loader.package.cran.tasks.LoadCRAN', + "swh.loader.package.cran.tasks.LoadCRAN", kwargs={ - 'url': 'some-url', - 'artifacts': { - 'version': '1.2.3', - 'url': 'artifact-url' - } - } + "url": "some-url", + "artifacts": {"version": "1.2.3", "url": "artifact-url"}, + }, ) assert res res.wait() assert res.successful() - assert res.result == {'status': 'eventful'} + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/debian/__init__.py b/swh/loader/package/debian/__init__.py index a28f3c9..15234a1 100644 --- a/swh/loader/package/debian/__init__.py +++ b/swh/loader/package/debian/__init__.py @@ -1,16 +1,17 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: """Register the current worker module's definition""" from .loader import DebianLoader + return { - 'task_modules': [f'{__name__}.tasks'], - 'loader': DebianLoader, + "task_modules": [f"{__name__}.tasks"], + "loader": DebianLoader, } diff --git a/swh/loader/package/debian/loader.py b/swh/loader/package/debian/loader.py index c68f709..fc1e2e4 100644 --- a/swh/loader/package/debian/loader.py +++ b/swh/loader/package/debian/loader.py @@ -1,410 +1,418 @@ # Copyright (C) 2017-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import email.utils import logging from os import path import re import subprocess from dateutil.parser import parse as parse_date from debian.changelog import Changelog from debian.deb822 import Dsc -from typing import ( - Any, Generator, List, Mapping, Optional, Sequence, Tuple -) +from typing import Any, Generator, List, Mapping, Optional, Sequence, Tuple from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import download, release_name from swh.model.model import ( - Sha1Git, Person, Revision, RevisionType, TimestampWithTimezone + Sha1Git, + Person, + Revision, 
+ RevisionType, + TimestampWithTimezone, ) logger = logging.getLogger(__name__) -UPLOADERS_SPLIT = re.compile(r'(?<=\>)\s*,\s*') +UPLOADERS_SPLIT = re.compile(r"(?<=\>)\s*,\s*") class DebianLoader(PackageLoader): """Load debian origins into swh archive. """ - visit_type = 'deb' + + visit_type = "deb" def __init__(self, url: str, date: str, packages: Mapping[str, Any]): """Debian Loader implementation. Args: url: Origin url (e.g. deb://Debian/packages/cicero) date: Ignored packages: versioned packages and associated artifacts, example:: { 'stretch/contrib/0.7.2-3': { 'name': 'cicero', 'version': '0.7.2-3' 'files': { 'cicero_0.7.2-3.diff.gz': { 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', 'name': 'cicero_0.7.2-3.diff.gz', 'sha256': 'f039c9642fe15c75bed5254315e2a29f...', 'size': 3964, 'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz', }, 'cicero_0.7.2-3.dsc': { 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', 'name': 'cicero_0.7.2-3.dsc', 'sha256': '35b7f1048010c67adfd8d70e4961aefb...', 'size': 1864, 'uri': 'http://d.d.o/cicero_0.7.2-3.dsc', }, 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbab...', 'size': 96527, 'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz', } }, }, # ... } """ super().__init__(url=url) self.packages = packages def get_versions(self) -> Sequence[str]: """Returns the keys of the packages input (e.g. stretch/contrib/0.7.2-3, etc...) """ return list(self.packages.keys()) - def get_package_info(self, version: str) -> Generator[ - Tuple[str, Mapping[str, Any]], None, None]: + def get_package_info( + self, version: str + ) -> Generator[Tuple[str, Mapping[str, Any]], None, None]: meta = self.packages[version] p_info = meta.copy() - p_info['raw'] = meta + p_info["raw"] = meta yield release_name(version), p_info def resolve_revision_from( - self, known_package_artifacts: Mapping, - artifact_metadata: Mapping) \ - -> Optional[bytes]: - return resolve_revision_from( - known_package_artifacts, artifact_metadata) - - def download_package(self, p_info: Mapping[str, Any], - tmpdir: str) -> List[Tuple[str, Mapping]]: + self, known_package_artifacts: Mapping, artifact_metadata: Mapping + ) -> Optional[bytes]: + return resolve_revision_from(known_package_artifacts, artifact_metadata) + + def download_package( + self, p_info: Mapping[str, Any], tmpdir: str + ) -> List[Tuple[str, Mapping]]: """Contrary to other package loaders (1 package, 1 artifact), `a_metadata` represents the package's datafiles set to fetch: - .orig.tar.gz - .dsc - .diff.gz This is delegated to the `download_package` function. 
""" all_hashes = download_package(p_info, tmpdir) - logger.debug('all_hashes: %s', all_hashes) + logger.debug("all_hashes: %s", all_hashes) res = [] for hashes in all_hashes.values(): res.append((tmpdir, hashes)) - logger.debug('res: %s', res) + logger.debug("res: %s", res) return res - def uncompress(self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], - dest: str) -> str: - logger.debug('dl_artifacts: %s', dl_artifacts) + def uncompress( + self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str + ) -> str: + logger.debug("dl_artifacts: %s", dl_artifacts) return extract_package(dl_artifacts, dest=dest) def build_revision( - self, a_metadata: Mapping[str, Any], uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: + self, a_metadata: Mapping[str, Any], uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: dsc_url, dsc_name = dsc_information(a_metadata) if not dsc_name: - raise ValueError( - 'dsc name for url %s should not be None' % dsc_url) + raise ValueError("dsc name for url %s should not be None" % dsc_url) dsc_path = path.join(path.dirname(uncompressed_path), dsc_name) - i_metadata = get_package_metadata( - a_metadata, dsc_path, uncompressed_path) + i_metadata = get_package_metadata(a_metadata, dsc_path, uncompressed_path) - logger.debug('i_metadata: %s', i_metadata) - logger.debug('a_metadata: %s', a_metadata) + logger.debug("i_metadata: %s", i_metadata) + logger.debug("a_metadata: %s", a_metadata) - msg = 'Synthetic revision for Debian source package %s version %s' % ( - a_metadata['name'], a_metadata['version']) + msg = "Synthetic revision for Debian source package %s version %s" % ( + a_metadata["name"], + a_metadata["version"], + ) - date = TimestampWithTimezone.from_iso8601( - i_metadata['changelog']['date']) - author = prepare_person(i_metadata['changelog']['person']) + date = TimestampWithTimezone.from_iso8601(i_metadata["changelog"]["date"]) + author = prepare_person(i_metadata["changelog"]["person"]) # inspired from swh.loader.debian.converters.package_metadata_to_revision # noqa return Revision( type=RevisionType.DSC, - message=msg.encode('utf-8'), + message=msg.encode("utf-8"), author=author, date=date, committer=author, committer_date=date, parents=[], directory=directory, synthetic=True, metadata={ - 'intrinsic': { - 'tool': 'dsc', - 'raw': i_metadata, - }, - 'extrinsic': { - 'provider': dsc_url, - 'when': self.visit_date.isoformat(), - 'raw': a_metadata, + "intrinsic": {"tool": "dsc", "raw": i_metadata,}, + "extrinsic": { + "provider": dsc_url, + "when": self.visit_date.isoformat(), + "raw": a_metadata, }, }, ) -def resolve_revision_from(known_package_artifacts: Mapping, - artifact_metadata: Mapping) -> Optional[bytes]: +def resolve_revision_from( + known_package_artifacts: Mapping, artifact_metadata: Mapping +) -> Optional[bytes]: """Given known package artifacts (resolved from the snapshot of previous visit) and the new artifact to fetch, try to solve the corresponding revision. 
""" - artifacts_to_fetch = artifact_metadata.get('files') + artifacts_to_fetch = artifact_metadata.get("files") if not artifacts_to_fetch: return None def to_set(data): - return frozenset([ - (name, meta['sha256'], meta['size']) - for name, meta in data['files'].items() - ]) + return frozenset( + [ + (name, meta["sha256"], meta["size"]) + for name, meta in data["files"].items() + ] + ) # what we want to avoid downloading back if we have them already set_new_artifacts = to_set(artifact_metadata) known_artifacts_revision_id = {} for rev_id, known_artifacts in known_package_artifacts.items(): - extrinsic = known_artifacts.get('extrinsic') + extrinsic = known_artifacts.get("extrinsic") if not extrinsic: continue - s = to_set(extrinsic['raw']) + s = to_set(extrinsic["raw"]) known_artifacts_revision_id[s] = rev_id return known_artifacts_revision_id.get(set_new_artifacts) def uid_to_person(uid: str) -> Mapping[str, str]: """Convert an uid to a person suitable for insertion. Args: uid: an uid of the form "Name " Returns: a dictionary with the following keys: - name: the name associated to the uid - email: the mail associated to the uid - fullname: the actual uid input """ - logger.debug('uid: %s', uid) + logger.debug("uid: %s", uid) ret = { - 'name': '', - 'email': '', - 'fullname': uid, + "name": "", + "email": "", + "fullname": uid, } name, mail = email.utils.parseaddr(uid) if name and email: - ret['name'] = name - ret['email'] = mail + ret["name"] = name + ret["email"] = mail else: - ret['name'] = uid + ret["name"] = uid return ret def prepare_person(person: Mapping[str, str]) -> Person: """Prepare person for swh serialization... Args: A person dict Returns: A person ready for storage """ - return Person.from_dict({ - key: value.encode('utf-8') - for (key, value) in person.items() - }) + return Person.from_dict( + {key: value.encode("utf-8") for (key, value) in person.items()} + ) -def download_package( - package: Mapping[str, Any], tmpdir: Any) -> Mapping[str, Any]: +def download_package(package: Mapping[str, Any], tmpdir: Any) -> Mapping[str, Any]: """Fetch a source package in a temporary directory and check the checksums for all files. Args: package: Dict defining the set of files representing a debian package tmpdir: Where to download and extract the files to ingest Returns: Dict of swh hashes per filename key """ all_hashes = {} - for filename, fileinfo in package['files'].items(): - uri = fileinfo['uri'] - logger.debug('fileinfo: %s', fileinfo) - extrinsic_hashes = {'sha256': fileinfo['sha256']} - logger.debug('extrinsic_hashes(%s): %s', filename, extrinsic_hashes) - filepath, hashes = download(uri, dest=tmpdir, filename=filename, - hashes=extrinsic_hashes) + for filename, fileinfo in package["files"].items(): + uri = fileinfo["uri"] + logger.debug("fileinfo: %s", fileinfo) + extrinsic_hashes = {"sha256": fileinfo["sha256"]} + logger.debug("extrinsic_hashes(%s): %s", filename, extrinsic_hashes) + filepath, hashes = download( + uri, dest=tmpdir, filename=filename, hashes=extrinsic_hashes + ) all_hashes[filename] = hashes - logger.debug('all_hashes: %s', all_hashes) + logger.debug("all_hashes: %s", all_hashes) return all_hashes -def dsc_information(package: Mapping[str, Any]) -> Tuple[ - Optional[str], Optional[str]]: +def dsc_information(package: Mapping[str, Any]) -> Tuple[Optional[str], Optional[str]]: """Retrieve dsc information from a package. 
Args: package: Package metadata information Returns: Tuple of dsc file's uri, dsc's full disk path """ dsc_name = None dsc_url = None - for filename, fileinfo in package['files'].items(): - if filename.endswith('.dsc'): + for filename, fileinfo in package["files"].items(): + if filename.endswith(".dsc"): if dsc_name: raise ValueError( - 'Package %s_%s references several dsc files.' % - (package['name'], package['version']) + "Package %s_%s references several dsc files." + % (package["name"], package["version"]) ) - dsc_url = fileinfo['uri'] + dsc_url = fileinfo["uri"] dsc_name = filename return dsc_url, dsc_name def extract_package(dl_artifacts: List[Tuple[str, Mapping]], dest: str) -> str: """Extract a Debian source package to a given directory. Note that after extraction the target directory will be the root of the extracted package, rather than containing it. Args: package: package information dictionary dest: directory where the package files are stored Returns: Package extraction directory """ a_path = dl_artifacts[0][0] - logger.debug('dl_artifacts: %s', dl_artifacts) + logger.debug("dl_artifacts: %s", dl_artifacts) for _, hashes in dl_artifacts: - logger.debug('hashes: %s', hashes) - filename = hashes['filename'] - if filename.endswith('.dsc'): + logger.debug("hashes: %s", hashes) + filename = hashes["filename"] + if filename.endswith(".dsc"): dsc_name = filename break dsc_path = path.join(a_path, dsc_name) - destdir = path.join(dest, 'extracted') - logfile = path.join(dest, 'extract.log') - logger.debug('extract Debian source package %s in %s' % - (dsc_path, destdir), extra={ - 'swh_type': 'deb_extract', - 'swh_dsc': dsc_path, - 'swh_destdir': destdir, - }) - - cmd = ['dpkg-source', - '--no-copy', '--no-check', - '--ignore-bad-version', - '-x', dsc_path, - destdir] + destdir = path.join(dest, "extracted") + logfile = path.join(dest, "extract.log") + logger.debug( + "extract Debian source package %s in %s" % (dsc_path, destdir), + extra={"swh_type": "deb_extract", "swh_dsc": dsc_path, "swh_destdir": destdir,}, + ) + + cmd = [ + "dpkg-source", + "--no-copy", + "--no-check", + "--ignore-bad-version", + "-x", + dsc_path, + destdir, + ] try: - with open(logfile, 'w') as stdout: + with open(logfile, "w") as stdout: subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: - logdata = open(logfile, 'r').read() - raise ValueError('dpkg-source exited with code %s: %s' % - (e.returncode, logdata)) from None + logdata = open(logfile, "r").read() + raise ValueError( + "dpkg-source exited with code %s: %s" % (e.returncode, logdata) + ) from None return destdir -def get_package_metadata(package: Mapping[str, Any], dsc_path: str, - extracted_path: str) -> Mapping[str, Any]: +def get_package_metadata( + package: Mapping[str, Any], dsc_path: str, extracted_path: str +) -> Mapping[str, Any]: """Get the package metadata from the source package at dsc_path, extracted in extracted_path. 
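    A sketch of the returned structure (values taken from the cicero example
    exercised in the tests; maintainer entries follow the same shape as
    changelog['person'])::

        {
            'name': 'cicero',
            'version': '0.7.2-3',
            'changelog': {
                'person': {'name': '...', 'email': '...', 'fullname': '...'},
                'date': '2014-10-19T16:52:35+02:00',
                'history': [('cicero', '0.7.2-2'), ('cicero', '0.7.2-1'), ...],
            },
            'maintainers': [...],
        }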
Args: package: the package dict (with a dsc_path key) dsc_path: path to the package's dsc file extracted_path: the path where the package got extracted Returns: dict: a dictionary with the following keys: - history: list of (package_name, package_version) tuples parsed from the package changelog """ - with open(dsc_path, 'rb') as dsc: + with open(dsc_path, "rb") as dsc: parsed_dsc = Dsc(dsc) # Parse the changelog to retrieve the rest of the package information - changelog_path = path.join(extracted_path, 'debian/changelog') - with open(changelog_path, 'rb') as changelog: + changelog_path = path.join(extracted_path, "debian/changelog") + with open(changelog_path, "rb") as changelog: try: parsed_changelog = Changelog(changelog) except UnicodeDecodeError: - logger.warning('Unknown encoding for changelog %s,' - ' falling back to iso' % - changelog_path, extra={ - 'swh_type': 'deb_changelog_encoding', - 'swh_name': package['name'], - 'swh_version': str(package['version']), - 'swh_changelog': changelog_path, - }) + logger.warning( + "Unknown encoding for changelog %s," + " falling back to iso" % changelog_path, + extra={ + "swh_type": "deb_changelog_encoding", + "swh_name": package["name"], + "swh_version": str(package["version"]), + "swh_changelog": changelog_path, + }, + ) # need to reset as Changelog scrolls to the end of the file changelog.seek(0) - parsed_changelog = Changelog(changelog, encoding='iso-8859-15') + parsed_changelog = Changelog(changelog, encoding="iso-8859-15") package_info = { - 'name': package['name'], - 'version': str(package['version']), - 'changelog': { - 'person': uid_to_person(parsed_changelog.author), - 'date': parse_date(parsed_changelog.date).isoformat(), - 'history': [(block.package, str(block.version)) - for block in parsed_changelog][1:], - } + "name": package["name"], + "version": str(package["version"]), + "changelog": { + "person": uid_to_person(parsed_changelog.author), + "date": parse_date(parsed_changelog.date).isoformat(), + "history": [ + (block.package, str(block.version)) for block in parsed_changelog + ][1:], + }, } maintainers = [ - uid_to_person(parsed_dsc['Maintainer']), + uid_to_person(parsed_dsc["Maintainer"]), ] maintainers.extend( uid_to_person(person) - for person in UPLOADERS_SPLIT.split(parsed_dsc.get('Uploaders', '')) + for person in UPLOADERS_SPLIT.split(parsed_dsc.get("Uploaders", "")) ) - package_info['maintainers'] = maintainers + package_info["maintainers"] = maintainers return package_info diff --git a/swh/loader/package/debian/tasks.py b/swh/loader/package/debian/tasks.py index 212ac4c..1b09e22 100644 --- a/swh/loader/package/debian/tasks.py +++ b/swh/loader/package/debian/tasks.py @@ -1,14 +1,14 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.debian.loader import DebianLoader -@shared_task(name=__name__ + '.LoadDebian') +@shared_task(name=__name__ + ".LoadDebian") def load_deb_package(*, url, date, packages): """Load Debian package""" return DebianLoader(url, date, packages).load() diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py index f48544b..8ef8744 100644 --- a/swh/loader/package/debian/tests/test_debian.py +++ b/swh/loader/package/debian/tests/test_debian.py @@ -1,464 +1,466 @@ # Copyright (C) 2019-2020 The Software 
Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import logging import pytest import random from os import path from swh.loader.package.debian.loader import ( - DebianLoader, download_package, dsc_information, uid_to_person, - prepare_person, get_package_metadata, extract_package + DebianLoader, + download_package, + dsc_information, + uid_to_person, + prepare_person, + get_package_metadata, + extract_package, ) from swh.loader.package.tests.common import check_snapshot, get_stats from swh.loader.package.debian.loader import resolve_revision_from from swh.model.model import Person logger = logging.getLogger(__name__) PACKAGE_FILES = { - 'name': 'cicero', - 'version': '0.7.2-3', - 'files': { - 'cicero_0.7.2-3.diff.gz': { - 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', - 'name': 'cicero_0.7.2-3.diff.gz', - 'sha256': 'f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c', # noqa - 'size': 3964, - 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz' # noqa + "name": "cicero", + "version": "0.7.2-3", + "files": { + "cicero_0.7.2-3.diff.gz": { + "md5sum": "a93661b6a48db48d59ba7d26796fc9ce", + "name": "cicero_0.7.2-3.diff.gz", + "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa + "size": 3964, + "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz", # noqa + }, + "cicero_0.7.2-3.dsc": { + "md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a", + "name": "cicero_0.7.2-3.dsc", + "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa + "size": 1864, + "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc", # noqa + }, # noqa + "cicero_0.7.2.orig.tar.gz": { + "md5sum": "4353dede07c5728319ba7f5595a7230a", + "name": "cicero_0.7.2.orig.tar.gz", + "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa + "size": 96527, + "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, - 'cicero_0.7.2-3.dsc': { - 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', - 'name': 'cicero_0.7.2-3.dsc', - 'sha256': '35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03', # noqa - 'size': 1864, - 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc'}, # noqa - 'cicero_0.7.2.orig.tar.gz': { - 'md5sum': '4353dede07c5728319ba7f5595a7230a', - 'name': 'cicero_0.7.2.orig.tar.gz', - 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786', # noqa - 'size': 96527, - 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz' # noqa - } }, } PACKAGE_FILES2 = { - 'name': 'cicero', - 'version': '0.7.2-4', - 'files': { - 'cicero_0.7.2-4.diff.gz': { - 'md5sum': '1e7e6fc4a59d57c98082a3af78145734', - 'name': 'cicero_0.7.2-4.diff.gz', - 'sha256': '2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01', # noqa - 'size': 4038, - 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz' # noqa + "name": "cicero", + "version": "0.7.2-4", + "files": { + "cicero_0.7.2-4.diff.gz": { + "md5sum": "1e7e6fc4a59d57c98082a3af78145734", + "name": "cicero_0.7.2-4.diff.gz", + "sha256": "2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01", # noqa + "size": 4038, + "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz", 
# noqa }, - 'cicero_0.7.2-4.dsc': { - 'md5sum': '1a6c8855a73b4282bb31d15518f18cde', - 'name': 'cicero_0.7.2-4.dsc', - 'sha256': '913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b', # noqa - 'size': 1881, - 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc'}, # noqa - 'cicero_0.7.2.orig.tar.gz': { - 'md5sum': '4353dede07c5728319ba7f5595a7230a', - 'name': 'cicero_0.7.2.orig.tar.gz', - 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786', # noqa - 'size': 96527, - 'uri': 'http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz' # noqa - } - } + "cicero_0.7.2-4.dsc": { + "md5sum": "1a6c8855a73b4282bb31d15518f18cde", + "name": "cicero_0.7.2-4.dsc", + "sha256": "913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b", # noqa + "size": 1881, + "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc", # noqa + }, # noqa + "cicero_0.7.2.orig.tar.gz": { + "md5sum": "4353dede07c5728319ba7f5595a7230a", + "name": "cicero_0.7.2.orig.tar.gz", + "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa + "size": 96527, + "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa + }, + }, } PACKAGE_PER_VERSION = { - 'stretch/contrib/0.7.2-3': PACKAGE_FILES, + "stretch/contrib/0.7.2-3": PACKAGE_FILES, } PACKAGES_PER_VERSION = { - 'stretch/contrib/0.7.2-3': PACKAGE_FILES, - 'buster/contrib/0.7.2-4': PACKAGE_FILES2, + "stretch/contrib/0.7.2-3": PACKAGE_FILES, + "buster/contrib/0.7.2-4": PACKAGE_FILES2, } -def test_debian_first_visit( - swh_config, requests_mock_datadir): +def test_debian_first_visit(swh_config, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ loader = DebianLoader( - url='deb://Debian/packages/cicero', - date='2019-10-12T05:58:09.165557+00:00', - packages=PACKAGE_PER_VERSION) + url="deb://Debian/packages/cicero", + date="2019-10-12T05:58:09.165557+00:00", + packages=PACKAGE_PER_VERSION, + ) actual_load_status = loader.load() - expected_snapshot_id = '3b6b66e6ee4e7d903a379a882684a2a50480c0b4' + expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } stats = get_stats(loader.storage) assert { - 'content': 42, - 'directory': 2, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 1, # all artifacts under 1 revision - 'skipped_content': 0, - 'snapshot': 1 + "content": 42, + "directory": 2, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 1, # all artifacts under 1 revision + "skipped_content": 0, + "snapshot": 1, } == stats expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': { - 'releases/stretch/contrib/0.7.2-3': { - 'target_type': 'revision', - 'target': '2807f5b3f84368b4889a9ae827fe85854ffecf07', + "id": expected_snapshot_id, + "branches": { + "releases/stretch/contrib/0.7.2-3": { + "target_type": "revision", + "target": "2807f5b3f84368b4889a9ae827fe85854ffecf07", } }, } # different than the previous loader as no release is done check_snapshot(expected_snapshot, loader.storage) -def test_debian_first_visit_then_another_visit( - swh_config, requests_mock_datadir): +def test_debian_first_visit_then_another_visit(swh_config, requests_mock_datadir): """With no prior visit, load a debian project ends up with 1 snapshot """ - url = 
'deb://Debian/packages/cicero' + url = "deb://Debian/packages/cicero" loader = DebianLoader( - url=url, - date='2019-10-12T05:58:09.165557+00:00', - packages=PACKAGE_PER_VERSION) + url=url, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGE_PER_VERSION + ) actual_load_status = loader.load() - expected_snapshot_id = '3b6b66e6ee4e7d903a379a882684a2a50480c0b4' + expected_snapshot_id = "3b6b66e6ee4e7d903a379a882684a2a50480c0b4" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'deb' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "deb" stats = get_stats(loader.storage) assert { - 'content': 42, - 'directory': 2, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 1, # all artifacts under 1 revision - 'skipped_content': 0, - 'snapshot': 1 + "content": 42, + "directory": 2, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 1, # all artifacts under 1 revision + "skipped_content": 0, + "snapshot": 1, } == stats expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': { - 'releases/stretch/contrib/0.7.2-3': { - 'target_type': 'revision', - 'target': '2807f5b3f84368b4889a9ae827fe85854ffecf07', + "id": expected_snapshot_id, + "branches": { + "releases/stretch/contrib/0.7.2-3": { + "target_type": "revision", + "target": "2807f5b3f84368b4889a9ae827fe85854ffecf07", } }, } # different than the previous loader as no release is done check_snapshot(expected_snapshot, loader.storage) # No change in between load actual_load_status2 = loader.load() - assert actual_load_status2['status'] == 'uneventful' + assert actual_load_status2["status"] == "uneventful" origin_visit2 = loader.storage.origin_visit_get_latest(url) - assert origin_visit2['status'] == 'full' - assert origin_visit2['type'] == 'deb' + assert origin_visit2["status"] == "full" + assert origin_visit2["type"] == "deb" stats2 = get_stats(loader.storage) assert { - 'content': 42 + 0, - 'directory': 2 + 0, - 'origin': 1, - 'origin_visit': 1 + 1, # a new visit occurred - 'person': 1, - 'release': 0, - 'revision': 1, - 'skipped_content': 0, - 'snapshot': 1, # same snapshot across 2 visits + "content": 42 + 0, + "directory": 2 + 0, + "origin": 1, + "origin_visit": 1 + 1, # a new visit occurred + "person": 1, + "release": 0, + "revision": 1, + "skipped_content": 0, + "snapshot": 1, # same snapshot across 2 visits } == stats2 urls = [ - m.url for m in requests_mock_datadir.request_history - if m.url.startswith('http://deb.debian.org') + m.url + for m in requests_mock_datadir.request_history + if m.url.startswith("http://deb.debian.org") ] # visited each package artifact twice across 2 visits assert len(urls) == len(set(urls)) def test_uid_to_person(): - uid = 'Someone Name ' + uid = "Someone Name " actual_person = uid_to_person(uid) assert actual_person == { - 'name': 'Someone Name', - 'email': 'someone@orga.org', - 'fullname': uid, + "name": "Someone Name", + "email": "someone@orga.org", + "fullname": uid, } def test_prepare_person(): - actual_author = prepare_person({ - 'name': 'Someone Name', - 'email': 'someone@orga.org', - 'fullname': 'Someone Name ', - }) + actual_author = prepare_person( + { + "name": "Someone Name", + "email": "someone@orga.org", + "fullname": "Someone Name ", + } + ) assert actual_author == 
Person( - name=b'Someone Name', - email=b'someone@orga.org', - fullname=b'Someone Name ', + name=b"Someone Name", + email=b"someone@orga.org", + fullname=b"Someone Name ", ) def test_download_package(datadir, tmpdir, requests_mock_datadir): tmpdir = str(tmpdir) # py3.5 work around (LocalPath issue) all_hashes = download_package(PACKAGE_FILES, tmpdir) assert all_hashes == { - 'cicero_0.7.2-3.diff.gz': { - 'checksums': { - 'sha1': '0815282053f21601b0ec4adf7a8fe47eace3c0bc', - 'sha256': 'f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c' # noqa + "cicero_0.7.2-3.diff.gz": { + "checksums": { + "sha1": "0815282053f21601b0ec4adf7a8fe47eace3c0bc", + "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa }, - 'filename': 'cicero_0.7.2-3.diff.gz', - 'length': 3964}, - 'cicero_0.7.2-3.dsc': { - 'checksums': { - 'sha1': 'abbec4e8efbbc80278236e1dd136831eac08accd', - 'sha256': '35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03' # noqa + "filename": "cicero_0.7.2-3.diff.gz", + "length": 3964, + }, + "cicero_0.7.2-3.dsc": { + "checksums": { + "sha1": "abbec4e8efbbc80278236e1dd136831eac08accd", + "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa }, - 'filename': 'cicero_0.7.2-3.dsc', - 'length': 1864}, - 'cicero_0.7.2.orig.tar.gz': { - 'checksums': { - 'sha1': 'a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43', - 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786' # noqa + "filename": "cicero_0.7.2-3.dsc", + "length": 1864, + }, + "cicero_0.7.2.orig.tar.gz": { + "checksums": { + "sha1": "a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43", + "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa }, - 'filename': 'cicero_0.7.2.orig.tar.gz', - 'length': 96527 - } + "filename": "cicero_0.7.2.orig.tar.gz", + "length": 96527, + }, } def test_dsc_information_ok(): - fname = 'cicero_0.7.2-3.dsc' + fname = "cicero_0.7.2-3.dsc" dsc_url, dsc_name = dsc_information(PACKAGE_FILES) - assert dsc_url == PACKAGE_FILES['files'][fname]['uri'] - assert dsc_name == PACKAGE_FILES['files'][fname]['name'] + assert dsc_url == PACKAGE_FILES["files"][fname]["uri"] + assert dsc_name == PACKAGE_FILES["files"][fname]["name"] def test_dsc_information_not_found(): - fname = 'cicero_0.7.2-3.dsc' + fname = "cicero_0.7.2-3.dsc" package_files = copy.deepcopy(PACKAGE_FILES) - package_files['files'].pop(fname) + package_files["files"].pop(fname) dsc_url, dsc_name = dsc_information(package_files) assert dsc_url is None assert dsc_name is None def test_dsc_information_too_many_dsc_entries(): # craft an extra dsc file - fname = 'cicero_0.7.2-3.dsc' + fname = "cicero_0.7.2-3.dsc" package_files = copy.deepcopy(PACKAGE_FILES) - data = package_files['files'][fname] - fname2 = fname.replace('cicero', 'ciceroo') - package_files['files'][fname2] = data + data = package_files["files"][fname] + fname2 = fname.replace("cicero", "ciceroo") + package_files["files"][fname2] = data with pytest.raises( - ValueError, match='Package %s_%s references several dsc' % ( - package_files['name'], package_files['version'])): + ValueError, + match="Package %s_%s references several dsc" + % (package_files["name"], package_files["version"]), + ): dsc_information(package_files) def test_get_package_metadata(requests_mock_datadir, datadir, tmp_path): tmp_path = str(tmp_path) # py3.5 compat. 
package = PACKAGE_FILES - logger.debug('package: %s', package) + logger.debug("package: %s", package) # download the packages all_hashes = download_package(package, tmp_path) # Retrieve information from package _, dsc_name = dsc_information(package) dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()] # Extract information from package extracted_path = extract_package(dl_artifacts, tmp_path) # Retrieve information on package dsc_path = path.join(path.dirname(extracted_path), dsc_name) - actual_package_info = get_package_metadata( - package, dsc_path, extracted_path) + actual_package_info = get_package_metadata(package, dsc_path, extracted_path) - logger.debug('actual_package_info: %s', actual_package_info) + logger.debug("actual_package_info: %s", actual_package_info) assert actual_package_info == { - 'changelog': { - 'date': '2014-10-19T16:52:35+02:00', - 'history': [ - ('cicero', '0.7.2-2'), - ('cicero', '0.7.2-1'), - ('cicero', '0.7-1') + "changelog": { + "date": "2014-10-19T16:52:35+02:00", + "history": [ + ("cicero", "0.7.2-2"), + ("cicero", "0.7.2-1"), + ("cicero", "0.7-1"), ], - 'person': { - 'email': 'sthibault@debian.org', - 'fullname': 'Samuel Thibault ', - 'name': 'Samuel Thibault' - } + "person": { + "email": "sthibault@debian.org", + "fullname": "Samuel Thibault ", + "name": "Samuel Thibault", + }, }, - 'maintainers': [ + "maintainers": [ { - 'email': 'debian-accessibility@lists.debian.org', - 'fullname': 'Debian Accessibility Team ' - '', - 'name': 'Debian Accessibility Team' + "email": "debian-accessibility@lists.debian.org", + "fullname": "Debian Accessibility Team " + "", + "name": "Debian Accessibility Team", }, { - 'email': 'sthibault@debian.org', - 'fullname': 'Samuel Thibault ', - 'name': 'Samuel Thibault' - } + "email": "sthibault@debian.org", + "fullname": "Samuel Thibault ", + "name": "Samuel Thibault", + }, ], - 'name': 'cicero', - 'version': '0.7.2-3' + "name": "cicero", + "version": "0.7.2-3", } def test_debian_multiple_packages(swh_config, requests_mock_datadir): - url = 'deb://Debian/packages/cicero' + url = "deb://Debian/packages/cicero" loader = DebianLoader( - url=url, - date='2019-10-12T05:58:09.165557+00:00', - packages=PACKAGES_PER_VERSION) + url=url, date="2019-10-12T05:58:09.165557+00:00", packages=PACKAGES_PER_VERSION + ) actual_load_status = loader.load() - expected_snapshot_id = 'defc19021187f3727293121fcf6c5c82cb923604' + expected_snapshot_id = "defc19021187f3727293121fcf6c5c82cb923604" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'deb' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "deb" expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': { - 'releases/stretch/contrib/0.7.2-3': { - 'target_type': 'revision', - 'target': '2807f5b3f84368b4889a9ae827fe85854ffecf07', + "id": expected_snapshot_id, + "branches": { + "releases/stretch/contrib/0.7.2-3": { + "target_type": "revision", + "target": "2807f5b3f84368b4889a9ae827fe85854ffecf07", + }, + "releases/buster/contrib/0.7.2-4": { + "target_type": "revision", + "target": "8224139c274c984147ef4b09aa0e462c55a10bd3", }, - 'releases/buster/contrib/0.7.2-4': { - 'target_type': 'revision', - 'target': '8224139c274c984147ef4b09aa0e462c55a10bd3', - } }, } check_snapshot(expected_snapshot, loader.storage) def 
test_resolve_revision_from_edge_cases(): """Solving revision with empty data will result in unknown revision """ for package_artifacts in [{}, PACKAGE_FILES]: - actual_revision = resolve_revision_from( - package_artifacts, {}) + actual_revision = resolve_revision_from(package_artifacts, {}) assert actual_revision is None for known_artifacts in [{}, PACKAGE_FILES]: - actual_revision = resolve_revision_from( - {}, known_artifacts) + actual_revision = resolve_revision_from({}, known_artifacts) assert actual_revision is None known_package_artifacts = { b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07": { - 'extrinsic': { + "extrinsic": { # empty }, # ... removed the unnecessary intermediary data } } assert not resolve_revision_from(known_package_artifacts, PACKAGE_FILES) def test_resolve_revision_from_edge_cases_hit_and_miss(): """Solving revision with inconsistent data will result in unknown revision """ artifact_metadata = PACKAGE_FILES2 - expected_revision_id = b"(\x08\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xff\x85\x85O\xfe\xcf\x07" # noqa + expected_revision_id = ( + b"(\x08\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xff\x85\x85O\xfe\xcf\x07" # noqa + ) known_package_artifacts = { expected_revision_id: { - 'extrinsic': { - 'raw': PACKAGE_FILES, - }, + "extrinsic": {"raw": PACKAGE_FILES,}, # ... removed the unnecessary intermediary data } } - actual_revision = resolve_revision_from( - known_package_artifacts, artifact_metadata - ) + actual_revision = resolve_revision_from(known_package_artifacts, artifact_metadata) assert actual_revision is None def test_resolve_revision_from(): """Solving revision with consistent data will solve the revision """ artifact_metadata = PACKAGE_FILES - expected_revision_id = b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07" # noqa + expected_revision_id = ( + b"(\x07\xf5\xb3\xf8Ch\xb4\x88\x9a\x9a\xe8'\xfe\x85\x85O\xfe\xcf\x07" # noqa + ) - files = artifact_metadata['files'] + files = artifact_metadata["files"] # shuffling dict's keys keys = list(files.keys()) random.shuffle(keys) - package_files = { - 'files': {k: files[k] for k in keys} - } + package_files = {"files": {k: files[k] for k in keys}} known_package_artifacts = { expected_revision_id: { - 'extrinsic': { - 'raw': package_files, - }, + "extrinsic": {"raw": package_files,}, # ... 
removed the unnecessary intermediary data } } - actual_revision = resolve_revision_from( - known_package_artifacts, artifact_metadata - ) + actual_revision = resolve_revision_from(known_package_artifacts, artifact_metadata) assert actual_revision == expected_revision_id diff --git a/swh/loader/package/debian/tests/test_tasks.py b/swh/loader/package/debian/tests/test_tasks.py index a8b2800..1cc1c9d 100644 --- a/swh/loader/package/debian/tests/test_tasks.py +++ b/swh/loader/package/debian/tests/test_tasks.py @@ -1,23 +1,19 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_debian_loader(mocker, swh_app, celery_session_worker, swh_config): - mock_loader = mocker.patch( - 'swh.loader.package.debian.loader.DebianLoader.load') - mock_loader.return_value = {'status': 'eventful'} + mock_loader = mocker.patch("swh.loader.package.debian.loader.DebianLoader.load") + mock_loader.return_value = {"status": "eventful"} res = swh_app.send_task( - 'swh.loader.package.debian.tasks.LoadDebian', - kwargs={ - 'url': 'some-url', - 'date': 'some-date', - 'packages': {} - }) + "swh.loader.package.debian.tasks.LoadDebian", + kwargs={"url": "some-url", "date": "some-date", "packages": {}}, + ) assert res res.wait() assert res.successful() - assert res.result == {'status': 'eventful'} + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/deposit/__init__.py b/swh/loader/package/deposit/__init__.py index 0de5433..11ce63d 100644 --- a/swh/loader/package/deposit/__init__.py +++ b/swh/loader/package/deposit/__init__.py @@ -1,16 +1,17 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: """Register the current worker module's definition""" from .loader import DepositLoader + return { - 'task_modules': [f'{__name__}.tasks'], - 'loader': DepositLoader, + "task_modules": [f"{__name__}.tasks"], + "loader": DepositLoader, } diff --git a/swh/loader/package/deposit/loader.py b/swh/loader/package/deposit/loader.py index 2fabd6a..30178d9 100644 --- a/swh/loader/package/deposit/loader.py +++ b/swh/loader/package/deposit/loader.py @@ -1,245 +1,256 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import requests import types -from typing import ( - Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple, Union -) +from typing import Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple, Union from swh.model.hashutil import hash_to_hex, hash_to_bytes from swh.model.model import ( - Person, Revision, RevisionType, TimestampWithTimezone, Sha1Git, + Person, + Revision, + RevisionType, + TimestampWithTimezone, + Sha1Git, ) from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import download logger = logging.getLogger(__name__) class DepositLoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. 
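    A sketch of a typical invocation (the url and deposit id below come from the
    test fixtures and are shown only as an illustration; the 'deposit' section of
    the loader configuration provides the private API url and credentials)::

        loader = DepositLoader(
            url='https://hal-test.archives-ouvertes.fr/some-external-id',
            deposit_id=666)
        loader.load()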
""" - visit_type = 'deposit' + + visit_type = "deposit" def __init__(self, url: str, deposit_id: str): """Constructor Args: url: Origin url to associate the artifacts/metadata to deposit_id: Deposit identity """ super().__init__(url=url) - config_deposit = self.config['deposit'] + config_deposit = self.config["deposit"] self.deposit_id = deposit_id - self.client = ApiClient(url=config_deposit['url'], - auth=config_deposit['auth']) + self.client = ApiClient(url=config_deposit["url"], auth=config_deposit["auth"]) self.metadata: Dict[str, Any] = {} def get_versions(self) -> Sequence[str]: # only 1 branch 'HEAD' with no alias since we only have 1 snapshot # branch - return ['HEAD'] + return ["HEAD"] - def get_package_info(self, version: str) -> Generator[ - Tuple[str, Mapping[str, Any]], None, None]: + def get_package_info( + self, version: str + ) -> Generator[Tuple[str, Mapping[str, Any]], None, None]: p_info = { - 'filename': 'archive.zip', - 'raw': self.metadata, + "filename": "archive.zip", + "raw": self.metadata, } - yield 'HEAD', p_info + yield "HEAD", p_info - def download_package(self, p_info: Mapping[str, Any], - tmpdir: str) -> List[Tuple[str, Mapping]]: + def download_package( + self, p_info: Mapping[str, Any], tmpdir: str + ) -> List[Tuple[str, Mapping]]: """Override to allow use of the dedicated deposit client """ - return [self.client.archive_get( - self.deposit_id, tmpdir, p_info['filename'])] + return [self.client.archive_get(self.deposit_id, tmpdir, p_info["filename"])] def build_revision( - self, a_metadata: Dict, uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: - revision_data = a_metadata.pop('revision') + self, a_metadata: Dict, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: + revision_data = a_metadata.pop("revision") # FIXME: the deposit no longer needs to build the revision - date = TimestampWithTimezone.from_dict(revision_data['date']) - metadata = revision_data['metadata'] - metadata.update({ - 'extrinsic': { - 'provider': self.client.metadata_url(self.deposit_id), - 'when': self.visit_date.isoformat(), - 'raw': a_metadata, - }, - }) + date = TimestampWithTimezone.from_dict(revision_data["date"]) + metadata = revision_data["metadata"] + metadata.update( + { + "extrinsic": { + "provider": self.client.metadata_url(self.deposit_id), + "when": self.visit_date.isoformat(), + "raw": a_metadata, + }, + } + ) return Revision( type=RevisionType.TAR, - message=revision_data['message'].encode('utf-8'), - author=parse_author(revision_data['author']), + message=revision_data["message"].encode("utf-8"), + author=parse_author(revision_data["author"]), date=date, - committer=parse_author(revision_data['committer']), + committer=parse_author(revision_data["committer"]), committer_date=date, - parents=[hash_to_bytes(p) - for p in revision_data.get('parents', [])], + parents=[hash_to_bytes(p) for p in revision_data.get("parents", [])], directory=directory, synthetic=True, metadata=metadata, ) def load(self) -> Dict: # First making sure the deposit is known prior to trigger a loading try: self.metadata = self.client.metadata_get(self.deposit_id) except ValueError: - logger.error(f'Unknown deposit {self.deposit_id}, ignoring') - return {'status': 'failed'} + logger.error(f"Unknown deposit {self.deposit_id}, ignoring") + return {"status": "failed"} # Then usual loading r = super().load() - success = r['status'] != 'failed' + success = r["status"] != "failed" if success: # Update archive with metadata information - origin_metadata = 
self.metadata['origin_metadata'] + origin_metadata = self.metadata["origin_metadata"] - logger.debug('origin_metadata: %s', origin_metadata) - tools = self.storage.tool_add([origin_metadata['tool']]) - logger.debug('tools: %s', tools) - tool_id = tools[0]['id'] + logger.debug("origin_metadata: %s", origin_metadata) + tools = self.storage.tool_add([origin_metadata["tool"]]) + logger.debug("tools: %s", tools) + tool_id = tools[0]["id"] - provider = origin_metadata['provider'] + provider = origin_metadata["provider"] # FIXME: Shall we delete this info? provider_id = self.storage.metadata_provider_add( - provider['provider_name'], - provider['provider_type'], - provider['provider_url'], - metadata=None) + provider["provider_name"], + provider["provider_type"], + provider["provider_url"], + metadata=None, + ) - metadata = origin_metadata['metadata'] + metadata = origin_metadata["metadata"] self.storage.origin_metadata_add( - self.url, self.visit_date, provider_id, tool_id, metadata) + self.url, self.visit_date, provider_id, tool_id, metadata + ) # Update deposit status try: if not success: - self.client.status_update(self.deposit_id, status='failed') + self.client.status_update(self.deposit_id, status="failed") return r - snapshot_id = hash_to_bytes(r['snapshot_id']) - branches = self.storage.snapshot_get(snapshot_id)['branches'] - logger.debug('branches: %s', branches) + snapshot_id = hash_to_bytes(r["snapshot_id"]) + branches = self.storage.snapshot_get(snapshot_id)["branches"] + logger.debug("branches: %s", branches) if not branches: return r - rev_id = branches[b'HEAD']['target'] + rev_id = branches[b"HEAD"]["target"] revisions = self.storage.revision_get([rev_id]) # FIXME: inconsistency between tests and production code if isinstance(revisions, types.GeneratorType): revisions = list(revisions) revision = revisions[0] # Retrieve the revision identifier - dir_id = revision['directory'] + dir_id = revision["directory"] # update the deposit's status to success with its # revision-id and directory-id self.client.status_update( self.deposit_id, - status='done', + status="done", revision_id=hash_to_hex(rev_id), directory_id=hash_to_hex(dir_id), - origin_url=self.url) + origin_url=self.url, + ) except Exception: - logger.exception( - 'Problem when trying to update the deposit\'s status') - return {'status': 'failed'} + logger.exception("Problem when trying to update the deposit's status") + return {"status": "failed"} return r def parse_author(author) -> Person: """See prior fixme """ return Person( - fullname=author['fullname'].encode('utf-8'), - name=author['name'].encode('utf-8'), - email=author['email'].encode('utf-8'), + fullname=author["fullname"].encode("utf-8"), + name=author["name"].encode("utf-8"), + email=author["email"].encode("utf-8"), ) class ApiClient: """Private Deposit Api client """ + def __init__(self, url, auth: Optional[Mapping[str, str]]): - self.base_url = url.rstrip('/') - self.auth = None if not auth else (auth['username'], auth['password']) + self.base_url = url.rstrip("/") + self.auth = None if not auth else (auth["username"], auth["password"]) def do(self, method: str, url: str, *args, **kwargs): """Internal method to deal with requests, possibly with basic http authentication. 
Args: method (str): supported http methods as in get/post/put Returns: The request's execution output """ method_fn = getattr(requests, method) if self.auth: - kwargs['auth'] = self.auth + kwargs["auth"] = self.auth return method_fn(url, *args, **kwargs) def archive_get( - self, deposit_id: Union[int, str], tmpdir: str, - filename: str) -> Tuple[str, Dict]: + self, deposit_id: Union[int, str], tmpdir: str, filename: str + ) -> Tuple[str, Dict]: """Retrieve deposit's archive artifact locally """ - url = f'{self.base_url}/{deposit_id}/raw/' + url = f"{self.base_url}/{deposit_id}/raw/" return download(url, dest=tmpdir, filename=filename, auth=self.auth) def metadata_url(self, deposit_id: Union[int, str]) -> str: - return f'{self.base_url}/{deposit_id}/meta/' + return f"{self.base_url}/{deposit_id}/meta/" def metadata_get(self, deposit_id: Union[int, str]) -> Dict[str, Any]: """Retrieve deposit's metadata artifact as json """ url = self.metadata_url(deposit_id) - r = self.do('get', url) + r = self.do("get", url) if r.ok: return r.json() - msg = f'Problem when retrieving deposit metadata at {url}' + msg = f"Problem when retrieving deposit metadata at {url}" logger.error(msg) raise ValueError(msg) - def status_update(self, deposit_id: Union[int, str], status: str, - revision_id: Optional[str] = None, - directory_id: Optional[str] = None, - origin_url: Optional[str] = None): + def status_update( + self, + deposit_id: Union[int, str], + status: str, + revision_id: Optional[str] = None, + directory_id: Optional[str] = None, + origin_url: Optional[str] = None, + ): """Update deposit's information including status, and persistent identifiers result of the loading. """ - url = f'{self.base_url}/{deposit_id}/update/' - payload = {'status': status} + url = f"{self.base_url}/{deposit_id}/update/" + payload = {"status": status} if revision_id: - payload['revision_id'] = revision_id + payload["revision_id"] = revision_id if directory_id: - payload['directory_id'] = directory_id + payload["directory_id"] = directory_id if origin_url: - payload['origin_url'] = origin_url + payload["origin_url"] = origin_url - self.do('put', url, json=payload) + self.do("put", url, json=payload) diff --git a/swh/loader/package/deposit/tasks.py b/swh/loader/package/deposit/tasks.py index 08dc376..ff8e089 100644 --- a/swh/loader/package/deposit/tasks.py +++ b/swh/loader/package/deposit/tasks.py @@ -1,14 +1,14 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.deposit.loader import DepositLoader -@shared_task(name=__name__ + '.LoadDeposit') +@shared_task(name=__name__ + ".LoadDeposit") def load_deposit(*, url, deposit_id): """Load Deposit artifacts""" return DepositLoader(url, deposit_id).load() diff --git a/swh/loader/package/deposit/tests/test_deposit.py b/swh/loader/package/deposit/tests/test_deposit.py index 102e1b5..9035924 100644 --- a/swh/loader/package/deposit/tests/test_deposit.py +++ b/swh/loader/package/deposit/tests/test_deposit.py @@ -1,205 +1,207 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import re from swh.model.hashutil import hash_to_bytes from 
swh.loader.package.deposit.loader import DepositLoader from swh.loader.package.tests.common import ( - check_snapshot, check_metadata_paths, get_stats + check_snapshot, + check_metadata_paths, + get_stats, ) from swh.core.pytest_plugin import requests_mock_datadir_factory def test_deposit_init_ok(swh_config, swh_loader_config): - url = 'some-url' + url = "some-url" deposit_id = 999 loader = DepositLoader(url, deposit_id) # Something that does not exist assert loader.url == url assert loader.client is not None - assert loader.client.base_url == swh_loader_config['deposit']['url'] + assert loader.client.base_url == swh_loader_config["deposit"]["url"] -def test_deposit_loading_unknown_deposit( - swh_config, requests_mock_datadir): +def test_deposit_loading_unknown_deposit(swh_config, requests_mock_datadir): """Loading an unknown deposit should fail no origin, no visit, no snapshot """ # private api url form: 'https://deposit.s.o/1/private/hal/666/raw/' - url = 'some-url' + url = "some-url" unknown_deposit_id = 667 loader = DepositLoader(url, unknown_deposit_id) # does not exist actual_load_status = loader.load() - assert actual_load_status == {'status': 'failed'} + assert actual_load_status == {"status": "failed"} stats = get_stats(loader.storage) assert { - 'content': 0, - 'directory': 0, - 'origin': 0, - 'origin_visit': 0, - 'person': 0, - 'release': 0, - 'revision': 0, - 'skipped_content': 0, - 'snapshot': 0, + "content": 0, + "directory": 0, + "origin": 0, + "origin_visit": 0, + "person": 0, + "release": 0, + "revision": 0, + "skipped_content": 0, + "snapshot": 0, } == stats -requests_mock_datadir_missing_one = requests_mock_datadir_factory(ignore_urls=[ - 'https://deposit.softwareheritage.org/1/private/666/raw/', -]) +requests_mock_datadir_missing_one = requests_mock_datadir_factory( + ignore_urls=["https://deposit.softwareheritage.org/1/private/666/raw/",] +) def test_deposit_loading_failure_to_retrieve_1_artifact( - swh_config, requests_mock_datadir_missing_one): + swh_config, requests_mock_datadir_missing_one +): """Deposit with missing artifact ends up with an uneventful/partial visit """ # private api url form: 'https://deposit.s.o/1/private/hal/666/raw/' - url = 'some-url-2' + url = "some-url-2" deposit_id = 666 loader = DepositLoader(url, deposit_id) actual_load_status = loader.load() - assert actual_load_status['status'] == 'uneventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "uneventful" + assert actual_load_status["snapshot_id"] is not None stats = get_stats(loader.storage) assert { - 'content': 0, - 'directory': 0, - 'origin': 1, - 'origin_visit': 1, - 'person': 0, - 'release': 0, - 'revision': 0, - 'skipped_content': 0, - 'snapshot': 1, + "content": 0, + "directory": 0, + "origin": 1, + "origin_visit": 1, + "person": 0, + "release": 0, + "revision": 0, + "skipped_content": 0, + "snapshot": 1, } == stats origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'partial' - assert origin_visit['type'] == 'deposit' + assert origin_visit["status"] == "partial" + assert origin_visit["type"] == "deposit" def test_revision_metadata_structure(swh_config, requests_mock_datadir): # do not care for deposit update query - requests_mock_datadir.put(re.compile('https')) + requests_mock_datadir.put(re.compile("https")) - url = 'https://hal-test.archives-ouvertes.fr/some-external-id' + url = "https://hal-test.archives-ouvertes.fr/some-external-id" deposit_id = 666 loader = DepositLoader(url, 
deposit_id) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] is not None - expected_revision_id = hash_to_bytes( - '637318680351f5d78856d13264faebbd91efe9bb') + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None + expected_revision_id = hash_to_bytes("637318680351f5d78856d13264faebbd91efe9bb") revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None - check_metadata_paths(revision['metadata'], paths=[ - ('extrinsic.provider', str), - ('extrinsic.when', str), - ('extrinsic.raw', dict), - ('original_artifact', list), - ]) + check_metadata_paths( + revision["metadata"], + paths=[ + ("extrinsic.provider", str), + ("extrinsic.when", str), + ("extrinsic.raw", dict), + ("original_artifact", list), + ], + ) - for original_artifact in revision['metadata']['original_artifact']: - check_metadata_paths(original_artifact, paths=[ - ('filename', str), - ('length', int), - ('checksums', dict), - ]) + for original_artifact in revision["metadata"]["original_artifact"]: + check_metadata_paths( + original_artifact, + paths=[("filename", str), ("length", int), ("checksums", dict),], + ) def test_deposit_loading_ok(swh_config, requests_mock_datadir): - requests_mock_datadir.put(re.compile('https')) # do not care for put + requests_mock_datadir.put(re.compile("https")) # do not care for put - url = 'https://hal-test.archives-ouvertes.fr/some-external-id' + url = "https://hal-test.archives-ouvertes.fr/some-external-id" deposit_id = 666 loader = DepositLoader(url, deposit_id) actual_load_status = loader.load() - expected_snapshot_id = 'b2b327b33dc85818bd23c3ccda8b7e675a66ecbd' + expected_snapshot_id = "b2b327b33dc85818bd23c3ccda8b7e675a66ecbd" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id, + "status": "eventful", + "snapshot_id": expected_snapshot_id, } stats = get_stats(loader.storage) assert { - 'content': 303, - 'directory': 12, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 1, - 'skipped_content': 0, - 'snapshot': 1, + "content": 303, + "directory": 12, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 1, + "skipped_content": 0, + "snapshot": 1, } == stats origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'deposit' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "deposit" expected_branches = { - 'HEAD': { - 'target': '637318680351f5d78856d13264faebbd91efe9bb', - 'target_type': 'revision', + "HEAD": { + "target": "637318680351f5d78856d13264faebbd91efe9bb", + "target_type": "revision", }, } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': expected_branches, + "id": expected_snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) # check metadata tool = { "name": "swh-deposit", "version": "0.0.1", - "configuration": { - "sword_version": "2", - } + "configuration": {"sword_version": "2",}, } tool = loader.storage.tool_get(tool) assert tool is not None - assert tool['id'] is not None + assert tool["id"] is not None provider = { "provider_name": "hal", "provider_type": "deposit_client", "provider_url": "https://hal-test.archives-ouvertes.fr/", "metadata": None, } provider = loader.storage.metadata_provider_get_by(provider) assert provider is not None - 
assert provider['id'] is not None + assert provider["id"] is not None - metadata = list(loader.storage.origin_metadata_get_by( - url, provider_type='deposit_client')) + metadata = list( + loader.storage.origin_metadata_get_by(url, provider_type="deposit_client") + ) assert metadata is not None assert isinstance(metadata, list) assert len(metadata) == 1 metadata0 = metadata[0] - assert metadata0['provider_id'] == provider['id'] - assert metadata0['provider_type'] == 'deposit_client' - assert metadata0['tool_id'] == tool['id'] + assert metadata0["provider_id"] == provider["id"] + assert metadata0["provider_type"] == "deposit_client" + assert metadata0["tool_id"] == tool["id"] diff --git a/swh/loader/package/deposit/tests/test_tasks.py b/swh/loader/package/deposit/tests/test_tasks.py index 9e65609..dc7aa7c 100644 --- a/swh/loader/package/deposit/tests/test_tasks.py +++ b/swh/loader/package/deposit/tests/test_tasks.py @@ -1,22 +1,19 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_deposit_loader(mocker, swh_app, celery_session_worker, swh_config): - mock_loader = mocker.patch( - 'swh.loader.package.deposit.loader.DepositLoader.load') - mock_loader.return_value = {'status': 'eventful'} + mock_loader = mocker.patch("swh.loader.package.deposit.loader.DepositLoader.load") + mock_loader.return_value = {"status": "eventful"} res = swh_app.send_task( - 'swh.loader.package.deposit.tasks.LoadDeposit', - kwargs={ - 'url': 'some-url', - 'deposit_id': 'some-d-id', - }) + "swh.loader.package.deposit.tasks.LoadDeposit", + kwargs={"url": "some-url", "deposit_id": "some-d-id",}, + ) assert res res.wait() assert res.successful() - assert res.result == {'status': 'eventful'} + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index b1d027f..096ae79 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,487 +1,490 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import logging import tempfile import os -from typing import ( - Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple -) +from typing import Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple import attr import sentry_sdk from swh.core.tarball import uncompress from swh.core.config import SWHConfig from swh.model import from_disk from swh.model.hashutil import hash_to_hex from swh.model.model import ( - BaseModel, Sha1Git, - Content, SkippedContent, Directory, + BaseModel, + Sha1Git, + Content, + SkippedContent, + Directory, Revision, - TargetType, Snapshot, - Origin + TargetType, + Snapshot, + Origin, ) from swh.storage import get_storage from swh.storage.algos.snapshot import snapshot_get_all_branches from swh.loader.package.utils import download logger = logging.getLogger(__name__) class PackageLoader: # Origin visit type (str) set by the loader - visit_type = '' + visit_type = "" def __init__(self, url): """Loader's constructor. This raises exception if the minimal required configuration is missing (cf. fn:`check` method). 
Args: url (str): Origin url to load data from """ # This expects to use the environment variable SWH_CONFIG_FILENAME self.config = SWHConfig.parse_config_file() self._check_configuration() - self.storage = get_storage(**self.config['storage']) + self.storage = get_storage(**self.config["storage"]) self.url = url self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) - self.max_content_size = self.config['max_content_size'] + self.max_content_size = self.config["max_content_size"] def _check_configuration(self): """Checks the minimal configuration required is set for the loader. If some required configuration is missing, exception detailing the issue is raised. """ - if 'storage' not in self.config: - raise ValueError( - 'Misconfiguration, at least the storage key should be set') + if "storage" not in self.config: + raise ValueError("Misconfiguration, at least the storage key should be set") def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Returns: Sequence of published versions """ return [] - def get_package_info(self, version: str) -> Generator[ - Tuple[str, Mapping[str, Any]], None, None]: + def get_package_info( + self, version: str + ) -> Generator[Tuple[str, Mapping[str, Any]], None, None]: """Given a release version of a package, retrieve the associated package information for such version. Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_revision( - self, a_metadata: Dict, uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: + self, a_metadata: Dict, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: """Build the revision from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. Args: a_metadata: Artifact metadata uncompressed_path: Artifact uncompressed path on disk Returns: SWH data dict """ - raise NotImplementedError('build_revision') + raise NotImplementedError("build_revision") def get_default_version(self) -> str: """Retrieve the latest release version if any. Returns: Latest version """ - return '' + return "" def last_snapshot(self) -> Optional[Snapshot]: """Retrieve the last snapshot """ snapshot = None - visit = self.storage.origin_visit_get_latest( - self.url, require_snapshot=True) - if visit and visit.get('snapshot'): - snapshot = Snapshot.from_dict(snapshot_get_all_branches( - self.storage, visit['snapshot'])) + visit = self.storage.origin_visit_get_latest(self.url, require_snapshot=True) + if visit and visit.get("snapshot"): + snapshot = Snapshot.from_dict( + snapshot_get_all_branches(self.storage, visit["snapshot"]) + ) return snapshot - def known_artifacts( - self, snapshot: Optional[Snapshot]) -> Dict[Sha1Git, BaseModel]: + def known_artifacts(self, snapshot: Optional[Snapshot]) -> Dict[Sha1Git, BaseModel]: """Retrieve the known releases/artifact for the origin. Args snapshot: snapshot for the visit Returns: Dict of keys revision id (bytes), values a metadata Dict. 
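        A sketch of the returned mapping (the revision id and metadata content
        are illustrative; the metadata dict is whatever was stored on each
        revision, e.g. the intrinsic/extrinsic dicts built by the package
        loaders)::

            {
                b'<20-byte revision id>': {
                    'intrinsic': {...},
                    'extrinsic': {'provider': '...', 'when': '...', 'raw': {...}},
                    'original_artifact': [...],
                },
            }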
""" if not snapshot: return {} # retrieve only revisions (e.g the alias we do not want here) - revs = [rev.target - for rev in snapshot.branches.values() - if rev and rev.target_type == TargetType.REVISION] + revs = [ + rev.target + for rev in snapshot.branches.values() + if rev and rev.target_type == TargetType.REVISION + ] known_revisions = self.storage.revision_get(revs) ret = {} for revision in known_revisions: if not revision: # revision_get can return None continue - ret[revision['id']] = revision['metadata'] + ret[revision["id"]] = revision["metadata"] return ret def resolve_revision_from( - self, known_artifacts: Dict, artifact_metadata: Dict) \ - -> Optional[bytes]: + self, known_artifacts: Dict, artifact_metadata: Dict + ) -> Optional[bytes]: """Resolve the revision from a snapshot and an artifact metadata dict. If the artifact has already been downloaded, this will return the existing revision targeting that uncompressed artifact directory. Otherwise, this returns None. Args: snapshot: Snapshot artifact_metadata: Information dict Returns: None or revision identifier """ return None - def download_package(self, p_info: Mapping[str, Any], - tmpdir: str) -> List[Tuple[str, Mapping]]: + def download_package( + self, p_info: Mapping[str, Any], tmpdir: str + ) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in in the tmpdir folder. Default implementation expects the artifacts package info to be about one artifact per package. Note that most implementation have 1 artifact per package. But some implementation have multiple artifacts per package (debian), some have none, the package is the artifact (gnu). Args: artifacts_package_info: Information on the package artifacts to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ - a_uri = p_info['url'] - filename = p_info.get('filename') + a_uri = p_info["url"] + filename = p_info.get("filename") return [download(a_uri, dest=tmpdir, filename=filename)] - def uncompress(self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], - dest: str) -> str: + def uncompress( + self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str + ) -> str: """Uncompress the artifact(s) in the destination folder dest. Optionally, this could need to use the p_info dict for some more information (debian). """ - uncompressed_path = os.path.join(dest, 'src') + uncompressed_path = os.path.join(dest, "src") for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: """Return an extra dict of branches that are used to update the set of branches. """ return {} def load(self) -> Dict: """Load for a specific origin the associated contents. for each package version of the origin 1. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 2. Extract the downloaded files By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 3. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 4. 
Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 5. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 6. Generate and load the snapshot for the visit Using the revisions/releases collected at step 5., and the branch information from step 0., generate a snapshot and load it into the Software Heritage archive """ - status_load = 'uneventful' # either: eventful, uneventful, failed - status_visit = 'full' # either: partial, full + status_load = "uneventful" # either: eventful, uneventful, failed + status_visit = "full" # either: partial, full tmp_revisions = {} # type: Dict[str, List] snapshot = None def finalize_visit() -> Dict[str, Any]: """Finalize the visit: - flush eventual unflushed data to storage - update origin visit's status - return the task's status """ - if hasattr(self.storage, 'flush'): + if hasattr(self.storage, "flush"): self.storage.flush() self.storage.origin_visit_update( - origin=self.url, visit_id=visit.visit, status=status_visit, - snapshot=snapshot and snapshot.id) + origin=self.url, + visit_id=visit.visit, + status=status_visit, + snapshot=snapshot and snapshot.id, + ) result: Dict[str, Any] = { - 'status': status_load, + "status": status_load, } if snapshot: - result['snapshot_id'] = hash_to_hex(snapshot.id) + result["snapshot_id"] = hash_to_hex(snapshot.id) return result # Prepare origin and origin_visit origin = Origin(url=self.url) try: self.storage.origin_add_one(origin) visit = self.storage.origin_visit_add( - self.url, date=self.visit_date, type=self.visit_type) + self.url, date=self.visit_date, type=self.visit_type + ) except Exception as e: - logger.exception('Failed to initialize origin_visit for %s', - self.url) + logger.exception("Failed to initialize origin_visit for %s", self.url) sentry_sdk.capture_exception(e) - return {'status': 'failed'} + return {"status": "failed"} try: last_snapshot = self.last_snapshot() - logger.debug('last snapshot: %s', last_snapshot) + logger.debug("last snapshot: %s", last_snapshot) known_artifacts = self.known_artifacts(last_snapshot) - logger.debug('known artifacts: %s', known_artifacts) + logger.debug("known artifacts: %s", known_artifacts) except Exception as e: - logger.exception('Failed to get previous state for %s', self.url) + logger.exception("Failed to get previous state for %s", self.url) sentry_sdk.capture_exception(e) - status_visit = 'partial' - status_load = 'failed' + status_visit = "partial" + status_load = "failed" return finalize_visit() load_exceptions = [] for version in self.get_versions(): # for each - logger.debug('version: %s', version) + logger.debug("version: %s", version) tmp_revisions[version] = [] # `p_` stands for `package_` for branch_name, p_info in self.get_package_info(version): - logger.debug('package_info: %s', p_info) - revision_id = self.resolve_revision_from( - known_artifacts, p_info['raw']) + logger.debug("package_info: %s", p_info) + revision_id = self.resolve_revision_from(known_artifacts, p_info["raw"]) if revision_id is None: try: revision_id = self._load_revision(p_info, origin) - status_load = 'eventful' + status_load = "eventful" except 
Exception as e: load_exceptions.append(e) sentry_sdk.capture_exception(e) - logger.exception('Failed loading branch %s for %s', - branch_name, self.url) + logger.exception( + "Failed loading branch %s for %s", branch_name, self.url + ) continue if revision_id is None: continue tmp_revisions[version].append((branch_name, revision_id)) if load_exceptions: - status_visit = 'partial' + status_visit = "partial" if not tmp_revisions: # We could not load any revisions; fail completely - status_visit = 'partial' - status_load = 'failed' + status_visit = "partial" + status_load = "failed" return finalize_visit() try: # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() - logger.debug('default version: %s', default_version) + logger.debug("default version: %s", default_version) # Retrieve extra branches extra_branches = self.extra_branches() - logger.debug('extra branches: %s', extra_branches) + logger.debug("extra branches: %s", extra_branches) - snapshot = self._load_snapshot(default_version, tmp_revisions, - extra_branches) + snapshot = self._load_snapshot( + default_version, tmp_revisions, extra_branches + ) except Exception as e: - logger.exception('Failed to build snapshot for origin %s', - self.url) + logger.exception("Failed to build snapshot for origin %s", self.url) sentry_sdk.capture_exception(e) - status_visit = 'partial' - status_load = 'failed' + status_visit = "partial" + status_load = "failed" return finalize_visit() def _load_revision(self, p_info, origin) -> Optional[Sha1Git]: """Does all the loading of a revision itself: * downloads a package and uncompresses it * loads it from disk * adds contents, directories, and revision to self.storage * returns (revision_id, loaded) Raises exception when unable to download or uncompress artifacts """ with tempfile.TemporaryDirectory() as tmpdir: dl_artifacts = self.download_package(p_info, tmpdir) uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir) - logger.debug('uncompressed_path: %s', uncompressed_path) + logger.debug("uncompressed_path: %s", uncompressed_path) directory = from_disk.Directory.from_disk( - path=uncompressed_path.encode('utf-8'), - max_content_length=self.max_content_size) + path=uncompressed_path.encode("utf-8"), + max_content_length=self.max_content_size, + ) contents: List[Content] = [] skipped_contents: List[SkippedContent] = [] directories: List[Directory] = [] for obj in directory.iter_tree(): obj = obj.to_model() if isinstance(obj, Content): # FIXME: read the data from disk later (when the # storage buffer is flushed). obj = obj.with_data() contents.append(obj) elif isinstance(obj, SkippedContent): skipped_contents.append(obj) elif isinstance(obj, Directory): directories.append(obj) else: - raise TypeError( - f'Unexpected content type from disk: {obj}') + raise TypeError(f"Unexpected content type from disk: {obj}") - logger.debug('Number of skipped contents: %s', - len(skipped_contents)) + logger.debug("Number of skipped contents: %s", len(skipped_contents)) self.storage.skipped_content_add(skipped_contents) - logger.debug('Number of contents: %s', len(contents)) + logger.debug("Number of contents: %s", len(contents)) self.storage.content_add(contents) - logger.debug('Number of directories: %s', len(directories)) + logger.debug("Number of directories: %s", len(directories)) self.storage.directory_add(directories) # FIXME: This should be release. cf. 
D409 revision = self.build_revision( - p_info['raw'], uncompressed_path, directory=directory.hash) + p_info["raw"], uncompressed_path, directory=directory.hash + ) if not revision: # Some artifacts are missing intrinsic metadata # skipping those return None metadata = revision.metadata or {} - metadata.update({ - 'original_artifact': [ - hashes for _, hashes in dl_artifacts - ], - }) + metadata.update( + {"original_artifact": [hashes for _, hashes in dl_artifacts],} + ) revision = attr.evolve(revision, metadata=metadata) - logger.debug('Revision: %s', revision) + logger.debug("Revision: %s", revision) self.storage.revision_add([revision]) return revision.id def _load_snapshot( - self, default_version: str, - revisions: Dict[str, List[Tuple[str, bytes]]], - extra_branches: Dict[bytes, Mapping[str, Any]] + self, + default_version: str, + revisions: Dict[str, List[Tuple[str, bytes]]], + extra_branches: Dict[bytes, Mapping[str, Any]], ) -> Optional[Snapshot]: """Build snapshot out of the current revisions stored and extra branches. Then load it in the storage. """ - logger.debug('revisions: %s', revisions) + logger.debug("revisions: %s", revisions) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_revisions in revisions.items(): - if version == default_version and \ - len(branch_name_revisions) == 1: + if version == default_version and len(branch_name_revisions) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_revisions[0] # except for some corner case (deposit) - if branch_name != 'HEAD': - branches[b'HEAD'] = { - 'target_type': 'alias', - 'target': branch_name.encode('utf-8'), + if branch_name != "HEAD": + branches[b"HEAD"] = { + "target_type": "alias", + "target": branch_name.encode("utf-8"), } for branch_name, target in branch_name_revisions: - branches[branch_name.encode('utf-8')] = { - 'target_type': 'revision', - 'target': target, + branches[branch_name.encode("utf-8")] = { + "target_type": "revision", + "target": target, } # Deal with extra-branches for name, branch_target in extra_branches.items(): if name in branches: - logger.error("Extra branch '%s' has been ignored", - name) + logger.error("Extra branch '%s' has been ignored", name) else: branches[name] = branch_target - snapshot_data = { - 'branches': branches - } - logger.debug('snapshot: %s', snapshot_data) + snapshot_data = {"branches": branches} + logger.debug("snapshot: %s", snapshot_data) snapshot = Snapshot.from_dict(snapshot_data) - logger.debug('snapshot: %s', snapshot) + logger.debug("snapshot: %s", snapshot) self.storage.snapshot_add([snapshot]) return snapshot diff --git a/swh/loader/package/nixguix/__init__.py b/swh/loader/package/nixguix/__init__.py index 3378d59..a82f24b 100644 --- a/swh/loader/package/nixguix/__init__.py +++ b/swh/loader/package/nixguix/__init__.py @@ -1,16 +1,17 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: """Register the current worker module's definition""" from .loader import NixGuixLoader + return { - 'task_modules': [f'{__name__}.tasks'], - 'loader': NixGuixLoader, + "task_modules": [f"{__name__}.tasks"], + "loader": NixGuixLoader, } diff --git a/swh/loader/package/nixguix/loader.py 
b/swh/loader/package/nixguix/loader.py index 2ca7755..b173eb6 100644 --- a/swh/loader/package/nixguix/loader.py +++ b/swh/loader/package/nixguix/loader.py @@ -1,185 +1,184 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import requests from typing import Dict, Optional, Any, Mapping from swh.model import hashutil -from swh.model.model import ( - Sha1Git, Revision, RevisionType -) +from swh.model.model import Sha1Git, Revision, RevisionType from swh.loader.package.utils import EMPTY_AUTHOR from swh.loader.package.loader import PackageLoader logger = logging.getLogger(__name__) def retrieve_sources(url: str) -> Dict[str, Any]: - response = requests.get(url, - allow_redirects=True) + response = requests.get(url, allow_redirects=True) if response.status_code != 200: - raise ValueError("Got %d HTTP code on %s", - response.status_code, url) + raise ValueError("Got %d HTTP code on %s", response.status_code, url) - return json.loads(response.content.decode('utf-8')) + return json.loads(response.content.decode("utf-8")) def clean_sources(sources: Dict[str, Any]) -> Dict[str, Any]: """Validate and clean the sources structure. First, it ensures all top level keys are presents. Then, it walks on the sources list and removes sources that don't contain required keys. Raises: ValueError: if a top level key is missing """ # Required top level keys - required_keys = ['version', 'revision', 'sources'] + required_keys = ["version", "revision", "sources"] missing_keys = [] for required_key in required_keys: if required_key not in sources: missing_keys.append(required_key) if missing_keys != []: - raise ValueError("sources structure invalid, missing: %s", - ",".join(missing_keys)) + raise ValueError( + "sources structure invalid, missing: %s", ",".join(missing_keys) + ) # Only the version 1 is currently supported - if sources['version'] != 1: - raise ValueError("The sources structure version '%d' is not supported", - sources['version']) + if sources["version"] != 1: + raise ValueError( + "The sources structure version '%d' is not supported", sources["version"] + ) # If a source doesn't contain required attributes, this source is # skipped but others could still be archived. verified_sources = [] - for source in sources['sources']: + for source in sources["sources"]: valid = True - required_keys = ['urls', 'integrity', 'type'] + required_keys = ["urls", "integrity", "type"] for required_key in required_keys: if required_key not in source: - logger.info("Skip source '%s' because key '%s' is missing", - source, required_key) + logger.info( + "Skip source '%s' because key '%s' is missing", source, required_key + ) valid = False - if source['type'] != 'url': + if source["type"] != "url": logger.info( "Skip source '%s' because the type %s is not supported", - source, source['type']) + source, + source["type"], + ) valid = False - if not isinstance(source['urls'], list): + if not isinstance(source["urls"], list): logger.info( - "Skip source '%s' because the urls attribute is not a list", - source) + "Skip source '%s' because the urls attribute is not a list", source + ) valid = False if valid: verified_sources.append(source) - sources['sources'] = verified_sources + sources["sources"] = verified_sources return sources class NixGuixLoader(PackageLoader): """Load sources from a sources.json file. 
    This loader is used to load sources used by functional package managers
    (e.g. Nix and Guix).

    """

-    visit_type = 'nixguix'
+
+    visit_type = "nixguix"

    def __init__(self, url):
        super().__init__(url=url)
        raw = retrieve_sources(url)
        clean = clean_sources(raw)
-        self.sources = clean['sources']
+        self.sources = clean["sources"]
        self.provider_url = url

-        self._integrityByUrl = {s['urls'][0]: s['integrity']
-                                for s in self.sources}
+        self._integrityByUrl = {s["urls"][0]: s["integrity"] for s in self.sources}

        # The revision used to create the sources.json file. For Nix,
        # this revision belongs to the github.com/nixos/nixpkgs
        # repository
-        self.revision = clean['revision']
+        self.revision = clean["revision"]

    # Note: this could be renamed get_artifacts in the PackageLoader
    # base class.
    def get_versions(self):
        """The first mirror of the mirror list is used as branch name in the
        snapshot.

        """
        return self._integrityByUrl.keys()

    # Note: this could be renamed get_artifact_info in the PackageLoader
    # base class.
    def get_package_info(self, url):
        # TODO: try all mirrors and not only the first one. A source
        # can be fetched from several urls, called mirrors. We
        # currently only use the first one, but if the first one
        # fails, we should try the second one and so on.
        integrity = self._integrityByUrl[url]
-        yield url, {'url': url,
-                    'raw': {
-                        'url': url,
-                        'integrity': integrity}}
+        yield url, {"url": url, "raw": {"url": url, "integrity": integrity}}

    def resolve_revision_from(
-            self, known_artifacts: Dict, artifact_metadata: Dict) \
-            -> Optional[bytes]:
+        self, known_artifacts: Dict, artifact_metadata: Dict
+    ) -> Optional[bytes]:
        for rev_id, known_artifact in known_artifacts.items():
-            known_integrity = known_artifact['extrinsic']['raw']['integrity']
-            if artifact_metadata['integrity'] == known_integrity:
+            known_integrity = known_artifact["extrinsic"]["raw"]["integrity"]
+            if artifact_metadata["integrity"] == known_integrity:
                return rev_id
        return None

    def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
        """We add a branch to the snapshot called 'evaluation' pointing to the
        revision used to generate the sources.json file. This revision is
        specified in the sources.json file itself. For the nixpkgs origin,
        this revision comes from the github.com/nixos/nixpkgs repository.

        Note this repository is not loaded explicitly. So, this pointer can
        target a nonexistent revision for a time. However, the github and gnu
        loaders are supposed to load this revision and should create the
        revision pointed to by this branch.

        This branch can be used to identify the snapshot associated with a
        Nix/Guix evaluation.
""" return { - b'evaluation': { - 'target_type': 'revision', - 'target': hashutil.hash_to_bytes(self.revision) + b"evaluation": { + "target_type": "revision", + "target": hashutil.hash_to_bytes(self.revision), } } - def build_revision(self, a_metadata: Dict, uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: + def build_revision( + self, a_metadata: Dict, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: return Revision( type=RevisionType.TAR, - message=b'', + message=b"", author=EMPTY_AUTHOR, date=None, committer=EMPTY_AUTHOR, committer_date=None, parents=[], directory=directory, synthetic=True, metadata={ - 'extrinsic': { - 'provider': self.provider_url, - 'when': self.visit_date.isoformat(), - 'raw': a_metadata, + "extrinsic": { + "provider": self.provider_url, + "when": self.visit_date.isoformat(), + "raw": a_metadata, }, - } + }, ) diff --git a/swh/loader/package/nixguix/tasks.py b/swh/loader/package/nixguix/tasks.py index 75517a6..c6f60de 100644 --- a/swh/loader/package/nixguix/tasks.py +++ b/swh/loader/package/nixguix/tasks.py @@ -1,16 +1,14 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task -from swh.loader.package.nixguix.loader import ( - NixGuixLoader -) +from swh.loader.package.nixguix.loader import NixGuixLoader -@shared_task(name=__name__ + '.LoadNixguix') +@shared_task(name=__name__ + ".LoadNixguix") def load_nixguix(*, url=None): """Load functional (e.g. guix/nix) package""" return NixGuixLoader(url).load() diff --git a/swh/loader/package/nixguix/tests/test_nixguix.py b/swh/loader/package/nixguix/tests/test_nixguix.py index f6d23db..503081a 100644 --- a/swh/loader/package/nixguix/tests/test_nixguix.py +++ b/swh/loader/package/nixguix/tests/test_nixguix.py @@ -1,333 +1,304 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from json.decoder import JSONDecodeError from swh.loader.package.nixguix.loader import ( - NixGuixLoader, retrieve_sources, clean_sources + NixGuixLoader, + retrieve_sources, + clean_sources, ) -from swh.loader.package.tests.common import ( - get_stats, check_snapshot -) +from swh.loader.package.tests.common import get_stats, check_snapshot -sources_url = 'https://nix-community.github.io/nixpkgs-swh/sources.json' +sources_url = "https://nix-community.github.io/nixpkgs-swh/sources.json" def test_retrieve_sources(swh_config, requests_mock_datadir): j = retrieve_sources(sources_url) assert "sources" in j.keys() assert len(j["sources"]) == 2 def test_retrieve_non_existing(swh_config, requests_mock_datadir): with pytest.raises(ValueError): - NixGuixLoader('https://non-existing-url') + NixGuixLoader("https://non-existing-url") def test_retrieve_non_json(swh_config, requests_mock_datadir): with pytest.raises(JSONDecodeError): - NixGuixLoader('https://example.com/file.txt') + NixGuixLoader("https://example.com/file.txt") def test_clean_sources_invalid_schema(swh_config, requests_mock_datadir): sources = {} - with pytest.raises(ValueError, - match="sources structure invalid, missing: .*"): + with pytest.raises(ValueError, match="sources structure invalid, missing: .*"): clean_sources(sources) def 
test_clean_sources_invalid_version(swh_config, requests_mock_datadir):
-    sources = {
-        'version': 2,
-        'sources': [],
-        'revision': 'my-revision'
-    }
+    sources = {"version": 2, "sources": [], "revision": "my-revision"}

-    with pytest.raises(ValueError,
-                       match="sources structure version .* is not supported"):
+    with pytest.raises(
+        ValueError, match="sources structure version .* is not supported"
+    ):
        clean_sources(sources)


def test_clean_sources_invalid_sources(swh_config, requests_mock_datadir):
    sources = {
-        'version': 1,
-        'sources': [
+        "version": 1,
+        "sources": [
            # Valid source
-            {
-                'type': 'url',
-                'urls': ['my-url'],
-                'integrity': 'my-integrity'
-            },
+            {"type": "url", "urls": ["my-url"], "integrity": "my-integrity"},
            # integrity is missing
-            {
-                'type': 'url',
-                'urls': ['my-url'],
-            },
+            {"type": "url", "urls": ["my-url"],},
            # urls is not a list
-            {
-                'type': 'url',
-                'urls': 'my-url',
-                'integrity': 'my-integrity'
-            },
+            {"type": "url", "urls": "my-url", "integrity": "my-integrity"},
            # type is not url
-            {
-                'type': 'git',
-                'urls': ['my-url'],
-                'integrity': 'my-integrity'
-            }
+            {"type": "git", "urls": ["my-url"], "integrity": "my-integrity"},
        ],
-        'revision': 'my-revision'
+        "revision": "my-revision",
    }

    clean = clean_sources(sources)

-    assert len(clean['sources']) == 1
+    assert len(clean["sources"]) == 1


def test_loader_one_visit(swh_config, requests_mock_datadir):
    loader = NixGuixLoader(sources_url)
    res = loader.load()
-    assert res['status'] == 'eventful'
+    assert res["status"] == "eventful"

    stats = get_stats(loader.storage)
    assert {
-        'content': 1,
-        'directory': 3,
-        'origin': 1,
-        'origin_visit': 1,
-        'person': 1,
-        'release': 0,
-        'revision': 1,
-        'skipped_content': 0,
-        'snapshot': 1
+        "content": 1,
+        "directory": 3,
+        "origin": 1,
+        "origin_visit": 1,
+        "person": 1,
+        "release": 0,
+        "revision": 1,
+        "skipped_content": 0,
+        "snapshot": 1,
    } == stats

    origin_visit = loader.storage.origin_visit_get_latest(sources_url)
    # The visit is partial because urls pointing to non tarball files
    # are not handled yet
-    assert origin_visit['status'] == 'partial'
-    assert origin_visit['type'] == 'nixguix'
+    assert origin_visit["status"] == "partial"
+    assert origin_visit["type"] == "nixguix"


def test_uncompress_failure(swh_config, requests_mock_datadir):
    """Non tarball files are currently not supported and the uncompress
    function fails on such kinds of files.

    However, even in this case of failure (because of the url
    https://example.com/file.txt), a snapshot and a visit have to be created
    (with a partial status since not all files are archived).

    """
    loader = NixGuixLoader(sources_url)
    loader_status = loader.load()

-    urls = [s['urls'][0] for s in loader.sources]
+    urls = [s["urls"][0] for s in loader.sources]
    assert "https://example.com/file.txt" in urls
-    assert loader_status['status'] == 'eventful'
+    assert loader_status["status"] == "eventful"

    origin_visit = loader.storage.origin_visit_get_latest(sources_url)
    # The visit is partial because urls pointing to non tarball files
    # are not handled yet
-    assert origin_visit['status'] == 'partial'
+    assert origin_visit["status"] == "partial"


def test_loader_incremental(swh_config, requests_mock_datadir):
    """Ensure a second visit does not download artifacts already downloaded
    by the previous visit.
""" loader = NixGuixLoader(sources_url) load_status = loader.load() loader.load() - expected_snapshot_id = '0c5881c74283793ebe9a09a105a9381e41380383' - assert load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id - } + expected_snapshot_id = "0c5881c74283793ebe9a09a105a9381e41380383" + assert load_status == {"status": "eventful", "snapshot_id": expected_snapshot_id} expected_branches = { - 'evaluation': { - 'target': 'cc4e04c26672dd74e5fd0fecb78b435fb55368f7', - 'target_type': 'revision' + "evaluation": { + "target": "cc4e04c26672dd74e5fd0fecb78b435fb55368f7", + "target_type": "revision", }, - 'https://github.com/owner-1/repository-1/revision-1.tgz': { - 'target': '488ad4e7b8e2511258725063cf43a2b897c503b4', - 'target_type': 'revision' + "https://github.com/owner-1/repository-1/revision-1.tgz": { + "target": "488ad4e7b8e2511258725063cf43a2b897c503b4", + "target_type": "revision", }, } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': expected_branches, + "id": expected_snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) urls = [ - m.url for m in requests_mock_datadir.request_history - if m.url == ('https://github.com/owner-1/repository-1/revision-1.tgz') + m.url + for m in requests_mock_datadir.request_history + if m.url == ("https://github.com/owner-1/repository-1/revision-1.tgz") ] # The artifact # 'https://github.com/owner-1/repository-1/revision-1.tgz' is only # visited one time assert len(urls) == 1 def test_loader_two_visits(swh_config, requests_mock_datadir_visits): """To ensure there is only one origin, but two visits, two revisions and two snapshots are created. The first visit creates a snapshot containing one tarball. The second visit creates a snapshot containing the same tarball and another tarball. 
""" loader = NixGuixLoader(sources_url) load_status = loader.load() - expected_snapshot_id = '0c5881c74283793ebe9a09a105a9381e41380383' - assert load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id - } + expected_snapshot_id = "0c5881c74283793ebe9a09a105a9381e41380383" + assert load_status == {"status": "eventful", "snapshot_id": expected_snapshot_id} expected_branches = { - 'evaluation': { - 'target': 'cc4e04c26672dd74e5fd0fecb78b435fb55368f7', - 'target_type': 'revision' + "evaluation": { + "target": "cc4e04c26672dd74e5fd0fecb78b435fb55368f7", + "target_type": "revision", + }, + "https://github.com/owner-1/repository-1/revision-1.tgz": { + "target": "488ad4e7b8e2511258725063cf43a2b897c503b4", + "target_type": "revision", }, - 'https://github.com/owner-1/repository-1/revision-1.tgz': { - 'target': '488ad4e7b8e2511258725063cf43a2b897c503b4', - 'target_type': 'revision' - } } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': expected_branches, + "id": expected_snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) stats = get_stats(loader.storage) assert { - 'content': 1, - 'directory': 3, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 1, - 'skipped_content': 0, - 'snapshot': 1 + "content": 1, + "directory": 3, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 1, + "skipped_content": 0, + "snapshot": 1, } == stats loader = NixGuixLoader(sources_url) load_status = loader.load() - expected_snapshot_id = 'b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97' - assert load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id - } + expected_snapshot_id = "b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97" + assert load_status == {"status": "eventful", "snapshot_id": expected_snapshot_id} # This ensures visits are incremental. Indeed, if we request a # second time an url, because of the requests_mock_datadir_visits # fixture, the file has to end with `_visit1`. 
expected_branches = { - 'evaluation': { - 'target': '602140776b2ce6c9159bcf52ada73a297c063d5e', - 'target_type': 'revision' + "evaluation": { + "target": "602140776b2ce6c9159bcf52ada73a297c063d5e", + "target_type": "revision", }, - 'https://github.com/owner-1/repository-1/revision-1.tgz': { - 'target': '488ad4e7b8e2511258725063cf43a2b897c503b4', - 'target_type': 'revision' + "https://github.com/owner-1/repository-1/revision-1.tgz": { + "target": "488ad4e7b8e2511258725063cf43a2b897c503b4", + "target_type": "revision", + }, + "https://github.com/owner-2/repository-1/revision-1.tgz": { + "target": "85e0bad74e33e390aaeb74f139853ae3863ee544", + "target_type": "revision", }, - 'https://github.com/owner-2/repository-1/revision-1.tgz': { - 'target': '85e0bad74e33e390aaeb74f139853ae3863ee544', - 'target_type': 'revision' - } } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': expected_branches, + "id": expected_snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) stats = get_stats(loader.storage) assert { - 'content': 2, - 'directory': 5, - 'origin': 1, - 'origin_visit': 2, - 'person': 1, - 'release': 0, - 'revision': 2, - 'skipped_content': 0, - 'snapshot': 2 + "content": 2, + "directory": 5, + "origin": 1, + "origin_visit": 2, + "person": 1, + "release": 0, + "revision": 2, + "skipped_content": 0, + "snapshot": 2, } == stats def test_resolve_revision_from(swh_config, requests_mock_datadir): loader = NixGuixLoader(sources_url) known_artifacts = { - 'id1': {'extrinsic': {'raw': { - 'url': "url1", - 'integrity': 'integrity1'}}}, - 'id2': {'extrinsic': {'raw': { - 'url': "url2", - 'integrity': 'integrity2'}}}, - } + "id1": {"extrinsic": {"raw": {"url": "url1", "integrity": "integrity1"}}}, + "id2": {"extrinsic": {"raw": {"url": "url2", "integrity": "integrity2"}}}, + } - metadata = {'url': 'url1', 'integrity': 'integrity1'} - assert loader.resolve_revision_from(known_artifacts, metadata) == 'id1' - metadata = {'url': 'url3', 'integrity': 'integrity3'} - assert loader.resolve_revision_from(known_artifacts, metadata) == None # noqa + metadata = {"url": "url1", "integrity": "integrity1"} + assert loader.resolve_revision_from(known_artifacts, metadata) == "id1" + metadata = {"url": "url3", "integrity": "integrity3"} + assert loader.resolve_revision_from(known_artifacts, metadata) == None # noqa def test_evaluation_branch(swh_config, requests_mock_datadir): loader = NixGuixLoader(sources_url) res = loader.load() - assert res['status'] == 'eventful' + assert res["status"] == "eventful" expected_branches = { - 'https://github.com/owner-1/repository-1/revision-1.tgz': { - 'target': '488ad4e7b8e2511258725063cf43a2b897c503b4', - 'target_type': 'revision', + "https://github.com/owner-1/repository-1/revision-1.tgz": { + "target": "488ad4e7b8e2511258725063cf43a2b897c503b4", + "target_type": "revision", }, - 'evaluation': { - 'target': 'cc4e04c26672dd74e5fd0fecb78b435fb55368f7', - 'target_type': 'revision', + "evaluation": { + "target": "cc4e04c26672dd74e5fd0fecb78b435fb55368f7", + "target_type": "revision", }, } expected_snapshot = { - 'id': '0c5881c74283793ebe9a09a105a9381e41380383', - 'branches': expected_branches, + "id": "0c5881c74283793ebe9a09a105a9381e41380383", + "branches": expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) def test_eoferror(swh_config, requests_mock_datadir): """Load a truncated archive which is invalid to make the uncompress function raising the exception EOFError. 
We then check if a snapshot is created, meaning this error is well managed. """ - sources = "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa + sources = ( + "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa + ) loader = NixGuixLoader(sources) loader.load() expected_branches = { - 'evaluation': { - 'target': 'cc4e04c26672dd74e5fd0fecb78b435fb55368f7', - 'target_type': 'revision', + "evaluation": { + "target": "cc4e04c26672dd74e5fd0fecb78b435fb55368f7", + "target_type": "revision", }, } expected_snapshot = { - 'id': '4257fa2350168c6bfec726a06452ea27a2c0cb33', - 'branches': expected_branches, + "id": "4257fa2350168c6bfec726a06452ea27a2c0cb33", + "branches": expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) diff --git a/swh/loader/package/nixguix/tests/test_tasks.py b/swh/loader/package/nixguix/tests/test_tasks.py index a329d72..df66239 100644 --- a/swh/loader/package/nixguix/tests/test_tasks.py +++ b/swh/loader/package/nixguix/tests/test_tasks.py @@ -1,27 +1,27 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_nixguix_loader(mocker, swh_app, celery_session_worker, swh_config): - mock_loader = mocker.patch( - 'swh.loader.package.nixguix.loader.NixGuixLoader.load') - mock_loader.return_value = {'status': 'eventful'} + mock_loader = mocker.patch("swh.loader.package.nixguix.loader.NixGuixLoader.load") + mock_loader.return_value = {"status": "eventful"} mock_retrieve_sources = mocker.patch( - 'swh.loader.package.nixguix.loader.retrieve_sources') + "swh.loader.package.nixguix.loader.retrieve_sources" + ) mock_retrieve_sources.return_value = { - 'version': 1, - 'sources': [], - 'revision': 'some-revision' + "version": 1, + "sources": [], + "revision": "some-revision", } res = swh_app.send_task( - 'swh.loader.package.nixguix.tasks.LoadNixguix', - kwargs=dict(url='some-url')) + "swh.loader.package.nixguix.tasks.LoadNixguix", kwargs=dict(url="some-url") + ) assert res res.wait() assert res.successful() - assert res.result == {'status': 'eventful'} + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/npm/__init__.py b/swh/loader/package/npm/__init__.py index a35af88..296d23f 100644 --- a/swh/loader/package/npm/__init__.py +++ b/swh/loader/package/npm/__init__.py @@ -1,16 +1,17 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: """Register the current worker module's definition""" from .loader import NpmLoader + return { - 'task_modules': [f'{__name__}.tasks'], - 'loader': NpmLoader, + "task_modules": [f"{__name__}.tasks"], + "loader": NpmLoader, } diff --git a/swh/loader/package/npm/loader.py b/swh/loader/package/npm/loader.py index 4a1cf88..a9b6d48 100644 --- a/swh/loader/package/npm/loader.py +++ b/swh/loader/package/npm/loader.py @@ -1,286 +1,287 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os from 
codecs import BOM_UTF8 from typing import Any, Dict, Generator, Mapping, Sequence, Tuple, Optional import attr import chardet from urllib.parse import quote from swh.model.model import ( - Person, RevisionType, Revision, TimestampWithTimezone, Sha1Git, + Person, + RevisionType, + Revision, + TimestampWithTimezone, + Sha1Git, ) from swh.loader.package.loader import PackageLoader -from swh.loader.package.utils import ( - api_info, release_name -) +from swh.loader.package.utils import api_info, release_name logger = logging.getLogger(__name__) class NpmLoader(PackageLoader): """Load npm origin's artifact releases into swh archive. """ - visit_type = 'npm' + + visit_type = "npm" def __init__(self, url: str): """Constructor Args str: origin url (e.g. https://www.npmjs.com/package/) """ super().__init__(url=url) - package_name = url.split('https://www.npmjs.com/package/')[1] - safe_name = quote(package_name, safe='') - self.provider_url = f'https://replicate.npmjs.com/{safe_name}/' + package_name = url.split("https://www.npmjs.com/package/")[1] + safe_name = quote(package_name, safe="") + self.provider_url = f"https://replicate.npmjs.com/{safe_name}/" self._info: Dict[str, Any] = {} self._versions = None @property def info(self) -> Dict[str, Any]: """Return the project metadata information (fetched from npm registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: - return sorted(list(self.info['versions'].keys())) + return sorted(list(self.info["versions"].keys())) def get_default_version(self) -> str: - return self.info['dist-tags'].get('latest', '') + return self.info["dist-tags"].get("latest", "") - def get_package_info(self, version: str) -> Generator[ - Tuple[str, Mapping[str, Any]], None, None]: - meta = self.info['versions'][version] - url = meta['dist']['tarball'] + def get_package_info( + self, version: str + ) -> Generator[Tuple[str, Mapping[str, Any]], None, None]: + meta = self.info["versions"][version] + url = meta["dist"]["tarball"] p_info = { - 'url': url, - 'filename': os.path.basename(url), - 'raw': meta, + "url": url, + "filename": os.path.basename(url), + "raw": meta, } yield release_name(version), p_info def resolve_revision_from( - self, known_artifacts: Dict, artifact_metadata: Dict) \ - -> Optional[bytes]: + self, known_artifacts: Dict, artifact_metadata: Dict + ) -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, artifact_metadata) def build_revision( - self, a_metadata: Dict, uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: + self, a_metadata: Dict, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None # from intrinsic metadata author = extract_npm_package_author(i_metadata) - message = i_metadata['version'].encode('ascii') + message = i_metadata["version"].encode("ascii") # from extrinsic metadata # No date available in intrinsic metadata: retrieve it from the API # metadata, using the version number that the API claims this package # has. 
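        # Hedged sketch of the registry document shape this lookup relies on;
        # the values below are illustrative assumptions, not taken from this
        # diff:
        #
        #   self.info ~ {
        #       "dist-tags": {"latest": "0.0.4"},
        #       "time": {"0.0.2": "2011-06-12T01:37:16.000Z", ...},
        #       "versions": {"0.0.2": {...}, ...},
        #   }
        #
        # so self.info["time"][a_metadata["version"]] yields the upload date,
        # with a_metadata["mtime"] as a fallback when no "time" map is present.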
- extrinsic_version = a_metadata['version'] + extrinsic_version = a_metadata["version"] - if 'time' in self.info: - date = self.info['time'][extrinsic_version] - elif 'mtime' in a_metadata: - date = a_metadata['mtime'] + if "time" in self.info: + date = self.info["time"][extrinsic_version] + elif "mtime" in a_metadata: + date = a_metadata["mtime"] else: - artifact_name = os.path.basename(a_metadata['dist']['tarball']) + artifact_name = os.path.basename(a_metadata["dist"]["tarball"]) raise ValueError( - 'Origin %s: Cannot determine upload time for artifact %s.' % - (self.url, artifact_name) + "Origin %s: Cannot determine upload time for artifact %s." + % (self.url, artifact_name) ) date = TimestampWithTimezone.from_iso8601(date) # FIXME: this is to remain bug-compatible with earlier versions: - date = attr.evolve(date, timestamp=attr.evolve( - date.timestamp, microseconds=0)) + date = attr.evolve(date, timestamp=attr.evolve(date.timestamp, microseconds=0)) r = Revision( type=RevisionType.TAR, message=message, author=author, date=date, committer=author, committer_date=date, parents=[], directory=directory, synthetic=True, metadata={ - 'intrinsic': { - 'tool': 'package.json', - 'raw': i_metadata, - }, - 'extrinsic': { - 'provider': self.provider_url, - 'when': self.visit_date.isoformat(), - 'raw': a_metadata, + "intrinsic": {"tool": "package.json", "raw": i_metadata,}, + "extrinsic": { + "provider": self.provider_url, + "when": self.visit_date.isoformat(), + "raw": a_metadata, }, }, ) return r def artifact_to_revision_id( - known_artifacts: Dict, artifact_metadata: Dict) -> Optional[bytes]: + known_artifacts: Dict, artifact_metadata: Dict +) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats: - old format sample:: { 'package_source': { 'sha1': '05181c12cd8c22035dd31155656826b85745da37', } } - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... } """ - shasum = artifact_metadata['dist']['shasum'] + shasum = artifact_metadata["dist"]["shasum"] for rev_id, known_artifact in known_artifacts.items(): - known_original_artifact = known_artifact.get('original_artifact') + known_original_artifact = known_artifact.get("original_artifact") if not known_original_artifact: # previous loader-npm version kept original artifact elsewhere - known_original_artifact = known_artifact.get('package_source') + known_original_artifact = known_artifact.get("package_source") if not known_original_artifact: continue - original_hash = known_original_artifact['sha1'] + original_hash = known_original_artifact["sha1"] else: assert isinstance(known_original_artifact, list) - original_hash = known_original_artifact[0]['checksums']['sha1'] + original_hash = known_original_artifact[0]["checksums"]["sha1"] if shasum == original_hash: return rev_id return None def extract_npm_package_author(package_json) -> Person: """ Extract package author from a ``package.json`` file content and return it in swh format. 
Args: package_json (dict): Dict holding the content of parsed ``package.json`` file Returns: Person """ def _author_str(author_data): if type(author_data) is dict: - author_str = '' - if 'name' in author_data: - author_str += author_data['name'] - if 'email' in author_data: - author_str += ' <%s>' % author_data['email'] + author_str = "" + if "name" in author_data: + author_str += author_data["name"] + if "email" in author_data: + author_str += " <%s>" % author_data["email"] return author_str elif type(author_data) is list: - return _author_str(author_data[0]) if len(author_data) > 0 else '' + return _author_str(author_data[0]) if len(author_data) > 0 else "" else: return author_data - for author_key in ('author', 'authors'): + for author_key in ("author", "authors"): if author_key in package_json: author_str = _author_str(package_json[author_key]) return Person.from_fullname(author_str.encode()) - return Person(fullname=b'', name=None, email=None) + return Person(fullname=b"", name=None, email=None) def _lstrip_bom(s, bom=BOM_UTF8): if s.startswith(bom): - return s[len(bom):] + return s[len(bom) :] else: return s def load_json(json_bytes): """ Try to load JSON from bytes and return a dictionary. First try to decode from utf-8. If the decoding failed, try to detect the encoding and decode again with replace error handling. If JSON is malformed, an empty dictionary will be returned. Args: json_bytes (bytes): binary content of a JSON file Returns: dict: JSON data loaded in a dictionary """ json_data = {} try: - json_str = _lstrip_bom(json_bytes).decode('utf-8') + json_str = _lstrip_bom(json_bytes).decode("utf-8") except UnicodeDecodeError: - encoding = chardet.detect(json_bytes)['encoding'] + encoding = chardet.detect(json_bytes)["encoding"] if encoding: - json_str = json_bytes.decode(encoding, 'replace') + json_str = json_bytes.decode(encoding, "replace") try: json_data = json.loads(json_str) except json.decoder.JSONDecodeError: pass return json_data def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from npm. Returns: the pkginfo parsed structure as a dict if any or None if none was present. 
""" # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) == 0: return {} project_dirname = lst[0] - package_json_path = os.path.join(dir_path, project_dirname, 'package.json') + package_json_path = os.path.join(dir_path, project_dirname, "package.json") if not os.path.exists(package_json_path): return {} - with open(package_json_path, 'rb') as package_json_file: + with open(package_json_path, "rb") as package_json_file: package_json_bytes = package_json_file.read() return load_json(package_json_bytes) diff --git a/swh/loader/package/npm/tasks.py b/swh/loader/package/npm/tasks.py index c50377a..d796a23 100644 --- a/swh/loader/package/npm/tasks.py +++ b/swh/loader/package/npm/tasks.py @@ -1,14 +1,14 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.npm.loader import NpmLoader -@shared_task(name=__name__ + '.LoadNpm') +@shared_task(name=__name__ + ".LoadNpm") def load_npm(*, url: str): """Load Npm package""" return NpmLoader(url).load() diff --git a/swh/loader/package/npm/tests/test_npm.py b/swh/loader/package/npm/tests/test_npm.py index cc2ae32..89adfbe 100644 --- a/swh/loader/package/npm/tests/test_npm.py +++ b/swh/loader/package/npm/tests/test_npm.py @@ -1,608 +1,598 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os import pytest from swh.model.hashutil import hash_to_bytes from swh.model.model import Person from swh.loader.package.npm.loader import ( - NpmLoader, extract_npm_package_author, - artifact_to_revision_id + NpmLoader, + extract_npm_package_author, + artifact_to_revision_id, ) from swh.loader.package.tests.common import ( - check_snapshot, check_metadata_paths, get_stats + check_snapshot, + check_metadata_paths, + get_stats, ) def test_extract_npm_package_author(datadir): package_metadata_filepath = os.path.join( - datadir, 'https_replicate.npmjs.com', 'org_visit1') + datadir, "https_replicate.npmjs.com", "org_visit1" + ) with open(package_metadata_filepath) as json_file: package_metadata = json.load(json_file) - extract_npm_package_author(package_metadata['versions']['0.0.2']) == \ - Person( - fullname=b'mooz ', - name=b'mooz', - email=b'stillpedant@gmail.com' - ) + extract_npm_package_author(package_metadata["versions"]["0.0.2"]) == Person( + fullname=b"mooz ", + name=b"mooz", + email=b"stillpedant@gmail.com", + ) - assert ( - extract_npm_package_author(package_metadata['versions']['0.0.3']) == - Person( - fullname=b'Masafumi Oyamada ', - name=b'Masafumi Oyamada', - email=b'stillpedant@gmail.com' - ) + assert extract_npm_package_author(package_metadata["versions"]["0.0.3"]) == Person( + fullname=b"Masafumi Oyamada ", + name=b"Masafumi Oyamada", + email=b"stillpedant@gmail.com", ) - package_json = json.loads(''' + package_json = json.loads( + """ { "name": "highlightjs-line-numbers.js", "version": "2.7.0", "description": "Highlight.js line numbers plugin.", "main": "src/highlightjs-line-numbers.js", "dependencies": {}, "devDependencies": { "gulp": "^4.0.0", "gulp-rename": "^1.4.0", "gulp-replace": "^0.6.1", "gulp-uglify": "^1.2.0" }, 
"repository": { "type": "git", "url": "https://github.com/wcoder/highlightjs-line-numbers.js.git" }, "author": "Yauheni Pakala ", "license": "MIT", "bugs": { "url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues" }, "homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/" - }''') # noqa + }""" + ) # noqa - assert extract_npm_package_author(package_json) == \ - Person( - fullname=b'Yauheni Pakala ', - name=b'Yauheni Pakala', - email=b'evgeniy.pakalo@gmail.com' - ) + assert extract_npm_package_author(package_json) == Person( + fullname=b"Yauheni Pakala ", + name=b"Yauheni Pakala", + email=b"evgeniy.pakalo@gmail.com", + ) - package_json = json.loads(''' + package_json = json.loads( + """ { "name": "3-way-diff", "version": "0.0.1", "description": "3-way diffing of JavaScript objects", "main": "index.js", "authors": [ { "name": "Shawn Walsh", "url": "https://github.com/shawnpwalsh" }, { "name": "Markham F Rollins IV", "url": "https://github.com/mrollinsiv" } ], "keywords": [ "3-way diff", "3 way diff", "three-way diff", "three way diff" ], "devDependencies": { "babel-core": "^6.20.0", "babel-preset-es2015": "^6.18.0", "mocha": "^3.0.2" }, "dependencies": { "lodash": "^4.15.0" } - }''') + }""" + ) - assert extract_npm_package_author(package_json) == \ - Person( - fullname=b'Shawn Walsh', - name=b'Shawn Walsh', - email=None - ) + assert extract_npm_package_author(package_json) == Person( + fullname=b"Shawn Walsh", name=b"Shawn Walsh", email=None + ) - package_json = json.loads(''' + package_json = json.loads( + """ { "name": "yfe-ynpm", "version": "1.0.0", "homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm", "repository": { "type": "git", "url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git" }, "author": [ "fengmk2 (https://fengmk2.com)", "xufuzi (https://7993.org)" ], "license": "MIT" - }''') + }""" + ) - assert extract_npm_package_author(package_json) == \ - Person( - fullname=b'fengmk2 (https://fengmk2.com)', - name=b'fengmk2', - email=b'fengmk2@gmail.com', - ) + assert extract_npm_package_author(package_json) == Person( + fullname=b"fengmk2 (https://fengmk2.com)", + name=b"fengmk2", + email=b"fengmk2@gmail.com", + ) - package_json = json.loads(''' + package_json = json.loads( + """ { "name": "umi-plugin-whale", "version": "0.0.8", "description": "Internal contract component", "authors": { "name": "xiaohuoni", "email": "448627663@qq.com" }, "repository": "alitajs/whale", "devDependencies": { "np": "^3.0.4", "umi-tools": "*" }, "license": "MIT" - }''') + }""" + ) - assert extract_npm_package_author(package_json) == \ - Person( - fullname=b'xiaohuoni <448627663@qq.com>', - name=b'xiaohuoni', - email=b'448627663@qq.com' - ) + assert extract_npm_package_author(package_json) == Person( + fullname=b"xiaohuoni <448627663@qq.com>", + name=b"xiaohuoni", + email=b"448627663@qq.com", + ) def normalize_hashes(hashes): if isinstance(hashes, str): return hash_to_bytes(hashes) if isinstance(hashes, list): return [hash_to_bytes(x) for x in hashes] return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()} -_expected_new_contents_first_visit = normalize_hashes([ - '4ce3058e16ab3d7e077f65aabf855c34895bf17c', - '858c3ceee84c8311adc808f8cdb30d233ddc9d18', - '0fa33b4f5a4e0496da6843a38ff1af8b61541996', - '85a410f8ef8eb8920f2c384a9555566ad4a2e21b', - '9163ac8025923d5a45aaac482262893955c9b37b', - '692cf623b8dd2c5df2c2998fd95ae4ec99882fb4', - '18c03aac6d3e910efb20039c15d70ab5e0297101', - '41265c42446aac17ca769e67d1704f99e5a1394d', - '783ff33f5882813dca9239452c4a7cadd4dba778', - 
'b029cfb85107aee4590c2434a3329bfcf36f8fa1', - '112d1900b4c2e3e9351050d1b542c9744f9793f3', - '5439bbc4bd9a996f1a38244e6892b71850bc98fd', - 'd83097a2f994b503185adf4e719d154123150159', - 'd0939b4898e83090ee55fd9d8a60e312cfadfbaf', - 'b3523a26f7147e4af40d9d462adaae6d49eda13e', - 'cd065fb435d6fb204a8871bcd623d0d0e673088c', - '2854a40855ad839a54f4b08f5cff0cf52fca4399', - 'b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe', - '0f73d56e1cf480bded8a1ecf20ec6fc53c574713', - '0d9882b2dfafdce31f4e77fe307d41a44a74cefe', - '585fc5caab9ead178a327d3660d35851db713df1', - 'e8cd41a48d79101977e3036a87aeb1aac730686f', - '5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7', - '9c3cc2763bf9e9e37067d3607302c4776502df98', - '3649a68410e354c83cd4a38b66bd314de4c8f5c9', - 'e96ed0c091de1ebdf587104eaf63400d1974a1fe', - '078ca03d2f99e4e6eab16f7b75fbb7afb699c86c', - '38de737da99514de6559ff163c988198bc91367a', -]) - -_expected_new_directories_first_visit = normalize_hashes([ - '3370d20d6f96dc1c9e50f083e2134881db110f4f', - '42753c0c2ab00c4501b552ac4671c68f3cf5aece', - 'd7895533ef5edbcffdea3f057d9fef3a1ef845ce', - '80579be563e2ef3e385226fe7a3f079b377f142c', - '3b0ddc6a9e58b4b53c222da4e27b280b6cda591c', - 'bcad03ce58ac136f26f000990fc9064e559fe1c0', - '5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca', - 'e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd', - '584b5b4b6cf7f038095e820b99386a9c232de931', - '184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a', - 'bb5f4ee143c970367eb409f2e4c1104898048b9d', - '1b95491047add1103db0dfdfa84a9735dcb11e88', - 'a00c6de13471a2d66e64aca140ddb21ef5521e62', - '5ce6c1cd5cda2d546db513aaad8c72a44c7771e2', - 'c337091e349b6ac10d38a49cdf8c2401ef9bb0f2', - '202fafcd7c0f8230e89d5496ad7f44ab12b807bf', - '775cc516543be86c15c1dc172f49c0d4e6e78235', - 'ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e', -]) - -_expected_new_revisions_first_visit = normalize_hashes({ - 'd8a1c7474d2956ac598a19f0f27d52f7015f117e': - '42753c0c2ab00c4501b552ac4671c68f3cf5aece', - '5f9eb78af37ffd12949f235e86fac04898f9f72a': - '3370d20d6f96dc1c9e50f083e2134881db110f4f', - 'ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a': - 'd7895533ef5edbcffdea3f057d9fef3a1ef845ce'} +_expected_new_contents_first_visit = normalize_hashes( + [ + "4ce3058e16ab3d7e077f65aabf855c34895bf17c", + "858c3ceee84c8311adc808f8cdb30d233ddc9d18", + "0fa33b4f5a4e0496da6843a38ff1af8b61541996", + "85a410f8ef8eb8920f2c384a9555566ad4a2e21b", + "9163ac8025923d5a45aaac482262893955c9b37b", + "692cf623b8dd2c5df2c2998fd95ae4ec99882fb4", + "18c03aac6d3e910efb20039c15d70ab5e0297101", + "41265c42446aac17ca769e67d1704f99e5a1394d", + "783ff33f5882813dca9239452c4a7cadd4dba778", + "b029cfb85107aee4590c2434a3329bfcf36f8fa1", + "112d1900b4c2e3e9351050d1b542c9744f9793f3", + "5439bbc4bd9a996f1a38244e6892b71850bc98fd", + "d83097a2f994b503185adf4e719d154123150159", + "d0939b4898e83090ee55fd9d8a60e312cfadfbaf", + "b3523a26f7147e4af40d9d462adaae6d49eda13e", + "cd065fb435d6fb204a8871bcd623d0d0e673088c", + "2854a40855ad839a54f4b08f5cff0cf52fca4399", + "b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe", + "0f73d56e1cf480bded8a1ecf20ec6fc53c574713", + "0d9882b2dfafdce31f4e77fe307d41a44a74cefe", + "585fc5caab9ead178a327d3660d35851db713df1", + "e8cd41a48d79101977e3036a87aeb1aac730686f", + "5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7", + "9c3cc2763bf9e9e37067d3607302c4776502df98", + "3649a68410e354c83cd4a38b66bd314de4c8f5c9", + "e96ed0c091de1ebdf587104eaf63400d1974a1fe", + "078ca03d2f99e4e6eab16f7b75fbb7afb699c86c", + "38de737da99514de6559ff163c988198bc91367a", + ] +) + +_expected_new_directories_first_visit = normalize_hashes( + [ + 
"3370d20d6f96dc1c9e50f083e2134881db110f4f", + "42753c0c2ab00c4501b552ac4671c68f3cf5aece", + "d7895533ef5edbcffdea3f057d9fef3a1ef845ce", + "80579be563e2ef3e385226fe7a3f079b377f142c", + "3b0ddc6a9e58b4b53c222da4e27b280b6cda591c", + "bcad03ce58ac136f26f000990fc9064e559fe1c0", + "5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca", + "e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd", + "584b5b4b6cf7f038095e820b99386a9c232de931", + "184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a", + "bb5f4ee143c970367eb409f2e4c1104898048b9d", + "1b95491047add1103db0dfdfa84a9735dcb11e88", + "a00c6de13471a2d66e64aca140ddb21ef5521e62", + "5ce6c1cd5cda2d546db513aaad8c72a44c7771e2", + "c337091e349b6ac10d38a49cdf8c2401ef9bb0f2", + "202fafcd7c0f8230e89d5496ad7f44ab12b807bf", + "775cc516543be86c15c1dc172f49c0d4e6e78235", + "ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e", + ] +) + +_expected_new_revisions_first_visit = normalize_hashes( + { + "d8a1c7474d2956ac598a19f0f27d52f7015f117e": ( + "42753c0c2ab00c4501b552ac4671c68f3cf5aece" + ), + "5f9eb78af37ffd12949f235e86fac04898f9f72a": ( + "3370d20d6f96dc1c9e50f083e2134881db110f4f" + ), + "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a": ( + "d7895533ef5edbcffdea3f057d9fef3a1ef845ce" + ), + } ) def package_url(package): - return 'https://www.npmjs.com/package/%s' % package + return "https://www.npmjs.com/package/%s" % package def package_metadata_url(package): - return 'https://replicate.npmjs.com/%s/' % package + return "https://replicate.npmjs.com/%s/" % package def test_revision_metadata_structure(swh_config, requests_mock_datadir): - package = 'org' + package = "org" loader = NpmLoader(package_url(package)) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None - expected_revision_id = hash_to_bytes( - 'd8a1c7474d2956ac598a19f0f27d52f7015f117e') + expected_revision_id = hash_to_bytes("d8a1c7474d2956ac598a19f0f27d52f7015f117e") revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None - check_metadata_paths(revision['metadata'], paths=[ - ('intrinsic.tool', str), - ('intrinsic.raw', dict), - ('extrinsic.provider', str), - ('extrinsic.when', str), - ('extrinsic.raw', dict), - ('original_artifact', list), - ]) + check_metadata_paths( + revision["metadata"], + paths=[ + ("intrinsic.tool", str), + ("intrinsic.raw", dict), + ("extrinsic.provider", str), + ("extrinsic.when", str), + ("extrinsic.raw", dict), + ("original_artifact", list), + ], + ) - for original_artifact in revision['metadata']['original_artifact']: - check_metadata_paths(original_artifact, paths=[ - ('filename', str), - ('length', int), - ('checksums', dict), - ]) + for original_artifact in revision["metadata"]["original_artifact"]: + check_metadata_paths( + original_artifact, + paths=[("filename", str), ("length", int), ("checksums", dict),], + ) def test_npm_loader_first_visit(swh_config, requests_mock_datadir): - package = 'org' + package = "org" loader = NpmLoader(package_url(package)) actual_load_status = loader.load() - expected_snapshot_id = 'd0587e1195aed5a8800411a008f2f2d627f18e2d' + expected_snapshot_id = "d0587e1195aed5a8800411a008f2f2d627f18e2d" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } stats = get_stats(loader.storage) assert { - 'content': len(_expected_new_contents_first_visit), - 
'directory': len(_expected_new_directories_first_visit), - 'origin': 1, - 'origin_visit': 1, - 'person': 2, - 'release': 0, - 'revision': len(_expected_new_revisions_first_visit), - 'skipped_content': 0, - 'snapshot': 1, + "content": len(_expected_new_contents_first_visit), + "directory": len(_expected_new_directories_first_visit), + "origin": 1, + "origin_visit": 1, + "person": 2, + "release": 0, + "revision": len(_expected_new_revisions_first_visit), + "skipped_content": 0, + "snapshot": 1, } == stats - assert len(list(loader.storage.content_get( - _expected_new_contents_first_visit))) == len( - _expected_new_contents_first_visit) + assert len( + list(loader.storage.content_get(_expected_new_contents_first_visit)) + ) == len(_expected_new_contents_first_visit) - assert list(loader.storage.directory_missing( - _expected_new_directories_first_visit)) == [] + assert ( + list(loader.storage.directory_missing(_expected_new_directories_first_visit)) + == [] + ) - assert list(loader.storage.revision_missing( - _expected_new_revisions_first_visit)) == [] + assert ( + list(loader.storage.revision_missing(_expected_new_revisions_first_visit)) == [] + ) expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': { - 'HEAD': { - 'target': 'releases/0.0.4', - 'target_type': 'alias' + "id": expected_snapshot_id, + "branches": { + "HEAD": {"target": "releases/0.0.4", "target_type": "alias"}, + "releases/0.0.2": { + "target": "d8a1c7474d2956ac598a19f0f27d52f7015f117e", + "target_type": "revision", }, - 'releases/0.0.2': { - 'target': 'd8a1c7474d2956ac598a19f0f27d52f7015f117e', - 'target_type': 'revision' + "releases/0.0.3": { + "target": "5f9eb78af37ffd12949f235e86fac04898f9f72a", + "target_type": "revision", }, - 'releases/0.0.3': { - 'target': '5f9eb78af37ffd12949f235e86fac04898f9f72a', - 'target_type': 'revision' + "releases/0.0.4": { + "target": "ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a", + "target_type": "revision", }, - 'releases/0.0.4': { - 'target': 'ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a', - 'target_type': 'revision' - } - } + }, } check_snapshot(expected_snapshot, loader.storage) -def test_npm_loader_incremental_visit( - swh_config, requests_mock_datadir_visits): - package = 'org' +def test_npm_loader_incremental_visit(swh_config, requests_mock_datadir_visits): + package = "org" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['status'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["status"] is not None origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'npm' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "npm" stats = get_stats(loader.storage) assert { - 'content': len(_expected_new_contents_first_visit), - 'directory': len(_expected_new_directories_first_visit), - 'origin': 1, - 'origin_visit': 1, - 'person': 2, - 'release': 0, - 'revision': len(_expected_new_revisions_first_visit), - 'skipped_content': 0, - 'snapshot': 1, + "content": len(_expected_new_contents_first_visit), + "directory": len(_expected_new_directories_first_visit), + "origin": 1, + "origin_visit": 1, + "person": 2, + "release": 0, + "revision": len(_expected_new_revisions_first_visit), + "skipped_content": 0, + "snapshot": 1, } == stats loader._info = None # reset loader internal state actual_load_status2 = loader.load() - assert 
actual_load_status2['status'] == 'eventful' - snap_id2 = actual_load_status2['snapshot_id'] + assert actual_load_status2["status"] == "eventful" + snap_id2 = actual_load_status2["snapshot_id"] assert snap_id2 is not None - assert snap_id2 != actual_load_status['snapshot_id'] + assert snap_id2 != actual_load_status["snapshot_id"] origin_visit2 = loader.storage.origin_visit_get_latest(url) - assert origin_visit2['status'] == 'full' - assert origin_visit2['type'] == 'npm' + assert origin_visit2["status"] == "full" + assert origin_visit2["type"] == "npm" stats = get_stats(loader.storage) assert { # 3 new releases artifacts - 'content': len(_expected_new_contents_first_visit) + 14, - 'directory': len(_expected_new_directories_first_visit) + 15, - 'origin': 1, - 'origin_visit': 2, - 'person': 2, - 'release': 0, - 'revision': len(_expected_new_revisions_first_visit) + 3, - 'skipped_content': 0, - 'snapshot': 2, + "content": len(_expected_new_contents_first_visit) + 14, + "directory": len(_expected_new_directories_first_visit) + 15, + "origin": 1, + "origin_visit": 2, + "person": 2, + "release": 0, + "revision": len(_expected_new_revisions_first_visit) + 3, + "skipped_content": 0, + "snapshot": 2, } == stats urls = [ - m.url for m in requests_mock_datadir_visits.request_history - if m.url.startswith('https://registry.npmjs.org') + m.url + for m in requests_mock_datadir_visits.request_history + if m.url.startswith("https://registry.npmjs.org") ] assert len(urls) == len(set(urls)) # we visited each artifact once across -@pytest.mark.usefixtures('requests_mock_datadir') +@pytest.mark.usefixtures("requests_mock_datadir") def test_npm_loader_version_divergence(swh_config): - package = '@aller_shared' + package = "@aller_shared" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['status'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["status"] is not None origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'npm' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "npm" stats = get_stats(loader.storage) assert { # 1 new releases artifacts - 'content': 534, - 'directory': 153, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 2, - 'skipped_content': 0, - 'snapshot': 1, + "content": 534, + "directory": 153, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 2, + "skipped_content": 0, + "snapshot": 1, } == stats expected_snapshot = { - 'id': 'b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92', - 'branches': { - 'HEAD': { - 'target_type': 'alias', - 'target': 'releases/0.1.0' - }, - 'releases/0.1.0': { - 'target_type': 'revision', - 'target': '845673bfe8cbd31b1eaf757745a964137e6f9116', + "id": "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92", + "branches": { + "HEAD": {"target_type": "alias", "target": "releases/0.1.0"}, + "releases/0.1.0": { + "target_type": "revision", + "target": "845673bfe8cbd31b1eaf757745a964137e6f9116", }, - 'releases/0.1.1-alpha.14': { - 'target_type': 'revision', - 'target': '05181c12cd8c22035dd31155656826b85745da37', + "releases/0.1.1-alpha.14": { + "target_type": "revision", + "target": "05181c12cd8c22035dd31155656826b85745da37", }, }, } check_snapshot(expected_snapshot, loader.storage) def test_npm_artifact_to_revision_id_none(): """Current loader version should stop soon if 
nothing can be found """ artifact_metadata = { - 'dist': { - 'shasum': '05181c12cd8c22035dd31155656826b85745da37', - }, + "dist": {"shasum": "05181c12cd8c22035dd31155656826b85745da37",}, } known_artifacts = { - 'b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92': {}, + "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": {}, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_npm_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ artifact_metadata = { - 'dist': { - 'shasum': '05181c12cd8c22035dd31155656826b85745da37', - } + "dist": {"shasum": "05181c12cd8c22035dd31155656826b85745da37",} } known_artifacts = { - hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { - 'package_source': { - 'sha1': "something-wrong" - } + hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { + "package_source": {"sha1": "something-wrong"} + }, + hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { + "package_source": {"sha1": "05181c12cd8c22035dd31155656826b85745da37",} }, - hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { - 'package_source': { - 'sha1': '05181c12cd8c22035dd31155656826b85745da37', - } - } - } - assert artifact_to_revision_id(known_artifacts, artifact_metadata) \ - == hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116') + assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( + "845673bfe8cbd31b1eaf757745a964137e6f9116" + ) def test_npm_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ artifact_metadata = { - 'dist': { - 'shasum': '05181c12cd8c22035dd31155656826b85745da37', - } + "dist": {"shasum": "05181c12cd8c22035dd31155656826b85745da37",} } known_artifacts = { - hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { - 'original_artifact': [{ - 'checksums': { - 'sha1': "05181c12cd8c22035dd31155656826b85745da37" - }, - }], + hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { + "original_artifact": [ + {"checksums": {"sha1": "05181c12cd8c22035dd31155656826b85745da37"},} + ], }, - hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { - 'original_artifact': [{ - 'checksums': { - 'sha1': 'something-wrong' - }, - }], + hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { + "original_artifact": [{"checksums": {"sha1": "something-wrong"},}], }, } - assert artifact_to_revision_id(known_artifacts, artifact_metadata) \ - == hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92') + assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( + "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92" + ) -def test_npm_artifact_with_no_intrinsic_metadata( - swh_config, requests_mock_datadir): +def test_npm_artifact_with_no_intrinsic_metadata(swh_config, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ - package = 'nativescript-telerik-analytics' + package = "nativescript-telerik-analytics" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' + assert actual_load_status["status"] == "eventful" # no branch as one artifact without any intrinsic metadata expected_snapshot = { - 'id': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e', - 'branches': {}, + "id": "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e", + "branches": {}, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - 
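
Editor's note on the three test_npm_artifact_to_revision_id_* cases above: they all exercise the same lookup, matching the artifact's dist.shasum against each known revision's metadata, accepting either the old package_source.sha1 layout or the current original_artifact[].checksums.sha1 layout. A minimal standalone sketch of that lookup (plain hex strings and dicts in place of hash_to_bytes ids and storage rows; the real helper is the npm loader's artifact_to_revision_id that these tests call):

from typing import Dict, Optional


def resolve_npm_revision(known_artifacts: Dict[str, Dict], artifact: Dict) -> Optional[str]:
    """Sketch: match the artifact's dist.shasum against known revisions' metadata."""
    sha1 = artifact["dist"]["shasum"]
    for rev_id, known in known_artifacts.items():
        # old loader versions stored {'package_source': {'sha1': ...}}
        if known.get("package_source", {}).get("sha1") == sha1:
            return rev_id
        # the current loader stores a list of {'checksums': {'sha1': ...}} entries
        for original in known.get("original_artifact", []):
            if original.get("checksums", {}).get("sha1") == sha1:
                return rev_id
    return None


artifact = {"dist": {"shasum": "05181c12cd8c22035dd31155656826b85745da37"}}
known = {
    "845673bfe8cbd31b1eaf757745a964137e6f9116": {
        "package_source": {"sha1": "05181c12cd8c22035dd31155656826b85745da37"}
    },
    "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": {"original_artifact": []},
}
assert resolve_npm_revision(known, artifact) == "845673bfe8cbd31b1eaf757745a964137e6f9116"
assert resolve_npm_revision({}, artifact) is None
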
assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'npm' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "npm" def test_npm_artifact_with_no_upload_time(swh_config, requests_mock_datadir): """With no time upload, artifact is skipped """ - package = 'jammit-no-time' + package = "jammit-no-time" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() - assert actual_load_status['status'] == 'uneventful' + assert actual_load_status["status"] == "uneventful" # no branch as one artifact without any intrinsic metadata expected_snapshot = { - 'id': '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e', - 'branches': {}, + "id": "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e", + "branches": {}, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'partial' - assert origin_visit['type'] == 'npm' + assert origin_visit["status"] == "partial" + assert origin_visit["type"] == "npm" def test_npm_artifact_use_mtime_if_no_time(swh_config, requests_mock_datadir): """With no time upload, artifact is skipped """ - package = 'jammit-express' + package = "jammit-express" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' + assert actual_load_status["status"] == "eventful" # artifact is used expected_snapshot = { - 'id': 'd6e08e19159f77983242877c373c75222d5ae9dd', - 'branches': { - 'HEAD': { - 'target_type': 'alias', - 'target': 'releases/0.0.1' + "id": "d6e08e19159f77983242877c373c75222d5ae9dd", + "branches": { + "HEAD": {"target_type": "alias", "target": "releases/0.0.1"}, + "releases/0.0.1": { + "target_type": "revision", + "target": "9e4dd2b40d1b46b70917c0949aa2195c823a648e", }, - 'releases/0.0.1': { - 'target_type': 'revision', - 'target': '9e4dd2b40d1b46b70917c0949aa2195c823a648e', - } - } + }, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'npm' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "npm" def test_npm_no_artifact(swh_config, requests_mock_datadir): """If no artifacts at all is found for origin, the visit fails completely """ - package = 'catify' + package = "catify" url = package_url(package) loader = NpmLoader(url) actual_load_status = loader.load() assert actual_load_status == { - 'status': 'failed', + "status": "failed", } origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'partial' - assert origin_visit['type'] == 'npm' + assert origin_visit["status"] == "partial" + assert origin_visit["type"] == "npm" diff --git a/swh/loader/package/npm/tests/test_tasks.py b/swh/loader/package/npm/tests/test_tasks.py index 21687ef..9501907 100644 --- a/swh/loader/package/npm/tests/test_tasks.py +++ b/swh/loader/package/npm/tests/test_tasks.py @@ -1,18 +1,19 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_npm_loader(mocker, swh_app, celery_session_worker, swh_config): - mock_loader = mocker.patch('swh.loader.package.npm.loader.NpmLoader.load') - mock_loader.return_value = {'status': 'eventful'} + mock_loader = 
mocker.patch("swh.loader.package.npm.loader.NpmLoader.load") + mock_loader.return_value = {"status": "eventful"} res = swh_app.send_task( - 'swh.loader.package.npm.tasks.LoadNpm', - kwargs=dict(url='https://www.npmjs.com/package/some-package')) + "swh.loader.package.npm.tasks.LoadNpm", + kwargs=dict(url="https://www.npmjs.com/package/some-package"), + ) assert res res.wait() assert res.successful() - assert res.result == {'status': 'eventful'} + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/pypi/__init__.py b/swh/loader/package/pypi/__init__.py index 63f8fae..880d306 100644 --- a/swh/loader/package/pypi/__init__.py +++ b/swh/loader/package/pypi/__init__.py @@ -1,16 +1,17 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: """Register the current worker module's definition""" from .loader import PyPILoader + return { - 'task_modules': [f'{__name__}.tasks'], - 'loader': PyPILoader, + "task_modules": [f"{__name__}.tasks"], + "loader": PyPILoader, } diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py index f7fe35a..2a2e0d3 100644 --- a/swh/loader/package/pypi/loader.py +++ b/swh/loader/package/pypi/loader.py @@ -1,250 +1,250 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import logging from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple from urllib.parse import urlparse from pkginfo import UnpackedSDist from swh.model.model import ( - Person, Sha1Git, TimestampWithTimezone, Revision, RevisionType + Person, + Sha1Git, + TimestampWithTimezone, + Revision, + RevisionType, ) from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import api_info, release_name, EMPTY_AUTHOR logger = logging.getLogger(__name__) class PyPILoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. 
""" - visit_type = 'pypi' + + visit_type = "pypi" def __init__(self, url): super().__init__(url=url) self._info = None self.provider_url = pypi_api_url(self.url) @property def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: - return self.info['releases'].keys() + return self.info["releases"].keys() def get_default_version(self) -> str: - return self.info['info']['version'] + return self.info["info"]["version"] - def get_package_info(self, version: str) -> Generator[ - Tuple[str, Mapping[str, Any]], None, None]: + def get_package_info( + self, version: str + ) -> Generator[Tuple[str, Mapping[str, Any]], None, None]: res = [] - for meta in self.info['releases'][version]: - if meta['packagetype'] != 'sdist': + for meta in self.info["releases"][version]: + if meta["packagetype"] != "sdist": continue - filename = meta['filename'] + filename = meta["filename"] p_info = { - 'url': meta['url'], - 'filename': filename, - 'raw': meta, + "url": meta["url"], + "filename": filename, + "raw": meta, } res.append((version, p_info)) if len(res) == 1: version, p_info = res[0] yield release_name(version), p_info else: for version, p_info in res: - yield release_name(version, p_info['filename']), p_info + yield release_name(version, p_info["filename"]), p_info def resolve_revision_from( - self, known_artifacts: Dict, artifact_metadata: Dict) \ - -> Optional[bytes]: + self, known_artifacts: Dict, artifact_metadata: Dict + ) -> Optional[bytes]: return artifact_to_revision_id(known_artifacts, artifact_metadata) def build_revision( - self, a_metadata: Dict, uncompressed_path: str, - directory: Sha1Git) -> Optional[Revision]: + self, a_metadata: Dict, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Revision]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None # from intrinsic metadata - name = i_metadata['version'] + name = i_metadata["version"] _author = author(i_metadata) # from extrinsic metadata - message = a_metadata.get('comment_text', '') - message = '%s: %s' % (name, message) if message else name - date = TimestampWithTimezone.from_iso8601(a_metadata['upload_time']) + message = a_metadata.get("comment_text", "") + message = "%s: %s" % (name, message) if message else name + date = TimestampWithTimezone.from_iso8601(a_metadata["upload_time"]) return Revision( type=RevisionType.TAR, - message=message.encode('utf-8'), + message=message.encode("utf-8"), author=_author, date=date, committer=_author, committer_date=date, parents=[], directory=directory, synthetic=True, metadata={ - 'intrinsic': { - 'tool': 'PKG-INFO', - 'raw': i_metadata, - }, - 'extrinsic': { - 'provider': self.provider_url, - 'when': self.visit_date.isoformat(), - 'raw': a_metadata, + "intrinsic": {"tool": "PKG-INFO", "raw": i_metadata,}, + "extrinsic": { + "provider": self.provider_url, + "when": self.visit_date.isoformat(), + "raw": a_metadata, }, - } + }, ) def artifact_to_revision_id( - known_artifacts: Dict, artifact_metadata: Dict) -> Optional[bytes]: + known_artifacts: Dict, artifact_metadata: Dict +) -> Optional[bytes]: """Given metadata artifact, solves the associated revision id. The following code allows to deal with 2 metadata formats (column metadata in 'revision') - old format sample:: { 'original_artifact': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, ... 
} - new format sample:: { 'original_artifact': [{ 'checksums': { 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa ... }, }], ... } """ - sha256 = artifact_metadata['digests']['sha256'] + sha256 = artifact_metadata["digests"]["sha256"] for rev_id, known_artifact in known_artifacts.items(): - original_artifact = known_artifact['original_artifact'] + original_artifact = known_artifact["original_artifact"] if isinstance(original_artifact, dict): # previous loader-pypi version stored metadata as dict - original_sha256 = original_artifact['sha256'] + original_sha256 = original_artifact["sha256"] if sha256 == original_sha256: return rev_id continue # new pypi loader actually store metadata dict differently... assert isinstance(original_artifact, list) # current loader-pypi stores metadata as list of dict - for original_artifact in known_artifact['original_artifact']: - if sha256 == original_artifact['checksums']['sha256']: + for original_artifact in known_artifact["original_artifact"]: + if sha256 == original_artifact["checksums"]["sha256"]: return rev_id return None def pypi_api_url(url: str) -> str: """Compute api url from a project url Args: url (str): PyPI instance's url (e.g: https://pypi.org/project/requests) This deals with correctly transforming the project's api url (e.g https://pypi.org/pypi/requests/json) Returns: api url """ p_url = urlparse(url) - project_name = p_url.path.rstrip('/').split('/')[-1] - url = '%s://%s/pypi/%s/json' % (p_url.scheme, p_url.netloc, project_name) + project_name = p_url.path.rstrip("/").split("/")[-1] + url = "%s://%s/pypi/%s/json" % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any or None if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] - pkginfo_path = os.path.join(dir_path, project_dirname, 'PKG-INFO') + pkginfo_path = os.path.join(dir_path, project_dirname, "PKG-INFO") if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ - raw.pop('filename') # this gets added with the ondisk location + raw.pop("filename") # this gets added with the ondisk location return raw def author(data: Dict) -> Person: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: swh-model dict representing a person. 
""" - name = data.get('author') - email = data.get('author_email') + name = data.get("author") + email = data.get("author_email") fullname = None # type: Optional[str] if email: - fullname = '%s <%s>' % (name, email) + fullname = "%s <%s>" % (name, email) else: fullname = name if not fullname: return EMPTY_AUTHOR if name is not None: - name = name.encode('utf-8') + name = name.encode("utf-8") if email is not None: - email = email.encode('utf-8') + email = email.encode("utf-8") - return Person( - fullname=fullname.encode('utf-8'), - name=name, - email=email - ) + return Person(fullname=fullname.encode("utf-8"), name=name, email=email) diff --git a/swh/loader/package/pypi/tasks.py b/swh/loader/package/pypi/tasks.py index 748ace4..933f7d2 100644 --- a/swh/loader/package/pypi/tasks.py +++ b/swh/loader/package/pypi/tasks.py @@ -1,14 +1,14 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from swh.loader.package.pypi.loader import PyPILoader -@shared_task(name=__name__ + '.LoadPyPI') +@shared_task(name=__name__ + ".LoadPyPI") def load_pypi(*, url=None): """Load PyPI package""" return PyPILoader(url).load() diff --git a/swh/loader/package/pypi/tests/test_pypi.py b/swh/loader/package/pypi/tests/test_pypi.py index 9c38c39..6e701f7 100644 --- a/swh/loader/package/pypi/tests/test_pypi.py +++ b/swh/loader/package/pypi/tests/test_pypi.py @@ -1,834 +1,848 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from os import path import pytest from unittest.mock import patch from swh.core.tarball import uncompress from swh.core.pytest_plugin import requests_mock_datadir_factory from swh.model.hashutil import hash_to_bytes from swh.model.model import Person from swh.loader.package.pypi.loader import ( - PyPILoader, pypi_api_url, author, extract_intrinsic_metadata, - artifact_to_revision_id + PyPILoader, + pypi_api_url, + author, + extract_intrinsic_metadata, + artifact_to_revision_id, ) from swh.loader.package.tests.common import ( - check_snapshot, check_metadata_paths, get_stats + check_snapshot, + check_metadata_paths, + get_stats, ) def test_author_basic(): data = { - 'author': "i-am-groot", - 'author_email': 'iam@groot.org', + "author": "i-am-groot", + "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( - fullname=b'i-am-groot ', - name=b'i-am-groot', - email=b'iam@groot.org', + fullname=b"i-am-groot ", + name=b"i-am-groot", + email=b"iam@groot.org", ) assert actual_author == expected_author def test_author_empty_email(): data = { - 'author': 'i-am-groot', - 'author_email': '', + "author": "i-am-groot", + "author_email": "", } actual_author = author(data) - expected_author = Person( - fullname=b'i-am-groot', - name=b'i-am-groot', - email=b'', - ) + expected_author = Person(fullname=b"i-am-groot", name=b"i-am-groot", email=b"",) assert actual_author == expected_author def test_author_empty_name(): data = { - 'author': "", - 'author_email': 'iam@groot.org', + "author": "", + "author_email": "iam@groot.org", } actual_author = author(data) expected_author = Person( - fullname=b' ', - name=b'', - email=b'iam@groot.org', + fullname=b" ", 
name=b"", email=b"iam@groot.org", ) assert actual_author == expected_author def test_author_malformed(): data = { - 'author': "['pierre', 'paul', 'jacques']", - 'author_email': None, + "author": "['pierre', 'paul', 'jacques']", + "author_email": None, } actual_author = author(data) expected_author = Person( fullname=b"['pierre', 'paul', 'jacques']", name=b"['pierre', 'paul', 'jacques']", email=None, ) assert actual_author == expected_author def test_author_malformed_2(): data = { - 'author': '[marie, jeanne]', - 'author_email': '[marie@some, jeanne@thing]', + "author": "[marie, jeanne]", + "author_email": "[marie@some, jeanne@thing]", } actual_author = author(data) expected_author = Person( - fullname=b'[marie, jeanne] <[marie@some, jeanne@thing]>', - name=b'[marie, jeanne]', - email=b'[marie@some, jeanne@thing]', + fullname=b"[marie, jeanne] <[marie@some, jeanne@thing]>", + name=b"[marie, jeanne]", + email=b"[marie@some, jeanne@thing]", ) assert actual_author == expected_author def test_author_malformed_3(): data = { - 'author': '[marie, jeanne, pierre]', - 'author_email': '[marie@somewhere.org, jeanne@somewhere.org]', + "author": "[marie, jeanne, pierre]", + "author_email": "[marie@somewhere.org, jeanne@somewhere.org]", } actual_author = author(data) expected_author = Person( fullname=( - b'[marie, jeanne, pierre] ' - b'<[marie@somewhere.org, jeanne@somewhere.org]>' + b"[marie, jeanne, pierre] " b"<[marie@somewhere.org, jeanne@somewhere.org]>" ), - name=b'[marie, jeanne, pierre]', - email=b'[marie@somewhere.org, jeanne@somewhere.org]', + name=b"[marie, jeanne, pierre]", + email=b"[marie@somewhere.org, jeanne@somewhere.org]", ) actual_author == expected_author # configuration error # + def test_badly_configured_loader_raise(monkeypatch): """Badly configured loader should raise""" - monkeypatch.delenv('SWH_CONFIG_FILENAME', raising=False) + monkeypatch.delenv("SWH_CONFIG_FILENAME", raising=False) with pytest.raises(ValueError) as e: - PyPILoader(url='some-url') + PyPILoader(url="some-url") - assert 'Misconfiguration' in e.value.args[0] + assert "Misconfiguration" in e.value.args[0] def test_pypi_api_url(): """Compute pypi api url from the pypi project url should be ok""" - url = pypi_api_url('https://pypi.org/project/requests') - assert url == 'https://pypi.org/pypi/requests/json' + url = pypi_api_url("https://pypi.org/project/requests") + assert url == "https://pypi.org/pypi/requests/json" def test_pypi_api_url_with_slash(): """Compute pypi api url from the pypi project url should be ok""" - url = pypi_api_url('https://pypi.org/project/requests/') - assert url == 'https://pypi.org/pypi/requests/json' + url = pypi_api_url("https://pypi.org/project/requests/") + assert url == "https://pypi.org/pypi/requests/json" @pytest.mark.fs def test_extract_intrinsic_metadata(tmp_path, datadir): """Parsing existing archive's PKG-INFO should yield results""" uncompressed_archive_path = str(tmp_path) archive_path = path.join( - datadir, 'https_files.pythonhosted.org', '0805nexter-1.1.0.zip') + datadir, "https_files.pythonhosted.org", "0805nexter-1.1.0.zip" + ) uncompress(archive_path, dest=uncompressed_archive_path) actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path) expected_metadata = { - 'metadata_version': '1.0', - 'name': '0805nexter', - 'version': '1.1.0', - 'summary': 'a simple printer of nested lest', - 'home_page': 'http://www.hp.com', - 'author': 'hgtkpython', - 'author_email': '2868989685@qq.com', - 'platforms': ['UNKNOWN'], + "metadata_version": "1.0", + "name": 
"0805nexter", + "version": "1.1.0", + "summary": "a simple printer of nested lest", + "home_page": "http://www.hp.com", + "author": "hgtkpython", + "author_email": "2868989685@qq.com", + "platforms": ["UNKNOWN"], } assert actual_metadata == expected_metadata @pytest.mark.fs def test_extract_intrinsic_metadata_failures(tmp_path): """Parsing inexistent path/archive/PKG-INFO yield None""" tmp_path = str(tmp_path) # py3.5 work around (PosixPath issue) # inexistent first level path - assert extract_intrinsic_metadata('/something-inexistent') == {} + assert extract_intrinsic_metadata("/something-inexistent") == {} # inexistent second level path (as expected by pypi archives) assert extract_intrinsic_metadata(tmp_path) == {} # inexistent PKG-INFO within second level path - existing_path_no_pkginfo = path.join(tmp_path, 'something') + existing_path_no_pkginfo = path.join(tmp_path, "something") os.mkdir(existing_path_no_pkginfo) assert extract_intrinsic_metadata(tmp_path) == {} # LOADER SCENARIO # # "edge" cases (for the same origin) # # no release artifact: # {visit full, status: uneventful, no contents, etc...} -requests_mock_datadir_missing_all = requests_mock_datadir_factory(ignore_urls=[ - 'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip', # noqa - 'https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip', # noqa -]) +requests_mock_datadir_missing_all = requests_mock_datadir_factory( + ignore_urls=[ + "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa + "https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip", # noqa + ] +) def test_no_release_artifact(swh_config, requests_mock_datadir_missing_all): """Load a pypi project with all artifacts missing ends up with no snapshot """ - url = 'https://pypi.org/project/0805nexter' + url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() - assert actual_load_status['status'] == 'uneventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "uneventful" + assert actual_load_status["snapshot_id"] is not None stats = get_stats(loader.storage) assert { - 'content': 0, - 'directory': 0, - 'origin': 1, - 'origin_visit': 1, - 'person': 0, - 'release': 0, - 'revision': 0, - 'skipped_content': 0, - 'snapshot': 1, + "content": 0, + "directory": 0, + "origin": 1, + "origin_visit": 1, + "person": 0, + "release": 0, + "revision": 0, + "skipped_content": 0, + "snapshot": 1, } == stats origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'partial' - assert origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "partial" + assert origin_visit["type"] == "pypi" # problem during loading: # {visit: partial, status: uneventful, no snapshot} def test_release_with_traceback(swh_config, requests_mock_datadir): - url = 'https://pypi.org/project/0805nexter' - with patch('swh.loader.package.pypi.loader.PyPILoader.last_snapshot', - side_effect=ValueError('Fake problem to fail the visit')): + url = "https://pypi.org/project/0805nexter" + with patch( + "swh.loader.package.pypi.loader.PyPILoader.last_snapshot", + side_effect=ValueError("Fake problem to fail the visit"), + ): loader = PyPILoader(url) actual_load_status = 
loader.load() - assert actual_load_status == {'status': 'failed'} + assert actual_load_status == {"status": "failed"} stats = get_stats(loader.storage) assert { - 'content': 0, - 'directory': 0, - 'origin': 1, - 'origin_visit': 1, - 'person': 0, - 'release': 0, - 'revision': 0, - 'skipped_content': 0, - 'snapshot': 0, + "content": 0, + "directory": 0, + "origin": 1, + "origin_visit": 1, + "person": 0, + "release": 0, + "revision": 0, + "skipped_content": 0, + "snapshot": 0, } == stats origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'partial' - assert origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "partial" + assert origin_visit["type"] == "pypi" # problem during loading: failure early enough in between swh contents... # some contents (contents, directories, etc...) have been written in storage # {visit: partial, status: eventful, no snapshot} # problem during loading: failure late enough we can have snapshots (some # revisions are written in storage already) # {visit: partial, status: eventful, snapshot} # "normal" cases (for the same origin) # -requests_mock_datadir_missing_one = requests_mock_datadir_factory(ignore_urls=[ - 'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip', # noqa -]) +requests_mock_datadir_missing_one = requests_mock_datadir_factory( + ignore_urls=[ + "https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip", # noqa + ] +) # some missing release artifacts: # {visit partial, status: eventful, 1 snapshot} def test_revision_metadata_structure(swh_config, requests_mock_datadir): - url = 'https://pypi.org/project/0805nexter' + url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() - assert actual_load_status['status'] == 'eventful' - assert actual_load_status['snapshot_id'] is not None + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None - expected_revision_id = hash_to_bytes( - 'e445da4da22b31bfebb6ffc4383dbf839a074d21') + expected_revision_id = hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21") revision = list(loader.storage.revision_get([expected_revision_id]))[0] assert revision is not None - check_metadata_paths(revision['metadata'], paths=[ - ('intrinsic.tool', str), - ('intrinsic.raw', dict), - ('extrinsic.provider', str), - ('extrinsic.when', str), - ('extrinsic.raw', dict), - ('original_artifact', list), - ]) - - for original_artifact in revision['metadata']['original_artifact']: - check_metadata_paths(original_artifact, paths=[ - ('filename', str), - ('length', int), - ('checksums', dict), - ]) - - -def test_visit_with_missing_artifact( - swh_config, requests_mock_datadir_missing_one): + check_metadata_paths( + revision["metadata"], + paths=[ + ("intrinsic.tool", str), + ("intrinsic.raw", dict), + ("extrinsic.provider", str), + ("extrinsic.when", str), + ("extrinsic.raw", dict), + ("original_artifact", list), + ], + ) + + for original_artifact in revision["metadata"]["original_artifact"]: + check_metadata_paths( + original_artifact, + paths=[("filename", str), ("length", int), ("checksums", dict),], + ) + + +def test_visit_with_missing_artifact(swh_config, requests_mock_datadir_missing_one): """Load a pypi project with some missing artifacts ends up with 1 snapshot """ - url = 'https://pypi.org/project/0805nexter' + url = 
"https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() - expected_snapshot_id = 'dd0e4201a232b1c104433741dbf45895b8ac9355' + expected_snapshot_id = "dd0e4201a232b1c104433741dbf45895b8ac9355" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } stats = get_stats(loader.storage) assert { - 'content': 3, - 'directory': 2, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 1, - 'skipped_content': 0, - 'snapshot': 1 + "content": 3, + "directory": 2, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 1, + "skipped_content": 0, + "snapshot": 1, } == stats - expected_contents = map(hash_to_bytes, [ - '405859113963cb7a797642b45f171d6360425d16', - 'e5686aa568fdb1d19d7f1329267082fe40482d31', - '83ecf6ec1114fd260ca7a833a2d165e71258c338', - ]) + expected_contents = map( + hash_to_bytes, + [ + "405859113963cb7a797642b45f171d6360425d16", + "e5686aa568fdb1d19d7f1329267082fe40482d31", + "83ecf6ec1114fd260ca7a833a2d165e71258c338", + ], + ) - assert list(loader.storage.content_missing_per_sha1(expected_contents))\ - == [] + assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] - expected_dirs = map(hash_to_bytes, [ - 'b178b66bd22383d5f16f4f5c923d39ca798861b4', - 'c3a58f8b57433a4b56caaa5033ae2e0931405338', - ]) + expected_dirs = map( + hash_to_bytes, + [ + "b178b66bd22383d5f16f4f5c923d39ca798861b4", + "c3a58f8b57433a4b56caaa5033ae2e0931405338", + ], + ) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { - hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa + hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( + "b178b66bd22383d5f16f4f5c923d39ca798861b4" + ), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_branches = { - 'releases/1.2.0': { - 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', - 'target_type': 'revision', - }, - 'HEAD': { - 'target': 'releases/1.2.0', - 'target_type': 'alias', + "releases/1.2.0": { + "target": "e445da4da22b31bfebb6ffc4383dbf839a074d21", + "target_type": "revision", }, + "HEAD": {"target": "releases/1.2.0", "target_type": "alias",}, } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': expected_branches, + "id": expected_snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, storage=loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'partial' - assert origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "partial" + assert origin_visit["type"] == "pypi" def test_visit_with_1_release_artifact(swh_config, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ - url = 'https://pypi.org/project/0805nexter' + url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) actual_load_status = loader.load() - expected_snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a' + expected_snapshot_id = "ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } stats = get_stats(loader.storage) assert { - 'content': 6, - 'directory': 4, - 'origin': 1, - 'origin_visit': 
1, - 'person': 1, - 'release': 0, - 'revision': 2, - 'skipped_content': 0, - 'snapshot': 1 + "content": 6, + "directory": 4, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 2, + "skipped_content": 0, + "snapshot": 1, } == stats - expected_contents = map(hash_to_bytes, [ - 'a61e24cdfdab3bb7817f6be85d37a3e666b34566', - '938c33483285fd8ad57f15497f538320df82aeb8', - 'a27576d60e08c94a05006d2e6d540c0fdb5f38c8', - '405859113963cb7a797642b45f171d6360425d16', - 'e5686aa568fdb1d19d7f1329267082fe40482d31', - '83ecf6ec1114fd260ca7a833a2d165e71258c338', - ]) - - assert list(loader.storage.content_missing_per_sha1(expected_contents))\ - == [] - - expected_dirs = map(hash_to_bytes, [ - '05219ba38bc542d4345d5638af1ed56c7d43ca7d', - 'cf019eb456cf6f78d8c4674596f1c9a97ece8f44', - 'b178b66bd22383d5f16f4f5c923d39ca798861b4', - 'c3a58f8b57433a4b56caaa5033ae2e0931405338', - ]) + expected_contents = map( + hash_to_bytes, + [ + "a61e24cdfdab3bb7817f6be85d37a3e666b34566", + "938c33483285fd8ad57f15497f538320df82aeb8", + "a27576d60e08c94a05006d2e6d540c0fdb5f38c8", + "405859113963cb7a797642b45f171d6360425d16", + "e5686aa568fdb1d19d7f1329267082fe40482d31", + "83ecf6ec1114fd260ca7a833a2d165e71258c338", + ], + ) + + assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] + + expected_dirs = map( + hash_to_bytes, + [ + "05219ba38bc542d4345d5638af1ed56c7d43ca7d", + "cf019eb456cf6f78d8c4674596f1c9a97ece8f44", + "b178b66bd22383d5f16f4f5c923d39ca798861b4", + "c3a58f8b57433a4b56caaa5033ae2e0931405338", + ], + ) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { - hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'), # noqa - hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa + hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes( + "05219ba38bc542d4345d5638af1ed56c7d43ca7d" + ), # noqa + hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( + "b178b66bd22383d5f16f4f5c923d39ca798861b4" + ), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_branches = { - 'releases/1.1.0': { - 'target': '4c99891f93b81450385777235a37b5e966dd1571', - 'target_type': 'revision', - }, - 'releases/1.2.0': { - 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', - 'target_type': 'revision', + "releases/1.1.0": { + "target": "4c99891f93b81450385777235a37b5e966dd1571", + "target_type": "revision", }, - 'HEAD': { - 'target': 'releases/1.2.0', - 'target_type': 'alias', + "releases/1.2.0": { + "target": "e445da4da22b31bfebb6ffc4383dbf839a074d21", + "target_type": "revision", }, + "HEAD": {"target": "releases/1.2.0", "target_type": "alias",}, } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': expected_branches, + "id": expected_snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "pypi" def test_multiple_visits_with_no_change(swh_config, requests_mock_datadir): """Multiple visits with no changes results in 1 same snapshot """ - url = 'https://pypi.org/project/0805nexter' + url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) 
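
Editor's note: every expected_branches dict in these tests follows the same convention, one releases/<version> branch per ingested artifact targeting its synthetic revision, plus a HEAD alias pointing at the default version. A hypothetical helper (release_branches is not part of the test suite, just a sketch) makes the shape fed to check_snapshot explicit:

from typing import Dict


def release_branches(revisions: Dict[str, str], default: str) -> Dict[str, Dict]:
    """Sketch: build the branch mapping the expected_snapshot dicts above are made of."""
    branches = {
        "releases/%s" % version: {"target": target, "target_type": "revision"}
        for version, target in revisions.items()
    }
    # HEAD is an alias branch pointing at the default version's release branch
    branches["HEAD"] = {"target": "releases/%s" % default, "target_type": "alias"}
    return branches


# Mirrors the 0805nexter expectations in test_visit_with_1_release_artifact above.
branches = release_branches(
    {
        "1.1.0": "4c99891f93b81450385777235a37b5e966dd1571",
        "1.2.0": "e445da4da22b31bfebb6ffc4383dbf839a074d21",
    },
    default="1.2.0",
)
assert branches["HEAD"] == {"target": "releases/1.2.0", "target_type": "alias"}
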
actual_load_status = loader.load() - snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a' + snapshot_id = "ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': snapshot_id, + "status": "eventful", + "snapshot_id": snapshot_id, } stats = get_stats(loader.storage) assert { - 'content': 6, - 'directory': 4, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 2, - 'skipped_content': 0, - 'snapshot': 1 + "content": 6, + "directory": 4, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 2, + "skipped_content": 0, + "snapshot": 1, } == stats expected_branches = { - 'releases/1.1.0': { - 'target': '4c99891f93b81450385777235a37b5e966dd1571', - 'target_type': 'revision', + "releases/1.1.0": { + "target": "4c99891f93b81450385777235a37b5e966dd1571", + "target_type": "revision", }, - 'releases/1.2.0': { - 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', - 'target_type': 'revision', - }, - 'HEAD': { - 'target': 'releases/1.2.0', - 'target_type': 'alias', + "releases/1.2.0": { + "target": "e445da4da22b31bfebb6ffc4383dbf839a074d21", + "target_type": "revision", }, + "HEAD": {"target": "releases/1.2.0", "target_type": "alias",}, } expected_snapshot = { - 'id': snapshot_id, - 'branches': expected_branches, + "id": snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "pypi" actual_load_status2 = loader.load() assert actual_load_status2 == { - 'status': 'uneventful', - 'snapshot_id': actual_load_status2['snapshot_id'] + "status": "uneventful", + "snapshot_id": actual_load_status2["snapshot_id"], } stats2 = get_stats(loader.storage) expected_stats2 = stats.copy() - expected_stats2['origin_visit'] = 1 + 1 + expected_stats2["origin_visit"] = 1 + 1 assert expected_stats2 == stats2 # same snapshot - actual_snapshot_id = origin_visit['snapshot'] + actual_snapshot_id = origin_visit["snapshot"] assert actual_snapshot_id == hash_to_bytes(snapshot_id) def test_incremental_visit(swh_config, requests_mock_datadir_visits): """With prior visit, 2nd load will result with a different snapshot """ - url = 'https://pypi.org/project/0805nexter' + url = "https://pypi.org/project/0805nexter" loader = PyPILoader(url) visit1_actual_load_status = loader.load() visit1_stats = get_stats(loader.storage) - expected_snapshot_id = 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a' + expected_snapshot_id = "ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a" assert visit1_actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id + "status": "eventful", + "snapshot_id": expected_snapshot_id, } origin_visit1 = loader.storage.origin_visit_get_latest(url) - assert origin_visit1['status'] == 'full' - assert origin_visit1['type'] == 'pypi' + assert origin_visit1["status"] == "full" + assert origin_visit1["type"] == "pypi" assert { - 'content': 6, - 'directory': 4, - 'origin': 1, - 'origin_visit': 1, - 'person': 1, - 'release': 0, - 'revision': 2, - 'skipped_content': 0, - 'snapshot': 1 + "content": 6, + "directory": 4, + "origin": 1, + "origin_visit": 1, + "person": 1, + "release": 0, + "revision": 2, + "skipped_content": 0, + "snapshot": 1, } == visit1_stats # Reset internal state loader._info = None visit2_actual_load_status = loader.load() 
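
Editor's note: test_multiple_visits_with_no_change above and the incremental assertions just below compare per-type object counters between visits. An unchanged origin only bumps origin_visit, while ingesting one more release artifact adds its objects on top of the first visit's counts. A toy restatement of that bookkeeping, with plain dicts standing in for get_stats output:

first_visit = {
    "content": 6, "directory": 4, "origin": 1, "origin_visit": 1,
    "person": 1, "release": 0, "revision": 2, "skipped_content": 0, "snapshot": 1,
}

# No change between visits: only the visit counter moves.
unchanged_second_visit = dict(first_visit, origin_visit=2)

# One extra artifact on the second visit (cf. the "+ 1" / "+ 2" deltas asserted below).
incremental_second_visit = dict(
    first_visit, content=7, directory=6, origin_visit=2, revision=3, snapshot=2,
)
assert unchanged_second_visit["snapshot"] == 1
assert incremental_second_visit["snapshot"] == 2
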
visit2_stats = get_stats(loader.storage) - assert visit2_actual_load_status['status'] == 'eventful' - expected_snapshot_id2 = '2e5149a7b0725d18231a37b342e9b7c4e121f283' + assert visit2_actual_load_status["status"] == "eventful" + expected_snapshot_id2 = "2e5149a7b0725d18231a37b342e9b7c4e121f283" assert visit2_actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id2 + "status": "eventful", + "snapshot_id": expected_snapshot_id2, } visits = list(loader.storage.origin_visit_get(url)) assert len(visits) == 2 - assert visits[1]['status'] == 'full' - assert visits[1]['type'] == 'pypi' + assert visits[1]["status"] == "full" + assert visits[1]["type"] == "pypi" assert { - 'content': 6 + 1, # 1 more content - 'directory': 4 + 2, # 2 more directories - 'origin': 1, - 'origin_visit': 1 + 1, - 'person': 1, - 'release': 0, - 'revision': 2 + 1, # 1 more revision - 'skipped_content': 0, - 'snapshot': 1 + 1, # 1 more snapshot + "content": 6 + 1, # 1 more content + "directory": 4 + 2, # 2 more directories + "origin": 1, + "origin_visit": 1 + 1, + "person": 1, + "release": 0, + "revision": 2 + 1, # 1 more revision + "skipped_content": 0, + "snapshot": 1 + 1, # 1 more snapshot } == visit2_stats - expected_contents = map(hash_to_bytes, [ - 'a61e24cdfdab3bb7817f6be85d37a3e666b34566', - '938c33483285fd8ad57f15497f538320df82aeb8', - 'a27576d60e08c94a05006d2e6d540c0fdb5f38c8', - '405859113963cb7a797642b45f171d6360425d16', - 'e5686aa568fdb1d19d7f1329267082fe40482d31', - '83ecf6ec1114fd260ca7a833a2d165e71258c338', - '92689fa2b7fb4d4fc6fb195bf73a50c87c030639' - ]) - - assert list(loader.storage.content_missing_per_sha1(expected_contents))\ - == [] - - expected_dirs = map(hash_to_bytes, [ - '05219ba38bc542d4345d5638af1ed56c7d43ca7d', - 'cf019eb456cf6f78d8c4674596f1c9a97ece8f44', - 'b178b66bd22383d5f16f4f5c923d39ca798861b4', - 'c3a58f8b57433a4b56caaa5033ae2e0931405338', - 'e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a', - '52604d46843b898f5a43208045d09fcf8731631b', - - ]) + expected_contents = map( + hash_to_bytes, + [ + "a61e24cdfdab3bb7817f6be85d37a3e666b34566", + "938c33483285fd8ad57f15497f538320df82aeb8", + "a27576d60e08c94a05006d2e6d540c0fdb5f38c8", + "405859113963cb7a797642b45f171d6360425d16", + "e5686aa568fdb1d19d7f1329267082fe40482d31", + "83ecf6ec1114fd260ca7a833a2d165e71258c338", + "92689fa2b7fb4d4fc6fb195bf73a50c87c030639", + ], + ) + + assert list(loader.storage.content_missing_per_sha1(expected_contents)) == [] + + expected_dirs = map( + hash_to_bytes, + [ + "05219ba38bc542d4345d5638af1ed56c7d43ca7d", + "cf019eb456cf6f78d8c4674596f1c9a97ece8f44", + "b178b66bd22383d5f16f4f5c923d39ca798861b4", + "c3a58f8b57433a4b56caaa5033ae2e0931405338", + "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a", + "52604d46843b898f5a43208045d09fcf8731631b", + ], + ) assert list(loader.storage.directory_missing(expected_dirs)) == [] # {revision hash: directory hash} expected_revs = { - hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'), # noqa - hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa - hash_to_bytes('51247143b01445c9348afa9edfae31bf7c5d86b1'): hash_to_bytes('e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a'), # noqa + hash_to_bytes("4c99891f93b81450385777235a37b5e966dd1571"): hash_to_bytes( + "05219ba38bc542d4345d5638af1ed56c7d43ca7d" + ), # noqa + hash_to_bytes("e445da4da22b31bfebb6ffc4383dbf839a074d21"): hash_to_bytes( + "b178b66bd22383d5f16f4f5c923d39ca798861b4" + ), 
# noqa + hash_to_bytes("51247143b01445c9348afa9edfae31bf7c5d86b1"): hash_to_bytes( + "e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a" + ), # noqa } assert list(loader.storage.revision_missing(expected_revs)) == [] expected_branches = { - 'releases/1.1.0': { - 'target': '4c99891f93b81450385777235a37b5e966dd1571', - 'target_type': 'revision', + "releases/1.1.0": { + "target": "4c99891f93b81450385777235a37b5e966dd1571", + "target_type": "revision", }, - 'releases/1.2.0': { - 'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21', - 'target_type': 'revision', + "releases/1.2.0": { + "target": "e445da4da22b31bfebb6ffc4383dbf839a074d21", + "target_type": "revision", }, - 'releases/1.3.0': { - 'target': '51247143b01445c9348afa9edfae31bf7c5d86b1', - 'target_type': 'revision', - }, - 'HEAD': { - 'target': 'releases/1.3.0', - 'target_type': 'alias', + "releases/1.3.0": { + "target": "51247143b01445c9348afa9edfae31bf7c5d86b1", + "target_type": "revision", }, + "HEAD": {"target": "releases/1.3.0", "target_type": "alias",}, } expected_snapshot = { - 'id': expected_snapshot_id2, - 'branches': expected_branches, + "id": expected_snapshot_id2, + "branches": expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "pypi" urls = [ - m.url for m in requests_mock_datadir_visits.request_history - if m.url.startswith('https://files.pythonhosted.org') + m.url + for m in requests_mock_datadir_visits.request_history + if m.url.startswith("https://files.pythonhosted.org") ] # visited each artifact once across 2 visits assert len(urls) == len(set(urls)) # release artifact, no new artifact # {visit full, status uneventful, same snapshot as before} # release artifact, old artifact with different checksums # {visit full, status full, new snapshot with shared history and some new # different history} # release with multiple sdist artifacts per pypi "version" # snapshot branch output is different + def test_visit_1_release_with_2_artifacts(swh_config, requests_mock_datadir): """With no prior visit, load a pypi project ends up with 1 snapshot """ - url = 'https://pypi.org/project/nexter' + url = "https://pypi.org/project/nexter" loader = PyPILoader(url) actual_load_status = loader.load() - expected_snapshot_id = 'a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6' + expected_snapshot_id = "a27e638a4dad6fbfa273c6ebec1c4bf320fb84c6" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id, + "status": "eventful", + "snapshot_id": expected_snapshot_id, } expected_branches = { - 'releases/1.1.0/nexter-1.1.0.zip': { - 'target': '4c99891f93b81450385777235a37b5e966dd1571', - 'target_type': 'revision', + "releases/1.1.0/nexter-1.1.0.zip": { + "target": "4c99891f93b81450385777235a37b5e966dd1571", + "target_type": "revision", }, - 'releases/1.1.0/nexter-1.1.0.tar.gz': { - 'target': '0bf88f5760cca7665d0af4d6575d9301134fe11a', - 'target_type': 'revision', + "releases/1.1.0/nexter-1.1.0.tar.gz": { + "target": "0bf88f5760cca7665d0af4d6575d9301134fe11a", + "target_type": "revision", }, } expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': expected_branches, + "id": expected_snapshot_id, + "branches": expected_branches, } check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert 
origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "pypi" def test_pypi_artifact_to_revision_id_none(): """Current loader version should stop soon if nothing can be found """ artifact_metadata = { - 'digests': { - 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa + "digests": { + "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa }, } assert artifact_to_revision_id({}, artifact_metadata) is None known_artifacts = { - 'b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92': { - 'original_artifact': { - 'sha256': 'something-irrelevant', - }, + "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92": { + "original_artifact": {"sha256": "something-irrelevant",}, }, } assert artifact_to_revision_id(known_artifacts, artifact_metadata) is None def test_pypi_artifact_to_revision_id_old_loader_version(): """Current loader version should solve old metadata scheme """ artifact_metadata = { - 'digests': { - 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa + "digests": { + "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa } } known_artifacts = { - hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { - 'original_artifact': { - 'sha256': "something-wrong", - }, + hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { + "original_artifact": {"sha256": "something-wrong",}, + }, + hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { + "original_artifact": { + "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa + }, }, - hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { - 'original_artifact': { - 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa - }, - } } - assert artifact_to_revision_id(known_artifacts, artifact_metadata) \ - == hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116') + assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( + "845673bfe8cbd31b1eaf757745a964137e6f9116" + ) def test_pypi_artifact_to_revision_id_current_loader_version(): """Current loader version should be able to solve current metadata scheme """ artifact_metadata = { - 'digests': { - 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa + "digests": { + "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa } } known_artifacts = { - hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92'): { - 'original_artifact': [{ - 'checksums': { - 'sha256': '6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec', # noqa - }, - }], + hash_to_bytes("b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92"): { + "original_artifact": [ + { + "checksums": { + "sha256": "6975816f2c5ad4046acc676ba112f2fff945b01522d63948531f11f11e0892ec", # noqa + }, + } + ], }, - hash_to_bytes('845673bfe8cbd31b1eaf757745a964137e6f9116'): { - 'original_artifact': [{ - 'checksums': { - 'sha256': 'something-wrong' - }, - }], + hash_to_bytes("845673bfe8cbd31b1eaf757745a964137e6f9116"): { + "original_artifact": [{"checksums": {"sha256": "something-wrong"},}], }, } - assert artifact_to_revision_id(known_artifacts, artifact_metadata) \ - == hash_to_bytes('b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92') + assert artifact_to_revision_id(known_artifacts, artifact_metadata) == hash_to_bytes( + "b11ebac8c9d0c9e5063a2df693a18e3aba4b2f92" + ) def test_pypi_artifact_to_revision_id_failures(): - with pytest.raises(KeyError, 
match='sha256'): + with pytest.raises(KeyError, match="sha256"): artifact_metadata = { - 'digests': {}, + "digests": {}, } assert artifact_to_revision_id({}, artifact_metadata) - with pytest.raises(KeyError, match='digests'): + with pytest.raises(KeyError, match="digests"): artifact_metadata = { - 'something': 'wrong', + "something": "wrong", } assert artifact_to_revision_id({}, artifact_metadata) -def test_pypi_artifact_with_no_intrinsic_metadata( - swh_config, requests_mock_datadir): +def test_pypi_artifact_with_no_intrinsic_metadata(swh_config, requests_mock_datadir): """Skip artifact with no intrinsic metadata during ingestion """ - url = 'https://pypi.org/project/upymenu' + url = "https://pypi.org/project/upymenu" loader = PyPILoader(url) actual_load_status = loader.load() - expected_snapshot_id = '1a8893e6a86f444e8be8e7bda6cb34fb1735a00e' + expected_snapshot_id = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e" assert actual_load_status == { - 'status': 'eventful', - 'snapshot_id': expected_snapshot_id, + "status": "eventful", + "snapshot_id": expected_snapshot_id, } # no branch as one artifact without any intrinsic metadata - expected_snapshot = { - 'id': expected_snapshot_id, - 'branches': {} - } + expected_snapshot = {"id": expected_snapshot_id, "branches": {}} check_snapshot(expected_snapshot, loader.storage) origin_visit = loader.storage.origin_visit_get_latest(url) - assert origin_visit['status'] == 'full' - assert origin_visit['type'] == 'pypi' + assert origin_visit["status"] == "full" + assert origin_visit["type"] == "pypi" diff --git a/swh/loader/package/pypi/tests/test_tasks.py b/swh/loader/package/pypi/tests/test_tasks.py index fcd2aa0..152fd45 100644 --- a/swh/loader/package/pypi/tests/test_tasks.py +++ b/swh/loader/package/pypi/tests/test_tasks.py @@ -1,19 +1,18 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def test_pypi_loader(mocker, swh_app, celery_session_worker, swh_config): - mock_loader = mocker.patch( - 'swh.loader.package.pypi.loader.PyPILoader.load') - mock_loader.return_value = {'status': 'eventful'} + mock_loader = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load") + mock_loader.return_value = {"status": "eventful"} res = swh_app.send_task( - 'swh.loader.package.pypi.tasks.LoadPyPI', - kwargs=dict(url='some-url')) + "swh.loader.package.pypi.tasks.LoadPyPI", kwargs=dict(url="some-url") + ) assert res res.wait() assert res.successful() - assert res.result == {'status': 'eventful'} + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/tests/common.py b/swh/loader/package/tests/common.py index b9f1e12..8ecab13 100644 --- a/swh/loader/package/tests/common.py +++ b/swh/loader/package/tests/common.py @@ -1,127 +1,134 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from os import path import logging from typing import Dict, List, Tuple from swh.model.hashutil import hash_to_bytes, hash_to_hex logger = logging.getLogger(__file__) -DATADIR = path.join(path.abspath(path.dirname(__file__)), 'resources') +DATADIR = path.join(path.abspath(path.dirname(__file__)), "resources") def decode_target(target): """Test helper to ease readability in test """ 
if not target: return target - target_type = target['target_type'] + target_type = target["target_type"] - if target_type == 'alias': - decoded_target = target['target'].decode('utf-8') + if target_type == "alias": + decoded_target = target["target"].decode("utf-8") else: - decoded_target = hash_to_hex(target['target']) + decoded_target = hash_to_hex(target["target"]) - return { - 'target': decoded_target, - 'target_type': target_type - } + return {"target": decoded_target, "target_type": target_type} def check_snapshot(expected_snapshot, storage): """Check for snapshot match. Provide the hashes as hexadecimal, the conversion is done within the method. Args: expected_snapshot (dict): full snapshot with hex ids storage (Storage): expected storage """ - expected_snapshot_id = expected_snapshot['id'] - expected_branches = expected_snapshot['branches'] + expected_snapshot_id = expected_snapshot["id"] + expected_branches = expected_snapshot["branches"] snap = storage.snapshot_get(hash_to_bytes(expected_snapshot_id)) if snap is None: # display known snapshots instead if possible - if hasattr(storage, '_snapshots'): # in-mem storage + if hasattr(storage, "_snapshots"): # in-mem storage from pprint import pprint + for snap_id, (_snap, _) in storage._snapshots.items(): snapd = _snap.to_dict() - snapd['id'] = hash_to_hex(snapd['id']) + snapd["id"] = hash_to_hex(snapd["id"]) branches = { - branch.decode('utf-8'): decode_target(target) - for branch, target in snapd['branches'].items() + branch.decode("utf-8"): decode_target(target) + for branch, target in snapd["branches"].items() } - snapd['branches'] = branches + snapd["branches"] = branches pprint(snapd) - raise AssertionError('Snapshot is not found') + raise AssertionError("Snapshot is not found") branches = { - branch.decode('utf-8'): decode_target(target) - for branch, target in snap['branches'].items() + branch.decode("utf-8"): decode_target(target) + for branch, target in snap["branches"].items() } assert expected_branches == branches def check_metadata(metadata: Dict, key_path: str, raw_type: str): """Given a metadata dict, ensure the associated key_path value is of type raw_type. Args: metadata: Dict to check key_path: Path to check raw_type: Type to check the path with Raises: Assertion error in case of mismatch """ data = metadata - keys = key_path.split('.') + keys = key_path.split(".") for k in keys: try: data = data[k] except (TypeError, KeyError) as e: # KeyError: because path too long # TypeError: data is not a dict raise AssertionError(e) assert isinstance(data, raw_type) # type: ignore def check_metadata_paths(metadata: Dict, paths: List[Tuple[str, str]]): """Given a metadata dict, ensure the keys are of expected types Args: metadata: Dict to check key_path: Path to check raw_type: Type to check the path with Raises: Assertion error in case of mismatch """ for key_path, raw_type in paths: check_metadata(metadata, key_path, raw_type) def get_stats(storage) -> Dict: """Adaptation utils to unify the stats counters across storage implementation. 
""" storage.refresh_stat_counters() stats = storage.stat_counters() - keys = ['content', 'directory', 'origin', 'origin_visit', 'person', - 'release', 'revision', 'skipped_content', 'snapshot'] + keys = [ + "content", + "directory", + "origin", + "origin_visit", + "person", + "release", + "revision", + "skipped_content", + "snapshot", + ] return {k: stats.get(k) for k in keys} diff --git a/swh/loader/package/tests/test_common.py b/swh/loader/package/tests/test_common.py index 83bffc1..73ccfcc 100644 --- a/swh/loader/package/tests/test_common.py +++ b/swh/loader/package/tests/test_common.py @@ -1,186 +1,152 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from swh.model.hashutil import hash_to_bytes from swh.model.model import Snapshot, SnapshotBranch, TargetType from swh.loader.package.tests.common import ( - decode_target, check_snapshot, check_metadata, check_metadata_paths + decode_target, + check_snapshot, + check_metadata, + check_metadata_paths, ) from swh.storage import get_storage -hash_hex = '43e45d56f88993aae6a0198013efa80716fd8920' +hash_hex = "43e45d56f88993aae6a0198013efa80716fd8920" -storage_config = { - 'cls': 'pipeline', - 'steps': [ - { - 'cls': 'memory', - } - ] -} +storage_config = {"cls": "pipeline", "steps": [{"cls": "memory",}]} def test_decode_target_edge(): assert not decode_target(None) def test_decode_target(): - actual_alias_decode_target = decode_target({ - 'target_type': 'alias', - 'target': b'something', - }) + actual_alias_decode_target = decode_target( + {"target_type": "alias", "target": b"something",} + ) assert actual_alias_decode_target == { - 'target_type': 'alias', - 'target': 'something', + "target_type": "alias", + "target": "something", } - actual_decode_target = decode_target({ - 'target_type': 'revision', - 'target': hash_to_bytes(hash_hex), - }) + actual_decode_target = decode_target( + {"target_type": "revision", "target": hash_to_bytes(hash_hex),} + ) assert actual_decode_target == { - 'target_type': 'revision', - 'target': hash_hex, + "target_type": "revision", + "target": hash_hex, } def test_check_snapshot(): storage = get_storage(**storage_config) - snap_id = '2498dbf535f882bc7f9a18fb16c9ad27fda7bab7' + snap_id = "2498dbf535f882bc7f9a18fb16c9ad27fda7bab7" snapshot = Snapshot( id=hash_to_bytes(snap_id), branches={ - b'master': SnapshotBranch( - target=hash_to_bytes(hash_hex), - target_type=TargetType.REVISION, + b"master": SnapshotBranch( + target=hash_to_bytes(hash_hex), target_type=TargetType.REVISION, ), }, ) s = storage.snapshot_add([snapshot]) assert s == { - 'snapshot:add': 1, + "snapshot:add": 1, } expected_snapshot = { - 'id': snap_id, - 'branches': { - 'master': { - 'target': hash_hex, - 'target_type': 'revision', - } - } + "id": snap_id, + "branches": {"master": {"target": hash_hex, "target_type": "revision",}}, } check_snapshot(expected_snapshot, storage) def test_check_snapshot_failure(): storage = get_storage(**storage_config) snapshot = Snapshot( - id=hash_to_bytes('2498dbf535f882bc7f9a18fb16c9ad27fda7bab7'), + id=hash_to_bytes("2498dbf535f882bc7f9a18fb16c9ad27fda7bab7"), branches={ - b'master': SnapshotBranch( - target=hash_to_bytes(hash_hex), - target_type=TargetType.REVISION, + b"master": SnapshotBranch( + target=hash_to_bytes(hash_hex), target_type=TargetType.REVISION, ), }, ) s = storage.snapshot_add([snapshot]) 
assert s == { - 'snapshot:add': 1, + "snapshot:add": 1, } unexpected_snapshot = { - 'id': '2498dbf535f882bc7f9a18fb16c9ad27fda7bab7', - 'branches': { - 'master': { - 'target': hash_hex, - 'target_type': 'release', # wrong value - } - } + "id": "2498dbf535f882bc7f9a18fb16c9ad27fda7bab7", + "branches": { + "master": {"target": hash_hex, "target_type": "release",} # wrong value + }, } with pytest.raises(AssertionError): check_snapshot(unexpected_snapshot, storage) def test_check_metadata(): metadata = { - 'a': { - 'raw': { - 'time': 'something', - }, - }, - 'b': [], - 'c': 1, + "a": {"raw": {"time": "something",},}, + "b": [], + "c": 1, } for raw_path, raw_type in [ - ('a.raw', dict), - ('a.raw.time', str), - ('b', list), - ('c', int), + ("a.raw", dict), + ("a.raw.time", str), + ("b", list), + ("c", int), ]: check_metadata(metadata, raw_path, raw_type) def test_check_metadata_ko(): metadata = { - 'a': { - 'raw': 'hello', - }, - 'b': [], - 'c': 1, + "a": {"raw": "hello",}, + "b": [], + "c": 1, } for raw_path, raw_type in [ - ('a.b', dict), - ('a.raw.time', str), + ("a.b", dict), + ("a.raw.time", str), ]: with pytest.raises(AssertionError): check_metadata(metadata, raw_path, raw_type) def test_check_metadata_paths(): metadata = { - 'a': { - 'raw': { - 'time': 'something', - }, - }, - 'b': [], - 'c': 1, + "a": {"raw": {"time": "something",},}, + "b": [], + "c": 1, } - check_metadata_paths(metadata, [ - ('a.raw', dict), - ('a.raw.time', str), - ('b', list), - ('c', int), - ]) + check_metadata_paths( + metadata, [("a.raw", dict), ("a.raw.time", str), ("b", list), ("c", int),] + ) def test_check_metadata_paths_ko(): metadata = { - 'a': { - 'raw': 'hello', - }, - 'b': [], - 'c': 1, + "a": {"raw": "hello",}, + "b": [], + "c": 1, } with pytest.raises(AssertionError): - check_metadata_paths(metadata, [ - ('a.b', dict), - ('a.raw.time', str), - ]) + check_metadata_paths(metadata, [("a.b", dict), ("a.raw.time", str),]) diff --git a/swh/loader/package/tests/test_conftest.py b/swh/loader/package/tests/test_conftest.py index d9d16e0..b132520 100644 --- a/swh/loader/package/tests/test_conftest.py +++ b/swh/loader/package/tests/test_conftest.py @@ -1,12 +1,12 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest import requests def test_swh_proxy(): with pytest.raises(requests.exceptions.ProxyError): - requests.get('https://www.softwareheritage.org') + requests.get("https://www.softwareheritage.org") diff --git a/swh/loader/package/tests/test_loader.py b/swh/loader/package/tests/test_loader.py index 7022fd8..4f5ca6e 100644 --- a/swh/loader/package/tests/test_loader.py +++ b/swh/loader/package/tests/test_loader.py @@ -1,35 +1,35 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.loader.package.loader import PackageLoader class FakeStorage: def origin_add_one(self, origin): - raise ValueError('We refuse to add an origin') + raise ValueError("We refuse to add an origin") class FakeStorage2: def origin_add_one(self, origin): return origin def origin_visit_add(self, origin, date, type): - raise ValueError('We refuse to add an origin visit') + raise ValueError("We refuse to add an origin visit") def 
test_loader_origin_visit_failure(swh_config): """Failure to add origin or origin visit should fail immediately """ - loader = PackageLoader('some-url') + loader = PackageLoader("some-url") loader.storage = FakeStorage() actual_load_status = loader.load() - assert actual_load_status == {'status': 'failed'} + assert actual_load_status == {"status": "failed"} loader.storage = FakeStorage2() actual_load_status2 = loader.load() - assert actual_load_status2 == {'status': 'failed'} + assert actual_load_status2 == {"status": "failed"} diff --git a/swh/loader/package/tests/test_utils.py b/swh/loader/package/tests/test_utils.py index f2a888f..5116ba9 100644 --- a/swh/loader/package/tests/test_utils.py +++ b/swh/loader/package/tests/test_utils.py @@ -1,180 +1,179 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import pytest import swh.loader.package -from swh.loader.package.utils import ( - download, api_info, release_name, artifact_identity -) +from swh.loader.package.utils import download, api_info, release_name, artifact_identity def test_version_generation(): - assert swh.loader.package.__version__ != 'devel', \ - "Make sure swh.loader.core is installed (e.g. pip install -e .)" + assert ( + swh.loader.package.__version__ != "devel" + ), "Make sure swh.loader.core is installed (e.g. pip install -e .)" @pytest.mark.fs def test_download_fail_to_download(tmp_path, requests_mock): - url = 'https://pypi.org/pypi/arrow/json' + url = "https://pypi.org/pypi/arrow/json" status_code = 404 requests_mock.get(url, status_code=status_code) with pytest.raises(ValueError) as e: download(url, tmp_path) - assert e.value.args[0] == "Fail to query '%s'. Reason: %s" % ( - url, status_code) + assert e.value.args[0] == "Fail to query '%s'. 
Reason: %s" % (url, status_code) @pytest.mark.fs def test_download_ok(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" - filename = 'requests-0.0.1.tar.gz' - url = 'https://pypi.org/pypi/requests/%s' % filename - data = 'this is something' - requests_mock.get(url, text=data, headers={ - 'content-length': str(len(data)) - }) + filename = "requests-0.0.1.tar.gz" + url = "https://pypi.org/pypi/requests/%s" % filename + data = "this is something" + requests_mock.get(url, text=data, headers={"content-length": str(len(data))}) actual_filepath, actual_hashes = download(url, dest=str(tmp_path)) actual_filename = os.path.basename(actual_filepath) assert actual_filename == filename - assert actual_hashes['length'] == len(data) - assert actual_hashes['checksums']['sha1'] == 'fdd1ce606a904b08c816ba84f3125f2af44d92b2' # noqa - assert (actual_hashes['checksums']['sha256'] == - '1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5') + assert actual_hashes["length"] == len(data) + assert ( + actual_hashes["checksums"]["sha1"] == "fdd1ce606a904b08c816ba84f3125f2af44d92b2" + ) # noqa + assert ( + actual_hashes["checksums"]["sha256"] + == "1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5" + ) @pytest.mark.fs def test_download_ok_no_header(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" - filename = 'requests-0.0.1.tar.gz' - url = 'https://pypi.org/pypi/requests/%s' % filename - data = 'this is something' + filename = "requests-0.0.1.tar.gz" + url = "https://pypi.org/pypi/requests/%s" % filename + data = "this is something" requests_mock.get(url, text=data) # no header information actual_filepath, actual_hashes = download(url, dest=str(tmp_path)) actual_filename = os.path.basename(actual_filepath) assert actual_filename == filename - assert actual_hashes['length'] == len(data) - assert actual_hashes['checksums']['sha1'] == 'fdd1ce606a904b08c816ba84f3125f2af44d92b2' # noqa - assert (actual_hashes['checksums']['sha256'] == - '1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5') + assert actual_hashes["length"] == len(data) + assert ( + actual_hashes["checksums"]["sha1"] == "fdd1ce606a904b08c816ba84f3125f2af44d92b2" + ) # noqa + assert ( + actual_hashes["checksums"]["sha256"] + == "1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5" + ) @pytest.mark.fs def test_download_ok_with_hashes(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" - filename = 'requests-0.0.1.tar.gz' - url = 'https://pypi.org/pypi/requests/%s' % filename - data = 'this is something' - requests_mock.get(url, text=data, headers={ - 'content-length': str(len(data)) - }) + filename = "requests-0.0.1.tar.gz" + url = "https://pypi.org/pypi/requests/%s" % filename + data = "this is something" + requests_mock.get(url, text=data, headers={"content-length": str(len(data))}) # good hashes for such file good = { - 'sha1': 'fdd1ce606a904b08c816ba84f3125f2af44d92b2', - 'sha256': '1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5', # noqa + "sha1": "fdd1ce606a904b08c816ba84f3125f2af44d92b2", + "sha256": "1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5", # noqa } - actual_filepath, actual_hashes = download(url, dest=str(tmp_path), - hashes=good) + actual_filepath, actual_hashes = download(url, dest=str(tmp_path), hashes=good) actual_filename = os.path.basename(actual_filepath) assert actual_filename == filename - assert actual_hashes['length'] == 
len(data) - assert actual_hashes['checksums']['sha1'] == good['sha1'] - assert actual_hashes['checksums']['sha256'] == good['sha256'] + assert actual_hashes["length"] == len(data) + assert actual_hashes["checksums"]["sha1"] == good["sha1"] + assert actual_hashes["checksums"]["sha256"] == good["sha256"] @pytest.mark.fs def test_download_fail_hashes_mismatch(tmp_path, requests_mock): """Mismatch hash after download should raise """ - filename = 'requests-0.0.1.tar.gz' - url = 'https://pypi.org/pypi/requests/%s' % filename - data = 'this is something' - requests_mock.get(url, text=data, headers={ - 'content-length': str(len(data)) - }) + filename = "requests-0.0.1.tar.gz" + url = "https://pypi.org/pypi/requests/%s" % filename + data = "this is something" + requests_mock.get(url, text=data, headers={"content-length": str(len(data))}) # good hashes for such file good = { - 'sha1': 'fdd1ce606a904b08c816ba84f3125f2af44d92b2', - 'sha256': '1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5', # noqa + "sha1": "fdd1ce606a904b08c816ba84f3125f2af44d92b2", + "sha256": "1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5", # noqa } for hash_algo in good.keys(): - wrong_hash = good[hash_algo].replace('1', '0') + wrong_hash = good[hash_algo].replace("1", "0") expected_hashes = good.copy() expected_hashes[hash_algo] = wrong_hash # set the wrong hash - expected_msg = ("Failure when fetching %s. " - "Checksum mismatched: %s != %s" % ( - url, wrong_hash, good[hash_algo] - )) + expected_msg = "Failure when fetching %s. " "Checksum mismatched: %s != %s" % ( + url, + wrong_hash, + good[hash_algo], + ) with pytest.raises(ValueError, match=expected_msg): download(url, dest=str(tmp_path), hashes=expected_hashes) def test_api_info_failure(requests_mock): """Failure to fetch info/release information should raise""" - url = 'https://pypi.org/pypi/requests/json' + url = "https://pypi.org/pypi/requests/json" status_code = 400 requests_mock.get(url, status_code=status_code) with pytest.raises(ValueError) as e0: api_info(url) - assert e0.value.args[0] == "Fail to query '%s'. Reason: %s" % ( - url, status_code - ) + assert e0.value.args[0] == "Fail to query '%s'. 
Reason: %s" % (url, status_code) def test_api_info(requests_mock): """Fetching json info from pypi project should be ok""" - url = 'https://pypi.org/pypi/requests/json' + url = "https://pypi.org/pypi/requests/json" requests_mock.get(url, text='{"version": "0.0.1"}') actual_info = api_info(url) assert actual_info == { - 'version': '0.0.1', + "version": "0.0.1", } def test_release_name(): for version, filename, expected_release in [ - ('0.0.1', None, 'releases/0.0.1'), - ('0.0.2', 'something', 'releases/0.0.2/something')]: + ("0.0.1", None, "releases/0.0.1"), + ("0.0.2", "something", "releases/0.0.2/something"), + ]: assert release_name(version, filename) == expected_release def test_artifact_identity(): """Compute primary key should return the right identity """ data = { - 'a': 1, - 'b': 2, - 'length': 221837, - 'filename': '8sync-0.1.0.tar.gz', - 'version': '0.1.0', + "a": 1, + "b": 2, + "length": 221837, + "filename": "8sync-0.1.0.tar.gz", + "version": "0.1.0", } for id_keys, expected_id in [ - (['a', 'b'], [1, 2]), - ([], []), - (['a', 'key-that-does-not-exist'], [1, None]) + (["a", "b"], [1, 2]), + ([], []), + (["a", "key-that-does-not-exist"], [1, None]), ]: actual_id = artifact_identity(data, id_keys=id_keys) assert actual_id == expected_id diff --git a/swh/loader/package/utils.py b/swh/loader/package/utils.py index 7408cfd..cdf6c46 100644 --- a/swh/loader/package/utils.py +++ b/swh/loader/package/utils.py @@ -1,138 +1,136 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import logging import os import requests from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple from swh.model.hashutil import MultiHash, HASH_BLOCK_SIZE from swh.model.model import Person from swh.loader.package import DEFAULT_PARAMS logger = logging.getLogger(__name__) -DOWNLOAD_HASHES = set(['sha1', 'sha256', 'length']) +DOWNLOAD_HASHES = set(["sha1", "sha256", "length"]) -EMPTY_AUTHOR = Person( - fullname=b'', - name=None, - email=None, -) +EMPTY_AUTHOR = Person(fullname=b"", name=None, email=None,) def api_info(url: str) -> Dict: """Basic api client to retrieve information on project. This deals with fetching json metadata about pypi projects. Args: url (str): The api url (e.g PyPI, npm, etc...) Raises: ValueError in case of query failures (for some reasons: 404, ...) Returns: The associated response's information dict """ response = requests.get(url, **DEFAULT_PARAMS) if response.status_code != 200: - raise ValueError("Fail to query '%s'. Reason: %s" % ( - url, response.status_code)) + raise ValueError("Fail to query '%s'. Reason: %s" % (url, response.status_code)) return response.json() -def download(url: str, dest: str, hashes: Dict = {}, - filename: Optional[str] = None, - auth: Optional[Tuple[str, str]] = None) -> Tuple[str, Dict]: +def download( + url: str, + dest: str, + hashes: Dict = {}, + filename: Optional[str] = None, + auth: Optional[Tuple[str, str]] = None, +) -> Tuple[str, Dict]: """Download a remote tarball from url, uncompresses and computes swh hashes on it. Args: url: Artifact uri to fetch, uncompress and hash dest: Directory to write the archive to hashes: Dict of expected hashes (key is the hash algo) for the artifact to download (those hashes are expected to be hex string) auth: Optional tuple of login/password (for http authentication service, e.g. 
deposit) Raises: ValueError in case of any error when fetching/computing (length, checksums mismatched...) Returns: Tuple of local (filepath, hashes of filepath) """ params = copy.deepcopy(DEFAULT_PARAMS) if auth is not None: - params['auth'] = auth + params["auth"] = auth response = requests.get(url, **params, stream=True) if response.status_code != 200: - raise ValueError("Fail to query '%s'. Reason: %s" % ( - url, response.status_code)) + raise ValueError("Fail to query '%s'. Reason: %s" % (url, response.status_code)) filename = filename if filename else os.path.basename(url) - logger.debug('filename: %s', filename) + logger.debug("filename: %s", filename) filepath = os.path.join(dest, filename) - logger.debug('filepath: %s', filepath) + logger.debug("filepath: %s", filepath) h = MultiHash(hash_names=DOWNLOAD_HASHES) - with open(filepath, 'wb') as f: + with open(filepath, "wb") as f: for chunk in response.iter_content(chunk_size=HASH_BLOCK_SIZE): h.update(chunk) f.write(chunk) # Also check the expected hashes if provided if hashes: actual_hashes = h.hexdigest() for algo_hash in hashes.keys(): actual_digest = actual_hashes[algo_hash] expected_digest = hashes[algo_hash] if actual_digest != expected_digest: raise ValueError( - 'Failure when fetching %s. ' - 'Checksum mismatched: %s != %s' % ( - url, expected_digest, actual_digest)) + "Failure when fetching %s. " + "Checksum mismatched: %s != %s" + % (url, expected_digest, actual_digest) + ) computed_hashes = h.hexdigest() - length = computed_hashes.pop('length') + length = computed_hashes.pop("length") extrinsic_metadata = { - 'length': length, - 'filename': filename, - 'checksums': computed_hashes, + "length": length, + "filename": filename, + "checksums": computed_hashes, } - logger.debug('extrinsic_metadata', extrinsic_metadata) + logger.debug("extrinsic_metadata", extrinsic_metadata) return filepath, extrinsic_metadata def release_name(version: str, filename: Optional[str] = None) -> str: if filename: - return 'releases/%s/%s' % (version, filename) - return 'releases/%s' % version + return "releases/%s/%s" % (version, filename) + return "releases/%s" % version -def artifact_identity(d: Mapping[str, Any], - id_keys: Sequence[str]) -> List[Any]: +def artifact_identity(d: Mapping[str, Any], id_keys: Sequence[str]) -> List[Any]: """Compute the primary key for a dict using the id_keys as primary key composite. 
Args: d: A dict entry to compute the primary key on id_keys: Sequence of keys to use as primary key Returns: The identity for that dict entry """ return [d.get(k) for k in id_keys] diff --git a/swh/loader/tests/conftest.py b/swh/loader/tests/conftest.py index bd9aaad..7ea89ee 100644 --- a/swh/loader/tests/conftest.py +++ b/swh/loader/tests/conftest.py @@ -1,29 +1,19 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from typing import Any, Dict @pytest.fixture def swh_loader_config() -> Dict[str, Any]: return { - 'storage': { - 'cls': 'pipeline', - 'steps': [ - { - 'cls': 'memory', - }, - ], - }, - 'deposit': { - 'url': 'https://deposit.softwareheritage.org/1/private', - 'auth': { - 'username': 'user', - 'password': 'pass', - } + "storage": {"cls": "pipeline", "steps": [{"cls": "memory",},],}, + "deposit": { + "url": "https://deposit.softwareheritage.org/1/private", + "auth": {"username": "user", "password": "pass",}, }, } diff --git a/swh/loader/tests/test_cli.py b/swh/loader/tests/test_cli.py index 9e36a4b..3c8ac40 100644 --- a/swh/loader/tests/test_cli.py +++ b/swh/loader/tests/test_cli.py @@ -1,113 +1,99 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from swh.loader.cli import run, list, get_loader, SUPPORTED_LOADERS from swh.loader.package.loader import PackageLoader from click.testing import CliRunner def test_get_loader_wrong_input(swh_config): """Unsupported loader should raise """ - loader_type = 'unknown' + loader_type = "unknown" assert loader_type not in SUPPORTED_LOADERS - with pytest.raises(ValueError, match='Invalid loader'): - get_loader(loader_type, url='db-url') + with pytest.raises(ValueError, match="Invalid loader"): + get_loader(loader_type, url="db-url") def test_get_loader(swh_config): """Instantiating a supported loader should be ok """ loader_input = { - 'archive': { - 'url': 'some-url', - 'artifacts': [], - }, - 'debian': { - 'url': 'some-url', - 'date': 'something', - 'packages': [], - }, - 'deposit': { - 'url': 'some-url', - 'deposit_id': 1, - }, - 'npm': { - 'url': 'https://www.npmjs.com/package/onepackage', - }, - 'pypi': { - 'url': 'some-url', - }, + "archive": {"url": "some-url", "artifacts": [],}, + "debian": {"url": "some-url", "date": "something", "packages": [],}, + "deposit": {"url": "some-url", "deposit_id": 1,}, + "npm": {"url": "https://www.npmjs.com/package/onepackage",}, + "pypi": {"url": "some-url",}, } for loader_type, kwargs in loader_input.items(): loader = get_loader(loader_type, **kwargs) assert isinstance(loader, PackageLoader) def test_run_help(swh_config): """Help message should be ok """ runner = CliRunner() - result = runner.invoke(run, ['-h']) + result = runner.invoke(run, ["-h"]) assert result.exit_code == 0 expected_help_msg = """Usage: run [OPTIONS] [archive|cran|debian|deposit|nixguix|npm|pypi] URL [OPTIONS]... Ingest with loader the origin located at Options: -h, --help Show this message and exit. 
""" # noqa assert result.output.startswith(expected_help_msg) def test_run_pypi(mocker, swh_config): """Triggering a load should be ok """ - mock_loader = mocker.patch('swh.loader.package.pypi.loader.PyPILoader') + mock_loader = mocker.patch("swh.loader.package.pypi.loader.PyPILoader") runner = CliRunner() - result = runner.invoke(run, ['pypi', 'https://some-url']) + result = runner.invoke(run, ["pypi", "https://some-url"]) assert result.exit_code == 0 - mock_loader.assert_called_once_with(url='https://some-url') # constructor + mock_loader.assert_called_once_with(url="https://some-url") # constructor def test_list_help(mocker, swh_config): """Triggering a load should be ok """ runner = CliRunner() - result = runner.invoke(list, ['--help']) + result = runner.invoke(list, ["--help"]) assert result.exit_code == 0 expected_help_msg = """Usage: list [OPTIONS] [[all|archive|cran|debian|deposit|nixguix|npm|pypi]] List supported loaders and optionally their arguments Options: -h, --help Show this message and exit. """ # noqa assert result.output.startswith(expected_help_msg) def test_list_help_npm(mocker, swh_config): """Triggering a load should be ok """ runner = CliRunner() - result = runner.invoke(list, ['npm']) + result = runner.invoke(list, ["npm"]) assert result.exit_code == 0 - expected_help_msg = '''Loader: Load npm origin's artifact releases into swh archive. + expected_help_msg = """Loader: Load npm origin's artifact releases into swh archive. signature: (url: str) -''' # noqa +""" # noqa assert result.output.startswith(expected_help_msg) diff --git a/tox.ini b/tox.ini index 6d644b6..6b250df 100644 --- a/tox.ini +++ b/tox.ini @@ -1,29 +1,36 @@ [tox] -envlist=flake8,mypy,py3 +envlist=black,flake8,mypy,py3 [testenv] extras = testing deps = swh.core[http] pytest-cov dev: ipdb commands = pytest \ !dev: --cov={envsitepackagesdir}/swh/loader/ --cov-branch \ {envsitepackagesdir}/swh/loader/ {posargs} +[testenv:black] +skip_install = true +deps = + black +commands = + {envpython} -m black --check swh + [testenv:flake8] skip_install = true deps = flake8 commands = {envpython} -m flake8 [testenv:mypy] extras = testing deps = mypy commands = mypy swh