diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 839c429..fa5b824 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,43 +1,43 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.1.0 hooks: - id: trailing-whitespace - id: check-json - id: check-yaml - repo: https://gitlab.com/pycqa/flake8 rev: 4.0.1 hooks: - id: flake8 additional_dependencies: [flake8-bugbear==22.3.23] - repo: https://github.com/codespell-project/codespell rev: v2.1.0 hooks: - id: codespell name: Check source code spelling - exclude: ^(swh/loader/package/.*[/]+tests/data/.*)$ + exclude: ^(swh/loader/.*/tests/data/.*)$ args: [-L crate] entry: codespell --ignore-words-list=iff stages: [commit] - repo: local hooks: - id: mypy name: mypy entry: mypy args: [swh] pass_filenames: false language: system types: [python] - repo: https://github.com/PyCQA/isort rev: 5.10.1 hooks: - id: isort - repo: https://github.com/python/black rev: 22.3.0 hooks: - id: black diff --git a/PKG-INFO b/PKG-INFO index 06e2a38..49b7c53 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,52 +1,55 @@ Metadata-Version: 2.1 Name: swh.loader.core -Version: 4.2.0 +Version: 5.0.0 Summary: Software Heritage Base Loader Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-core Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core is a low-level loading utilities and helpers used by :term:`loaders `. The main entry points are classes: -- :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) -- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) +- :class:`swh.loader.core.loader.BaseLoader` for VCS loaders (e.g. svn) +- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. git, ...) +- :class:`swh.loader.core.loader.ContentLoader` for Content loader +- :class:`swh.loader.core.loader.DirectoryLoader` for Directory loader - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) +- ... Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations. 
This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found diff --git a/README.rst b/README.rst index 4e27671..1d0fb58 100644 --- a/README.rst +++ b/README.rst @@ -1,30 +1,33 @@ Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core is a low-level loading utilities and helpers used by :term:`loaders `. The main entry points are classes: -- :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) -- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) +- :class:`swh.loader.core.loader.BaseLoader` for VCS loaders (e.g. svn) +- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. git, ...) +- :class:`swh.loader.core.loader.ContentLoader` for Content loader +- :class:`swh.loader.core.loader.DirectoryLoader` for Directory loader - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) +- ... Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations. This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found diff --git a/docs/README.rst b/docs/README.rst index 4e27671..1d0fb58 100644 --- a/docs/README.rst +++ b/docs/README.rst @@ -1,30 +1,33 @@ Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core is a low-level loading utilities and helpers used by :term:`loaders `. The main entry points are classes: -- :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) -- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) +- :class:`swh.loader.core.loader.BaseLoader` for VCS loaders (e.g. svn) +- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. git, ...) +- :class:`swh.loader.core.loader.ContentLoader` for Content loader +- :class:`swh.loader.core.loader.DirectoryLoader` for Directory loader - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) +- ... Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations. 
This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found diff --git a/docs/package-loader-specifications.rst b/docs/package-loader-specifications.rst index ce808d0..01abe7d 100644 --- a/docs/package-loader-specifications.rst +++ b/docs/package-loader-specifications.rst @@ -1,169 +1,187 @@ .. _package-loader-specifications: Package loader specifications ============================= Release fields -------------- Here is an overview of the fields (+ internal version name + branch name) used by each package loader, after D6616: .. list-table:: Fields used by each package loader :header-rows: 1 * - Loader - internal version - branch name - name - message - synthetic - author - date - Notes * - arch - ``p_info.​version`` - ``release_name(​version, filename)`` - =version - Synthetic release for Arch Linux source package {p_info.name} version {p_info.version} {description} - true - from intrinsic metadata - from extra_loader_arguments['arch_metadata'] - Intrinsic metadata extracted from .PKGINFO file of the package * - archive - passed as arg - ``release_name(​version)`` - =version - "Synthetic release for archive at {p_info.url}\n" - true - "" - passed as arg - * - aur - ``p_info.​version`` - ``release_name(​version, filename)`` - =version - Synthetic release for Aur source package {p_info.name} version {p_info.version} {description} - true - "" - from extra_loader_arguments['aur_metadata'] - Intrinsic metadata extracted from .SRCINFO file of the package + * - cpan + - ``p_info.​version`` + - ``release_name(​version)`` + - =version + - Synthetic release for Perl source package {name} version {version} {description} + - true + - from intrinsic metadata if any else from extrinsic + - from extrinsic metadata + - name, version and description from intrinsic metadata * - cran - ``metadata.get(​"Version", passed as arg)`` - ``release_name(​version)`` - =version - standard message - true - ``metadata.get(​"Maintainer", "")`` - ``metadata.get(​"Date")`` - metadata is intrinsic * - crates - ``p_info.​version`` - ``release_name(​version, filename) + "\n\n" + i_metadata.description + "\n"`` - =version - Synthetic release for Crate source package {p_info.name} version {p_info.version} {description} - true - from int metadata - from ext metadata - ``i_metadata`` for intrinsic metadata, ``e_metadata`` for extrinsic metadata * - debian - =``version`` - ``release_name(​version)`` - =``i_version`` - standard message (using ``i_version``) - true - ``metadata​.changelog​.person`` - ``metadata​.changelog​.date`` - metadata is intrinsic. Old revisions have ``dsc`` as type ``i_version`` is the intrinsic version (eg. ``0.7.2-3``) while ``version`` contains the debian suite name (eg. 
``stretch/contrib/0.7.2-3``) and is passed as arg * - golang - ``p_info.​version`` - ``release_name(version)`` - =version - Synthetic release for Golang source package {p_info.name} version {p_info.version} - true - "" - from ext metadata - Golang offers basically no metadata outside of version and timestamp * - deposit - HEAD - only HEAD - HEAD - "{client}: Deposit {id} in collection {collection}\n" - true - original author - ```` from SWORD XML - revisions had parents * - maven-loader - passed as arg - HEAD - ``release_name(version)`` - "Synthetic release for archive at {p_info.url}\n" - true - "" - passed as arg - Only one artefact per url (jar/zip src) * - nixguix - URL - URL - URL - None - true - "" - None - it's the URL of the artifact referenced by the derivation * - npm - ``metadata​["version"]`` - ``release_name(​version)`` - =version - standard message - true - from int metadata or "" - from ext metadata or None - * - opam - as given by opam - "{opam_package}​.{version}" - =version - standard message - true - from metadata - None - "{self.opam_package}​.{version}" matches the version names used by opam's backend. metadata is extrinsic * - pubdev - ``p_info.​version`` - ``release_name(​version)`` - =version - - Synthetic release for pub.dev source package {name} version {version} {description} + - Synthetic release for pub.dev source package {p_info.name} version {p_info.version} - true - from extrinsic metadata - from extrinsic metadata - - name, version and description from intrinsic metadata + - name and version from extrinsic metadata + * - puppet + - ``p_info.​version`` + - ``release_name(​version)`` + - =version + - Synthetic release for Puppet source package {p_info.name} version {version} {description} + - true + - from intrinsic metadata + - from extrinsic metadata + - version and description from intrinsic metadata * - pypi - ``metadata​["version"]`` - ``release_name(​version)`` or ``release_name(​version, filename)`` - =version - ``metadata[​'comment_text']}`` or standard message - true - from int metadata or "" - from ext metadata or None - metadata is intrinsic using this function:: def release_name(version: str, filename: Optional[str] = None) -> str: if filename: return "releases/%s/%s" % (version, filename) return "releases/%s" % version and "standard message" being:: msg = ( f"Synthetic release for {PACKAGE_MANAGER} source package {name} " f"version {version}\n" ) The ``target_type`` field is always ``dir``, and the target the id of a directory loaded by unpacking a tarball/zip file/... 
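As a quick illustration of how the two snippets above fit together (a minimal sketch; the ``PACKAGE_MANAGER``, ``name``, ``version`` and ``filename`` values are hypothetical examples, not taken from any particular loader), the branch name and the standard message would combine as follows::

    from typing import Optional

    def release_name(version: str, filename: Optional[str] = None) -> str:
        # branch name derived from the version, optionally qualified by the file name
        if filename:
            return "releases/%s/%s" % (version, filename)
        return "releases/%s" % version

    # hypothetical inputs, only to show the resulting strings
    PACKAGE_MANAGER, name, version = "pypi", "0805nexter", "1.2.0"
    branch = release_name(version, "0805nexter-1.2.0.zip")  # "releases/1.2.0/0805nexter-1.2.0.zip"
    message = (
        f"Synthetic release for {PACKAGE_MANAGER} source package {name} "
        f"version {version}\n"
    )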
diff --git a/requirements-swh.txt b/requirements-swh.txt index 30b3fcc..4f0fc56 100644 --- a/requirements-swh.txt +++ b/requirements-swh.txt @@ -1,5 +1,5 @@ swh.core >= 2.12 -swh.model >= 4.4.0 +swh.model >= 6.5.1 swh.objstorage >= 0.2.2 swh.scheduler >= 0.4.0 swh.storage >= 0.29.0 diff --git a/setup.py b/setup.py index 421f131..93e7f45 100755 --- a/setup.py +++ b/setup.py @@ -1,86 +1,90 @@ #!/usr/bin/env python3 # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from io import open from os import path from setuptools import find_packages, setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, "README.rst"), encoding="utf-8") as f: long_description = f.read() def parse_requirements(name=None): if name: reqf = "requirements-%s.txt" % name else: reqf = "requirements.txt" requirements = [] if not path.exists(reqf): return requirements with open(reqf) as f: for line in f.readlines(): line = line.strip() if not line or line.startswith("#"): continue requirements.append(line) return requirements setup( name="swh.loader.core", description="Software Heritage Base Loader", long_description=long_description, long_description_content_type="text/markdown", python_requires=">=3.7", author="Software Heritage developers", author_email="swh-devel@inria.fr", url="https://forge.softwareheritage.org/diffusion/DLDBASE", packages=find_packages(), # packages's modules scripts=[], # scripts to package install_requires=parse_requirements() + parse_requirements("swh"), setup_requires=["setuptools-scm"], use_scm_version=True, extras_require={"testing": parse_requirements("test")}, include_package_data=True, entry_points=""" [swh.cli.subcommands] loader=swh.loader.cli [swh.workers] + loader.content=swh.loader.core:register_content + loader.directory=swh.loader.core:register_directory loader.arch=swh.loader.package.arch:register loader.archive=swh.loader.package.archive:register loader.aur=swh.loader.package.aur:register + loader.cpan=swh.loader.package.cpan:register loader.cran=swh.loader.package.cran:register loader.crates=swh.loader.package.crates:register loader.debian=swh.loader.package.debian:register loader.deposit=swh.loader.package.deposit:register loader.golang=swh.loader.package.golang:register loader.nixguix=swh.loader.package.nixguix:register loader.npm=swh.loader.package.npm:register loader.opam=swh.loader.package.opam:register loader.pubdev=swh.loader.package.pubdev:register + loader.puppet=swh.loader.package.puppet:register loader.pypi=swh.loader.package.pypi:register loader.maven=swh.loader.package.maven:register """, classifiers=[ "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", "Operating System :: OS Independent", "Development Status :: 5 - Production/Stable", ], project_urls={ "Bug Reports": "https://forge.softwareheritage.org/maniphest", "Funding": "https://www.softwareheritage.org/donate", "Source": "https://forge.softwareheritage.org/source/swh-loader-core", "Documentation": "https://docs.softwareheritage.org/devel/swh-loader-core/", }, ) diff --git a/swh.loader.core.egg-info/PKG-INFO b/swh.loader.core.egg-info/PKG-INFO index 06e2a38..49b7c53 100644 --- a/swh.loader.core.egg-info/PKG-INFO +++ b/swh.loader.core.egg-info/PKG-INFO @@ -1,52 
+1,55 @@ Metadata-Version: 2.1 Name: swh.loader.core -Version: 4.2.0 +Version: 5.0.0 Summary: Software Heritage Base Loader Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-core Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core is a low-level loading utilities and helpers used by :term:`loaders `. The main entry points are classes: -- :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) -- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) +- :class:`swh.loader.core.loader.BaseLoader` for VCS loaders (e.g. svn) +- :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. git, ...) +- :class:`swh.loader.core.loader.ContentLoader` for Content loader +- :class:`swh.loader.core.loader.DirectoryLoader` for Directory loader - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) +- ... Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations. 
This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found diff --git a/swh.loader.core.egg-info/SOURCES.txt b/swh.loader.core.egg-info/SOURCES.txt index cbd0425..8a2b351 100644 --- a/swh.loader.core.egg-info/SOURCES.txt +++ b/swh.loader.core.egg-info/SOURCES.txt @@ -1,300 +1,327 @@ .git-blame-ignore-revs .gitignore .pre-commit-config.yaml AUTHORS CODE_OF_CONDUCT.md CONTRIBUTORS LICENSE MANIFEST.in Makefile README.rst conftest.py mypy.ini pyproject.toml pytest.ini requirements-swh.txt requirements-test.txt requirements.txt setup.cfg setup.py tox.ini docs/.gitignore docs/Makefile docs/README.rst docs/cli.rst docs/conf.py docs/index.rst docs/package-loader-specifications.rst docs/package-loader-tutorial.rst docs/vcs-loader-overview.rst docs/_static/.placeholder docs/_templates/.placeholder swh/__init__.py swh.loader.core.egg-info/PKG-INFO swh.loader.core.egg-info/SOURCES.txt swh.loader.core.egg-info/dependency_links.txt swh.loader.core.egg-info/entry_points.txt swh.loader.core.egg-info/requires.txt swh.loader.core.egg-info/top_level.txt swh/loader/__init__.py swh/loader/cli.py swh/loader/exception.py swh/loader/pytest_plugin.py swh/loader/core/__init__.py swh/loader/core/converters.py +swh/loader/core/discovery.py swh/loader/core/loader.py swh/loader/core/metadata_fetchers.py swh/loader/core/py.typed +swh/loader/core/tasks.py swh/loader/core/utils.py swh/loader/core/tests/__init__.py +swh/loader/core/tests/conftest.py swh/loader/core/tests/test_converters.py swh/loader/core/tests/test_loader.py +swh/loader/core/tests/test_tasks.py swh/loader/core/tests/test_utils.py +swh/loader/core/tests/data/https_common-lisp.net/project_asdf_archives_asdf-3.3.5.lisp +swh/loader/core/tests/data/https_example.org/archives_dummy-hello.tar.gz swh/loader/package/__init__.py swh/loader/package/loader.py swh/loader/package/py.typed swh/loader/package/utils.py swh/loader/package/arch/__init__.py swh/loader/package/arch/loader.py swh/loader/package/arch/tasks.py swh/loader/package/arch/tests/__init__.py swh/loader/package/arch/tests/test_arch.py swh/loader/package/arch/tests/test_tasks.py swh/loader/package/arch/tests/data/fake_arch.sh swh/loader/package/arch/tests/data/https_archive.archlinux.org/packages_d_dialog_dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz swh/loader/package/arch/tests/data/https_archive.archlinux.org/packages_d_dialog_dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst swh/loader/package/arch/tests/data/https_uk.mirror.archlinuxarm.org/aarch64_core_gzip-1.12-1-aarch64.pkg.tar.xz swh/loader/package/archive/__init__.py swh/loader/package/archive/loader.py swh/loader/package/archive/tasks.py swh/loader/package/archive/tests/__init__.py swh/loader/package/archive/tests/test_archive.py swh/loader/package/archive/tests/test_tasks.py swh/loader/package/archive/tests/data/not_gzipped_tarball.tar.gz swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit1 swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit2 swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.2.0.tar.gz swh/loader/package/aur/__init__.py swh/loader/package/aur/loader.py swh/loader/package/aur/tasks.py swh/loader/package/aur/tests/__init__.py swh/loader/package/aur/tests/test_aur.py swh/loader/package/aur/tests/test_tasks.py 
swh/loader/package/aur/tests/data/fake_aur.sh swh/loader/package/aur/tests/data/https_aur.archlinux.org/cgit_aur.git_snapshot_a-fake-one.tar.gz swh/loader/package/aur/tests/data/https_aur.archlinux.org/cgit_aur.git_snapshot_hg-evolve.tar.gz swh/loader/package/aur/tests/data/https_aur.archlinux.org/cgit_aur.git_snapshot_ibus-git.tar.gz swh/loader/package/aur/tests/data/https_aur.archlinux.org/cgit_aur.git_snapshot_libervia-web-hg.tar.gz swh/loader/package/aur/tests/data/https_aur.archlinux.org/cgit_aur.git_snapshot_tealdeer-git.tar.gz +swh/loader/package/cpan/__init__.py +swh/loader/package/cpan/loader.py +swh/loader/package/cpan/tasks.py +swh/loader/package/cpan/tests/__init__.py +swh/loader/package/cpan/tests/test_cpan.py +swh/loader/package/cpan/tests/test_tasks.py +swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.01.tar.gz +swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.05.tar.gz +swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.01 +swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.05 swh/loader/package/cran/__init__.py swh/loader/package/cran/loader.py swh/loader/package/cran/tasks.py swh/loader/package/cran/tests/__init__.py swh/loader/package/cran/tests/test_cran.py swh/loader/package/cran/tests/test_tasks.py swh/loader/package/cran/tests/data/description/KnownBR swh/loader/package/cran/tests/data/description/acepack swh/loader/package/cran/tests/data/https_cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz swh/loader/package/crates/__init__.py swh/loader/package/crates/loader.py swh/loader/package/crates/tasks.py swh/loader/package/crates/tests/__init__.py swh/loader/package/crates/tests/test_crates.py swh/loader/package/crates/tests/test_tasks.py swh/loader/package/crates/tests/data/fake_crates.sh swh/loader/package/crates/tests/data/https_crates.io/api_v1_crates_hg-core swh/loader/package/crates/tests/data/https_crates.io/api_v1_crates_micro-timer swh/loader/package/crates/tests/data/https_static.crates.io/crates_hg-core_hg-core-0.0.1.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.1.0.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.1.1.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.1.2.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.2.0.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.2.1.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.3.0.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.3.1.crate swh/loader/package/crates/tests/data/https_static.crates.io/crates_micro-timer_micro-timer-0.4.0.crate swh/loader/package/debian/__init__.py swh/loader/package/debian/loader.py swh/loader/package/debian/tasks.py swh/loader/package/debian/tests/__init__.py swh/loader/package/debian/tests/test_debian.py swh/loader/package/debian/tests/test_tasks.py swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-3.diff.gz swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-3.dsc 
swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-4.diff.gz swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-4.dsc swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2.orig.tar.gz swh/loader/package/debian/tests/data/http_deb.debian.org/onefile.txt swh/loader/package/deposit/__init__.py swh/loader/package/deposit/loader.py swh/loader/package/deposit/tasks.py swh/loader/package/deposit/tests/__init__.py swh/loader/package/deposit/tests/conftest.py swh/loader/package/deposit/tests/test_deposit.py swh/loader/package/deposit/tests/test_tasks.py swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_666_meta swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_666_raw swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_777_meta swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_777_raw swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_888_meta swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_888_raw swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_999_meta swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_999_raw swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello-2.10.zip swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello-2.12.tar.gz swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello_2.10.json swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello_2.11.json swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello_2.12.json swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello_2.13.json swh/loader/package/golang/__init__.py swh/loader/package/golang/loader.py swh/loader/package/golang/tasks.py swh/loader/package/golang/tests/__init__.py swh/loader/package/golang/tests/test_golang.py swh/loader/package/golang/tests/test_tasks.py swh/loader/package/golang/tests/data/https_proxy.golang.org/example.com_basic-go-module_@latest swh/loader/package/golang/tests/data/https_proxy.golang.org/example.com_basic-go-module_@v_list swh/loader/package/golang/tests/data/https_proxy.golang.org/example.com_basic-go-module_@v_v0.1.3.info swh/loader/package/golang/tests/data/https_proxy.golang.org/example.com_basic-go-module_@v_v0.1.3.zip swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_adam-hanna_array!operations_@latest swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_adam-hanna_array!operations_@v_list swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_adam-hanna_array!operations_@v_v1.0.1.info swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_adam-hanna_array!operations_@v_v1.0.1.zip swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_xgdapg_daemon_@latest swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_xgdapg_daemon_@v_list swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_xgdapg_daemon_@v_v0.0.0-20131225113241-85981e2038bf.info swh/loader/package/golang/tests/data/https_proxy.golang.org/github.com_xgdapg_daemon_@v_v0.0.0-20131225113241-85981e2038bf.zip swh/loader/package/maven/__init__.py 
swh/loader/package/maven/loader.py swh/loader/package/maven/tasks.py swh/loader/package/maven/tests/__init__.py swh/loader/package/maven/tests/test_maven.py swh/loader/package/maven/tests/test_tasks.py swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar +swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar.sha1 swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0.pom swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar +swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar.sha1 swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1.pom swh/loader/package/nixguix/__init__.py swh/loader/package/nixguix/loader.py swh/loader/package/nixguix/tasks.py swh/loader/package/nixguix/tests/__init__.py swh/loader/package/nixguix/tests/conftest.py swh/loader/package/nixguix/tests/test_nixguix.py swh/loader/package/nixguix/tests/test_tasks.py swh/loader/package/nixguix/tests/data/https_example.com/file.txt swh/loader/package/nixguix/tests/data/https_fail.com/truncated-archive.tgz swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit1 swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit2 swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.2.0.tar.gz swh/loader/package/nixguix/tests/data/https_github.com/owner-1_repository-1_revision-1.tgz swh/loader/package/nixguix/tests/data/https_github.com/owner-2_repository-1_revision-1.tgz swh/loader/package/nixguix/tests/data/https_github.com/owner-3_repository-1_revision-1.tgz swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources-EOFError.json swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources.json swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources.json_visit1 swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources_special.json swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources_special.json_visit1 swh/loader/package/npm/__init__.py swh/loader/package/npm/loader.py swh/loader/package/npm/tasks.py swh/loader/package/npm/tests/__init__.py swh/loader/package/npm/tests/test_npm.py swh/loader/package/npm/tests/test_tasks.py swh/loader/package/npm/tests/data/https_registry.npmjs.org/@aller_shared_-_shared-0.1.0.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/@aller_shared_-_shared-0.1.1-alpha.14.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/jammit-express_-_jammit-express-0.0.1.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/nativescript-telerik-analytics_-_nativescript-telerik-analytics-1.0.0.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.2.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.3-beta.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.3.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.4.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.5.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.1.0.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.2.0.tgz swh/loader/package/npm/tests/data/https_replicate.npmjs.com/@aller_shared 
swh/loader/package/npm/tests/data/https_replicate.npmjs.com/catify swh/loader/package/npm/tests/data/https_replicate.npmjs.com/jammit-express swh/loader/package/npm/tests/data/https_replicate.npmjs.com/jammit-no-time swh/loader/package/npm/tests/data/https_replicate.npmjs.com/nativescript-telerik-analytics swh/loader/package/npm/tests/data/https_replicate.npmjs.com/org swh/loader/package/npm/tests/data/https_replicate.npmjs.com/org_version_mismatch swh/loader/package/npm/tests/data/https_replicate.npmjs.com/org_visit1 swh/loader/package/opam/__init__.py swh/loader/package/opam/loader.py swh/loader/package/opam/tasks.py swh/loader/package/opam/tests/__init__.py swh/loader/package/opam/tests/test_opam.py swh/loader/package/opam/tests/test_tasks.py swh/loader/package/opam/tests/data/fake_opam_repo/_repo swh/loader/package/opam/tests/data/fake_opam_repo/version swh/loader/package/opam/tests/data/fake_opam_repo/repo/loadertest/lock swh/loader/package/opam/tests/data/fake_opam_repo/repo/loadertest/repos-config swh/loader/package/opam/tests/data/fake_opam_repo/repo/loadertest/packages/agrid/agrid.0.1/opam swh/loader/package/opam/tests/data/fake_opam_repo/repo/loadertest/packages/directories/directories.0.1/opam swh/loader/package/opam/tests/data/fake_opam_repo/repo/loadertest/packages/directories/directories.0.2/opam swh/loader/package/opam/tests/data/fake_opam_repo/repo/loadertest/packages/directories/directories.0.3/opam swh/loader/package/opam/tests/data/fake_opam_repo/repo/loadertest/packages/ocb/ocb.0.1/opam swh/loader/package/opam/tests/data/https_github.com/OCamlPro_agrid_archive_0.1.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_directories_archive_0.1.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_directories_archive_0.2.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_directories_archive_0.3.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_ocb_archive_0.1.tar.gz swh/loader/package/pubdev/__init__.py swh/loader/package/pubdev/loader.py swh/loader/package/pubdev/tasks.py swh/loader/package/pubdev/tests/__init__.py swh/loader/package/pubdev/tests/test_pubdev.py swh/loader/package/pubdev/tests/test_tasks.py swh/loader/package/pubdev/tests/data/fake_pubdev.sh swh/loader/package/pubdev/tests/data/https_pub.dartlang.org/packages_Autolinker_versions_0.1.1.tar.gz swh/loader/package/pubdev/tests/data/https_pub.dartlang.org/packages_authentication_versions_0.0.1.tar.gz swh/loader/package/pubdev/tests/data/https_pub.dartlang.org/packages_bezier_versions_1.1.5.tar.gz swh/loader/package/pubdev/tests/data/https_pub.dartlang.org/packages_pdf_versions_1.0.0.tar.gz swh/loader/package/pubdev/tests/data/https_pub.dartlang.org/packages_pdf_versions_3.8.2.tar.gz swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_abstract_io swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_audio_manager swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf +swh/loader/package/puppet/__init__.py +swh/loader/package/puppet/loader.py +swh/loader/package/puppet/tasks.py +swh/loader/package/puppet/tests/__init__.py +swh/loader/package/puppet/tests/test_puppet.py +swh/loader/package/puppet/tests/test_tasks.py +swh/loader/package/puppet/tests/data/fake_puppet.sh 
+swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-1.0.0.tar.gz +swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-8.1.0.tar.gz swh/loader/package/pypi/__init__.py swh/loader/package/pypi/loader.py swh/loader/package/pypi/tasks.py swh/loader/package/pypi/tests/__init__.py swh/loader/package/pypi/tests/test_pypi.py swh/loader/package/pypi/tests/test_tasks.py swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.1.0.tar.gz swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.1.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.2.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.3.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.4.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/nexter-1.1.0.tar.gz swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/nexter-1.1.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_70_97_c49fb8ec24a7aaab54c3dbfbb5a6ca1431419d9ee0f6c363d9ad01d2b8b1_0805nexter-1.3.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_c4_a0_4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4_0805nexter-1.2.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_c4_a0_4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4_0805nexter-1.2.0.zip_visit1 swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_ec_65_c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d_0805nexter-1.1.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_ec_65_c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d_0805nexter-1.1.0.zip_visit1 swh/loader/package/pypi/tests/data/https_pypi.org/pypi_0805nexter_json swh/loader/package/pypi/tests/data/https_pypi.org/pypi_0805nexter_json_visit1 swh/loader/package/pypi/tests/data/https_pypi.org/pypi_nexter_json swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json swh/loader/package/tests/__init__.py swh/loader/package/tests/common.py swh/loader/package/tests/test_conftest.py swh/loader/package/tests/test_loader.py swh/loader/package/tests/test_loader_metadata.py swh/loader/package/tests/test_utils.py swh/loader/package/tests/data/https_example.org/package_example_example-v1.0.tar.gz swh/loader/package/tests/data/https_example.org/package_example_example-v2.0.tar.gz swh/loader/package/tests/data/https_example.org/package_example_example-v3.0.tar.gz swh/loader/package/tests/data/https_example.org/package_example_example-v4.0.tar.gz swh/loader/tests/__init__.py swh/loader/tests/conftest.py swh/loader/tests/py.typed swh/loader/tests/test_cli.py swh/loader/tests/test_init.py swh/loader/tests/data/0805nexter-1.1.0.tar.gz \ No newline at end of file diff --git a/swh.loader.core.egg-info/entry_points.txt b/swh.loader.core.egg-info/entry_points.txt index 018531c..b303193 100644 --- a/swh.loader.core.egg-info/entry_points.txt +++ b/swh.loader.core.egg-info/entry_points.txt @@ -1,18 +1,22 @@ [swh.cli.subcommands] loader = swh.loader.cli [swh.workers] loader.arch = swh.loader.package.arch:register loader.archive = swh.loader.package.archive:register loader.aur = swh.loader.package.aur:register +loader.content = 
swh.loader.core:register_content +loader.cpan = swh.loader.package.cpan:register loader.cran = swh.loader.package.cran:register loader.crates = swh.loader.package.crates:register loader.debian = swh.loader.package.debian:register loader.deposit = swh.loader.package.deposit:register +loader.directory = swh.loader.core:register_directory loader.golang = swh.loader.package.golang:register loader.maven = swh.loader.package.maven:register loader.nixguix = swh.loader.package.nixguix:register loader.npm = swh.loader.package.npm:register loader.opam = swh.loader.package.opam:register loader.pubdev = swh.loader.package.pubdev:register +loader.puppet = swh.loader.package.puppet:register loader.pypi = swh.loader.package.pypi:register diff --git a/swh.loader.core.egg-info/requires.txt b/swh.loader.core.egg-info/requires.txt index bfad1d7..0cd7e54 100644 --- a/swh.loader.core.egg-info/requires.txt +++ b/swh.loader.core.egg-info/requires.txt @@ -1,26 +1,26 @@ psutil requests iso8601 pkginfo python-debian python-dateutil typing-extensions toml packaging swh.core>=2.12 -swh.model>=4.4.0 +swh.model>=6.5.1 swh.objstorage>=0.2.2 swh.scheduler>=0.4.0 swh.storage>=0.29.0 [testing] pytest pytest-mock requests_mock swh-core[testing] swh-scheduler[testing]>=0.5.0 swh-storage[testing]>=0.10.6 types-click types-python-dateutil types-pyyaml types-requests diff --git a/swh/loader/cli.py b/swh/loader/cli.py index bc8076c..775209e 100644 --- a/swh/loader/cli.py +++ b/swh/loader/cli.py @@ -1,141 +1,144 @@ -# Copyright (C) 2019-2021 The Software Heritage developers +# Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information # WARNING: do not import unnecessary things here to keep cli startup time under # control import logging from typing import Any import click import pkg_resources from swh.core.cli import CONTEXT_SETTINGS from swh.core.cli import swh as swh_cli_group logger = logging.getLogger(__name__) LOADERS = { entry_point.name.split(".", 1)[1]: entry_point for entry_point in pkg_resources.iter_entry_points("swh.workers") if entry_point.name.split(".", 1)[0] == "loader" } SUPPORTED_LOADERS = sorted(list(LOADERS)) def get_loader(name: str, **kwargs) -> Any: """Given a loader name, instantiate it. Args: name: Loader's name kwargs: Configuration dict (url...) 
Returns: An instantiated loader """ if name not in LOADERS: raise ValueError( "Invalid loader %s: only supported loaders are %s" % (name, SUPPORTED_LOADERS) ) registry_entry = LOADERS[name].load()() logger.debug(f"registry: {registry_entry}") loader_cls = registry_entry["loader"] logger.debug(f"loader class: {loader_cls}") return loader_cls.from_config(**kwargs) @swh_cli_group.group(name="loader", context_settings=CONTEXT_SETTINGS) @click.option( "--config-file", "-C", default=None, type=click.Path( exists=True, dir_okay=False, ), help="Configuration file.", ) @click.pass_context def loader(ctx, config_file): """Loader cli tools""" from os import environ from swh.core.config import read ctx.ensure_object(dict) logger.debug("ctx: %s", ctx) if not config_file: config_file = environ.get("SWH_CONFIG_FILENAME") ctx.obj["config"] = read(config_file) logger.debug("config_file: %s", config_file) - logger.debug("config: ", ctx.obj["config"]) + logger.debug("config: %s", ctx.obj["config"]) @loader.command(name="run", context_settings=CONTEXT_SETTINGS) @click.argument("type", type=click.Choice(SUPPORTED_LOADERS)) @click.argument("url") @click.argument("options", nargs=-1) @click.pass_context def run(ctx, type, url, options): """Ingest with loader the origin located at """ import iso8601 from swh.scheduler.cli.utils import parse_options conf = ctx.obj.get("config", {}) if "storage" not in conf: - raise ValueError("Missing storage configuration key") + logger.warning( + "No storage configuration detected, using an in-memory storage instead." + ) + conf["storage"] = {"cls": "memory"} (_, kw) = parse_options(options) logger.debug(f"kw: {kw}") visit_date = kw.get("visit_date") if visit_date and isinstance(visit_date, str): visit_date = iso8601.parse_date(visit_date) kw["visit_date"] = visit_date loader = get_loader( type, url=url, storage=conf["storage"], metadata_fetcher_credentials=conf.get("metadata_fetcher_credentials"), **kw, ) result = loader.load() msg = f"{result} for origin '{url}'" directory = kw.get("directory") if directory: msg = msg + f" and directory '{directory}'" click.echo(msg) @loader.command(name="list", context_settings=CONTEXT_SETTINGS) @click.argument("type", default="all", type=click.Choice(["all"] + SUPPORTED_LOADERS)) @click.pass_context def list(ctx, type): """List supported loaders and optionally their arguments""" import inspect if type == "all": loaders = ", ".join(SUPPORTED_LOADERS) click.echo(f"Supported loaders: {loaders}") else: registry_entry = LOADERS[type].load()() loader_cls = registry_entry["loader"] doc = inspect.getdoc(loader_cls).strip() # Hack to get the signature of the class even though it subclasses # Generic, which reimplements __new__. 
# See signature = inspect.signature(loader_cls.__init__) signature_str = str(signature).replace("self, ", "") click.echo(f"Loader: {doc}\nsignature: {signature_str}") diff --git a/swh/loader/core/__init__.py b/swh/loader/core/__init__.py index e69de29..6b30de1 100644 --- a/swh/loader/core/__init__.py +++ b/swh/loader/core/__init__.py @@ -0,0 +1,27 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + + +from typing import Any, Mapping + + +def register_content() -> Mapping[str, Any]: + """Register the current worker module's definition""" + from swh.loader.core.loader import ContentLoader + + return { + "task_modules": [f"{__name__}.tasks"], + "loader": ContentLoader, + } + + +def register_directory() -> Mapping[str, Any]: + """Register the current worker module's definition""" + from swh.loader.core.loader import DirectoryLoader + + return { + "task_modules": [f"{__name__}.tasks"], + "loader": DirectoryLoader, + } diff --git a/swh/loader/core/discovery.py b/swh/loader/core/discovery.py new file mode 100644 index 0000000..45162da --- /dev/null +++ b/swh/loader/core/discovery.py @@ -0,0 +1,261 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +"""Primitives for finding the unknown parts of disk contents efficiently.""" + +import abc +from collections import namedtuple +import itertools +import logging +import random +from typing import Any, Iterable, List, Mapping, NamedTuple, Set, Union + +from swh.model.from_disk import model +from swh.model.model import Sha1Git +from swh.storage.interface import StorageInterface + +logger = logging.getLogger(__name__) + +# Maximum amount when sampling from the undecided set of directory entries +SAMPLE_SIZE = 1000 + +# Sets of sha1 of contents, skipped contents and directories respectively +Sample: NamedTuple = namedtuple( + "Sample", ["contents", "skipped_contents", "directories"] +) + + +class ArchiveDiscoveryInterface(abc.ABC): + """Interface used in discovery code to abstract over ways of connecting to + the SWH archive (direct storage, web API, etc.) 
for all methods needed by + discovery algorithms.""" + + contents: List[model.Content] + skipped_contents: List[model.SkippedContent] + directories: List[model.Directory] + + def __init__( + self, + contents: List[model.Content], + skipped_contents: List[model.SkippedContent], + directories: List[model.Directory], + ) -> None: + self.contents = contents + self.skipped_contents = skipped_contents + self.directories = directories + + @abc.abstractmethod + async def content_missing(self, contents: List[Sha1Git]) -> Iterable[Sha1Git]: + """List content missing from the archive by sha1""" + + @abc.abstractmethod + async def skipped_content_missing( + self, skipped_contents: List[Sha1Git] + ) -> Iterable[Sha1Git]: + """List skipped content missing from the archive by sha1""" + + @abc.abstractmethod + async def directory_missing(self, directories: List[Sha1Git]) -> Iterable[Sha1Git]: + """List directories missing from the archive by sha1""" + + +class DiscoveryStorageConnection(ArchiveDiscoveryInterface): + """Use the storage APIs to query the archive""" + + def __init__( + self, + contents: List[model.Content], + skipped_contents: List[model.SkippedContent], + directories: List[model.Directory], + swh_storage: StorageInterface, + ) -> None: + super().__init__(contents, skipped_contents, directories) + self.storage = swh_storage + + async def content_missing(self, contents: List[Sha1Git]) -> Iterable[Sha1Git]: + """List content missing from the archive by sha1""" + return self.storage.content_missing_per_sha1_git(contents) + + async def skipped_content_missing( + self, skipped_contents: List[Sha1Git] + ) -> Iterable[Sha1Git]: + """List skipped content missing from the archive by sha1""" + contents = [ + {"sha1_git": s, "sha1": None, "sha256": None, "blake2s256": None} + for s in skipped_contents + ] + return (d["sha1_git"] for d in self.storage.skipped_content_missing(contents)) + + async def directory_missing(self, directories: List[Sha1Git]) -> Iterable[Sha1Git]: + """List directories missing from the archive by sha1""" + return self.storage.directory_missing(directories) + + +class BaseDiscoveryGraph: + """Creates the base structures and methods needed for discovery algorithms. 
+ Subclasses should override ``get_sample`` to affect how the discovery is made.""" + + def __init__(self, contents, skipped_contents, directories): + self._all_contents: Mapping[ + Sha1Git, Union[model.Content, model.SkippedContent] + ] = {} + self._undecided_directories: Set[Sha1Git] = set() + self._children: Mapping[Sha1Git, model.DirectoryEntry] = {} + self._parents: Mapping[model.DirectoryEntry, Sha1Git] = {} + self.undecided: Set[Sha1Git] = set() + + for content in itertools.chain(contents, skipped_contents): + self.undecided.add(content.sha1_git) + self._all_contents[content.sha1_git] = content + + for directory in directories: + self.undecided.add(directory.id) + self._undecided_directories.add(directory.id) + self._children[directory.id] = {c.target for c in directory.entries} + for child in directory.entries: + self._parents.setdefault(child.target, set()).add(directory.id) + + self.undecided |= self._undecided_directories + self.known: Set[Sha1Git] = set() + self.unknown: Set[Sha1Git] = set() + + def mark_known(self, entries: Iterable[Sha1Git]): + """Mark ``entries`` and those they imply as known in the SWH archive""" + self._mark_entries(entries, self._children, self.known) + + def mark_unknown(self, entries: Iterable[Sha1Git]): + """Mark ``entries`` and those they imply as unknown in the SWH archive""" + self._mark_entries(entries, self._parents, self.unknown) + + def _mark_entries( + self, + entries: Iterable[Sha1Git], + transitive_mapping: Mapping[Any, Any], + target_set: Set[Any], + ): + """Use Merkle graph properties to mark a directory entry as known or unknown. + + If an entry is known, then all of its descendants are known. If it's + unknown, then all of its ancestors are unknown. + + - ``entries``: directory entries to mark along with their ancestors/descendants + where applicable. + - ``transitive_mapping``: mapping from an entry to the next entries to mark + in the hierarchy, if any. + - ``target_set``: set where marked entries will be added. + + """ + to_process = set(entries) + while to_process: + current = to_process.pop() + target_set.add(current) + self.undecided.discard(current) + self._undecided_directories.discard(current) + next_entries = transitive_mapping.get(current, set()) & self.undecided + to_process.update(next_entries) + + async def get_sample( + self, + ) -> Sample: + """Return a three-tuple of samples from the undecided sets of contents, + skipped contents and directories respectively. + These samples will be queried against the storage which will tell us + which are known.""" + raise NotImplementedError() + + async def do_query( + self, archive: ArchiveDiscoveryInterface, sample: Sample + ) -> None: + """Given a three-tuple of samples, ask the archive which are known or + unknown and mark them as such.""" + + methods = ( + archive.content_missing, + archive.skipped_content_missing, + archive.directory_missing, + ) + + for sample_per_type, method in zip(sample, methods): + if not sample_per_type: + continue + known = set(sample_per_type) + unknown = set(await method(list(sample_per_type))) + known -= unknown + + self.mark_known(known) + self.mark_unknown(unknown) + + +class RandomDirSamplingDiscoveryGraph(BaseDiscoveryGraph): + """Use a random sampling using only directories. + + This allows us to find a statistically good spread of entries in the graph + with a smaller population than using all types of entries. 
When there are + no more directories, only contents or skipped contents are undecided if any + are left: we send them directly to the storage since they should be few and + their structure flat.""" + + async def get_sample(self) -> Sample: + if self._undecided_directories: + if len(self._undecided_directories) <= SAMPLE_SIZE: + return Sample( + contents=set(), + skipped_contents=set(), + directories=set(self._undecided_directories), + ) + sample = random.sample(self._undecided_directories, SAMPLE_SIZE) + directories = {o for o in sample} + return Sample( + contents=set(), skipped_contents=set(), directories=directories + ) + + contents = set() + skipped_contents = set() + + for sha1 in self.undecided: + obj = self._all_contents[sha1] + obj_type = obj.object_type + if obj_type == model.Content.object_type: + contents.add(sha1) + elif obj_type == model.SkippedContent.object_type: + skipped_contents.add(sha1) + else: + raise TypeError(f"Unexpected object type {obj_type}") + + return Sample( + contents=contents, skipped_contents=skipped_contents, directories=set() + ) + + +async def filter_known_objects(archive: ArchiveDiscoveryInterface): + """Filter ``archive``'s ``contents``, ``skipped_contents`` and ``directories`` + to only return those that are unknown to the SWH archive using a discovery + algorithm.""" + contents = archive.contents + skipped_contents = archive.skipped_contents + directories = archive.directories + + contents_count = len(contents) + skipped_contents_count = len(skipped_contents) + directories_count = len(directories) + + graph = RandomDirSamplingDiscoveryGraph(contents, skipped_contents, directories) + + while graph.undecided: + sample = await graph.get_sample() + await graph.do_query(archive, sample) + + contents = [c for c in contents if c.sha1_git in graph.unknown] + skipped_contents = [c for c in skipped_contents if c.sha1_git in graph.unknown] + directories = [c for c in directories if c.id in graph.unknown] + + logger.debug( + "Filtered out %d contents, %d skipped contents and %d directories", + contents_count - len(contents), + skipped_contents_count - len(skipped_contents), + directories_count - len(directories), + ) + + return (contents, skipped_contents, directories) diff --git a/swh/loader/core/loader.py b/swh/loader/core/loader.py index c787d2e..5311feb 100644 --- a/swh/loader/core/loader.py +++ b/swh/loader/core/loader.py @@ -1,642 +1,967 @@ # Copyright (C) 2015-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib import logging import os +from pathlib import Path +import tempfile import time from typing import Any, ContextManager, Dict, Iterable, List, Optional, Union +from urllib.parse import urlparse +from requests.exceptions import HTTPError import sentry_sdk from swh.core.config import load_from_envvar from swh.core.statsd import Statsd +from swh.core.tarball import uncompress from swh.loader.core.metadata_fetchers import CredentialsType, get_fetchers_for_lister -from swh.loader.exception import NotFound +from swh.loader.core.utils import nix_hashes +from swh.loader.exception import NotFound, UnsupportedChecksumComputation +from swh.loader.package.utils import download +from swh.model import from_disk from swh.model.model import ( BaseContent, Content, Directory, Origin, OriginVisit, OriginVisitStatus, RawExtrinsicMetadata, Release, Revision, 
Sha1Git, SkippedContent, Snapshot, + SnapshotBranch, + TargetType, ) from swh.storage import get_storage +from swh.storage.algos.snapshot import snapshot_get_latest from swh.storage.interface import StorageInterface from swh.storage.utils import now DEFAULT_CONFIG: Dict[str, Any] = { "max_content_size": 100 * 1024 * 1024, } SENTRY_ORIGIN_URL_TAG_NAME = "swh.loader.origin_url" SENTRY_VISIT_TYPE_TAG_NAME = "swh.loader.visit_type" class BaseLoader: """Base class for (D)VCS loaders (e.g Svn, Git, Mercurial, ...) or PackageLoader (e.g PyPI, Npm, CRAN, ...) A loader retrieves origin information (git/mercurial/svn repositories, pypi/npm/... package artifacts), ingests the contents/directories/revisions/releases/snapshot read from those artifacts and send them to the archive through the storage backend. The main entry point for the loader is the :func:`load` function. 2 static methods (:func:`from_config`, :func:`from_configfile`) centralizes and eases the loader instantiation from either configuration dict or configuration file. Some class examples: - :class:`SvnLoader` - :class:`GitLoader` - :class:`PyPILoader` - :class:`NpmLoader` Args: lister_name: Name of the lister which triggered this load. If provided, the loader will try to use the forge's API to retrieve extrinsic metadata lister_instance_name: Name of the lister instance which triggered this load. Must be None iff lister_name is, but it may be the empty string for listers with a single instance. """ visit_type: str origin: Origin loaded_snapshot_id: Optional[Sha1Git] parent_origins: Optional[List[Origin]] """If the given origin is a "forge fork" (ie. created with the "Fork" button of GitHub-like forges), :meth:`build_extrinsic_origin_metadata` sets this to a list of origins it was forked from; closest parent first.""" def __init__( self, storage: StorageInterface, origin_url: str, logging_class: Optional[str] = None, save_data_path: Optional[str] = None, max_content_size: Optional[int] = None, lister_name: Optional[str] = None, lister_instance_name: Optional[str] = None, metadata_fetcher_credentials: CredentialsType = None, ): if lister_name == "": raise ValueError("lister_name must not be the empty string") if lister_name is None and lister_instance_name is not None: raise ValueError( f"lister_name is None but lister_instance_name is {lister_instance_name!r}" ) if lister_name is not None and lister_instance_name is None: raise ValueError( f"lister_instance_name is None but lister_name is {lister_name!r}" ) self.storage = storage self.origin = Origin(url=origin_url) self.max_content_size = int(max_content_size) if max_content_size else None self.lister_name = lister_name self.lister_instance_name = lister_instance_name self.metadata_fetcher_credentials = metadata_fetcher_credentials or {} if logging_class is None: logging_class = "%s.%s" % ( self.__class__.__module__, self.__class__.__name__, ) self.log = logging.getLogger(logging_class) _log = logging.getLogger("requests.packages.urllib3.connectionpool") _log.setLevel(logging.WARN) sentry_sdk.set_tag(SENTRY_ORIGIN_URL_TAG_NAME, self.origin.url) sentry_sdk.set_tag(SENTRY_VISIT_TYPE_TAG_NAME, self.visit_type) # possibly overridden in self.prepare method self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) self.loaded_snapshot_id = None if save_data_path: path = save_data_path os.stat(path) if not os.access(path, os.R_OK | os.W_OK): raise PermissionError("Permission denied: %r" % path) self.save_data_path = save_data_path self.parent_origins = None self.statsd = Statsd( 
namespace="swh_loader", constant_tags={"visit_type": self.visit_type} ) @classmethod def from_config(cls, storage: Dict[str, Any], **config: Any): """Instantiate a loader from a configuration dict. This is basically a backwards-compatibility shim for the CLI. Args: storage: instantiation config for the storage config: the configuration dict for the loader, with the following keys: - credentials (optional): credentials list for the scheduler - any other kwargs passed to the loader. Returns: the instantiated loader """ # Drop the legacy config keys which aren't used for this generation of loader. for legacy_key in ("storage", "celery"): config.pop(legacy_key, None) # Instantiate the storage storage_instance = get_storage(**storage) return cls(storage=storage_instance, **config) @classmethod def from_configfile(cls, **kwargs: Any): """Instantiate a loader from the configuration loaded from the SWH_CONFIG_FILENAME envvar, with potential extra keyword arguments if their value is not None. Args: kwargs: kwargs passed to the loader instantiation """ config = dict(load_from_envvar(DEFAULT_CONFIG)) config.update({k: v for k, v in kwargs.items() if v is not None}) return cls.from_config(**config) def save_data(self) -> None: """Save the data associated to the current load""" raise NotImplementedError def get_save_data_path(self) -> str: """The path to which we archive the loader's raw data""" if not hasattr(self, "__save_data_path"): year = str(self.visit_date.year) assert self.origin url = self.origin.url.encode("utf-8") origin_url_hash = hashlib.sha1(url).hexdigest() path = "%s/sha1:%s/%s/%s" % ( self.save_data_path, origin_url_hash[0:2], origin_url_hash, year, ) os.makedirs(path, exist_ok=True) self.__save_data_path = path return self.__save_data_path def flush(self) -> Dict[str, int]: """Flush any potential buffered data not sent to swh-storage. Returns the same value as :meth:`swh.storage.interface.StorageInterface.flush`. """ return self.storage.flush() def cleanup(self) -> None: """Last step executed by the loader.""" raise NotImplementedError def _store_origin_visit(self) -> None: """Store origin and visit references. Sets the self.visit references.""" assert self.origin self.storage.origin_add([self.origin]) assert isinstance(self.visit_type, str) self.visit = list( self.storage.origin_visit_add( [ OriginVisit( origin=self.origin.url, date=self.visit_date, type=self.visit_type, ) ] ) )[0] def prepare(self) -> None: """Second step executed by the loader to prepare some state needed by the loader. Raises NotFound exception if the origin to ingest is not found. """ raise NotImplementedError def get_origin(self) -> Origin: """Get the origin that is currently being loaded. self.origin should be set in :func:`prepare_origin` Returns: dict: an origin ready to be sent to storage by :func:`origin_add`. """ assert self.origin return self.origin def fetch_data(self) -> bool: """Fetch the data from the source the loader is currently loading (ex: git/hg/svn/... repository). Returns: a value that is interpreted as a boolean. If True, fetch_data needs to be called again to complete loading. """ raise NotImplementedError def process_data(self) -> bool: """Run any additional processing between fetching and storing the data Returns: a value that is interpreted as a boolean. If True, fetch_data needs to be called again to complete loading. Ignored if ``fetch_data`` already returned :const:`False`. """ return True - def store_data(self): + def store_data(self) -> None: """Store fetched data in the database. 
Should call the :func:`maybe_load_xyz` methods, which handle the bundles sent to storage, rather than send directly. """ raise NotImplementedError def load_status(self) -> Dict[str, str]: """Detailed loading status. Defaults to logging an eventful load. Returns: a dictionary that is eventually passed back as the task's result to the scheduler, allowing tuning of the task recurrence mechanism. """ return { "status": "eventful", } def post_load(self, success: bool = True) -> None: """Permit the loader to do some additional actions according to status after the loading is done. The flag success indicates the loading's status. Defaults to doing nothing. This is up to the implementer of this method to make sure this does not break. Args: success (bool): the success status of the loading """ pass def visit_status(self) -> str: """Detailed visit status. Defaults to logging a full visit. """ return "full" def pre_cleanup(self) -> None: """As a first step, will try and check for dangling data to cleanup. This should do its best to avoid raising issues. """ pass def load(self) -> Dict[str, str]: r"""Loading logic for the loader to follow: - Store the actual ``origin_visit`` to storage - Call :meth:`prepare` to prepare any eventual state - Call :meth:`get_origin` to get the origin we work with and store - while True: - Call :meth:`fetch_data` to fetch the data to store - Call :meth:`process_data` to optionally run processing between :meth:`fetch_data` and :meth:`store_data` - Call :meth:`store_data` to store the data - Call :meth:`cleanup` to clean up any eventual state put in place in :meth:`prepare` method. """ try: with self.statsd_timed("pre_cleanup"): self.pre_cleanup() except Exception: msg = "Cleaning up dangling data failed! Continue loading." self.log.warning(msg) sentry_sdk.capture_exception() self._store_origin_visit() assert ( self.visit.visit ), "The method `_store_origin_visit` should set the visit (OriginVisit)" self.log.info( "Load origin '%s' with type '%s'", self.origin.url, self.visit.type ) try: with self.statsd_timed("build_extrinsic_origin_metadata"): metadata = self.build_extrinsic_origin_metadata() self.load_metadata_objects(metadata) except Exception as e: sentry_sdk.capture_exception(e) # Do not fail the whole task if this is the only failure self.log.exception( "Failure while loading extrinsic origin metadata.", extra={ "swh_task_args": [], "swh_task_kwargs": { "origin": self.origin.url, "lister_name": self.lister_name, "lister_instance_name": self.lister_instance_name, }, }, ) total_time_fetch_data = 0.0 total_time_process_data = 0.0 total_time_store_data = 0.0 # Initially not a success, will be True when actually one status = "failed" success = False try: with self.statsd_timed("prepare"): self.prepare() while True: t1 = time.monotonic() more_data_to_fetch = self.fetch_data() t2 = time.monotonic() total_time_fetch_data += t2 - t1 more_data_to_fetch = self.process_data() and more_data_to_fetch t3 = time.monotonic() total_time_process_data += t3 - t2 self.store_data() t4 = time.monotonic() total_time_store_data += t4 - t3 if not more_data_to_fetch: break self.statsd_timing("fetch_data", total_time_fetch_data * 1000.0) self.statsd_timing("process_data", total_time_process_data * 1000.0) self.statsd_timing("store_data", total_time_store_data * 1000.0) status = self.visit_status() visit_status = OriginVisitStatus( origin=self.origin.url, visit=self.visit.visit, type=self.visit_type, date=now(), status=status, snapshot=self.loaded_snapshot_id, ) 
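# --- Illustrative sketch (not part of this diff) ----------------------------
# A minimal, hypothetical BaseLoader subclass showing the contract that the
# load() orchestration above drives: prepare() sets up state, fetch_data()
# returns True while more data remains to be fetched, process_data() may run
# between fetching and storing, store_data() writes objects to storage, and
# cleanup() tears everything down. Everything below except BaseLoader and the
# swh.* imports (already available in this module) is made up for illustration.

from swh.model.model import Snapshot
from swh.storage.interface import StorageInterface


class SingleShotLoader(BaseLoader):
    """Hypothetical loader that ingests everything in a single iteration."""

    visit_type = "example"

    def prepare(self) -> None:
        pass  # nothing to set up for this toy example

    def fetch_data(self) -> bool:
        return False  # False: no further fetch_data() call is needed

    def store_data(self) -> None:
        # A real loader would add contents/directories/revisions/releases,
        # then a snapshot, recording its id so the visit status references it.
        snapshot = Snapshot(branches={})
        self.storage.snapshot_add([snapshot])
        self.loaded_snapshot_id = snapshot.id

    def cleanup(self) -> None:
        pass


def run_example(storage: StorageInterface) -> dict:
    # Hypothetical usage; the origin URL is made up.
    return SingleShotLoader(storage, origin_url="https://example.org/pkg").load()
# -----------------------------------------------------------------------------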
self.storage.origin_visit_status_add([visit_status]) success = True with self.statsd_timed( "post_load", tags={"success": success, "status": status} ): self.post_load() except BaseException as e: success = False if isinstance(e, NotFound): status = "not_found" task_status = "uneventful" else: status = "partial" if self.loaded_snapshot_id else "failed" task_status = "failed" self.log.exception( "Loading failure, updating to `%s` status", status, extra={ "swh_task_args": [], "swh_task_kwargs": { "origin": self.origin.url, "lister_name": self.lister_name, "lister_instance_name": self.lister_instance_name, }, }, ) if not isinstance(e, (SystemExit, KeyboardInterrupt)): sentry_sdk.capture_exception() visit_status = OriginVisitStatus( origin=self.origin.url, visit=self.visit.visit, type=self.visit_type, date=now(), status=status, snapshot=self.loaded_snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) with self.statsd_timed( "post_load", tags={"success": success, "status": status} ): self.post_load(success=success) if not isinstance(e, Exception): # e derives from BaseException but not Exception; this is most likely # SystemExit or KeyboardInterrupt, so we should re-raise it. raise return {"status": task_status} finally: with self.statsd_timed( "flush", tags={"success": success, "status": status} ): self.flush() with self.statsd_timed( "cleanup", tags={"success": success, "status": status} ): self.cleanup() return self.load_status() def load_metadata_objects( self, metadata_objects: List[RawExtrinsicMetadata] ) -> None: if not metadata_objects: return authorities = {mo.authority for mo in metadata_objects} self.storage.metadata_authority_add(list(authorities)) fetchers = {mo.fetcher for mo in metadata_objects} self.storage.metadata_fetcher_add(list(fetchers)) self.storage.raw_extrinsic_metadata_add(metadata_objects) def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using a metadata fetcher returned by :func:`get_fetcher_classes`.""" if self.lister_name is None: self.log.debug("lister_not provided, skipping extrinsic origin metadata") return [] assert ( self.lister_instance_name is not None ), "lister_instance_name is None, but lister_name is not" metadata = [] fetcher_classes = get_fetchers_for_lister(self.lister_name) self.statsd_average("metadata_fetchers", len(fetcher_classes)) for cls in fetcher_classes: metadata_fetcher = cls( origin=self.origin, lister_name=self.lister_name, lister_instance_name=self.lister_instance_name, credentials=self.metadata_fetcher_credentials, ) with self.statsd_timed( "fetch_one_metadata", tags={"fetcher": cls.FETCHER_NAME} ): metadata.extend(metadata_fetcher.get_origin_metadata()) if self.parent_origins is None: self.parent_origins = metadata_fetcher.get_parent_origins() self.statsd_average( "metadata_parent_origins", len(self.parent_origins), tags={"fetcher": cls.FETCHER_NAME}, ) self.statsd_average("metadata_objects", len(metadata)) return metadata def statsd_timed(self, name: str, tags: Dict[str, Any] = {}) -> ContextManager: """ Wrapper for :meth:`swh.core.statsd.Statsd.timed`, which uses the standard metric name and tags for loaders. """ return self.statsd.timed( "operation_duration_seconds", tags={"operation": name, **tags} ) def statsd_timing(self, name: str, value: float, tags: Dict[str, Any] = {}) -> None: """ Wrapper for :meth:`swh.core.statsd.Statsd.timing`, which uses the standard metric name and tags for loaders. 
""" self.statsd.timing( "operation_duration_seconds", value, tags={"operation": name, **tags} ) def statsd_average( self, name: str, value: Union[int, float], tags: Dict[str, Any] = {} ) -> None: """Increments both ``{name}_sum`` (by the ``value``) and ``{name}_count`` (by ``1``), allowing to prometheus to compute the average ``value`` over time.""" self.statsd.increment(f"{name}_sum", value, tags=tags) self.statsd.increment(f"{name}_count", tags=tags) class DVCSLoader(BaseLoader): """This base class is a pattern for dvcs loaders (e.g. git, mercurial). Those loaders are able to load all the data in one go. For example, the loader defined in swh-loader-git :class:`BulkUpdater`. For other loaders (stateful one, (e.g :class:`SWHSvnLoader`), inherit directly from :class:`BaseLoader`. """ def cleanup(self) -> None: """Clean up an eventual state installed for computations.""" pass def has_contents(self) -> bool: """Checks whether we need to load contents""" return True def get_contents(self) -> Iterable[BaseContent]: """Get the contents that need to be loaded""" raise NotImplementedError def has_directories(self) -> bool: """Checks whether we need to load directories""" return True def get_directories(self) -> Iterable[Directory]: """Get the directories that need to be loaded""" raise NotImplementedError def has_revisions(self) -> bool: """Checks whether we need to load revisions""" return True def get_revisions(self) -> Iterable[Revision]: """Get the revisions that need to be loaded""" raise NotImplementedError def has_releases(self) -> bool: """Checks whether we need to load releases""" return True def get_releases(self) -> Iterable[Release]: """Get the releases that need to be loaded""" raise NotImplementedError def get_snapshot(self) -> Snapshot: """Get the snapshot that needs to be loaded""" raise NotImplementedError def eventful(self) -> bool: """Whether the load was eventful""" raise NotImplementedError def store_data(self) -> None: assert self.origin if self.save_data_path: self.save_data() if self.has_contents(): for obj in self.get_contents(): if isinstance(obj, Content): self.storage.content_add([obj]) elif isinstance(obj, SkippedContent): self.storage.skipped_content_add([obj]) else: raise TypeError(f"Unexpected content type: {obj}") if self.has_directories(): for directory in self.get_directories(): self.storage.directory_add([directory]) if self.has_revisions(): for revision in self.get_revisions(): self.storage.revision_add([revision]) if self.has_releases(): for release in self.get_releases(): self.storage.release_add([release]) snapshot = self.get_snapshot() self.storage.snapshot_add([snapshot]) self.flush() self.loaded_snapshot_id = snapshot.id + + +class NodeLoader(BaseLoader): + """Common class for :class:`ContentLoader` and :class:`Directoryloader`. + + The "checksums" field is a dictionary of hex hashes on the object retrieved (content + or directory). When "checksums_computation" is "standard", that means the checksums + are computed on the content of the remote file to retrieve itself (as unix cli + allows, "sha1sum", "sha256sum", ...). When "checksums_computation" is "nar", the + checks is delegated to the `nix-store --dump` command, it's actually checksums on + the content of the remote artifact retrieved. Other "checksums_computation" will + raise UnsupportedChecksumComputation + + The multiple "fallback" urls received are mirror urls only used to fetch the object + if the main origin is no longer available. Those are not stored. 
+ + Ingestion is considered eventful on the first ingestion. Subsequent load of the same + object should end up being an uneventful visit (matching snapshot). + + """ + + def __init__( + self, + storage: StorageInterface, + url: str, + checksums: Dict[str, str], + checksums_computation: str = "standard", + fallback_urls: List[str] = None, + **kwargs, + ): + super().__init__(storage, url, **kwargs) + self.snapshot: Optional[Snapshot] = None + self.checksums = checksums + self.checksums_computation = checksums_computation + if self.checksums_computation not in ("nar", "standard"): + raise UnsupportedChecksumComputation( + "Unsupported checksums computations: %s", + self.checksums_computation, + ) + + fallback_urls_ = fallback_urls or [] + self.mirror_urls: List[str] = [self.origin.url, *fallback_urls_] + # Ensure content received matched the "standard" checksums received, this + # contains the checksums when checksum_computations is "standard", it's empty + # otherwise + self.standard_hashes = ( + self.checksums if self.checksums_computation == "standard" else {} + ) + self.log.debug("Loader checksums computation: %s", self.checksums_computation) + + def prepare(self) -> None: + self.last_snapshot = snapshot_get_latest(self.storage, self.origin.url) + + def load_status(self) -> Dict[str, Any]: + return { + "status": "uneventful" + if self.last_snapshot == self.snapshot + else "eventful" + } + + def cleanup(self) -> None: + self.log.debug("cleanup") + + +class ContentLoader(NodeLoader): + """Basic loader for edge case content ingestion. + + The output snapshot is of the form: + + .. code:: + + id: + branches: + HEAD: + target_type: content + target: + + """ + + visit_type = "content" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.content: Optional[Content] = None + + def fetch_data(self) -> bool: + """Retrieve the content file as a Content Object""" + errors = [] + for url in self.mirror_urls: + url_ = urlparse(url) + self.log.debug( + "prepare; origin_url=%s fallback=%s scheme=%s path=%s", + self.origin.url, + url, + url_.scheme, + url_.path, + ) + try: + # FIXME: Ensure no "nar" computations is required for file + with tempfile.TemporaryDirectory() as tmpdir: + file_path, _ = download( + url, dest=tmpdir, hashes=self.standard_hashes + ) + if self.checksums_computation == "nar": + # hashes are not "standard", so we need an extra check to happen + self.log.debug("Content to check nar hashes: %s", file_path) + actual_checksums = nix_hashes( + Path(file_path), self.checksums.keys() + ).hexdigest() + + if actual_checksums != self.checksums: + errors.append( + ValueError( + f"Checksum mismatched on <{url}>: " + f"{actual_checksums} != {self.checksums}" + ) + ) + self.log.debug( + "Mismatched checksums <%s>: continue on next mirror " + "url if any", + url, + ) + continue + + with open(file_path, "rb") as file: + self.content = Content.from_data(file.read()) + except ValueError as e: + errors.append(e) + self.log.debug( + "Mismatched checksums <%s>: continue on next mirror url if any", + url, + ) + continue + except HTTPError as http_error: + if http_error.response.status_code == 404: + self.log.debug( + "Not found '%s', continue on next mirror url if any", url + ) + continue + else: + return False # no more data to fetch + + if errors: + raise errors[0] + + # If we reach this point, we did not find any proper content, consider the + # origin not found + raise NotFound(f"Unknown origin {self.origin.url}.") + + def process_data(self) -> bool: + """Build the 
snapshot out of the Content retrieved.""" + + assert self.content is not None + self.snapshot = Snapshot( + branches={ + b"HEAD": SnapshotBranch( + target=self.content.sha1_git, + target_type=TargetType.CONTENT, + ), + } + ) + + return False # no more data to process + + def store_data(self) -> None: + """Store newly retrieved Content and Snapshot.""" + assert self.content is not None + self.storage.content_add([self.content]) + assert self.snapshot is not None + self.storage.snapshot_add([self.snapshot]) + self.loaded_snapshot_id = self.snapshot.id + + def visit_status(self): + return "full" if self.content and self.snapshot is not None else "partial" + + +class DirectoryLoader(NodeLoader): + """Basic loader for edge case directory ingestion (through one tarball). + + The output snapshot is of the form: + + .. code:: + + id: + branches: + HEAD: + target_type: directory + target: + + """ + + visit_type = "directory" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.directory: Optional[from_disk.Directory] = None + self.cnts: List[Content] = None + self.skipped_cnts: List[SkippedContent] = None + self.dirs: List[Directory] = None + + def fetch_data(self) -> bool: + """Fetch directory as a tarball amongst the self.mirror_urls. + + Raises NotFound if no tarball is found + + """ + errors = [] + for url in self.mirror_urls: + url_ = urlparse(url) + self.log.debug( + "prepare; origin_url=%s fallback=%s scheme=%s path=%s", + self.origin.url, + url, + url_.scheme, + url_.path, + ) + with tempfile.TemporaryDirectory() as tmpdir: + try: + tarball_path, extrinsic_metadata = download( + url, + tmpdir, + hashes=self.standard_hashes, + extra_request_headers={"Accept-Encoding": "identity"}, + ) + except ValueError as e: + errors.append(e) + self.log.debug( + "Mismatched checksums <%s>: continue on next mirror url if any", + url, + ) + continue + except HTTPError as http_error: + if http_error.response.status_code == 404: + self.log.debug( + "Not found <%s>: continue on next mirror url if any", url + ) + continue + + directory_path = Path(tmpdir) / "src" + directory_path.mkdir(parents=True, exist_ok=True) + uncompress(tarball_path, dest=str(directory_path)) + self.log.debug("uncompressed path to directory: %s", directory_path) + + if self.checksums_computation == "nar": + # hashes are not "standard", so we need an extra check to happen + # on the uncompressed tarball + dir_to_check = next(directory_path.iterdir()) + self.log.debug("Directory to check nar hashes: %s", dir_to_check) + actual_checksums = nix_hashes( + dir_to_check, self.checksums.keys() + ).hexdigest() + + if actual_checksums != self.checksums: + errors.append( + ValueError( + f"Checksum mismatched on <{url}>: " + f"{actual_checksums} != {self.checksums}" + ) + ) + self.log.debug( + "Mismatched checksums <%s>: continue on next mirror url if any", + url, + ) + continue + + self.directory = from_disk.Directory.from_disk( + path=bytes(directory_path), + max_content_length=self.max_content_size, + ) + # Compute the merkle dag from the top-level directory + self.cnts, self.skipped_cnts, self.dirs = from_disk.iter_directory( + self.directory + ) + + if self.directory is not None: + return False # no more data to fetch + + if errors: + raise errors[0] + + # if we reach here, we did not find any proper tarball, so consider the origin + # not found + raise NotFound(f"Unknown origin {self.origin.url}.") + + def process_data(self) -> bool: + """Build the snapshot out of the Directory retrieved.""" + + assert 
self.directory is not None + # Build the snapshot + self.snapshot = Snapshot( + branches={ + b"HEAD": SnapshotBranch( + target=self.directory.hash, + target_type=TargetType.DIRECTORY, + ), + } + ) + + return False # no more data to process + + def store_data(self) -> None: + """Store newly retrieved Content and Snapshot.""" + self.log.debug("Number of skipped contents: %s", len(self.skipped_cnts)) + self.storage.skipped_content_add(self.skipped_cnts) + self.log.debug("Number of contents: %s", len(self.cnts)) + self.storage.content_add(self.cnts) + self.log.debug("Number of directories: %s", len(self.dirs)) + self.storage.directory_add(self.dirs) + assert self.snapshot is not None + self.storage.snapshot_add([self.snapshot]) + self.loaded_snapshot_id = self.snapshot.id + + def visit_status(self): + return "full" if self.directory and self.snapshot is not None else "partial" diff --git a/swh/loader/core/tasks.py b/swh/loader/core/tasks.py new file mode 100644 index 0000000..c221903 --- /dev/null +++ b/swh/loader/core/tasks.py @@ -0,0 +1,20 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from celery import shared_task + +from swh.loader.core.loader import ContentLoader, DirectoryLoader + + +@shared_task(name=__name__ + ".LoadContent") +def load_content(**kwargs): + """Load Content package""" + return ContentLoader.from_configfile(**kwargs).load() + + +@shared_task(name=__name__ + ".LoadDirectory") +def load_directory(**kwargs): + """Load Content package""" + return DirectoryLoader.from_configfile(**kwargs).load() diff --git a/swh/loader/core/tests/conftest.py b/swh/loader/core/tests/conftest.py new file mode 100644 index 0000000..6e4d862 --- /dev/null +++ b/swh/loader/core/tests/conftest.py @@ -0,0 +1,64 @@ +# Copyright (C) 2018-2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from os import path +import shutil +from typing import Dict, List + +import pytest + +from swh.loader.core.utils import compute_nar_hashes +from swh.model.hashutil import MultiHash + +nix_store_missing = shutil.which("nix-store") is None + + +@pytest.fixture +def tarball_path(datadir): + """Return tarball filepath fetched by DirectoryLoader test runs.""" + return path.join(datadir, "https_example.org", "archives_dummy-hello.tar.gz") + + +@pytest.fixture +def content_path(datadir): + """Return filepath fetched by ContentLoader test runs.""" + return path.join( + datadir, "https_common-lisp.net", "project_asdf_archives_asdf-3.3.5.lisp" + ) + + +def compute_hashes(filepath: str, hash_names: List[str] = ["sha256"]) -> Dict[str, str]: + """Compute checksums dict out of a filepath""" + return MultiHash.from_path(filepath, hash_names=hash_names).hexdigest() + + +@pytest.fixture +def tarball_with_std_hashes(tarball_path): + return ( + tarball_path, + compute_hashes(tarball_path, ["sha1", "sha256", "sha512"]), + ) + + +@pytest.fixture +def tarball_with_nar_hashes(tarball_path): + nar_hashes = compute_nar_hashes(tarball_path, ["sha256"]) + # Ensure it's the same hash as the initial one computed from the cli + assert ( + nar_hashes["sha256"] + == "23fb1fe278aeb2de899f7d7f10cf892f63136cea2c07146da2200da4de54b7e4" + ) + return (tarball_path, 
nar_hashes) + + +@pytest.fixture +def content_with_nar_hashes(content_path): + nar_hashes = compute_nar_hashes(content_path, ["sha256"], is_tarball=False) + # Ensure it's the same hash as the initial one computed from the cli + assert ( + nar_hashes["sha256"] + == "0b555a4d13e530460425d1dc20332294f151067fb64a7e49c7de501f05b0a41a" + ) + return (content_path, nar_hashes) diff --git a/swh/loader/core/tests/data/https_common-lisp.net/project_asdf_archives_asdf-3.3.5.lisp b/swh/loader/core/tests/data/https_common-lisp.net/project_asdf_archives_asdf-3.3.5.lisp new file mode 100644 index 0000000..911b9db --- /dev/null +++ b/swh/loader/core/tests/data/https_common-lisp.net/project_asdf_archives_asdf-3.3.5.lisp @@ -0,0 +1 @@ +(print "hello-world") diff --git a/swh/loader/core/tests/data/https_example.org/archives_dummy-hello.tar.gz b/swh/loader/core/tests/data/https_example.org/archives_dummy-hello.tar.gz new file mode 100644 index 0000000..04aa405 Binary files /dev/null and b/swh/loader/core/tests/data/https_example.org/archives_dummy-hello.tar.gz differ diff --git a/swh/loader/core/tests/test_loader.py b/swh/loader/core/tests/test_loader.py index 6633460..bbbda83 100644 --- a/swh/loader/core/tests/test_loader.py +++ b/swh/loader/core/tests/test_loader.py @@ -1,505 +1,827 @@ -# Copyright (C) 2018-2021 The Software Heritage developers +# Copyright (C) 2018-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime +from functools import partial import hashlib import logging import time from unittest.mock import MagicMock, call import pytest from swh.loader.core.loader import ( SENTRY_ORIGIN_URL_TAG_NAME, SENTRY_VISIT_TYPE_TAG_NAME, BaseLoader, + ContentLoader, + DirectoryLoader, DVCSLoader, ) from swh.loader.core.metadata_fetchers import MetadataFetcherProtocol -from swh.loader.exception import NotFound +from swh.loader.exception import NotFound, UnsupportedChecksumComputation from swh.loader.tests import assert_last_visit_matches from swh.model.hashutil import hash_to_bytes from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, MetadataFetcher, Origin, RawExtrinsicMetadata, Snapshot, ) import swh.storage.exc +from .conftest import compute_hashes, compute_nar_hashes, nix_store_missing + ORIGIN = Origin(url="some-url") PARENT_ORIGIN = Origin(url="base-origin-url") METADATA_AUTHORITY = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="http://example.org/" ) REMD = RawExtrinsicMetadata( target=ORIGIN.swhid(), discovery_date=datetime.datetime.now(tz=datetime.timezone.utc), authority=METADATA_AUTHORITY, fetcher=MetadataFetcher( name="test fetcher", version="0.0.1", ), format="test-format", metadata=b'{"foo": "bar"}', ) class DummyLoader: """Base Loader to overload and simplify the base class (technical: to avoid repetition in other *Loader classes)""" visit_type = "git" def __init__(self, storage, *args, **kwargs): super().__init__(storage, ORIGIN.url, *args, **kwargs) def cleanup(self): pass def prepare(self, *args, **kwargs): pass def fetch_data(self): pass def get_snapshot_id(self): return None class DummyDVCSLoader(DummyLoader, DVCSLoader): """DVCS Loader that does nothing in regards to DAG objects.""" def get_contents(self): return [] def get_directories(self): return [] def get_revisions(self): return [] def get_releases(self): return [] def get_snapshot(self): return Snapshot(branches={}) 
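# --- Illustrative note (not part of this diff) -------------------------------
# The celery tasks added above in swh/loader/core/tasks.py only forward their
# keyword arguments to the new loaders through from_configfile(). A
# hypothetical scheduler payload for a "content" visit could therefore look
# like the following (URL, mirror URL and checksum value are made up):
#
#     from swh.loader.core.tasks import load_content
#
#     load_content(
#         url="https://example.org/project/artifact-0.0.1.lisp",
#         checksums={"sha256": "<expected sha256 hex digest>"},
#         fallback_urls=["https://mirror.example.org/artifact-0.0.1.lisp"],
#     )
#
# which is equivalent to ContentLoader.from_configfile(**kwargs).load(), with
# the storage configuration read from the SWH_CONFIG_FILENAME file.
# ------------------------------------------------------------------------------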
def eventful(self): return False class DummyBaseLoader(DummyLoader, BaseLoader): """Buffered loader will send new data when threshold is reached""" def store_data(self): pass class DummyMetadataFetcher: SUPPORTED_LISTERS = {"fake-forge"} FETCHER_NAME = "fake-forge" def __init__(self, origin, credentials, lister_name, lister_instance_name): pass def get_origin_metadata(self): return [REMD] def get_parent_origins(self): return [] class DummyMetadataFetcherWithFork: SUPPORTED_LISTERS = {"fake-forge"} FETCHER_NAME = "fake-forge" def __init__(self, origin, credentials, lister_name, lister_instance_name): pass def get_origin_metadata(self): return [REMD] def get_parent_origins(self): return [PARENT_ORIGIN] def test_types(): assert isinstance( DummyMetadataFetcher(None, None, None, None), MetadataFetcherProtocol ) assert isinstance( DummyMetadataFetcherWithFork(None, None, None, None), MetadataFetcherProtocol ) def test_base_loader(swh_storage): loader = DummyBaseLoader(swh_storage) result = loader.load() assert result == {"status": "eventful"} def test_base_loader_with_config(swh_storage): loader = DummyBaseLoader(swh_storage, "logger-name") result = loader.load() assert result == {"status": "eventful"} def test_base_loader_with_known_lister_name(swh_storage, mocker): fetcher_cls = MagicMock(wraps=DummyMetadataFetcher) fetcher_cls.SUPPORTED_LISTERS = DummyMetadataFetcher.SUPPORTED_LISTERS fetcher_cls.FETCHER_NAME = "fake-forge" mocker.patch( "swh.loader.core.metadata_fetchers._fetchers", return_value=[fetcher_cls] ) loader = DummyBaseLoader( swh_storage, lister_name="fake-forge", lister_instance_name="" ) statsd_report = mocker.patch.object(loader.statsd, "_report") result = loader.load() assert result == {"status": "eventful"} fetcher_cls.assert_called_once() fetcher_cls.assert_called_once_with( origin=ORIGIN, credentials={}, lister_name="fake-forge", lister_instance_name="", ) assert swh_storage.raw_extrinsic_metadata_get( ORIGIN.swhid(), METADATA_AUTHORITY ).results == [REMD] assert loader.parent_origins == [] assert [ call("metadata_fetchers_sum", "c", 1, {}, 1), call("metadata_fetchers_count", "c", 1, {}, 1), call("metadata_parent_origins_sum", "c", 0, {"fetcher": "fake-forge"}, 1), call("metadata_parent_origins_count", "c", 1, {"fetcher": "fake-forge"}, 1), call("metadata_objects_sum", "c", 1, {}, 1), call("metadata_objects_count", "c", 1, {}, 1), ] == [c for c in statsd_report.mock_calls if "metadata_" in c[1][0]] assert loader.statsd.namespace == "swh_loader" assert loader.statsd.constant_tags == {"visit_type": "git"} def test_base_loader_with_unknown_lister_name(swh_storage, mocker): fetcher_cls = MagicMock(wraps=DummyMetadataFetcher) fetcher_cls.SUPPORTED_LISTERS = DummyMetadataFetcher.SUPPORTED_LISTERS mocker.patch( "swh.loader.core.metadata_fetchers._fetchers", return_value=[fetcher_cls] ) loader = DummyBaseLoader( swh_storage, lister_name="other-lister", lister_instance_name="" ) result = loader.load() assert result == {"status": "eventful"} fetcher_cls.assert_not_called() with pytest.raises(swh.storage.exc.StorageArgumentException): swh_storage.raw_extrinsic_metadata_get(ORIGIN.swhid(), METADATA_AUTHORITY) def test_base_loader_forked_origin(swh_storage, mocker): fetcher_cls = MagicMock(wraps=DummyMetadataFetcherWithFork) fetcher_cls.SUPPORTED_LISTERS = DummyMetadataFetcherWithFork.SUPPORTED_LISTERS fetcher_cls.FETCHER_NAME = "fake-forge" mocker.patch( "swh.loader.core.metadata_fetchers._fetchers", return_value=[fetcher_cls] ) loader = DummyBaseLoader( swh_storage, 
lister_name="fake-forge", lister_instance_name="" ) statsd_report = mocker.patch.object(loader.statsd, "_report") result = loader.load() assert result == {"status": "eventful"} fetcher_cls.assert_called_once() fetcher_cls.assert_called_once_with( origin=ORIGIN, credentials={}, lister_name="fake-forge", lister_instance_name="", ) assert swh_storage.raw_extrinsic_metadata_get( ORIGIN.swhid(), METADATA_AUTHORITY ).results == [REMD] assert loader.parent_origins == [PARENT_ORIGIN] assert [ call("metadata_fetchers_sum", "c", 1, {}, 1), call("metadata_fetchers_count", "c", 1, {}, 1), call("metadata_parent_origins_sum", "c", 1, {"fetcher": "fake-forge"}, 1), call("metadata_parent_origins_count", "c", 1, {"fetcher": "fake-forge"}, 1), call("metadata_objects_sum", "c", 1, {}, 1), call("metadata_objects_count", "c", 1, {}, 1), ] == [c for c in statsd_report.mock_calls if "metadata_" in c[1][0]] assert loader.statsd.namespace == "swh_loader" assert loader.statsd.constant_tags == {"visit_type": "git"} def test_base_loader_post_load_raise(swh_storage, mocker): loader = DummyBaseLoader(swh_storage) post_load = mocker.patch.object(loader, "post_load") # raise exception in post_load when success is True def post_load_method(*args, success=True): if success: raise Exception("Error in post_load") post_load.side_effect = post_load_method result = loader.load() assert result == {"status": "failed"} # ensure post_load has been called twice, once with success to True and # once with success to False as the first post_load call raised exception assert post_load.call_args_list == [mocker.call(), mocker.call(success=False)] def test_dvcs_loader(swh_storage): loader = DummyDVCSLoader(swh_storage) result = loader.load() assert result == {"status": "eventful"} def test_dvcs_loader_with_config(swh_storage): loader = DummyDVCSLoader(swh_storage, "another-logger") result = loader.load() assert result == {"status": "eventful"} def test_loader_logger_default_name(swh_storage): loader = DummyBaseLoader(swh_storage) assert isinstance(loader.log, logging.Logger) assert loader.log.name == "swh.loader.core.tests.test_loader.DummyBaseLoader" loader = DummyDVCSLoader(swh_storage) assert isinstance(loader.log, logging.Logger) assert loader.log.name == "swh.loader.core.tests.test_loader.DummyDVCSLoader" def test_loader_logger_with_name(swh_storage): loader = DummyBaseLoader(swh_storage, "some.logger.name") assert isinstance(loader.log, logging.Logger) assert loader.log.name == "some.logger.name" def test_loader_save_data_path(swh_storage, tmp_path): loader = DummyBaseLoader(swh_storage, "some.logger.name.1", save_data_path=tmp_path) url = "http://bitbucket.org/something" loader.origin = Origin(url=url) loader.visit_date = datetime.datetime(year=2019, month=10, day=1) hash_url = hashlib.sha1(url.encode("utf-8")).hexdigest() expected_save_path = "%s/sha1:%s/%s/2019" % (str(tmp_path), hash_url[0:2], hash_url) save_path = loader.get_save_data_path() assert save_path == expected_save_path -def _check_load_failure(caplog, loader, exc_class, exc_text, status="partial"): +def _check_load_failure( + caplog, loader, exc_class, exc_text, status="partial", origin=ORIGIN +): """Check whether a failed load properly logged its exception, and that the snapshot didn't get referenced in storage""" - assert isinstance(loader, DVCSLoader) # was implicit so far + assert isinstance(loader, (DVCSLoader, ContentLoader, DirectoryLoader)) for record in caplog.records: if record.levelname != "ERROR": continue assert "Loading failure" in record.message assert 
record.exc_info exc = record.exc_info[1] assert isinstance(exc, exc_class) assert exc_text in exc.args[0] - # Check that the get_snapshot operation would have succeeded - assert loader.get_snapshot() is not None + if isinstance(loader, DVCSLoader): + # Check that the get_snapshot operation would have succeeded + assert loader.get_snapshot() is not None # And confirm that the visit doesn't reference a snapshot - visit = assert_last_visit_matches(loader.storage, ORIGIN.url, status) + visit = assert_last_visit_matches(loader.storage, origin.url, status) if status != "partial": assert visit.snapshot is None # But that the snapshot didn't get loaded assert loader.loaded_snapshot_id is None @pytest.mark.parametrize("success", [True, False]) def test_loader_timings(swh_storage, mocker, success): current_time = time.time() mocker.patch("time.monotonic", side_effect=lambda: current_time) mocker.patch("swh.core.statsd.monotonic", side_effect=lambda: current_time) runtimes = { "pre_cleanup": 2.0, "build_extrinsic_origin_metadata": 3.0, "prepare": 5.0, "fetch_data": 7.0, "process_data": 11.0, "store_data": 13.0, "post_load": 17.0, "flush": 23.0, "cleanup": 27.0, } class TimedLoader(BaseLoader): visit_type = "my-visit-type" def __getattribute__(self, method_name): if method_name == "visit_status" and not success: def crashy(): raise Exception("oh no") return crashy if method_name not in runtimes: return super().__getattribute__(method_name) def meth(*args, **kwargs): nonlocal current_time current_time += runtimes[method_name] return meth loader = TimedLoader(swh_storage, origin_url="http://example.org/hello.git") statsd_report = mocker.patch.object(loader.statsd, "_report") loader.load() if success: expected_tags = { "post_load": {"success": True, "status": "full"}, "flush": {"success": True, "status": "full"}, "cleanup": {"success": True, "status": "full"}, } else: expected_tags = { "post_load": {"success": False, "status": "failed"}, "flush": {"success": False, "status": "failed"}, "cleanup": {"success": False, "status": "failed"}, } # note that this is a list equality, so order of entries in 'runtimes' matters. # This is not perfect, but call() objects are not hashable so it's simpler this way, # even if not perfect. 
assert statsd_report.mock_calls == [ call( "operation_duration_seconds", "ms", value * 1000, {"operation": key, **expected_tags.get(key, {})}, 1, ) for (key, value) in runtimes.items() ] assert loader.statsd.namespace == "swh_loader" assert loader.statsd.constant_tags == {"visit_type": "my-visit-type"} class DummyDVCSLoaderExc(DummyDVCSLoader): """A loader which raises an exception when loading some contents""" def get_contents(self): raise RuntimeError("Failed to get contents!") def test_dvcs_loader_exc_partial_visit(swh_storage, caplog): logger_name = "dvcsloaderexc" caplog.set_level(logging.ERROR, logger=logger_name) loader = DummyDVCSLoaderExc(swh_storage, logging_class=logger_name) # fake the loading ending up in a snapshot loader.loaded_snapshot_id = hash_to_bytes( "9e4dd2b40d1b46b70917c0949aa2195c823a648e" ) result = loader.load() # loading failed assert result == {"status": "failed"} # still resulted in a partial visit with a snapshot (somehow) _check_load_failure( caplog, loader, RuntimeError, "Failed to get contents!", ) class BrokenStorageProxy: def __init__(self, storage): self.storage = storage def __getattr__(self, attr): return getattr(self.storage, attr) def snapshot_add(self, snapshots): raise RuntimeError("Failed to add snapshot!") class DummyDVCSLoaderStorageExc(DummyDVCSLoader): """A loader which raises an exception when loading some contents""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.storage = BrokenStorageProxy(self.storage) def test_dvcs_loader_storage_exc_failed_visit(swh_storage, caplog): logger_name = "dvcsloaderexc" caplog.set_level(logging.ERROR, logger=logger_name) loader = DummyDVCSLoaderStorageExc(swh_storage, logging_class=logger_name) result = loader.load() assert result == {"status": "failed"} _check_load_failure( caplog, loader, RuntimeError, "Failed to add snapshot!", status="failed" ) class DummyDVCSLoaderNotFound(DummyDVCSLoader, BaseLoader): """A loader which raises a not_found exception during the prepare method call""" def prepare(*args, **kwargs): raise NotFound("Unknown origin!") def load_status(self): return { "status": "uneventful", } def test_loader_not_found(swh_storage, caplog): loader = DummyDVCSLoaderNotFound(swh_storage) result = loader.load() assert result == {"status": "uneventful"} _check_load_failure(caplog, loader, NotFound, "Unknown origin!", status="not_found") class DummyLoaderWithError(DummyBaseLoader): def prepare(self, *args, **kwargs): raise Exception("error") class DummyDVCSLoaderWithError(DummyDVCSLoader, BaseLoader): def prepare(self, *args, **kwargs): raise Exception("error") @pytest.mark.parametrize("loader_cls", [DummyLoaderWithError, DummyDVCSLoaderWithError]) def test_loader_sentry_tags_on_error(swh_storage, sentry_events, loader_cls): loader = loader_cls(swh_storage) loader.load() sentry_tags = sentry_events[0]["tags"] assert sentry_tags.get(SENTRY_ORIGIN_URL_TAG_NAME) == ORIGIN.url assert sentry_tags.get(SENTRY_VISIT_TYPE_TAG_NAME) == DummyLoader.visit_type + + +CONTENT_MIRROR = "https://common-lisp.net" +CONTENT_URL = f"{CONTENT_MIRROR}/project/asdf/archives/asdf-3.3.5.lisp" + + +def test_content_loader_missing_field(swh_storage): + """It should raise if the ContentLoader is missing checksums field""" + origin = Origin(CONTENT_URL) + with pytest.raises(TypeError, match="missing"): + ContentLoader(swh_storage, origin.url) + + +@pytest.mark.parametrize("loader_class", [ContentLoader, DirectoryLoader]) +def test_node_loader_missing_field(swh_storage, loader_class): + """It should 
raise if the ContentLoader is missing checksums field""" + with pytest.raises(UnsupportedChecksumComputation): + loader_class( + swh_storage, + CONTENT_URL, + checksums={"sha256": "irrelevant-for-that-test"}, + checksums_computation="unsupported", + ) + + +def test_content_loader_404(caplog, swh_storage, requests_mock_datadir, content_path): + """It should not ingest origin when there is no file to be found (no mirror url)""" + unknown_origin = Origin(f"{CONTENT_MIRROR}/project/asdf/archives/unknown.lisp") + loader = ContentLoader( + swh_storage, + unknown_origin.url, + checksums=compute_hashes(content_path), + ) + result = loader.load() + + assert result == {"status": "uneventful"} + + _check_load_failure( + caplog, + loader, + NotFound, + "Unknown origin", + status="not_found", + origin=unknown_origin, + ) + + +def test_content_loader_404_with_fallback( + caplog, swh_storage, requests_mock_datadir, content_path +): + """It should not ingest origin when there is no file to be found""" + unknown_origin = Origin(f"{CONTENT_MIRROR}/project/asdf/archives/unknown.lisp") + fallback_url_ko = f"{CONTENT_MIRROR}/project/asdf/archives/unknown2.lisp" + loader = ContentLoader( + swh_storage, + unknown_origin.url, + fallback_urls=[fallback_url_ko], + checksums=compute_hashes(content_path), + ) + result = loader.load() + + assert result == {"status": "uneventful"} + + _check_load_failure( + caplog, + loader, + NotFound, + "Unknown origin", + status="not_found", + origin=unknown_origin, + ) + + +@pytest.mark.parametrize("checksum_algo", ["sha1", "sha256", "sha512"]) +def test_content_loader_ok_with_fallback( + checksum_algo, + caplog, + swh_storage, + requests_mock_datadir, + content_path, +): + """It should be an eventful visit even when ingesting through mirror url""" + dead_origin = Origin(f"{CONTENT_MIRROR}/dead-origin-url") + fallback_url_ok = CONTENT_URL + fallback_url_ko = f"{CONTENT_MIRROR}/project/asdf/archives/unknown2.lisp" + + loader = ContentLoader( + swh_storage, + dead_origin.url, + fallback_urls=[fallback_url_ok, fallback_url_ko], + checksums=compute_hashes(content_path, [checksum_algo]), + ) + result = loader.load() + + assert result == {"status": "eventful"} + + +compute_content_nar_hashes = partial(compute_nar_hashes, is_tarball=False) + + +@pytest.mark.skipif( + nix_store_missing, reason="requires nix-store binary from nix binaries" +) +@pytest.mark.parametrize("checksums_computation", ["standard", "nar"]) +def test_content_loader_ok_simple( + swh_storage, requests_mock_datadir, content_path, checksums_computation +): + """It should be an eventful visit on a new file, then uneventful""" + compute_hashes_fn = ( + compute_content_nar_hashes if checksums_computation == "nar" else compute_hashes + ) + + origin = Origin(CONTENT_URL) + loader = ContentLoader( + swh_storage, + origin.url, + checksums=compute_hashes_fn(content_path, ["sha1", "sha256", "sha512"]), + checksums_computation=checksums_computation, + ) + result = loader.load() + + assert result == {"status": "eventful"} + + visit_status = assert_last_visit_matches( + swh_storage, origin.url, status="full", type="content" + ) + assert visit_status.snapshot is not None + + result2 = loader.load() + + assert result2 == {"status": "uneventful"} + + +@pytest.mark.skipif( + nix_store_missing, reason="requires nix-store binary from nix binaries" +) +@pytest.mark.parametrize("checksums_computation", ["standard", "nar"]) +def test_content_loader_hash_mismatch( + swh_storage, requests_mock_datadir, content_path, checksums_computation +): + 
"""It should be an eventful visit on a new file, then uneventful""" + compute_hashes_fn = ( + compute_content_nar_hashes if checksums_computation == "nar" else compute_hashes + ) + checksums = compute_hashes_fn(content_path, ["sha1", "sha256", "sha512"]) + erratic_checksums = { + algo: chksum.replace("a", "e") # alter checksums to fail integrity check + for algo, chksum in checksums.items() + } + origin = Origin(CONTENT_URL) + loader = ContentLoader( + swh_storage, + origin.url, + checksums=erratic_checksums, + checksums_computation=checksums_computation, + ) + result = loader.load() + + assert result == {"status": "failed"} + + assert_last_visit_matches(swh_storage, origin.url, status="failed", type="content") + + +DIRECTORY_MIRROR = "https://example.org" +DIRECTORY_URL = f"{DIRECTORY_MIRROR}/archives/dummy-hello.tar.gz" + + +def test_directory_loader_missing_field(swh_storage): + """It should raise if the DirectoryLoader is missing checksums field""" + origin = Origin(DIRECTORY_URL) + with pytest.raises(TypeError, match="missing"): + DirectoryLoader(swh_storage, origin.url) + + +def test_directory_loader_404(caplog, swh_storage, requests_mock_datadir, tarball_path): + """It should not ingest origin when there is no tarball to be found (no mirrors)""" + unknown_origin = Origin(f"{DIRECTORY_MIRROR}/archives/unknown.tar.gz") + loader = DirectoryLoader( + swh_storage, + unknown_origin.url, + checksums=compute_hashes(tarball_path), + ) + result = loader.load() + + assert result == {"status": "uneventful"} + + _check_load_failure( + caplog, + loader, + NotFound, + "Unknown origin", + status="not_found", + origin=unknown_origin, + ) + + +def test_directory_loader_404_with_fallback( + caplog, swh_storage, requests_mock_datadir, tarball_path +): + """It should not ingest origin when there is no tarball to be found""" + unknown_origin = Origin(f"{DIRECTORY_MIRROR}/archives/unknown.tbz2") + fallback_url_ko = f"{DIRECTORY_MIRROR}/archives/elsewhere-unknown2.tbz2" + loader = DirectoryLoader( + swh_storage, + unknown_origin.url, + fallback_urls=[fallback_url_ko], + checksums=compute_hashes(tarball_path), + ) + result = loader.load() + + assert result == {"status": "uneventful"} + + _check_load_failure( + caplog, + loader, + NotFound, + "Unknown origin", + status="not_found", + origin=unknown_origin, + ) + + +@pytest.mark.skipif( + nix_store_missing, reason="requires nix-store binary from nix binaries" +) +@pytest.mark.parametrize("checksums_computation", ["standard", "nar"]) +def test_directory_loader_hash_mismatch( + caplog, swh_storage, requests_mock_datadir, tarball_path, checksums_computation +): + """It should not ingest tarball with mismatched checksum""" + compute_hashes_fn = ( + compute_nar_hashes if checksums_computation == "nar" else compute_hashes + ) + checksums = compute_hashes_fn(tarball_path, ["sha1", "sha256", "sha512"]) + + origin = Origin(DIRECTORY_URL) + erratic_checksums = { + algo: chksum.replace("a", "e") # alter checksums to fail integrity check + for algo, chksum in checksums.items() + } + + loader = DirectoryLoader( + swh_storage, + origin.url, + checksums=erratic_checksums, # making the integrity check fail + checksums_computation=checksums_computation, + ) + result = loader.load() + + assert result == {"status": "failed"} + + _check_load_failure( + caplog, + loader, + ValueError, + "mismatched", + status="failed", + origin=origin, + ) + + +@pytest.mark.parametrize("checksum_algo", ["sha1", "sha256", "sha512"]) +def test_directory_loader_ok_with_fallback( + caplog, 
swh_storage, requests_mock_datadir, tarball_with_std_hashes, checksum_algo +): + """It should be an eventful visit even when ingesting through mirror url""" + tarball_path, checksums = tarball_with_std_hashes + + dead_origin = Origin(f"{DIRECTORY_MIRROR}/dead-origin-url") + fallback_url_ok = DIRECTORY_URL + fallback_url_ko = f"{DIRECTORY_MIRROR}/archives/unknown2.tgz" + + loader = DirectoryLoader( + swh_storage, + dead_origin.url, + fallback_urls=[fallback_url_ok, fallback_url_ko], + checksums={checksum_algo: checksums[checksum_algo]}, + ) + result = loader.load() + + assert result == {"status": "eventful"} + + +@pytest.mark.skipif( + nix_store_missing, reason="requires nix-store binary from nix binaries" +) +@pytest.mark.parametrize("checksums_computation", ["standard", "nar"]) +def test_directory_loader_ok_simple( + swh_storage, requests_mock_datadir, tarball_path, checksums_computation +): + """It should be an eventful visit on a new tarball, then uneventful""" + origin = Origin(DIRECTORY_URL) + compute_hashes_fn = ( + compute_nar_hashes if checksums_computation == "nar" else compute_hashes + ) + + loader = DirectoryLoader( + swh_storage, + origin.url, + checksums=compute_hashes_fn(tarball_path, ["sha1", "sha256", "sha512"]), + checksums_computation=checksums_computation, + ) + result = loader.load() + + assert result == {"status": "eventful"} + + visit_status = assert_last_visit_matches( + swh_storage, origin.url, status="full", type="directory" + ) + assert visit_status.snapshot is not None + + result2 = loader.load() + + assert result2 == {"status": "uneventful"} diff --git a/swh/loader/core/tests/test_tasks.py b/swh/loader/core/tests/test_tasks.py new file mode 100644 index 0000000..5b006c6 --- /dev/null +++ b/swh/loader/core/tests/test_tasks.py @@ -0,0 +1,42 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import uuid + +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +NAMESPACE = "swh.loader.core" + + +@pytest.fixture +def nixguix_lister(): + return Lister(name="nixguix", instance_name="example", id=uuid.uuid4()) + + +@pytest.mark.parametrize("loader_name", ["Content", "Directory"]) +def test_loader_tasks_for_listed_origin( + loading_task_creation_for_listed_origin_test, + nixguix_lister, + loader_name, +): + + listed_origin = ListedOrigin( + lister_id=nixguix_lister.id, + url="https://example.org/artifact/artifact", + visit_type=loader_name.lower(), + extra_loader_arguments={ + "fallback_urls": ["https://example.org/mirror/artifact-0.0.1.pkg.xz"], + "checksums": {"sha256": "some-valid-checksum"}, + }, + ) + + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.{loader_name}Loader", + task_function_name=f"{NAMESPACE}.tasks.Load{loader_name}", + lister=nixguix_lister, + listed_origin=listed_origin, + ) diff --git a/swh/loader/core/tests/test_utils.py b/swh/loader/core/tests/test_utils.py index 28d6c21..1f8b497 100644 --- a/swh/loader/core/tests/test_utils.py +++ b/swh/loader/core/tests/test_utils.py @@ -1,187 +1,239 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information 
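# --- Illustrative sketch (not part of this diff) ------------------------------
# The ContentLoader/DirectoryLoader tests above all follow the same pattern:
# compute the expected checksums of a local fixture, hand them to the loader
# together with the origin URL (and optional mirror URLs), then assert on the
# visit outcome. A condensed, hypothetical version of that flow; the URL, the
# local path and ingest_tarball() itself are made up, and swh_storage stands
# for the usual storage fixture:

from swh.loader.core.loader import DirectoryLoader
from swh.model.hashutil import MultiHash


def ingest_tarball(swh_storage, tarball_url: str, local_copy: str) -> dict:
    """Ingest ``tarball_url``, checking it against hashes of a local copy."""
    checksums = MultiHash.from_path(local_copy, hash_names=["sha256"]).hexdigest()
    loader = DirectoryLoader(
        swh_storage,
        tarball_url,
        checksums=checksums,                      # default "standard" computation
        fallback_urls=[tarball_url + ".mirror"],  # mirrors tried if the origin fails
    )
    return loader.load()  # {"status": "eventful"} on the first successful visit
# -------------------------------------------------------------------------------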
from datetime import datetime import os +from pathlib import Path import signal +import tempfile from time import sleep from unittest.mock import patch import pytest +from swh.core.tarball import uncompress from swh.loader.core.utils import ( CloneFailure, CloneTimeout, clean_dangling_folders, clone_with_timeout, + compute_nar_hashes, + nix_hashes, parse_visit_date, ) +from swh.loader.exception import MissingOptionalDependency + +from .conftest import nix_store_missing def prepare_arborescence_from(tmpdir, folder_names): """Prepare arborescence tree with folders Args: tmpdir (Either[LocalPath, str]): Root temporary directory folder_names (List[str]): List of folder names Returns: List of folders """ dangling_folders = [] for dname in folder_names: d = str(tmpdir / dname) os.mkdir(d) dangling_folders.append(d) return str(tmpdir), dangling_folders def assert_dirs(actual_dirs, expected_dirs): """Assert that the directory actual and expected match""" for d in actual_dirs: assert d in expected_dirs assert len(actual_dirs) == len(expected_dirs) def test_clean_dangling_folders_0(tmpdir): """Folder does not exist, do nothing""" r = clean_dangling_folders("/path/does/not/exist", "unused-pattern") assert r is None @patch("swh.loader.core.utils.psutil.pid_exists", return_value=False) def test_clean_dangling_folders_1(mock_pid_exists, tmpdir): """Folder which matches pattern with dead pid are cleaned up""" rootpath, dangling = prepare_arborescence_from( tmpdir, [ "something", "swh.loader.svn-4321.noisynoise", ], ) clean_dangling_folders(rootpath, "swh.loader.svn") actual_dirs = os.listdir(rootpath) mock_pid_exists.assert_called_once_with(4321) assert_dirs(actual_dirs, ["something"]) @patch("swh.loader.core.utils.psutil.pid_exists", return_value=True) def test_clean_dangling_folders_2(mock_pid_exists, tmpdir): """Folder which matches pattern with live pid are skipped""" rootpath, dangling = prepare_arborescence_from( tmpdir, [ "something", "swh.loader.hg-1234.noisynoise", ], ) clean_dangling_folders(rootpath, "swh.loader.hg") actual_dirs = os.listdir(rootpath) mock_pid_exists.assert_called_once_with(1234) assert_dirs( actual_dirs, [ "something", "swh.loader.hg-1234.noisynoise", ], ) @patch("swh.loader.core.utils.psutil.pid_exists", return_value=False) @patch( "swh.loader.core.utils.shutil.rmtree", side_effect=ValueError("Could not remove for reasons"), ) def test_clean_dangling_folders_3(mock_rmtree, mock_pid_exists, tmpdir): """Error in trying to clean dangling folders are skipped""" path1 = "thingy" path2 = "swh.loader.git-1468.noisy" rootpath, dangling = prepare_arborescence_from( tmpdir, [ path1, path2, ], ) clean_dangling_folders(rootpath, "swh.loader.git") actual_dirs = os.listdir(rootpath) mock_pid_exists.assert_called_once_with(1468) mock_rmtree.assert_called_once_with(os.path.join(rootpath, path2)) assert_dirs(actual_dirs, [path2, path1]) def test_clone_with_timeout_no_error_no_timeout(): def succeed(): """This does nothing to simulate a successful clone""" clone_with_timeout("foo", "bar", succeed, timeout=0.5) def test_clone_with_timeout_no_error_timeout(): def slow(): """This lasts for more than the timeout""" sleep(1) with pytest.raises(CloneTimeout): clone_with_timeout("foo", "bar", slow, timeout=0.5) def test_clone_with_timeout_error(): def raise_something(): raise RuntimeError("panic!") with pytest.raises(CloneFailure): clone_with_timeout("foo", "bar", raise_something, timeout=0.5) def test_clone_with_timeout_sigkill(): """This also tests that the traceback is useful""" src = 
"https://www.mercurial-scm.org/repo/hello" dest = "/dev/null" timeout = 0.5 sleepy_time = 100 * timeout assert sleepy_time > timeout def ignores_sigterm(*args, **kwargs): # ignore SIGTERM to force sigkill signal.signal(signal.SIGTERM, lambda signum, frame: None) sleep(sleepy_time) # we make sure we exceed the timeout with pytest.raises(CloneTimeout) as e: clone_with_timeout(src, dest, ignores_sigterm, timeout) killed = True assert e.value.args == (src, timeout, killed) VISIT_DATE_STR = "2021-02-17 15:50:04.518963" VISIT_DATE = datetime(2021, 2, 17, 15, 50, 4, 518963) @pytest.mark.parametrize( "input_visit_date,expected_date", [ (None, None), (VISIT_DATE, VISIT_DATE), (VISIT_DATE_STR, VISIT_DATE), ], ) def test_utils_parse_visit_date(input_visit_date, expected_date): assert parse_visit_date(input_visit_date) == expected_date def test_utils_parse_visit_date_now(): actual_date = parse_visit_date("now") assert isinstance(actual_date, datetime) def test_utils_parse_visit_date_fails(): with pytest.raises(ValueError, match="invalid"): parse_visit_date(10) # not a string nor a date + + +@patch( + "swh.loader.core.utils.shutil.which", + return_value=None, +) +def test_nix_hashes_missing_nix_store(mock_which): + with pytest.raises(MissingOptionalDependency, match="nix-store"): + nix_hashes("some-irrelevant-filepath", ["sha1"]) + + +@pytest.mark.skipif(nix_store_missing, reason="requires nix-bin installed (bullseye)") +def test_nix_hashes_compute(tarball_with_nar_hashes): + tarball_path, nar_checksums = tarball_with_nar_hashes + + with tempfile.TemporaryDirectory() as tmpdir: + directory_path = Path(tmpdir) / "src" + directory_path.mkdir(parents=True, exist_ok=True) + uncompress(tarball_path, dest=str(directory_path)) + directory = next(directory_path.iterdir()) + + actual_multihash = nix_hashes(directory, nar_checksums.keys()) + + assert actual_multihash.hexdigest() == nar_checksums + + +@pytest.mark.skipif(nix_store_missing, reason="requires nix-bin installed (bullseye)") +def test_compute_nar_hashes_tarball(tarball_with_nar_hashes): + tarball_path, nar_checksums = tarball_with_nar_hashes + + actual_checksums = compute_nar_hashes(tarball_path, nar_checksums.keys()) + + assert actual_checksums == nar_checksums + + +@pytest.mark.skipif(nix_store_missing, reason="requires nix-bin installed (bullseye)") +def test_compute_nar_hashes_file(content_with_nar_hashes): + content_path, nar_checksums = content_with_nar_hashes + + actual_checksums = compute_nar_hashes( + content_path, nar_checksums.keys(), is_tarball=False + ) + + assert actual_checksums == nar_checksums diff --git a/swh/loader/core/utils.py b/swh/loader/core/utils.py index 0e9b388..c56a099 100644 --- a/swh/loader/core/utils.py +++ b/swh/loader/core/utils.py @@ -1,127 +1,188 @@ # Copyright (C) 2018-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime, timezone import io import os +from pathlib import Path import shutil import signal +from subprocess import PIPE, Popen +import tempfile import time import traceback -from typing import Callable, Optional, Union +from typing import Callable, Dict, Iterable, List, Optional, Union from billiard import Process, Queue # type: ignore from dateutil.parser import parse import psutil +from swh.core.tarball import uncompress +from swh.loader.exception import MissingOptionalDependency +from 
swh.model.hashutil import MultiHash + def clean_dangling_folders(dirpath: str, pattern_check: str, log=None) -> None: """Clean up potential dangling temporary working folder rooted at `dirpath`. Those folders must match a dedicated pattern and not belonging to a live pid. Args: dirpath: Path to check for dangling files pattern_check: A dedicated pattern to check on first level directory (e.g `swh.loader.mercurial.`, `swh.loader.svn.`) log (Logger): Optional logger """ if not os.path.exists(dirpath): return for filename in os.listdir(dirpath): path_to_cleanup = os.path.join(dirpath, filename) try: # pattern: `swh.loader.{loader-type}-pid.{noise}` if ( pattern_check not in filename or "-" not in filename ): # silently ignore unknown patterns continue _, pid_ = filename.split("-") pid = int(pid_.split(".")[0]) if psutil.pid_exists(pid): if log: log.debug("PID %s is live, skipping", pid) continue # could be removed concurrently, so check before removal if os.path.exists(path_to_cleanup): shutil.rmtree(path_to_cleanup) except Exception as e: if log: log.warn("Fail to clean dangling path %s: %s", path_to_cleanup, e) class CloneTimeout(Exception): pass class CloneFailure(Exception): pass def _clone_task(clone_func: Callable[[], None], errors: Queue) -> None: try: clone_func() except Exception as e: exc_buffer = io.StringIO() traceback.print_exc(file=exc_buffer) errors.put_nowait(exc_buffer.getvalue()) raise e def clone_with_timeout( src: str, dest: str, clone_func: Callable[[], None], timeout: float ) -> None: """Clone a repository with timeout. Args: src: clone source dest: clone destination clone_func: callable that does the actual cloning timeout: timeout in seconds """ errors: Queue = Queue() process = Process(target=_clone_task, args=(clone_func, errors)) process.start() process.join(timeout) if process.is_alive(): process.terminate() # Give it literally a second (in successive steps of 0.1 second), # then kill it. # Can't use `process.join(1)` here, billiard appears to be bugged # https://github.com/celery/billiard/issues/270 killed = False for _ in range(10): time.sleep(0.1) if not process.is_alive(): break else: killed = True os.kill(process.pid, signal.SIGKILL) raise CloneTimeout(src, timeout, killed) if not errors.empty(): raise CloneFailure(src, dest, errors.get()) def parse_visit_date(visit_date: Optional[Union[datetime, str]]) -> Optional[datetime]: """Convert visit date from either None, a string or a datetime to either None or datetime. """ if visit_date is None: return None if isinstance(visit_date, datetime): return visit_date if visit_date == "now": return datetime.now(tz=timezone.utc) if isinstance(visit_date, str): return parse(visit_date) raise ValueError(f"invalid visit date {visit_date!r}") + + +def nix_hashes(filepath: Path, hash_names: Iterable[str]) -> MultiHash: + """Compute nix-store hashes on filepath. + + Raises: + FileNotFoundError in case the nix-store command is not available on the system. + + """ + NIX_STORE = shutil.which("nix-store") + if NIX_STORE is None: + raise MissingOptionalDependency("nix-store") + + multi_hash = MultiHash(hash_names=hash_names) + + command = [NIX_STORE, "--dump", str(filepath)] + with Popen(command, stdout=PIPE) as proc: + assert proc.stdout is not None + for chunk in proc.stdout: + multi_hash.update(chunk) + + return multi_hash + + +def compute_nar_hashes( + filepath: Path, + hash_names: List[str] = ["sha256"], + is_tarball=True, +) -> Dict[str, str]: + """Compute nar checksums dict out of a filepath (tarball or plain file). 
+ + If it's a tarball, this uncompresses the tarball in a temporary directory to compute + the nix hashes (and then cleans it up). + + Args: + filepath: The tarball (if is_tarball is True) or a filepath + hash_names: The list of checksums to compute + is_tarball: Whether filepath represents a tarball or not + + Returns: + The dict of checksums values whose keys are present in hash_names. + + """ + with tempfile.TemporaryDirectory() as tmpdir: + if is_tarball: + directory_path = Path(tmpdir) + directory_path.mkdir(parents=True, exist_ok=True) + uncompress(str(filepath), dest=str(directory_path)) + path_on_disk = next(directory_path.iterdir()) + else: + path_on_disk = filepath + + hashes = nix_hashes(path_on_disk, hash_names).hexdigest() + return hashes diff --git a/swh/loader/exception.py b/swh/loader/exception.py index 6a77fc9..3fd396d 100644 --- a/swh/loader/exception.py +++ b/swh/loader/exception.py @@ -1,13 +1,25 @@ -# Copyright (C) 2021 The Software Heritage developers +# Copyright (C) 2021-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information class NotFound(ValueError): """An exception raised when some information to retrieve is not found (e.g origin, artifact, ...) """ pass + + +class MissingOptionalDependency(ValueError): + """An exception raised when an optional runtime dependency is missing.""" + + pass + + +class UnsupportedChecksumComputation(ValueError): + """An exception raised when loader cannot compute such checksums.""" + + pass diff --git a/swh/loader/package/arch/loader.py b/swh/loader/package/arch/loader.py index 7ab9fc2..6753966 100644 --- a/swh/loader/package/arch/loader.py +++ b/swh/loader/package/arch/loader.py @@ -1,141 +1,142 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from distutils.version import LooseVersion from pathlib import Path import re from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple import attr from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import release_name from swh.model.model import ObjectType, Person, Release, Sha1Git, TimestampWithTimezone from swh.storage.interface import StorageInterface @attr.s class ArchPackageInfo(BasePackageInfo): name = attr.ib(type=str) """Name of the package""" version = attr.ib(type=str) """Current version""" last_modified = attr.ib(type=str) """File last modified date as release date""" def extract_intrinsic_metadata(dir_path: Path) -> Dict[str, Any]: """Extract intrinsic metadata from .PKGINFO file at dir_path. Each Arch linux package has a .PKGINFO file at the root of the archive. 
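
The two helpers above wrap `nix-store --dump`: `nix_hashes` streams the dump into a `MultiHash`, and `compute_nar_hashes` adds the optional uncompress step. A minimal usage sketch, assuming nix-store is installed and using placeholder file paths (note that a missing binary surfaces as `MissingOptionalDependency`, as the unit test above checks):

from swh.loader.core.utils import compute_nar_hashes
from swh.loader.exception import MissingOptionalDependency

try:
    # Tarball: uncompressed into a temporary directory, then hashed with nix-store
    checksums = compute_nar_hashes("8sync-0.1.0.tar.gz", ["sha256"])
    # e.g. {"sha256": "<hex digest>"}

    # Plain file: hashed directly, no uncompress step
    file_checksums = compute_nar_hashes("some-file.txt", ["sha256"], is_tarball=False)
except MissingOptionalDependency:
    ...  # nix-store is not installed on this system
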
Args: dir_path: A directory on disk where a package has been extracted Returns: A dict mapping """ pkginfo_path = Path(dir_path, ".PKGINFO") rex = re.compile(r"^(\w+)\s=\s(.*)$", re.M) with pkginfo_path.open("rb") as content: parsed = rex.findall(content.read().decode()) data = {entry[0].lower(): entry[1] for entry in parsed} if "url" in data.keys(): data["project_url"] = data["url"] return data class ArchLoader(PackageLoader[ArchPackageInfo]): visit_type = "arch" def __init__( self, storage: StorageInterface, url: str, artifacts: List[Dict[str, Any]], arch_metadata: List[Dict[str, Any]], **kwargs, ): super().__init__(storage=storage, url=url, **kwargs) self.url = url self.artifacts: Dict[str, Dict] = { artifact["version"]: artifact for artifact in artifacts } self.arch_metadata: Dict[str, Dict] = { metadata["version"]: metadata for metadata in arch_metadata } def get_versions(self) -> Sequence[str]: """Get all released versions of an Arch Linux package Returns: A sequence of versions Example:: ["0.1.1", "0.10.2"] """ versions = list(self.artifacts.keys()) versions.sort(key=LooseVersion) return versions def get_default_version(self) -> str: """Get the newest release version of an Arch Linux package Returns: A string representing a version Example:: "0.1.2" """ return self.get_versions()[-1] def get_package_info(self, version: str) -> Iterator[Tuple[str, ArchPackageInfo]]: """Get release name and package information from version Args: version: arch version (e.g: "0.1.0") Returns: Iterator of tuple (release_name, p_info) """ artifact = self.artifacts[version] metadata = self.arch_metadata[version] assert version == artifact["version"] == metadata["version"] p_info = ArchPackageInfo( name=metadata["name"], filename=artifact["filename"], url=artifact["url"], version=version, last_modified=metadata["last_modified"], + checksums=artifact["checksums"], ) yield release_name(version, artifact["filename"]), p_info def build_release( self, p_info: ArchPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: intrinsic_metadata = extract_intrinsic_metadata(Path(uncompressed_path)) author = Person.from_fullname(intrinsic_metadata["packager"].encode()) description = intrinsic_metadata["pkgdesc"] message = ( f"Synthetic release for Arch Linux source package {p_info.name} " f"version {p_info.version}\n\n" f"{description}\n" ) return Release( name=p_info.version.encode(), author=author, date=TimestampWithTimezone.from_iso8601(p_info.last_modified), message=message.encode(), target_type=ObjectType.DIRECTORY, target=directory, synthetic=True, ) diff --git a/swh/loader/package/arch/tests/test_arch.py b/swh/loader/package/arch/tests/test_arch.py index 3180f9d..e061ce3 100644 --- a/swh/loader/package/arch/tests/test_arch.py +++ b/swh/loader/package/arch/tests/test_arch.py @@ -1,253 +1,271 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information + +# flake8: noqa: B950 + import pytest from swh.loader.package.arch.loader import ArchLoader from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( ObjectType, Person, Release, Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone, ) EXPECTED_PACKAGES = [ { "url": "https://archive.archlinux.org/packages/d/dialog/", "artifacts": [ { - "url": 
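
With this change, each Arch artifact now carries a `checksums` mapping (see the dialog test data below). A construction sketch, assuming `storage` is any `swh.storage` instance and using a placeholder digest:

from swh.loader.package.arch.loader import ArchLoader

loader = ArchLoader(
    storage,  # assumed: an swh.storage instance
    url="https://archive.archlinux.org/packages/d/dialog/",
    artifacts=[
        {
            "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz",
            "filename": "dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz",
            "version": "1:1.3_20190211-1",
            "length": 440,
            "checksums": {"length": 440, "sha256": "..."},  # placeholder digest
        },
    ],
    arch_metadata=[
        {
            "arch": "x86_64",
            "repo": "core",
            "name": "dialog",
            "version": "1:1.3_20190211-1",
            "last_modified": "2019-02-13T08:36:00",
        },
    ],
)
assert loader.get_versions() == ["1:1.3_20190211-1"]
loader.load()  # builds one synthetic release per version, targeting the unpacked directory
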
"https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz", # noqa: B950 + "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz", "version": "1:1.3_20190211-1", - "length": 180000, + "length": 440, "filename": "dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz", + "checksums": { + "length": 440, + "md5": "ce66c053ded0d51e5610368d85242684", + "sha256": "27c6a7af005cd2214fd63f7498bf51e3bff332df33a9b8f7ed07934823f7ba43", + }, }, { - "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst", # noqa: B950 + "url": "https://archive.archlinux.org/packages/d/dialog/dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst", "version": "1:1.3_20220414-1", - "length": 198000, + "length": 371, "filename": "dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst", + "checksums": { + "length": 371, + "md5": "5687f6bfc3b6975fdd073deb7075ec09", + "sha256": "b002d18d1e1f356410f73b08170f0bd52f0d83b37b71ccd938594e7d486c4e8a", + }, }, ], "arch_metadata": [ { "arch": "x86_64", "repo": "core", "name": "dialog", "version": "1:1.3_20190211-1", "last_modified": "2019-02-13T08:36:00", }, { "arch": "x86_64", "repo": "core", "name": "dialog", "version": "1:1.3_20220414-1", "last_modified": "2022-04-16T03:59:00", }, ], }, { "url": "https://archlinuxarm.org/packages/aarch64/gzip", "artifacts": [ { - "url": "https://uk.mirror.archlinuxarm.org/aarch64/core/gzip-1.12-1-aarch64.pkg.tar.xz", # noqa: B950 - "length": 79640, + "url": "https://uk.mirror.archlinuxarm.org/aarch64/core/gzip-1.12-1-aarch64.pkg.tar.xz", + "length": 472, "version": "1.12-1", "filename": "gzip-1.12-1-aarch64.pkg.tar.xz", + "checksums": { + "length": 472, + "md5": "0b96fa72ae35c097ec78132ed2f05a57", + "sha256": "8d45b871283e2c37513833f6327ebcdd96c6c3b335588945f873cb809b1e6d2b", + }, } ], "arch_metadata": [ { "arch": "aarch64", "name": "gzip", "repo": "core", "version": "1.12-1", "last_modified": "2022-04-07T21:08:14", } ], }, ] def test_get_versions(swh_storage): loader = ArchLoader( swh_storage, url=EXPECTED_PACKAGES[0]["url"], artifacts=EXPECTED_PACKAGES[0]["artifacts"], arch_metadata=EXPECTED_PACKAGES[0]["arch_metadata"], ) assert loader.get_versions() == [ "1:1.3_20190211-1", "1:1.3_20220414-1", ] def test_get_default_version(requests_mock_datadir, swh_storage): loader = ArchLoader( swh_storage, url=EXPECTED_PACKAGES[0]["url"], artifacts=EXPECTED_PACKAGES[0]["artifacts"], arch_metadata=EXPECTED_PACKAGES[0]["arch_metadata"], ) assert loader.get_default_version() == "1:1.3_20220414-1" def test_arch_loader_load_one_version(datadir, requests_mock_datadir, swh_storage): loader = ArchLoader( swh_storage, url=EXPECTED_PACKAGES[1]["url"], artifacts=EXPECTED_PACKAGES[1]["artifacts"], arch_metadata=EXPECTED_PACKAGES[1]["arch_metadata"], ) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_snapshot_id = "4020d0a278027550e336b5481a4159a913c91aa4" expected_release_id = "7681098c9e381f9cc8bd1724d57eeee2182982dc" assert expected_snapshot_id == actual_load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(actual_load_status["snapshot_id"]), branches={ b"releases/1.12-1/gzip-1.12-1-aarch64.pkg.tar.xz": SnapshotBranch( target=hash_to_bytes(expected_release_id), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/1.12-1/gzip-1.12-1-aarch64.pkg.tar.xz", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = 
get_stats(swh_storage) assert { "content": 1, "directory": 1, "origin": 1, "origin_visit": 1, "release": 1, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert swh_storage.release_get([hash_to_bytes(expected_release_id)])[0] == Release( name=b"1.12-1", message=b"Synthetic release for Arch Linux source package gzip version " b"1.12-1\n\nGNU compression utility\n", target=hash_to_bytes("bd742aaf422953a1f7a5e084ec4a7477491d63fb"), target_type=ObjectType.DIRECTORY, synthetic=True, author=Person.from_fullname( b"Arch Linux ARM Build System " ), date=TimestampWithTimezone.from_iso8601("2022-04-07T21:08:14+00:00"), id=hash_to_bytes(expected_release_id), ) assert_last_visit_matches( swh_storage, url=EXPECTED_PACKAGES[1]["url"], status="full", type="arch", snapshot=expected_snapshot.id, ) def test_arch_loader_load_n_versions(datadir, requests_mock_datadir, swh_storage): loader = ArchLoader( swh_storage, url=EXPECTED_PACKAGES[0]["url"], artifacts=EXPECTED_PACKAGES[0]["artifacts"], arch_metadata=EXPECTED_PACKAGES[0]["arch_metadata"], ) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None expected_snapshot_id = "832139d69a91edffcc3a96cca11deaf9255041c3" assert expected_snapshot_id == actual_load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(actual_load_status["snapshot_id"]), branches={ b"releases/1:1.3_20190211-1/" b"dialog-1:1.3_20190211-1-x86_64.pkg.tar.xz": SnapshotBranch( target=hash_to_bytes("37efb727ff8bb8fbf92518aa8fe5fff2ad427d06"), target_type=TargetType.RELEASE, ), b"releases/1:1.3_20220414-1/" b"dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst": SnapshotBranch( target=hash_to_bytes("020d3f5627df7474f257fd04f1ede4415296e265"), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/1:1.3_20220414-1/dialog-1:1.3_20220414-1-x86_64.pkg.tar.zst", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 2, "directory": 2, "origin": 1, "origin_visit": 1, "release": 2, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert_last_visit_matches( swh_storage, url=EXPECTED_PACKAGES[0]["url"], status="full", type="arch", snapshot=expected_snapshot.id, ) def test_arch_invalid_origin_archive_not_found(swh_storage, requests_mock_datadir): url = "https://nowhere/packages/42" loader = ArchLoader( swh_storage, url, artifacts=[ { "filename": "42-0.0.1.pkg.xz", "url": "https://mirror2.nowhere/pkg/42-0.0.1.pkg.xz", "version": "0.0.1", "length": 42, }, ], arch_metadata=[ { "version": "0.0.1", "arch": "aarch64", "name": "42", "repo": "community", "last_modified": "2022-04-07T21:08:14", }, ], ) with pytest.raises(Exception): assert loader.load() == {"status": "failed"} assert_last_visit_matches( swh_storage, url, status="not_found", type="arch", snapshot=None ) diff --git a/swh/loader/package/arch/tests/test_tasks.py b/swh/loader/package/arch/tests/test_tasks.py index b5178ac..13f5768 100644 --- a/swh/loader/package/arch/tests/test_tasks.py +++ b/swh/loader/package/arch/tests/test_tasks.py @@ -1,40 +1,59 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +import uuid -def test_tasks_arch_loader( - mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config -): - mock_load = 
mocker.patch("swh.loader.package.arch.loader.ArchLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.arch.tasks.LoadArch", - kwargs=dict( - url="some-url/packages/s/some-package", - artifacts=[ +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +NAMESPACE = "swh.loader.package.arch" + + +@pytest.fixture +def arch_lister(): + return Lister(name="arch", instance_name="example", id=uuid.uuid4()) + + +@pytest.fixture +def arch_listed_origin(arch_lister): + return ListedOrigin( + lister_id=arch_lister.id, + url="some-url/packages/s/some-package", + visit_type="arch", + extra_loader_arguments={ + "artifacts": [ { "version": "0.0.1", "url": "https://somewhere/some-package-0.0.1.pkg.xz", "filename": "some-package-0.0.1.pkg.xz", "length": 42, } ], - arch_metadata=[ + "arch_metadata": [ { "version": "0.0.1", "arch": "aarch64", "name": "some-package", "repo": "community", "last_modified": "1970-01-01T21:08:14", } ], - ), + }, + ) + + +def test_arch_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + arch_lister, + arch_listed_origin, +): + + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.ArchLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadArch", + lister=arch_lister, + listed_origin=arch_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/archive/loader.py b/swh/loader/package/archive/loader.py index b96cad6..feef63b 100644 --- a/swh/loader/package/archive/loader.py +++ b/swh/loader/package/archive/loader.py @@ -1,168 +1,171 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +from __future__ import annotations + import datetime import hashlib import logging from os import path import string from typing import Any, Dict, Iterator, Mapping, Optional, Sequence, Tuple, Union import attr import iso8601 from swh.loader.package.loader import BasePackageInfo, PackageLoader, PartialExtID from swh.loader.package.utils import EMPTY_AUTHOR, release_name from swh.model.model import ObjectType, Release, Sha1Git, TimestampWithTimezone from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) @attr.s class ArchivePackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) length = attr.ib(type=int) """Size of the archive file""" time = attr.ib(type=Union[str, datetime.datetime]) """Timestamp of the archive file on the server""" # default format for gnu MANIFEST_FORMAT = string.Template("$time $length $version $url") def extid(self, manifest_format: Optional[string.Template] = None) -> PartialExtID: """Returns a unique intrinsic identifier of this package info ``manifest_format`` allows overriding the class' default MANIFEST_FORMAT""" manifest_format = manifest_format or self.MANIFEST_FORMAT # TODO: use parsed attributes instead of self.raw_info manifest = manifest_format.substitute( {k: str(v) for (k, v) in self.raw_info.items()} ) return ( self.EXTID_TYPE, self.EXTID_VERSION, hashlib.sha256(manifest.encode()).digest(), ) @classmethod - def from_metadata(cls, a_metadata: Dict[str, Any]) -> "ArchivePackageInfo": + def from_metadata(cls, a_metadata: Dict[str, Any]) -> ArchivePackageInfo: 
url = a_metadata["url"] filename = a_metadata.get("filename") return cls( url=url, filename=filename if filename else path.split(url)[-1], raw_info=a_metadata, length=a_metadata["length"], time=a_metadata["time"], version=a_metadata["version"], + checksums={"length": a_metadata["length"]}, ) class ArchiveLoader(PackageLoader[ArchivePackageInfo]): """Load archive origin's artifact files into swh archive""" visit_type = "tar" def __init__( self, storage: StorageInterface, url: str, artifacts: Sequence[Dict[str, Any]], extid_manifest_format: Optional[str] = None, snapshot_append: bool = False, **kwargs: Any, ): f"""Loader constructor. For now, this is the lister's task output. Args: url: Origin url artifacts: List of artifact information with keys: - **time**: last modification time as either isoformat date string or timestamp - **url**: the artifact url to retrieve filename - **filename**: optionally, the file's name - **version**: artifact's version - **length**: artifact's length extid_manifest_format: template string used to format a manifest, which is hashed to get the extid of a package. Defaults to {ArchivePackageInfo.MANIFEST_FORMAT!r} snapshot_append: if :const:`True`, append latest snapshot content to the new snapshot created by the loader """ super().__init__(storage=storage, url=url, **kwargs) self.artifacts = artifacts # assume order is enforced in the lister self.extid_manifest_format = ( None if extid_manifest_format is None else string.Template(extid_manifest_format) ) self.snapshot_append = snapshot_append def get_versions(self) -> Sequence[str]: versions = [] for archive in self.artifacts: v = archive.get("version") if v: versions.append(v) return versions def get_default_version(self) -> str: # It's the most recent, so for this loader, it's the last one return self.artifacts[-1]["version"] def get_package_info( self, version: str ) -> Iterator[Tuple[str, ArchivePackageInfo]]: for a_metadata in self.artifacts: p_info = ArchivePackageInfo.from_metadata(a_metadata) if version == p_info.version: # FIXME: this code assumes we have only 1 artifact per # versioned package yield release_name(version), p_info def new_packageinfo_to_extid( self, p_info: ArchivePackageInfo ) -> Optional[PartialExtID]: return p_info.extid(manifest_format=self.extid_manifest_format) def build_release( self, p_info: ArchivePackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: time = p_info.time # assume it's a timestamp if isinstance(time, str): # otherwise, assume it's a parsable date parsed_time = iso8601.parse_date(time) else: parsed_time = time normalized_time = ( TimestampWithTimezone.from_datetime(parsed_time) if parsed_time is not None else None ) msg = f"Synthetic release for archive at {p_info.url}\n" return Release( name=p_info.version.encode(), message=msg.encode(), date=normalized_time, author=EMPTY_AUTHOR, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: if not self.snapshot_append: return {} last_snapshot = self.last_snapshot() return last_snapshot.to_dict()["branches"] if last_snapshot else {} diff --git a/swh/loader/package/archive/tests/test_archive.py b/swh/loader/package/archive/tests/test_archive.py index 7a32b2c..1aaefae 100644 --- a/swh/loader/package/archive/tests/test_archive.py +++ b/swh/loader/package/archive/tests/test_archive.py @@ -1,502 +1,625 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this 
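
The `extid_manifest_format` template above is hashed to recognize already-loaded artifacts; with the default GNU template it covers time, length, version and url. A concrete sketch, using the first 8sync artifact from the tests below (the expected triple matches test_archive_extid):

import hashlib
import string

manifest = string.Template("$time $length $version $url").substitute(
    time="944729610",
    length="221837",
    version="0.1.0",
    url="https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz",
)
# ArchivePackageInfo.extid() returns this triple for the artifact:
extid = ("package-manifest-sha256", 0, hashlib.sha256(manifest.encode()).digest())
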
distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import datetime import hashlib from io import BytesIO from pathlib import Path import string import attr import pytest from requests.exceptions import ContentDecodingError from swh.loader.package.archive.loader import ArchiveLoader, ArchivePackageInfo from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.model import ( ObjectType, Person, Release, Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone, ) URL = "https://ftp.gnu.org/gnu/8sync/" GNU_ARTIFACTS = [ { "time": 944729610, "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz", "length": 221837, "filename": "8sync-0.1.0.tar.gz", "version": "0.1.0", }, { "time": 1480991830, "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz", "length": 238466, "filename": "8sync-0.2.0.tar.gz", "version": "0.2.0", }, ] _expected_new_contents_first_visit = [ "e9258d81faf5881a2f96a77ba609396f82cb97ad", "1170cf105b04b7e2822a0e09d2acf71da7b9a130", "fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac", "0057bec9b5422aff9256af240b177ac0e3ac2608", "2b8d0d0b43a1078fc708930c8ddc2956a86c566e", "27de3b3bc6545d2a797aeeb4657c0e215a0c2e55", "2e6db43f5cd764e677f416ff0d0c78c7a82ef19b", "ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62", "edeb33282b2bffa0e608e9d2fd960fd08093c0ea", "d64e64d4c73679323f8d4cde2643331ba6c20af9", "7a756602914be889c0a2d3952c710144b3e64cb0", "84fb589b554fcb7f32b806951dcf19518d67b08f", "8624bcdae55baeef00cd11d5dfcfa60f68710a02", "e08441aeab02704cfbd435d6445f7c072f8f524e", "f67935bc3a83a67259cda4b2d43373bd56703844", "809788434b433eb2e3cfabd5d591c9a659d5e3d8", "7d7c6c8c5ebaeff879f61f37083a3854184f6c41", "b99fec102eb24bffd53ab61fc30d59e810f116a2", "7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68", "f0c97052e567948adf03e641301e9983c478ccff", "7fb724242e2b62b85ca64190c31dcae5303e19b3", "4f9709e64a9134fe8aefb36fd827b84d8b617ab5", "7350628ccf194c2c3afba4ac588c33e3f3ac778d", "0bb892d9391aa706dc2c3b1906567df43cbe06a2", "49d4c0ce1a16601f1e265d446b6c5ea6b512f27c", "6b5cc594ac466351450f7f64a0b79fdaf4435ad3", "3046e5d1f70297e2a507b98224b6222c9688d610", "1572607d456d7f633bc6065a2b3048496d679a31", ] _expected_new_directories_first_visit = [ "daabc65ec75d487b1335ffc101c0ac11c803f8fc", "263be23b4a8101d3ad0d9831319a3e0f2b065f36", "7f6e63ba6eb3e2236f65892cd822041f1a01dd5c", "4db0a3ecbc976083e2dac01a62f93729698429a3", "dfef1c80e1098dd5deda664bb44a9ab1f738af13", "eca971d346ea54d95a6e19d5051f900237fafdaa", "3aebc29ed1fccc4a6f2f2010fb8e57882406b528", ] _expected_new_releases_first_visit = { "c92b2ad9e70ef1dce455e8fe1d8e41b92512cc08": ( "3aebc29ed1fccc4a6f2f2010fb8e57882406b528" ) } +@pytest.fixture(autouse=True, scope="function") +def lower_sample_rate(mocker): + """Lower the number of entries per discovery sample so the minimum threshold + for discovery is hit in tests without creating huge test data""" + mocker.patch("swh.loader.package.loader.discovery.SAMPLE_SIZE", 1) + + def test_archive_visit_with_no_artifact_found(swh_storage, requests_mock_datadir): url = URL unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz" loader = ArchiveLoader( swh_storage, url, artifacts=[ { "time": 944729610, "url": unknown_artifact_url, # unknown artifact "length": 221837, "filename": "8sync-0.1.0.tar.gz", "version": "0.1.0", } ], ) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert 
actual_load_status["snapshot_id"] is not None stats = get_stats(swh_storage) assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert_last_visit_matches(swh_storage, url, status="partial", type="tar") +def test_archive_visit_with_skipped_content(swh_storage, requests_mock_datadir): + """With no prior visit, load a gnu project and set the max content size + to something low to check that the loader skips "big" content.""" + loader = ArchiveLoader( + swh_storage, URL, artifacts=GNU_ARTIFACTS[:1], max_content_size=10 * 1024 + ) + + actual_load_status = loader.load() + assert actual_load_status["status"] == "eventful" + + expected_snapshot_first_visit_id = hash_to_bytes( + "9efecc835e8f99254934f256b5301b94f348fd17" + ) + + assert actual_load_status["snapshot_id"] == hash_to_hex( + expected_snapshot_first_visit_id + ) + + assert_last_visit_matches(swh_storage, URL, status="full", type="tar") + + _expected_new_non_skipped_contents_first_visit = [ + "ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62", + "809788434b433eb2e3cfabd5d591c9a659d5e3d8", + "1572607d456d7f633bc6065a2b3048496d679a31", + "27de3b3bc6545d2a797aeeb4657c0e215a0c2e55", + "fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac", + "4f9709e64a9134fe8aefb36fd827b84d8b617ab5", + "84fb589b554fcb7f32b806951dcf19518d67b08f", + "3046e5d1f70297e2a507b98224b6222c9688d610", + "e08441aeab02704cfbd435d6445f7c072f8f524e", + "49d4c0ce1a16601f1e265d446b6c5ea6b512f27c", + "7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68", + "f0c97052e567948adf03e641301e9983c478ccff", + "2e6db43f5cd764e677f416ff0d0c78c7a82ef19b", + "e9258d81faf5881a2f96a77ba609396f82cb97ad", + "7350628ccf194c2c3afba4ac588c33e3f3ac778d", + "0057bec9b5422aff9256af240b177ac0e3ac2608", + "6b5cc594ac466351450f7f64a0b79fdaf4435ad3", + ] + + _expected_new_skipped_contents_first_visit = [ + "1170cf105b04b7e2822a0e09d2acf71da7b9a130", + "2b8d0d0b43a1078fc708930c8ddc2956a86c566e", + "edeb33282b2bffa0e608e9d2fd960fd08093c0ea", + "d64e64d4c73679323f8d4cde2643331ba6c20af9", + "7a756602914be889c0a2d3952c710144b3e64cb0", + "8624bcdae55baeef00cd11d5dfcfa60f68710a02", + "f67935bc3a83a67259cda4b2d43373bd56703844", + "7d7c6c8c5ebaeff879f61f37083a3854184f6c41", + "b99fec102eb24bffd53ab61fc30d59e810f116a2", + "7fb724242e2b62b85ca64190c31dcae5303e19b3", + "0bb892d9391aa706dc2c3b1906567df43cbe06a2", + ] + + # Check that the union of both sets make up the original set (without skipping) + union = set(_expected_new_non_skipped_contents_first_visit) | set( + _expected_new_skipped_contents_first_visit + ) + assert union == set(_expected_new_contents_first_visit) + + stats = get_stats(swh_storage) + assert { + "content": len(_expected_new_non_skipped_contents_first_visit), + "directory": len(_expected_new_directories_first_visit), + "origin": 1, + "origin_visit": 1, + "release": len(_expected_new_releases_first_visit), + "revision": 0, + "skipped_content": len(_expected_new_skipped_contents_first_visit), + "snapshot": 1, + } == stats + + release_id = hash_to_bytes(list(_expected_new_releases_first_visit)[0]) + expected_snapshot = Snapshot( + id=expected_snapshot_first_visit_id, + branches={ + b"HEAD": SnapshotBranch( + target_type=TargetType.ALIAS, + target=b"releases/0.1.0", + ), + b"releases/0.1.0": SnapshotBranch( + target_type=TargetType.RELEASE, + target=release_id, + ), + }, + ) + check_snapshot(expected_snapshot, swh_storage) + + assert swh_storage.release_get([release_id])[0] == Release( + id=release_id, + name=b"0.1.0", + message=( 
+ b"Synthetic release for archive at " + b"https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz\n" + ), + target=hash_to_bytes("3aebc29ed1fccc4a6f2f2010fb8e57882406b528"), + target_type=ObjectType.DIRECTORY, + synthetic=True, + author=Person.from_fullname(b""), + date=TimestampWithTimezone.from_datetime( + datetime.datetime(1999, 12, 9, 8, 53, 30, tzinfo=datetime.timezone.utc) + ), + ) + + expected_contents = map( + hash_to_bytes, _expected_new_non_skipped_contents_first_visit + ) + assert list(swh_storage.content_missing_per_sha1(expected_contents)) == [] + + expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit) + assert list(swh_storage.directory_missing(expected_dirs)) == [] + + expected_rels = map(hash_to_bytes, _expected_new_releases_first_visit) + assert list(swh_storage.release_missing(expected_rels)) == [] + + def test_archive_visit_with_release_artifact_no_prior_visit( swh_storage, requests_mock_datadir ): """With no prior visit, load a gnu project ends up with 1 snapshot""" loader = ArchiveLoader(swh_storage, URL, artifacts=GNU_ARTIFACTS[:1]) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" expected_snapshot_first_visit_id = hash_to_bytes( "9efecc835e8f99254934f256b5301b94f348fd17" ) assert actual_load_status["snapshot_id"] == hash_to_hex( expected_snapshot_first_visit_id ) assert_last_visit_matches(swh_storage, URL, status="full", type="tar") stats = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "release": len(_expected_new_releases_first_visit), "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats release_id = hash_to_bytes(list(_expected_new_releases_first_visit)[0]) expected_snapshot = Snapshot( id=expected_snapshot_first_visit_id, branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.1.0", ), b"releases/0.1.0": SnapshotBranch( target_type=TargetType.RELEASE, target=release_id, ), }, ) check_snapshot(expected_snapshot, swh_storage) assert swh_storage.release_get([release_id])[0] == Release( id=release_id, name=b"0.1.0", message=( b"Synthetic release for archive at " b"https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz\n" ), target=hash_to_bytes("3aebc29ed1fccc4a6f2f2010fb8e57882406b528"), target_type=ObjectType.DIRECTORY, synthetic=True, author=Person.from_fullname(b""), date=TimestampWithTimezone.from_datetime( datetime.datetime(1999, 12, 9, 8, 53, 30, tzinfo=datetime.timezone.utc) ), ) expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit) assert list(swh_storage.content_missing_per_sha1(expected_contents)) == [] expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit) assert list(swh_storage.directory_missing(expected_dirs)) == [] expected_rels = map(hash_to_bytes, _expected_new_releases_first_visit) assert list(swh_storage.release_missing(expected_rels)) == [] def test_archive_2_visits_without_change(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot""" url = URL loader = ArchiveLoader(swh_storage, url, artifacts=GNU_ARTIFACTS[:1]) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(swh_storage, url, status="full", type="tar") actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" assert actual_load_status2["snapshot_id"] is 
not None assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"] assert_last_visit_matches(swh_storage, url, status="full", type="tar") urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 def test_archive_2_visits_with_new_artifact(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot""" url = URL artifact1 = GNU_ARTIFACTS[0] loader = ArchiveLoader(swh_storage, url, [artifact1]) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(swh_storage, url, status="full", type="tar") stats = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit), "directory": len(_expected_new_directories_first_visit), "origin": 1, "origin_visit": 1, "release": len(_expected_new_releases_first_visit), "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 artifact2 = GNU_ARTIFACTS[1] loader2 = ArchiveLoader(swh_storage, url, [artifact1, artifact2]) stats2 = get_stats(swh_storage) assert stats == stats2 # ensure we share the storage actual_load_status2 = loader2.load() assert actual_load_status2["status"] == "eventful" assert actual_load_status2["snapshot_id"] is not None stats2 = get_stats(swh_storage) assert { "content": len(_expected_new_contents_first_visit) + 14, "directory": len(_expected_new_directories_first_visit) + 8, "origin": 1, "origin_visit": 1 + 1, "release": len(_expected_new_releases_first_visit) + 1, "revision": 0, "skipped_content": 0, "snapshot": 1 + 1, } == stats2 assert_last_visit_matches(swh_storage, url, status="full", type="tar") urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] # 1 artifact (2nd time no modification) + 1 new artifact assert len(urls) == 2 def test_archive_2_visits_without_change_not_gnu(swh_storage, requests_mock_datadir): """Load a project archive (not gnu) ends up with 1 snapshot""" url = "https://something.else.org/8sync/" artifacts = [ # this is not a gnu artifact { "time": "1999-12-09T09:53:30+00:00", # it's also not a timestamp "sha256": "d5d1051e59b2be6f065a9fc6aedd3a391e44d0274b78b9bb4e2b57a09134dbe4", # noqa # keep a gnu artifact reference to avoid adding other test files "url": "https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz", "length": 238466, "filename": "8sync-0.2.0.tar.gz", "version": "0.2.0", } ] # Here the loader defines the id_keys to use for existence in the snapshot # It's not the default archive loader which loader = ArchiveLoader( swh_storage, url, artifacts=artifacts, extid_manifest_format="$sha256 $length $url", ) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(swh_storage, url, status="full", type="tar") actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" assert actual_load_status2["snapshot_id"] == actual_load_status["snapshot_id"] assert_last_visit_matches(swh_storage, url, status="full", type="tar") urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("https://ftp.gnu.org") ] assert len(urls) == 1 def test_archive_extid(): """Compute primary key should return the right identity""" @attr.s 
class TestPackageInfo(ArchivePackageInfo): a = attr.ib() b = attr.ib() metadata = GNU_ARTIFACTS[0] p_info = TestPackageInfo( raw_info={**metadata, "a": 1, "b": 2}, a=1, b=2, **metadata, ) for manifest_format, expected_manifest in [ (string.Template("$a $b"), b"1 2"), (string.Template(""), b""), (None, "{time} {length} {version} {url}".format(**metadata).encode()), ]: actual_id = p_info.extid(manifest_format=manifest_format) assert actual_id == ( "package-manifest-sha256", 0, hashlib.sha256(expected_manifest).digest(), ) with pytest.raises(KeyError): p_info.extid(manifest_format=string.Template("$a $unknown_key")) def test_archive_snapshot_append(swh_storage, requests_mock_datadir): # first loading with a first artifact artifact1 = GNU_ARTIFACTS[0] loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(swh_storage, URL, status="full", type="tar") # check expected snapshot snapshot = loader.last_snapshot() assert len(snapshot.branches) == 2 branch_artifact1_name = f"releases/{artifact1['version']}".encode() assert b"HEAD" in snapshot.branches assert branch_artifact1_name in snapshot.branches assert snapshot.branches[b"HEAD"].target == branch_artifact1_name # second loading with a second artifact artifact2 = GNU_ARTIFACTS[1] loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(swh_storage, URL, status="full", type="tar") # check expected snapshot, should contain a new branch and the # branch for the first artifact snapshot = loader.last_snapshot() assert len(snapshot.branches) == 3 branch_artifact2_name = f"releases/{artifact2['version']}".encode() assert b"HEAD" in snapshot.branches assert branch_artifact2_name in snapshot.branches assert branch_artifact1_name in snapshot.branches assert snapshot.branches[b"HEAD"].target == branch_artifact2_name def test_archive_snapshot_append_branch_override(swh_storage, requests_mock_datadir): # first loading for a first artifact artifact1 = GNU_ARTIFACTS[0] loader = ArchiveLoader(swh_storage, URL, [artifact1], snapshot_append=True) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(swh_storage, URL, status="full", type="tar") # check expected snapshot snapshot = loader.last_snapshot() assert len(snapshot.branches) == 2 branch_artifact1_name = f"releases/{artifact1['version']}".encode() assert branch_artifact1_name in snapshot.branches branch_target_first_visit = snapshot.branches[branch_artifact1_name].target # second loading for a second artifact with same version as the first one # but with different tarball content artifact2 = dict(GNU_ARTIFACTS[0]) artifact2["url"] = GNU_ARTIFACTS[1]["url"] artifact2["time"] = GNU_ARTIFACTS[1]["time"] artifact2["length"] = GNU_ARTIFACTS[1]["length"] loader = ArchiveLoader(swh_storage, URL, [artifact2], snapshot_append=True) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None assert_last_visit_matches(swh_storage, URL, status="full", type="tar") # check expected snapshot, should contain the same branch as previously # but with different target snapshot = 
loader.last_snapshot() assert len(snapshot.branches) == 2 assert branch_artifact1_name in snapshot.branches branch_target_second_visit = snapshot.branches[branch_artifact1_name].target assert branch_target_first_visit != branch_target_second_visit @pytest.fixture def not_gzipped_tarball_bytes(datadir): return Path(datadir, "not_gzipped_tarball.tar.gz").read_bytes() def test_archive_not_gzipped_tarball( swh_storage, requests_mock, not_gzipped_tarball_bytes ): """Check that a tarball erroneously marked as gzip compressed can still be downloaded and processed. """ filename = "not_gzipped_tarball.tar.gz" url = f"https://example.org/ftp/{filename}" requests_mock.get( url, [ { "exc": ContentDecodingError, }, { "body": BytesIO(not_gzipped_tarball_bytes), }, ], ) loader = ArchiveLoader( swh_storage, url, artifacts=[ { "time": 944729610, "url": url, - "length": 221837, + "length": 778240, "filename": filename, "version": "0.1.0", } ], ) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] is not None snapshot = loader.last_snapshot() assert len(snapshot.branches) == 2 assert b"releases/0.1.0" in snapshot.branches def test_archive_visit_no_time_for_tarball(swh_storage, requests_mock_datadir): artifacts = copy.deepcopy(GNU_ARTIFACTS) for artifact in artifacts: artifact["time"] = None loader = ArchiveLoader(swh_storage, URL, artifacts=artifacts) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert_last_visit_matches(swh_storage, URL, status="full", type="tar") diff --git a/swh/loader/package/archive/tests/test_tasks.py b/swh/loader/package/archive/tests/test_tasks.py index 0d2448f..dc56908 100644 --- a/swh/loader/package/archive/tests/test_tasks.py +++ b/swh/loader/package/archive/tests/test_tasks.py @@ -1,92 +1,52 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict - -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass +NAMESPACE = "swh.loader.package.archive" @pytest.fixture def archive_lister(): return Lister(name="archive-lister", instance_name="example", id=uuid.uuid4()) @pytest.fixture def archive_listed_origin(archive_lister): return ListedOrigin( lister_id=archive_lister.id, url="https://example.org/archives", visit_type="tar", extra_loader_arguments={ - "artifacts": [], + "artifacts": [ + { + "time": "2010-08-14T01:41:56", + "url": "https://example.org/archives/project-v1.0.0.tar.gz", + "filename": "project-v1.0.0.tar.gz", + "version": "1.0.0", + "length": 2500, + } + ], "snapshot_append": True, }, ) -def test_tasks_archive_loader( - mocker, - swh_scheduler_celery_app, -): - mock_load = mocker.patch("swh.loader.package.archive.loader.ArchiveLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.archive.tasks.LoadArchive", - kwargs=dict(url="https://gnu.org/", artifacts=[]), - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_archive_loader_snapshot_append( - mocker, - swh_scheduler_celery_app, -): - mock_load = 
mocker.patch("swh.loader.package.archive.loader.ArchiveLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.archive.tasks.LoadArchive", - kwargs=dict(url="https://gnu.org/", artifacts=[], snapshot_append=True), - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_archive_loader_for_listed_origin( - mocker, - swh_scheduler_celery_app, +def test_archive_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, archive_lister, archive_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.archive.loader.ArchiveLoader.load") - mock_load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(archive_listed_origin, archive_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.archive.tasks.LoadArchive", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.ArchiveLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadArchive", + lister=archive_lister, + listed_origin=archive_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/aur/tests/test_tasks.py b/swh/loader/package/aur/tests/test_tasks.py index b3ebafa..f814dc1 100644 --- a/swh/loader/package/aur/tests/test_tasks.py +++ b/swh/loader/package/aur/tests/test_tasks.py @@ -1,38 +1,57 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +import uuid -def test_tasks_aur_loader( - mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config -): - mock_load = mocker.patch("swh.loader.package.aur.loader.AurLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.aur.tasks.LoadAur", - kwargs=dict( - url="https://somewhere/some-package.git", - artifacts=[ +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +NAMESPACE = "swh.loader.package.aur" + + +@pytest.fixture +def aur_lister(): + return Lister(name="aur", instance_name="example", id=uuid.uuid4()) + + +@pytest.fixture +def aur_listed_origin(aur_lister): + return ListedOrigin( + lister_id=aur_lister.id, + url="https://somewhere/some-package.git", + visit_type="aur", + extra_loader_arguments={ + "artifacts": [ { "filename": "some-package.tar.gz", "url": "https://somewhere/some-package.tar.gz", "version": "0.0.1", } ], - aur_metadata=[ + "aur_metadata": [ { "version": "0.0.1", "project_url": "https://somewhere/some-package", "last_update": "1970-01-01T21:08:14", "pkgname": "some-package", } ], - ), + }, + ) + + +def test_aur_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + aur_lister, + aur_listed_origin, +): + + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.AurLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadAur", + lister=aur_lister, + listed_origin=aur_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/cpan/__init__.py b/swh/loader/package/cpan/__init__.py new file 
mode 100644 index 0000000..dcf6e8c --- /dev/null +++ b/swh/loader/package/cpan/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + + +from typing import Any, Mapping + + +def register() -> Mapping[str, Any]: + """Register the current worker module's definition""" + from .loader import CpanLoader + + return { + "task_modules": [f"{__name__}.tasks"], + "loader": CpanLoader, + } diff --git a/swh/loader/package/cpan/loader.py b/swh/loader/package/cpan/loader.py new file mode 100644 index 0000000..cfbcec9 --- /dev/null +++ b/swh/loader/package/cpan/loader.py @@ -0,0 +1,180 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from datetime import datetime +import logging +from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple + +import attr +import iso8601 +from packaging.version import parse as parse_version +from requests import HTTPError + +from swh.loader.package.loader import ( + BasePackageInfo, + PackageLoader, + RawExtrinsicMetadataCore, +) +from swh.loader.package.utils import EMPTY_AUTHOR, Person, get_url_body, release_name +from swh.model.model import ( + MetadataAuthority, + MetadataAuthorityType, + ObjectType, + Release, + Sha1Git, + TimestampWithTimezone, +) +from swh.storage.interface import StorageInterface + +logger = logging.getLogger(__name__) + + +@attr.s +class CpanPackageInfo(BasePackageInfo): + + name = attr.ib(type=str) + """Name of the package""" + + version = attr.ib(type=str) + """Current version""" + + last_modified = attr.ib(type=datetime) + """File last modified date as release date.""" + + author = attr.ib(type=Person) + """Author""" + + +class CpanLoader(PackageLoader[CpanPackageInfo]): + visit_type = "cpan" + + EXTRINSIC_METADATA_URL_PATTERN = "{api_base_url}/release/{author}/{release_name}" + + def __init__( + self, + storage: StorageInterface, + url: str, + api_base_url: str, + artifacts: List[Dict[str, Any]], + module_metadata: List[Dict[str, Any]], + **kwargs, + ): + + super().__init__(storage=storage, url=url, **kwargs) + self.url = url + self.api_base_url = api_base_url + self.artifacts: Dict[str, Dict] = { + artifact["version"]: {k: v for k, v in artifact.items() if k != "version"} + for artifact in artifacts + } + self.module_metadata: Dict[str, Dict] = { + meta["version"]: meta for meta in module_metadata + } + + def get_metadata_authority(self): + return MetadataAuthority( + type=MetadataAuthorityType.FORGE, + url="https://metacpan.org/", + ) + + def get_versions(self) -> Sequence[str]: + """Get all released versions of a Perl package + + Returns: + A sequence of versions + + Example:: + + ["0.1.1", "0.10.2"] + """ + versions = list(self.artifacts.keys()) + versions.sort(key=parse_version) + return versions + + def get_default_version(self) -> str: + """Get the newest release version of a Perl package + + Returns: + A string representing a version + + Example:: + + "0.10.2" + """ + return self.get_versions()[-1] + + def get_package_info(self, version: str) -> Iterator[Tuple[str, CpanPackageInfo]]: + """Get release name and package information from version + + Args: + version: Package version (e.g: "0.1.0") + + 
Returns: + Iterator of tuple (release_name, p_info) + """ + artifact = self.artifacts[version] + metadata = self.module_metadata[version] + + last_modified = iso8601.parse_date(metadata["date"]) + author = ( + Person.from_fullname(metadata["author"].encode()) + if metadata["author"] + else EMPTY_AUTHOR + ) + + try: + extrinsic_metadata_url = self.EXTRINSIC_METADATA_URL_PATTERN.format( + api_base_url=self.api_base_url, + author=metadata["cpan_author"], + release_name=metadata["release_name"], + ) + version_extrinsic_metadata = get_url_body(extrinsic_metadata_url) + except HTTPError: + logger.warning( + "Could not fetch extrinsic_metadata for module %s version %s", + metadata["name"], + version, + ) + version_extrinsic_metadata = None + + directory_extrinsic_metadata = [] + if version_extrinsic_metadata: + directory_extrinsic_metadata.append( + RawExtrinsicMetadataCore( + format="cpan-release-json", + metadata=version_extrinsic_metadata, + ) + ) + + p_info = CpanPackageInfo( + name=metadata["name"], + filename=artifact["filename"], + url=artifact["url"], + version=version, + last_modified=last_modified, + author=author, + checksums=artifact["checksums"], + directory_extrinsic_metadata=directory_extrinsic_metadata, + ) + yield release_name(version), p_info + + def build_release( + self, p_info: CpanPackageInfo, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Release]: + + message = ( + f"Synthetic release for Perl source package {p_info.name} " + f"version {p_info.version}\n" + ) + + return Release( + name=p_info.version.encode(), + author=p_info.author, + date=TimestampWithTimezone.from_datetime(p_info.last_modified), + message=message.encode(), + target_type=ObjectType.DIRECTORY, + target=directory, + synthetic=True, + ) diff --git a/swh/loader/package/cpan/tasks.py b/swh/loader/package/cpan/tasks.py new file mode 100644 index 0000000..07fb36b --- /dev/null +++ b/swh/loader/package/cpan/tasks.py @@ -0,0 +1,14 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from celery import shared_task + +from swh.loader.package.cpan.loader import CpanLoader + + +@shared_task(name=__name__ + ".LoadCpan") +def load_cpan(**kwargs): + """Load packages from Cpan (The Comprehensive Perl Archive Network)""" + return CpanLoader.from_configfile(**kwargs).load() diff --git a/swh/loader/core/__init__.py b/swh/loader/package/cpan/tests/__init__.py similarity index 100% copy from swh/loader/core/__init__.py copy to swh/loader/package/cpan/tests/__init__.py diff --git a/swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.01.tar.gz b/swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.01.tar.gz new file mode 100644 index 0000000..d55941e Binary files /dev/null and b/swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.01.tar.gz differ diff --git a/swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.05.tar.gz b/swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.05.tar.gz new file mode 100644 index 0000000..42517c7 Binary files /dev/null and 
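
For the head release used in the tests below, the `EXTRINSIC_METADATA_URL_PATTERN` above expands to a MetaCPAN release endpoint; a quick sketch of the substitution (values taken from the test fixtures that follow):

url = "{api_base_url}/release/{author}/{release_name}".format(
    api_base_url="https://fastapi.metacpan.org/v1",
    author="JJORE",                              # metadata["cpan_author"]
    release_name="Internals-CountObjects-0.05",  # metadata["release_name"]
)
# -> "https://fastapi.metacpan.org/v1/release/JJORE/Internals-CountObjects-0.05"
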
b/swh/loader/package/cpan/tests/data/https_cpan.metacpan.org/authors_id_J_JJ_JJORE_Internals-CountObjects-0.05.tar.gz differ diff --git a/swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.01 b/swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.01 new file mode 100644 index 0000000..92b2ead --- /dev/null +++ b/swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.01 @@ -0,0 +1,89 @@ +{ + "total" : 1, + "took" : 2, + "release" : { + "date" : "2011-06-05T18:44:02", + "download_url" : "https://cpan.metacpan.org/authors/id/J/JJ/JJORE/Internals-CountObjects-0.01.tar.gz", + "status" : "cpan", + "deprecated" : false, + "archive" : "Internals-CountObjects-0.01.tar.gz", + "stat" : { + "mode" : 33188, + "mtime" : 1307299442, + "gid" : 1009, + "uid" : 1009, + "size" : 52541 + }, + "version_numified" : 0.01, + "abstract" : "Report all allocated perl objects", + "maturity" : "released", + "checksum_sha256" : "c4904fc34954f18783b15bc1424eda032090fb20efa98d8f2e42c3d3ad153376", + "id" : "_YhX3DZlS7XliSAheYHR8UibjPg", + "metadata" : { + "no_index" : { + "directory" : [ + "t", + "xt", + "inc", + "local", + "perl5", + "fatlib", + "example", + "blib", + "examples", + "eg" + ] + }, + "generated_by" : "Dist::Zilla version 4.200000, CPAN::Meta::Converter version 2.102400, CPAN::Meta::Converter version 2.150005", + "license" : [ + "perl_5" + ], + "release_status" : "stable", + "name" : "Internals-CountObjects", + "author" : [ + "Josh Jore " + ], + "dynamic_config" : "0", + "abstract" : "Report all allocated perl objects", + "prereqs" : { + "configure" : { + "requires" : { + "ExtUtils::MakeMaker" : "6.31" + } + }, + "build" : { + "requires" : {} + } + }, + "meta-spec" : { + "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", + "version" : "2" + }, + "version" : "0.01" + }, + "tests" : { + "na" : 0, + "pass" : 115, + "fail" : 0, + "unknown" : 34 + }, + "dependency" : { + "relationship" : "requires", + "version" : "6.31", + "module" : "ExtUtils::MakeMaker", + "phase" : "configure" + }, + "main_module" : "Internals::CountObjects", + "authorized" : true, + "license" : "perl_5", + "provides" : "Internals::CountObjects", + "distribution" : "Internals-CountObjects", + "checksum_md5" : "f178444dad69f126db79ebd76c4e95bd", + "changes_file" : "", + "version" : "0.01", + "author" : "JJORE", + "name" : "Internals-CountObjects-0.01", + "first" : true, + "resources" : {} + } +} diff --git a/swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.05 b/swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.05 new file mode 100644 index 0000000..325b4ce --- /dev/null +++ b/swh/loader/package/cpan/tests/data/https_fastapi.metacpan.org/v1_release_JJORE_Internals-CountObjects-0.05 @@ -0,0 +1,109 @@ +{ + "release" : { + "provides" : "Internals::CountObjects", + "distribution" : "Internals-CountObjects", + "authorized" : true, + "license" : "perl_5", + "author" : "JJORE", + "name" : "Internals-CountObjects-0.05", + "first" : false, + "resources" : { + "bugtracker" : { + "web" : "http://rt.cpan.org/NoAuth/Bugs.html?Dist=Internals-CountObjects", + "mailto" : "bug-Internals-CountObjects@rt.cpan.org" + }, + "repository" : { + "web" : "http://github.com/jbenjore/Internals-CountObjects", + "url" : "git://github.com/jbenjore/Internals-CountObjects.git", + "type" : "git" + }, + "homepage" 
: "http://search.cpan.org/dist/Internals-CountObjects" + }, + "checksum_md5" : "ee751810f504b5a463397f22634467a7", + "version" : "0.05", + "changes_file" : "", + "stat" : { + "size" : 54473, + "mtime" : 1307769811, + "uid" : 1009, + "gid" : 1009, + "mode" : 33188 + }, + "status" : "latest", + "archive" : "Internals-CountObjects-0.05.tar.gz", + "deprecated" : false, + "date" : "2011-06-11T05:23:31", + "download_url" : "https://cpan.metacpan.org/authors/id/J/JJ/JJORE/Internals-CountObjects-0.05.tar.gz", + "metadata" : { + "license" : [ + "perl_5" + ], + "release_status" : "stable", + "generated_by" : "Dist::Zilla version 4.200000, CPAN::Meta::Converter version 2.102400, CPAN::Meta::Converter version 2.150005", + "no_index" : { + "directory" : [ + "t", + "xt", + "inc", + "local", + "perl5", + "fatlib", + "example", + "blib", + "examples", + "eg" + ] + }, + "version" : "0.05", + "meta-spec" : { + "version" : "2", + "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec" + }, + "prereqs" : { + "build" : { + "requires" : { + "ExtUtils::CBuilder" : "0" + } + } + }, + "abstract" : "Report all allocated perl objects", + "resources" : { + "repository" : { + "url" : "git://github.com/jbenjore/Internals-CountObjects.git", + "web" : "http://github.com/jbenjore/Internals-CountObjects", + "type" : "git" + }, + "homepage" : "http://search.cpan.org/dist/Internals-CountObjects", + "bugtracker" : { + "web" : "http://rt.cpan.org/NoAuth/Bugs.html?Dist=Internals-CountObjects", + "mailto" : "bug-Internals-CountObjects@rt.cpan.org" + } + }, + "author" : [ + "Josh Jore " + ], + "dynamic_config" : 0, + "name" : "Internals-CountObjects" + }, + "dependency" : { + "phase" : "build", + "relationship" : "requires", + "version" : "0", + "module" : "ExtUtils::CBuilder" + }, + "tests" : { + "pass" : 491, + "fail" : 0, + "unknown" : 1, + "na" : 0 + }, + "main_module" : "Internals::CountObjects", + "abstract" : "Report all allocated perl objects", + "version_numified" : 0.05, + "maturity" : "released", + "id" : "D3RywifomVjSA3VV8eM_huWKfrk", + "checksum_sha256" : "bbf65021207a7a51c8f8475bc25c4735f49d62744a75d33595e9720731b2b02f" + }, + "took" : 2, + "total" : 1 +} diff --git a/swh/loader/package/cpan/tests/test_cpan.py b/swh/loader/package/cpan/tests/test_cpan.py new file mode 100644 index 0000000..f865372 --- /dev/null +++ b/swh/loader/package/cpan/tests/test_cpan.py @@ -0,0 +1,209 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +# flake8: noqa: B950 + +import json +from pathlib import Path + +import pytest + +from swh.loader.package import __version__ +from swh.loader.package.cpan.loader import CpanLoader +from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats +from swh.model.hashutil import hash_to_bytes +from swh.model.model import ( + Person, + RawExtrinsicMetadata, + Release, + Snapshot, + SnapshotBranch, + TargetType, + TimestampWithTimezone, +) +from swh.model.model import MetadataFetcher +from swh.model.model import ObjectType as ModelObjectType +from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType + +ORIGIN_URL = "https://metacpan.org/dist/Internals-CountObjects" + +API_BASE_URL = "https://fastapi.metacpan.org/v1" + +ORIGIN_ARTIFACTS = [ + { + "url": "https://cpan.metacpan.org/authors/id/J/JJ/JJORE/Internals-CountObjects-0.05.tar.gz", + "filename": 
"CountObjects-0.05.tar.gz", + "version": "0.05", + "length": 632, + "checksums": { + "sha256": "e0ecf6ab4873fa55ff74da22a3c4ae0ab6a1409635c9cd2d6059abbb32be3a6a" + }, + }, + { + "url": "https://cpan.metacpan.org/authors/id/J/JJ/JJORE/Internals-CountObjects-0.01.tar.gz", + "filename": "CountObjects-0.01.tar.gz", + "version": "0.01", + "length": 453, + "checksums": { + "sha256": "a368004ab98c5860a8fd87e0a4c44e4ee2d1b95d9b13597519a0e644c167468a" + }, + }, +] + +ORIGIN_MODULE_METADATA = [ + { + "name": "Internals-CountObjects", + "version": "0.05", + "author": "Josh Jore ", + "cpan_author": "JJORE", + "date": "2011-06-11T05:23:31", + "release_name": "Internals-CountObjects-0.05", + }, + { + "name": "Internals-CountObjects", + "version": "0.01", + "author": "Josh Jore ", + "cpan_author": "JJORE", + "date": "2011-06-05T18:44:02", + "release_name": "Internals-CountObjects-0.01", + }, +] + + +@pytest.fixture +def head_release_original_artifacts_metadata(): + return json.dumps( + [{k: v for k, v in ORIGIN_ARTIFACTS[0].items() if k != "version"}] + ).encode() + + +@pytest.fixture +def head_release_extrinsic_metadata(datadir): + return Path( + datadir, + "https_fastapi.metacpan.org", + "v1_release_JJORE_Internals-CountObjects-0.05", + ).read_bytes() + + +@pytest.fixture +def cpan_loader(requests_mock_datadir, swh_storage): + return CpanLoader( + swh_storage, + url=ORIGIN_URL, + api_base_url=API_BASE_URL, + artifacts=ORIGIN_ARTIFACTS, + module_metadata=ORIGIN_MODULE_METADATA, + ) + + +def test_get_versions(cpan_loader): + assert cpan_loader.get_versions() == ["0.01", "0.05"] + + +def test_get_default_version(cpan_loader): + assert cpan_loader.get_default_version() == "0.05" + + +def test_cpan_loader_load_multiple_version( + cpan_loader, + head_release_original_artifacts_metadata, + head_release_extrinsic_metadata, +): + + load_status = cpan_loader.load() + assert load_status["status"] == "eventful" + assert load_status["snapshot_id"] is not None + + expected_snapshot_id = "848ee8d69d33481c88ab81f6794f6504190f011f" + expected_head_release = "07382fd255ec0fc293b92aeb7e68b3fe31c174f9" + + assert expected_snapshot_id == load_status["snapshot_id"] + + expected_snapshot = Snapshot( + id=hash_to_bytes(load_status["snapshot_id"]), + branches={ + b"releases/0.01": SnapshotBranch( + target=hash_to_bytes("e73aced4cc3d56b32a328d3248b25b052f029df4"), + target_type=TargetType.RELEASE, + ), + b"releases/0.05": SnapshotBranch( + target=hash_to_bytes(expected_head_release), + target_type=TargetType.RELEASE, + ), + b"HEAD": SnapshotBranch( + target=b"releases/0.05", + target_type=TargetType.ALIAS, + ), + }, + ) + + storage = cpan_loader.storage + + check_snapshot(expected_snapshot, storage) + + stats = get_stats(storage) + assert { + "content": 2, + "directory": 4, + "origin": 1, + "origin_visit": 1, + "release": 2, + "revision": 0, + "skipped_content": 0, + "snapshot": 1, + } == stats + + head_release = storage.release_get([hash_to_bytes(expected_head_release)])[0] + + assert head_release == Release( + name=b"0.05", + message=b"Synthetic release for Perl source package Internals-CountObjects version 0.05\n", + target=hash_to_bytes("af3f6a43eaf4b26dbcadb1101e8d81db6d6151e0"), + target_type=ModelObjectType.DIRECTORY, + synthetic=True, + author=Person( + fullname=b"Josh Jore ", + name=b"Josh Jore", + email=b"jjore@cpan.org", + ), + date=TimestampWithTimezone.from_iso8601("2011-06-11T05:23:31+00:00"), + id=hash_to_bytes(expected_head_release), + ) + + assert_last_visit_matches( + storage, + url=ORIGIN_URL, + 
status="full", + type="cpan", + snapshot=expected_snapshot.id, + ) + + release_swhid = CoreSWHID(object_type=ObjectType.RELEASE, object_id=head_release.id) + directory_swhid = ExtendedSWHID( + object_type=ExtendedObjectType.DIRECTORY, object_id=head_release.target + ) + expected_metadata = [ + RawExtrinsicMetadata( + target=directory_swhid, + authority=cpan_loader.get_metadata_authority(), + fetcher=MetadataFetcher( + name="swh.loader.package.cpan.loader.CpanLoader", + version=__version__, + ), + discovery_date=cpan_loader.visit_date, + format="cpan-release-json", + metadata=head_release_extrinsic_metadata, + origin=ORIGIN_URL, + release=release_swhid, + ), + ] + assert ( + cpan_loader.storage.raw_extrinsic_metadata_get( + directory_swhid, + cpan_loader.get_metadata_authority(), + ).results + == expected_metadata + ) diff --git a/swh/loader/package/cpan/tests/test_tasks.py b/swh/loader/package/cpan/tests/test_tasks.py new file mode 100644 index 0000000..55cfa31 --- /dev/null +++ b/swh/loader/package/cpan/tests/test_tasks.py @@ -0,0 +1,52 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import uuid + +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +from .test_cpan import ( + API_BASE_URL, + ORIGIN_ARTIFACTS, + ORIGIN_MODULE_METADATA, + ORIGIN_URL, +) + +NAMESPACE = "swh.loader.package.cpan" + + +@pytest.fixture +def cpan_lister(): + return Lister(name="cpan", instance_name="example", id=uuid.uuid4()) + + +@pytest.fixture +def cpan_listed_origin(cpan_lister): + return ListedOrigin( + lister_id=cpan_lister.id, + url=ORIGIN_URL, + visit_type="cpan", + extra_loader_arguments={ + "api_base_url": API_BASE_URL, + "artifacts": ORIGIN_ARTIFACTS, + "module_metadata": ORIGIN_MODULE_METADATA, + }, + ) + + +def test_cpan_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + cpan_lister, + cpan_listed_origin, +): + + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.CpanLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadCpan", + lister=cpan_lister, + listed_origin=cpan_listed_origin, + ) diff --git a/swh/loader/package/cran/loader.py b/swh/loader/package/cran/loader.py index b7383d6..93997fe 100644 --- a/swh/loader/package/cran/loader.py +++ b/swh/loader/package/cran/loader.py @@ -1,175 +1,176 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from datetime import timezone import logging import os from os import path import re import string from typing import Any, Dict, Iterator, List, Optional, Tuple import attr import dateutil.parser from debian.deb822 import Deb822 from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import release_name from swh.model.model import ObjectType, Person, Release, Sha1Git, TimestampWithTimezone from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) DATE_PATTERN = re.compile(r"^(?P\d{4})-(?P\d{2})$") @attr.s class CRANPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) name = attr.ib(type=str) EXTID_TYPE = "cran-sha256" MANIFEST_FORMAT = string.Template("$version $url") 
@classmethod def from_metadata(cls, a_metadata: Dict[str, Any]) -> "CRANPackageInfo": url = a_metadata["url"] return CRANPackageInfo( url=url, filename=path.basename(url), raw_info=a_metadata, name=a_metadata["package"], version=a_metadata["version"], + checksums=a_metadata.get("checksums", {}), ) class CRANLoader(PackageLoader[CRANPackageInfo]): visit_type = "cran" def __init__( self, storage: StorageInterface, url: str, artifacts: List[Dict], **kwargs: Any ): """Loader constructor. Args: url: Origin url to retrieve cran artifact(s) from artifacts: List of associated artifact for the origin url """ super().__init__(storage=storage, url=url, **kwargs) # explicit what we consider the artifact identity self.artifacts = artifacts def get_versions(self) -> List[str]: versions = [] for artifact in self.artifacts: versions.append(artifact["version"]) return versions def get_default_version(self) -> str: return self.artifacts[-1]["version"] def get_package_info(self, version: str) -> Iterator[Tuple[str, CRANPackageInfo]]: for a_metadata in self.artifacts: p_info = CRANPackageInfo.from_metadata(a_metadata) if version == p_info.version: yield release_name(version), p_info def build_release( self, p_info: CRANPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: # a_metadata is empty metadata = extract_intrinsic_metadata(uncompressed_path) date = parse_date(metadata.get("Date")) author = Person.from_fullname(metadata.get("Maintainer", "").encode()) msg = ( f"Synthetic release for CRAN source package {p_info.name} " f"version {p_info.version}\n" ) return Release( name=p_info.version.encode(), message=msg.encode(), date=date, author=author, target_type=ObjectType.DIRECTORY, target=directory, synthetic=True, ) def parse_debian_control(filepath: str) -> Dict[str, Any]: """Parse debian control at filepath""" metadata: Dict = {} logger.debug("Debian control file %s", filepath) for paragraph in Deb822.iter_paragraphs(open(filepath, "rb")): logger.debug("paragraph: %s", paragraph) metadata.update(**paragraph) logger.debug("metadata parsed: %s", metadata) return metadata def extract_intrinsic_metadata(dir_path: str) -> Dict[str, Any]: """Given an uncompressed path holding the DESCRIPTION file, returns a DESCRIPTION parsed structure as a dict. Cran origins describes their intrinsic metadata within a DESCRIPTION file at the root tree of a tarball. This DESCRIPTION uses a simple file format called DCF, the Debian control format. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. 
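To make the DCF remark above concrete, here is a minimal sketch of parsing a DESCRIPTION paragraph the way parse_debian_control() does; the file content below is hypothetical:

from debian.deb822 import Deb822

description_text = (
    "Package: zprint\n"
    "Version: 0.0.6\n"
    "Date: 2018-08\n"
    "Maintainer: Jane Doe <jane@example.org>\n"
)

metadata = {}
for paragraph in Deb822.iter_paragraphs(description_text.splitlines()):
    metadata.update(**paragraph)

assert metadata["Package"] == "zprint"
# the "YYYY-MM" Date value is later normalized by parse_date()
assert metadata["Date"] == "2018-08"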
Returns: the DESCRIPTION parsed structure as a dict (or empty dict if missing) """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] description_path = os.path.join(dir_path, project_dirname, "DESCRIPTION") if not os.path.exists(description_path): return {} return parse_debian_control(description_path) def parse_date(date: Optional[str]) -> Optional[TimestampWithTimezone]: """Parse a date into a datetime""" assert not date or isinstance(date, str) dt: Optional[datetime.datetime] = None if not date: return None try: specific_date = DATE_PATTERN.match(date) if specific_date: year = int(specific_date.group("year")) month = int(specific_date.group("month")) dt = datetime.datetime(year, month, 1) else: dt = dateutil.parser.parse(date) if not dt.tzinfo: # up for discussion the timezone needs to be set or # normalize_timestamp is not happy: ValueError: normalize_timestamp # received datetime without timezone: 2001-06-08 00:00:00 dt = dt.replace(tzinfo=timezone.utc) except Exception as e: logger.warning("Fail to parse date %s. Reason: %s", date, e) if dt: return TimestampWithTimezone.from_datetime(dt) else: return None diff --git a/swh/loader/package/cran/tests/test_tasks.py b/swh/loader/package/cran/tests/test_tasks.py index e2b5de2..8ea1487 100644 --- a/swh/loader/package/cran/tests/test_tasks.py +++ b/swh/loader/package/cran/tests/test_tasks.py @@ -1,76 +1,43 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict - -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass +NAMESPACE = "swh.loader.package.cran" @pytest.fixture def cran_lister(): - return Lister(name="cran-lister", instance_name="example", id=uuid.uuid4()) + return Lister(name="cran", instance_name="example", id=uuid.uuid4()) @pytest.fixture def cran_listed_origin(cran_lister): return ListedOrigin( lister_id=cran_lister.id, url="https://cran.example.org/project", visit_type="cran", extra_loader_arguments={ "artifacts": [{"version": "1.2.3", "url": "artifact-url"}], }, ) -def test_tasks_cran_loader( - mocker, - swh_scheduler_celery_app, -): - mock_load = mocker.patch("swh.loader.package.cran.loader.CRANLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.cran.tasks.LoadCRAN", - kwargs=dict( - url="some-url", - artifacts=[{"version": "1.2.3", "url": "artifact-url"}], - ), - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_cran_loader_for_listed_origin( - mocker, - swh_scheduler_celery_app, +def test_cran_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, cran_lister, cran_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.cran.loader.CRANLoader.load") - mock_load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(cran_listed_origin, cran_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.cran.tasks.LoadCRAN", - kwargs=task_dict["arguments"]["kwargs"], + 
loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.CRANLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadCRAN", + lister=cran_lister, + listed_origin=cran_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/crates/tests/test_tasks.py b/swh/loader/package/crates/tests/test_tasks.py index b60e2d4..2c1b459 100644 --- a/swh/loader/package/crates/tests/test_tasks.py +++ b/swh/loader/package/crates/tests/test_tasks.py @@ -1,24 +1,43 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +import uuid -def test_tasks_crates_loader( - mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +NAMESPACE = "swh.loader.package.crates" + + +@pytest.fixture +def crates_lister(): + return Lister(name="crates", instance_name="example", id=uuid.uuid4()) + + +@pytest.fixture +def crates_listed_origin(crates_lister): + return ListedOrigin( + lister_id=crates_lister.id, + url="some-url/api/v1/crates/some-package", + visit_type="crates", + extra_loader_arguments={ + "artifacts": [{"version": "0.0.1", "url": "some-package-0.0.1.crate"}], + }, + ) + + +def test_crates_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + crates_lister, + crates_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.crates.loader.CratesLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.crates.tasks.LoadCrates", - kwargs=dict( - url="some-url/api/v1/crates/some-package", - artifacts=[{"version": "0.0.1", "url": "some-package-0.0.1.crate"}], - ), + + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.CratesLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadCrates", + lister=crates_lister, + listed_origin=crates_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/debian/tests/test_tasks.py b/swh/loader/package/debian/tests/test_tasks.py index f55979f..5767da4 100644 --- a/swh/loader/package/debian/tests/test_tasks.py +++ b/swh/loader/package/debian/tests/test_tasks.py @@ -1,65 +1,41 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict - -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass +NAMESPACE = "swh.loader.package.debian" @pytest.fixture def debian_lister(): - return Lister(name="debian-lister", instance_name="example", id=uuid.uuid4()) + return Lister(name="debian", instance_name="example", id=uuid.uuid4()) @pytest.fixture def debian_listed_origin(debian_lister): return ListedOrigin( lister_id=debian_lister.id, url="https://debian.example.org/package", visit_type="debian", extra_loader_arguments={"packages": {}}, ) -def 
test_tasks_debian_loader(mocker, swh_scheduler_celery_app): - mock_load = mocker.patch("swh.loader.package.debian.loader.DebianLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.debian.tasks.LoadDebian", - kwargs=dict(url="some-url", packages={}), - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_debian_loader_for_listed_origin( - mocker, swh_scheduler_celery_app, debian_lister, debian_listed_origin +def test_debian_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + debian_lister, + debian_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.debian.loader.DebianLoader.load") - mock_load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(debian_listed_origin, debian_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.debian.tasks.LoadDebian", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.DebianLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadDebian", + lister=debian_lister, + listed_origin=debian_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/deposit/tests/test_tasks.py b/swh/loader/package/deposit/tests/test_tasks.py index 1d82756..d56c169 100644 --- a/swh/loader/package/deposit/tests/test_tasks.py +++ b/swh/loader/package/deposit/tests/test_tasks.py @@ -1,80 +1,41 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict - -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass +NAMESPACE = "swh.loader.package.deposit" @pytest.fixture def deposit_lister(): - return Lister(name="deposit-lister", instance_name="example", id=uuid.uuid4()) + return Lister(name="deposit", instance_name="example", id=uuid.uuid4()) @pytest.fixture def deposit_listed_origin(deposit_lister): return ListedOrigin( lister_id=deposit_lister.id, url="https://example.org/project", visit_type="deposit", extra_loader_arguments={"deposit_id": "some-d-id"}, ) -def test_tasks_deposit_loader( - mocker, - swh_scheduler_celery_app, -): - mock_loader = mocker.patch( - "swh.loader.package.deposit.loader.DepositLoader.from_configfile" - ) - mock_loader.return_value = mock_loader - mock_loader.load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.deposit.tasks.LoadDeposit", - kwargs=dict( - url="some-url", - deposit_id="some-d-id", - ), - ) - assert res - res.wait() - assert res.successful() - assert mock_loader.called - assert res.result == {"status": "eventful"} - - -def test_tasks_deposit_loader_for_listed_origin( - mocker, - swh_scheduler_celery_app, +def test_deposit_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, deposit_lister, deposit_listed_origin, ): - mock_loader = mocker.patch( - "swh.loader.package.deposit.loader.DepositLoader.from_configfile" - ) - mock_loader.return_value 
= mock_loader - mock_loader.load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(deposit_listed_origin, deposit_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.deposit.tasks.LoadDeposit", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.DepositLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadDeposit", + lister=deposit_lister, + listed_origin=deposit_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_loader.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/golang/loader.py b/swh/loader/package/golang/loader.py index 0bc68a4..8a60b68 100644 --- a/swh/loader/package/golang/loader.py +++ b/swh/loader/package/golang/loader.py @@ -1,109 +1,109 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import re from typing import Iterator, Optional, Sequence, Tuple import attr from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import ( EMPTY_AUTHOR, + cached_method, get_url_body, release_name, - cached_method, ) from swh.model.model import ObjectType, Release, Sha1Git, TimestampWithTimezone from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) def _uppercase_encode(url: str) -> str: return re.sub("([A-Z]{1})", r"!\1", url).lower() @attr.s class GolangPackageInfo(BasePackageInfo): name = attr.ib(type=str) timestamp = attr.ib(type=Optional[TimestampWithTimezone]) class GolangLoader(PackageLoader[GolangPackageInfo]): """Load Golang module zip file into SWH archive.""" visit_type = "golang" GOLANG_PKG_DEV_URL = "https://pkg.go.dev" GOLANG_PROXY_URL = "https://proxy.golang.org" def __init__( self, storage: StorageInterface, url: str, max_content_size: Optional[int] = None, **kwargs, ): super().__init__(storage, url, max_content_size=max_content_size, **kwargs) # The lister saves human-usable URLs, so we translate them to proxy URLs # for use in the loader. 
# This URL format is detailed in https://go.dev/ref/mod#goproxy-protocol assert url.startswith( self.GOLANG_PKG_DEV_URL ), "Go package URL (%s) not from %s" % (url, self.GOLANG_PKG_DEV_URL) self.name = url[len(self.GOLANG_PKG_DEV_URL) + 1 :] self.url = url.replace(self.GOLANG_PKG_DEV_URL, self.GOLANG_PROXY_URL) self.url = _uppercase_encode(self.url) def get_versions(self) -> Sequence[str]: versions = get_url_body(f"{self.url}/@v/list").decode().splitlines() # some go packages only have a development version not listed by the endpoint above, # so ensure to return it or it will be missed by the golang loader default_version = self.get_default_version() if default_version not in versions: versions.append(default_version) return versions @cached_method def get_default_version(self) -> str: latest = get_url_body(f"{self.url}/@latest") return json.loads(latest)["Version"] def _raw_info(self, version: str) -> dict: url = f"{self.url}/@v/{_uppercase_encode(version)}.info" return json.loads(get_url_body(url)) def get_package_info(self, version: str) -> Iterator[Tuple[str, GolangPackageInfo]]: # Encode the name because creating nested folders can become problematic encoded_name = self.name.replace("/", "__") filename = f"{encoded_name}-{version}.zip" timestamp = TimestampWithTimezone.from_iso8601(self._raw_info(version)["Time"]) p_info = GolangPackageInfo( url=f"{self.url}/@v/{version}.zip", filename=filename, version=version, timestamp=timestamp, name=self.name, ) yield release_name(version), p_info def build_release( self, p_info: GolangPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: msg = ( f"Synthetic release for Golang source package {p_info.name} " f"version {p_info.version}\n" ) return Release( name=p_info.version.encode(), message=msg.encode(), date=p_info.timestamp, author=EMPTY_AUTHOR, # Go modules offer very little metadata target_type=ObjectType.DIRECTORY, target=directory, synthetic=True, ) diff --git a/swh/loader/package/golang/tests/test_tasks.py b/swh/loader/package/golang/tests/test_tasks.py index 18819b9..02b1295 100644 --- a/swh/loader/package/golang/tests/test_tasks.py +++ b/swh/loader/package/golang/tests/test_tasks.py @@ -1,21 +1,40 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +import uuid -def test_tasks_golang_loader( - mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +NAMESPACE = "swh.loader.package.golang" + + +@pytest.fixture +def golang_lister(): + return Lister(name="golang", instance_name="example", id=uuid.uuid4()) + + +@pytest.fixture +def golang_listed_origin(golang_lister): + return ListedOrigin( + lister_id=golang_lister.id, + url="https://pkg.go.dev/golang.org/whatever/package", + visit_type="golang", + ) + + +def test_golang_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + golang_lister, + golang_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.golang.loader.GolangLoader.load") - mock_load.return_value = {"status": "eventful"} - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.golang.tasks.LoadGolang", - kwargs={"url": "https://pkg.go.dev/golang.org/whatever/package"}, + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.GolangLoader", + 
task_function_name=f"{NAMESPACE}.tasks.LoadGolang", + lister=golang_lister, + listed_origin=golang_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index 4cc6430..4af88a0 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,1111 +1,1137 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +import asyncio import datetime import hashlib from itertools import islice import json import logging import os import string import sys import tempfile from typing import ( Any, Dict, Generic, Iterator, List, Mapping, Optional, Sequence, Set, Tuple, TypeVar, ) import attr from requests.exceptions import ContentDecodingError import sentry_sdk from swh.core.tarball import uncompress +from swh.loader.core import discovery from swh.loader.core.loader import BaseLoader from swh.loader.exception import NotFound from swh.loader.package.utils import download from swh.model import from_disk from swh.model.hashutil import hash_to_hex from swh.model.model import ( ExtID, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, ) from swh.model.model import ( Origin, OriginVisit, OriginVisitStatus, RawExtrinsicMetadata, Release, Revision, Sha1Git, Snapshot, ) from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.algos.snapshot import snapshot_get_latest from swh.storage.interface import StorageInterface from swh.storage.utils import now logger = logging.getLogger(__name__) SWH_METADATA_AUTHORITY = MetadataAuthority( type=MetadataAuthorityType.REGISTRY, url="https://softwareheritage.org/", metadata={}, ) """Metadata authority for extrinsic metadata generated by Software Heritage. Used for metadata on "original artifacts", ie. length, filename, and checksums of downloaded archive files.""" PartialExtID = Tuple[str, int, bytes] """The ``extid_type`` and ``extid`` fields of an :class:`ExtID` object.""" @attr.s class RawExtrinsicMetadataCore: """Contains the core of the metadata extracted by a loader, that will be used to build a full RawExtrinsicMetadata object by adding object identifier, context, and provenance information.""" format = attr.ib(type=str) metadata = attr.ib(type=bytes) discovery_date = attr.ib(type=Optional[datetime.datetime], default=None) """Defaults to the visit date.""" @attr.s class BasePackageInfo: """Compute the primary key for a dict using the id_keys as primary key composite. Args: d: A dict entry to compute the primary key on id_keys: Sequence of keys to use as primary key Returns: The identity for that dict entry """ url = attr.ib(type=str) filename = attr.ib(type=Optional[str]) version = attr.ib(type=str) """Version name/number.""" MANIFEST_FORMAT: Optional[string.Template] = None """If not None, used by the default extid() implementation to format a manifest, before hashing it to produce an ExtID.""" EXTID_TYPE: str = "package-manifest-sha256" EXTID_VERSION: int = 0 # The following attribute has kw_only=True in order to allow subclasses # to add attributes. Without kw_only, attributes without default values cannot # go after attributes with default values. 
# See directory_extrinsic_metadata = attr.ib( type=List[RawExtrinsicMetadataCore], default=[], kw_only=True, ) """:term:`extrinsic metadata` collected by the loader, that will be attached to the loaded directory and added to the Metadata storage.""" + checksums = attr.ib(type=Dict[str, str], default={}, kw_only=True) + """Dictionary holding package tarball checksums for integrity check after + download, keys are hash algorithm names and values are checksums in + hexadecimal format. The supported algorithms are defined in the + :data:`swh.model.hashutil.ALGORITHMS` set.""" + # TODO: add support for metadata for releases and contents def extid(self) -> Optional[PartialExtID]: """Returns a unique intrinsic identifier of this package info, or None if this package info is not 'deduplicatable' (meaning that we will always load it, instead of checking the ExtID storage to see if we already did)""" if self.MANIFEST_FORMAT is None: return None else: manifest = self.MANIFEST_FORMAT.substitute( {k: str(v) for (k, v) in attr.asdict(self).items()} ) return ( self.EXTID_TYPE, self.EXTID_VERSION, hashlib.sha256(manifest.encode()).digest(), ) TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo) class PackageLoader(BaseLoader, Generic[TPackageInfo]): def __init__(self, storage: StorageInterface, url: str, **kwargs: Any): """Loader's constructor. This raises exception if the minimal required configuration is missing (cf. fn:`check` method). Args: storage: Storage instance url: Origin url to load data from """ super().__init__(storage=storage, origin_url=url, **kwargs) self.status_load = "" self.status_visit = "" def load_status(self) -> Dict[str, str]: """Detailed loading status.""" return { "status": self.status_load, } def visit_status(self) -> str: """Detailed visit status.""" return self.status_visit def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Raises: class:`swh.loader.exception.NotFound` error when failing to read the published package versions. Returns: Sequence of published versions """ return [] def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]: """Given a release version of a package, retrieve the associated package information for such version. Args: version: Package version Returns: (branch name, package metadata) """ yield from {} def build_release( self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: """Build the release from the archive metadata (extrinsic artifact metadata) and the intrinsic metadata. Args: p_info: Package information uncompressed_path: Artifact uncompressed path on disk """ raise NotImplementedError("build_release") def get_default_version(self) -> str: """Retrieve the latest release version if any. 
Returns: Latest version """ return "" def last_snapshot(self) -> Optional[Snapshot]: """Retrieve the last snapshot out of the last visit.""" return snapshot_get_latest(self.storage, self.origin.url) def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]: return p_info.extid() def _get_known_extids( self, packages_info: List[TPackageInfo] ) -> Dict[PartialExtID, List[CoreSWHID]]: """Compute the ExtIDs from new PackageInfo objects, searches which are already loaded in the archive, and returns them if any.""" # Compute the ExtIDs of all the new packages, grouped by extid type new_extids: Dict[Tuple[str, int], List[bytes]] = {} for p_info in packages_info: res = p_info.extid() if res is not None: (extid_type, extid_version, extid_extid) = res new_extids.setdefault((extid_type, extid_version), []).append( extid_extid ) # For each extid type, call extid_get_from_extid() with all the extids of # that type, and store them in the '(type, extid) -> target' map. known_extids: Dict[PartialExtID, List[CoreSWHID]] = {} for ((extid_type, extid_version), extids) in new_extids.items(): for extid in self.storage.extid_get_from_extid( extid_type, extids, version=extid_version ): if extid is not None: key = (extid.extid_type, extid_version, extid.extid) known_extids.setdefault(key, []).append(extid.target) return known_extids def resolve_object_from_extids( self, known_extids: Dict[PartialExtID, List[CoreSWHID]], p_info: TPackageInfo, whitelist: Set[Sha1Git], ) -> Optional[CoreSWHID]: """Resolve the revision/release from known ExtIDs and a package info object. If the artifact has already been downloaded, this will return the existing release (or revision) targeting that uncompressed artifact directory. Otherwise, this returns None. Args: known_extids: Dict built from a list of ExtID, with the target as value p_info: Package information whitelist: Any ExtID with target not in this set is filtered out Returns: None or release/revision SWHID """ new_extid = p_info.extid() if new_extid is None: return None extid_targets = set() for extid_target in known_extids.get(new_extid, []): if extid_target.object_id not in whitelist: # There is a known ExtID for this package, but its target is not # in the snapshot. # This can happen for three reasons: # # 1. a loader crashed after writing the ExtID, but before writing # the snapshot # 2. some other loader loaded the same artifact, but produced # a different revision, causing an additional ExtID object # to be written. We will probably find this loader's ExtID # in a future iteration of this loop. # Note that for now, this is impossible, as each loader has a # completely different extid_type, but this is an implementation # detail of each loader. # 3. we took a snapshot, then the package disappeared, # then we took another snapshot, and the package reappeared # # In case of 1, we must actually load the package now, # so let's do it. # TODO: detect when we are in case 3 using release_missing # or revision_missing instead of the snapshot. continue elif extid_target.object_type in (ObjectType.RELEASE, ObjectType.REVISION): extid_targets.add(extid_target) else: # Note that this case should never be reached unless there is a # collision between a revision hash and some non-revision object's # hash, but better safe than sorry. logger.warning( "%s is in the whitelist, but is not a revision/release.", hash_to_hex(extid_target.object_type), ) if extid_targets: # This is a known package version, as we have an extid to reference it. 
# Let's return one of them. # If there is a release extid, return it. release_extid_targets = { extid_target for extid_target in extid_targets if extid_target.object_type == ObjectType.RELEASE } # Exclude missing targets missing_releases = { CoreSWHID(object_type=ObjectType.RELEASE, object_id=id_) for id_ in self.storage.release_missing( [swhid.object_id for swhid in release_extid_targets] ) } if missing_releases: err_message = "Found ExtIDs pointing to missing releases" logger.error(err_message + ": %s", missing_releases) with sentry_sdk.push_scope() as scope: scope.set_extra( "missing_releases", [str(x) for x in missing_releases] ) sentry_sdk.capture_message(err_message, "error") release_extid_targets -= missing_releases extid_target2 = self.select_extid_target(p_info, release_extid_targets) if extid_target2: return extid_target2 # If there is no release extid (ie. if the package was only loaded with # older versions of this loader, which produced revision objects instead # of releases), return a revision extid when possible. revision_extid_targets = { extid_target for extid_target in extid_targets if extid_target.object_type == ObjectType.REVISION } if revision_extid_targets: assert len(extid_targets) == 1, extid_targets extid_target = list(extid_targets)[0] return extid_target # No target found (this is probably a new package version) return None def select_extid_target( self, p_info: TPackageInfo, extid_targets: Set[CoreSWHID] ) -> Optional[CoreSWHID]: """Given a list of release extid targets, choses one appropriate for the given package info. Package loaders shyould implement this if their ExtIDs may map to multiple releases, so they can fetch releases from the storage and inspect their fields to select the right one for this ``p_info``. """ if extid_targets: # The base package loader does not have the domain-specific knowledge # to select the right release -> crash if there is more than one. assert len(extid_targets) == 1, extid_targets return list(extid_targets)[0] return None def download_package( self, p_info: TPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Download artifacts for a specific package. All downloads happen in in the tmpdir folder. Default implementation expects the artifacts package info to be about one artifact per package. Note that most implementation have 1 artifact per package. But some implementation have multiple artifacts per package (debian), some have none, the package is the artifact (gnu). Args: artifacts_package_info: Information on the package artifacts to download (url, filename, etc...) tmpdir: Location to retrieve such artifacts Returns: List of (path, computed hashes) """ try: - return [download(p_info.url, dest=tmpdir, filename=p_info.filename)] + return [ + download( + p_info.url, + dest=tmpdir, + filename=p_info.filename, + hashes=p_info.checksums, + ) + ] except ContentDecodingError: # package might be erroneously marked as gzip compressed while is is not, # try to download its raw bytes again without attempting to uncompress # the input stream return [ download( p_info.url, dest=tmpdir, filename=p_info.filename, + hashes=p_info.checksums, extra_request_headers={"Accept-Encoding": "identity"}, ) ] def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: """Uncompress the artifact(s) in the destination folder dest. Optionally, this could need to use the p_info dict for some more information (debian). 
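As a rough illustration of the integrity check that download(..., hashes=p_info.checksums) above is expected to perform (the real logic lives in swh.loader.package.utils.download), here is a conceptual sketch; the local path is hypothetical and the sha256 is the one from the cpan test fixture above:

import hashlib

def verify_checksums(filepath: str, checksums: dict) -> None:
    # checksums maps a hash algorithm name to the expected hex digest,
    # as in BasePackageInfo.checksums
    with open(filepath, "rb") as f:
        data = f.read()
    for algo, expected in checksums.items():
        actual = hashlib.new(algo, data).hexdigest()
        if actual != expected:
            raise ValueError(f"{algo} mismatch: {actual} != {expected}")

verify_checksums(
    "/tmp/Internals-CountObjects-0.05.tar.gz",  # hypothetical path
    {"sha256": "e0ecf6ab4873fa55ff74da22a3c4ae0ab6a1409635c9cd2d6059abbb32be3a6a"},
)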
""" uncompressed_path = os.path.join(dest, "src") for a_path, _ in dl_artifacts: uncompress(a_path, dest=uncompressed_path) return uncompressed_path def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: """Return an extra dict of branches that are used to update the set of branches. """ return {} def finalize_visit( self, *, snapshot: Optional[Snapshot], visit: OriginVisit, status_visit: str, status_load: str, failed_branches: List[str], errors: Optional[List[str]] = None, ) -> Dict[str, Any]: """Finalize the visit: - flush eventual unflushed data to storage - update origin visit's status - return the task's status """ self.status_load = status_load self.status_visit = status_visit self.storage.flush() snapshot_id: Optional[bytes] = None if snapshot and snapshot.id: # to prevent the snapshot.id to b"" snapshot_id = snapshot.id assert visit.visit visit_status = OriginVisitStatus( origin=self.origin.url, visit=visit.visit, type=self.visit_type, date=now(), status=status_visit, snapshot=snapshot_id, ) self.storage.origin_visit_status_add([visit_status]) result: Dict[str, Any] = { "status": status_load, } if snapshot_id: result["snapshot_id"] = hash_to_hex(snapshot_id) if failed_branches: logger.warning("%d failed branches", len(failed_branches)) for i, urls in enumerate(islice(failed_branches, 50)): prefix_url = "Failed branches: " if i == 0 else "" logger.warning("%s%s", prefix_url, urls) return result def load(self) -> Dict: """Load for a specific origin the associated contents. 1. Get the list of versions in an origin. 2. Get the snapshot from the previous run of the loader, and filter out versions that were already loaded, if their :term:`extids ` match Then, for each remaining version in the origin 3. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 4. Extract the downloaded files. By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 5. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 6. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 7. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 8. 
Generate and load the snapshot for the visit Using the revisions/releases collected at step 7., and the branch information from step 2., generate a snapshot and load it into the Software Heritage archive """ self.status_load = "uneventful" # either: eventful, uneventful, failed self.status_visit = "full" # see swh.model.model.OriginVisitStatus snapshot = None failed_branches: List[str] = [] # Prepare origin and origin_visit origin = Origin(url=self.origin.url) try: self.storage.origin_add([origin]) visit = list( self.storage.origin_visit_add( [ OriginVisit( origin=self.origin.url, date=self.visit_date, type=self.visit_type, ) ] ) )[0] except Exception as e: logger.exception( "Failed to initialize origin_visit for %s", self.origin.url ) sentry_sdk.capture_exception(e) self.status_load = self.status_visit = "failed" return {"status": "failed"} # Get the previous snapshot for this origin. It is then used to see which # of the package's versions are already loaded in the archive. try: last_snapshot = self.last_snapshot() logger.debug("last snapshot: %s", last_snapshot) except Exception as e: logger.exception("Failed to get previous state for %s", self.origin.url) sentry_sdk.capture_exception(e) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=[str(e)], ) load_exceptions: List[Exception] = [] # Get the list of all version names try: versions = self.get_versions() except NotFound as e: return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="not_found", status_load="failed", errors=[str(e)], ) except Exception as e: logger.exception("Failed to get list of versions for %s", self.origin.url) sentry_sdk.capture_exception(e) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=[str(e)], ) errors = [] # Get the metadata of each version's package packages_info: List[Tuple[str, TPackageInfo]] = [] for version in versions: try: for branch_name, p_info in self.get_package_info(version): packages_info.append((branch_name, p_info)) except Exception as e: load_exceptions.append(e) sentry_sdk.capture_exception(e) error = f"Failed to get package info for version {version} of {self.origin.url}" logger.exception(error) errors.append(f"{error}: {e}") # Compute the ExtID of each of these packages known_extids = self._get_known_extids([p_info for (_, p_info) in packages_info]) if last_snapshot is None: last_snapshot_targets: Set[Sha1Git] = set() else: last_snapshot_targets = { branch.target for branch in last_snapshot.branches.values() } new_extids: Set[ExtID] = set() tmp_releases: Dict[str, List[Tuple[str, Sha1Git]]] = { version: [] for version in versions } for (branch_name, p_info) in packages_info: logger.debug("package_info: %s", p_info) # Check if the package was already loaded, using its ExtID swhid = self.resolve_object_from_extids( known_extids, p_info, last_snapshot_targets ) if swhid is not None and swhid.object_type == ObjectType.REVISION: # This package was already loaded, but by an older version # of this loader, which produced revisions instead of releases. # Let's fetch the revision's data, and "upgrade" it into a release. (rev,) = self.storage.revision_get([swhid.object_id]) if not rev: logger.error( "Failed to upgrade branch %s from revision to " "release, %s is missing from the storage. 
" "Falling back to re-loading from the origin.", branch_name, swhid, ) else: rev = None if swhid is None or (swhid.object_type == ObjectType.REVISION and not rev): # No matching revision or release found in the last snapshot, load it. release_id = None try: res = self._load_release(p_info, origin) if res: (release_id, directory_id) = res assert release_id assert directory_id self._load_extrinsic_directory_metadata( p_info, release_id, directory_id ) self.storage.flush() self.status_load = "eventful" except Exception as e: self.storage.clear_buffers() load_exceptions.append(e) sentry_sdk.capture_exception(e) error = f"Failed to load branch {branch_name} for {self.origin.url}" logger.exception(error) failed_branches.append(branch_name) errors.append(f"{error}: {e}") continue if release_id is None: continue add_extid = True elif swhid.object_type == ObjectType.REVISION: # If 'rev' was None, the previous block would have run. assert rev is not None rel = rev2rel(rev, p_info.version) self.storage.release_add([rel]) logger.debug("Upgraded %s to %s", swhid, rel.swhid()) release_id = rel.id # Create a new extid for this package, so the next run of this loader # will be able to find the new release, and use it (instead of the # old revision) add_extid = True elif swhid.object_type == ObjectType.RELEASE: # This package was already loaded, nothing to do. release_id = swhid.object_id add_extid = False else: assert False, f"Unexpected object type: {swhid}" assert release_id is not None if add_extid: partial_extid = p_info.extid() if partial_extid is not None: (extid_type, extid_version, extid) = partial_extid release_swhid = CoreSWHID( object_type=ObjectType.RELEASE, object_id=release_id ) new_extids.add( ExtID( extid_type=extid_type, extid_version=extid_version, extid=extid, target=release_swhid, ) ) tmp_releases[p_info.version].append((branch_name, release_id)) if load_exceptions: self.status_visit = "partial" if not tmp_releases: # We could not load any releases; fail completely logger.error("Failed to load any release for %s", self.origin.url) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit="failed", status_load="failed", errors=errors, ) try: # Retrieve the default release version (the "latest" one) default_version = self.get_default_version() logger.debug("default version: %s", default_version) # Retrieve extra branches extra_branches = self.extra_branches() logger.debug("extra branches: %s", extra_branches) snapshot = self._load_snapshot( default_version, tmp_releases, extra_branches ) self.storage.flush() except Exception as e: error = f"Failed to build snapshot for origin {self.origin.url}" logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) self.status_visit = "failed" self.status_load = "failed" if snapshot: try: metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id) self.load_metadata_objects(metadata_objects) except Exception as e: error = ( f"Failed to load extrinsic snapshot metadata for {self.origin.url}" ) logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) self.status_visit = "partial" self.status_load = "failed" try: metadata_objects = self.build_extrinsic_origin_metadata() self.load_metadata_objects(metadata_objects) except Exception as e: error = f"Failed to load extrinsic origin metadata for {self.origin.url}" logger.exception(error) errors.append(f"{error}: {e}") sentry_sdk.capture_exception(e) self.status_visit = "partial" 
self.status_load = "failed" if self.status_load != "failed": self._load_extids(new_extids) return self.finalize_visit( snapshot=snapshot, visit=visit, failed_branches=failed_branches, status_visit=self.status_visit, status_load=self.status_load, errors=errors, ) def _load_directory( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str ) -> Tuple[str, from_disk.Directory]: uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir) logger.debug("uncompressed_path: %s", uncompressed_path) directory = from_disk.Directory.from_disk( path=uncompressed_path.encode("utf-8"), max_content_length=self.max_content_size, ) contents, skipped_contents, directories = from_disk.iter_directory(directory) + # Instead of sending everything from the bottom up to the storage, + # use a Merkle graph discovery algorithm to filter out known objects. + contents, skipped_contents, directories = asyncio.run( + discovery.filter_known_objects( + discovery.DiscoveryStorageConnection( + contents, skipped_contents, directories, self.storage + ), + ) + ) + logger.debug("Number of skipped contents: %s", len(skipped_contents)) self.storage.skipped_content_add(skipped_contents) logger.debug("Number of contents: %s", len(contents)) self.storage.content_add(contents) logger.debug("Number of directories: %s", len(directories)) self.storage.directory_add(directories) return (uncompressed_path, directory) def _load_release( self, p_info: TPackageInfo, origin ) -> Optional[Tuple[Sha1Git, Sha1Git]]: """Does all the loading of a release itself: * downloads a package and uncompresses it * loads it from disk * adds contents, directories, and release to self.storage * returns (release_id, directory_id) Raises exception when unable to download or uncompress artifacts """ with tempfile.TemporaryDirectory() as tmpdir: dl_artifacts = self.download_package(p_info, tmpdir) (uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir) # FIXME: This should be release. cf. D409 release = self.build_release( p_info, uncompressed_path, directory=directory.hash ) if not release: # Some artifacts are missing intrinsic metadata # skipping those return None metadata = [metadata for (filepath, metadata) in dl_artifacts] assert release.target is not None, release assert release.target_type == ModelObjectType.DIRECTORY, release metadata_target = ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) original_artifact_metadata = RawExtrinsicMetadata( target=metadata_target, discovery_date=self.visit_date, authority=SWH_METADATA_AUTHORITY, fetcher=self.get_metadata_fetcher(), format="original-artifacts-json", metadata=json.dumps(metadata).encode(), origin=self.origin.url, release=release.swhid(), ) self.load_metadata_objects([original_artifact_metadata]) logger.debug("Release: %s", release) self.storage.release_add([release]) assert directory.hash return (release.id, directory.hash) def _load_snapshot( self, default_version: str, releases: Dict[str, List[Tuple[str, bytes]]], extra_branches: Dict[bytes, Mapping[str, Any]], ) -> Optional[Snapshot]: """Build snapshot out of the current releases stored and extra branches. Then load it in the storage. 
""" logger.debug("releases: %s", releases) # Build and load the snapshot branches = {} # type: Dict[bytes, Mapping[str, Any]] for version, branch_name_releases in releases.items(): if version == default_version and len(branch_name_releases) == 1: # only 1 branch (no ambiguity), we can create an alias # branch 'HEAD' branch_name, _ = branch_name_releases[0] # except for some corner case (deposit) if branch_name != "HEAD": branches[b"HEAD"] = { "target_type": "alias", "target": branch_name.encode("utf-8"), } for branch_name, target in branch_name_releases: branches[branch_name.encode("utf-8")] = { "target_type": "release", "target": target, } # Deal with extra-branches for name, branch_target in extra_branches.items(): if name in branches: error_message = f"Extra branch '{name!r}' has been ignored" logger.error(error_message) sentry_sdk.capture_message(error_message, "error") else: branches[name] = branch_target snapshot_data = {"branches": branches} logger.debug("snapshot: %s", snapshot_data) snapshot = Snapshot.from_dict(snapshot_data) logger.debug("snapshot: %s", snapshot) self.storage.snapshot_add([snapshot]) return snapshot def get_loader_name(self) -> str: """Returns a fully qualified name of this loader.""" return f"{self.__class__.__module__}.{self.__class__.__name__}" def get_loader_version(self) -> str: """Returns the version of the current loader.""" module_name = self.__class__.__module__ or "" module_name_parts = module_name.split(".") # Iterate rootward through the package hierarchy until we find a parent of this # loader's module with a __version__ attribute. for prefix_size in range(len(module_name_parts), 0, -1): package_name = ".".join(module_name_parts[0:prefix_size]) module = sys.modules[package_name] if hasattr(module, "__version__"): return module.__version__ # If this loader's class has no parent package with a __version__, # it should implement it itself. raise NotImplementedError( f"Could not dynamically find the version of {self.get_loader_name()}." ) def get_metadata_fetcher(self) -> MetadataFetcher: """Returns a MetadataFetcher instance representing this package loader; which is used to for adding provenance information to extracted extrinsic metadata, if any.""" return MetadataFetcher( name=self.get_loader_name(), version=self.get_loader_version(), metadata={}, ) def get_metadata_authority(self) -> MetadataAuthority: """For package loaders that get extrinsic metadata, returns the authority the metadata are coming from. """ raise NotImplementedError("get_metadata_authority") def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_origin_metadata.""" return [] def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_origin_metadata.""" metadata_items = self.get_extrinsic_origin_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. 
return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=self.origin.swhid(), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, ) ) return metadata_objects def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]: """Returns metadata items, used by build_extrinsic_snapshot_metadata.""" return [] def build_extrinsic_snapshot_metadata( self, snapshot_id: Sha1Git ) -> List[RawExtrinsicMetadata]: """Builds a list of full RawExtrinsicMetadata objects, using metadata returned by get_extrinsic_snapshot_metadata.""" metadata_items = self.get_extrinsic_snapshot_metadata() if not metadata_items: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in metadata_items: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.origin.url, ) ) return metadata_objects def build_extrinsic_directory_metadata( self, p_info: TPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> List[RawExtrinsicMetadata]: if not p_info.directory_extrinsic_metadata: # If this package loader doesn't write metadata, no need to require # an implementation for get_metadata_authority. return [] authority = self.get_metadata_authority() fetcher = self.get_metadata_fetcher() metadata_objects = [] for item in p_info.directory_extrinsic_metadata: metadata_objects.append( RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=directory_id ), discovery_date=item.discovery_date or self.visit_date, authority=authority, fetcher=fetcher, format=item.format, metadata=item.metadata, origin=self.origin.url, release=CoreSWHID( object_type=ObjectType.RELEASE, object_id=release_id ), ) ) return metadata_objects def _load_extrinsic_directory_metadata( self, p_info: TPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> None: metadata_objects = self.build_extrinsic_directory_metadata( p_info, release_id, directory_id ) self.load_metadata_objects(metadata_objects) def _load_extids(self, extids: Set[ExtID]) -> None: if not extids: return try: self.storage.extid_add(list(extids)) except Exception as e: logger.exception("Failed to load new ExtIDs for %s", self.origin.url) sentry_sdk.capture_exception(e) # No big deal, it just means the next visit will load the same versions # again. 
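# rev2rel (below) builds a synthetic Release out of an existing Revision: the
# release is named after the package version, reuses the revision's author,
# date, synthetic flag and message (with a trailing newline enforced), and
# targets the revision's directory rather than the revision itself.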
def rev2rel(rev: Revision, version: str) -> Release: """Converts a revision to a release.""" message = rev.message if message and not message.endswith(b"\n"): message += b"\n" return Release( name=version.encode(), message=message, target=rev.directory, target_type=ModelObjectType.DIRECTORY, synthetic=rev.synthetic, author=rev.author, date=rev.date, ) diff --git a/swh/loader/package/maven/loader.py b/swh/loader/package/maven/loader.py index 140a703..ef5e456 100644 --- a/swh/loader/package/maven/loader.py +++ b/swh/loader/package/maven/loader.py @@ -1,206 +1,212 @@ # Copyright (C) 2021-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from __future__ import annotations from datetime import datetime, timezone import json import logging from os import path import string from typing import Any, Iterator, List, Optional, Sequence, Tuple import attr import iso8601 import requests from typing_extensions import TypedDict from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) -from swh.loader.package.utils import EMPTY_AUTHOR, release_name +from swh.loader.package.utils import EMPTY_AUTHOR, get_url_body, release_name from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, ObjectType, RawExtrinsicMetadata, Release, Sha1Git, TimestampWithTimezone, ) from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) class ArtifactDict(TypedDict): """Data about a Maven artifact, passed by the Maven Lister.""" time: str """the time of the last update of jar file on the server as an iso8601 date string """ url: str """the artifact url to retrieve filename""" filename: Optional[str] """optionally, the file's name""" gid: str """artifact's groupId""" aid: str """artifact's artifactId""" version: str """artifact's version""" base_url: str """root URL of the Maven instance""" @attr.s class MavenPackageInfo(BasePackageInfo): time = attr.ib(type=datetime) """Timestamp of the last update of jar file on the server.""" gid = attr.ib(type=str) """Group ID of the maven artifact""" aid = attr.ib(type=str) """Artifact ID of the maven artifact""" version = attr.ib(type=str) """Version of the maven artifact""" base_url = attr.ib(type=str) """Root URL of the Maven instance""" # default format for maven artifacts MANIFEST_FORMAT = string.Template("$gid $aid $version $url $time") EXTID_TYPE = "maven-jar" EXTID_VERSION = 0 @classmethod def from_metadata(cls, a_metadata: ArtifactDict) -> MavenPackageInfo: time = iso8601.parse_date(a_metadata["time"]).astimezone(tz=timezone.utc) url = a_metadata["url"] + checksums = {} + try: + checksums["sha1"] = get_url_body(url + ".sha1").decode() + except requests.HTTPError: + pass return cls( url=url, filename=a_metadata.get("filename") or path.split(url)[-1], time=time, gid=a_metadata["gid"], aid=a_metadata["aid"], version=a_metadata["version"], base_url=a_metadata["base_url"], directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( format="maven-json", metadata=json.dumps(a_metadata).encode(), ), ], + checksums=checksums, ) class MavenLoader(PackageLoader[MavenPackageInfo]): """Load source code jar origin's artifact files into swh archive""" visit_type = "maven" def __init__( self, storage: StorageInterface, url: str, artifacts: Sequence[ArtifactDict], **kwargs: Any, ): """Loader constructor. 
For now, this is the lister's task output. There is one, and only one, artefact (jar or zip) per version, as guaranteed by the Maven coordinates system. Args: url: Origin url artifacts: List of single artifact information """ super().__init__(storage=storage, url=url, **kwargs) self.artifacts = artifacts # assume order is enforced in the lister self.version_artifact = { jar["version"]: jar for jar in artifacts if jar["version"] } if artifacts: base_urls = {jar["base_url"] for jar in artifacts} try: (self.base_url,) = base_urls except ValueError: raise ValueError( "Artifacts originate from more than one Maven instance: " + ", ".join(base_urls) ) from None else: # There is no artifact, so self.metadata_authority won't be called, # so self.base_url won't be accessed. pass def get_versions(self) -> Sequence[str]: return list(self.version_artifact) def get_default_version(self) -> str: # Default version is the one of the most recent artifact return max(self.artifacts, key=lambda a: a["time"])["version"] def get_metadata_authority(self): return MetadataAuthority(type=MetadataAuthorityType.FORGE, url=self.base_url) def build_extrinsic_directory_metadata( self, p_info: MavenPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, ) -> List[RawExtrinsicMetadata]: # Rebuild POM URL. pom_url = path.dirname(p_info.url) pom_url = f"{pom_url}/{p_info.aid}-{p_info.version}.pom" r = requests.get(pom_url, allow_redirects=True) if r.status_code == 200: metadata_pom = r.content else: metadata_pom = b"" p_info.directory_extrinsic_metadata.append( RawExtrinsicMetadataCore( format="maven-pom", metadata=metadata_pom, ) ) return super().build_extrinsic_directory_metadata( p_info=p_info, release_id=release_id, directory_id=directory_id, ) def get_package_info(self, version: str) -> Iterator[Tuple[str, MavenPackageInfo]]: a_metadata = self.version_artifact[version] rel_name = release_name(a_metadata["version"]) yield rel_name, MavenPackageInfo.from_metadata(a_metadata) def build_release( self, p_info: MavenPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: msg = f"Synthetic release for archive at {p_info.url}\n".encode("utf-8") normalized_time = TimestampWithTimezone.from_datetime(p_info.time) return Release( name=p_info.version.encode(), message=msg, date=normalized_time, author=EMPTY_AUTHOR, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar.sha1 b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar.sha1 new file mode 100644 index 0000000..9b4204d --- /dev/null +++ b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar.sha1 @@ -0,0 +1 @@ +6976e186000753610a63713677f42f0228f04e64 \ No newline at end of file diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar.sha1 b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar.sha1 new file mode 100644 index 0000000..065821c --- /dev/null +++ b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar.sha1 @@ -0,0 +1 @@ +10c61786a119470096b8d1884e43d5880d99ec7e \ No newline at end of file diff --git a/swh/loader/package/maven/tests/test_maven.py b/swh/loader/package/maven/tests/test_maven.py index 36de2a7..5bc6db6 100644 --- a/swh/loader/package/maven/tests/test_maven.py +++ b/swh/loader/package/maven/tests/test_maven.py @@ -1,475 +1,499 @@ # Copyright (C) 2019-2022 The Software Heritage developers 
# See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib from itertools import chain import json import os from pathlib import Path import pytest from swh.core.tarball import uncompress from swh.loader.package import __version__ from swh.loader.package.maven.loader import MavenLoader, MavenPackageInfo from swh.loader.package.utils import EMPTY_AUTHOR from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.from_disk import Directory, iter_directory from swh.model.hashutil import hash_to_bytes from swh.model.model import ( RawExtrinsicMetadata, Release, Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone, ) from swh.model.model import MetadataAuthority, MetadataAuthorityType, MetadataFetcher from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.algos.snapshot import snapshot_get_all_branches REPO_BASE_URL = "https://repo1.maven.org/maven2/" MVN_ORIGIN_URL = f"{REPO_BASE_URL}al/aldi/sprova4j" MVN_ARTIFACTS = [ { "time": "2021-07-12 19:06:59.335000", "gid": "al.aldi", "aid": "sprova4j", "filename": "sprova4j-0.1.0-sources.jar", "version": "0.1.0", "base_url": REPO_BASE_URL, "url": f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.0/sprova4j-0.1.0-sources.jar", }, { "time": "2021-07-12 19:37:05.534000", "gid": "al.aldi", "aid": "sprova4j", "filename": "sprova4j-0.1.1-sources.jar", "version": "0.1.1", "base_url": REPO_BASE_URL, "url": f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.1/sprova4j-0.1.1-sources.jar", }, ] MVN_ARTIFACTS_POM = [ f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.0/sprova4j-0.1.0.pom", f"{REPO_BASE_URL}al/aldi/sprova4j/0.1.1/sprova4j-0.1.1.pom", ] REL_MSGS = ( b"Synthetic release for archive at https://repo1.maven.org/maven2/al/aldi/" b"sprova4j/0.1.0/sprova4j-0.1.0-sources.jar\n", b"Synthetic release for archive at https://repo1.maven.org/maven2/al/aldi/" b"sprova4j/0.1.1/sprova4j-0.1.1-sources.jar\n", ) REL_DATES = ( TimestampWithTimezone.from_datetime( datetime.datetime(2021, 7, 12, 19, 6, 59, 335000, tzinfo=datetime.timezone.utc) ), TimestampWithTimezone.from_datetime( datetime.datetime(2021, 7, 12, 19, 37, 5, 534000, tzinfo=datetime.timezone.utc) ), ) @pytest.fixture def data_jar_1(datadir): content = Path( datadir, "https_maven.org", "sprova4j-0.1.0-sources.jar" ).read_bytes() return content +@pytest.fixture +def data_jar_1_sha1(datadir): + content = Path( + datadir, "https_maven.org", "sprova4j-0.1.0-sources.jar.sha1" + ).read_bytes() + return content + + @pytest.fixture def data_pom_1(datadir): content = Path(datadir, "https_maven.org", "sprova4j-0.1.0.pom").read_bytes() return content @pytest.fixture def data_jar_2(datadir): content = Path( datadir, "https_maven.org", "sprova4j-0.1.1-sources.jar" ).read_bytes() return content +@pytest.fixture +def data_jar_2_sha1(datadir): + content = Path( + datadir, "https_maven.org", "sprova4j-0.1.1-sources.jar.sha1" + ).read_bytes() + return content + + @pytest.fixture def data_pom_2(datadir): content = Path(datadir, "https_maven.org", "sprova4j-0.1.1.pom").read_bytes() return content @pytest.fixture def jar_dirs(datadir, tmp_path): jar_1_path = os.path.join(datadir, "https_maven.org", "sprova4j-0.1.0-sources.jar") jar_2_path = os.path.join(datadir, "https_maven.org", "sprova4j-0.1.1-sources.jar") jar_1_extract_path = os.path.join(tmp_path, 
"jar_1") jar_2_extract_path = os.path.join(tmp_path, "jar_2") uncompress(jar_1_path, jar_1_extract_path) uncompress(jar_2_path, jar_2_extract_path) jar_1_dir = Directory.from_disk(path=jar_1_extract_path.encode()) jar_2_dir = Directory.from_disk(path=jar_2_extract_path.encode()) return [jar_1_dir, jar_2_dir] @pytest.fixture def expected_contents_and_directories(jar_dirs): jar_1_cnts, _, jar_1_dirs = iter_directory(jar_dirs[0]) jar_2_cnts, _, jar_2_dirs = iter_directory(jar_dirs[1]) contents = {cnt.sha1 for cnt in chain(jar_1_cnts, jar_2_cnts)} directories = {dir.id for dir in chain(jar_1_dirs, jar_2_dirs)} return contents, directories @pytest.fixture def expected_releases(jar_dirs): return [ Release( name=b"0.1.0", message=REL_MSGS[0], author=EMPTY_AUTHOR, date=REL_DATES[0], target_type=ModelObjectType.DIRECTORY, target=jar_dirs[0].hash, synthetic=True, metadata=None, ), Release( name=b"0.1.1", message=REL_MSGS[1], author=EMPTY_AUTHOR, date=REL_DATES[1], target_type=ModelObjectType.DIRECTORY, target=jar_dirs[1].hash, synthetic=True, metadata=None, ), ] @pytest.fixture def expected_snapshot(expected_releases): return Snapshot( branches={ b"HEAD": SnapshotBranch( target_type=TargetType.ALIAS, target=b"releases/0.1.1", ), b"releases/0.1.0": SnapshotBranch( target_type=TargetType.RELEASE, target=expected_releases[0].id, ), b"releases/0.1.1": SnapshotBranch( target_type=TargetType.RELEASE, target=expected_releases[1].id, ), }, ) @pytest.fixture def expected_json_metadata(): return MVN_ARTIFACTS @pytest.fixture def expected_pom_metadata(data_pom_1, data_pom_2): return [data_pom_1, data_pom_2] @pytest.fixture(autouse=True) def network_requests_mock( requests_mock, data_jar_1, + data_jar_1_sha1, data_pom_1, data_jar_2, + data_jar_2_sha1, data_pom_2, ): requests_mock.get(MVN_ARTIFACTS[0]["url"], content=data_jar_1) + requests_mock.get(MVN_ARTIFACTS[0]["url"] + ".sha1", content=data_jar_1_sha1) requests_mock.get(MVN_ARTIFACTS_POM[0], content=data_pom_1) requests_mock.get(MVN_ARTIFACTS[1]["url"], content=data_jar_2) + requests_mock.get(MVN_ARTIFACTS[1]["url"] + ".sha1", content=data_jar_2_sha1) requests_mock.get(MVN_ARTIFACTS_POM[1], content=data_pom_2) def test_maven_loader_visit_with_no_artifact_found(swh_storage, requests_mock_datadir): origin_url = "https://ftp.g.o/unknown" unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz" loader = MavenLoader( swh_storage, origin_url, artifacts=[ { "time": "2021-07-18 08:05:05.187000", "url": unknown_artifact_url, # unknown artifact "filename": "8sync-0.1.0.tar.gz", "gid": "al/aldi", "aid": "sprova4j", "version": "0.1.0", "base_url": "https://repo1.maven.org/maven2/", } ], ) actual_load_status = loader.load() assert actual_load_status["status"] == "uneventful" assert actual_load_status["snapshot_id"] is not None expected_snapshot_id = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e" assert actual_load_status["snapshot_id"] == expected_snapshot_id stats = get_stats(swh_storage) assert_last_visit_matches(swh_storage, origin_url, status="partial", type="maven") assert { "content": 0, "directory": 0, "origin": 1, "origin_visit": 1, "release": 0, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_maven_loader_jar_visit_inconsistent_base_url( swh_storage, requests_mock, data_jar_1, data_pom_1 ): """With no prior visit, loading a jar ends up with 1 snapshot""" with pytest.raises(ValueError, match="more than one Maven instance"): MavenLoader( swh_storage, MVN_ORIGIN_URL, artifacts=[ MVN_ARTIFACTS[0], {**MVN_ARTIFACTS[1], "base_url": 
"http://maven.example/"}, ], ) def test_maven_loader_first_visit( swh_storage, expected_contents_and_directories, expected_snapshot, expected_releases ): """With no prior visit, loading a jar ends up with 1 snapshot""" loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" actual_snapshot = snapshot_get_all_branches( swh_storage, hash_to_bytes(actual_load_status["snapshot_id"]) ) assert actual_load_status["snapshot_id"] == expected_snapshot.id.hex() check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert_last_visit_matches(swh_storage, MVN_ORIGIN_URL, status="full", type="maven") expected_contents, expected_directories = expected_contents_and_directories assert list(swh_storage.content_missing_per_sha1(expected_contents)) == [] assert list(swh_storage.directory_missing(expected_directories)) == [] rel_id = actual_snapshot.branches[b"releases/0.1.0"].target rel2_id = actual_snapshot.branches[b"releases/0.1.1"].target releases = swh_storage.release_get([rel_id, rel2_id]) assert releases == expected_releases assert { "content": len(expected_contents), "directory": len(expected_directories), "origin": 1, "origin_visit": 1, "release": 2, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_maven_loader_2_visits_without_change( swh_storage, requests_mock, expected_snapshot ): """With no prior visit, load a maven project ends up with 1 snapshot""" loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" assert actual_load_status["snapshot_id"] == expected_snapshot.id.hex() assert_last_visit_matches(swh_storage, MVN_ORIGIN_URL, status="full", type="maven") actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" assert actual_load_status2["snapshot_id"] is not None assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"] assert_last_visit_matches(swh_storage, MVN_ORIGIN_URL, status="full", type="maven") # Make sure we have only one entry in history for the pom fetch, one for # the actual download of jar, and that they're correct. urls_history = [str(req.url) for req in list(requests_mock.request_history)] assert urls_history == [ + MVN_ARTIFACTS[0]["url"] + ".sha1", + MVN_ARTIFACTS[1]["url"] + ".sha1", MVN_ARTIFACTS[0]["url"], MVN_ARTIFACTS_POM[0], MVN_ARTIFACTS[1]["url"], MVN_ARTIFACTS_POM[1], + MVN_ARTIFACTS[0]["url"] + ".sha1", + MVN_ARTIFACTS[1]["url"] + ".sha1", ] def test_maven_loader_extrinsic_metadata( swh_storage, expected_releases, expected_json_metadata, expected_pom_metadata ): """With no prior visit, loading a jar ends up with 1 snapshot. Extrinsic metadata is the pom file associated to the source jar. 
""" loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" for i, expected_release in enumerate(expected_releases): expected_release_id = expected_release.id release = swh_storage.release_get([expected_release_id])[0] assert release is not None release_swhid = CoreSWHID( object_type=ObjectType.RELEASE, object_id=expected_release_id ) directory_swhid = ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url=REPO_BASE_URL, ) expected_metadata = [ RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.maven.loader.MavenLoader", version=__version__, ), discovery_date=loader.visit_date, format="maven-pom", metadata=expected_pom_metadata[i], origin=MVN_ORIGIN_URL, release=release_swhid, ), RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.maven.loader.MavenLoader", version=__version__, ), discovery_date=loader.visit_date, format="maven-json", metadata=json.dumps(expected_json_metadata[i]).encode(), origin=MVN_ORIGIN_URL, release=release_swhid, ), ] res = swh_storage.raw_extrinsic_metadata_get( directory_swhid, metadata_authority ) assert res.next_page_token is None assert set(res.results) == set(expected_metadata) def test_maven_loader_extrinsic_metadata_no_pom( swh_storage, requests_mock, expected_releases, expected_json_metadata ): """With no prior visit, loading a jar ends up with 1 snapshot. Extrinsic metadata is None if the pom file cannot be retrieved. """ requests_mock.get(MVN_ARTIFACTS_POM[0], status_code="404") loader = MavenLoader(swh_storage, MVN_ORIGIN_URL, artifacts=MVN_ARTIFACTS) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" expected_release_id = expected_releases[0].id release = swh_storage.release_get([expected_release_id])[0] assert release is not None release_swhid = CoreSWHID( object_type=ObjectType.RELEASE, object_id=expected_release_id ) directory_swhid = ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url=REPO_BASE_URL, ) expected_metadata = [ RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.maven.loader.MavenLoader", version=__version__, ), discovery_date=loader.visit_date, format="maven-pom", metadata=b"", origin=MVN_ORIGIN_URL, release=release_swhid, ), RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.maven.loader.MavenLoader", version=__version__, ), discovery_date=loader.visit_date, format="maven-json", metadata=json.dumps(expected_json_metadata[0]).encode(), origin=MVN_ORIGIN_URL, release=release_swhid, ), ] res = swh_storage.raw_extrinsic_metadata_get(directory_swhid, metadata_authority) assert res.next_page_token is None assert set(res.results) == set(expected_metadata) def test_maven_loader_jar_extid(): """Compute primary key should return the right identity""" metadata = MVN_ARTIFACTS[0] p_info = MavenPackageInfo(**metadata) expected_manifest = "{gid} {aid} {version} {url} {time}".format(**metadata).encode() actual_id = p_info.extid() assert actual_id == ( "maven-jar", 0, hashlib.sha256(expected_manifest).digest(), 
) diff --git a/swh/loader/package/maven/tests/test_tasks.py b/swh/loader/package/maven/tests/test_tasks.py index 71773a5..9b0524b 100644 --- a/swh/loader/package/maven/tests/test_tasks.py +++ b/swh/loader/package/maven/tests/test_tasks.py @@ -1,86 +1,56 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict + +NAMESPACE = "swh.loader.package.maven" MVN_ARTIFACTS = [ { "time": 1626109619335, "url": "https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.0/" + "sprova4j-0.1.0.jar", "gid": "al.aldi", "aid": "sprova4j", "filename": "sprova4j-0.1.0.jar", "version": "0.1.0", "base_url": "https://repo1.maven.org/maven2/", }, ] -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass - - @pytest.fixture def maven_lister(): - return Lister(name="maven-lister", instance_name="example", id=uuid.uuid4()) + return Lister(name="maven", instance_name="example", id=uuid.uuid4()) @pytest.fixture def maven_listed_origin(maven_lister): return ListedOrigin( lister_id=maven_lister.id, url=MVN_ARTIFACTS[0]["url"], visit_type="maven", extra_loader_arguments={ "artifacts": MVN_ARTIFACTS, }, ) -def test_tasks_maven_loader( - mocker, - swh_scheduler_celery_app, +def test_maven_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + maven_lister, + maven_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.maven.loader.MavenLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.maven.tasks.LoadMaven", - kwargs=dict( - url=MVN_ARTIFACTS[0]["url"], - artifacts=MVN_ARTIFACTS, - ), - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_maven_loader_for_listed_origin( - mocker, swh_scheduler_celery_app, maven_lister, maven_listed_origin -): - mock_load = mocker.patch("swh.loader.package.maven.loader.MavenLoader.load") - mock_load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(maven_listed_origin, maven_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.maven.tasks.LoadMaven", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.MavenLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadMaven", + lister=maven_lister, + listed_origin=maven_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/nixguix/tests/test_tasks.py b/swh/loader/package/nixguix/tests/test_tasks.py index 5249568..48cd4f9 100644 --- a/swh/loader/package/nixguix/tests/test_tasks.py +++ b/swh/loader/package/nixguix/tests/test_tasks.py @@ -1,72 +1,40 @@ # Copyright (C) 2020-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import 
create_origin_task_dict - -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass +NAMESPACE = "swh.loader.package.nixguix" @pytest.fixture def nixguix_lister(): - return Lister(name="nixguix-lister", instance_name="example", id=uuid.uuid4()) + return Lister(name="nixguix", instance_name="example", id=uuid.uuid4()) @pytest.fixture def nixguix_listed_origin(nixguix_lister): return ListedOrigin( lister_id=nixguix_lister.id, url="https://nixguix.example.org/", visit_type="nixguix", ) -def test_tasks_nixguix_loader( - mocker, - swh_scheduler_celery_app, +def test_nixguix_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + nixguix_lister, + nixguix_listed_origin, ): - mock_loader = mocker.patch( - "swh.loader.package.nixguix.loader.NixGuixLoader.from_configfile" - ) - mock_loader.return_value = mock_loader - mock_loader.load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.nixguix.tasks.LoadNixguix", kwargs=dict(url="some-url") - ) - assert res - res.wait() - assert res.successful() - assert mock_loader.called - assert res.result == {"status": "eventful"} - - -def test_tasks_nixguix_loader_for_listed_origin( - mocker, swh_scheduler_celery_app, nixguix_lister, nixguix_listed_origin -): - mock_loader = mocker.patch( - "swh.loader.package.nixguix.loader.NixGuixLoader.from_configfile" - ) - mock_loader.return_value = mock_loader - mock_loader.load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(nixguix_listed_origin, nixguix_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.nixguix.tasks.LoadNixguix", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.NixGuixLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadNixguix", + lister=nixguix_lister, + listed_origin=nixguix_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_loader.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/npm/loader.py b/swh/loader/package/npm/loader.py index a44e22d..1e2dd76 100644 --- a/swh/loader/package/npm/loader.py +++ b/swh/loader/package/npm/loader.py @@ -1,300 +1,301 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from codecs import BOM_UTF8 import json import logging import os import string from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union from urllib.parse import quote import attr import chardet from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import cached_method, get_url_body, release_name from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, ObjectType, Person, Release, Sha1Git, TimestampWithTimezone, ) from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) EMPTY_PERSON = Person.from_fullname(b"") @attr.s class NpmPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) package_name = attr.ib(type=str) date = attr.ib(type=Optional[str]) shasum = attr.ib(type=str) """sha1 checksum""" # we cannot rely only on $shasum, as it is technically possible for two versions # of the same package to have the exact 
same tarball. # But the release data (message and date) are extrinsic to the content of the # package, so they differ between versions. # So we need every attribute used to build the release object to be part of the # manifest. MANIFEST_FORMAT = string.Template( "date $date\nname $package_name\nshasum $shasum\nurl $url\nversion $version" ) EXTID_TYPE = "npm-manifest-sha256" EXTID_VERSION = 0 @classmethod def from_metadata( cls, project_metadata: Dict[str, Any], version: str ) -> "NpmPackageInfo": package_metadata = project_metadata["versions"][version] url = package_metadata["dist"]["tarball"] assert package_metadata["name"] == project_metadata["name"] # No date available in intrinsic metadata: retrieve it from the API # metadata, using the version number that the API claims this package # has. extrinsic_version = package_metadata["version"] if "time" in project_metadata: date = project_metadata["time"][extrinsic_version] elif "mtime" in package_metadata: date = package_metadata["mtime"] else: date = None return cls( package_name=package_metadata["name"], url=url, filename=os.path.basename(url), date=date, shasum=package_metadata["dist"]["shasum"], version=extrinsic_version, raw_info=package_metadata, directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( format="replicate-npm-package-json", metadata=json.dumps(package_metadata).encode(), ) ], + checksums={"sha1": package_metadata["dist"]["shasum"]}, ) class NpmLoader(PackageLoader[NpmPackageInfo]): """Load npm origin's artifact releases into swh archive.""" visit_type = "npm" def __init__(self, storage: StorageInterface, url: str, **kwargs: Any): """Constructor Args str: origin url (e.g. https://www.npmjs.com/package/) """ super().__init__(storage=storage, url=url, **kwargs) self.package_name = url.split("https://www.npmjs.com/package/")[1] safe_name = quote(self.package_name, safe="") self.provider_url = f"https://replicate.npmjs.com/{safe_name}/" self._info: Dict[str, Any] = {} self._versions = None @cached_method def _raw_info(self) -> bytes: return get_url_body(self.provider_url) @cached_method def info(self) -> Dict: """Return the project metadata information (fetched from npm registry)""" return json.loads(self._raw_info()) def get_versions(self) -> Sequence[str]: return sorted(list(self.info()["versions"].keys())) def get_default_version(self) -> str: return self.info()["dist-tags"].get("latest", "") def get_metadata_authority(self): return MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://npmjs.com/", metadata={}, ) def get_package_info(self, version: str) -> Iterator[Tuple[str, NpmPackageInfo]]: p_info = NpmPackageInfo.from_metadata( project_metadata=self.info(), version=version ) yield release_name(version), p_info def build_release( self, p_info: NpmPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: # Metadata from NPM is not intrinsic to tarballs. # This means two package versions can have the same tarball, but different # metadata. To avoid mixing up releases, every field used to build the # release object must be part of NpmPackageInfo.MANIFEST_FORMAT. 
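        # The intrinsic metadata is the package.json parsed out of the tarball;
        # build_release only uses it for the author. The release date instead
        # comes from the extrinsic registry metadata carried by p_info, since
        # the tarball itself has no reliable upload timestamp.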
i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None author = extract_npm_package_author(i_metadata) assert self.package_name == p_info.package_name msg = ( f"Synthetic release for NPM source package {p_info.package_name} " f"version {p_info.version}\n" ) if p_info.date is None: url = p_info.url artifact_name = os.path.basename(url) raise ValueError( "Origin %s: Cannot determine upload time for artifact %s." % (p_info.url, artifact_name) ) date = TimestampWithTimezone.from_iso8601(p_info.date) # FIXME: this is to remain bug-compatible with earlier versions: date = attr.evolve(date, timestamp=attr.evolve(date.timestamp, microseconds=0)) r = Release( name=p_info.version.encode(), message=msg.encode(), author=author, date=date, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) return r def _author_str(author_data: Union[Dict, List, str]) -> str: """Parse author from package.json author fields""" if isinstance(author_data, dict): author_str = "" name = author_data.get("name") if name is not None: if isinstance(name, str): author_str += name elif isinstance(name, list): author_str += _author_str(name[0]) if len(name) > 0 else "" email = author_data.get("email") if email is not None: author_str += f" <{email}>" result = author_str elif isinstance(author_data, list): result = _author_str(author_data[0]) if len(author_data) > 0 else "" else: result = author_data return result def extract_npm_package_author(package_json: Dict[str, Any]) -> Person: """ Extract package author from a ``package.json`` file content and return it in swh format. Args: package_json: Dict holding the content of parsed ``package.json`` file Returns: Person """ for author_key in ("author", "authors"): if author_key in package_json: author_data = package_json[author_key] if author_data is None: return EMPTY_PERSON author_str = _author_str(author_data) return Person.from_fullname(author_str.encode()) return EMPTY_PERSON def _lstrip_bom(s, bom=BOM_UTF8): if s.startswith(bom): return s[len(bom) :] else: return s def load_json(json_bytes): """ Try to load JSON from bytes and return a dictionary. First try to decode from utf-8. If the decoding failed, try to detect the encoding and decode again with replace error handling. If JSON is malformed, an empty dictionary will be returned. Args: json_bytes (bytes): binary content of a JSON file Returns: dict: JSON data loaded in a dictionary """ json_data = {} try: json_str = _lstrip_bom(json_bytes).decode("utf-8") except UnicodeDecodeError: encoding = chardet.detect(json_bytes)["encoding"] if encoding: json_str = json_bytes.decode(encoding, "replace") try: json_data = json.loads(json_str) except json.decoder.JSONDecodeError: pass return json_data def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from npm. Returns: the pkginfo parsed structure as a dict if any or None if none was present. 
""" # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) == 0: return {} project_dirname = lst[0] package_json_path = os.path.join(dir_path, project_dirname, "package.json") if not os.path.exists(package_json_path): return {} with open(package_json_path, "rb") as package_json_file: package_json_bytes = package_json_file.read() return load_json(package_json_bytes) diff --git a/swh/loader/package/npm/tests/test_tasks.py b/swh/loader/package/npm/tests/test_tasks.py index 83b4d5d..650e1ae 100644 --- a/swh/loader/package/npm/tests/test_tasks.py +++ b/swh/loader/package/npm/tests/test_tasks.py @@ -1,67 +1,40 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict - -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass +NAMESPACE = "swh.loader.package.npm" @pytest.fixture def npm_lister(): - return Lister(name="npm-lister", instance_name="npm", id=uuid.uuid4()) + return Lister(name="npm", instance_name="npm", id=uuid.uuid4()) @pytest.fixture def npm_listed_origin(npm_lister): return ListedOrigin( lister_id=npm_lister.id, url="https://www.npmjs.com/package/some-package", visit_type="npm", ) -def test_tasks_npm_loader( - mocker, - swh_scheduler_celery_app, +def test_npm_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + npm_lister, + npm_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.npm.loader.NpmLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.npm.tasks.LoadNpm", - kwargs=dict(url="https://www.npmjs.com/package/some-package"), - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_npm_loader_for_listed_origin( - mocker, swh_scheduler_celery_app, npm_lister, npm_listed_origin -): - mock_load = mocker.patch("swh.loader.package.npm.loader.NpmLoader.load") - mock_load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(npm_listed_origin, npm_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.npm.tasks.LoadNpm", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.NpmLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadNpm", + lister=npm_lister, + listed_origin=npm_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/opam/loader.py b/swh/loader/package/opam/loader.py index d192695..0c39bf0 100644 --- a/swh/loader/package/opam/loader.py +++ b/swh/loader/package/opam/loader.py @@ -1,265 +1,273 @@ # Copyright (C) 2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import io import os from subprocess import PIPE, Popen, call from typing import Any, Iterator, List, Optional, Tuple import attr from 
swh.loader.package.loader import ( BasePackageInfo, PackageLoader, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import cached_method from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, ObjectType, Person, Release, Sha1Git, ) from swh.storage.interface import StorageInterface @attr.s class OpamPackageInfo(BasePackageInfo): author = attr.ib(type=Person) committer = attr.ib(type=Person) def opam_read( cmd: List[str], init_error_msg_if_any: Optional[str] = None ) -> Optional[str]: """This executes an opam command and returns the first line of the output. Args: cmd: Opam command to execute as a list of string init_error_msg_if_any: Error message to raise in case a problem occurs during initialization Raises: ValueError with the init_error_msg_if_any content in case stdout is not consumable and the variable is provided with non empty value. Returns: the first line of the executed command output """ with Popen(cmd, stdout=PIPE) as proc: if proc.stdout is not None: for line in io.TextIOWrapper(proc.stdout): # care only for the first line output result (mostly blank separated # values, callers will deal with the parsing of the line) return line elif init_error_msg_if_any: raise ValueError(init_error_msg_if_any) return None class OpamLoader(PackageLoader[OpamPackageInfo]): """Load all versions of a given package in a given opam repository. The state of the opam repository is stored in a directory called an opam root. This folder is a requisite for the opam binary to actually list information on package. When initialize_opam_root is False (the default for production workers), the opam root must already have been configured outside of the loading process. If not an error is raised, thus failing the loading. For standalone workers, initialize_opam_root must be set to True, so the ingestion can take care of installing the required opam root properly. The remaining ingestion uses the opam binary to give the versions of the given package. Then, for each version, the loader uses the opam binary to list the tarball url to fetch and ingest. """ visit_type = "opam" def __init__( self, storage: StorageInterface, url: str, opam_root: str, opam_instance: str, opam_url: str, opam_package: str, initialize_opam_root: bool = False, **kwargs: Any, ): super().__init__(storage=storage, url=url, **kwargs) self.opam_root = opam_root self.opam_instance = opam_instance self.opam_url = opam_url self.opam_package = opam_package self.initialize_opam_root = initialize_opam_root def get_package_dir(self) -> str: return ( f"{self.opam_root}/repo/{self.opam_instance}/packages/{self.opam_package}" ) def get_package_name(self, version: str) -> str: return f"{self.opam_package}.{version}" def get_package_file(self, version: str) -> str: return f"{self.get_package_dir()}/{self.get_package_name(version)}/opam" def get_metadata_authority(self): return MetadataAuthority(type=MetadataAuthorityType.FORGE, url=self.opam_url) @cached_method def _compute_versions(self) -> List[str]: """Compute the versions using opam internals Raises: ValueError in case the lister is not able to determine the list of versions Returns: The list of versions for the package """ # TODO: use `opam show` instead of this workaround when it support the `--repo` # flag package_dir = self.get_package_dir() if not os.path.exists(package_dir): raise ValueError( f"can't get versions for package {self.opam_package} " f"(at url {self.origin.url})." 
) versions = [ ".".join(version.split(".")[1:]) for version in os.listdir(package_dir) ] if not versions: raise ValueError( f"can't get versions for package {self.opam_package} " f"(at url {self.origin.url})" ) versions.sort() return versions def get_versions(self) -> List[str]: """First initialize the opam root directory if needed then start listing the package versions. Raises: ValueError in case the lister is not able to determine the list of versions or if the opam root directory is invalid. """ if self.initialize_opam_root: # for standalone loader (e.g docker), loader must initialize the opam root # folder call( [ "opam", "init", "--reinit", "--bare", "--no-setup", "--root", self.opam_root, self.opam_instance, self.opam_url, ] ) else: # for standard/production loaders, no need to initialize the opam root # folder. It must be present though so check for it, if not present, raise if not os.path.isfile(os.path.join(self.opam_root, "config")): # so if not correctly setup, raise immediately raise ValueError("Invalid opam root") return self._compute_versions() def get_default_version(self) -> str: """Return the most recent version of the package as default.""" return self._compute_versions()[-1] def _opam_show_args(self, version: str): package_file = self.get_package_file(version) return [ "opam", "show", "--color", "never", "--safe", "--normalise", "--root", self.opam_root, "--file", package_file, ] def get_enclosed_single_line_field(self, field, version) -> Optional[str]: result = opam_read(self._opam_show_args(version) + ["--field", field]) # Sanitize the result if any (remove trailing \n and enclosing ") return result.strip().strip('"') if result else None def get_package_info(self, version: str) -> Iterator[Tuple[str, OpamPackageInfo]]: url = self.get_enclosed_single_line_field("url.src:", version) if url is None: raise ValueError( f"can't get field url.src: for version {version} of package" f" {self.opam_package} (at url {self.origin.url}) from `opam show`" ) + checksums_str = self.get_enclosed_single_line_field("url.checksum:", version) + checksums = {} + if checksums_str: + for c in checksums_str.strip("[]").split(" "): + algo, hash = c.strip('"').split("=") + checksums[algo] = hash + authors_field = self.get_enclosed_single_line_field("authors:", version) fullname = b"" if authors_field is None else str.encode(authors_field) author = Person.from_fullname(fullname) maintainer_field = self.get_enclosed_single_line_field("maintainer:", version) fullname = b"" if maintainer_field is None else str.encode(maintainer_field) committer = Person.from_fullname(fullname) with Popen(self._opam_show_args(version) + ["--raw"], stdout=PIPE) as proc: assert proc.stdout is not None metadata = proc.stdout.read() yield self.get_package_name(version), OpamPackageInfo( url=url, filename=None, author=author, committer=committer, version=version, directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( metadata=metadata, format="opam-package-definition", ) ], + checksums=checksums, ) def build_release( self, p_info: OpamPackageInfo, uncompressed_path: str, directory: Sha1Git, ) -> Optional[Release]: msg = ( f"Synthetic release for OPAM source package {self.opam_package} " f"version {p_info.version}\n" ) return Release( name=p_info.version.encode(), author=p_info.author, message=msg.encode(), date=None, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) diff --git a/swh/loader/package/opam/tests/test_opam.py b/swh/loader/package/opam/tests/test_opam.py index 1ab1cdc..1b052b5 100644 --- 
a/swh/loader/package/opam/tests/test_opam.py +++ b/swh/loader/package/opam/tests/test_opam.py @@ -1,414 +1,421 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from os.path import exists import shutil import pytest from swh.loader.package import __version__ from swh.loader.package.loader import RawExtrinsicMetadataCore from swh.loader.package.opam.loader import OpamLoader, OpamPackageInfo from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( Person, RawExtrinsicMetadata, Release, Snapshot, SnapshotBranch, TargetType, ) from swh.model.model import MetadataAuthority, MetadataAuthorityType, MetadataFetcher from swh.model.model import ObjectType as ModelObjectType from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType from swh.storage.interface import PagedResult OCB_METADATA = b"""\ opam-version: "2.0" name: "ocb" version: "0.1" synopsis: "SVG badge generator" description: "An OCaml library for SVG badge generation. There\'s also a command-line tool provided." maintainer: "OCamlPro " authors: "OCamlPro " license: "ISC" homepage: "https://ocamlpro.github.io/ocb/" doc: "https://ocamlpro.github.io/ocb/api/" bug-reports: "https://github.com/OCamlPro/ocb/issues" depends: [ "ocaml" {>= "4.05"} "dune" {>= "2.0"} "odoc" {with-doc} ] build: [ ["dune" "subst"] {dev} [ "dune" "build" "-p" name "-j" jobs "@install" "@runtest" {with-test} "@doc" {with-doc} ] ] dev-repo: "git+https://github.com/OCamlPro/ocb.git" url { src: "https://github.com/OCamlPro/ocb/archive/0.1.tar.gz" checksum: [ "sha256=aa27684fbda1b8036ae7e3c87de33a98a9cd2662bcc91c8447e00e41476b6a46" "sha512=1260344f184dd8c8074b0439dbcc8a5d59550a654c249cd61913d4c150c664f37b76195ddca38f7f6646d08bddb320ceb8d420508450b4f09a233cd5c22e6b9b" ] } """ # noqa @pytest.fixture def fake_opam_root(mocker, tmpdir, datadir): """Fixture to initialize the actual opam in test context. It mocks the actual opam init calls and installs a fake opam root out of the one present in datadir. """ # inhibits the real `subprocess.call` which prepares the required internal opam # state module_name = "swh.loader.package.opam.loader" mock_init = mocker.patch(f"{module_name}.call", return_value=None) # Installs the fake opam root for the tests to use fake_opam_root_src = f"{datadir}/fake_opam_repo" fake_opam_root_dst = f"{tmpdir}/opam" # old version does not support dirs_exist_ok... 
# TypeError: copytree() got an unexpected keyword argument 'dirs_exist_ok' # see: https://docs.python.org/3.7/library/shutil.html if exists(fake_opam_root_dst): shutil.rmtree(fake_opam_root_dst) shutil.copytree(fake_opam_root_src, fake_opam_root_dst) yield fake_opam_root_dst # loader are initialized with `initialize_opam_root=True` so this should be called assert mock_init.called, "This should be called when loader use this fixture" def test_opam_loader_no_opam_repository_fails(swh_storage, tmpdir, datadir): """Running opam loader without a prepared opam repository fails""" opam_url = f"file://{datadir}/fake_opam_repo" opam_root = tmpdir opam_instance = "loadertest" opam_package = "agrid" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=False, # The opam directory must be present and no init... ) # No opam root directory init directory from loader. So, at the opam root does not # exist, the loading fails. That's the expected use for the production workers # (whose opam_root maintenance will be externally managed). actual_load_status = loader.load() assert actual_load_status == {"status": "failed"} def test_opam_loader_one_version( tmpdir, requests_mock_datadir, fake_opam_root, datadir, swh_storage ): opam_url = f"file://{datadir}/fake_opam_repo" opam_root = fake_opam_root opam_instance = "loadertest" opam_package = "agrid" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, # go through the initialization while mocking it ) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("e1159446b00745ba4daa7ee26d74fbd81ecc081c") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } assert_last_visit_matches( swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id ) release_id = hash_to_bytes("d4d8d3df4f34609a3eeabd48aea49002c5f54f41") expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": SnapshotBranch( target=b"agrid.0.1", target_type=TargetType.ALIAS, ), b"agrid.0.1": SnapshotBranch( target=release_id, target_type=TargetType.RELEASE, ), }, ) check_snapshot(expected_snapshot, swh_storage) assert swh_storage.release_get([release_id])[0] == Release( name=b"0.1", message=b"Synthetic release for OPAM source package agrid version 0.1\n", target=hash_to_bytes("00412ee5bc601deb462e55addd1004715116785e"), target_type=ModelObjectType.DIRECTORY, synthetic=True, author=Person.from_fullname(b"OCamlPro "), date=None, id=release_id, ) stats = get_stats(swh_storage) assert { "content": 18, "directory": 8, "origin": 1, "origin_visit": 1, "release": 1, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_opam_loader_many_version( tmpdir, requests_mock_datadir, fake_opam_root, datadir, swh_storage ): opam_url = f"file://{datadir}/fake_opam_repo" opam_root = fake_opam_root opam_instance = "loadertest" opam_package = "directories" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, ) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("f498f7f3b0edbce5cf5834b487a4f8360f6a6a43") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } expected_snapshot = Snapshot( id=expected_snapshot_id, branches={ b"HEAD": 
SnapshotBranch( target=b"directories.0.3", target_type=TargetType.ALIAS, ), b"directories.0.1": SnapshotBranch( target=hash_to_bytes("1c88d466b3d57a619e296999322d096fa37bb1c2"), target_type=TargetType.RELEASE, ), b"directories.0.2": SnapshotBranch( target=hash_to_bytes("d6f30684039ad485511a138e2ae504ff67a13075"), target_type=TargetType.RELEASE, ), b"directories.0.3": SnapshotBranch( target=hash_to_bytes("6cf92c0ff052074e69ac18809a9c8198bcc2e746"), target_type=TargetType.RELEASE, ), }, ) assert_last_visit_matches( swh_storage, url, status="full", type="opam", snapshot=expected_snapshot_id ) check_snapshot(expected_snapshot, swh_storage) def test_opam_release( tmpdir, requests_mock_datadir, fake_opam_root, swh_storage, datadir ): opam_url = f"file://{datadir}/fake_opam_repo" opam_root = fake_opam_root opam_instance = "loadertest" opam_package = "ocb" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, ) actual_load_status = loader.load() expected_snapshot_id = hash_to_bytes("8ba39f050243a72ca667c5587a87413240cbaa47") assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id.hex(), } info_iter = loader.get_package_info("0.1") branch_name, package_info = next(info_iter) expected_branch_name = "ocb.0.1" expected_package_info = OpamPackageInfo( url="https://github.com/OCamlPro/ocb/archive/0.1.tar.gz", filename=None, author=Person.from_fullname(b"OCamlPro "), committer=Person.from_fullname(b"OCamlPro "), version="0.1", directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( metadata=OCB_METADATA, format="opam-package-definition", ) ], + checksums={ + "sha256": "aa27684fbda1b8036ae7e3c87de33a98a9cd2662bcc91c8447e00e41476b6a46", + "sha512": ( + "1260344f184dd8c8074b0439dbcc8a5d59550a654c249cd61913d4c150c664f" + "37b76195ddca38f7f6646d08bddb320ceb8d420508450b4f09a233cd5c22e6b9b" + ), + }, ) assert branch_name == expected_branch_name assert package_info == expected_package_info release_id = hash_to_bytes("c231e541eb29c712635ada394b04127ac69e9fb0") expected_snapshot = Snapshot( id=hash_to_bytes(actual_load_status["snapshot_id"]), branches={ b"HEAD": SnapshotBranch( target=b"ocb.0.1", target_type=TargetType.ALIAS, ), b"ocb.0.1": SnapshotBranch( target=release_id, target_type=TargetType.RELEASE, ), }, ) assert_last_visit_matches( swh_storage, url, status="full", type="opam", snapshot=expected_snapshot.id ) check_snapshot(expected_snapshot, swh_storage) release = swh_storage.release_get([release_id])[0] assert release is not None assert release.author == expected_package_info.author def test_opam_metadata( tmpdir, requests_mock_datadir, fake_opam_root, swh_storage, datadir ): opam_url = f"file://{datadir}/fake_opam_repo" opam_root = fake_opam_root opam_instance = "loadertest" opam_package = "ocb" url = f"opam+{opam_url}/packages/{opam_package}" loader = OpamLoader( swh_storage, url, opam_root, opam_instance, opam_url, opam_package, initialize_opam_root=True, ) actual_load_status = loader.load() assert actual_load_status["status"] == "eventful" expected_release_id = hash_to_bytes("c231e541eb29c712635ada394b04127ac69e9fb0") expected_snapshot = Snapshot( id=hash_to_bytes(actual_load_status["snapshot_id"]), branches={ b"HEAD": SnapshotBranch( target=b"ocb.0.1", target_type=TargetType.ALIAS, ), b"ocb.0.1": SnapshotBranch( target=expected_release_id, target_type=TargetType.RELEASE, ), }, ) assert_last_visit_matches( swh_storage, url, status="full", type="opam", 
snapshot=expected_snapshot.id ) check_snapshot(expected_snapshot, swh_storage) release = swh_storage.release_get([expected_release_id])[0] assert release is not None release_swhid = CoreSWHID( object_type=ObjectType.RELEASE, object_id=expected_release_id ) directory_swhid = ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=release.target ) metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url=opam_url, ) expected_metadata = [ RawExtrinsicMetadata( target=directory_swhid, authority=metadata_authority, fetcher=MetadataFetcher( name="swh.loader.package.opam.loader.OpamLoader", version=__version__, ), discovery_date=loader.visit_date, format="opam-package-definition", metadata=OCB_METADATA, origin=url, release=release_swhid, ) ] assert swh_storage.raw_extrinsic_metadata_get( directory_swhid, metadata_authority, ) == PagedResult( next_page_token=None, results=expected_metadata, ) diff --git a/swh/loader/package/opam/tests/test_tasks.py b/swh/loader/package/opam/tests/test_tasks.py index eec635e..96939c1 100644 --- a/swh/loader/package/opam/tests/test_tasks.py +++ b/swh/loader/package/opam/tests/test_tasks.py @@ -1,78 +1,51 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict + +NAMESPACE = "swh.loader.package.opam" OPAM_LOADER_ARGS = { "url": "opam+https://opam.ocaml.org/packages/agrid", "opam_root": "/tmp/test_tasks_opam_loader", "opam_instance": "test_tasks_opam_loader", "opam_url": "https://opam.ocaml.org", "opam_package": "agrid", } -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass - - @pytest.fixture def opam_lister(): - return Lister(name="opam-lister", instance_name="example", id=uuid.uuid4()) + return Lister(name="opam", instance_name="example", id=uuid.uuid4()) @pytest.fixture def opam_listed_origin(opam_lister): return ListedOrigin( lister_id=opam_lister.id, url=OPAM_LOADER_ARGS["url"], visit_type="opam", extra_loader_arguments={ k: v for k, v in OPAM_LOADER_ARGS.items() if k != "url" }, ) -def test_tasks_opam_loader( - mocker, - swh_scheduler_celery_app, +def test_opam_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + opam_lister, + opam_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.opam.loader.OpamLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.opam.tasks.LoadOpam", - kwargs=OPAM_LOADER_ARGS, - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_opam_loader_for_listed_origin( - mocker, swh_scheduler_celery_app, opam_lister, opam_listed_origin -): - mock_load = mocker.patch("swh.loader.package.opam.loader.OpamLoader.load") - mock_load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(opam_listed_origin, opam_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.opam.tasks.LoadOpam", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.OpamLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadOpam", + 
lister=opam_lister, + listed_origin=opam_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/pubdev/loader.py b/swh/loader/package/pubdev/loader.py index 608457a..4bffa3b 100644 --- a/swh/loader/package/pubdev/loader.py +++ b/swh/loader/package/pubdev/loader.py @@ -1,194 +1,152 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json -from pathlib import Path -from typing import Any, Dict, Iterator, Optional, Sequence, Tuple +from typing import Dict, Iterator, Optional, Sequence, Tuple import attr from packaging.version import parse as parse_version -import yaml from swh.loader.package.loader import BasePackageInfo, PackageLoader from swh.loader.package.utils import ( EMPTY_AUTHOR, Person, cached_method, get_url_body, release_name, ) from swh.model.model import ObjectType, Release, Sha1Git, TimestampWithTimezone from swh.storage.interface import StorageInterface @attr.s class PubDevPackageInfo(BasePackageInfo): name = attr.ib(type=str) """Name of the package""" version = attr.ib(type=str) """Current version""" last_modified = attr.ib(type=str) """Last modified date as release date""" author = attr.ib(type=Person) """Author""" - description = attr.ib(type=str) - """Description""" - - -def extract_intrinsic_metadata(dir_path: Path) -> Dict[str, Any]: - """Extract intrinsic metadata from pubspec.yaml file at dir_path. - - Each pub.dev package version has a pubspec.yaml file at the root of the archive. - - See https://dart.dev/tools/pub/pubspec for pubspec specifications. 
- - Args: - dir_path: A directory on disk where a pubspec.yaml must be present - - Returns: - A dict mapping from yaml parser - """ - pubspec_path = dir_path / "pubspec.yaml" - return yaml.safe_load(pubspec_path.read_text()) - class PubDevLoader(PackageLoader[PubDevPackageInfo]): visit_type = "pubdev" PUBDEV_BASE_URL = "https://pub.dev/" def __init__( self, storage: StorageInterface, url: str, **kwargs, ): super().__init__(storage=storage, url=url, **kwargs) self.url = url assert url.startswith(self.PUBDEV_BASE_URL) self.package_info_url = url.replace( self.PUBDEV_BASE_URL, f"{self.PUBDEV_BASE_URL}api/" ) - def _raw_info(self) -> bytes: - return get_url_body(self.package_info_url) - @cached_method def info(self) -> Dict: """Return the project metadata information (fetched from pub.dev registry)""" # Use strict=False in order to correctly manage case where \n is present in a string - info = json.loads(self._raw_info(), strict=False) + info = json.loads(get_url_body(self.package_info_url), strict=False) # Arrange versions list as a new dict with `version` as key versions = {v["version"]: v for v in info["versions"]} info["versions"] = versions return info def get_versions(self) -> Sequence[str]: """Get all released versions of a PubDev package Returns: A sequence of versions Example:: ["0.1.1", "0.10.2"] """ versions = list(self.info()["versions"].keys()) versions.sort(key=parse_version) return versions def get_default_version(self) -> str: """Get the newest release version of a PubDev package Returns: A string representing a version Example:: "0.1.2" """ latest = self.info()["latest"] return latest["version"] def get_package_info(self, version: str) -> Iterator[Tuple[str, PubDevPackageInfo]]: """Get release name and package information from version Package info comes from extrinsic metadata (from self.info()) Args: version: Package version (e.g: "0.1.0") Returns: Iterator of tuple (release_name, p_info) """ v = self.info()["versions"][version] assert v["version"] == version url = v["archive_url"] name = v["pubspec"]["name"] filename = f"{name}-{version}.tar.gz" last_modified = v["published"] + checksums = {"sha256": v["archive_sha256"]} if v.get("archive_sha256") else {} - if "authors" in v["pubspec"]: + authors = v.get("pubspec", {}).get("authors") + if authors and isinstance(authors, list): # TODO: here we have a list of author, see T3887 - author = Person.from_fullname(v["pubspec"]["authors"][0].encode()) - elif "author" in v["pubspec"] and v["pubspec"]["author"] is not None: + author = Person.from_fullname(authors[0].encode()) + elif v.get("pubspec", {}).get("author"): author = Person.from_fullname(v["pubspec"]["author"].encode()) else: author = EMPTY_AUTHOR - description = v["pubspec"]["description"] - p_info = PubDevPackageInfo( name=name, filename=filename, url=url, version=version, last_modified=last_modified, author=author, - description=description, + checksums=checksums, ) yield release_name(version), p_info def build_release( self, p_info: PubDevPackageInfo, uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: - # Extract intrinsic metadata from uncompressed_path/pubspec.yaml - intrinsic_metadata = extract_intrinsic_metadata(Path(uncompressed_path)) - - name: str = intrinsic_metadata["name"] - version: str = intrinsic_metadata["version"] - assert version == p_info.version - - # author from intrinsic_metadata should not take precedence over the one - # returned by the api, see https://dart.dev/tools/pub/pubspec#authorauthors - author: Person = p_info.author - - if 
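# Illustrative sketch of the re-keying done in PubDevLoader.info() above: the
# pub.dev API returns "versions" as a list of objects, and the loader rewrites
# it into a dict keyed by version string so get_package_info() can look entries
# up directly. The `raw` value below is a trimmed-down, hypothetical payload
# shaped like the Autolinker test fixture:
raw = {
    "versions": [
        {
            "version": "0.1.1",
            "archive_url": "https://pub.dartlang.org/packages/Autolinker/versions/0.1.1.tar.gz",
        },
    ]
}
raw["versions"] = {v["version"]: v for v in raw["versions"]}
assert list(raw["versions"]) == ["0.1.1"]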
"description" in intrinsic_metadata and intrinsic_metadata["description"]: - description = intrinsic_metadata["description"] - else: - description = p_info.description - message = ( - f"Synthetic release for pub.dev source package {name} " - f"version {version}\n\n" - f"{description}\n" + f"Synthetic release for pub.dev source package {p_info.name} " + f"version {p_info.version}\n" ) return Release( - name=version.encode(), - author=author, + name=p_info.version.encode(), + author=p_info.author, date=TimestampWithTimezone.from_iso8601(p_info.last_modified), message=message.encode(), target_type=ObjectType.DIRECTORY, target=directory, synthetic=True, ) diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker index 824af41..1c1d1b9 100644 --- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker +++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_Autolinker @@ -1,29 +1,31 @@ { "name": "Autolinker", "latest": { "version": "0.1.1", "pubspec": { "version": "0.1.1", "homepage": "https://github.com/hackcave", "description": "Port of Autolinker.js to dart", "name": "Autolinker", "author": "hackcave " }, "archive_url": "https://pub.dartlang.org/packages/Autolinker/versions/0.1.1.tar.gz", + "archive_sha256": "ca6149c2bb566b07beaf731930ade8b77fad86055b3f37b6eb2f17aca2fbc1b1", "published": "2014-12-24T22:34:02.534090Z" }, "versions": [ { "version": "0.1.1", "pubspec": { "version": "0.1.1", "homepage": "https://github.com/hackcave", "description": "Port of Autolinker.js to dart", "name": "Autolinker", "author": "hackcave " }, "archive_url": "https://pub.dartlang.org/packages/Autolinker/versions/0.1.1.tar.gz", + "archive_sha256": "ca6149c2bb566b07beaf731930ade8b77fad86055b3f37b6eb2f17aca2fbc1b1", "published": "2014-12-24T22:34:02.534090Z" } ] -} +} \ No newline at end of file diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication index 25ca01a..f1f6a45 100644 --- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication +++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_authentication @@ -1,77 +1,77 @@ { "name": "authentication", "latest": { "version": "0.0.1", "pubspec": { "name": "authentication", "description": "Persistent user authentication for Flutter with optional backend API integration.", "version": "0.0.1", "author": null, "homepage": null, "environment": { "sdk": ">=2.7.0 <3.0.0", "flutter": ">=1.17.0 <2.0.0" }, "dependencies": { "flutter": { "sdk": "flutter" } }, "dev_dependencies": { "flutter_test": { "sdk": "flutter" } }, "flutter": { "plugin": { "platforms": { "some_platform": { "pluginClass": "somePluginClass" } } } } }, "archive_url": "https://pub.dartlang.org/packages/authentication/versions/0.0.1.tar.gz", - "archive_sha256": "0179334b346cb67e4e6e3c905e5cc5c8e488a45ebd99fd2be3a7e0476d620d99", + "archive_sha256": "bb8296bce47a5fe587b8d902ff87490593e8b86f736b38c6f9259c958b1f9b21", "published": "2020-08-13T04:53:34.134687Z" }, "versions": [ { "version": "0.0.1", "pubspec": { "name": "authentication", "description": "Persistent user authentication for Flutter with optional backend API integration.", "version": "0.0.1", "author": null, "homepage": null, "environment": { "sdk": ">=2.7.0 <3.0.0", "flutter": ">=1.17.0 <2.0.0" }, "dependencies": { "flutter": { "sdk": "flutter" } }, 
"dev_dependencies": { "flutter_test": { "sdk": "flutter" } }, "flutter": { "plugin": { "platforms": { "some_platform": { "pluginClass": "somePluginClass" } } } } }, "archive_url": "https://pub.dartlang.org/packages/authentication/versions/0.0.1.tar.gz", - "archive_sha256": "0179334b346cb67e4e6e3c905e5cc5c8e488a45ebd99fd2be3a7e0476d620d99", + "archive_sha256": "bb8296bce47a5fe587b8d902ff87490593e8b86f736b38c6f9259c958b1f9b21", "published": "2020-08-13T04:53:34.134687Z" } ] -} +} \ No newline at end of file diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier index dacdd55..38cc16a 100644 --- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier +++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_bezier @@ -1,55 +1,55 @@ { "name": "bezier", "latest": { "version": "1.1.5", "pubspec": { "name": "bezier", "version": "1.1.5", "authors": [ "Aaron Barrett ", "Isaac Barrett " ], "description": "A 2D Bézier curve math library. Based heavily on the work of @TheRealPomax . Live examples can be found at .", "homepage": "https://github.com/aab29/bezier.dart", "environment": { "sdk": ">=2.0.0 <3.0.0" }, "dependencies": { "vector_math": "^2.0.0" }, "dev_dependencies": { "test": "^1.0.0" } }, "archive_url": "https://pub.dartlang.org/packages/bezier/versions/1.1.5.tar.gz", - "archive_sha256": "cc5da2fa927b5d347550f78d456cd984b7df78a7f0405119cdab12111e2f9ee8", + "archive_sha256": "d8f2a8f75732e7f7c3c0295801c95970301536eee205d4532cb3bc1d720cb1bf", "published": "2019-12-22T03:17:30.805225Z" }, "versions": [ { "version": "1.1.5", "pubspec": { "name": "bezier", "version": "1.1.5", "authors": [ "Aaron Barrett ", "Isaac Barrett " ], "description": "A 2D Bézier curve math library. Based heavily on the work of @TheRealPomax .Live examples can be found at .", "homepage": "https://github.com/aab29/bezier.dart", "environment": { "sdk": ">=2.0.0 <3.0.0" }, "dependencies": { "vector_math": "^2.0.0" }, "dev_dependencies": { "test": "^1.0.0" } }, "archive_url": "https://pub.dartlang.org/packages/bezier/versions/1.1.5.tar.gz", - "archive_sha256": "cc5da2fa927b5d347550f78d456cd984b7df78a7f0405119cdab12111e2f9ee8", + "archive_sha256": "d8f2a8f75732e7f7c3c0295801c95970301536eee205d4532cb3bc1d720cb1bf", "published": "2019-12-22T03:17:30.805225Z" } ] -} +} \ No newline at end of file diff --git a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf index 1541536..2afc490 100644 --- a/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf +++ b/swh/loader/package/pubdev/tests/data/https_pub.dev/api_packages_pdf @@ -1,88 +1,91 @@ { "name": "pdf", "latest": { "version": "3.8.2", "pubspec": { "name": "pdf", "description": "A pdf producer for Dart. 
It can create pdf files for both web or flutter.", "homepage": "https://github.com/DavBfr/dart_pdf/tree/master/pdf", "repository": "https://github.com/DavBfr/dart_pdf", "issue_tracker": "https://github.com/DavBfr/dart_pdf/issues", "version": "3.8.2", "environment": { "sdk": ">=2.12.0 <3.0.0" }, "dependencies": { "archive": "^3.1.0", "barcode": ">=2.2.0 <3.0.0", "crypto": "^3.0.0", "image": ">=3.0.1 <4.0.0", "meta": ">=1.3.0 <2.0.0", "path_parsing": ">=0.2.0 <2.0.0", "vector_math": "^2.1.0", "xml": ">=5.1.0 <7.0.0" }, "dev_dependencies": { "flutter_lints": "^1.0.4", "test": ">=1.16.0 <2.0.0" } }, "archive_url": "https://pub.dartlang.org/packages/pdf/versions/3.8.2.tar.gz", + "archive_sha256": "b69a47f10620b5639bfcf51cd9acd1083e7e856dfc4a23f49df89445d1d27692", "published": "2022-07-25T11:38:25.983876Z" }, "versions": [ { "version": "1.0.0", "pubspec": { "version": "1.0.0", "name": "pdf", "dependencies": { "ttf_parser": "^1.0.0", "vector_math": "^2.0.7", "meta": "^1.1.5" }, "author": "David PHAM-VAN ", "description": "A pdf producer for Dart", "homepage": "https://github.com/davbfr/dart_pdf", "environment": { "sdk": ">=1.8.0 <2.0.0" }, "dev_dependencies": { "test": "any" } }, "archive_url": "https://pub.dartlang.org/packages/pdf/versions/1.0.0.tar.gz", + "archive_sha256": "54f1b1c4d519c3bad61ca63b53b46e7e9eabc3b7fb9a4707525520215152e4e1", "published": "2018-07-16T21:12:28.894137Z" }, { "version": "3.8.2", "pubspec": { "name": "pdf", "description": "A pdf producer for Dart. It can create pdf files for both web or flutter.", "homepage": "https://github.com/DavBfr/dart_pdf/tree/master/pdf", "repository": "https://github.com/DavBfr/dart_pdf", "issue_tracker": "https://github.com/DavBfr/dart_pdf/issues", "version": "3.8.2", "environment": { "sdk": ">=2.12.0 <3.0.0" }, "dependencies": { "archive": "^3.1.0", "barcode": ">=2.2.0 <3.0.0", "crypto": "^3.0.0", "image": ">=3.0.1 <4.0.0", "meta": ">=1.3.0 <2.0.0", "path_parsing": ">=0.2.0 <2.0.0", "vector_math": "^2.1.0", "xml": ">=5.1.0 <7.0.0" }, "dev_dependencies": { "flutter_lints": "^1.0.4", "test": ">=1.16.0 <2.0.0" } }, "archive_url": "https://pub.dartlang.org/packages/pdf/versions/3.8.2.tar.gz", + "archive_sha256": "b69a47f10620b5639bfcf51cd9acd1083e7e856dfc4a23f49df89445d1d27692", "published": "2022-07-25T11:38:25.983876Z" } ] -} +} \ No newline at end of file diff --git a/swh/loader/package/pubdev/tests/test_pubdev.py b/swh/loader/package/pubdev/tests/test_pubdev.py index 9267c24..757b143 100644 --- a/swh/loader/package/pubdev/tests/test_pubdev.py +++ b/swh/loader/package/pubdev/tests/test_pubdev.py @@ -1,326 +1,325 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from swh.loader.package.pubdev.loader import PubDevLoader from swh.loader.package.utils import EMPTY_AUTHOR from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( ObjectType, Person, Release, Snapshot, SnapshotBranch, TargetType, TimestampWithTimezone, ) EXPECTED_PACKAGES = [ { "url": "https://pub.dev/packages/Autolinker", # one version }, { "url": "https://pub.dev/packages/pdf", # multiple versions }, { "url": "https://pub.dev/packages/bezier", # multiple authors }, { "url": "https://pub.dev/packages/authentication", # empty author }, { "url": 
"https://pub.dev/packages/abstract_io", # loose versions names }, { "url": "https://pub.dev/packages/audio_manager", # loose ++ versions names }, ] def test_get_versions(requests_mock_datadir, swh_storage): loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[1]["url"], ) assert loader.get_versions() == [ "1.0.0", "3.8.2", ] def test_sort_loose_versions(requests_mock_datadir, swh_storage): """Sometimes version name does not follow semver""" loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[4]["url"], ) assert loader.get_versions() == ["0.1.2+4", "0.1.2+5", "0.1.2+6"] def test_sort_loose_versions_1(requests_mock_datadir, swh_storage): """Sometimes version name does not follow semver and mix patterns""" loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[5]["url"], ) assert loader.get_versions() == [ "0.0.1", "0.0.2", "0.1.1", "0.1.2", "0.1.3", "0.1.4", "0.1.5", "0.2.1", "0.2.1+hotfix.1", "0.2.1+hotfix.2", "0.2.1+3", "0.3.1", "0.3.1+1", "0.5.1", "0.5.1+1", "0.5.1+2", "0.5.1+3", "0.5.1+4", "0.5.1+5", "0.5.2", "0.5.2+1", "0.5.3", "0.5.3+1", "0.5.3+2", "0.5.3+3", "0.5.4", "0.5.4+1", "0.5.5", "0.5.5+1", "0.5.5+2", "0.5.5+3", "0.5.6", "0.5.7", "0.5.7+1", "0.6.1", "0.6.2", "0.7.1", "0.7.2", "0.7.3", "0.8.1", "0.8.2", ] def test_get_default_version(requests_mock_datadir, swh_storage): loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[1]["url"], ) assert loader.get_default_version() == "3.8.2" def test_pubdev_loader_load_one_version(datadir, requests_mock_datadir, swh_storage): loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[0]["url"], ) load_status = loader.load() assert load_status["status"] == "eventful" assert load_status["snapshot_id"] is not None - expected_snapshot_id = "245092931ba809e6c54ebda8f865fb5a969a4134" - expected_release_id = "919f267ea050539606344d49d14bf594c4386e5a" + expected_snapshot_id = "dffca49aec93fcf1fa63fa25bf9a04c833a30d73" + expected_release_id = "1e2e7226ac9136f2eb7ce28f32ca08fff28590b1" assert expected_snapshot_id == load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(load_status["snapshot_id"]), branches={ b"releases/0.1.1": SnapshotBranch( target=hash_to_bytes(expected_release_id), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/0.1.1", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 1, "directory": 1, "origin": 1, "origin_visit": 1, "release": 1, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert swh_storage.release_get([hash_to_bytes(expected_release_id)])[0] == Release( name=b"0.1.1", - message=b"Synthetic release for pub.dev source package Autolinker version" - b" 0.1.1\n\nPort of Autolinker.js to dart\n", + message=b"Synthetic release for pub.dev source package Autolinker version 0.1.1\n", target=hash_to_bytes("3fb6d4f2c0334d1604357ae92b2dd38a55a78194"), target_type=ObjectType.DIRECTORY, synthetic=True, author=Person( fullname=b"hackcave ", name=b"hackcave", email=b"hackers@hackcave.org", ), date=TimestampWithTimezone.from_iso8601("2014-12-24T22:34:02.534090+00:00"), id=hash_to_bytes(expected_release_id), ) assert_last_visit_matches( swh_storage, url=EXPECTED_PACKAGES[0]["url"], status="full", type="pubdev", snapshot=expected_snapshot.id, ) def test_pubdev_loader_load_multiple_versions( datadir, requests_mock_datadir, swh_storage ): loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[1]["url"], ) load_status = loader.load() assert load_status["status"] == 
"eventful" assert load_status["snapshot_id"] is not None - expected_snapshot_id = "43d5b68a9fa973aa95e56916aaef70841ccbc2a0" + expected_snapshot_id = "b03a4ef56b1a3bd4812f8e37f439c261cf4fd2c7" assert expected_snapshot_id == load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(load_status["snapshot_id"]), branches={ b"releases/1.0.0": SnapshotBranch( - target=hash_to_bytes("fbf8e40af675096681954553d737861e10b57216"), + target=hash_to_bytes("6f6eecd1ced321778d6a4bc60af4fb0e93178307"), target_type=TargetType.RELEASE, ), b"releases/3.8.2": SnapshotBranch( - target=hash_to_bytes("627a5d586e3fb4e7319b17f1aee268fe2fb8e01c"), + target=hash_to_bytes("012bac381e2b9cda7de2da0391bc2969bf80ff97"), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/3.8.2", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 1 + 1, "directory": 1 + 1, "origin": 1, "origin_visit": 1, "release": 1 + 1, "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats assert_last_visit_matches( swh_storage, url=EXPECTED_PACKAGES[1]["url"], status="full", type="pubdev", snapshot=expected_snapshot.id, ) def test_pubdev_loader_multiple_authors(datadir, requests_mock_datadir, swh_storage): loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[2]["url"], ) load_status = loader.load() assert load_status["status"] == "eventful" assert load_status["snapshot_id"] is not None - expected_snapshot_id = "4fa9f19d1d6ccc70921c8c50b278f510db63aa36" - expected_release_id = "538c98fd69a42d8d0561a7ca95b354de2143a3ab" + expected_snapshot_id = "2af571a302514bf17807dc114fff15501f8c1387" + expected_release_id = "87331a7804673cb00a339b504d2345769b7ae34a" assert expected_snapshot_id == load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(load_status["snapshot_id"]), branches={ b"releases/1.1.5": SnapshotBranch( target=hash_to_bytes(expected_release_id), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/1.1.5", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) release = swh_storage.release_get([hash_to_bytes(expected_release_id)])[0] assert release.author == Person( fullname=b"Aaron Barrett ", name=b"Aaron Barrett", email=b"aaron@aaronbarrett.com", ) def test_pubdev_loader_empty_author(datadir, requests_mock_datadir, swh_storage): loader = PubDevLoader( swh_storage, url=EXPECTED_PACKAGES[3]["url"], ) load_status = loader.load() assert load_status["status"] == "eventful" assert load_status["snapshot_id"] is not None - expected_snapshot_id = "0c7fa6b9fced23c648d2093ad5597622683f8aed" - expected_release_id = "7d8c05181069aa1049a3f0bc1d13bedc34625d47" + expected_snapshot_id = "8b86c9fb49bbf3e2b4513dc35a2838c67e8895bc" + expected_release_id = "d6ba845e28fba2a51e2ed358664cad645a2591ca" assert expected_snapshot_id == load_status["snapshot_id"] expected_snapshot = Snapshot( id=hash_to_bytes(load_status["snapshot_id"]), branches={ b"releases/0.0.1": SnapshotBranch( target=hash_to_bytes(expected_release_id), target_type=TargetType.RELEASE, ), b"HEAD": SnapshotBranch( target=b"releases/0.0.1", target_type=TargetType.ALIAS, ), }, ) check_snapshot(expected_snapshot, swh_storage) release = swh_storage.release_get([hash_to_bytes(expected_release_id)])[0] assert release.author == EMPTY_AUTHOR def test_pubdev_invalid_origin(swh_storage): with pytest.raises(AssertionError): PubDevLoader( swh_storage, "http://nowhere/api/packages/42", ) diff --git 
a/swh/loader/package/pubdev/tests/test_tasks.py b/swh/loader/package/pubdev/tests/test_tasks.py index c5b2ce7..c70777e 100644 --- a/swh/loader/package/pubdev/tests/test_tasks.py +++ b/swh/loader/package/pubdev/tests/test_tasks.py @@ -1,23 +1,40 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +import uuid -def test_tasks_pubdev_loader( - mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +NAMESPACE = "swh.loader.package.pubdev" + + +@pytest.fixture +def pubdev_lister(): + return Lister(name="pubdev", instance_name="example", id=uuid.uuid4()) + + +@pytest.fixture +def pubdev_listed_origin(pubdev_lister): + return ListedOrigin( + lister_id=pubdev_lister.id, + url="https://pub.dev/packages/some-package", + visit_type="pubdev", + ) + + +def test_pubdev_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + pubdev_lister, + pubdev_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.pubdev.loader.PubDevLoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.pubdev.tasks.LoadPubDev", - kwargs=dict( - url="https://pub.dev/packages/some-package", - ), + + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.PubDevLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadPubDev", + lister=pubdev_lister, + listed_origin=pubdev_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/puppet/__init__.py b/swh/loader/package/puppet/__init__.py new file mode 100644 index 0000000..ba1dc18 --- /dev/null +++ b/swh/loader/package/puppet/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + + +from typing import Any, Mapping + + +def register() -> Mapping[str, Any]: + """Register the current worker module's definition""" + from .loader import PuppetLoader + + return { + "task_modules": [f"{__name__}.tasks"], + "loader": PuppetLoader, + } diff --git a/swh/loader/package/puppet/loader.py b/swh/loader/package/puppet/loader.py new file mode 100644 index 0000000..6f0221e --- /dev/null +++ b/swh/loader/package/puppet/loader.py @@ -0,0 +1,153 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from datetime import datetime +import json +from pathlib import Path +from typing import Any, Dict, Iterator, Optional, Sequence, Tuple + +import attr +import iso8601 +from packaging.version import parse as parse_version + +from swh.loader.package.loader import BasePackageInfo, PackageLoader +from swh.loader.package.utils import Person, release_name +from swh.model.model import ObjectType, Release, Sha1Git, TimestampWithTimezone +from swh.storage.interface import StorageInterface + + +@attr.s +class PuppetPackageInfo(BasePackageInfo): + + name = 
attr.ib(type=str) + """Name of the package""" + + filename = attr.ib(type=str) + """Archive (tar.gz) file name""" + + version = attr.ib(type=str) + """Current version""" + + last_modified = attr.ib(type=datetime) + """Module last update date as release date""" + + +def extract_intrinsic_metadata(dir_path: Path) -> Dict[str, Any]: + """Extract intrinsic metadata from metadata.json file at dir_path. + + Each Puppet module version has a metadata.json file at the root of the archive. + + See ``https://puppet.com/docs/puppet/7/modules_metadata.html`` for metadata specifications. + + Args: + dir_path: A directory on disk where a metadata.json file must be present + + Returns: + A dict mapping from json parser + """ + meta_json_path = dir_path / "metadata.json" + metadata: Dict[str, Any] = json.loads(meta_json_path.read_text()) + return metadata + + +class PuppetLoader(PackageLoader[PuppetPackageInfo]): + visit_type = "puppet" + + def __init__( + self, + storage: StorageInterface, + url: str, + artifacts: Dict[str, Any], + **kwargs, + ): + + super().__init__(storage=storage, url=url, **kwargs) + self.url = url + self.artifacts = artifacts + + def get_versions(self) -> Sequence[str]: + """Get all released versions of a Puppet module + + Returns: + A sequence of versions + + Example:: + + ["0.1.1", "0.10.2"] + """ + versions = list(self.artifacts.keys()) + versions.sort(key=parse_version) + return versions + + def get_default_version(self) -> str: + """Get the newest release version of a Puppet module + + Returns: + A string representing a version + + Example:: + + "0.10.2" + """ + return self.get_versions()[-1] + + def get_package_info(self, version: str) -> Iterator[Tuple[str, PuppetPackageInfo]]: + """Get release name and package information from version + + Args: + version: Package version (e.g: "0.1.0") + + Returns: + Iterator of tuple (release_name, p_info) + """ + data = self.artifacts[version] + assert data["filename"].endswith(f"-{version}.tar.gz") + pkgname: str = data["filename"].split(f"-{version}.tar.gz")[0] + url: str = data["url"] + filename: str = data["filename"] + last_modified: datetime = iso8601.parse_date(data["last_update"]) + + p_info = PuppetPackageInfo( + name=pkgname, + filename=filename, + url=url, + version=version, + last_modified=last_modified, + checksums=data["checksums"], + ) + yield release_name(version), p_info + + def build_release( + self, p_info: PuppetPackageInfo, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Release]: + # compute extracted module directory name + dirname = p_info.filename.split(".tar.gz")[0] + + # Extract intrinsic metadata from uncompressed_path/{dirname}/metadata.json + intrinsic_metadata = extract_intrinsic_metadata( + Path(uncompressed_path) / f"{dirname}" + ) + + version: str = intrinsic_metadata["version"] + assert version == p_info.version + + description = intrinsic_metadata["summary"] + author = Person.from_fullname(intrinsic_metadata["author"].encode()) + + message = ( + f"Synthetic release for Puppet source package {p_info.name} " + f"version {version}\n\n" + f"{description}\n" + ) + + return Release( + name=version.encode(), + author=author, + date=TimestampWithTimezone.from_datetime(p_info.last_modified), + message=message.encode(), + target_type=ObjectType.DIRECTORY, + target=directory, + synthetic=True, + ) diff --git a/swh/loader/package/puppet/tasks.py b/swh/loader/package/puppet/tasks.py new file mode 100644 index 0000000..aac7035 --- /dev/null +++ b/swh/loader/package/puppet/tasks.py @@ -0,0 +1,14 @@ +# 
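# Illustrative instantiation of the new PuppetLoader: the lister supplies an
# `artifacts` mapping keyed by version, each entry carrying the forge download
# URL, filename, last_update timestamp and (optionally) checksums. Values here
# mirror the saz-memcached test fixture further below; `swh_storage` is assumed
# to be an existing StorageInterface instance:
loader = PuppetLoader(
    storage=swh_storage,
    url="https://forge.puppet.com/modules/saz/memcached",
    artifacts={
        "1.0.0": {
            "url": "https://forgeapi.puppet.com/v3/files/saz-memcached-1.0.0.tar.gz",
            "version": "1.0.0",
            "filename": "saz-memcached-1.0.0.tar.gz",
            "last_update": "2011-11-20T13:40:30-08:00",
            "checksums": {"length": 763},
        },
    },
)
assert loader.get_versions() == ["1.0.0"]
assert loader.get_default_version() == "1.0.0"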
Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from celery import shared_task + +from swh.loader.package.puppet.loader import PuppetLoader + + +@shared_task(name=__name__ + ".LoadPuppet") +def load_puppet(**kwargs): + """Load Puppet modules from puppet.com""" + return PuppetLoader.from_configfile(**kwargs).load() diff --git a/swh/loader/core/__init__.py b/swh/loader/package/puppet/tests/__init__.py similarity index 100% copy from swh/loader/core/__init__.py copy to swh/loader/package/puppet/tests/__init__.py diff --git a/swh/loader/package/puppet/tests/data/fake_puppet.sh b/swh/loader/package/puppet/tests/data/fake_puppet.sh new file mode 100644 index 0000000..43ead41 --- /dev/null +++ b/swh/loader/package/puppet/tests/data/fake_puppet.sh @@ -0,0 +1,136 @@ +#!/usr/bin/env bash + +# Script to generate fake Puppet module archives as .tar.gz. + +set -euo pipefail + +# Create directories +readonly TMP=tmp_dir/puppet +readonly BASE_PATH=https_forgeapi.puppet.com + +mkdir -p $TMP + +# tar.gz package archives +# Puppet module tar.gz archive needs at least one directory with a metadata.json file +mkdir -p ${TMP}/saz-memcached-1.0.0 +mkdir -p ${TMP}/saz-memcached-8.1.0 +mkdir -p $BASE_PATH + +echo -e '''{ + "summary": "UNKNOWN", + "author": "saz", + "source": "UNKNOWN", + "dependencies": [ + + ], + "types": [ + + ], + "license": "Apache License, Version 2.0", + "project_page": "https://github.com/saz/puppet-memcached", + "version": "1.0.0", + "name": "saz-memcached", + "checksums": { + "spec/spec_helper.rb": "ca19ec4f451ebc7fdb035b52eae6e909", + "manifests/params.pp": "0b8904086e7fa6f0d1f667d547a17d96", + "README.md": "fa0b9f6d97f2763e565d8a330fb3930b", + "manifests/config.pp": "706f7c5001fb6014575909a335a52def", + "templates/memcached.conf.erb": "8151e00d922bb9ebb1a24a05ac0969d7", + "manifests/service.pp": "a528751401189c299a38cab12d52431f", + "tests/init.pp": "e798f4999ba392f3c0fce0d5290c263f", + "manifests/install.pp": "11a9e9a99a7bc1c7b2511ce7e79c9fb4", + "spec/spec.opts": "a600ded995d948e393fbe2320ba8e51c", + "metadata.json": "d34d0b70aba36510fbc2df4e667479ef", + "manifests/init.pp": "c5166a8a88b544ded705efac21494bc1", + "Modulefile": "7f512991a7d2ad99ffb28ac6e7419f9e" + }, + "description": "Manage memcached via Puppet" +} +''' > ${TMP}/saz-memcached-1.0.0/metadata.json + +echo -e '''{ + "name": "saz-memcached", + "version": "8.1.0", + "author": "saz", + "summary": "Manage memcached via Puppet", + "license": "Apache-2.0", + "source": "git://github.com/saz/puppet-memcached.git", + "project_page": "https://github.com/saz/puppet-memcached", + "issues_url": "https://github.com/saz/puppet-memcached/issues", + "description": "Manage memcached via Puppet", + "requirements": [ + {"name":"puppet","version_requirement":">= 6.1.0 < 8.0.0" } + ], + "dependencies": [ + {"name":"puppetlabs/stdlib","version_requirement":">= 4.13.1 < 9.0.0"}, + {"name":"puppetlabs/firewall","version_requirement":">= 0.1.0 < 4.0.0"}, + {"name":"puppet/systemd","version_requirement":">= 2.10.0 < 4.0.0"}, + {"name":"puppet/selinux","version_requirement":">= 3.2.0 < 4.0.0"} + ], + "operatingsystem_support": [ + { + "operatingsystem": "RedHat", + "operatingsystemrelease": [ + "7", + "8", + "9" + ] + }, + { + "operatingsystem": "CentOS", + "operatingsystemrelease": [ + "7", + "8", + "9" + ] + }, + { + "operatingsystem": 
"OracleLinux", + "operatingsystemrelease": [ + "7" + ] + }, + { + "operatingsystem": "Scientific", + "operatingsystemrelease": [ + "7" + ] + }, + { + "operatingsystem": "Debian", + "operatingsystemrelease": [ + "9", + "10", + "11" + ] + }, + { + "operatingsystem": "Ubuntu", + "operatingsystemrelease": [ + "18.04", + "20.04", + "22.04" + ] + }, + { + "operatingsystem": "Windows" + }, + { + "operatingsystem": "FreeBSD" + } + ] +} +''' > ${TMP}/saz-memcached-8.1.0/metadata.json + +cd $TMP + +# Tar compress +tar -czf v3_files_saz-memcached-1.0.0.tar.gz saz-memcached-1.0.0 +tar -czf v3_files_saz-memcached-8.1.0.tar.gz saz-memcached-8.1.0 + +# Move .tar.gz archives to a servable directory +mv *.tar.gz ../../$BASE_PATH + +# Clean up removing tmp_dir +cd ../../ +rm -r tmp_dir/ diff --git a/swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-1.0.0.tar.gz b/swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-1.0.0.tar.gz new file mode 100644 index 0000000..993925b Binary files /dev/null and b/swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-1.0.0.tar.gz differ diff --git a/swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-8.1.0.tar.gz b/swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-8.1.0.tar.gz new file mode 100644 index 0000000..7b818e1 Binary files /dev/null and b/swh/loader/package/puppet/tests/data/https_forgeapi.puppet.com/v3_files_saz-memcached-8.1.0.tar.gz differ diff --git a/swh/loader/package/puppet/tests/test_puppet.py b/swh/loader/package/puppet/tests/test_puppet.py new file mode 100644 index 0000000..07cf0ac --- /dev/null +++ b/swh/loader/package/puppet/tests/test_puppet.py @@ -0,0 +1,125 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from swh.loader.package.puppet.loader import PuppetLoader +from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats +from swh.model.hashutil import hash_to_bytes +from swh.model.model import ( + ObjectType, + Person, + Release, + Snapshot, + SnapshotBranch, + TargetType, + TimestampWithTimezone, +) + +ORIGINS = { + "url": "https://forge.puppet.com/modules/saz/memcached", + "artifacts": { + "1.0.0": { + "url": "https://forgeapi.puppet.com/v3/files/saz-memcached-1.0.0.tar.gz", # noqa: B950 + "version": "1.0.0", + "filename": "saz-memcached-1.0.0.tar.gz", + "last_update": "2011-11-20T13:40:30-08:00", + "checksums": { + "length": 763, + }, + }, + "8.1.0": { + "url": "https://forgeapi.puppet.com/v3/files/saz-memcached-8.1.0.tar.gz", # noqa: B950 + "version": "8.1.0", + "filename": "saz-memcached-8.1.0.tar.gz", + "last_update": "2022-07-11T03:34:55-07:00", + "checksums": { + "md5": "5313e8fff0af08d63681daf955e7a604", + "sha256": "0dbb1470c64435700767e9887d0cf70203b1ae59445c401d5d200f2dabb3226e", # noqa: B950 + }, + }, + }, +} + + +def test_get_versions(requests_mock_datadir, swh_storage): + loader = PuppetLoader( + swh_storage, url=ORIGINS["url"], artifacts=ORIGINS["artifacts"] + ) + assert loader.get_versions() == ["1.0.0", "8.1.0"] + + +def test_get_default_version(requests_mock_datadir, swh_storage): + loader = PuppetLoader( + swh_storage, url=ORIGINS["url"], artifacts=ORIGINS["artifacts"] + ) + assert loader.get_default_version() == "8.1.0" + + +def 
test_puppet_loader_load_multiple_version( + datadir, requests_mock_datadir, swh_storage +): + loader = PuppetLoader( + swh_storage, url=ORIGINS["url"], artifacts=ORIGINS["artifacts"] + ) + load_status = loader.load() + assert load_status["status"] == "eventful" + assert load_status["snapshot_id"] is not None + + expected_snapshot_id = "9a8e76a8a6eae5285059d9f6d5083a99317727cf" + + assert expected_snapshot_id == load_status["snapshot_id"] + + expected_snapshot = Snapshot( + id=hash_to_bytes(load_status["snapshot_id"]), + branches={ + b"HEAD": SnapshotBranch( + target=b"releases/8.1.0", + target_type=TargetType.ALIAS, + ), + b"releases/1.0.0": SnapshotBranch( + target=hash_to_bytes("50eb560bb5322cd149359b9cc8debc78834bcfad"), + target_type=TargetType.RELEASE, + ), + b"releases/8.1.0": SnapshotBranch( + target=hash_to_bytes("2f5722136d775dd48fe85fabdd274f1e2d7fcf22"), + target_type=TargetType.RELEASE, + ), + }, + ) + + check_snapshot(expected_snapshot, swh_storage) + + stats = get_stats(swh_storage) + assert { + "content": 1 + 1, + "directory": 2 + 2, + "origin": 1, + "origin_visit": 1, + "release": 1 + 1, + "revision": 0, + "skipped_content": 0, + "snapshot": 1, + } == stats + + assert swh_storage.release_get( + [hash_to_bytes("2f5722136d775dd48fe85fabdd274f1e2d7fcf22")] + )[0] == Release( + name=b"8.1.0", + message=b"Synthetic release for Puppet source package saz-memcached version 8.1.0\n\n" + b"Manage memcached via Puppet\n", + target=hash_to_bytes("1b9a2dbc80f954e1ba4b2f1c6344d1ce4e84ab7c"), + target_type=ObjectType.DIRECTORY, + synthetic=True, + author=Person(fullname=b"saz", name=b"saz", email=None), + date=TimestampWithTimezone.from_iso8601("2022-07-11T03:34:55-07:00"), + id=hash_to_bytes("2f5722136d775dd48fe85fabdd274f1e2d7fcf22"), + ) + + assert_last_visit_matches( + swh_storage, + url=ORIGINS["url"], + status="full", + type="puppet", + snapshot=expected_snapshot.id, + ) diff --git a/swh/loader/package/puppet/tests/test_tasks.py b/swh/loader/package/puppet/tests/test_tasks.py new file mode 100644 index 0000000..e3a0d5a --- /dev/null +++ b/swh/loader/package/puppet/tests/test_tasks.py @@ -0,0 +1,50 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import uuid + +import pytest + +from swh.scheduler.model import ListedOrigin, Lister + +NAMESPACE = "swh.loader.package.puppet" + + +@pytest.fixture +def puppet_lister(): + return Lister(name="puppet", instance_name="example", id=uuid.uuid4()) + + +@pytest.fixture +def puppet_listed_origin(puppet_lister): + return ListedOrigin( + lister_id=puppet_lister.id, + url="some-url/api/packages/some-package", + visit_type="aur", + extra_loader_arguments={ + "artifacts": { + "1.0.0": { + "url": "https://domain/some-package-1.0.0.tar.gz", + "version": "1.0.0", + "filename": "some-module-1.0.0.tar.gz", + "last_update": "2011-11-20T13:40:30-08:00", + }, + } + }, + ) + + +def test_puppet_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, + puppet_lister, + puppet_listed_origin, +): + + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.PuppetLoader", + task_function_name=f"{NAMESPACE}.tasks.LoadPuppet", + lister=puppet_lister, + listed_origin=puppet_listed_origin, + ) diff --git a/swh/loader/package/pypi/loader.py b/swh/loader/package/pypi/loader.py index fe814f7..19e26e0 100644 --- 
a/swh/loader/package/pypi/loader.py +++ b/swh/loader/package/pypi/loader.py @@ -1,248 +1,251 @@ -# Copyright (C) 2019-2021 The Software Heritage developers +# Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information +from __future__ import annotations + import json import logging import os from typing import Any, Dict, Iterator, Optional, Sequence, Tuple from urllib.parse import urlparse import attr from pkginfo import UnpackedSDist from swh.loader.package.loader import ( BasePackageInfo, PackageLoader, PartialExtID, RawExtrinsicMetadataCore, ) from swh.loader.package.utils import ( EMPTY_AUTHOR, cached_method, get_url_body, release_name, ) from swh.model.hashutil import hash_to_bytes from swh.model.model import ( MetadataAuthority, MetadataAuthorityType, ObjectType, Person, Release, Sha1Git, TimestampWithTimezone, ) from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) EXTID_TYPE = "pypi-archive-sha256" EXTID_VERSION = 0 @attr.s class PyPIPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) name = attr.ib(type=str) comment_text = attr.ib(type=Optional[str]) sha256 = attr.ib(type=str) upload_time = attr.ib(type=str) @classmethod def from_metadata( cls, metadata: Dict[str, Any], name: str, version: str - ) -> "PyPIPackageInfo": + ) -> PyPIPackageInfo: return cls( url=metadata["url"], filename=metadata["filename"], version=version, raw_info=metadata, name=name, comment_text=metadata.get("comment_text"), sha256=metadata["digests"]["sha256"], upload_time=metadata["upload_time"], directory_extrinsic_metadata=[ RawExtrinsicMetadataCore( format="pypi-project-json", metadata=json.dumps(metadata).encode(), ) ], + checksums={"sha256": metadata["digests"]["sha256"]}, ) def extid(self) -> PartialExtID: return (EXTID_TYPE, EXTID_VERSION, hash_to_bytes(self.sha256)) class PyPILoader(PackageLoader[PyPIPackageInfo]): """Load pypi origin's artifact releases into swh archive.""" visit_type = "pypi" def __init__(self, storage: StorageInterface, url: str, **kwargs): super().__init__(storage=storage, url=url, **kwargs) self.provider_url = pypi_api_url(self.origin.url) @cached_method def _raw_info(self) -> bytes: return get_url_body(self.provider_url) @cached_method def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry)""" return json.loads(self._raw_info()) def get_versions(self) -> Sequence[str]: return self.info()["releases"].keys() def get_default_version(self) -> str: return self.info()["info"]["version"] def get_metadata_authority(self): p_url = urlparse(self.origin.url) return MetadataAuthority( type=MetadataAuthorityType.FORGE, url=f"{p_url.scheme}://{p_url.netloc}/", metadata={}, ) def get_package_info(self, version: str) -> Iterator[Tuple[str, PyPIPackageInfo]]: res = [] for meta in self.info()["releases"][version]: # process only standard sdist archives if meta["packagetype"] != "sdist" or meta["filename"].lower().endswith( (".deb", ".egg", ".rpm", ".whl") ): continue p_info = PyPIPackageInfo.from_metadata( meta, name=self.info()["info"]["name"], version=version ) res.append((version, p_info)) if len(res) == 1: version, p_info = res[0] yield release_name(version), p_info else: for version, p_info in res: yield release_name(version, p_info.filename), p_info def build_release( self, p_info: PyPIPackageInfo, 
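# PyPIPackageInfo.from_metadata() above now also records the sdist's sha256
# under `checksums`, so the generic download step can verify the fetched
# archive. A trimmed-down, hypothetical PyPI API entry and the fields derived
# from it (the URL and the all-zero digest are placeholders, not real values):
meta = {
    "url": "https://files.pythonhosted.org/packages/source/e/example/example-1.0.tar.gz",
    "filename": "example-1.0.tar.gz",
    "comment_text": "",
    "digests": {"sha256": "0" * 64},
    "upload_time": "2022-01-01T00:00:00",
}
p_info = PyPIPackageInfo.from_metadata(meta, name="example", version="1.0")
assert p_info.checksums == {"sha256": "0" * 64}
assert p_info.extid() == (EXTID_TYPE, EXTID_VERSION, hash_to_bytes("0" * 64))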
uncompressed_path: str, directory: Sha1Git ) -> Optional[Release]: i_metadata = extract_intrinsic_metadata(uncompressed_path) if not i_metadata: return None # from intrinsic metadata version_ = i_metadata.get("version", p_info.version) author_ = author(i_metadata) if p_info.comment_text: msg = p_info.comment_text else: msg = ( f"Synthetic release for PyPI source package {p_info.name} " f"version {version_}\n" ) date = TimestampWithTimezone.from_iso8601(p_info.upload_time) return Release( name=p_info.version.encode(), message=msg.encode(), author=author_, date=date, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) def pypi_api_url(url: str) -> str: """Compute api url from a project url Args: url (str): PyPI instance's url (e.g: https://pypi.org/project/requests) This deals with correctly transforming the project's api url (e.g https://pypi.org/pypi/requests/json) Returns: api url """ p_url = urlparse(url) project_name = p_url.path.rstrip("/").split("/")[-1] url = "%s://%s/pypi/%s/json" % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains at their root one folder. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any or None if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] pkginfo_path = os.path.join(dir_path, project_dirname, "PKG-INFO") if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ raw.pop("filename") # this gets added with the ondisk location return raw def author(data: Dict) -> Person: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: swh-model dict representing a person. 
""" name = data.get("author") email = data.get("author_email") fullname = None # type: Optional[str] if email: fullname = "%s <%s>" % (name, email) else: fullname = name if not fullname: return EMPTY_AUTHOR if name is not None: name = name.encode("utf-8") if email is not None: email = email.encode("utf-8") return Person(fullname=fullname.encode("utf-8"), name=name, email=email) diff --git a/swh/loader/package/pypi/tests/test_tasks.py b/swh/loader/package/pypi/tests/test_tasks.py index c294762..26f7abf 100644 --- a/swh/loader/package/pypi/tests/test_tasks.py +++ b/swh/loader/package/pypi/tests/test_tasks.py @@ -1,69 +1,40 @@ # Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import uuid import pytest from swh.scheduler.model import ListedOrigin, Lister -from swh.scheduler.utils import create_origin_task_dict - -@pytest.fixture(autouse=True) -def celery_worker_and_swh_config(swh_scheduler_celery_worker, swh_config): - pass +NAMESPACE = "swh.loader.package.pypi" @pytest.fixture def pypi_lister(): - return Lister(name="pypi-lister", instance_name="example", id=uuid.uuid4()) + return Lister(name="pypi", instance_name="example", id=uuid.uuid4()) @pytest.fixture def pypi_listed_origin(pypi_lister): return ListedOrigin( lister_id=pypi_lister.id, url="https://pypi.example.org/package", visit_type="pypi", ) -def test_tasks_pypi_loader( - mocker, - swh_scheduler_celery_app, -): - mock_load = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load") - mock_load.return_value = {"status": "eventful"} - - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.pypi.tasks.LoadPyPI", kwargs=dict(url="some-url") - ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} - - -def test_tasks_pypi_loader_for_listed_origin( - mocker, - swh_scheduler_celery_app, +def test_pypi_loader_task_for_listed_origin( + loading_task_creation_for_listed_origin_test, pypi_lister, pypi_listed_origin, ): - mock_load = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load") - mock_load.return_value = {"status": "eventful"} - - task_dict = create_origin_task_dict(pypi_listed_origin, pypi_lister) - res = swh_scheduler_celery_app.send_task( - "swh.loader.package.pypi.tasks.LoadPyPI", - kwargs=task_dict["arguments"]["kwargs"], + loading_task_creation_for_listed_origin_test( + loader_class_name=f"{NAMESPACE}.loader.PyPILoader", + task_function_name=f"{NAMESPACE}.tasks.LoadPyPI", + lister=pypi_lister, + listed_origin=pypi_listed_origin, ) - assert res - res.wait() - assert res.successful() - assert mock_load.called - assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/utils.py b/swh/loader/package/utils.py index adf882b..042702b 100644 --- a/swh/loader/package/utils.py +++ b/swh/loader/package/utils.py @@ -1,213 +1,213 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import functools import itertools import logging import os import re from typing import Callable, Dict, Optional, Tuple, TypeVar from urllib.parse import unquote, urlsplit from urllib.request import urlopen import requests from requests.exceptions 
import HTTPError from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.stop import stop_after_attempt from tenacity.wait import wait_exponential from swh.loader.exception import NotFound from swh.loader.package import DEFAULT_PARAMS from swh.model.hashutil import HASH_BLOCK_SIZE, MultiHash from swh.model.model import Person logger = logging.getLogger(__name__) DOWNLOAD_HASHES = set(["sha1", "sha256", "length"]) EMPTY_AUTHOR = Person.from_fullname(b"") def _content_disposition_filename(header: str) -> Optional[str]: fname = None fnames = re.findall(r"filename[\*]?=([^;]+)", header) if fnames and "utf-8''" in fnames[0].lower(): # RFC 5987 fname = re.sub("utf-8''", "", fnames[0], flags=re.IGNORECASE) fname = unquote(fname) elif fnames: fname = fnames[0] if fname: fname = os.path.basename(fname.strip().strip('"')) return fname def _retry_if_throttling(retry_state) -> bool: """Custom tenacity retry predicate for handling HTTP responses with status code 429 (too many requests). """ attempt = retry_state.outcome if attempt.failed: exception = attempt.exception() return ( isinstance(exception, HTTPError) and exception.response.status_code == 429 ) return False throttling_retry = retry( retry=_retry_if_throttling, wait=wait_exponential(exp_base=10), stop=stop_after_attempt(max_attempt_number=5), before_sleep=before_sleep_log(logger, logging.WARNING), reraise=True, ) @throttling_retry def download( url: str, dest: str, hashes: Dict = {}, filename: Optional[str] = None, auth: Optional[Tuple[str, str]] = None, extra_request_headers: Optional[Dict[str, str]] = None, ) -> Tuple[str, Dict]: - """Download a remote tarball from url, uncompresses and computes swh hashes - on it. + """Download a remote file from url, and compute swh hashes on it. Args: - url: Artifact uri to fetch, uncompress and hash + url: Artifact uri to fetch and hash dest: Directory to write the archive to hashes: Dict of expected hashes (key is the hash algo) for the artifact - to download (those hashes are expected to be hex string) + to download (those hashes are expected to be hex string). The supported + algorithms are defined in the :data:`swh.model.hashutil.ALGORITHMS` set. auth: Optional tuple of login/password (for http authentication service, e.g. deposit) Raises: ValueError in case of any error when fetching/computing (length, checksums mismatched...) 
Returns: Tuple of local (filepath, hashes of filepath) """ params = copy.deepcopy(DEFAULT_PARAMS) if auth is not None: params["auth"] = auth if extra_request_headers is not None: params["headers"].update(extra_request_headers) # so the connection does not hang indefinitely (read/connection timeout) timeout = params.get("timeout", 60) if url.startswith("ftp://"): response = urlopen(url, timeout=timeout) chunks = (response.read(HASH_BLOCK_SIZE) for _ in itertools.count()) response_data = itertools.takewhile(bool, chunks) else: response = requests.get(url, **params, timeout=timeout, stream=True) response.raise_for_status() # update URL to response one as requests follow redirection by default # on GET requests url = response.url # try to extract filename from content-disposition header if available if filename is None and "content-disposition" in response.headers: filename = _content_disposition_filename( response.headers["content-disposition"] ) response_data = response.iter_content(chunk_size=HASH_BLOCK_SIZE) filename = filename if filename else os.path.basename(urlsplit(url).path) logger.debug("filename: %s", filename) filepath = os.path.join(dest, filename) logger.debug("filepath: %s", filepath) h = MultiHash(hash_names=DOWNLOAD_HASHES | set(hashes.keys())) with open(filepath, "wb") as f: for chunk in response_data: h.update(chunk) f.write(chunk) response.close() # Also check the expected hashes if provided if hashes: actual_hashes = h.hexdigest() for algo_hash in hashes.keys(): actual_digest = actual_hashes[algo_hash] expected_digest = hashes[algo_hash] if actual_digest != expected_digest: raise ValueError( "Failure when fetching %s. " "Checksum mismatched: %s != %s" % (url, expected_digest, actual_digest) ) computed_hashes = h.hexdigest() length = computed_hashes.pop("length") extrinsic_metadata = { "length": length, "filename": filename, "checksums": computed_hashes, "url": url, } logger.debug("extrinsic_metadata", extrinsic_metadata) return filepath, extrinsic_metadata @throttling_retry def get_url_body(url: str, **extra_params) -> bytes: """Basic HTTP client to retrieve information on software package, typically JSON metadata from a REST API. Args: url (str): An HTTP URL Raises: NotFound in case of query failures (for some reasons: 404, ...) Returns: The associated response's information """ logger.debug("Fetching %s", url) response = requests.get(url, **{**DEFAULT_PARAMS, **extra_params}) if response.status_code == 404: raise NotFound(f"Fail to query '{url}'. 
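# Hypothetical use of download() as package loaders call it: the file is
# streamed into `dest`, hashed on the fly, and any expected digests passed via
# `hashes` are re-checked (a mismatch raises ValueError). The URL and
# destination below are illustrative only:
filepath, extrinsic = download(
    url="https://example.org/artifacts/pkg-1.0.0.tar.gz",
    dest="/tmp/workdir",
)
# extrinsic holds "length", "filename", "checksums" (sha1 and sha256 by
# default, plus any algorithms requested via `hashes`) and the final,
# post-redirect "url".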
@throttling_retry
def get_url_body(url: str, **extra_params) -> bytes:
    """Basic HTTP client to retrieve information on software package,
    typically JSON metadata from a REST API.

    Args:
        url (str): An HTTP URL

    Raises:
        NotFound in case of query failures (for some reasons: 404, ...)

    Returns:
        The associated response's information

    """
    logger.debug("Fetching %s", url)
    response = requests.get(url, **{**DEFAULT_PARAMS, **extra_params})
    if response.status_code == 404:
        raise NotFound(f"Fail to query '{url}'. Reason: {response.status_code}")
    response.raise_for_status()
    return response.content


def release_name(version: str, filename: Optional[str] = None) -> str:
    if filename:
        return "releases/%s/%s" % (version, filename)
    return "releases/%s" % version


TReturn = TypeVar("TReturn")
TSelf = TypeVar("TSelf")

_UNDEFINED = object()


def cached_method(f: Callable[[TSelf], TReturn]) -> Callable[[TSelf], TReturn]:
    cache_name = f"_cached_{f.__name__}"

    @functools.wraps(f)
    def newf(self):
        value = getattr(self, cache_name, _UNDEFINED)

        if value is _UNDEFINED:
            value = f(self)
            setattr(self, cache_name, value)

        return value

    return newf
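``cached_method`` memoises a zero-argument method per instance by stashing its result in a ``_cached_<name>`` attribute. A small sketch (the class and attribute names below are illustrative, not part of the diff)::

    from swh.loader.package.utils import cached_method, get_url_body


    class ExampleApiClient:
        """Illustrative client: the metadata is fetched at most once per instance."""

        def __init__(self, url: str) -> None:
            self.url = url

        @cached_method
        def raw_metadata(self) -> bytes:
            # The first call performs the HTTP request (raising NotFound on 404);
            # the result is then stored on self._cached_raw_metadata and reused.
            return get_url_body(self.url)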
""" os.environ["http_proxy"] = "http://localhost:999" os.environ["https_proxy"] = "http://localhost:999" + + +@pytest.fixture +def loading_task_creation_for_listed_origin_test( + mocker, + swh_scheduler_celery_app, + swh_scheduler_celery_worker, + swh_config, +): + def test_implementation( + loader_class_name: str, + task_function_name: str, + lister: Lister, + listed_origin: ListedOrigin, + ): + mock_load = mocker.patch(f"{loader_class_name}.load") + mock_load.return_value = {"status": "eventful"} + + task_dict = create_origin_task_dict(listed_origin, lister) + + res = swh_scheduler_celery_app.send_task( + task_function_name, + kwargs=task_dict["arguments"]["kwargs"], + ) + assert res + res.wait() + assert res.successful() + assert mock_load.called + assert res.result == {"status": "eventful"} + + return test_implementation diff --git a/swh/loader/tests/conftest.py b/swh/loader/tests/conftest.py index 0494f2b..33b9eea 100644 --- a/swh/loader/tests/conftest.py +++ b/swh/loader/tests/conftest.py @@ -1,24 +1,24 @@ -# Copyright (C) 2019-2021 The Software Heritage developers +# Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Dict import pytest @pytest.fixture def swh_loader_config() -> Dict[str, Any]: return { "storage": { "cls": "memory", }, "deposit": { "url": "https://deposit.softwareheritage.org/1/private", "auth": { "username": "user", "password": "pass", }, }, } diff --git a/swh/loader/tests/test_cli.py b/swh/loader/tests/test_cli.py index 7d762da..324684e 100644 --- a/swh/loader/tests/test_cli.py +++ b/swh/loader/tests/test_cli.py @@ -1,157 +1,133 @@ -# Copyright (C) 2019-2021 The Software Heritage developers +# Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime -import os from click.formatting import HelpFormatter from click.testing import CliRunner import pytest -import yaml from swh.loader.cli import SUPPORTED_LOADERS, get_loader from swh.loader.cli import loader as loader_cli from swh.loader.package.loader import PackageLoader def test_get_loader_wrong_input(swh_config): """Unsupported loader should raise""" loader_type = "unknown" assert loader_type not in SUPPORTED_LOADERS with pytest.raises(ValueError, match="Invalid loader"): get_loader(loader_type, url="db-url") def test_get_loader(swh_loader_config): """Instantiating a supported loader should be ok""" loader_input = { "archive": {"url": "some-url", "artifacts": []}, "debian": { "url": "some-url", "packages": [], }, "npm": { "url": "https://www.npmjs.com/package/onepackage", }, "pypi": { "url": "some-url", }, } for loader_type, kwargs in loader_input.items(): kwargs["storage"] = swh_loader_config["storage"] loader = get_loader(loader_type, **kwargs) assert isinstance(loader, PackageLoader) def _write_usage(command, args, max_width=80): hf = HelpFormatter(width=max_width) hf.write_usage(command, args) return hf.getvalue()[:-1] def test_run_help(swh_config): """Usage message should contain list of available loaders""" runner = CliRunner() result = runner.invoke(loader_cli, ["run", "-h"]) assert result.exit_code == 0 # Syntax depends on dependencies' versions supported_loaders = 
"|".join(SUPPORTED_LOADERS) usage_prefix = _write_usage("loader run", "[OPTIONS] [%s]\n" % supported_loaders) usage_prefix2 = _write_usage("loader run", "[OPTIONS] {%s}\n" % supported_loaders) assert result.output.startswith((usage_prefix, usage_prefix2)) -def test_run_with_configuration_failure(tmp_path): - """Triggering a load should fail since configuration is incomplete""" - runner = CliRunner() - - conf_path = os.path.join(str(tmp_path), "cli.yml") - with open(conf_path, "w") as f: - f.write(yaml.dump({})) - - with pytest.raises(ValueError, match="Missing storage"): - runner.invoke( - loader_cli, - [ - "-C", - conf_path, - "run", - "pypi", - "url=https://some-url", - ], - catch_exceptions=False, - ) - - def test_run_pypi(mocker, swh_config): """Triggering a load should be ok""" mock_loader = mocker.patch("swh.loader.package.pypi.loader.PyPILoader.load") runner = CliRunner() result = runner.invoke( loader_cli, [ "-C", swh_config, "run", "pypi", "url=https://some-url", ], ) assert result.exit_code == 0 mock_loader.assert_called_once_with() def test_run_with_visit_date(mocker, swh_config): """iso visit_date parameter should be parsed as datetime""" mock_loader = mocker.patch("swh.loader.cli.get_loader") runner = CliRunner() input_date = "2016-05-03 15:16:32+00" result = runner.invoke( loader_cli, ["run", "npm", "https://some-url", f"visit_date='{input_date}'"] ) assert result.exit_code == 0 expected_parsed_date = datetime.datetime( 2016, 5, 3, 15, 16, 32, tzinfo=datetime.timezone.utc ) mock_loader.assert_called_once_with( "npm", storage={"cls": "memory"}, url="https://some-url", visit_date=expected_parsed_date, metadata_fetcher_credentials=None, ) def test_list_help(mocker, swh_config): """Usage message should contain list of available loaders""" runner = CliRunner() result = runner.invoke(loader_cli, ["list", "--help"]) assert result.exit_code == 0 usage_prefix = _write_usage( "loader list", f"[OPTIONS] [[{'|'.join(['all'] + SUPPORTED_LOADERS)}]]" ) expected_help_msg = f"""{usage_prefix} List supported loaders and optionally their arguments Options: -h, --help Show this message and exit. """ assert result.output.startswith(expected_help_msg) def test_list_help_npm(mocker, swh_config): """Triggering a load should be ok""" runner = CliRunner() result = runner.invoke(loader_cli, ["list", "npm"]) assert result.exit_code == 0 expected_help_msg = """ Loader: Load npm origin's artifact releases into swh archive. """ assert result.output.startswith(expected_help_msg[1:])