diff --git a/PKG-INFO b/PKG-INFO index bab4b3e..72c81fd 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,56 +1,56 @@ Metadata-Version: 2.1 Name: swh.loader.core -Version: 1.2.1 +Version: 1.3.0 Summary: Software Heritage Base Loader Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-core Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core provides low-level loading utilities and helpers used by :term:`loaders <loader>`. The main entry points are the classes: - :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) - :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations. This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found. diff --git a/conftest.py b/conftest.py index 2d4f2f7..b4a6d0a 100644 --- a/conftest.py +++ b/conftest.py @@ -1,25 +1,26 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest pytest_plugins = [ "swh.scheduler.pytest_plugin", "swh.storage.pytest_plugin", "swh.loader.pytest_plugin", ] @pytest.fixture(scope="session") def swh_scheduler_celery_includes(swh_scheduler_celery_includes): return swh_scheduler_celery_includes + [ "swh.loader.package.archive.tasks", "swh.loader.package.cran.tasks", "swh.loader.package.debian.tasks", "swh.loader.package.deposit.tasks", "swh.loader.package.npm.tasks", "swh.loader.package.pypi.tasks", "swh.loader.package.nixguix.tasks", + "swh.loader.package.maven.tasks", ] diff --git a/docs/package-loader-specifications.rst b/docs/package-loader-specifications.rst index 9609a8b..aceed29 100644 --- a/docs/package-loader-specifications.rst +++ b/docs/package-loader-specifications.rst @@ -1,112 +1,121 @@ ..
_package-loader-specifications: Package loader specifications ============================= Release fields -------------- Here is an overview of the fields (+ internal version name + branch name) used by each package loader, after D6616: .. list-table:: Fields used by each package loader :header-rows: 1 * - Loader - internal version - branch name - name - message - synthetic - author - date - Notes * - archive - passed as arg - ``release_name(version)`` - =version - "Synthetic release for archive at {p_info.url}\n" - true - "" - passed as arg - * - cran - ``metadata.get("Version", passed as arg)`` - ``release_name(version)`` - =version - standard message - true - ``metadata.get("Maintainer", "")`` - ``metadata.get("Date")`` - metadata is intrinsic * - debian - passed as arg (e.g. ``stretch/contrib/0.7.2-3``) - ``release_name(version)`` - =version - standard message (using full version) - true - ``metadata.changelog.person`` - ``metadata.changelog.date`` - metadata is intrinsic. Old revisions have ``dsc`` as type * - deposit - HEAD - only HEAD - HEAD - "{client}: Deposit {id} in collection {collection}\n" - true - original author - ``<codemeta:dateCreated>`` from SWORD XML - revisions had parents + * - maven-loader + - passed as arg + - HEAD + - ``release_name(version)`` + - "Synthetic release for archive at {p_info.url}\n" + - true + - "" + - passed as arg + - Only one artifact per url (jar/zip src) * - nixguix - URL - URL - URL - None - true - "" - None - it's the URL of the artifact referenced by the derivation * - npm - ``metadata["version"]`` - ``release_name(version)`` - =version - standard message - true - from int metadata or "" - from ext metadata or None - * - opam - as given by opam - "{opam_package}.{version}" - =version - standard message - true - from metadata - None - "{self.opam_package}.{version}" matches the version names used by opam's backend. metadata is extrinsic * - pypi - ``metadata["version"]`` - ``release_name(version)`` or ``release_name(version, filename)`` - =version - ``metadata['comment_text']`` or standard message - true - from int metadata or "" - from ext metadata or None - metadata is intrinsic using this function:: def release_name(version: str, filename: Optional[str] = None) -> str: if filename: return "releases/%s/%s" % (version, filename) return "releases/%s" % version and "standard message" being:: msg = ( f"Synthetic release for {PACKAGE_MANAGER} source package {name} " f"version {version}\n" ) The ``target_type`` field is always ``dir``, and the target is the id of a directory loaded by unpacking a tarball/zip file/...
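As a concrete illustration, here is how the ``release_name`` function above maps versions to branch names (a minimal sketch, reusing only the function defined above; the version and filename are borrowed from the PyPI test data elsewhere in this package)::

    release_name("1.2.0")                          # "releases/1.2.0"
    release_name("1.2.0", "0805nexter-1.2.0.zip")  # "releases/1.2.0/0805nexter-1.2.0.zip"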
diff --git a/requirements-swh.txt b/requirements-swh.txt index 5607b93..40cb412 100644 --- a/requirements-swh.txt +++ b/requirements-swh.txt @@ -1,5 +1,5 @@ swh.core >= 0.3 -swh.model >= 1.0.0 +swh.model >= 3.1.0 swh.objstorage >= 0.2.2 swh.scheduler >= 0.4.0 swh.storage >= 0.29.0 diff --git a/setup.py b/setup.py index cebead9..81f0481 100755 --- a/setup.py +++ b/setup.py @@ -1,80 +1,81 @@ #!/usr/bin/env python3 # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from io import open from os import path from setuptools import find_packages, setup here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, "README.rst"), encoding="utf-8") as f: long_description = f.read() def parse_requirements(name=None): if name: reqf = "requirements-%s.txt" % name else: reqf = "requirements.txt" requirements = [] if not path.exists(reqf): return requirements with open(reqf) as f: for line in f.readlines(): line = line.strip() if not line or line.startswith("#"): continue requirements.append(line) return requirements setup( name="swh.loader.core", description="Software Heritage Base Loader", long_description=long_description, long_description_content_type="text/markdown", python_requires=">=3.7", author="Software Heritage developers", author_email="swh-devel@inria.fr", url="https://forge.softwareheritage.org/diffusion/DLDBASE", packages=find_packages(), # packages's modules scripts=[], # scripts to package install_requires=parse_requirements() + parse_requirements("swh"), setup_requires=["setuptools-scm"], use_scm_version=True, extras_require={"testing": parse_requirements("test")}, include_package_data=True, entry_points=""" [swh.cli.subcommands] loader=swh.loader.cli [swh.workers] loader.archive=swh.loader.package.archive:register loader.cran=swh.loader.package.cran:register loader.debian=swh.loader.package.debian:register loader.deposit=swh.loader.package.deposit:register loader.nixguix=swh.loader.package.nixguix:register loader.npm=swh.loader.package.npm:register loader.opam=swh.loader.package.opam:register loader.pypi=swh.loader.package.pypi:register + loader.maven=swh.loader.package.maven:register """, classifiers=[ "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", "Operating System :: OS Independent", "Development Status :: 5 - Production/Stable", ], project_urls={ "Bug Reports": "https://forge.softwareheritage.org/maniphest", "Funding": "https://www.softwareheritage.org/donate", "Source": "https://forge.softwareheritage.org/source/swh-loader-core", "Documentation": "https://docs.softwareheritage.org/devel/swh-loader-core/", }, ) diff --git a/swh.loader.core.egg-info/PKG-INFO b/swh.loader.core.egg-info/PKG-INFO index bab4b3e..72c81fd 100644 --- a/swh.loader.core.egg-info/PKG-INFO +++ b/swh.loader.core.egg-info/PKG-INFO @@ -1,56 +1,56 @@ Metadata-Version: 2.1 Name: swh.loader.core -Version: 1.2.1 +Version: 1.3.0 Summary: Software Heritage Base Loader Home-page: https://forge.softwareheritage.org/diffusion/DLDBASE Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, 
https://forge.softwareheritage.org/source/swh-loader-core Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-core/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS Software Heritage - Loader foundations ====================================== The Software Heritage Loader Core provides low-level loading utilities and helpers used by :term:`loaders <loader>`. The main entry points are the classes: - :class:`swh.loader.core.loader.BaseLoader` for loaders (e.g. svn) - :class:`swh.loader.core.loader.DVCSLoader` for DVCS loaders (e.g. hg, git, ...) - :class:`swh.loader.package.loader.PackageLoader` for Package loaders (e.g. PyPI, Npm, ...) Package loaders --------------- This package also implements many package loaders directly, out of convenience, as they usually are quite similar and each fits in a single file. They all roughly follow these steps, explained in the :py:meth:`swh.loader.package.loader.PackageLoader.load` documentation. See the :ref:`package-loader-tutorial` for details. VCS loaders ----------- Unlike package loaders, VCS loaders remain in separate packages, as they often need more advanced conversions and very VCS-specific operations. This usually involves getting the branches of a repository and recursively loading revisions in the history (and directory trees in these revisions), until a known revision is found. diff --git a/swh.loader.core.egg-info/SOURCES.txt b/swh.loader.core.egg-info/SOURCES.txt index 5a39aed..a04fb5c 100644 --- a/swh.loader.core.egg-info/SOURCES.txt +++ b/swh.loader.core.egg-info/SOURCES.txt @@ -1,198 +1,208 @@ .gitignore .pre-commit-config.yaml AUTHORS CODE_OF_CONDUCT.md CONTRIBUTORS LICENSE MANIFEST.in Makefile README.rst conftest.py mypy.ini pyproject.toml pytest.ini requirements-swh.txt requirements-test.txt requirements.txt setup.cfg setup.py tox.ini docs/.gitignore docs/Makefile docs/README.rst docs/cli.rst docs/conf.py docs/index.rst docs/package-loader-specifications.rst docs/package-loader-tutorial.rst docs/vcs-loader-overview.rst docs/_static/.placeholder docs/_templates/.placeholder swh/__init__.py swh.loader.core.egg-info/PKG-INFO swh.loader.core.egg-info/SOURCES.txt swh.loader.core.egg-info/dependency_links.txt swh.loader.core.egg-info/entry_points.txt swh.loader.core.egg-info/requires.txt swh.loader.core.egg-info/top_level.txt swh/loader/__init__.py swh/loader/cli.py swh/loader/exception.py swh/loader/pytest_plugin.py swh/loader/core/__init__.py swh/loader/core/converters.py swh/loader/core/loader.py swh/loader/core/py.typed swh/loader/core/utils.py swh/loader/core/tests/__init__.py swh/loader/core/tests/test_converters.py swh/loader/core/tests/test_loader.py swh/loader/core/tests/test_utils.py swh/loader/package/__init__.py swh/loader/package/loader.py swh/loader/package/utils.py swh/loader/package/archive/__init__.py swh/loader/package/archive/loader.py swh/loader/package/archive/tasks.py swh/loader/package/archive/tests/__init__.py swh/loader/package/archive/tests/test_archive.py swh/loader/package/archive/tests/test_tasks.py swh/loader/package/archive/tests/data/not_gzipped_tarball.tar.gz
swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit1 swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit2 swh/loader/package/archive/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.2.0.tar.gz swh/loader/package/cran/__init__.py swh/loader/package/cran/loader.py swh/loader/package/cran/tasks.py swh/loader/package/cran/tests/__init__.py swh/loader/package/cran/tests/test_cran.py swh/loader/package/cran/tests/test_tasks.py swh/loader/package/cran/tests/data/description/KnownBR swh/loader/package/cran/tests/data/description/acepack swh/loader/package/cran/tests/data/https_cran.r-project.org/src_contrib_1.4.0_Recommended_KernSmooth_2.22-6.tar.gz swh/loader/package/debian/__init__.py swh/loader/package/debian/loader.py swh/loader/package/debian/tasks.py swh/loader/package/debian/tests/__init__.py swh/loader/package/debian/tests/test_debian.py swh/loader/package/debian/tests/test_tasks.py swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-3.diff.gz swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-3.dsc swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-4.diff.gz swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2-4.dsc swh/loader/package/debian/tests/data/http_deb.debian.org/debian_pool_contrib_c_cicero_cicero_0.7.2.orig.tar.gz swh/loader/package/debian/tests/data/http_deb.debian.org/onefile.txt swh/loader/package/deposit/__init__.py swh/loader/package/deposit/loader.py swh/loader/package/deposit/tasks.py swh/loader/package/deposit/tests/__init__.py swh/loader/package/deposit/tests/conftest.py swh/loader/package/deposit/tests/test_deposit.py swh/loader/package/deposit/tests/test_tasks.py swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_666_meta swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_666_raw swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_777_meta swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_777_raw swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_888_meta swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/1_private_888_raw swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello-2.10.zip swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello-2.12.tar.gz swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello_2.10.json swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello_2.11.json swh/loader/package/deposit/tests/data/https_deposit.softwareheritage.org/hello_2.12.json +swh/loader/package/maven/__init__.py +swh/loader/package/maven/loader.py +swh/loader/package/maven/tasks.py +swh/loader/package/maven/tests/__init__.py +swh/loader/package/maven/tests/test_maven.py +swh/loader/package/maven/tests/test_tasks.py +swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar +swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0.pom +swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar +swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1.pom swh/loader/package/nixguix/__init__.py 
swh/loader/package/nixguix/loader.py swh/loader/package/nixguix/tasks.py swh/loader/package/nixguix/tests/__init__.py swh/loader/package/nixguix/tests/conftest.py swh/loader/package/nixguix/tests/test_nixguix.py swh/loader/package/nixguix/tests/test_tasks.py swh/loader/package/nixguix/tests/data/https_example.com/file.txt swh/loader/package/nixguix/tests/data/https_fail.com/truncated-archive.tgz swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit1 swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.1.0.tar.gz_visit2 swh/loader/package/nixguix/tests/data/https_ftp.gnu.org/gnu_8sync_8sync-0.2.0.tar.gz swh/loader/package/nixguix/tests/data/https_github.com/owner-1_repository-1_revision-1.tgz swh/loader/package/nixguix/tests/data/https_github.com/owner-2_repository-1_revision-1.tgz swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources-EOFError.json swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources.json swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources.json_visit1 swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources_special.json swh/loader/package/nixguix/tests/data/https_nix-community.github.io/nixpkgs-swh_sources_special.json_visit1 swh/loader/package/npm/__init__.py swh/loader/package/npm/loader.py swh/loader/package/npm/tasks.py swh/loader/package/npm/tests/__init__.py swh/loader/package/npm/tests/test_npm.py swh/loader/package/npm/tests/test_tasks.py swh/loader/package/npm/tests/data/https_registry.npmjs.org/@aller_shared_-_shared-0.1.0.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/@aller_shared_-_shared-0.1.1-alpha.14.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/jammit-express_-_jammit-express-0.0.1.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/nativescript-telerik-analytics_-_nativescript-telerik-analytics-1.0.0.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.2.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.3.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.4.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.0.5.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.1.0.tgz swh/loader/package/npm/tests/data/https_registry.npmjs.org/org_-_org-0.2.0.tgz swh/loader/package/npm/tests/data/https_replicate.npmjs.com/@aller_shared swh/loader/package/npm/tests/data/https_replicate.npmjs.com/catify swh/loader/package/npm/tests/data/https_replicate.npmjs.com/jammit-express swh/loader/package/npm/tests/data/https_replicate.npmjs.com/jammit-no-time swh/loader/package/npm/tests/data/https_replicate.npmjs.com/nativescript-telerik-analytics swh/loader/package/npm/tests/data/https_replicate.npmjs.com/org swh/loader/package/npm/tests/data/https_replicate.npmjs.com/org_visit1 swh/loader/package/opam/__init__.py swh/loader/package/opam/loader.py swh/loader/package/opam/tasks.py swh/loader/package/opam/tests/__init__.py swh/loader/package/opam/tests/test_opam.py swh/loader/package/opam/tests/test_tasks.py swh/loader/package/opam/tests/data/fake_opam_repo/repo swh/loader/package/opam/tests/data/fake_opam_repo/version swh/loader/package/opam/tests/data/fake_opam_repo/packages/agrid/agrid.0.1/opam 
swh/loader/package/opam/tests/data/fake_opam_repo/packages/directories/directories.0.1/opam swh/loader/package/opam/tests/data/fake_opam_repo/packages/directories/directories.0.2/opam swh/loader/package/opam/tests/data/fake_opam_repo/packages/directories/directories.0.3/opam swh/loader/package/opam/tests/data/fake_opam_repo/packages/ocb/ocb.0.1/opam swh/loader/package/opam/tests/data/https_github.com/OCamlPro_agrid_archive_0.1.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_directories_archive_0.1.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_directories_archive_0.2.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_directories_archive_0.3.tar.gz swh/loader/package/opam/tests/data/https_github.com/OCamlPro_ocb_archive_0.1.tar.gz swh/loader/package/pypi/__init__.py swh/loader/package/pypi/loader.py swh/loader/package/pypi/tasks.py swh/loader/package/pypi/tests/__init__.py swh/loader/package/pypi/tests/test_pypi.py swh/loader/package/pypi/tests/test_tasks.py swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.1.0.tar.gz swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.1.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.2.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.3.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/0805nexter-1.4.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/nexter-1.1.0.tar.gz swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/nexter-1.1.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_70_97_c49fb8ec24a7aaab54c3dbfbb5a6ca1431419d9ee0f6c363d9ad01d2b8b1_0805nexter-1.3.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_86_10_c9555ec63106153aaaad753a281ff47f4ac79e980ff7f5d740d6649cd56a_upymenu-0.0.1.tar.gz swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_c4_a0_4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4_0805nexter-1.2.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_c4_a0_4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4_0805nexter-1.2.0.zip_visit1 swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_ec_65_c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d_0805nexter-1.1.0.zip swh/loader/package/pypi/tests/data/https_files.pythonhosted.org/packages_ec_65_c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d_0805nexter-1.1.0.zip_visit1 swh/loader/package/pypi/tests/data/https_pypi.org/pypi_0805nexter_json swh/loader/package/pypi/tests/data/https_pypi.org/pypi_0805nexter_json_visit1 swh/loader/package/pypi/tests/data/https_pypi.org/pypi_nexter_json swh/loader/package/pypi/tests/data/https_pypi.org/pypi_upymenu_json swh/loader/package/tests/__init__.py swh/loader/package/tests/common.py swh/loader/package/tests/test_conftest.py swh/loader/package/tests/test_loader.py swh/loader/package/tests/test_loader_metadata.py swh/loader/package/tests/test_utils.py swh/loader/tests/__init__.py swh/loader/tests/conftest.py swh/loader/tests/test_cli.py swh/loader/tests/test_init.py swh/loader/tests/data/0805nexter-1.1.0.tar.gz \ No newline at end of file diff --git a/swh.loader.core.egg-info/entry_points.txt b/swh.loader.core.egg-info/entry_points.txt index 66b9f28..8be3745 100644 --- a/swh.loader.core.egg-info/entry_points.txt +++ b/swh.loader.core.egg-info/entry_points.txt @@ -1,13 
+1,14 @@ [swh.cli.subcommands] loader=swh.loader.cli [swh.workers] loader.archive=swh.loader.package.archive:register loader.cran=swh.loader.package.cran:register loader.debian=swh.loader.package.debian:register loader.deposit=swh.loader.package.deposit:register loader.nixguix=swh.loader.package.nixguix:register loader.npm=swh.loader.package.npm:register loader.opam=swh.loader.package.opam:register loader.pypi=swh.loader.package.pypi:register + loader.maven=swh.loader.package.maven:register \ No newline at end of file diff --git a/swh.loader.core.egg-info/requires.txt b/swh.loader.core.egg-info/requires.txt index eb6a42e..09127b0 100644 --- a/swh.loader.core.egg-info/requires.txt +++ b/swh.loader.core.egg-info/requires.txt @@ -1,23 +1,23 @@ psutil requests iso8601 pkginfo python-debian python-dateutil swh.core>=0.3 -swh.model>=1.0.0 +swh.model>=3.1.0 swh.objstorage>=0.2.2 swh.scheduler>=0.4.0 swh.storage>=0.29.0 [testing] pytest pytest-mock requests_mock swh-core[testing] swh-scheduler[testing]>=0.5.0 swh-storage[testing]>=0.10.6 types-click types-python-dateutil types-pyyaml types-requests diff --git a/swh/loader/package/debian/loader.py b/swh/loader/package/debian/loader.py index 395b8d9..60f0801 100644 --- a/swh/loader/package/debian/loader.py +++ b/swh/loader/package/debian/loader.py @@ -1,466 +1,469 @@ # Copyright (C) 2017-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import email.utils import logging from os import path import re import subprocess from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple import attr from dateutil.parser import parse as parse_date from debian.changelog import Changelog from debian.deb822 import Dsc from swh.loader.package.loader import BasePackageInfo, PackageLoader, PartialExtID from swh.loader.package.utils import download, release_name from swh.model.hashutil import hash_to_bytes from swh.model.model import ObjectType, Person, Release, Sha1Git, TimestampWithTimezone from swh.storage.interface import StorageInterface logger = logging.getLogger(__name__) UPLOADERS_SPLIT = re.compile(r"(?<=\>)\s*,\s*") EXTID_TYPE = "dsc-sha256" class DscCountError(ValueError): """Raised when an unexpected number of .dsc files is seen""" pass @attr.s class DebianFileMetadata: name = attr.ib(type=str) """Filename""" - sha256 = attr.ib(type=str) + size = attr.ib(type=int) uri = attr.ib(type=str) """URL of this specific file""" - # md5sum is not always available, make it optional + # not all checksums are always available, so make them optional + sha256 = attr.ib(type=str, default="") md5sum = attr.ib(type=str, default="") - - # sha1 is not always available, make it optional sha1 = attr.ib(type=str, default="") # Some of the DSC files imported in swh apparently had a Checksums-SHA512 # field which got recorded in the archive. Current versions of dpkg-source # don't seem to generate them, but keep the field available for # future-proofing.
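# Note that of these checksum fields, only md5sum is assumed present by download_package() below; sha1 and sha256 are used for download verification only when non-empty, and sha512 is currently not verified.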
sha512 = attr.ib(type=str, default="") @attr.s class DebianPackageChangelog: person = attr.ib(type=Dict[str, str]) """A dict with fields like model.Person, except they are str instead of bytes, and 'email' is optional.""" date = attr.ib(type=str) """Date of the changelog entry.""" history = attr.ib(type=List[Tuple[str, str]]) """List of tuples (package_name, version)""" @attr.s class DebianPackageInfo(BasePackageInfo): raw_info = attr.ib(type=Dict[str, Any]) files = attr.ib(type=Dict[str, DebianFileMetadata]) """Metadata of the files (.deb, .dsc, ...) of the package.""" name = attr.ib(type=str) full_version = attr.ib(type=str) """e.g. stretch/contrib/0.7.2-3""" @classmethod def from_metadata( cls, a_metadata: Dict[str, Any], url: str, version: str ) -> "DebianPackageInfo": return cls( url=url, filename=None, version=version, raw_info=a_metadata, files={ file_name: DebianFileMetadata(**file_metadata) for (file_name, file_metadata) in a_metadata.get("files", {}).items() }, name=a_metadata["name"], full_version=a_metadata["version"], ) def extid(self) -> Optional[PartialExtID]: dsc_files = [ file for (name, file) in self.files.items() if name.endswith(".dsc") ] if len(dsc_files) != 1: raise DscCountError( f"Expected exactly one .dsc file for package {self.name}, " f"got {len(dsc_files)}" ) return (EXTID_TYPE, hash_to_bytes(dsc_files[0].sha256)) @attr.s class IntrinsicPackageMetadata: """Metadata extracted from a package's .dsc file.""" name = attr.ib(type=str) version = attr.ib(type=str) changelog = attr.ib(type=DebianPackageChangelog) maintainers = attr.ib(type=List[Dict[str, str]]) """A list of dicts with fields like model.Person, except they are str instead of bytes, and 'email' is optional.""" class DebianLoader(PackageLoader[DebianPackageInfo]): """Load Debian origins into the swh archive. """ visit_type = "deb" def __init__( self, storage: StorageInterface, url: str, packages: Mapping[str, Any], max_content_size: Optional[int] = None, ): """Debian Loader implementation. Args: url: Origin url (e.g. deb://Debian/packages/cicero) packages: versioned packages and associated artifacts, example:: { 'stretch/contrib/0.7.2-3': { 'name': 'cicero', 'version': '0.7.2-3', 'files': { 'cicero_0.7.2-3.diff.gz': { 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', 'name': 'cicero_0.7.2-3.diff.gz', 'sha256': 'f039c9642fe15c75bed5254315e2a29f...', 'size': 3964, 'uri': 'http://d.d.o/cicero_0.7.2-3.diff.gz', }, 'cicero_0.7.2-3.dsc': { 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', 'name': 'cicero_0.7.2-3.dsc', 'sha256': '35b7f1048010c67adfd8d70e4961aefb...', 'size': 1864, 'uri': 'http://d.d.o/cicero_0.7.2-3.dsc', }, 'cicero_0.7.2.orig.tar.gz': { 'md5sum': '4353dede07c5728319ba7f5595a7230a', 'name': 'cicero_0.7.2.orig.tar.gz', 'sha256': '63f40f2436ea9f67b44e2d4bd669dbab...', 'size': 96527, 'uri': 'http://d.d.o/cicero_0.7.2.orig.tar.gz', } }, }, # ... } """ super().__init__(storage=storage, url=url, max_content_size=max_content_size) self.packages = packages def get_versions(self) -> Sequence[str]: """Returns the keys of the packages input (e.g. stretch/contrib/0.7.2-3, etc...)
""" return list(self.packages.keys()) def get_package_info(self, version: str) -> Iterator[Tuple[str, DebianPackageInfo]]: meta = self.packages[version] p_info = DebianPackageInfo.from_metadata(meta, url=self.url, version=version) yield release_name(version), p_info def download_package( self, p_info: DebianPackageInfo, tmpdir: str ) -> List[Tuple[str, Mapping]]: """Contrary to other package loaders (1 package, 1 artifact), `p_info.files` represents the package's datafiles set to fetch: - .orig.tar.gz - .dsc - .diff.gz This is delegated to the `download_package` function. """ all_hashes = download_package(p_info, tmpdir) logger.debug("all_hashes: %s", all_hashes) res = [] for hashes in all_hashes.values(): res.append((tmpdir, hashes)) logger.debug("res: %s", res) return res def uncompress( self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str ) -> str: logger.debug("dl_artifacts: %s", dl_artifacts) return extract_package(dl_artifacts, dest=dest) def build_release( self, p_info: DebianPackageInfo, uncompressed_path: str, directory: Sha1Git, ) -> Optional[Release]: dsc_url, dsc_name = dsc_information(p_info) if not dsc_name: raise ValueError("dsc name for url %s should not be None" % dsc_url) dsc_path = path.join(path.dirname(uncompressed_path), dsc_name) intrinsic_metadata = get_intrinsic_package_metadata( p_info, dsc_path, uncompressed_path ) logger.debug("intrinsic_metadata: %s", intrinsic_metadata) logger.debug("p_info: %s", p_info) msg = ( f"Synthetic release for Debian source package {p_info.name} " f"version {p_info.full_version}\n" ) author = prepare_person(intrinsic_metadata.changelog.person) date = TimestampWithTimezone.from_iso8601(intrinsic_metadata.changelog.date) # inspired from swh.loader.debian.converters.package_metadata_to_revision return Release( name=p_info.version.encode(), message=msg.encode(), author=author, date=date, target=directory, target_type=ObjectType.DIRECTORY, synthetic=True, ) def uid_to_person(uid: str) -> Dict[str, str]: """Convert an uid to a person suitable for insertion. Args: uid: an uid of the form "Name " Returns: a dictionary with the following keys: - name: the name associated to the uid - email: the mail associated to the uid - fullname: the actual uid input """ logger.debug("uid: %s", uid) ret = { "name": "", "email": "", "fullname": uid, } name, mail = email.utils.parseaddr(uid) if name and email: ret["name"] = name ret["email"] = mail else: ret["name"] = uid return ret def prepare_person(person: Mapping[str, str]) -> Person: """Prepare person for swh serialization... Args: A person dict Returns: A person ready for storage """ return Person.from_dict( {key: value.encode("utf-8") for (key, value) in person.items()} ) def download_package(p_info: DebianPackageInfo, tmpdir: Any) -> Mapping[str, Any]: """Fetch a source package in a temporary directory and check the checksums for all files. 
Args: p_info: Information on a package tmpdir: Where to download and extract the files to ingest Returns: Dict of swh hashes per filename key """ all_hashes = {} for filename, fileinfo in p_info.files.items(): uri = fileinfo.uri logger.debug("fileinfo: %s", fileinfo) - extrinsic_hashes = {"sha256": fileinfo.sha256} + extrinsic_hashes = {"md5": fileinfo.md5sum} + if fileinfo.sha256: + extrinsic_hashes["sha256"] = fileinfo.sha256 + if fileinfo.sha1: + extrinsic_hashes["sha1"] = fileinfo.sha1 logger.debug("extrinsic_hashes(%s): %s", filename, extrinsic_hashes) - filepath, hashes = download( + _, hashes = download( uri, dest=tmpdir, filename=filename, hashes=extrinsic_hashes ) all_hashes[filename] = hashes logger.debug("all_hashes: %s", all_hashes) return all_hashes def dsc_information(p_info: DebianPackageInfo) -> Tuple[Optional[str], Optional[str]]: """Retrieve dsc information from a package. Args: p_info: Package metadata information Returns: Tuple of dsc file's uri, dsc's full disk path """ dsc_name = None dsc_url = None for filename, fileinfo in p_info.files.items(): if filename.endswith(".dsc"): if dsc_name: raise DscCountError( "Package %s_%s references several dsc files." % (p_info.name, p_info.version) ) dsc_url = fileinfo.uri dsc_name = filename return dsc_url, dsc_name def extract_package(dl_artifacts: List[Tuple[str, Mapping]], dest: str) -> str: """Extract a Debian source package to a given directory. Note that after extraction the target directory will be the root of the extracted package, rather than containing it. Args: package: package information dictionary dest: directory where the package files are stored Returns: Package extraction directory """ a_path = dl_artifacts[0][0] logger.debug("dl_artifacts: %s", dl_artifacts) for _, hashes in dl_artifacts: logger.debug("hashes: %s", hashes) filename = hashes["filename"] if filename.endswith(".dsc"): dsc_name = filename break dsc_path = path.join(a_path, dsc_name) destdir = path.join(dest, "extracted") logfile = path.join(dest, "extract.log") logger.debug( "extract Debian source package %s in %s" % (dsc_path, destdir), extra={"swh_type": "deb_extract", "swh_dsc": dsc_path, "swh_destdir": destdir,}, ) cmd = [ "dpkg-source", "--no-copy", "--no-check", "--ignore-bad-version", "-x", dsc_path, destdir, ] try: with open(logfile, "w") as stdout: subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logdata = open(logfile, "r").read() raise ValueError( "dpkg-source exited with code %s: %s" % (e.returncode, logdata) ) from None return destdir def get_intrinsic_package_metadata( p_info: DebianPackageInfo, dsc_path: str, extracted_path: str ) -> IntrinsicPackageMetadata: """Get the package metadata from the source package at dsc_path, extracted in extracted_path. 
Args: p_info: the package information dsc_path: path to the package's dsc file extracted_path: the path where the package got extracted Returns: dict: a dictionary with the following keys: - history: list of (package_name, package_version) tuples parsed from the package changelog """ with open(dsc_path, "rb") as dsc: parsed_dsc = Dsc(dsc) # Parse the changelog to retrieve the rest of the package information changelog_path = path.join(extracted_path, "debian/changelog") with open(changelog_path, "rb") as changelog_file: try: parsed_changelog = Changelog(changelog_file) except UnicodeDecodeError: logger.warning( "Unknown encoding for changelog %s," " falling back to iso" % changelog_path, extra={ "swh_type": "deb_changelog_encoding", "swh_name": p_info.name, "swh_version": str(p_info.version), "swh_changelog": changelog_path, }, ) # need to reset as Changelog scrolls to the end of the file changelog_file.seek(0) parsed_changelog = Changelog(changelog_file, encoding="iso-8859-15") history: List[Tuple[str, str]] = [] for block in parsed_changelog: assert block.package is not None history.append((block.package, str(block.version))) changelog = DebianPackageChangelog( person=uid_to_person(parsed_changelog.author), date=parse_date(parsed_changelog.date).isoformat(), history=history[1:], ) maintainers = [ uid_to_person(parsed_dsc["Maintainer"]), ] maintainers.extend( uid_to_person(person) for person in UPLOADERS_SPLIT.split(parsed_dsc.get("Uploaders", "")) ) return IntrinsicPackageMetadata( name=p_info.name, version=str(p_info.version), changelog=changelog, maintainers=maintainers, ) diff --git a/swh/loader/package/debian/tests/test_debian.py b/swh/loader/package/debian/tests/test_debian.py index 6f40e9e..4086abf 100644 --- a/swh/loader/package/debian/tests/test_debian.py +++ b/swh/loader/package/debian/tests/test_debian.py @@ -1,478 +1,522 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from copy import deepcopy import hashlib import logging from os import path import pytest import requests from swh.loader.package.debian.loader import ( DebianLoader, DebianPackageChangelog, DebianPackageInfo, IntrinsicPackageMetadata, download_package, dsc_information, extract_package, get_intrinsic_package_metadata, prepare_person, uid_to_person, ) from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats from swh.model.hashutil import hash_to_bytes from swh.model.model import ( ObjectType, Person, Release, Snapshot, SnapshotBranch, TargetType, Timestamp, TimestampWithTimezone, ) logger = logging.getLogger(__name__) URL = "deb://Debian/packages/cicero" PACKAGE_FILES = { "name": "cicero", "version": "0.7.2-3", "files": { "cicero_0.7.2-3.diff.gz": { "md5sum": "a93661b6a48db48d59ba7d26796fc9ce", "name": "cicero_0.7.2-3.diff.gz", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa "size": 3964, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz", # noqa }, "cicero_0.7.2-3.dsc": { "md5sum": "d5dac83eb9cfc9bb52a15eb618b4670a", "name": "cicero_0.7.2-3.dsc", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa "size": 1864, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": 
"cicero_0.7.2.orig.tar.gz", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_FILES2 = { "name": "cicero", "version": "0.7.2-4", "files": { "cicero_0.7.2-4.diff.gz": { "md5sum": "1e7e6fc4a59d57c98082a3af78145734", "name": "cicero_0.7.2-4.diff.gz", "sha256": "2e6fa296ee7005473ff58d0971f4fd325617b445671480e9f2cfb738d5dbcd01", # noqa "size": 4038, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.diff.gz", # noqa }, "cicero_0.7.2-4.dsc": { "md5sum": "1a6c8855a73b4282bb31d15518f18cde", "name": "cicero_0.7.2-4.dsc", "sha256": "913ee52f7093913420de5cbe95d63cfa817f1a1daf997961149501894e754f8b", # noqa "size": 1881, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-4.dsc", # noqa }, # noqa "cicero_0.7.2.orig.tar.gz": { "md5sum": "4353dede07c5728319ba7f5595a7230a", "name": "cicero_0.7.2.orig.tar.gz", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa "size": 96527, "uri": "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz", # noqa }, }, } PACKAGE_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, } PACKAGES_PER_VERSION = { "stretch/contrib/0.7.2-3": PACKAGE_FILES, "buster/contrib/0.7.2-4": PACKAGE_FILES2, } def test_debian_first_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a gnu project ends up with 1 snapshot """ loader = DebianLoader(swh_storage, URL, packages=PACKAGE_PER_VERSION,) actual_load_status = loader.load() expected_snapshot_id = "ad1367b5470a03857be7c7325a5a8bde698e1800" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( swh_storage, URL, status="full", type="deb", snapshot=hash_to_bytes(expected_snapshot_id), ) release_id = hash_to_bytes("73e0ede9c21f7074ad1f9c81a774cfcb9e02addf") expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.RELEASE, target=release_id, ) }, ) # different than the previous loader as no release is done check_snapshot(expected_snapshot, swh_storage) assert swh_storage.release_get([release_id])[0] == Release( id=release_id, name=b"stretch/contrib/0.7.2-3", message=b"Synthetic release for Debian source package cicero version 0.7.2-3\n", target=hash_to_bytes("798df511408c53bf842a8e54d4d335537836bdc3"), target_type=ObjectType.DIRECTORY, synthetic=True, author=Person( fullname=b"Samuel Thibault ", name=b"Samuel Thibault", email=b"sthibault@debian.org", ), date=TimestampWithTimezone( timestamp=Timestamp(seconds=1413730355, microseconds=0), offset=120, negative_utc=False, ), ) stats = get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, "release": 1, # all artifacts under 1 release "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats def test_debian_first_visit_then_another_visit(swh_storage, requests_mock_datadir): """With no prior visit, load a debian project ends up with 1 snapshot """ loader = DebianLoader(swh_storage, URL, packages=PACKAGE_PER_VERSION,) actual_load_status = loader.load() expected_snapshot_id = "ad1367b5470a03857be7c7325a5a8bde698e1800" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( swh_storage, URL, status="full", type="deb", 
snapshot=hash_to_bytes(expected_snapshot_id), ) expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.RELEASE, target=hash_to_bytes("73e0ede9c21f7074ad1f9c81a774cfcb9e02addf"), ) }, ) # different than the previous loader as no release is done check_snapshot(expected_snapshot, swh_storage) stats = get_stats(swh_storage) assert { "content": 42, "directory": 2, "origin": 1, "origin_visit": 1, "release": 1, # all artifacts under 1 release "revision": 0, "skipped_content": 0, "snapshot": 1, } == stats # No change in between load actual_load_status2 = loader.load() assert actual_load_status2["status"] == "uneventful" assert_last_visit_matches( swh_storage, URL, status="full", type="deb", snapshot=hash_to_bytes(expected_snapshot_id), ) stats2 = get_stats(swh_storage) assert { "content": 42 + 0, "directory": 2 + 0, "origin": 1, "origin_visit": 1 + 1, # a new visit occurred "release": 1, "revision": 0, "skipped_content": 0, "snapshot": 1, # same snapshot across 2 visits } == stats2 urls = [ m.url for m in requests_mock_datadir.request_history if m.url.startswith("http://deb.debian.org") ] # visited each package artifact twice across 2 visits assert len(urls) == len(set(urls)) def test_debian_uid_to_person(): uid = "Someone Name " actual_person = uid_to_person(uid) assert actual_person == { "name": "Someone Name", "email": "someone@orga.org", "fullname": uid, } def test_debian_prepare_person(): actual_author = prepare_person( { "name": "Someone Name", "email": "someone@orga.org", "fullname": "Someone Name ", } ) assert actual_author == Person( name=b"Someone Name", email=b"someone@orga.org", fullname=b"Someone Name ", ) def test_debian_download_package(datadir, tmpdir, requests_mock_datadir): tmpdir = str(tmpdir) # py3.5 work around (LocalPath issue) p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL, version="0.7.2-3") all_hashes = download_package(p_info, tmpdir) assert all_hashes == { "cicero_0.7.2-3.diff.gz": { "checksums": { + "md5": "a93661b6a48db48d59ba7d26796fc9ce", "sha1": "0815282053f21601b0ec4adf7a8fe47eace3c0bc", "sha256": "f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c", # noqa }, "filename": "cicero_0.7.2-3.diff.gz", "length": 3964, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2-3.diff.gz" ), }, "cicero_0.7.2-3.dsc": { "checksums": { + "md5": "d5dac83eb9cfc9bb52a15eb618b4670a", "sha1": "abbec4e8efbbc80278236e1dd136831eac08accd", "sha256": "35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03", # noqa }, "filename": "cicero_0.7.2-3.dsc", "length": 1864, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/cicero_0.7.2-3.dsc" ), }, "cicero_0.7.2.orig.tar.gz": { "checksums": { + "md5": "4353dede07c5728319ba7f5595a7230a", "sha1": "a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43", "sha256": "63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786", # noqa }, "filename": "cicero_0.7.2.orig.tar.gz", "length": 96527, "url": ( "http://deb.debian.org/debian/pool/contrib/c/cicero/" "cicero_0.7.2.orig.tar.gz" ), }, } def test_debian_dsc_information_ok(): fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL, version="0.7.2-3") dsc_url, dsc_name = dsc_information(p_info) assert dsc_url == PACKAGE_FILES["files"][fname]["uri"] assert dsc_name == PACKAGE_FILES["files"][fname]["name"] def test_debian_dsc_information_not_found(): fname = "cicero_0.7.2-3.dsc" p_info = 
DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL, version="0.7.2-3") p_info.files.pop(fname) dsc_url, dsc_name = dsc_information(p_info) assert dsc_url is None assert dsc_name is None def test_debian_dsc_information_missing_md5sum(): package_files = deepcopy(PACKAGE_FILES) for package_metadata in package_files["files"].values(): del package_metadata["md5sum"] p_info = DebianPackageInfo.from_metadata(package_files, url=URL, version="0.7.2-3") for debian_file_metadata in p_info.files.values(): assert not debian_file_metadata.md5sum def test_debian_dsc_information_extra_sha1(requests_mock_datadir): package_files = deepcopy(PACKAGE_FILES) for package_metadata in package_files["files"].values(): file_bytes = requests.get(package_metadata["uri"]).content package_metadata["sha1"] = hashlib.sha1(file_bytes).hexdigest() p_info = DebianPackageInfo.from_metadata(package_files, url=URL, version="0.7.2-3") for debian_file_metadata in p_info.files.values(): assert debian_file_metadata.sha1 def test_debian_dsc_information_too_many_dsc_entries(): # craft an extra dsc file fname = "cicero_0.7.2-3.dsc" p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL, version="0.7.2-3") data = p_info.files[fname] fname2 = fname.replace("cicero", "ciceroo") p_info.files[fname2] = data with pytest.raises( ValueError, match="Package %s_%s references several dsc" % (PACKAGE_FILES["name"], PACKAGE_FILES["version"]), ): dsc_information(p_info) def test_debian_get_intrinsic_package_metadata( requests_mock_datadir, datadir, tmp_path ): tmp_path = str(tmp_path) # py3.5 compat. p_info = DebianPackageInfo.from_metadata(PACKAGE_FILES, url=URL, version="0.7.2-3") logger.debug("p_info: %s", p_info) # download the packages all_hashes = download_package(p_info, tmp_path) # Retrieve information from package _, dsc_name = dsc_information(p_info) dl_artifacts = [(tmp_path, hashes) for hashes in all_hashes.values()] # Extract information from package extracted_path = extract_package(dl_artifacts, tmp_path) # Retrieve information on package dsc_path = path.join(path.dirname(extracted_path), dsc_name) actual_package_info = get_intrinsic_package_metadata( p_info, dsc_path, extracted_path ) logger.debug("actual_package_info: %s", actual_package_info) assert actual_package_info == IntrinsicPackageMetadata( changelog=DebianPackageChangelog( date="2014-10-19T16:52:35+02:00", history=[ ("cicero", "0.7.2-2"), ("cicero", "0.7.2-1"), ("cicero", "0.7-1"), ], person={ "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ), maintainers=[ { "email": "debian-accessibility@lists.debian.org", "fullname": "Debian Accessibility Team " "", "name": "Debian Accessibility Team", }, { "email": "sthibault@debian.org", "fullname": "Samuel Thibault ", "name": "Samuel Thibault", }, ], name="cicero", version="0.7.2-3", ) def test_debian_multiple_packages(swh_storage, requests_mock_datadir): loader = DebianLoader(swh_storage, URL, packages=PACKAGES_PER_VERSION,) actual_load_status = loader.load() expected_snapshot_id = "a83fa5c089b048161f0677b9614a4aae96a6ca18" assert actual_load_status == { "status": "eventful", "snapshot_id": expected_snapshot_id, } assert_last_visit_matches( swh_storage, URL, status="full", type="deb", snapshot=hash_to_bytes(expected_snapshot_id), ) expected_snapshot = Snapshot( id=hash_to_bytes(expected_snapshot_id), branches={ b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( target_type=TargetType.RELEASE, target=hash_to_bytes("73e0ede9c21f7074ad1f9c81a774cfcb9e02addf"), ), 
b"releases/buster/contrib/0.7.2-4": SnapshotBranch( target_type=TargetType.RELEASE, target=hash_to_bytes("9f6d8d868514f991af0d9f5d7173aba1236a5a75"), ), }, ) check_snapshot(expected_snapshot, swh_storage) + + +def test_debian_loader_only_md5_sum_in_dsc(swh_storage, requests_mock_datadir): + + packages_per_version = deepcopy(PACKAGES_PER_VERSION) + for package_files in packages_per_version.values(): + for package_data in package_files["files"].values(): + del package_data["sha256"] + + loader = DebianLoader(swh_storage, URL, packages=packages_per_version) + + actual_load_status = loader.load() + expected_snapshot_id = "a83fa5c089b048161f0677b9614a4aae96a6ca18" + assert actual_load_status == { + "status": "eventful", + "snapshot_id": expected_snapshot_id, + } + + assert_last_visit_matches( + swh_storage, + URL, + status="full", + type="deb", + snapshot=hash_to_bytes(expected_snapshot_id), + ) + + expected_snapshot = Snapshot( + id=hash_to_bytes(expected_snapshot_id), + branches={ + b"releases/stretch/contrib/0.7.2-3": SnapshotBranch( + target_type=TargetType.RELEASE, + target=hash_to_bytes("73e0ede9c21f7074ad1f9c81a774cfcb9e02addf"), + ), + b"releases/buster/contrib/0.7.2-4": SnapshotBranch( + target_type=TargetType.RELEASE, + target=hash_to_bytes("9f6d8d868514f991af0d9f5d7173aba1236a5a75"), + ), + }, + ) + + check_snapshot(expected_snapshot, swh_storage) diff --git a/swh/loader/package/maven/__init__.py b/swh/loader/package/maven/__init__.py new file mode 100644 index 0000000..1e5b016 --- /dev/null +++ b/swh/loader/package/maven/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + + +from typing import Any, Mapping + + +def register() -> Mapping[str, Any]: + """Register the current worker module's definition""" + from .loader import MavenLoader + + return { + "task_modules": [f"{__name__}.tasks"], + "loader": MavenLoader, + } diff --git a/swh/loader/package/maven/loader.py b/swh/loader/package/maven/loader.py new file mode 100644 index 0000000..bf09e82 --- /dev/null +++ b/swh/loader/package/maven/loader.py @@ -0,0 +1,231 @@ +# Copyright (C) 2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from datetime import datetime, timezone +import hashlib +import json +import logging +from os import path +import string +from typing import ( + Any, + Dict, + Iterator, + List, + Mapping, + Optional, + OrderedDict, + Sequence, + Tuple, +) +from urllib.parse import urlparse + +import attr +import iso8601 +import requests + +from swh.loader.package.loader import ( + BasePackageInfo, + PackageLoader, + PartialExtID, + RawExtrinsicMetadataCore, +) +from swh.loader.package.utils import EMPTY_AUTHOR, release_name +from swh.model.model import ( + MetadataAuthority, + MetadataAuthorityType, + ObjectType, + RawExtrinsicMetadata, + Release, + Sha1Git, + TimestampWithTimezone, +) +from swh.storage.interface import StorageInterface + +logger = logging.getLogger(__name__) + + +@attr.s +class MavenPackageInfo(BasePackageInfo): + time = attr.ib(type=datetime) + """Timestamp of the last update of jar file on the server.""" + gid = attr.ib(type=str) + """Group ID of the maven artifact""" + aid = attr.ib(type=str) + 
"""Artifact ID of the maven artifact""" + version = attr.ib(type=str) + """Version of the maven artifact""" + + # default format for maven artifacts + MANIFEST_FORMAT = string.Template("$gid $aid $version $url $time") + + def extid(self, manifest_format: Optional[string.Template] = None) -> PartialExtID: + """Returns a unique intrinsic identifier of this package info + + ``manifest_format`` allows overriding the class' default MANIFEST_FORMAT""" + manifest_format = manifest_format or self.MANIFEST_FORMAT + manifest = manifest_format.substitute( + { + "gid": self.gid, + "aid": self.aid, + "version": self.version, + "url": self.url, + "time": str(self.time), + } + ) + return ("maven-jar", hashlib.sha256(manifest.encode()).digest()) + + @classmethod + def from_metadata(cls, a_metadata: Dict[str, Any]) -> "MavenPackageInfo": + url = a_metadata["url"] + filename = a_metadata.get("filename") + time = iso8601.parse_date(a_metadata["time"]) + time = time.astimezone(tz=timezone.utc) + gid = a_metadata["gid"] + aid = a_metadata["aid"] + version = a_metadata["version"] + return cls( + url=url, + filename=filename or path.split(url)[-1], + time=time, + gid=gid, + aid=aid, + version=version, + directory_extrinsic_metadata=[ + RawExtrinsicMetadataCore( + format="maven-json", metadata=json.dumps(a_metadata).encode(), + ), + ], + ) + + +class MavenLoader(PackageLoader[MavenPackageInfo]): + """Load source code jar origin's artifact files into swh archive + + """ + + visit_type = "maven" + + def __init__( + self, + storage: StorageInterface, + url: str, + artifacts: Sequence[Dict[str, Any]], + extid_manifest_format: Optional[str] = None, + max_content_size: Optional[int] = None, + ): + f"""Loader constructor. + + For now, this is the lister's task output. + There is one, and only one, artefact (jar or zip) per version, as guaranteed by + the Maven coordinates system. + + Args: + url: Origin url + artifacts: List of single artifact information with keys: + + - **time**: the time of the last update of jar file on the server + as an iso8601 date string + + - **url**: the artifact url to retrieve filename + + - **filename**: optionally, the file's name + + - **gid**: artifact's groupId + + - **aid**: artifact's artifactId + + - **version**: artifact's version + + extid_manifest_format: template string used to format a manifest, + which is hashed to get the extid of a package. + Defaults to {MavenPackageInfo.MANIFEST_FORMAT!r} + + """ + super().__init__(storage=storage, url=url, max_content_size=max_content_size) + self.artifacts = artifacts # assume order is enforced in the lister + self.version_artifact: OrderedDict[str, Dict[str, Any]] + self.version_artifact = OrderedDict( + {str(jar["version"]): jar for jar in artifacts if jar["version"]} + ) + + def get_versions(self) -> Sequence[str]: + return list(self.version_artifact.keys()) + + def get_default_version(self) -> str: + # Default version is the last item + return self.artifacts[-1]["version"] + + def get_metadata_authority(self): + p_url = urlparse(self.url) + return MetadataAuthority( + type=MetadataAuthorityType.FORGE, + url=f"{p_url.scheme}://{p_url.netloc}/", + metadata={}, + ) + + def build_extrinsic_directory_metadata( + self, p_info: MavenPackageInfo, release_id: Sha1Git, directory_id: Sha1Git, + ) -> List[RawExtrinsicMetadata]: + if not p_info.directory_extrinsic_metadata: + # If this package loader doesn't write metadata, no need to require + # an implementation for get_metadata_authority. 
+ return [] + + # Get artifacts + dir_ext_metadata = p_info.directory_extrinsic_metadata[0] + a_metadata = json.loads(dir_ext_metadata.metadata) + aid = a_metadata["aid"] + version = a_metadata["version"] + + # Rebuild POM URL. + pom_url = path.dirname(p_info.url) + pom_url = f"{pom_url}/{aid}-{version}.pom" + + r = requests.get(pom_url, allow_redirects=True) + if r.status_code == 200: + metadata_pom = r.content + else: + metadata_pom = b"" + + return super().build_extrinsic_directory_metadata( + attr.evolve( + p_info, + directory_extrinsic_metadata=[ + RawExtrinsicMetadataCore( + format="maven-pom", metadata=metadata_pom, + ), + dir_ext_metadata, + ], + ), + release_id=release_id, + directory_id=directory_id, + ) + + def get_package_info(self, version: str) -> Iterator[Tuple[str, MavenPackageInfo]]: + a_metadata = self.version_artifact[version] + yield release_name(a_metadata["version"]), MavenPackageInfo.from_metadata( + a_metadata + ) + + def build_release( + self, p_info: MavenPackageInfo, uncompressed_path: str, directory: Sha1Git + ) -> Optional[Release]: + msg = f"Synthetic release for archive at {p_info.url}\n".encode("utf-8") + # time is an iso8601 date + normalized_time = TimestampWithTimezone.from_datetime(p_info.time) + return Release( + name=p_info.version.encode(), + message=msg, + date=normalized_time, + author=EMPTY_AUTHOR, + target=directory, + target_type=ObjectType.DIRECTORY, + synthetic=True, + ) + + def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]: + last_snapshot = self.last_snapshot() + return last_snapshot.to_dict()["branches"] if last_snapshot else {} diff --git a/swh/loader/package/maven/tasks.py b/swh/loader/package/maven/tasks.py new file mode 100644 index 0000000..5be462d --- /dev/null +++ b/swh/loader/package/maven/tasks.py @@ -0,0 +1,15 @@ +# Copyright (C) 2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from celery import shared_task + +from swh.loader.package.maven.loader import MavenLoader + + +@shared_task(name=__name__ + ".LoadMaven") +def load_jar_file(*, url=None, artifacts=None): + """Load jar's artifacts.""" + loader = MavenLoader.from_configfile(url=url, artifacts=artifacts) + return loader.load() diff --git a/swh/loader/package/maven/tests/__init__.py b/swh/loader/package/maven/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar new file mode 100644 index 0000000..2a15a03 Binary files /dev/null and b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0-sources.jar differ diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0.pom b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0.pom new file mode 100644 index 0000000..bc1a35b --- /dev/null +++ b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.0.pom @@ -0,0 +1,86 @@ + + + 4.0.0 + al.aldi + sprova4j + 0.1.0 + sprova4j + Java client for Sprova Test Management + https://github.com/aldialimucaj/sprova4j + 2018 + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + aldi + Aldi Alimucaj + aldi.alimucaj@gmail.com + + + + scm:git:git://github.com/aldialimucaj/sprova4j.git + 
scm:git:git://github.com/aldialimucaj/sprova4j.git + https://github.com/aldialimucaj/sprova4j + + + + ch.qos.logback + logback-classic + 1.2.3 + runtime + + + com.google.code.gson + gson + 2.8.3 + runtime + + + com.squareup.okhttp3 + okhttp + 3.10.0 + runtime + + + com.squareup.okio + okio + 1.0.0 + runtime + + + org.glassfish + javax.json + 1.1.2 + runtime + + + javax.json + javax.json-api + 1.1.2 + runtime + + + javax.validation + validation-api + 2.0.1.Final + runtime + + + junit + junit + 4.12 + test + + + com.squareup.okhttp3 + mockwebserver + 3.10.0 + test + + + diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar new file mode 100644 index 0000000..06fbedb Binary files /dev/null and b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1-sources.jar differ diff --git a/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1.pom b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1.pom new file mode 100644 index 0000000..05e5a71 --- /dev/null +++ b/swh/loader/package/maven/tests/data/https_maven.org/sprova4j-0.1.1.pom @@ -0,0 +1,86 @@ + + + 4.0.0 + al.aldi + sprova4j + 0.1.1 + sprova4j + Java client for Sprova Test Management + https://github.com/aldialimucaj/sprova4j + 2018 + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + aldi + Aldi Alimucaj + aldi.alimucaj@gmail.com + + + + https://github.com/aldialimucaj/sprova4j.git + https://github.com/aldialimucaj/sprova4j.git + https://github.com/aldialimucaj/sprova4j + + + + ch.qos.logback + logback-classic + 1.2.3 + runtime + + + com.google.code.gson + gson + 2.8.5 + runtime + + + com.squareup.okhttp3 + okhttp + 3.10.0 + runtime + + + com.squareup.okio + okio + 1.14.1 + runtime + + + org.glassfish + javax.json + 1.1.2 + runtime + + + javax.json + javax.json-api + 1.1.2 + runtime + + + javax.validation + validation-api + 2.0.1.Final + runtime + + + junit + junit + 4.12 + test + + + com.squareup.okhttp3 + mockwebserver + 3.10.0 + test + + + diff --git a/swh/loader/package/maven/tests/test_maven.py b/swh/loader/package/maven/tests/test_maven.py new file mode 100644 index 0000000..3d2dff4 --- /dev/null +++ b/swh/loader/package/maven/tests/test_maven.py @@ -0,0 +1,615 @@ +# Copyright (C) 2019-2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import hashlib +import json +from pathlib import Path +import string + +import pytest + +from swh.loader.package import __version__ +from swh.loader.package.maven.loader import MavenLoader, MavenPackageInfo +from swh.loader.package.utils import EMPTY_AUTHOR +from swh.loader.tests import assert_last_visit_matches, check_snapshot, get_stats +from swh.model.hashutil import hash_to_bytes +from swh.model.model import ( + RawExtrinsicMetadata, + Release, + Snapshot, + SnapshotBranch, + TargetType, + Timestamp, + TimestampWithTimezone, +) +from swh.model.model import MetadataAuthority, MetadataAuthorityType, MetadataFetcher +from swh.model.model import ObjectType as ModelObjectType +from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType +from swh.storage.algos.snapshot import snapshot_get_all_branches + +URL = "https://repo1.maven.org/maven2/" +MVN_ARTIFACTS = [ + { + "time": 
"2021-07-12 19:06:59.335000", + "url": "https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.0/" + + "sprova4j-0.1.0-sources.jar", + "gid": "al.aldi", + "aid": "sprova4j", + "filename": "sprova4j-0.1.0-sources.jar", + "version": "0.1.0", + }, + { + "time": "2021-07-12 19:37:05.534000", + "url": "https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.1/" + + "sprova4j-0.1.1-sources.jar", + "gid": "al.aldi", + "aid": "sprova4j", + "filename": "sprova4j-0.1.1-sources.jar", + "version": "0.1.1", + }, +] + +MVN_ARTIFACTS_POM = [ + "https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.0/sprova4j-0.1.0.pom", + "https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.1/sprova4j-0.1.1.pom", +] + +_expected_new_contents_first_visit = [ + "cd807364cd7730022b3849f90ccf4bababbada84", + "79e33dd52ebdf615e6696ae69add91cb990d81e2", + "8002bd514156f05a0940ae14ef86eb0179cbd510", + "23479553a6ccec30d377dee0496123a65d23fd8c", + "07ffbebb933bc1660e448f07d8196c2b083797f9", + "abf021b581f80035b56153c9aa27195b8d7ebbb8", + "eec70ba80a6862ed2619727663b17eb0d9dfe131", + "81a493dacb44dedf623f29ecf62c0e035bf698de", + "bda85ed0bbecf8cddfea04234bee16f476f64fe4", + "1ec91d561f5bdf59acb417086e04c54ead94e94e", + "d517b423da707fa21378623f35facebff53cb59d", + "3f0f21a764972d79e583908991c893c999613354", + "a2dd4d7dfe6043baf9619081e4e29966989211af", + "f62685cf0c6825a4097c949280b584cf0e16d047", + "56afc1ea60cef6548ce0a34f44e91b0e4b063835", + "cf7c740926e7ebc9ac8978a5c4f0e1e7a0e9e3af", + "86ff828bea1c22ca3d50ed82569b9c59ce2c41a1", + "1d0fa04454d9fec31d8ee3f35b58158ca1e28b15", + "e90239a2c8d9ede61a29671a8b397a743e18fa34", + "ce8851005d084aea089bcd8cf01052f4b234a823", + "2c34ce622aa7fa68d104900840f66671718e6249", + "e6a6fec32dcb3bee93c34fc11b0174a6b0b0ec6d", + "405d3e1be4b658bf26de37f2c90c597b2796b9d7", + "d0d2f5848721e04300e537826ef7d2d6d9441df0", + "399c67e33e38c475fd724d283dd340f6a2e8dc91", + "dea10c1111cc61ac1809fb7e88857e3db054959f", +] + +_expected_json_metadata = { + "time": "2021-07-12 19:06:59.335000", + "url": ( + "https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.0/" + "sprova4j-0.1.0-sources.jar" + ), + "gid": "al.aldi", + "aid": "sprova4j", + "filename": "sprova4j-0.1.0-sources.jar", + "version": "0.1.0", +} +_expected_pom_metadata = ( + """ + + 4.0.0 + al.aldi + sprova4j + 0.1.0 + sprova4j + Java client for Sprova Test Management + https://github.com/aldialimucaj/sprova4j + 2018 + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + aldi + Aldi Alimucaj + aldi.alimucaj@gmail.com + + + + scm:git:git://github.com/aldialimucaj/sprova4j.git + scm:git:git://github.com/aldialimucaj/sprova4j.git + https://github.com/aldialimucaj/sprova4j + + + + ch.qos.logback + logback-classic + 1.2.3 + runtime + + + com.google.code.gson + gson + 2.8.3 + runtime + + + com.squareup.okhttp3 + okhttp + 3.10.0 + runtime + + + com.squareup.okio + okio + 1.0.0 + runtime + + + org.glassfish + javax.json + 1.1.2 + runtime + + + javax.json + javax.json-api + 1.1.2 + runtime + + + javax.validation + validation-api + 2.0.1.Final + runtime + + + junit + junit + 4.12 + test + + + com.squareup.okhttp3 + mockwebserver + 3.10.0 + test + + + +""" +) + +_expected_new_directories_first_visit = [ + "6c9de41e4cebb91a8368da1d89ae9873bd540ec3", + "c1a2ee97fc47426d0179f94d223405336b5cd075", + "9e1bdca292765a9528af18743bd793b80362c768", + "193a7af634592ef27fb341762806f61e8fb8eab3", + "a297aa21e3dbf138b370be3aae7a852dd403bbbb", + "da84026119ae04022f007d5b3362e98d46d09045", + 
"75bb915942a9c441ca62aeffc3b634f1ec9ce5e2", + "0851d359283b2ad82b116c8d1b55ab14b1ec219c", + "2bcbb8b723a025ee9a36b719cea229ed38c37e46", +] + +_expected_new_release_first_visit = "02e83c29ec094db581f939d2e238d0613a4f59ac" + +REL_MSG = ( + b"Synthetic release for archive at https://repo1.maven.org/maven2/al/aldi/" + b"sprova4j/0.1.0/sprova4j-0.1.0-sources.jar\n" +) + +REVISION_DATE = TimestampWithTimezone( + timestamp=Timestamp(seconds=1626116819, microseconds=335000), + offset=0, + negative_utc=False, +) + + +@pytest.fixture +def data_jar_1(datadir): + content = Path( + datadir, "https_maven.org", "sprova4j-0.1.0-sources.jar" + ).read_bytes() + return content + + +@pytest.fixture +def data_pom_1(datadir): + content = Path(datadir, "https_maven.org", "sprova4j-0.1.0.pom").read_bytes() + return content + + +@pytest.fixture +def data_jar_2(datadir): + content = Path( + datadir, "https_maven.org", "sprova4j-0.1.1-sources.jar" + ).read_bytes() + return content + + +@pytest.fixture +def data_pom_2(datadir): + content = Path(datadir, "https_maven.org", "sprova4j-0.1.1.pom").read_bytes() + return content + + +def test_jar_visit_with_no_artifact_found(swh_storage, requests_mock_datadir): + unknown_artifact_url = "https://ftp.g.o/unknown/8sync-0.1.0.tar.gz" + loader = MavenLoader( + swh_storage, + unknown_artifact_url, + artifacts=[ + { + "time": "2021-07-18 08:05:05.187000", + "url": unknown_artifact_url, # unknown artifact + "filename": "8sync-0.1.0.tar.gz", + "gid": "al/aldi", + "aid": "sprova4j", + "version": "0.1.0", + } + ], + ) + + actual_load_status = loader.load() + assert actual_load_status["status"] == "uneventful" + assert actual_load_status["snapshot_id"] is not None + + expected_snapshot_id = "1a8893e6a86f444e8be8e7bda6cb34fb1735a00e" + assert actual_load_status["snapshot_id"] == expected_snapshot_id + + stats = get_stats(swh_storage) + + assert_last_visit_matches( + swh_storage, unknown_artifact_url, status="partial", type="maven" + ) + + assert { + "content": 0, + "directory": 0, + "origin": 1, + "origin_visit": 1, + "release": 0, + "revision": 0, + "skipped_content": 0, + "snapshot": 1, + } == stats + + +def test_jar_visit_with_release_artifact_no_prior_visit( + swh_storage, requests_mock, data_jar_1, data_pom_1 +): + """With no prior visit, loading a jar ends up with 1 snapshot + + """ + requests_mock.get(MVN_ARTIFACTS[0]["url"], content=data_jar_1) + requests_mock.get(MVN_ARTIFACTS_POM[0], content=data_pom_1) + loader = MavenLoader( + swh_storage, MVN_ARTIFACTS[0]["url"], artifacts=[MVN_ARTIFACTS[0]] + ) + + actual_load_status = loader.load() + assert actual_load_status["status"] == "eventful" + + expected_snapshot_first_visit_id = hash_to_bytes( + "c5195b8ebd148649bf094561877964b131ab27e0" + ) + + expected_snapshot = Snapshot( + id=expected_snapshot_first_visit_id, + branches={ + b"HEAD": SnapshotBranch( + target_type=TargetType.ALIAS, target=b"releases/0.1.0", + ), + b"releases/0.1.0": SnapshotBranch( + target_type=TargetType.RELEASE, + target=hash_to_bytes(_expected_new_release_first_visit), + ), + }, + ) + actual_snapshot = snapshot_get_all_branches( + swh_storage, hash_to_bytes(actual_load_status["snapshot_id"]) + ) + + assert actual_snapshot == expected_snapshot + check_snapshot(expected_snapshot, swh_storage) + + assert ( + hash_to_bytes(actual_load_status["snapshot_id"]) + == expected_snapshot_first_visit_id + ) + + stats = get_stats(swh_storage) + assert_last_visit_matches( + swh_storage, MVN_ARTIFACTS[0]["url"], status="full", type="maven" + ) + + expected_contents = 
map(hash_to_bytes, _expected_new_contents_first_visit)
+    assert list(swh_storage.content_missing_per_sha1(expected_contents)) == []
+
+    expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
+    assert list(swh_storage.directory_missing(expected_dirs)) == []
+
+    expected_rels = map(hash_to_bytes, {_expected_new_release_first_visit})
+    assert list(swh_storage.release_missing(expected_rels)) == []
+
+    rel_id = actual_snapshot.branches[b"releases/0.1.0"].target
+    (rel,) = swh_storage.release_get([rel_id])
+
+    assert rel == Release(
+        id=hash_to_bytes(_expected_new_release_first_visit),
+        name=b"0.1.0",
+        message=REL_MSG,
+        author=EMPTY_AUTHOR,
+        date=REVISION_DATE,
+        target_type=ModelObjectType.DIRECTORY,
+        target=hash_to_bytes("6c9de41e4cebb91a8368da1d89ae9873bd540ec3"),
+        synthetic=True,
+        metadata=None,
+    )
+
+    assert {
+        "content": len(_expected_new_contents_first_visit),
+        "directory": len(_expected_new_directories_first_visit),
+        "origin": 1,
+        "origin_visit": 1,
+        "release": 1,
+        "revision": 0,
+        "skipped_content": 0,
+        "snapshot": 1,
+    } == stats
+
+
+def test_jar_2_visits_without_change(
+    swh_storage, requests_mock_datadir, requests_mock, data_jar_2, data_pom_2
+):
+    """Loading the same jar twice without change ends up with a single
+    snapshot: the second visit is uneventful and reuses the first snapshot.
+
+    """
+    requests_mock.get(MVN_ARTIFACTS[1]["url"], content=data_jar_2)
+    requests_mock.get(MVN_ARTIFACTS_POM[1], content=data_pom_2)
+    loader = MavenLoader(
+        swh_storage, MVN_ARTIFACTS[1]["url"], artifacts=[MVN_ARTIFACTS[1]]
+    )
+
+    actual_load_status = loader.load()
+    assert actual_load_status["status"] == "eventful"
+
+    expected_snapshot_first_visit_id = hash_to_bytes(
+        "91dcacee7a6d2b54f9cab14bc14cb86d22d2ac2b"
+    )
+
+    assert (
+        hash_to_bytes(actual_load_status["snapshot_id"])
+        == expected_snapshot_first_visit_id
+    )
+
+    assert_last_visit_matches(
+        swh_storage, MVN_ARTIFACTS[1]["url"], status="full", type="maven"
+    )
+
+    actual_load_status2 = loader.load()
+    assert actual_load_status2["status"] == "uneventful"
+    assert actual_load_status2["snapshot_id"] is not None
+    assert actual_load_status["snapshot_id"] == actual_load_status2["snapshot_id"]
+
+    assert_last_visit_matches(
+        swh_storage, MVN_ARTIFACTS[1]["url"], status="full", type="maven"
+    )
+
+    # Make sure the request history contains exactly one entry for the jar
+    # download and one for the pom fetch, in that order.
+    urls_history = [str(req.url) for req in list(requests_mock_datadir.request_history)]
+    assert urls_history == [
+        MVN_ARTIFACTS[1]["url"],
+        MVN_ARTIFACTS_POM[1],
+    ]
+
+
+def test_metadata(swh_storage, requests_mock, data_jar_1, data_pom_1):
+    """With no prior visit, loading a jar ends up with 1 snapshot.
+    Extrinsic metadata is the pom file associated to the source jar.
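+    Both the maven-json and the maven-pom entries are expected on the
+    release's target directory.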
+ """ + requests_mock.get(MVN_ARTIFACTS[0]["url"], content=data_jar_1) + requests_mock.get(MVN_ARTIFACTS_POM[0], content=data_pom_1) + loader = MavenLoader( + swh_storage, MVN_ARTIFACTS[0]["url"], artifacts=[MVN_ARTIFACTS[0]] + ) + + actual_load_status = loader.load() + assert actual_load_status["status"] == "eventful" + + expected_release_id = hash_to_bytes(_expected_new_release_first_visit) + release = swh_storage.release_get([expected_release_id])[0] + assert release is not None + + release_swhid = CoreSWHID( + object_type=ObjectType.RELEASE, object_id=expected_release_id + ) + directory_swhid = ExtendedSWHID( + object_type=ExtendedObjectType.DIRECTORY, object_id=release.target + ) + metadata_authority = MetadataAuthority( + type=MetadataAuthorityType.FORGE, url="https://repo1.maven.org/", + ) + + expected_metadata = [ + RawExtrinsicMetadata( + target=directory_swhid, + authority=metadata_authority, + fetcher=MetadataFetcher( + name="swh.loader.package.maven.loader.MavenLoader", version=__version__, + ), + discovery_date=loader.visit_date, + format="maven-pom", + metadata=_expected_pom_metadata.encode(), + origin=MVN_ARTIFACTS[0]["url"], + release=release_swhid, + ), + RawExtrinsicMetadata( + target=directory_swhid, + authority=metadata_authority, + fetcher=MetadataFetcher( + name="swh.loader.package.maven.loader.MavenLoader", version=__version__, + ), + discovery_date=loader.visit_date, + format="maven-json", + metadata=json.dumps(_expected_json_metadata).encode(), + origin=MVN_ARTIFACTS[0]["url"], + release=release_swhid, + ), + ] + + res = swh_storage.raw_extrinsic_metadata_get(directory_swhid, metadata_authority) + assert res.next_page_token is None + assert set(res.results) == set(expected_metadata) + + +def test_metadatata_no_pom(swh_storage, requests_mock, data_jar_1): + """With no prior visit, loading a jar ends up with 1 snapshot. + Extrinsic metadata is None if the pom file cannot be retrieved. 
+ """ + requests_mock.get(MVN_ARTIFACTS[0]["url"], content=data_jar_1) + requests_mock.get(MVN_ARTIFACTS_POM[0], status_code="404") + loader = MavenLoader( + swh_storage, MVN_ARTIFACTS[0]["url"], artifacts=[MVN_ARTIFACTS[0]] + ) + + actual_load_status = loader.load() + assert actual_load_status["status"] == "eventful" + + expected_release_id = hash_to_bytes(_expected_new_release_first_visit) + release = swh_storage.release_get([expected_release_id])[0] + assert release is not None + + release_swhid = CoreSWHID( + object_type=ObjectType.RELEASE, object_id=expected_release_id + ) + directory_swhid = ExtendedSWHID( + object_type=ExtendedObjectType.DIRECTORY, object_id=release.target + ) + metadata_authority = MetadataAuthority( + type=MetadataAuthorityType.FORGE, url="https://repo1.maven.org/", + ) + + expected_metadata = [ + RawExtrinsicMetadata( + target=directory_swhid, + authority=metadata_authority, + fetcher=MetadataFetcher( + name="swh.loader.package.maven.loader.MavenLoader", version=__version__, + ), + discovery_date=loader.visit_date, + format="maven-pom", + metadata=b"", + origin=MVN_ARTIFACTS[0]["url"], + release=release_swhid, + ), + RawExtrinsicMetadata( + target=directory_swhid, + authority=metadata_authority, + fetcher=MetadataFetcher( + name="swh.loader.package.maven.loader.MavenLoader", version=__version__, + ), + discovery_date=loader.visit_date, + format="maven-json", + metadata=json.dumps(_expected_json_metadata).encode(), + origin=MVN_ARTIFACTS[0]["url"], + release=release_swhid, + ), + ] + res = swh_storage.raw_extrinsic_metadata_get(directory_swhid, metadata_authority) + assert res.next_page_token is None + assert set(res.results) == set(expected_metadata) + + +def test_jar_extid(): + """Compute primary key should return the right identity + + """ + + metadata = MVN_ARTIFACTS[0] + + p_info = MavenPackageInfo(**metadata) + + expected_manifest = ( + b"al.aldi sprova4j 0.1.0 " + b"https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.0/sprova4j-0.1.0" + b"-sources.jar 1626109619335" + ) + for manifest_format in [ + string.Template("$aid $gid $version"), + string.Template("$gid $aid"), + string.Template("$gid $aid $version"), + ]: + actual_id = p_info.extid(manifest_format=manifest_format) + assert actual_id != ("maven-jar", hashlib.sha256(expected_manifest).digest(),) + + for manifest_format, expected_manifest in [ + (None, "{gid} {aid} {version} {url} {time}".format(**metadata).encode()), + ]: + actual_id = p_info.extid(manifest_format=manifest_format) + assert actual_id == ("maven-jar", hashlib.sha256(expected_manifest).digest(),) + + with pytest.raises(KeyError): + p_info.extid(manifest_format=string.Template("$a $unknown_key")) + + +def test_jar_snapshot_append( + swh_storage, + requests_mock_datadir, + requests_mock, + data_jar_1, + data_pom_1, + data_jar_2, + data_pom_2, +): + + # first loading with a first artifact + artifact1 = MVN_ARTIFACTS[0] + url1 = artifact1["url"] + requests_mock.get(url1, content=data_jar_1) + requests_mock.get(MVN_ARTIFACTS_POM[0], content=data_pom_1) + loader = MavenLoader(swh_storage, url1, [artifact1]) + actual_load_status = loader.load() + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None + assert_last_visit_matches(swh_storage, url1, status="full", type="maven") + + # check expected snapshot + snapshot = loader.last_snapshot() + assert len(snapshot.branches) == 2 + branch_artifact1_name = f"releases/{artifact1['version']}".encode() + assert b"HEAD" in snapshot.branches + assert 
branch_artifact1_name in snapshot.branches + assert snapshot.branches[b"HEAD"].target == branch_artifact1_name + + # second loading with a second artifact + artifact2 = MVN_ARTIFACTS[1] + url2 = artifact2["url"] + requests_mock.get(url2, content=data_jar_2) + requests_mock.get(MVN_ARTIFACTS_POM[1], content=data_pom_2) + loader = MavenLoader(swh_storage, url2, [artifact2]) + actual_load_status = loader.load() + assert actual_load_status["status"] == "eventful" + assert actual_load_status["snapshot_id"] is not None + assert_last_visit_matches(swh_storage, url2, status="full", type="maven") + + # check expected snapshot, should contain a new branch and the + # branch for the first artifact + snapshot = loader.last_snapshot() + assert len(snapshot.branches) == 2 + branch_artifact2_name = f"releases/{artifact2['version']}".encode() + assert b"HEAD" in snapshot.branches + assert branch_artifact2_name in snapshot.branches + assert branch_artifact1_name not in snapshot.branches + assert snapshot.branches[b"HEAD"].target == branch_artifact2_name diff --git a/swh/loader/package/maven/tests/test_tasks.py b/swh/loader/package/maven/tests/test_tasks.py new file mode 100644 index 0000000..1721219 --- /dev/null +++ b/swh/loader/package/maven/tests/test_tasks.py @@ -0,0 +1,50 @@ +# Copyright (C) 2019-2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +MVN_ARTIFACTS = [ + { + "time": 1626109619335, + "url": "https://repo1.maven.org/maven2/al/aldi/sprova4j/0.1.0/" + + "sprova4j-0.1.0.jar", + "gid": "al.aldi", + "aid": "sprova4j", + "filename": "sprova4j-0.1.0.jar", + "version": "0.1.0", + }, +] + + +def test_tasks_jar_loader( + mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config +): + mock_load = mocker.patch("swh.loader.package.maven.loader.MavenLoader.load") + mock_load.return_value = {"status": "eventful"} + + res = swh_scheduler_celery_app.send_task( + "swh.loader.package.maven.tasks.LoadMaven", + kwargs=dict(url=MVN_ARTIFACTS[0]["url"], artifacts=MVN_ARTIFACTS,), + ) + assert res + res.wait() + assert res.successful() + assert mock_load.called + assert res.result == {"status": "eventful"} + + +def test_tasks_jar_loader_snapshot_append( + mocker, swh_scheduler_celery_app, swh_scheduler_celery_worker, swh_config +): + mock_load = mocker.patch("swh.loader.package.maven.loader.MavenLoader.load") + mock_load.return_value = {"status": "eventful"} + + res = swh_scheduler_celery_app.send_task( + "swh.loader.package.maven.tasks.LoadMaven", + kwargs=dict(url=MVN_ARTIFACTS[0]["url"], artifacts=[]), + ) + assert res + res.wait() + assert res.successful() + assert mock_load.called + assert res.result == {"status": "eventful"} diff --git a/swh/loader/package/tests/test_utils.py b/swh/loader/package/tests/test_utils.py index bd7df6c..772379f 100644 --- a/swh/loader/package/tests/test_utils.py +++ b/swh/loader/package/tests/test_utils.py @@ -1,238 +1,238 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os from unittest.mock import MagicMock from urllib.error import URLError from urllib.parse import quote import pytest from swh.loader.exception import NotFound import swh.loader.package from 
swh.loader.package.utils import api_info, download, release_name def test_version_generation(): assert ( swh.loader.package.__version__ != "devel" ), "Make sure swh.loader.core is installed (e.g. pip install -e .)" @pytest.mark.fs def test_download_fail_to_download(tmp_path, requests_mock): url = "https://pypi.org/pypi/arrow/json" status_code = 404 requests_mock.get(url, status_code=status_code) with pytest.raises(ValueError) as e: download(url, tmp_path) assert e.value.args[0] == "Fail to query '%s'. Reason: %s" % (url, status_code) _filename = "requests-0.0.1.tar.gz" _data = "this is something" -def _check_download_ok(url, dest, filename=_filename, hashes=None): +def _check_download_ok(url, dest, filename=_filename, hashes={}): actual_filepath, actual_hashes = download(url, dest, hashes=hashes) actual_filename = os.path.basename(actual_filepath) assert actual_filename == filename assert actual_hashes["length"] == len(_data) assert ( actual_hashes["checksums"]["sha1"] == "fdd1ce606a904b08c816ba84f3125f2af44d92b2" ) assert ( actual_hashes["checksums"]["sha256"] == "1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5" ) @pytest.mark.fs def test_download_ok(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" url = f"https://pypi.org/pypi/requests/{_filename}" requests_mock.get(url, text=_data, headers={"content-length": str(len(_data))}) _check_download_ok(url, dest=str(tmp_path)) @pytest.mark.fs def test_download_ok_no_header(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" url = f"https://pypi.org/pypi/requests/{_filename}" requests_mock.get(url, text=_data) # no header information _check_download_ok(url, dest=str(tmp_path)) @pytest.mark.fs def test_download_ok_with_hashes(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" url = f"https://pypi.org/pypi/requests/{_filename}" requests_mock.get(url, text=_data, headers={"content-length": str(len(_data))}) # good hashes for such file good = { "sha1": "fdd1ce606a904b08c816ba84f3125f2af44d92b2", "sha256": "1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5", # noqa } _check_download_ok(url, dest=str(tmp_path), hashes=good) @pytest.mark.fs def test_download_fail_hashes_mismatch(tmp_path, requests_mock): """Mismatch hash after download should raise """ url = f"https://pypi.org/pypi/requests/{_filename}" requests_mock.get(url, text=_data, headers={"content-length": str(len(_data))}) # good hashes for such file good = { "sha1": "fdd1ce606a904b08c816ba84f3125f2af44d92b2", "sha256": "1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5", # noqa } for hash_algo in good.keys(): wrong_hash = good[hash_algo].replace("1", "0") expected_hashes = good.copy() expected_hashes[hash_algo] = wrong_hash # set the wrong hash expected_msg = "Failure when fetching %s. 
" "Checksum mismatched: %s != %s" % ( url, wrong_hash, good[hash_algo], ) with pytest.raises(ValueError, match=expected_msg): download(url, dest=str(tmp_path), hashes=expected_hashes) @pytest.mark.fs def test_ftp_download_ok(tmp_path, mocker): """Download without issue should provide filename and hashes""" url = f"ftp://pypi.org/pypi/requests/{_filename}" cm = MagicMock() cm.getstatus.return_value = 200 cm.read.side_effect = [_data.encode(), b""] cm.__enter__.return_value = cm mocker.patch("swh.loader.package.utils.urlopen").return_value = cm _check_download_ok(url, dest=str(tmp_path)) @pytest.mark.fs def test_ftp_download_ko(tmp_path, mocker): """Download without issue should provide filename and hashes""" filename = "requests-0.0.1.tar.gz" url = "ftp://pypi.org/pypi/requests/%s" % filename mocker.patch("swh.loader.package.utils.urlopen").side_effect = URLError("FTP error") with pytest.raises(URLError): download(url, dest=str(tmp_path)) @pytest.mark.fs def test_download_with_redirection(tmp_path, requests_mock): """Download with redirection should use the targeted URL to extract filename""" url = "https://example.org/project/requests/download" redirection_url = f"https://example.org/project/requests/files/{_filename}" requests_mock.get(url, status_code=302, headers={"location": redirection_url}) requests_mock.get( redirection_url, text=_data, headers={"content-length": str(len(_data))} ) _check_download_ok(url, dest=str(tmp_path)) def test_download_extracting_filename_from_url(tmp_path, requests_mock): """Extracting filename from url must sanitize the filename first""" url = "https://example.org/project/requests-0.0.1.tar.gz?a=b&c=d&foo=bar" requests_mock.get( url, status_code=200, text=_data, headers={"content-length": str(len(_data))} ) _check_download_ok(url, dest=str(tmp_path)) @pytest.mark.fs @pytest.mark.parametrize( "filename", [f'"{_filename}"', _filename, '"filename with spaces.tar.gz"'] ) def test_download_filename_from_content_disposition(tmp_path, requests_mock, filename): """Filename should be extracted from content-disposition request header when available.""" url = "https://example.org/download/requests/tar.gz/v0.0.1" requests_mock.get( url, text=_data, headers={ "content-length": str(len(_data)), "content-disposition": f"attachment; filename={filename}", }, ) _check_download_ok(url, dest=str(tmp_path), filename=filename.strip('"')) @pytest.mark.fs @pytest.mark.parametrize("filename", ['"archive école.tar.gz"', "archive_école.tgz"]) def test_download_utf8_filename_from_content_disposition( tmp_path, requests_mock, filename ): """Filename should be extracted from content-disposition request header when available.""" url = "https://example.org/download/requests/tar.gz/v0.0.1" data = "this is something" requests_mock.get( url, text=data, headers={ "content-length": str(len(data)), "content-disposition": f"attachment; filename*=utf-8''{quote(filename)}", }, ) _check_download_ok(url, dest=str(tmp_path), filename=filename.strip('"')) def test_api_info_failure(requests_mock): """Failure to fetch info/release information should raise""" url = "https://pypi.org/pypi/requests/json" status_code = 400 requests_mock.get(url, status_code=status_code) with pytest.raises(NotFound) as e0: api_info(url) assert e0.value.args[0] == "Fail to query '%s'. 
Reason: %s" % (url, status_code) def test_api_info(requests_mock): """Fetching json info from pypi project should be ok""" url = "https://pypi.org/pypi/requests/json" requests_mock.get(url, text='{"version": "0.0.1"}') actual_info = json.loads(api_info(url)) assert actual_info == { "version": "0.0.1", } def test_release_name(): for version, filename, expected_release in [ ("0.0.1", None, "releases/0.0.1"), ("0.0.2", "something", "releases/0.0.2/something"), ]: assert release_name(version, filename) == expected_release diff --git a/swh/loader/package/utils.py b/swh/loader/package/utils.py index 4757581..777ff50 100644 --- a/swh/loader/package/utils.py +++ b/swh/loader/package/utils.py @@ -1,185 +1,185 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import copy import functools import itertools import logging import os import re from typing import Callable, Dict, Optional, Tuple, TypeVar from urllib.parse import unquote, urlsplit from urllib.request import urlopen import requests from swh.loader.exception import NotFound from swh.loader.package import DEFAULT_PARAMS from swh.model.hashutil import HASH_BLOCK_SIZE, MultiHash from swh.model.model import Person logger = logging.getLogger(__name__) DOWNLOAD_HASHES = set(["sha1", "sha256", "length"]) EMPTY_AUTHOR = Person(fullname=b"", name=None, email=None,) def api_info(url: str, **extra_params) -> bytes: """Basic api client to retrieve information on project. This deals with fetching json metadata about pypi projects. Args: url (str): The api url (e.g PyPI, npm, etc...) Raises: NotFound in case of query failures (for some reasons: 404, ...) Returns: The associated response's information """ response = requests.get(url, **{**DEFAULT_PARAMS, **extra_params}) if response.status_code != 200: raise NotFound(f"Fail to query '{url}'. Reason: {response.status_code}") return response.content def _content_disposition_filename(header: str) -> Optional[str]: fname = None fnames = re.findall(r"filename[\*]?=([^;]+)", header) if fnames and "utf-8''" in fnames[0].lower(): # RFC 5987 fname = re.sub("utf-8''", "", fnames[0], flags=re.IGNORECASE) fname = unquote(fname) elif fnames: fname = fnames[0] if fname: fname = os.path.basename(fname.strip().strip('"')) return fname def download( url: str, dest: str, hashes: Dict = {}, filename: Optional[str] = None, auth: Optional[Tuple[str, str]] = None, extra_request_headers: Optional[Dict[str, str]] = None, ) -> Tuple[str, Dict]: """Download a remote tarball from url, uncompresses and computes swh hashes on it. Args: url: Artifact uri to fetch, uncompress and hash dest: Directory to write the archive to hashes: Dict of expected hashes (key is the hash algo) for the artifact to download (those hashes are expected to be hex string) auth: Optional tuple of login/password (for http authentication service, e.g. deposit) Raises: ValueError in case of any error when fetching/computing (length, checksums mismatched...) 
Returns: Tuple of local (filepath, hashes of filepath) """ params = copy.deepcopy(DEFAULT_PARAMS) if auth is not None: params["auth"] = auth if extra_request_headers is not None: params["headers"].update(extra_request_headers) # so the connection does not hang indefinitely (read/connection timeout) timeout = params.get("timeout", 60) if url.startswith("ftp://"): response = urlopen(url, timeout=timeout) chunks = (response.read(HASH_BLOCK_SIZE) for _ in itertools.count()) response_data = itertools.takewhile(bool, chunks) else: response = requests.get(url, **params, timeout=timeout, stream=True) if response.status_code != 200: raise ValueError( "Fail to query '%s'. Reason: %s" % (url, response.status_code) ) # update URL to response one as requests follow redirection by default # on GET requests url = response.url # try to extract filename from content-disposition header if available if filename is None and "content-disposition" in response.headers: filename = _content_disposition_filename( response.headers["content-disposition"] ) response_data = response.iter_content(chunk_size=HASH_BLOCK_SIZE) filename = filename if filename else os.path.basename(urlsplit(url).path) logger.debug("filename: %s", filename) filepath = os.path.join(dest, filename) logger.debug("filepath: %s", filepath) - h = MultiHash(hash_names=DOWNLOAD_HASHES) + h = MultiHash(hash_names=DOWNLOAD_HASHES | set(hashes.keys())) with open(filepath, "wb") as f: for chunk in response_data: h.update(chunk) f.write(chunk) response.close() # Also check the expected hashes if provided if hashes: actual_hashes = h.hexdigest() for algo_hash in hashes.keys(): actual_digest = actual_hashes[algo_hash] expected_digest = hashes[algo_hash] if actual_digest != expected_digest: raise ValueError( "Failure when fetching %s. " "Checksum mismatched: %s != %s" % (url, expected_digest, actual_digest) ) computed_hashes = h.hexdigest() length = computed_hashes.pop("length") extrinsic_metadata = { "length": length, "filename": filename, "checksums": computed_hashes, "url": url, } logger.debug("extrinsic_metadata", extrinsic_metadata) return filepath, extrinsic_metadata def release_name(version: str, filename: Optional[str] = None) -> str: if filename: return "releases/%s/%s" % (version, filename) return "releases/%s" % version TReturn = TypeVar("TReturn") TSelf = TypeVar("TSelf") _UNDEFINED = object() def cached_method(f: Callable[[TSelf], TReturn]) -> Callable[[TSelf], TReturn]: cache_name = f"_cached_{f.__name__}" @functools.wraps(f) def newf(self): value = getattr(self, cache_name, _UNDEFINED) if value is _UNDEFINED: value = f(self) setattr(self, cache_name, value) return value return newf
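The ``cached_method`` helper closing this module memoizes a zero-argument method on its instance. A minimal usage sketch follows; the ``RepoApi`` class and its ``info`` method are hypothetical, invented purely for illustration:

```python
from swh.loader.package.utils import cached_method


class RepoApi:
    """Hypothetical client, only to illustrate ``cached_method``."""

    def __init__(self, url: str):
        self.url = url
        self.calls = 0

    @cached_method
    def info(self):
        # Stand-in for an expensive call (e.g. an HTTP round-trip).
        self.calls += 1
        return {"url": self.url}


api = RepoApi("https://repo1.maven.org/maven2/")
assert api.info() is api.info()  # second call returns the value cached on `api`
assert api.calls == 1  # the decorated body ran exactly once
```

The cache lives in a per-instance attribute (``_cached_info`` here), so two different ``RepoApi`` instances do not share results.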