diff --git a/PKG-INFO b/PKG-INFO index d077351..2c9a712 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,275 +1,275 @@ Metadata-Version: 2.1 Name: swh.lister -Version: 0.7.1 +Version: 0.8.0 Summary: Software Heritage lister Home-page: https://forge.softwareheritage.org/diffusion/DLSGH/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-lister Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-lister/ Description: swh-lister ========== This component from the Software Heritage stack aims to produce listings of software origins and their urls hosted on various public developer platforms or package managers. As these operations are quite similar, it provides a set of Python modules abstracting common software origins listing behaviors. It also provides several lister implementations, contained in the following Python modules: - `swh.lister.bitbucket` - `swh.lister.cgit` - `swh.lister.cran` - `swh.lister.debian` - `swh.lister.gitea` - `swh.lister.github` - `swh.lister.gitlab` - `swh.lister.gnu` - `swh.lister.launchpad` - `swh.lister.npm` - `swh.lister.packagist` - `swh.lister.phabricator` - `swh.lister.pypi` Dependencies ------------ All required dependencies can be found in the `requirements*.txt` files located at the root of the repository. Local deployment ---------------- ## lister configuration Each lister implemented so far by Software Heritage (`github`, `gitlab`, `debian`, `pypi`, `npm`) must be configured by following the instructions below (please note that you have to replace `` by one of the lister name introduced above). ### Preparation steps 1. `mkdir ~/.config/swh/ ~/.cache/swh/lister//` 2. create configuration file `~/.config/swh/lister_.yml` 3. Bootstrap the db instance schema ```lang=bash $ createdb lister- $ python3 -m swh.lister.cli --db-url postgres:///lister- ``` Note: This bootstraps a minimum data set needed for the lister to run. ### Configuration file sample Minimalistic configuration shared by all listers to add in file `~/.config/swh/lister_.yml`: ```lang=yml storage: cls: 'remote' args: url: 'http://localhost:5002/' scheduler: cls: 'remote' args: url: 'http://localhost:5008/' lister: cls: 'local' args: # see http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls db: 'postgresql:///lister-' credentials: [] cache_responses: True cache_dir: /home/user/.cache/swh/lister// ``` Note: This expects storage (5002) and scheduler (5008) services to run locally ## lister-github Once configured, you can execute a GitHub lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.github.tasks import range_github_lister logging.basicConfig(level=logging.DEBUG) range_github_lister(364, 365) ... 
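# Note: the two arguments appear to be the lower and upper bounds of the
# repository-id range to list (the GitHub lister pages through repositories by
# numeric id); 364 and 365 are purely illustrative values.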
``` ## lister-gitlab Once configured, you can execute a GitLab lister using the instructions detailed in the `python3` scripts below: ```lang=python import logging from swh.lister.gitlab.tasks import range_gitlab_lister logging.basicConfig(level=logging.DEBUG) range_gitlab_lister(1, 2, { 'instance': 'debian', 'api_baseurl': 'https://salsa.debian.org/api/v4', 'sort': 'asc', 'per_page': 20 }) ``` ```lang=python import logging from swh.lister.gitlab.tasks import full_gitlab_relister logging.basicConfig(level=logging.DEBUG) full_gitlab_relister({ 'instance': '0xacab', 'api_baseurl': 'https://0xacab.org/api/v4', 'sort': 'asc', 'per_page': 20 }) ``` ```lang=python import logging from swh.lister.gitlab.tasks import incremental_gitlab_lister logging.basicConfig(level=logging.DEBUG) incremental_gitlab_lister({ 'instance': 'freedesktop.org', 'api_baseurl': 'https://gitlab.freedesktop.org/api/v4', 'sort': 'asc', 'per_page': 20 }) ``` ## lister-debian Once configured, you can execute a Debian lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.debian.tasks import debian_lister logging.basicConfig(level=logging.DEBUG) debian_lister('Debian') ``` ## lister-pypi Once configured, you can execute a PyPI lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.pypi.tasks import pypi_lister logging.basicConfig(level=logging.DEBUG) pypi_lister() ``` ## lister-npm Once configured, you can execute an npm lister using the following instructions in a `python3` REPL: ```lang=python import logging from swh.lister.npm.tasks import npm_lister logging.basicConfig(level=logging.DEBUG) npm_lister() ``` ## lister-phabricator Once configured, you can execute a Phabricator lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.phabricator.tasks import incremental_phabricator_lister logging.basicConfig(level=logging.DEBUG) incremental_phabricator_lister(forge_url='https://forge.softwareheritage.org', api_token='XXXX') ``` ## lister-gnu Once configured, you can execute a GNU lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.gnu.tasks import gnu_lister logging.basicConfig(level=logging.DEBUG) gnu_lister() ``` ## lister-cran Once configured, you can execute a CRAN lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.cran.tasks import cran_lister logging.basicConfig(level=logging.DEBUG) cran_lister() ``` ## lister-cgit Once configured, you can execute a cgit lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.cgit.tasks import cgit_lister logging.basicConfig(level=logging.DEBUG) # simple cgit instance cgit_lister(url='https://git.kernel.org/') # cgit instance whose listed repositories differ from the base url cgit_lister(url='https://cgit.kde.org/', url_prefix='https://anongit.kde.org/') ``` ## lister-packagist Once configured, you can execute a Packagist lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.packagist.tasks import packagist_lister logging.basicConfig(level=logging.DEBUG) packagist_lister() ``` Licensing --------- This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your 
option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. See top-level LICENSE file for the full text of the GNU General Public License along with this program. Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing diff --git a/conftest.py b/conftest.py index 2daa11d..da8b930 100644 --- a/conftest.py +++ b/conftest.py @@ -1,10 +1,10 @@ -# Copyright (C) 2020 The Software Heritage developers +# Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os -pytest_plugins = ["swh.scheduler.pytest_plugin", "swh.lister.pytest_plugin"] +pytest_plugins = ["swh.scheduler.pytest_plugin"] os.environ["LC_ALL"] = "C.UTF-8" diff --git a/mypy.ini b/mypy.ini index fc04b82..c84a7e7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,48 +1,41 @@ [mypy] namespace_packages = True warn_unused_ignores = True -# support for sqlalchemy magic: see https://github.com/dropbox/sqlalchemy-stubs -plugins = sqlmypy - - # 3rd party libraries without stubs (yet) [mypy-bs4.*] ignore_missing_imports = True [mypy-celery.*] ignore_missing_imports = True [mypy-debian.*] ignore_missing_imports = True [mypy-iso8601.*] ignore_missing_imports = True [mypy-launchpadlib.*] ignore_missing_imports = True [mypy-lazr.*] ignore_missing_imports = True [mypy-pkg_resources.*] ignore_missing_imports = True [mypy-pytest.*] ignore_missing_imports = True [mypy-pytest_postgresql.*] ignore_missing_imports = True [mypy-requests_mock.*] ignore_missing_imports = True -[mypy-testing.postgresql.*] -ignore_missing_imports = True - [mypy-urllib3.util.*] ignore_missing_imports = True [mypy-xmltodict.*] ignore_missing_imports = True diff --git a/requirements-test.txt b/requirements-test.txt index 68a1978..2407c40 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,5 +1,3 @@ pytest pytest-mock requests_mock -sqlalchemy-stubs -testing.postgresql diff --git a/requirements.txt b/requirements.txt index bd9bfc3..34bf339 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,8 @@ -SQLAlchemy -arrow python_debian requests setuptools xmltodict iso8601 beautifulsoup4 -pytz launchpadlib tenacity diff --git a/swh.lister.egg-info/PKG-INFO b/swh.lister.egg-info/PKG-INFO index d077351..2c9a712 100644 --- a/swh.lister.egg-info/PKG-INFO +++ b/swh.lister.egg-info/PKG-INFO @@ -1,275 +1,275 @@ Metadata-Version: 2.1 Name: swh.lister -Version: 0.7.1 +Version: 0.8.0 Summary: Software Heritage lister Home-page: https://forge.softwareheritage.org/diffusion/DLSGH/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-lister Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-lister/ Description: swh-lister 
========== This component from the Software Heritage stack aims to produce listings of software origins and their urls hosted on various public developer platforms or package managers. As these operations are quite similar, it provides a set of Python modules abstracting common software origins listing behaviors. It also provides several lister implementations, contained in the following Python modules: - `swh.lister.bitbucket` - `swh.lister.cgit` - `swh.lister.cran` - `swh.lister.debian` - `swh.lister.gitea` - `swh.lister.github` - `swh.lister.gitlab` - `swh.lister.gnu` - `swh.lister.launchpad` - `swh.lister.npm` - `swh.lister.packagist` - `swh.lister.phabricator` - `swh.lister.pypi` Dependencies ------------ All required dependencies can be found in the `requirements*.txt` files located at the root of the repository. Local deployment ---------------- ## lister configuration Each lister implemented so far by Software Heritage (`github`, `gitlab`, `debian`, `pypi`, `npm`) must be configured by following the instructions below (please note that you have to replace `` by one of the lister name introduced above). ### Preparation steps 1. `mkdir ~/.config/swh/ ~/.cache/swh/lister//` 2. create configuration file `~/.config/swh/lister_.yml` 3. Bootstrap the db instance schema ```lang=bash $ createdb lister- $ python3 -m swh.lister.cli --db-url postgres:///lister- ``` Note: This bootstraps a minimum data set needed for the lister to run. ### Configuration file sample Minimalistic configuration shared by all listers to add in file `~/.config/swh/lister_.yml`: ```lang=yml storage: cls: 'remote' args: url: 'http://localhost:5002/' scheduler: cls: 'remote' args: url: 'http://localhost:5008/' lister: cls: 'local' args: # see http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls db: 'postgresql:///lister-' credentials: [] cache_responses: True cache_dir: /home/user/.cache/swh/lister// ``` Note: This expects storage (5002) and scheduler (5008) services to run locally ## lister-github Once configured, you can execute a GitHub lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.github.tasks import range_github_lister logging.basicConfig(level=logging.DEBUG) range_github_lister(364, 365) ... 
``` ## lister-gitlab Once configured, you can execute a GitLab lister using the instructions detailed in the `python3` scripts below: ```lang=python import logging from swh.lister.gitlab.tasks import range_gitlab_lister logging.basicConfig(level=logging.DEBUG) range_gitlab_lister(1, 2, { 'instance': 'debian', 'api_baseurl': 'https://salsa.debian.org/api/v4', 'sort': 'asc', 'per_page': 20 }) ``` ```lang=python import logging from swh.lister.gitlab.tasks import full_gitlab_relister logging.basicConfig(level=logging.DEBUG) full_gitlab_relister({ 'instance': '0xacab', 'api_baseurl': 'https://0xacab.org/api/v4', 'sort': 'asc', 'per_page': 20 }) ``` ```lang=python import logging from swh.lister.gitlab.tasks import incremental_gitlab_lister logging.basicConfig(level=logging.DEBUG) incremental_gitlab_lister({ 'instance': 'freedesktop.org', 'api_baseurl': 'https://gitlab.freedesktop.org/api/v4', 'sort': 'asc', 'per_page': 20 }) ``` ## lister-debian Once configured, you can execute a Debian lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.debian.tasks import debian_lister logging.basicConfig(level=logging.DEBUG) debian_lister('Debian') ``` ## lister-pypi Once configured, you can execute a PyPI lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.pypi.tasks import pypi_lister logging.basicConfig(level=logging.DEBUG) pypi_lister() ``` ## lister-npm Once configured, you can execute an npm lister using the following instructions in a `python3` REPL: ```lang=python import logging from swh.lister.npm.tasks import npm_lister logging.basicConfig(level=logging.DEBUG) npm_lister() ``` ## lister-phabricator Once configured, you can execute a Phabricator lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.phabricator.tasks import incremental_phabricator_lister logging.basicConfig(level=logging.DEBUG) incremental_phabricator_lister(forge_url='https://forge.softwareheritage.org', api_token='XXXX') ``` ## lister-gnu Once configured, you can execute a GNU lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.gnu.tasks import gnu_lister logging.basicConfig(level=logging.DEBUG) gnu_lister() ``` ## lister-cran Once configured, you can execute a CRAN lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.cran.tasks import cran_lister logging.basicConfig(level=logging.DEBUG) cran_lister() ``` ## lister-cgit Once configured, you can execute a cgit lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.cgit.tasks import cgit_lister logging.basicConfig(level=logging.DEBUG) # simple cgit instance cgit_lister(url='https://git.kernel.org/') # cgit instance whose listed repositories differ from the base url cgit_lister(url='https://cgit.kde.org/', url_prefix='https://anongit.kde.org/') ``` ## lister-packagist Once configured, you can execute a Packagist lister using the following instructions in a `python3` script: ```lang=python import logging from swh.lister.packagist.tasks import packagist_lister logging.basicConfig(level=logging.DEBUG) packagist_lister() ``` Licensing --------- This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your 
option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. See top-level LICENSE file for the full text of the GNU General Public License along with this program. Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing diff --git a/swh.lister.egg-info/SOURCES.txt b/swh.lister.egg-info/SOURCES.txt index 9032074..59f47bb 100644 --- a/swh.lister.egg-info/SOURCES.txt +++ b/swh.lister.egg-info/SOURCES.txt @@ -1,197 +1,187 @@ .gitignore .pre-commit-config.yaml ACKNOWLEDGEMENTS CODE_OF_CONDUCT.md CONTRIBUTORS LICENSE MANIFEST.in Makefile README.md conftest.py mypy.ini pyproject.toml pytest.ini requirements-swh.txt requirements-test.txt requirements.txt setup.cfg setup.py tox.ini docs/.gitignore docs/Makefile docs/cli.rst docs/conf.py docs/index.rst docs/run_a_new_lister.rst docs/tutorial.rst docs/_static/.placeholder docs/_templates/.placeholder docs/images/new_base.png docs/images/new_bitbucket_lister.png docs/images/new_github_lister.png docs/images/old_github_lister.png sql/crawler.sql sql/pimp_db.sql swh/__init__.py swh.lister.egg-info/PKG-INFO swh.lister.egg-info/SOURCES.txt swh.lister.egg-info/dependency_links.txt swh.lister.egg-info/entry_points.txt swh.lister.egg-info/requires.txt swh.lister.egg-info/top_level.txt swh/lister/__init__.py swh/lister/cli.py swh/lister/pattern.py swh/lister/py.typed -swh/lister/pytest_plugin.py swh/lister/utils.py swh/lister/bitbucket/__init__.py swh/lister/bitbucket/lister.py swh/lister/bitbucket/tasks.py swh/lister/bitbucket/tests/__init__.py swh/lister/bitbucket/tests/test_lister.py swh/lister/bitbucket/tests/test_tasks.py swh/lister/bitbucket/tests/data/bb_api_repositories_page1.json swh/lister/bitbucket/tests/data/bb_api_repositories_page2.json swh/lister/cgit/__init__.py swh/lister/cgit/lister.py swh/lister/cgit/tasks.py swh/lister/cgit/tests/__init__.py swh/lister/cgit/tests/repo_list.txt swh/lister/cgit/tests/test_lister.py swh/lister/cgit/tests/test_tasks.py swh/lister/cgit/tests/data/https_git.baserock.org/cgit swh/lister/cgit/tests/data/https_git.eclipse.org/c swh/lister/cgit/tests/data/https_git.savannah.gnu.org/README swh/lister/cgit/tests/data/https_git.savannah.gnu.org/cgit swh/lister/cgit/tests/data/https_git.savannah.gnu.org/cgit_elisp-es.git swh/lister/cgit/tests/data/https_git.tizen/README swh/lister/cgit/tests/data/https_git.tizen/cgit swh/lister/cgit/tests/data/https_git.tizen/cgit,ofs=100 swh/lister/cgit/tests/data/https_git.tizen/cgit,ofs=50 swh/lister/cgit/tests/data/https_git.tizen/cgit_All-Projects swh/lister/cgit/tests/data/https_git.tizen/cgit_All-Users swh/lister/cgit/tests/data/https_git.tizen/cgit_Lock-Projects swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_alsa-scenario-scn-data-0-base swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_alsa-scenario-scn-data-0-mc1n2 swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_ap_samsung_audio-hal-e3250 swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_ap_samsung_audio-hal-e4x12 
swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_devices_nfc-plugin-nxp swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_intel_mfld_bootstub-mfld-blackbay swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_mtdev swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_opengl-es-virtual-drv swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_panda_libdrm swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_panda_libnl swh/lister/cgit/tests/data/https_git.tizen/cgit_adaptation_xorg_driver_xserver-xorg-misc swh/lister/cgit/tests/data/https_git.tizen/cgit_apps_core_preloaded_ug-setting-gallery-efl swh/lister/cgit/tests/data/https_git.tizen/cgit_apps_core_preloaded_ug-setting-homescreen-efl swh/lister/cgit/tests/data/https_jff.email/cgit -swh/lister/core/__init__.py -swh/lister/core/abstractattribute.py -swh/lister/core/lister_base.py -swh/lister/core/lister_transports.py -swh/lister/core/models.py -swh/lister/core/simple_lister.py -swh/lister/core/tests/__init__.py -swh/lister/core/tests/test_abstractattribute.py -swh/lister/core/tests/test_lister.py -swh/lister/core/tests/test_model.py swh/lister/cran/__init__.py swh/lister/cran/list_all_packages.R swh/lister/cran/lister.py swh/lister/cran/tasks.py swh/lister/cran/tests/__init__.py swh/lister/cran/tests/test_lister.py swh/lister/cran/tests/test_tasks.py swh/lister/cran/tests/data/list-r-packages.json swh/lister/debian/__init__.py swh/lister/debian/lister.py swh/lister/debian/tasks.py swh/lister/debian/tests/__init__.py swh/lister/debian/tests/test_lister.py swh/lister/debian/tests/test_tasks.py swh/lister/debian/tests/data/Sources_bullseye swh/lister/debian/tests/data/Sources_buster swh/lister/debian/tests/data/Sources_stretch swh/lister/gitea/__init__.py swh/lister/gitea/lister.py swh/lister/gitea/tasks.py swh/lister/gitea/tests/__init__.py swh/lister/gitea/tests/test_lister.py swh/lister/gitea/tests/test_tasks.py swh/lister/gitea/tests/data/https_try.gitea.io/repos_page1 swh/lister/gitea/tests/data/https_try.gitea.io/repos_page2 swh/lister/github/__init__.py swh/lister/github/lister.py swh/lister/github/tasks.py swh/lister/github/tests/__init__.py swh/lister/github/tests/test_lister.py swh/lister/github/tests/test_tasks.py swh/lister/gitlab/__init__.py swh/lister/gitlab/lister.py swh/lister/gitlab/tasks.py swh/lister/gitlab/tests/__init__.py swh/lister/gitlab/tests/test_lister.py swh/lister/gitlab/tests/test_tasks.py swh/lister/gitlab/tests/data/https_gite.lirmm.fr/api_response_page1.json swh/lister/gitlab/tests/data/https_gite.lirmm.fr/api_response_page2.json swh/lister/gitlab/tests/data/https_gite.lirmm.fr/api_response_page3.json swh/lister/gitlab/tests/data/https_gitlab.com/api_response_page1.json swh/lister/gnu/__init__.py swh/lister/gnu/lister.py swh/lister/gnu/tasks.py swh/lister/gnu/tree.py swh/lister/gnu/tests/__init__.py swh/lister/gnu/tests/test_lister.py swh/lister/gnu/tests/test_tasks.py swh/lister/gnu/tests/test_tree.py swh/lister/gnu/tests/data/tree.json swh/lister/gnu/tests/data/tree.min.json swh/lister/gnu/tests/data/https_ftp.gnu.org/tree.json.gz swh/lister/launchpad/__init__.py swh/lister/launchpad/lister.py swh/lister/launchpad/tasks.py swh/lister/launchpad/tests/__init__.py swh/lister/launchpad/tests/conftest.py swh/lister/launchpad/tests/test_lister.py swh/lister/launchpad/tests/test_tasks.py swh/lister/launchpad/tests/data/launchpad_response1.json swh/lister/launchpad/tests/data/launchpad_response2.json swh/lister/npm/__init__.py swh/lister/npm/lister.py 
swh/lister/npm/tasks.py swh/lister/npm/tests/test_lister.py swh/lister/npm/tests/test_tasks.py swh/lister/npm/tests/data/npm_full_page1.json swh/lister/npm/tests/data/npm_full_page2.json swh/lister/npm/tests/data/npm_incremental_page1.json swh/lister/npm/tests/data/npm_incremental_page2.json swh/lister/packagist/__init__.py swh/lister/packagist/lister.py -swh/lister/packagist/models.py swh/lister/packagist/tasks.py swh/lister/packagist/tests/__init__.py -swh/lister/packagist/tests/conftest.py swh/lister/packagist/tests/test_lister.py swh/lister/packagist/tests/test_tasks.py -swh/lister/packagist/tests/data/https_packagist.org/packages_list.json +swh/lister/packagist/tests/data/den1n_contextmenu.json +swh/lister/packagist/tests/data/ljjackson_linnworks.json +swh/lister/packagist/tests/data/lky_wx_article.json +swh/lister/packagist/tests/data/spryker-eco_computop-api.json swh/lister/phabricator/__init__.py swh/lister/phabricator/lister.py swh/lister/phabricator/tasks.py swh/lister/phabricator/tests/__init__.py swh/lister/phabricator/tests/test_lister.py swh/lister/phabricator/tests/test_tasks.py swh/lister/phabricator/tests/data/__init__.py swh/lister/phabricator/tests/data/phabricator_api_repositories_page1.json swh/lister/phabricator/tests/data/phabricator_api_repositories_page2.json swh/lister/pypi/__init__.py swh/lister/pypi/lister.py swh/lister/pypi/tasks.py swh/lister/pypi/tests/__init__.py swh/lister/pypi/tests/test_lister.py swh/lister/pypi/tests/test_tasks.py swh/lister/pypi/tests/data/https_pypi.org/simple swh/lister/tests/__init__.py swh/lister/tests/test_cli.py swh/lister/tests/test_pattern.py swh/lister/tests/test_utils.py \ No newline at end of file diff --git a/swh.lister.egg-info/requires.txt b/swh.lister.egg-info/requires.txt index a5a76a6..3e69fa1 100644 --- a/swh.lister.egg-info/requires.txt +++ b/swh.lister.egg-info/requires.txt @@ -1,20 +1,15 @@ -SQLAlchemy -arrow python_debian requests setuptools xmltodict iso8601 beautifulsoup4 -pytz launchpadlib tenacity swh.core[db]>=0.9 swh.scheduler>=0.8 [testing] pytest pytest-mock requests_mock -sqlalchemy-stubs -testing.postgresql diff --git a/swh/lister/bitbucket/__init__.py b/swh/lister/bitbucket/__init__.py index 372e8c0..207a35e 100644 --- a/swh/lister/bitbucket/__init__.py +++ b/swh/lister/bitbucket/__init__.py @@ -1,13 +1,12 @@ # Copyright (C) 2019-2021 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def register(): from .lister import BitbucketLister return { - "models": [], "lister": BitbucketLister, "task_modules": ["%s.tasks" % __name__], } diff --git a/swh/lister/cli.py b/swh/lister/cli.py index 650b4e0..770eeeb 100644 --- a/swh/lister/cli.py +++ b/swh/lister/cli.py @@ -1,151 +1,69 @@ -# Copyright (C) 2018-2020 The Software Heritage developers +# Copyright (C) 2018-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from copy import deepcopy import logging # WARNING: do not import unnecessary things here to keep cli startup time under # control import os import click from swh.core.cli import CONTEXT_SETTINGS from swh.core.cli import swh as swh_cli_group -from swh.lister import LISTERS, SUPPORTED_LISTERS, get_lister +from swh.lister import SUPPORTED_LISTERS, get_lister logger = logging.getLogger(__name__) -# the key in this dict is the suffix 
used to match new task-type to be added. -# For example for a task which function name is "list_gitlab_full', the default -# value used when inserting a new task-type in the scheduler db will be the one -# under the 'full' key below (because it matches xxx_full). -DEFAULT_TASK_TYPE = { - "full": { # for tasks like 'list_xxx_full()' - "default_interval": "90 days", - "min_interval": "90 days", - "max_interval": "90 days", - "backoff_factor": 1, - }, - "*": { # value if not suffix matches - "default_interval": "1 day", - "min_interval": "1 day", - "max_interval": "1 day", - "backoff_factor": 1, - }, -} - - @swh_cli_group.group(name="lister", context_settings=CONTEXT_SETTINGS) @click.option( "--config-file", "-C", default=None, type=click.Path(exists=True, dir_okay=False,), help="Configuration file.", ) -@click.option( - "--db-url", - "-d", - default=None, - help="SQLAlchemy DB URL; see " - "", -) # noqa @click.pass_context -def lister(ctx, config_file, db_url): +def lister(ctx, config_file): """Software Heritage Lister tools.""" from swh.core import config ctx.ensure_object(dict) if not config_file: config_file = os.environ.get("SWH_CONFIG_FILENAME") conf = config.read(config_file) - if db_url: - conf["lister"] = {"cls": "local", "args": {"db": db_url}} - ctx.obj["config"] = conf - - -@lister.command(name="db-init", context_settings=CONTEXT_SETTINGS) -@click.option( - "--drop-tables", - "-D", - is_flag=True, - default=False, - help="Drop tables before creating the database schema", -) -@click.pass_context -def db_init(ctx, drop_tables): - """Initialize the database model for given listers. - """ - from sqlalchemy import create_engine - - from swh.lister.core.models import initialize - - cfg = ctx.obj["config"] - lister_cfg = cfg["lister"] - if lister_cfg["cls"] != "local": - click.echo("A local lister configuration is required") - ctx.exit(1) - - db_url = lister_cfg["args"]["db"] - db_engine = create_engine(db_url) - - registry = {} - for lister, entrypoint in LISTERS.items(): - logger.info("Loading lister %s", lister) - registry[lister] = entrypoint.load()() - - logger.info("Initializing database") - initialize(db_engine, drop_tables) - - for lister, entrypoint in LISTERS.items(): - registry_entry = registry[lister] - init_hook = registry_entry.get("init") - if callable(init_hook): - logger.info("Calling init hook for %s", lister) - init_hook(db_engine) + ctx.obj["config"] = conf @lister.command( name="run", context_settings=CONTEXT_SETTINGS, help="Trigger a full listing run for a particular forge " "instance. 
The output of this listing results in " '"oneshot" tasks in the scheduler db with a priority ' "defined by the user", ) @click.option( "--lister", "-l", help="Lister to run", type=click.Choice(SUPPORTED_LISTERS) ) -@click.option( - "--priority", - "-p", - default="high", - type=click.Choice(["high", "medium", "low"]), - help="Task priority for the listed repositories to ingest", -) -@click.option("--legacy", help="Allow unported lister to run with such flag") @click.argument("options", nargs=-1) @click.pass_context -def run(ctx, lister, priority, options, legacy): +def run(ctx, lister, options): from swh.scheduler.cli.utils import parse_options config = deepcopy(ctx.obj["config"]) if options: config.update(parse_options(options)[1]) - if legacy: - config["priority"] = priority - config["policy"] = "oneshot" - get_lister(lister, **config).run() if __name__ == "__main__": lister() diff --git a/swh/lister/core/__init__.py b/swh/lister/core/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/swh/lister/core/abstractattribute.py b/swh/lister/core/abstractattribute.py deleted file mode 100644 index 01eb84a..0000000 --- a/swh/lister/core/abstractattribute.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) 2017 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - - -class AbstractAttribute: - """AbstractAttributes in a base class must be overridden by the subclass. - - It's like the :func:`abc.abstractmethod` decorator, but for things that - are explicitly attributes/properties, not methods, without the need for - empty method def boilerplate. Like abc.abstractmethod, the class containing - AbstractAttributes must inherit from :class:`abc.ABC` or use the - :class:`abc.ABCMeta` metaclass. - - Usage example:: - - import abc - class ClassContainingAnAbstractAttribute(abc.ABC): - foo: Union[AbstractAttribute, Any] = \ - AbstractAttribute('docstring for foo') - - """ - - __isabstractmethod__ = True - - def __init__(self, docstring=None): - if docstring is not None: - self.__doc__ = "AbstractAttribute: " + docstring diff --git a/swh/lister/core/lister_base.py b/swh/lister/core/lister_base.py deleted file mode 100644 index ca7d65c..0000000 --- a/swh/lister/core/lister_base.py +++ /dev/null @@ -1,508 +0,0 @@ -# Copyright (C) 2015-2020 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import abc -import datetime -import gzip -import json -import logging -import os -import re -import time -from typing import Any, Dict, List, Optional, Type, Union - -from requests import Response -from sqlalchemy import create_engine, func -from sqlalchemy.orm import sessionmaker - -from swh.core import config -from swh.core.utils import grouper -from swh.scheduler import get_scheduler, utils - -from .abstractattribute import AbstractAttribute - -logger = logging.getLogger(__name__) - - -def utcnow(): - return datetime.datetime.now(tz=datetime.timezone.utc) - - -class FetchError(RuntimeError): - def __init__(self, response): - self.response = response - - def __str__(self): - return repr(self.response) - - -DEFAULT_CONFIG = { - "scheduler": {"cls": "memory"}, - "lister": {"cls": "local", "args": {"db": "postgresql:///lister",},}, - "credentials": {}, - "cache_responses": False, -} - - -class ListerBase(abc.ABC): - """Lister core base class. 
- Generally a source code hosting service provides an API endpoint - for listing the set of stored repositories. A Lister is the discovery - service responsible for finding this list, all at once or sequentially - by parts, and queueing local tasks to fetch and ingest the referenced - repositories. - - The core method in this class is ingest_data. Any subclasses should be - calling this method one or more times to fetch and ingest data from API - endpoints. See swh.lister.core.lister_base.IndexingLister for - example usage. - - This class cannot be instantiated. Any instantiable Lister descending - from ListerBase must provide at least the required overrides. - (see member docstrings for details): - - Required Overrides: - MODEL - def transport_request - def transport_response_to_string - def transport_response_simplified - def transport_quota_check - - Optional Overrides: - def filter_before_inject - def is_within_bounds - """ - - MODEL = AbstractAttribute( - "Subclass type (not instance) of swh.lister.core.models.ModelBase " - "customized for a specific service." - ) # type: Union[AbstractAttribute, Type[Any]] - LISTER_NAME = AbstractAttribute( - "Lister's name" - ) # type: Union[AbstractAttribute, str] - - def transport_request(self, identifier): - """Given a target endpoint identifier to query, try once to request it. - - Implementation of this method determines the network request protocol. - - Args: - identifier (string): unique identifier for an endpoint query. - e.g. If the service indexes lists of repositories by date and - time of creation, this might be that as a formatted string. Or - it might be an integer UID. Or it might be nothing. - It depends on what the service needs. - Returns: - the entire request response - Raises: - Will catch internal transport-dependent connection exceptions and - raise swh.lister.core.lister_base.FetchError instead. Other - non-connection exceptions should propagate unchanged. - """ - pass - - def transport_response_to_string(self, response): - """Convert the server response into a formatted string for logging. - - Implementation of this method depends on the shape of the network - response object returned by the transport_request method. - - Args: - response: the server response - Returns: - a pretty string of the response - """ - pass - - def transport_response_simplified(self, response): - """Convert the server response into list of a dict for each repo in the - response, mapping columns in the lister's MODEL class to repo data. - - Implementation of this method depends on the server API spec and the - shape of the network response object returned by the transport_request - method. - - Args: - response: response object from the server. - Returns: - list of repo MODEL dicts - ( eg. [{'uid': r['id'], etc.} for r in response.json()] ) - """ - pass - - def transport_quota_check(self, response): - """Check server response to see if we're hitting request rate limits. - - Implementation of this method depends on the server communication - protocol and API spec and the shape of the network response object - returned by the transport_request method. - - Args: - response (session response): complete API query response - Returns: - 1) must retry request? True/False - 2) seconds to delay if True - """ - pass - - def filter_before_inject(self, models_list: List[Dict]) -> List[Dict]: - """Filter models_list entries prior to injection in the db. - This is ran directly after `transport_response_simplified`. - - Default implementation is to have no filtering. 
- - Args: - models_list: list of dicts returned by - transport_response_simplified. - Returns: - models_list with entries changed according to custom logic. - - """ - return models_list - - def do_additional_checks(self, models_list: List[Dict]) -> List[Dict]: - """Execute some additional checks on the model list (after the - filtering). - - Default implementation is to run no check at all and to return - the input as is. - - Args: - models_list: list of dicts returned by - transport_response_simplified. - - Returns: - models_list with entries if checks ok, False otherwise - - """ - return models_list - - def is_within_bounds( - self, inner: int, lower: Optional[int] = None, upper: Optional[int] = None - ) -> bool: - """See if a sortable value is inside the range [lower,upper]. - - MAY BE OVERRIDDEN, for example if the server indexable* key is - technically sortable but not automatically so. - - * - ( see: swh.lister.core.indexing_lister.IndexingLister ) - - Args: - inner (sortable type): the value being checked - lower (sortable type): optional lower bound - upper (sortable type): optional upper bound - Returns: - whether inner is confined by the optional lower and upper bounds - """ - try: - if lower is None and upper is None: - return True - elif lower is None: - ret = inner <= upper # type: ignore - elif upper is None: - ret = inner >= lower - else: - ret = lower <= inner <= upper - - self.string_pattern_check(inner, lower, upper) - except Exception as e: - logger.error( - str(e) - + ": %s, %s, %s" - % ( - ("inner=%s%s" % (type(inner), inner)), - ("lower=%s%s" % (type(lower), lower)), - ("upper=%s%s" % (type(upper), upper)), - ) - ) - raise - - return ret - - # You probably don't need to override anything below this line. - - INITIAL_BACKOFF = 10 - MAX_RETRIES = 7 - CONN_SLEEP = 10 - - def __init__(self, override_config=None): - self.backoff = self.INITIAL_BACKOFF - self.config = config.load_from_envvar(DEFAULT_CONFIG) - if self.config["cache_responses"]: - cache_dir = self.config.get( - "cache_dir", f"~/.cache/swh/lister/{self.LISTER_NAME}" - ) - self.config["cache_dir"] = os.path.expanduser(cache_dir) - config.prepare_folders(self.config, "cache_dir") - - if override_config: - self.config.update(override_config) - - logger.debug("%s CONFIG=%s" % (self, self.config)) - self.scheduler = get_scheduler(**self.config["scheduler"]) - self.db_engine = create_engine(self.config["lister"]["args"]["db"]) - self.mk_session = sessionmaker(bind=self.db_engine) - self.db_session = self.mk_session() - - def reset_backoff(self): - """Reset exponential backoff timeout to initial level.""" - self.backoff = self.INITIAL_BACKOFF - - def back_off(self) -> int: - """Get next exponential backoff timeout.""" - ret = self.backoff - self.backoff *= 10 - return ret - - def safely_issue_request(self, identifier: int) -> Optional[Response]: - """Make network request with retries, rate quotas, and response logs. - - Protocol is handled by the implementation of the transport_request - method. 
- - Args: - identifier: resource identifier - Returns: - server response - """ - retries_left = self.MAX_RETRIES - do_cache = self.config["cache_responses"] - r = None - while retries_left > 0: - try: - r = self.transport_request(identifier) - except FetchError: - # network-level connection error, try again - logger.warning( - "connection error on %s: sleep for %d seconds" - % (identifier, self.CONN_SLEEP) - ) - time.sleep(self.CONN_SLEEP) - retries_left -= 1 - continue - - if do_cache: - self.save_response(r) - - # detect throttling - must_retry, delay = self.transport_quota_check(r) - if must_retry: - logger.warning( - "rate limited on %s: sleep for %f seconds" % (identifier, delay) - ) - time.sleep(delay) - else: # request ok - break - - retries_left -= 1 - - if not retries_left: - logger.warning("giving up on %s: max retries exceeded" % identifier) - - return r - - def db_query_equal(self, key: Any, value: Any): - """Look in the db for a row with key == value - - Args: - key: column key to look at - value: value to look for in that column - Returns: - sqlalchemy.ext.declarative.declarative_base object - with the given key == value - """ - if isinstance(key, str): - key = self.MODEL.__dict__[key] - return self.db_session.query(self.MODEL).filter(key == value).first() - - def winnow_models(self, mlist, key, to_remove): - """Given a list of models, remove any with matching - some member of a list of values. - - Args: - mlist (list of model rows): the initial list of models - key (column): the column to filter on - to_remove (list): if anything in mlist has column equal to - one of the values in to_remove, it will be removed from the - result - Returns: - A list of model rows starting from mlist minus any matching rows - """ - if isinstance(key, str): - key = self.MODEL.__dict__[key] - - if to_remove: - return mlist.filter(~key.in_(to_remove)).all() - else: - return mlist.all() - - def db_num_entries(self): - """Return the known number of entries in the lister db""" - return self.db_session.query(func.count("*")).select_from(self.MODEL).scalar() - - def db_inject_repo(self, model_dict): - """Add/update a new repo to the db and mark it last_seen now. - - Args: - model_dict: dictionary mapping model keys to values - - Returns: - new or updated sqlalchemy.ext.declarative.declarative_base - object associated with the injection - - """ - sql_repo = self.db_query_equal("uid", model_dict["uid"]) - - if not sql_repo: - sql_repo = self.MODEL(**model_dict) - self.db_session.add(sql_repo) - else: - for k in model_dict: - setattr(sql_repo, k, model_dict[k]) - sql_repo.last_seen = utcnow() - - return sql_repo - - def task_dict(self, origin_type: str, origin_url: str, **kwargs) -> Dict[str, Any]: - """Return special dict format for the tasks list - - Args: - origin_type (string) - origin_url (string) - Returns: - the same information in a different form - """ - logger.debug("origin-url: %s, type: %s", origin_url, origin_type) - _type = "load-%s" % origin_type - _policy = kwargs.get("policy", "recurring") - priority = kwargs.get("priority") - kw = {"priority": priority} if priority else {} - return utils.create_task_dict(_type, _policy, url=origin_url, **kw) - - def string_pattern_check(self, a, b, c=None): - """When comparing indexable types in is_within_bounds, complex strings - may not be allowed to differ in basic structure. If they do, it - could be a sign of not understanding the data well. 
For instance, - an ISO 8601 time string cannot be compared against its urlencoded - equivalent, but this is an easy mistake to accidentally make. This - method acts as a friendly sanity check. - - Args: - a (string): inner component of the is_within_bounds method - b (string): lower component of the is_within_bounds method - c (string): upper component of the is_within_bounds method - Returns: - nothing - Raises: - TypeError if strings a, b, and c don't conform to the same basic - pattern. - """ - if isinstance(a, str): - a_pattern = re.sub("[a-zA-Z0-9]", "[a-zA-Z0-9]", re.escape(a)) - if ( - isinstance(b, str) - and (re.match(a_pattern, b) is None) - or isinstance(c, str) - and (re.match(a_pattern, c) is None) - ): - logger.debug(a_pattern) - raise TypeError("incomparable string patterns detected") - - def inject_repo_data_into_db(self, models_list: List[Dict]) -> Dict: - """Inject data into the db. - - Args: - models_list: list of dicts mapping keys from the db model - for each repo to be injected - Returns: - dict of uid:sql_repo pairs - - """ - injected_repos = {} - for m in models_list: - injected_repos[m["uid"]] = self.db_inject_repo(m) - return injected_repos - - def schedule_missing_tasks( - self, models_list: List[Dict], injected_repos: Dict - ) -> None: - """Schedule any newly created db entries that do not have been - scheduled yet. - - Args: - models_list: List of dicts mapping keys in the db model - for each repo - injected_repos: Dict of uid:sql_repo pairs that have just - been created - - Returns: - Nothing. (Note that it Modifies injected_repos to set the new - task_id). - - """ - tasks = {} - - def _task_key(m): - return "%s-%s" % (m["type"], json.dumps(m["arguments"], sort_keys=True)) - - for m in models_list: - ir = injected_repos[m["uid"]] - if not ir.task_id: - # Patching the model instance to add the policy/priority task - # scheduling - if "policy" in self.config: - m["policy"] = self.config["policy"] - if "priority" in self.config: - m["priority"] = self.config["priority"] - task_dict = self.task_dict(**m) - task_dict.setdefault("retries_left", 3) - tasks[_task_key(task_dict)] = (ir, m, task_dict) - - gen_tasks = (task_dicts for (_, _, task_dicts) in tasks.values()) - for grouped_tasks in grouper(gen_tasks, n=1000): - new_tasks = self.scheduler.create_tasks(list(grouped_tasks)) - for task in new_tasks: - ir, m, _ = tasks[_task_key(task)] - ir.task_id = task["id"] - - def ingest_data(self, identifier: int, checks: bool = False): - """The core data fetch sequence. Request server endpoint. Simplify and - filter response list of repositories. Inject repo information into - local db. Queue loader tasks for linked repositories. - - Args: - identifier: Resource identifier. - checks (bool): Additional checks required - """ - # Request (partial?) list of repositories info - response = self.safely_issue_request(identifier) - if not response: - return response, [] - models_list = self.transport_response_simplified(response) - models_list = self.filter_before_inject(models_list) - if checks: - models_list = self.do_additional_checks(models_list) - if not models_list: - return response, [] - # inject into local db - injected = self.inject_repo_data_into_db(models_list) - # queue workers - self.schedule_missing_tasks(models_list, injected) - return response, injected - - def save_response(self, response): - """Log the response from a server request to a cache dir. 
- - Args: - response: full server response - cache_dir: system path for cache dir - Returns: - nothing - """ - datepath = utcnow().isoformat() - - fname = os.path.join(self.config["cache_dir"], datepath + ".gz",) - - with gzip.open(fname, "w") as f: - f.write(bytes(self.transport_response_to_string(response), "UTF-8")) diff --git a/swh/lister/core/lister_transports.py b/swh/lister/core/lister_transports.py deleted file mode 100644 index f5f9e80..0000000 --- a/swh/lister/core/lister_transports.py +++ /dev/null @@ -1,233 +0,0 @@ -# Copyright (C) 2017-2018 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import abc -from datetime import datetime -from email.utils import parsedate -import logging -from pprint import pformat -import random -from typing import Any, Dict, List, Optional, Union - -import requests -from requests import Response -import xmltodict - -from swh.lister import USER_AGENT_TEMPLATE, __version__ - -from .abstractattribute import AbstractAttribute -from .lister_base import FetchError - -logger = logging.getLogger(__name__) - - -class ListerHttpTransport(abc.ABC): - """Use the Requests library for making Lister endpoint requests. - - To be used in conjunction with ListerBase or a subclass of it. - """ - - DEFAULT_URL = None # type: Optional[str] - PATH_TEMPLATE = AbstractAttribute( - "string containing a python string format pattern that produces" - " the API endpoint path for listing stored repositories when given" - ' an index, e.g., "/repositories?after=%s". To be implemented in' - " the API-specific class inheriting this." - ) # type: Union[AbstractAttribute, Optional[str]] - - EXPECTED_STATUS_CODES = (200, 429, 403, 404) - - def request_headers(self) -> Dict[str, Any]: - """Returns dictionary of any request headers needed by the server. - - MAY BE OVERRIDDEN if request headers are needed. - """ - return {"User-Agent": USER_AGENT_TEMPLATE % self.lister_version} - - def request_instance_credentials(self) -> List[Dict[str, Any]]: - """Returns dictionary of any credentials configuration needed by the - forge instance to list. - - The 'credentials' configuration is expected to be a dict of multiple - levels. The first level is the lister's name, the second is the - lister's instance name, which value is expected to be a list of - credential structures (typically a couple username/password). - - For example:: - - credentials: - github: # github lister - github: # has only one instance (so far) - - username: some - password: somekey - - username: one - password: onekey - - ... - gitlab: # gitlab lister - riseup: # has many instances - - username: someone - password: ... - - ... - gitlab: - - username: someone - password: ... - - ... - - Returns: - list of credential dicts for the current lister. - - """ - all_creds = self.config.get("credentials") # type: ignore - if not all_creds: - return [] - lister_creds = all_creds.get(self.LISTER_NAME, {}) # type: ignore - creds = lister_creds.get(self.instance, []) # type: ignore - return creds - - def request_uri(self, identifier: str) -> str: - """Get the full request URI given the transport_request identifier. - - MAY BE OVERRIDDEN if something more complex than the PATH_TEMPLATE is - required. 
- """ - path = self.PATH_TEMPLATE % identifier # type: ignore - return self.url + path - - def request_params(self, identifier: str) -> Dict[str, Any]: - """Get the full parameters passed to requests given the - transport_request identifier. - - This uses credentials if any are provided (see - request_instance_credentials). - - MAY BE OVERRIDDEN if something more complex than the request headers - is needed. - - """ - params = {} - params["headers"] = self.request_headers() or {} - creds = self.request_instance_credentials() - if not creds: - return params - auth = random.choice(creds) if creds else None - if auth: - params["auth"] = ( - auth["username"], # type: ignore - auth["password"], - ) - return params - - def transport_quota_check(self, response): - """Implements ListerBase.transport_quota_check with standard 429 - code check for HTTP with Requests library. - - MAY BE OVERRIDDEN if the server notifies about rate limits in a - non-standard way that doesn't use HTTP 429 and the Retry-After - response header. ( https://tools.ietf.org/html/rfc6585#section-4 ) - - """ - if response.status_code == 429: # HTTP too many requests - retry_after = response.headers.get("Retry-After", self.back_off()) - try: - # might be seconds - return True, float(retry_after) - except Exception: - # might be http-date - at_date = datetime(*parsedate(retry_after)[:6]) - from_now = (at_date - datetime.today()).total_seconds() + 5 - return True, max(0, from_now) - else: # response ok - self.reset_backoff() - return False, 0 - - def __init__(self, url=None): - if not url: - url = self.config.get("url") - if not url: - url = self.DEFAULT_URL - if not url: - raise NameError("HTTP Lister Transport requires an url.") - self.url = url # eg. 'https://api.github.com' - self.session = requests.Session() - self.lister_version = __version__ - - def _transport_action(self, identifier: str, method: str = "get") -> Response: - """Permit to ask information to the api prior to actually executing - query. - - """ - path = self.request_uri(identifier) - params = self.request_params(identifier) - - logger.debug("path: %s", path) - logger.debug("params: %s", params) - logger.debug("method: %s", method) - try: - if method == "head": - response = self.session.head(path, **params) - else: - response = self.session.get(path, **params) - except requests.exceptions.ConnectionError as e: - logger.warning("Failed to fetch %s: %s", path, e) - raise FetchError(e) - else: - if response.status_code not in self.EXPECTED_STATUS_CODES: - raise FetchError(response) - return response - - def transport_head(self, identifier: str) -> Response: - """Retrieve head information on api. - - """ - return self._transport_action(identifier, method="head") - - def transport_request(self, identifier: str) -> Response: - """Implements ListerBase.transport_request for HTTP using Requests. - - Retrieve get information on api. - - """ - return self._transport_action(identifier) - - def transport_response_to_string(self, response: Response) -> str: - """Implements ListerBase.transport_response_to_string for HTTP given - Requests responses. - """ - s = pformat(response.request.path_url) - s += "\n#\n" + pformat(response.request.headers) - s += "\n#\n" + pformat(response.status_code) - s += "\n#\n" + pformat(response.headers) - s += "\n#\n" - try: # json? - s += pformat(response.json()) - except Exception: # not json - try: # xml? 
- s += pformat(xmltodict.parse(response.text)) - except Exception: # not xml - s += pformat(response.text) - return s - - -class ListerOnePageApiTransport(ListerHttpTransport): - """Leverage requests library to retrieve basic html page and parse - result. - - To be used in conjunction with ListerBase or a subclass of it. - - """ - - PAGE = AbstractAttribute( - "URL of the API's unique page to retrieve and parse " "for information" - ) # type: Union[AbstractAttribute, str] - PATH_TEMPLATE = None # we do not use it - - def __init__(self, url=None): - self.session = requests.Session() - self.lister_version = __version__ - - def request_uri(self, _): - """Get the full request URI given the transport_request identifier. - - """ - return self.PAGE diff --git a/swh/lister/core/models.py b/swh/lister/core/models.py deleted file mode 100644 index 8d3d381..0000000 --- a/swh/lister/core/models.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (C) 2015-2019 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import abc -from datetime import datetime -import logging -from typing import Type, Union - -from sqlalchemy import Column, DateTime, Integer, String -from sqlalchemy.ext.declarative import DeclarativeMeta, declarative_base - -from .abstractattribute import AbstractAttribute - -SQLBase = declarative_base() - - -logger = logging.getLogger(__name__) - - -class ABCSQLMeta(abc.ABCMeta, DeclarativeMeta): - pass - - -class ModelBase(SQLBase, metaclass=ABCSQLMeta): - """a common repository""" - - __abstract__ = True - __tablename__ = AbstractAttribute # type: Union[Type[AbstractAttribute], str] - - uid = AbstractAttribute( - "Column(, primary_key=True)" - ) # type: Union[AbstractAttribute, Column] - - name = Column(String, index=True) - full_name = Column(String, index=True) - html_url = Column(String) - origin_url = Column(String) - origin_type = Column(String) - - last_seen = Column(DateTime, nullable=False) - - task_id = Column(Integer) - - def __init__(self, **kw): - kw["last_seen"] = datetime.now() - super().__init__(**kw) - - -class IndexingModelBase(ModelBase, metaclass=ABCSQLMeta): - __abstract__ = True - __tablename__ = AbstractAttribute # type: Union[Type[AbstractAttribute], str] - - # The value used for sorting, segmenting, or api query paging, - # because uids aren't always sequential. - indexable = AbstractAttribute( - "Column(, index=True)" - ) # type: Union[AbstractAttribute, Column] - - -def initialize(db_engine, drop_tables=False, **kwargs): - """Default database initialization function for a lister. - - Typically called from the lister's initialization hook. - - Args: - models (list): list of SQLAlchemy tables/models to drop/create. - db_engine (): the SQLAlchemy DB engine. - drop_tables (bool): if True, tables will be dropped before - (re)creating them. 
- """ - if drop_tables: - logger.info("Dropping tables") - SQLBase.metadata.drop_all(db_engine, checkfirst=True) - - logger.info("Creating tables") - SQLBase.metadata.create_all(db_engine, checkfirst=True) diff --git a/swh/lister/core/simple_lister.py b/swh/lister/core/simple_lister.py deleted file mode 100644 index d690b2d..0000000 --- a/swh/lister/core/simple_lister.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright (C) 2018-2019 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import logging -from typing import Any, List - -from swh.core import utils - -from .lister_base import ListerBase - -logger = logging.getLogger(__name__) - - -class SimpleLister(ListerBase): - """Lister* intermediate class for any service that follows the simple, - 'list in oneshot information' pattern. - - - Client sends a request to list repositories in oneshot - - - Client receives structured (json/xml/etc) response with - information and stores those in db - - """ - - flush_packet_db = 2 - """Number of iterations in-between write flushes of lister repositories to - db (see fn:`ingest_data`). - """ - - def list_packages(self, response: Any) -> List[Any]: - """Listing packages method. - - """ - pass - - def ingest_data(self, identifier, checks=False): - """Rework the base ingest_data. - Request server endpoint which gives all in one go. - - Simplify and filter response list of repositories. Inject - repo information into local db. Queue loader tasks for - linked repositories. - - Args: - identifier: Resource identifier (unused) - checks (bool): Additional checks required (unused) - - """ - response = self.safely_issue_request(identifier) - response = self.list_packages(response) - if not response: - return response, [] - models_list = self.transport_response_simplified(response) - models_list = self.filter_before_inject(models_list) - all_injected = [] - for i, models in enumerate(utils.grouper(models_list, n=100), start=1): - models = list(models) - logging.debug("models: %s" % len(models)) - # inject into local db - injected = self.inject_repo_data_into_db(models) - # queue workers - self.schedule_missing_tasks(models, injected) - all_injected.append(injected) - if (i % self.flush_packet_db) == 0: - logger.debug("Flushing updates at index %s", i) - self.db_session.commit() - self.db_session = self.mk_session() - - return response, all_injected - - def transport_response_simplified(self, response): - """Transform response to list for model manipulation - - """ - return [self.get_model_from_repo(repo_name) for repo_name in response] - - def run(self): - """Query the server which answers in one query. Stores the - information, dropping actual redundant information we - already have. 
- - Returns: - nothing - - """ - dump_not_used_identifier = 0 - response, injected_repos = self.ingest_data(dump_not_used_identifier) - if not response and not injected_repos: - logging.info("No response from api server, stopping") - status = "uneventful" - else: - status = "eventful" - - return {"status": status} diff --git a/swh/lister/core/tests/__init__.py b/swh/lister/core/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/swh/lister/core/tests/test_abstractattribute.py b/swh/lister/core/tests/test_abstractattribute.py deleted file mode 100644 index d804f21..0000000 --- a/swh/lister/core/tests/test_abstractattribute.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) 2017 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import abc -from typing import Any -import unittest - -from swh.lister.core.abstractattribute import AbstractAttribute - - -class BaseClass(abc.ABC): - v1 = AbstractAttribute # type: Any - v2 = AbstractAttribute() # type: Any - v3 = AbstractAttribute("changed docstring") # type: Any - v4 = "qux" - - -class BadSubclass1(BaseClass): - pass - - -class BadSubclass2(BaseClass): - v1 = "foo" - v2 = "bar" - - -class BadSubclass3(BaseClass): - v2 = "bar" - v3 = "baz" - - -class GoodSubclass(BaseClass): - v1 = "foo" - v2 = "bar" - v3 = "baz" - - -class TestAbstractAttributes(unittest.TestCase): - def test_aa(self): - with self.assertRaises(TypeError): - BaseClass() - - with self.assertRaises(TypeError): - BadSubclass1() - - with self.assertRaises(TypeError): - BadSubclass2() - - with self.assertRaises(TypeError): - BadSubclass3() - - self.assertIsInstance(GoodSubclass(), GoodSubclass) - gsc = GoodSubclass() - - self.assertEqual(gsc.v1, "foo") - self.assertEqual(gsc.v2, "bar") - self.assertEqual(gsc.v3, "baz") - self.assertEqual(gsc.v4, "qux") - - def test_aa_docstrings(self): - self.assertEqual(BaseClass.v1.__doc__, AbstractAttribute.__doc__) - self.assertEqual(BaseClass.v2.__doc__, AbstractAttribute.__doc__) - self.assertEqual(BaseClass.v3.__doc__, "AbstractAttribute: changed docstring") diff --git a/swh/lister/core/tests/test_lister.py b/swh/lister/core/tests/test_lister.py deleted file mode 100644 index cb6cf2b..0000000 --- a/swh/lister/core/tests/test_lister.py +++ /dev/null @@ -1,453 +0,0 @@ -# Copyright (C) 2019 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import abc -import datetime -import time -from typing import Any, Callable, Optional, Pattern, Type, Union -from unittest import TestCase -from unittest.mock import Mock, patch - -import requests_mock -from sqlalchemy import create_engine - -import swh.lister -from swh.lister.core.abstractattribute import AbstractAttribute -from swh.lister.tests.test_utils import init_db - - -def noop(*args, **kwargs): - pass - - -def test_version_generation(): - assert ( - swh.lister.__version__ != "devel" - ), "Make sure swh.lister is installed (e.g. pip install -e .)" - - -class HttpListerTesterBase(abc.ABC): - """Testing base class for listers. - This contains methods for both :class:`HttpSimpleListerTester` and - :class:`HttpListerTester`. - - See :class:`swh.lister.gitlab.tests.test_lister` for an example of how - to customize for a specific listing service. 
- - """ - - Lister = AbstractAttribute( - "Lister class to test" - ) # type: Union[AbstractAttribute, Type[Any]] - lister_subdir = AbstractAttribute( - "bitbucket, github, etc." - ) # type: Union[AbstractAttribute, str] - good_api_response_file = AbstractAttribute( - "Example good response body" - ) # type: Union[AbstractAttribute, str] - LISTER_NAME = "fake-lister" - - # May need to override this if the headers are used for something - def response_headers(self, request): - return {} - - # May need to override this if the server uses non-standard rate limiting - # method. - # Please keep the requested retry delay reasonably low. - def mock_rate_quota(self, n, request, context): - self.rate_limit += 1 - context.status_code = 429 - context.headers["Retry-After"] = "1" - return '{"error":"dummy"}' - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.rate_limit = 1 - self.response = None - self.fl = None - self.helper = None - self.scheduler_tasks = [] - if self.__class__ != HttpListerTesterBase: - self.run = TestCase.run.__get__(self, self.__class__) - else: - self.run = noop - - def mock_limit_n_response(self, n, request, context): - self.fl.reset_backoff() - if self.rate_limit <= n: - return self.mock_rate_quota(n, request, context) - else: - return self.mock_response(request, context) - - def mock_limit_twice_response(self, request, context): - return self.mock_limit_n_response(2, request, context) - - def get_api_response(self, identifier): - fl = self.get_fl() - if self.response is None: - self.response = fl.safely_issue_request(identifier) - return self.response - - def get_fl(self, override_config=None): - """Retrieve an instance of fake lister (fl). - - """ - if override_config or self.fl is None: - self.fl = self.Lister( - url="https://fakeurl", override_config=override_config - ) - self.fl.INITIAL_BACKOFF = 1 - - self.fl.reset_backoff() - self.scheduler_tasks = [] - return self.fl - - def disable_scheduler(self, fl): - fl.schedule_missing_tasks = Mock(return_value=None) - - def mock_scheduler(self, fl): - def _create_tasks(tasks): - task_id = 0 - current_nb_tasks = len(self.scheduler_tasks) - if current_nb_tasks > 0: - task_id = self.scheduler_tasks[-1]["id"] + 1 - for task in tasks: - scheduler_task = dict(task) - scheduler_task.update( - { - "status": "next_run_not_scheduled", - "retries_left": 0, - "priority": None, - "id": task_id, - "current_interval": datetime.timedelta(days=64), - } - ) - self.scheduler_tasks.append(scheduler_task) - task_id = task_id + 1 - return self.scheduler_tasks[current_nb_tasks:] - - def _disable_tasks(task_ids): - for task_id in task_ids: - self.scheduler_tasks[task_id]["status"] = "disabled" - - fl.scheduler.create_tasks = Mock(wraps=_create_tasks) - fl.scheduler.disable_tasks = Mock(wraps=_disable_tasks) - - def disable_db(self, fl): - fl.winnow_models = Mock(return_value=[]) - fl.db_inject_repo = Mock(return_value=fl.MODEL()) - fl.disable_deleted_repo_tasks = Mock(return_value=None) - - def init_db(self, db, model): - engine = create_engine(db.url()) - model.metadata.create_all(engine) - - @requests_mock.Mocker() - def test_is_within_bounds(self, http_mocker): - fl = self.get_fl() - self.assertFalse(fl.is_within_bounds(1, 2, 3)) - self.assertTrue(fl.is_within_bounds(2, 1, 3)) - self.assertTrue(fl.is_within_bounds(1, 1, 1)) - self.assertTrue(fl.is_within_bounds(1, None, None)) - self.assertTrue(fl.is_within_bounds(1, None, 2)) - self.assertTrue(fl.is_within_bounds(1, 0, None)) - 
self.assertTrue(fl.is_within_bounds("b", "a", "c")) - self.assertFalse(fl.is_within_bounds("a", "b", "c")) - self.assertTrue(fl.is_within_bounds("a", None, "c")) - self.assertTrue(fl.is_within_bounds("a", None, None)) - self.assertTrue(fl.is_within_bounds("b", "a", None)) - self.assertFalse(fl.is_within_bounds("a", "b", None)) - self.assertTrue(fl.is_within_bounds("aa:02", "aa:01", "aa:03")) - self.assertFalse(fl.is_within_bounds("aa:12", None, "aa:03")) - with self.assertRaises(TypeError): - fl.is_within_bounds(1.0, "b", None) - with self.assertRaises(TypeError): - fl.is_within_bounds("A:B", "A::B", None) - - -class HttpListerTester(HttpListerTesterBase, abc.ABC): - """Base testing class for subclass of - - :class:`swh.lister.core.indexing_lister.IndexingHttpLister` - - See :class:`swh.lister.github.tests.test_gh_lister` for an example of how - to customize for a specific listing service. - - """ - - last_index = AbstractAttribute( - "Last index " "in good_api_response" - ) # type: Union[AbstractAttribute, int] - first_index = AbstractAttribute( - "First index in " " good_api_response" - ) # type: Union[AbstractAttribute, Optional[int]] - bad_api_response_file = AbstractAttribute( - "Example bad response body" - ) # type: Union[AbstractAttribute, str] - entries_per_page = AbstractAttribute( - "Number of results in " "good response" - ) # type: Union[AbstractAttribute, int] - test_re = AbstractAttribute( - "Compiled regex matching the server url. Must capture the " "index value." - ) # type: Union[AbstractAttribute, Pattern] - convert_type = str # type: Callable[..., Any] - """static method used to convert the "request_index" to its right type (for - indexing listers for example, this is in accordance with the model's - "indexable" column). - - """ - - def mock_response(self, request, context): - self.fl.reset_backoff() - self.rate_limit = 1 - context.status_code = 200 - custom_headers = self.response_headers(request) - context.headers.update(custom_headers) - req_index = self.request_index(request) - - if req_index == self.first_index: - response_file = self.good_api_response_file - else: - response_file = self.bad_api_response_file - - with open( - "swh/lister/%s/tests/%s" % (self.lister_subdir, response_file), - "r", - encoding="utf-8", - ) as r: - return r.read() - - def request_index(self, request): - m = self.test_re.search(request.path_url) - if m and (len(m.groups()) > 0): - return self.convert_type(m.group(1)) - - def create_fl_with_db(self, http_mocker): - http_mocker.get(self.test_re, text=self.mock_response) - db = init_db() - - fl = self.get_fl( - override_config={"lister": {"cls": "local", "args": {"db": db.url()}}} - ) - fl.db = db - self.init_db(db, fl.MODEL) - - self.mock_scheduler(fl) - return fl - - @requests_mock.Mocker() - def test_fetch_no_bounds_yesdb(self, http_mocker): - fl = self.create_fl_with_db(http_mocker) - - fl.run() - - self.assertEqual(fl.db_last_index(), self.last_index) - ingested_repos = list(fl.db_query_range(self.first_index, self.last_index)) - self.assertEqual(len(ingested_repos), self.entries_per_page) - - @requests_mock.Mocker() - def test_fetch_multiple_pages_yesdb(self, http_mocker): - - fl = self.create_fl_with_db(http_mocker) - fl.run(min_bound=self.first_index) - - self.assertEqual(fl.db_last_index(), self.last_index) - - partitions = fl.db_partition_indices(5) - self.assertGreater(len(partitions), 0) - for k in partitions: - self.assertLessEqual(len(k), 5) - self.assertGreater(len(k), 0) - - @requests_mock.Mocker() - def 
test_fetch_none_nodb(self, http_mocker): - http_mocker.get(self.test_re, text=self.mock_response) - fl = self.get_fl() - - self.disable_scheduler(fl) - self.disable_db(fl) - - fl.run(min_bound=1, max_bound=1) # stores no results - # FIXME: Determine what this method tries to test and add checks to - # actually test - - @requests_mock.Mocker() - def test_fetch_one_nodb(self, http_mocker): - http_mocker.get(self.test_re, text=self.mock_response) - fl = self.get_fl() - - self.disable_scheduler(fl) - self.disable_db(fl) - - fl.run(min_bound=self.first_index, max_bound=self.first_index) - # FIXME: Determine what this method tries to test and add checks to - # actually test - - @requests_mock.Mocker() - def test_fetch_multiple_pages_nodb(self, http_mocker): - http_mocker.get(self.test_re, text=self.mock_response) - fl = self.get_fl() - - self.disable_scheduler(fl) - self.disable_db(fl) - - fl.run(min_bound=self.first_index) - # FIXME: Determine what this method tries to test and add checks to - # actually test - - @requests_mock.Mocker() - def test_repos_list(self, http_mocker): - """Test the number of repos listed by the lister - - """ - http_mocker.get(self.test_re, text=self.mock_response) - li = self.get_fl().transport_response_simplified( - self.get_api_response(self.first_index) - ) - self.assertIsInstance(li, list) - self.assertEqual(len(li), self.entries_per_page) - - @requests_mock.Mocker() - def test_model_map(self, http_mocker): - """Check if all the keys of model are present in the model created by - the `transport_response_simplified` - - """ - http_mocker.get(self.test_re, text=self.mock_response) - fl = self.get_fl() - li = fl.transport_response_simplified(self.get_api_response(self.first_index)) - di = li[0] - self.assertIsInstance(di, dict) - pubs = [k for k in vars(fl.MODEL).keys() if not k.startswith("_")] - for k in pubs: - if k not in ["last_seen", "task_id", "id"]: - self.assertIn(k, di) - - @requests_mock.Mocker() - def test_api_request(self, http_mocker): - """Test API request for rate limit handling - - """ - http_mocker.get(self.test_re, text=self.mock_limit_twice_response) - with patch.object(time, "sleep", wraps=time.sleep) as sleepmock: - self.get_api_response(self.first_index) - self.assertEqual(sleepmock.call_count, 2) - - @requests_mock.Mocker() - def test_request_headers(self, http_mocker): - fl = self.create_fl_with_db(http_mocker) - fl.run() - self.assertNotEqual(len(http_mocker.request_history), 0) - for request in http_mocker.request_history: - assert "User-Agent" in request.headers - user_agent = request.headers["User-Agent"] - assert "Software Heritage Lister" in user_agent - assert swh.lister.__version__ in user_agent - - def scheduled_tasks_test( - self, next_api_response_file, next_last_index, http_mocker - ): - """Check that no loading tasks get disabled when processing a new - page of repositories returned by a forge API - """ - fl = self.create_fl_with_db(http_mocker) - - # process first page of repositories listing - fl.run() - - # process second page of repositories listing - prev_last_index = self.last_index - self.first_index = self.last_index - self.last_index = next_last_index - self.good_api_response_file = next_api_response_file - fl.run(min_bound=prev_last_index) - - # check expected number of ingested repos and loading tasks - ingested_repos = list(fl.db_query_range(0, self.last_index)) - self.assertEqual(len(ingested_repos), len(self.scheduler_tasks)) - self.assertEqual(len(ingested_repos), 2 * self.entries_per_page) - - # check tasks are 
not disabled - for task in self.scheduler_tasks: - self.assertTrue(task["status"] != "disabled") - - -class HttpSimpleListerTester(HttpListerTesterBase, abc.ABC): - """Base testing class for subclass of - :class:`swh.lister.core.simple)_lister.SimpleLister` - - See :class:`swh.lister.pypi.tests.test_lister` for an example of how - to customize for a specific listing service. - - """ - - entries = AbstractAttribute( - "Number of results " "in good response" - ) # type: Union[AbstractAttribute, int] - PAGE = AbstractAttribute( - "URL of the server api's unique page to retrieve and " "parse for information" - ) # type: Union[AbstractAttribute, str] - - def get_fl(self, override_config=None): - """Retrieve an instance of fake lister (fl). - - """ - if override_config or self.fl is None: - self.fl = self.Lister(override_config=override_config) - self.fl.INITIAL_BACKOFF = 1 - - self.fl.reset_backoff() - return self.fl - - def mock_response(self, request, context): - self.fl.reset_backoff() - self.rate_limit = 1 - context.status_code = 200 - custom_headers = self.response_headers(request) - context.headers.update(custom_headers) - response_file = self.good_api_response_file - - with open( - "swh/lister/%s/tests/%s" % (self.lister_subdir, response_file), - "r", - encoding="utf-8", - ) as r: - return r.read() - - @requests_mock.Mocker() - def test_api_request(self, http_mocker): - """Test API request for rate limit handling - - """ - http_mocker.get(self.PAGE, text=self.mock_limit_twice_response) - with patch.object(time, "sleep", wraps=time.sleep) as sleepmock: - self.get_api_response(0) - self.assertEqual(sleepmock.call_count, 2) - - @requests_mock.Mocker() - def test_model_map(self, http_mocker): - """Check if all the keys of model are present in the model created by - the `transport_response_simplified` - - """ - http_mocker.get(self.PAGE, text=self.mock_response) - fl = self.get_fl() - li = fl.list_packages(self.get_api_response(0)) - li = fl.transport_response_simplified(li) - di = li[0] - self.assertIsInstance(di, dict) - pubs = [k for k in vars(fl.MODEL).keys() if not k.startswith("_")] - for k in pubs: - if k not in ["last_seen", "task_id", "id"]: - self.assertIn(k, di) - - @requests_mock.Mocker() - def test_repos_list(self, http_mocker): - """Test the number of packages listed by the lister - - """ - http_mocker.get(self.PAGE, text=self.mock_response) - li = self.get_fl().list_packages(self.get_api_response(0)) - self.assertIsInstance(li, list) - self.assertEqual(len(li), self.entries) diff --git a/swh/lister/core/tests/test_model.py b/swh/lister/core/tests/test_model.py deleted file mode 100644 index f85bbdf..0000000 --- a/swh/lister/core/tests/test_model.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (C) 2017 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import unittest - -from sqlalchemy import Column, Integer - -from swh.lister.core.models import IndexingModelBase, ModelBase - - -class BadSubclass1(ModelBase): - __abstract__ = True - pass - - -class BadSubclass2(ModelBase): - __abstract__ = True - __tablename__ = "foo" - - -class BadSubclass3(BadSubclass2): - __abstract__ = True - pass - - -class GoodSubclass(BadSubclass2): - uid = Column(Integer, primary_key=True) - indexable = Column(Integer, index=True) - - -class IndexingBadSubclass(IndexingModelBase): - __abstract__ = True - pass - - -class IndexingBadSubclass2(IndexingModelBase): - __abstract__ = True - __tablename__ = 
"foo" - - -class IndexingBadSubclass3(IndexingBadSubclass2): - __abstract__ = True - pass - - -class IndexingGoodSubclass(IndexingModelBase): - uid = Column(Integer, primary_key=True) - indexable = Column(Integer, index=True) - __tablename__ = "bar" - - -class TestModel(unittest.TestCase): - def test_model_instancing(self): - with self.assertRaises(TypeError): - ModelBase() - - with self.assertRaises(TypeError): - BadSubclass1() - - with self.assertRaises(TypeError): - BadSubclass2() - - with self.assertRaises(TypeError): - BadSubclass3() - - self.assertIsInstance(GoodSubclass(), GoodSubclass) - gsc = GoodSubclass(uid="uid") - - self.assertEqual(gsc.__tablename__, "foo") - self.assertEqual(gsc.uid, "uid") - - def test_indexing_model_instancing(self): - with self.assertRaises(TypeError): - IndexingModelBase() - - with self.assertRaises(TypeError): - IndexingBadSubclass() - - with self.assertRaises(TypeError): - IndexingBadSubclass2() - - with self.assertRaises(TypeError): - IndexingBadSubclass3() - - self.assertIsInstance(IndexingGoodSubclass(), IndexingGoodSubclass) - gsc = IndexingGoodSubclass(uid="uid", indexable="indexable") - - self.assertEqual(gsc.__tablename__, "bar") - self.assertEqual(gsc.uid, "uid") - self.assertEqual(gsc.indexable, "indexable") diff --git a/swh/lister/cran/__init__.py b/swh/lister/cran/__init__.py index 6c085ee..a1cdbc5 100644 --- a/swh/lister/cran/__init__.py +++ b/swh/lister/cran/__init__.py @@ -1,13 +1,12 @@ -# Copyright (C) 2019 the Software Heritage developers +# Copyright (C) 2019-2021 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def register(): from .lister import CRANLister return { - "models": [], "lister": CRANLister, "task_modules": ["%s.tasks" % __name__], } diff --git a/swh/lister/debian/__init__.py b/swh/lister/debian/__init__.py index 0d483d9..8af9d8b 100644 --- a/swh/lister/debian/__init__.py +++ b/swh/lister/debian/__init__.py @@ -1,16 +1,15 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Mapping def register() -> Mapping[str, Any]: from .lister import DebianLister return { - "models": [], "lister": DebianLister, "task_modules": ["%s.tasks" % __name__], } diff --git a/swh/lister/gitea/__init__.py b/swh/lister/gitea/__init__.py index f258483..4c364b8 100644 --- a/swh/lister/gitea/__init__.py +++ b/swh/lister/gitea/__init__.py @@ -1,13 +1,12 @@ # Copyright (C) 2020 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def register(): from .lister import GiteaLister return { - "models": [], "lister": GiteaLister, "task_modules": ["%s.tasks" % __name__], } diff --git a/swh/lister/github/__init__.py b/swh/lister/github/__init__.py index 0444507..d243baa 100644 --- a/swh/lister/github/__init__.py +++ b/swh/lister/github/__init__.py @@ -1,13 +1,12 @@ # Copyright (C) 2019 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def register(): from .lister import GitHubLister return { - "models": [], "lister": GitHubLister, "task_modules": ["%s.tasks" % __name__], } diff --git a/swh/lister/gnu/tree.py b/swh/lister/gnu/tree.py index 
f666cb5..ba74e04 100644 --- a/swh/lister/gnu/tree.py +++ b/swh/lister/gnu/tree.py @@ -1,337 +1,336 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -from datetime import datetime +from datetime import datetime, timezone import gzip import json import logging from os import path from pathlib import Path import re from typing import Any, List, Mapping, Sequence, Tuple from urllib.parse import urlparse -from pytz import utc import requests logger = logging.getLogger(__name__) class GNUTree: """Gnu Tree's representation """ def __init__(self, url: str): self.url = url # filepath or uri u = urlparse(url) self.base_url = "%s://%s" % (u.scheme, u.netloc) # Interesting top level directories self.top_level_directories = ["gnu", "old-gnu"] # internal state self._artifacts = {} # type: Mapping[str, Any] self._projects = {} # type: Mapping[str, Any] @property def projects(self) -> Mapping[str, Any]: if not self._projects: self._projects, self._artifacts = self._load() return self._projects @property def artifacts(self) -> Mapping[str, Any]: if not self._artifacts: self._projects, self._artifacts = self._load() return self._artifacts def _load(self) -> Tuple[Mapping[str, Any], Mapping[str, Any]]: """Compute projects and artifacts per project Returns: Tuple of dict projects (key project url, value the associated information) and a dict artifacts (key project url, value the info_file list) """ projects = {} artifacts = {} raw_data = load_raw_data(self.url)[0] for directory in raw_data["contents"]: if directory["name"] not in self.top_level_directories: continue infos = directory["contents"] for info in infos: if info["type"] == "directory": package_url = "%s/%s/%s/" % ( self.base_url, directory["name"], info["name"], ) package_artifacts = find_artifacts(info["contents"], package_url) if package_artifacts != []: repo_details = { "name": info["name"], "url": package_url, "time_modified": format_date(info["time"]), } artifacts[package_url] = package_artifacts projects[package_url] = repo_details return projects, artifacts def find_artifacts( filesystem: List[Mapping[str, Any]], url: str ) -> List[Mapping[str, Any]]: """Recursively list artifacts present in the folder and subfolders for a particular package url. Args: filesystem: File structure of the package root directory. This is a list of Dict representing either file or directory information as dict (keys: name, size, time, type). url: URL of the corresponding package Returns List of tarball urls and their associated metadata (time, length, etc...). For example: .. code-block:: python [ { 'url': 'https://ftp.gnu.org/gnu/3dldf/3DLDF-1.1.3.tar.gz', 'time': 1071002600, 'filename': '3DLDF-1.1.3.tar.gz', 'version': '1.1.3', 'length': 543 }, { 'url': 'https://ftp.gnu.org/gnu/3dldf/3DLDF-1.1.4.tar.gz', 'time': 1071078759, 'filename: '3DLDF-1.1.4.tar.gz', 'version': '1.1.4', 'length': 456 }, { 'url': 'https://ftp.gnu.org/gnu/3dldf/3DLDF-1.1.5.tar.gz', 'time': 1074278633, 'filename': '3DLDF-1.1.5.tar.gz', 'version': '1.1.5' 'length': 251 }, ... 
] """ artifacts = [] # type: List[Mapping[str, Any]] for info_file in filesystem: filetype = info_file["type"] filename = info_file["name"] if filetype == "file": if check_filename_is_archive(filename): uri = url + filename artifacts.append( { "url": uri, "filename": filename, "time": format_date(info_file["time"]), "length": int(info_file["size"]), "version": get_version(filename), } ) # It will recursively check for artifacts in all sub-folders elif filetype == "directory": tarballs_in_dir = find_artifacts( info_file["contents"], url + filename + "/" ) artifacts.extend(tarballs_in_dir) return artifacts def check_filename_is_archive(filename: str) -> bool: """ Check for the extension of the file, if the file is of zip format of .tar.x format, where x could be anything, then returns true. Args: filename: name of the file for which the extensions is needs to be checked. Returns: Whether filename is an archive or not Example: >>> check_filename_is_archive('abc.zip') True >>> check_filename_is_archive('abc.tar.gz') True >>> check_filename_is_archive('bac.tar') True >>> check_filename_is_archive('abc.tar.gz.sig') False >>> check_filename_is_archive('foobar.tar.') False """ file_suffixes = Path(filename).suffixes if len(file_suffixes) == 1 and file_suffixes[-1] in (".zip", ".tar"): return True elif len(file_suffixes) > 1: if file_suffixes[-1] == ".zip" or file_suffixes[-2] == ".tar": return True return False # to recognize existing naming pattern EXTENSIONS = [ "zip", "tar", "gz", "tgz", "bz2", "bzip2", "lzma", "lz", "xz", "Z", "7z", ] VERSION_KEYWORDS = [ "cygwin_me", "w32", "win32", "nt", "cygwin", "mingw", "latest", "alpha", "beta", "release", "stable", "hppa", "solaris", "sunos", "sun4u", "sparc", "sun", "aix", "ibm", "rs6000", "i386", "i686", "linux", "redhat", "linuxlibc", "mips", "powerpc", "macos", "apple", "darwin", "macosx", "powermacintosh", "unknown", "netbsd", "freebsd", "sgi", "irix", ] # Match a filename into components. # # We use Debian's release number heuristic: A release number starts # with a digit, and is followed by alphanumeric characters or any of # ., +, :, ~ and - # # We hardcode a list of possible extensions, as this release number # scheme would match them too... We match on any combination of those. # # Greedy matching is done right to left (we only match the extension # greedily with +, software_name and release_number are matched lazily # with +? and *?). PATTERN = r""" ^ (?: # We have a software name and a release number, separated with a # -, _ or dot. (?P.+?[-_.]) (?P({vkeywords}|[0-9][0-9a-zA-Z_.+:~-]*?)+) | # We couldn't match a release number, put everything in the # software name. (?P.+?) ) (?P(?:\.(?:{extensions}))+) $ """.format( extensions="|".join(EXTENSIONS), vkeywords="|".join("%s[-]?" 
% k for k in VERSION_KEYWORDS), ) def get_version(uri: str) -> str: """Extract branch name from tarball uri Args: uri (str): Tarball URI Returns: Version detected Example: >>> uri = 'https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz' >>> get_version(uri) '0.2.0' >>> uri = '8sync-0.3.0.tar.gz' >>> get_version(uri) '0.3.0' """ filename = path.split(uri)[-1] m = re.match(PATTERN, filename, flags=re.VERBOSE | re.IGNORECASE) if m: d = m.groupdict() if d["software_name1"] and d["release_number"]: return d["release_number"] if d["software_name2"]: return d["software_name2"] return "" def load_raw_data(url: str) -> Sequence[Mapping]: """Load the raw json from the tree.json.gz Args: url: Tree.json.gz url or path Returns: The raw json list """ if url.startswith("http://") or url.startswith("https://"): response = requests.get(url, allow_redirects=True) if not response.ok: raise ValueError("Error during query to %s" % url) raw = gzip.decompress(response.content) else: with gzip.open(url, "r") as f: raw = f.read() raw_data = json.loads(raw.decode("utf-8")) return raw_data def format_date(timestamp: str) -> str: """Format a string timestamp to an isoformat string """ - return datetime.fromtimestamp(int(timestamp), tz=utc).isoformat() + return datetime.fromtimestamp(int(timestamp), tz=timezone.utc).isoformat() diff --git a/swh/lister/npm/__init__.py b/swh/lister/npm/__init__.py index 7544bd1..1a5f84b 100644 --- a/swh/lister/npm/__init__.py +++ b/swh/lister/npm/__init__.py @@ -1,20 +1,19 @@ # Copyright (C) 2019-2021 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def register(): from .lister import NpmLister return { - "models": [], "lister": NpmLister, "task_modules": ["%s.tasks" % __name__], "task_types": { "list-npm-full": { "default_interval": "7 days", "min_interval": "7 days", "max_interval": "7 days", }, }, } diff --git a/swh/lister/packagist/__init__.py b/swh/lister/packagist/__init__.py index 262008f..1f4d208 100644 --- a/swh/lister/packagist/__init__.py +++ b/swh/lister/packagist/__init__.py @@ -1,14 +1,12 @@ -# Copyright (C) 2019 the Software Heritage developers +# Copyright (C) 2019-2021 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information def register(): from .lister import PackagistLister - from .models import PackagistModel return { - "models": [PackagistModel], "lister": PackagistLister, "task_modules": ["%s.tasks" % __name__], } diff --git a/swh/lister/packagist/lister.py b/swh/lister/packagist/lister.py index e49a99c..9378691 100644 --- a/swh/lister/packagist/lister.py +++ b/swh/lister/packagist/lister.py @@ -1,102 +1,182 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -import json +from dataclasses import dataclass +from datetime import datetime, timezone import logging -import random -from typing import Any, Dict, List, Mapping +from typing import Any, Dict, Iterator, List, Optional -from swh.lister.core.lister_transports import ListerOnePageApiTransport -from swh.lister.core.simple_lister import SimpleLister -from swh.scheduler import utils +import iso8601 +import requests -from .models import PackagistModel +from 
swh.scheduler.interface import SchedulerInterface +from swh.scheduler.model import ListedOrigin + +from .. import USER_AGENT +from ..pattern import CredentialsType, Lister logger = logging.getLogger(__name__) +PackagistPageType = List[str] -def compute_package_url(repo_name: str) -> str: - """Compute packgist package url from repo name. - """ - return "https://repo.packagist.org/p/%s.json" % repo_name +@dataclass +class PackagistListerState: + """State of Packagist lister""" + last_listing_date: Optional[datetime] = None + """Last date when packagist lister was executed""" -class PackagistLister(ListerOnePageApiTransport, SimpleLister): - """List packages available in the Packagist package manager. - The lister sends the request to the url present in the class - variable `PAGE`, to receive a list of all the package names - present in the Packagist package manager. Iterates over all the - packages and constructs the metadata url of the package from - the name of the package and creates a loading task:: +class PackagistLister(Lister[PackagistListerState, PackagistPageType]): + """ + List all Packagist projects and send associated origins to scheduler. + + The lister queries the Packagist API, whose documentation can be found at + https://packagist.org/apidoc. + + For each package, its metadata are retrieved using Packagist API endpoints + whose responses are served from static files, which are guaranteed to be + efficient on the Packagist side (no dymamic queries). + Furthermore, subsequent listing will send the "If-Modified-Since" HTTP + header to only retrieve packages metadata updated since the previous listing + operation in order to save bandwidth and return only origins which might have + new released versions. + """ - Task: - Type: load-packagist - Policy: recurring - Args: - - + LISTER_NAME = "Packagist" + PACKAGIST_PACKAGES_LIST_URL = "https://packagist.org/packages/list.json" + PACKAGIST_REPO_BASE_URL = "https://repo.packagist.org/p" + + def __init__( + self, scheduler: SchedulerInterface, credentials: CredentialsType = None, + ): + super().__init__( + scheduler=scheduler, + url=self.PACKAGIST_PACKAGES_LIST_URL, + instance="packagist", + credentials=credentials, + ) - Example:: + self.session = requests.Session() + self.session.headers.update( + {"Accept": "application/json", "User-Agent": USER_AGENT} + ) + self.listing_date = datetime.now().astimezone(tz=timezone.utc) - Task: - Type: load-packagist - Policy: recurring - Args: - 'hypejunction/hypegamemechanics' - 'https://repo.packagist.org/p/hypejunction/hypegamemechanics.json' + def state_from_dict(self, d: Dict[str, Any]) -> PackagistListerState: + last_listing_date = d.get("last_listing_date") + if last_listing_date is not None: + d["last_listing_date"] = iso8601.parse_date(last_listing_date) + return PackagistListerState(**d) - """ + def state_to_dict(self, state: PackagistListerState) -> Dict[str, Any]: + d: Dict[str, Optional[str]] = {"last_listing_date": None} + last_listing_date = state.last_listing_date + if last_listing_date is not None: + d["last_listing_date"] = last_listing_date.isoformat() + return d - MODEL = PackagistModel - LISTER_NAME = "packagist" - PAGE = "https://packagist.org/packages/list.json" - instance = "packagist" + def api_request(self, url: str) -> Any: + logger.debug("Fetching URL %s", url) - def __init__(self, override_config=None): - ListerOnePageApiTransport.__init__(self) - SimpleLister.__init__(self, override_config=override_config) + response = self.session.get(url) - def task_dict( - self, 
origin_type: str, origin_url: str, **kwargs: Mapping[str, str] - ) -> Dict[str, Any]: - """Return task format dict + if response.status_code not in (200, 304): + logger.warning( + "Unexpected HTTP status code %s on %s: %s", + response.status_code, + response.url, + response.content, + ) - This is overridden from the lister_base as more information is - needed for the ingestion task creation. + response.raise_for_status() - """ - return utils.create_task_dict( - "load-%s" % origin_type, - kwargs.get("policy", "recurring"), - kwargs.get("name"), - origin_url, - retries_left=3, - ) - - def list_packages(self, response: Any) -> List[str]: - """List the actual packagist origins from the response. + # response is empty when status code is 304 + return response.json() if response.status_code == 200 else {} + def get_pages(self) -> Iterator[PackagistPageType]: """ - response = json.loads(response.text) - packages = [name for name in response["packageNames"]] - logger.debug("Number of packages: %s", len(packages)) - random.shuffle(packages) - return packages - - def get_model_from_repo(self, repo_name: str) -> Mapping[str, str]: - """Transform from repository representation to model + Yield a single page listing all Packagist projects. + """ + yield self.api_request(self.PACKAGIST_PACKAGES_LIST_URL)["packageNames"] + def get_origins_from_page(self, page: PackagistPageType) -> Iterator[ListedOrigin]: + """ + Iterate on all Packagist projects and yield ListedOrigin instances. """ - url = compute_package_url(repo_name) - return { - "uid": repo_name, - "name": repo_name, - "full_name": repo_name, - "html_url": url, - "origin_url": url, - "origin_type": "packagist", - } + assert self.lister_obj.id is not None + + # save some bandwidth by only getting packages metadata updated since + # last listing + if self.state.last_listing_date is not None: + if_modified_since = self.state.last_listing_date.strftime( + "%a, %d %b %Y %H:%M:%S GMT" + ) + self.session.headers["If-Modified-Since"] = if_modified_since + + # to ensure origins will not be listed multiple times + origin_urls = set() + + for package_name in page: + try: + metadata = self.api_request( + f"{self.PACKAGIST_REPO_BASE_URL}/{package_name}.json" + ) + if not metadata.get("packages", {}): + # package metadata not updated since last listing + continue + if package_name not in metadata["packages"]: + # missing package metadata in response + continue + versions_info = metadata["packages"][package_name].values() + except requests.exceptions.HTTPError: + # error when getting package metadata (usually 404 when a + # package has been removed), skip it and process next package + continue + + origin_url = None + visit_type = None + last_update = None + + # extract origin url for package, vcs type and latest release date + for version_info in versions_info: + origin_url = version_info.get("source", {}).get("url", "") + if not origin_url: + continue + # can be git, hg or svn + visit_type = version_info.get("source", {}).get("type", "") + dist_time_str = version_info.get("time", "") + if not dist_time_str: + continue + dist_time = iso8601.parse_date(dist_time_str) + if last_update is None or dist_time > last_update: + last_update = dist_time + + # skip package with already seen origin url or with missing required info + if visit_type is None or origin_url is None or origin_url in origin_urls: + continue + + # bitbucket closed its mercurial hosting service, those origins can not be + # loaded into the archive anymore + if visit_type == "hg" and 
origin_url.startswith("https://bitbucket.org/"): + continue + + origin_urls.add(origin_url) + + logger.debug( + "Found package %s last updated on %s", package_name, last_update + ) + + yield ListedOrigin( + lister_id=self.lister_obj.id, + url=origin_url, + visit_type=visit_type, + last_update=last_update, + ) + + def finalize(self) -> None: + self.state.last_listing_date = self.listing_date + self.updated = True diff --git a/swh/lister/packagist/models.py b/swh/lister/packagist/models.py deleted file mode 100644 index 268f884..0000000 --- a/swh/lister/packagist/models.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (C) 2019 the Software Heritage developers -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -from sqlalchemy import Column, String - -from ..core.models import ModelBase - - -class PackagistModel(ModelBase): - """a Packagist repository representation - - """ - - __tablename__ = "packagist_repo" - - uid = Column(String, primary_key=True) diff --git a/swh/lister/packagist/tasks.py b/swh/lister/packagist/tasks.py index 6f6087b..9146e38 100644 --- a/swh/lister/packagist/tasks.py +++ b/swh/lister/packagist/tasks.py @@ -1,18 +1,18 @@ -# Copyright (C) 2019 the Software Heritage developers +# Copyright (C) 2019-2021 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from celery import shared_task from .lister import PackagistLister @shared_task(name=__name__ + ".PackagistListerTask") def list_packagist(**lister_args): "List the packagist (php) registry" - PackagistLister(**lister_args).run() + return PackagistLister.from_configfile(**lister_args).run().dict() @shared_task(name=__name__ + ".ping") def _ping(): return "OK" diff --git a/swh/lister/packagist/tests/conftest.py b/swh/lister/packagist/tests/conftest.py deleted file mode 100644 index 4482346..0000000 --- a/swh/lister/packagist/tests/conftest.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) 2019-2020 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import pytest - - -@pytest.fixture -def lister_under_test(): - return "packagist" - - -@pytest.fixture -def lister_packagist(swh_lister): - # Amend the scheduler with an unknown yet load-packagist task type - swh_lister.scheduler.create_task_type( - { - "type": "load-packagist", - "description": "Load packagist origin", - "backend_name": "swh.loader.package.tasks.LoaderPackagist", - "default_interval": "1 day", - } - ) - - return swh_lister diff --git a/swh/lister/packagist/tests/data/den1n_contextmenu.json b/swh/lister/packagist/tests/data/den1n_contextmenu.json new file mode 100644 index 0000000..d0f6d08 --- /dev/null +++ b/swh/lister/packagist/tests/data/den1n_contextmenu.json @@ -0,0 +1,78 @@ +{ + "packages": { + "den1n/contextmenu": { + "dev-default": { + "name": "den1n/contextmenu", + "description": "Context menu custom element.", + "keywords": [ + "javascript", + "JS", + "contextmenu", + "den1n" + ], + "homepage": "https://bitbucket.org/den1n/contextmenu", + "version": "dev-default", + "version_normalized": "9999999-dev", + "license": [ + "MIT" + ], + "authors": [{ + "name": "Dmitry Kadochnikov", + "email": "iqmass@gmail.com" + }], + "source": { + "type": "hg", + "url": "https://bitbucket.org/den1n/contextmenu", + "reference": 
"c207786b3dcf90fc7796a99dcb9e5fdb860ef2ba" + }, + "dist": { + "type": "zip", + "url": "https://bitbucket.org/den1n/contextmenu/get/c207786b3dcf90fc7796a99dcb9e5fdb860ef2ba.zip", + "reference": "c207786b3dcf90fc7796a99dcb9e5fdb860ef2ba", + "shasum": "" + }, + "type": "library", + "time": "2019-08-27T10:42:55+00:00", + "default-branch": true, + "require": { + "den1n/xelement": "^1.0" + }, + "uid": 4101245 + }, + "v1.0.0": { + "name": "den1n/contextmenu", + "description": "Simple DOM JS context menu.", + "keywords": [ + "javascript", + "JS", + "contextmenu", + "den1n" + ], + "homepage": "https://bitbucket.org/den1n/contextmenu", + "version": "v1.0.0", + "version_normalized": "1.0.0.0", + "license": [ + "MIT" + ], + "authors": [{ + "name": "Dmitry Kadochnikov", + "email": "iqmass@gmail.com" + }], + "source": { + "type": "hg", + "url": "https://bitbucket.org/den1n/contextmenu", + "reference": "278e30a199d1f0e1a8789a4b798814722bd11065" + }, + "dist": { + "type": "zip", + "url": "https://bitbucket.org/den1n/contextmenu/get/278e30a199d1f0e1a8789a4b798814722bd11065.zip", + "reference": "278e30a199d1f0e1a8789a4b798814722bd11065", + "shasum": "" + }, + "type": "library", + "time": "2018-03-07T10:08:41+00:00", + "uid": 1968017 + } + } + } +} \ No newline at end of file diff --git a/swh/lister/packagist/tests/data/https_packagist.org/packages_list.json b/swh/lister/packagist/tests/data/https_packagist.org/packages_list.json deleted file mode 100644 index 2e4843c..0000000 --- a/swh/lister/packagist/tests/data/https_packagist.org/packages_list.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "packageNames": [ - "0.0.0/composer-include-files", - "0.0.0/laravel-env-shim", - "0.0.1/try-make-package", - "0099ff/dialogflowphp", - "00f100/array_dot" - ] -} \ No newline at end of file diff --git a/swh/lister/packagist/tests/data/ljjackson_linnworks.json b/swh/lister/packagist/tests/data/ljjackson_linnworks.json new file mode 100644 index 0000000..ba57a81 --- /dev/null +++ b/swh/lister/packagist/tests/data/ljjackson_linnworks.json @@ -0,0 +1,83 @@ +{ + "packages": { + "ljjackson/linnworks": { + "0.1": { + "name": "ljjackson/linnworks", + "description": "A PHP API Integration of Linnworks.", + "keywords": [], + "homepage": "https://github.com/ljjackson", + "version": "0.1", + "version_normalized": "0.1.0.0", + "license": [], + "authors": [{ + "name": "Liam Jackson", + "homepage": "https://github.com/ljjackson", + "role": "Developer" + }], + "source": { + "type": "git", + "url": "https://github.com/ljjackson/linnworks.git", + "reference": "b2d16490823a8a9012a83b80cdcd6a129cfc5dea" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/ljjackson/linnworks/zipball/b2d16490823a8a9012a83b80cdcd6a129cfc5dea", + "reference": "b2d16490823a8a9012a83b80cdcd6a129cfc5dea", + "shasum": "" + }, + "type": "library", + "time": "2018-10-22T19:52:25+00:00", + "autoload": { + "psr-4": { + "LJJackson\\Linnworks\\": "src/" + } + }, + "require": { + "php": "^7.0", + "guzzlehttp/guzzle": "^6.3", + "ext-json": "*" + }, + "uid": 2535139 + }, + "dev-master": { + "name": "ljjackson/linnworks", + "description": "A PHP API Integration of Linnworks.", + "keywords": [], + "homepage": "https://github.com/ljjackson", + "version": "dev-master", + "version_normalized": "9999999-dev", + "license": [], + "authors": [{ + "name": "Liam Jackson", + "homepage": "https://github.com/ljjackson", + "role": "Developer" + }], + "source": { + "type": "git", + "url": "https://github.com/ljjackson/linnworks.git", + "reference": 
"7c6b1209dc3bafad4284b130bda8450f3478ea26" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/ljjackson/linnworks/zipball/7c6b1209dc3bafad4284b130bda8450f3478ea26", + "reference": "7c6b1209dc3bafad4284b130bda8450f3478ea26", + "shasum": "" + }, + "type": "library", + "time": "2018-11-01T21:45:50+00:00", + "autoload": { + "psr-4": { + "LJJackson\\Linnworks\\": "src/" + } + }, + "require": { + "guzzlehttp/guzzle": "^6.3", + "ext-json": "*", + "php": "^7.1.3", + "nesbot/carbon": "*" + }, + "uid": 2517334 + } + } + } +} \ No newline at end of file diff --git a/swh/lister/packagist/tests/data/lky_wx_article.json b/swh/lister/packagist/tests/data/lky_wx_article.json new file mode 100644 index 0000000..5bf3f4c --- /dev/null +++ b/swh/lister/packagist/tests/data/lky_wx_article.json @@ -0,0 +1,239 @@ +{ + "packages": { + "lky/wx_article": { + "1.0": { + "name": "lky/wx_article", + "description": "wx article editor", + "keywords": [ + "laravel", + "WxGzhArticle" + ], + "homepage": "https://github.com/lky/wxgzharticle", + "version": "1.0", + "version_normalized": "1.0.0.0", + "license": [ + "MIT" + ], + "authors": [{ + "name": "lky", + "email": "2747865797@qq.com", + "homepage": "http://lky.kim" + }], + "source": { + "type": "git", + "url": "https://github.com/gitlky/wx_article.git", + "reference": "bd1826f17a42a1d3da44c4562af3be370687466b" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/gitlky/wx_article/zipball/bd1826f17a42a1d3da44c4562af3be370687466b", + "reference": "bd1826f17a42a1d3da44c4562af3be370687466b", + "shasum": "" + }, + "type": "library", + "time": "2018-08-28T06:51:46+00:00", + "autoload": { + "psr-4": { + "lky\\WxGzhArticle\\": "src/" + } + }, + "extra": { + "laravel": { + "providers": [ + "lky\\WxGzhArticle\\WxGzhArticleServiceProvider" + ], + "aliases": { + "WxGzhArticle": "lky\\WxGzhArticle\\Facades\\WxGzhArticle" + } + } + }, + "require": { + "illuminate/support": "~5", + "ixudra/curl": "6.*", + "guzzlehttp/guzzle": "6.*", + "laravel/framework": "5.2.*", + "php": ">=5.6.4" + }, + "require-dev": { + "phpunit/phpunit": "~6.0", + "orchestra/testbench": "~3.0" + }, + "uid": 2493149 + }, + "dev-master": { + "name": "lky/wx_article", + "description": "wx article editor", + "keywords": [ + "laravel", + "WxGzhArticle" + ], + "homepage": "https://github.com/lky/wx_article", + "version": "dev-master", + "version_normalized": "9999999-dev", + "license": [ + "MIT" + ], + "authors": [{ + "name": "lky", + "email": "2747865797@qq.com", + "homepage": "http://lky.kim" + }], + "source": { + "type": "git", + "url": "https://github.com/gitlky/wx_article.git", + "reference": "9ef7cddfe1a9715cee52acc7a97d4f51d0f6e2be" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/gitlky/wx_article/zipball/9ef7cddfe1a9715cee52acc7a97d4f51d0f6e2be", + "reference": "9ef7cddfe1a9715cee52acc7a97d4f51d0f6e2be", + "shasum": "" + }, + "type": "library", + "time": "2018-08-30T07:37:09+00:00", + "autoload": { + "psr-4": { + "lky\\WxGzhArticle\\": "src/" + } + }, + "extra": { + "laravel": { + "providers": [ + "lky\\WxGzhArticle\\WxGzhArticleServiceProvider" + ], + "aliases": { + "WxGzhArticle": "lky\\WxGzhArticle\\Facades\\WxGzhArticle" + } + } + }, + "default-branch": true, + "require": { + "ixudra/curl": "6.*", + "guzzlehttp/guzzle": "6.*", + "laravel/framework": ">=5.2.0", + "php": ">=5.6.4" + }, + "require-dev": { + "phpunit/phpunit": "~6.0", + "orchestra/testbench": "~3.0" + }, + "uid": 4096807 + }, + "v1.2": { + "name": "lky/wx_article", + 
"description": "wx article editor", + "keywords": [ + "laravel", + "WxGzhArticle" + ], + "homepage": "https://github.com/lky/wx_article", + "version": "v1.2", + "version_normalized": "1.2.0.0", + "license": [ + "MIT" + ], + "authors": [{ + "name": "lky", + "email": "2747865797@qq.com", + "homepage": "http://lky.kim" + }], + "source": { + "type": "git", + "url": "https://github.com/gitlky/wx_article.git", + "reference": "d332d20b8d848018c7e6a43e7fe47a78cdb926b7" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/gitlky/wx_article/zipball/d332d20b8d848018c7e6a43e7fe47a78cdb926b7", + "reference": "d332d20b8d848018c7e6a43e7fe47a78cdb926b7", + "shasum": "" + }, + "type": "library", + "autoload": { + "psr-4": { + "lky\\WxGzhArticle\\": "src/" + } + }, + "extra": { + "laravel": { + "providers": [ + "lky\\WxGzhArticle\\WxGzhArticleServiceProvider" + ], + "aliases": { + "WxGzhArticle": "lky\\WxGzhArticle\\Facades\\WxGzhArticle" + } + } + }, + "require": { + "ixudra/curl": "6.*", + "guzzlehttp/guzzle": "6.*", + "laravel/framework": ">=5.2.0", + "php": ">=5.6.4" + }, + "require-dev": { + "phpunit/phpunit": "~6.0", + "orchestra/testbench": "~3.0" + }, + "uid": 2493150 + }, + "v1.6": { + "name": "lky/wx_article", + "description": "wx article editor", + "keywords": [ + "laravel", + "WxGzhArticle" + ], + "homepage": "https://github.com/lky/wx_article", + "version": "v1.6", + "version_normalized": "1.6.0.0", + "license": [ + "MIT" + ], + "authors": [{ + "name": "lky", + "email": "2747865797@qq.com", + "homepage": "http://lky.kim" + }], + "source": { + "type": "git", + "url": "https://github.com/gitlky/wx_article.git", + "reference": "d332d20b8d848018c7e6a43e7fe47a78cdb926b7" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/gitlky/wx_article/zipball/d332d20b8d848018c7e6a43e7fe47a78cdb926b7", + "reference": "d332d20b8d848018c7e6a43e7fe47a78cdb926b7", + "shasum": "" + }, + "type": "library", + "time": "2018-08-29T08:26:06+00:00", + "autoload": { + "psr-4": { + "lky\\WxGzhArticle\\": "src/" + } + }, + "extra": { + "laravel": { + "providers": [ + "lky\\WxGzhArticle\\WxGzhArticleServiceProvider" + ], + "aliases": { + "WxGzhArticle": "lky\\WxGzhArticle\\Facades\\WxGzhArticle" + } + } + }, + "require": { + "ixudra/curl": "6.*", + "guzzlehttp/guzzle": "6.*", + "laravel/framework": ">=5.2.0", + "php": ">=5.6.4" + }, + "require-dev": { + "phpunit/phpunit": "~6.0", + "orchestra/testbench": "~3.0" + }, + "uid": 2427550 + } + } + } +} \ No newline at end of file diff --git a/swh/lister/packagist/tests/data/spryker-eco_computop-api.json b/swh/lister/packagist/tests/data/spryker-eco_computop-api.json new file mode 100644 index 0000000..c7d2f16 --- /dev/null +++ b/swh/lister/packagist/tests/data/spryker-eco_computop-api.json @@ -0,0 +1,141 @@ +{ + "packages": { + "spryker-eco/computop-api": { + "1.0.0": { + "name": "spryker-eco/computop-api", + "description": "Computop API Module", + "keywords": [], + "homepage": "", + "version": "1.0.0", + "version_normalized": "1.0.0.0", + "license": [ + "MIT" + ], + "authors": [], + "source": { + "type": "git", + "url": "https://github.com/spryker-eco/computop-api.git", + "reference": "d75dc7d2c80bd93e65081b26433ee559d2c92f0a" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/spryker-eco/computop-api/zipball/d75dc7d2c80bd93e65081b26433ee559d2c92f0a", + "reference": "d75dc7d2c80bd93e65081b26433ee559d2c92f0a", + "shasum": "" + }, + "type": "library", + "time": "2018-08-31T11:51:23+00:00", + "autoload": { + "psr-4": { + 
"SprykerEco\\": "src/SprykerEco/" + } + }, + "extra": { + "branch-alias": { + "dev-master": "1.0.x-dev" + } + }, + "require": { + "php": ">=7.1", + "spryker/kernel": "^3.0.0", + "spryker/transfer": "^3.0.0", + "spryker/util-text": "^1.0.0", + "spryker/guzzle": "^2.2.0" + }, + "require-dev": { + "spryker/code-sniffer": "dev-master" + }, + "uid": 2432548 + }, + "dev-dev": { + "name": "spryker-eco/computop-api", + "description": "Computop API Module", + "keywords": [], + "homepage": "", + "version": "dev-dev", + "version_normalized": "dev-dev", + "license": [ + "MIT" + ], + "authors": [], + "source": {}, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/spryker-eco/computop-api/zipball/7a695d1e412132296546d072364f410186572790", + "reference": "7a695d1e412132296546d072364f410186572790", + "shasum": "" + }, + "type": "library", + "time": "2018-08-31T11:38:22+00:00", + "autoload": { + "psr-4": { + "SprykerEco\\": "src/SprykerEco/" + } + }, + "extra": { + "branch-alias": { + "dev-master": "1.0.x-dev" + } + }, + "require": { + "php": ">=7.1", + "spryker/kernel": "^3.0.0", + "spryker/transfer": "^3.0.0", + "spryker/util-text": "^1.0.0", + "spryker/guzzle": "^2.2.0" + }, + "require-dev": { + "spryker/code-sniffer": "dev-master" + }, + "uid": 2209824 + }, + "dev-master": { + "name": "spryker-eco/computop-api", + "description": "ComputopApi module", + "keywords": [], + "homepage": "", + "version": "dev-master", + "version_normalized": "9999999-dev", + "license": [ + "MIT" + ], + "authors": [], + "source": { + "type": "git", + "url": "https://github.com/spryker-eco/computop-api.git", + "reference": "7ac81d5db52c0639bc06a61a35d7738a964fde88" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/spryker-eco/computop-api/zipball/7ac81d5db52c0639bc06a61a35d7738a964fde88", + "reference": "7ac81d5db52c0639bc06a61a35d7738a964fde88", + "shasum": "" + }, + "type": "library", + "time": "2020-06-22T15:50:29+00:00", + "autoload": { + "psr-4": { + "SprykerEco\\": "src/SprykerEco/" + } + }, + "extra": { + "branch-alias": { + "dev-master": "1.0.x-dev" + } + }, + "default-branch": true, + "require": { + "php": ">=7.1", + "spryker/kernel": "^3.0.0", + "spryker/transfer": "^3.0.0", + "spryker/util-text": "^1.0.0", + "spryker/guzzle": "^2.2.0" + }, + "require-dev": { + "spryker/code-sniffer": "dev-master" + }, + "uid": 4006827 + } + } + } +} \ No newline at end of file diff --git a/swh/lister/packagist/tests/test_lister.py b/swh/lister/packagist/tests/test_lister.py index 808910f..64b4439 100644 --- a/swh/lister/packagist/tests/test_lister.py +++ b/swh/lister/packagist/tests/test_lister.py @@ -1,104 +1,159 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -import unittest -from unittest.mock import patch - -import requests_mock - -from swh.lister.core.tests.test_lister import HttpSimpleListerTester -from swh.lister.packagist.lister import PackagistLister, compute_package_url - -expected_packages = [ - "0.0.0/composer-include-files", - "0.0.0/laravel-env-shim", - "0.0.1/try-make-package", - "0099ff/dialogflowphp", - "00f100/array_dot", -] - -expected_model = { - "uid": "0099ff/dialogflowphp", - "name": "0099ff/dialogflowphp", - "full_name": "0099ff/dialogflowphp", - "html_url": 
"https://repo.packagist.org/p/0099ff/dialogflowphp.json", - "origin_url": "https://repo.packagist.org/p/0099ff/dialogflowphp.json", - "origin_type": "packagist", +import json +from pathlib import Path + +import iso8601 + +from swh.lister.packagist.lister import PackagistLister + +_packages_list = { + "packageNames": [ + "ljjackson/linnworks", + "lky/wx_article", + "spryker-eco/computop-api", + ] } -class PackagistListerTester(HttpSimpleListerTester, unittest.TestCase): - Lister = PackagistLister - PAGE = "https://packagist.org/packages/list.json" - lister_subdir = "packagist" - good_api_response_file = "data/https_packagist.org/packages_list.json" - entries = 5 +def _package_metadata(datadir, package_name): + return json.loads( + Path(datadir, f"{package_name.replace('/', '_')}.json").read_text() + ) + + +def _package_origin_info(package_name, package_metadata): + origin_url = None + visit_type = None + last_update = None + for version_info in package_metadata["packages"][package_name].values(): + origin_url = version_info["source"].get("url") + visit_type = version_info["source"].get("type") + if "time" in version_info: + version_date = iso8601.parse_date(version_info["time"]) + if last_update is None or version_date > last_update: + last_update = version_date + return origin_url, visit_type, last_update - @requests_mock.Mocker() - def test_list_packages(self, http_mocker): - """List packages from simple api page should retrieve all packages within - """ - http_mocker.get(self.PAGE, text=self.mock_response) - fl = self.get_fl() - packages = fl.list_packages(self.get_api_response(0)) +def _request_without_if_modified_since(request): + return request.headers.get("If-Modified-Since") is None - for package in expected_packages: - assert package in packages - def test_transport_response_simplified(self): - """Test model created by the lister +def _request_with_if_modified_since(request): + return request.headers.get("If-Modified-Since") is not None - """ - fl = self.get_fl() - model = fl.transport_response_simplified(["0099ff/dialogflowphp"]) - assert len(model) == 1 - for key, values in model[0].items(): - assert values == expected_model[key] - @patch("swh.lister.packagist.lister.utils.create_task_dict") - def test_task_dict(self, mock_create_tasks): - """Test the task creation of lister +def test_packagist_lister(swh_scheduler, requests_mock, datadir): + # first listing, should return one origin per package + lister = PackagistLister(scheduler=swh_scheduler) + requests_mock.get(lister.PACKAGIST_PACKAGES_LIST_URL, json=_packages_list) + packages_metadata = {} + for package_name in _packages_list["packageNames"]: + metadata = _package_metadata(datadir, package_name) + packages_metadata[package_name] = metadata + requests_mock.get( + f"{lister.PACKAGIST_REPO_BASE_URL}/{package_name}.json", + json=metadata, + additional_matcher=_request_without_if_modified_since, + ) + stats = lister.run() + + assert stats.pages == 1 + assert stats.origins == len(_packages_list["packageNames"]) + assert lister.updated + + scheduler_origins = swh_scheduler.get_listed_origins(lister.lister_obj.id).results - """ - fl = self.get_fl() - fl.task_dict( - origin_type="packagist", origin_url="https://abc", name="test_pack" + for package_name, package_metadata in packages_metadata.items(): + origin_url, visit_type, last_update = _package_origin_info( + package_name, package_metadata ) - mock_create_tasks.assert_called_once_with( - "load-packagist", "recurring", "test_pack", "https://abc", retries_left=3 + filtered_origins 
+        filtered_origins = [o for o in scheduler_origins if o.url == origin_url]
+        assert filtered_origins
+        assert filtered_origins[0].visit_type == visit_type
+        assert filtered_origins[0].last_update == last_update
+
+    # second listing, should return 0 origins as no package metadata
+    # has been updated since first listing
+    lister = PackagistLister(scheduler=swh_scheduler)
+    for package_name in _packages_list["packageNames"]:
+        requests_mock.get(
+            f"{lister.PACKAGIST_REPO_BASE_URL}/{package_name}.json",
+            additional_matcher=_request_with_if_modified_since,
+            status_code=304,
         )
+    assert lister.get_state_from_scheduler().last_listing_date is not None
-def test_compute_package_url():
-    expected_url = "https://repo.packagist.org/p/hello.json"
-    actual_url = compute_package_url("hello")
-    assert actual_url == expected_url
+    stats = lister.run()
+    assert stats.pages == 1
+    assert stats.origins == 0
+    assert lister.updated
-def test_packagist_lister(lister_packagist, requests_mock_datadir):
-    lister_packagist.run()
-    r = lister_packagist.scheduler.search_tasks(task_type="load-packagist")
-    assert len(r) == 5
+def test_packagist_lister_missing_metadata(swh_scheduler, requests_mock, datadir):
+    lister = PackagistLister(scheduler=swh_scheduler)
+    requests_mock.get(lister.PACKAGIST_PACKAGES_LIST_URL, json=_packages_list)
+    for package_name in _packages_list["packageNames"]:
+        requests_mock.get(
+            f"{lister.PACKAGIST_REPO_BASE_URL}/{package_name}.json",
+            additional_matcher=_request_without_if_modified_since,
+            status_code=404,
+        )
-    for row in r:
-        assert row["type"] == "load-packagist"
-        # arguments check
-        args = row["arguments"]["args"]
-        assert len(args) == 2
+    stats = lister.run()
-        package = args[0]
-        url = args[1]
+    assert stats.pages == 1
+    assert stats.origins == 0
-        expected_url = compute_package_url(package)
-        assert url == expected_url
-        # kwargs
-        kwargs = row["arguments"]["kwargs"]
-        assert kwargs == {}
+def test_packagist_lister_empty_metadata(swh_scheduler, requests_mock, datadir):
+    lister = PackagistLister(scheduler=swh_scheduler)
+    requests_mock.get(lister.PACKAGIST_PACKAGES_LIST_URL, json=_packages_list)
+    for package_name in _packages_list["packageNames"]:
+        requests_mock.get(
+            f"{lister.PACKAGIST_REPO_BASE_URL}/{package_name}.json",
+            additional_matcher=_request_without_if_modified_since,
+            json={"packages": {}},
+        )
-        assert row["policy"] == "recurring"
-        assert row["priority"] is None
+    stats = lister.run()
+
+    assert stats.pages == 1
+    assert stats.origins == 0
+
+
+def test_packagist_lister_package_with_bitbucket_hg_origin(
+    swh_scheduler, requests_mock, datadir
+):
+    package_name = "den1n/contextmenu"
+    lister = PackagistLister(scheduler=swh_scheduler)
+    requests_mock.get(
+        lister.PACKAGIST_PACKAGES_LIST_URL, json={"packageNames": [package_name]}
+    )
+    requests_mock.get(
+        f"{lister.PACKAGIST_REPO_BASE_URL}/{package_name}.json",
+        additional_matcher=_request_without_if_modified_since,
+        json=_package_metadata(datadir, package_name),
+    )
+
+    stats = lister.run()
+
+    assert stats.pages == 1
+    assert stats.origins == 0
+
+
+def test_lister_from_configfile(swh_scheduler_config, mocker):
+    load_from_envvar = mocker.patch("swh.lister.pattern.load_from_envvar")
+    load_from_envvar.return_value = {
+        "scheduler": {"cls": "local", **swh_scheduler_config},
+        "credentials": {},
+    }
+    lister = PackagistLister.from_configfile()
+    assert lister.scheduler is not None
+    assert lister.credentials is not None
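The new `test_packagist_lister` scenario hinges on conditional HTTP requests: the first run fetches every package's metadata, while the second run sends an `If-Modified-Since` header and, on a `304 Not Modified` answer, emits no origin at all. A minimal sketch of that request pattern, written against plain `requests` rather than the lister's internals (the URL layout follows the repo endpoints mocked above; the date handling is an assumption):

```lang=python
import requests

PACKAGIST_REPO_BASE_URL = "https://repo.packagist.org/p"


def fetch_package_metadata(package_name, last_listing_date=None):
    """Fetch one package's metadata, skipping it when unchanged since the last listing."""
    headers = {}
    if last_listing_date is not None:
        # ask the server to answer 304 if nothing changed since that UTC datetime
        headers["If-Modified-Since"] = last_listing_date.strftime(
            "%a, %d %b %Y %H:%M:%S GMT"
        )
    response = requests.get(
        f"{PACKAGIST_REPO_BASE_URL}/{package_name}.json", headers=headers
    )
    if response.status_code == 304:
        return None  # unchanged since the previous listing, nothing to re-emit
    response.raise_for_status()
    return response.json()
```

An unchanged package thus costs a single cheap request and yields no origin, which is exactly what the second `lister.run()` above asserts with `stats.origins == 0`.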
diff --git a/swh/lister/packagist/tests/test_tasks.py b/swh/lister/packagist/tests/test_tasks.py
index 6c5d15d..9db88e3 100644
--- a/swh/lister/packagist/tests/test_tasks.py
+++ b/swh/lister/packagist/tests/test_tasks.py
@@ -1,31 +1,31 @@
-# Copyright (C) 2019-2020 the Software Heritage developers
+# Copyright (C) 2019-2021 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
-from unittest.mock import patch
+from swh.lister.pattern import ListerStats
 def test_ping(swh_scheduler_celery_app, swh_scheduler_celery_worker):
     res = swh_scheduler_celery_app.send_task("swh.lister.packagist.tasks.ping")
     assert res
     res.wait()
     assert res.successful()
     assert res.result == "OK"
-@patch("swh.lister.packagist.tasks.PackagistLister")
-def test_lister(lister, swh_scheduler_celery_app, swh_scheduler_celery_worker):
-    # setup the mocked PackagistLister
-    lister.return_value = lister
-    lister.run.return_value = None
+def test_lister(swh_scheduler_celery_app, swh_scheduler_celery_worker, mocker):
+    lister = mocker.patch("swh.lister.packagist.tasks.PackagistLister")
+    lister.from_configfile.return_value = lister
+    stats = ListerStats(pages=1, origins=286500)
+    lister.run.return_value = stats
     res = swh_scheduler_celery_app.send_task(
         "swh.lister.packagist.tasks.PackagistListerTask"
     )
     assert res
     res.wait()
     assert res.successful()
+    assert res.result == stats.dict()
-    lister.assert_called_once_with()
-    lister.db_last_index.assert_not_called()
+    lister.from_configfile.assert_called_once_with()
     lister.run.assert_called_once_with()
diff --git a/swh/lister/phabricator/__init__.py b/swh/lister/phabricator/__init__.py
index b08cdc9..17bb00e 100644
--- a/swh/lister/phabricator/__init__.py
+++ b/swh/lister/phabricator/__init__.py
@@ -1,13 +1,12 @@
-# Copyright (C) 2019 the Software Heritage developers
+# Copyright (C) 2019-2021 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 def register():
     from .lister import PhabricatorLister
     return {
-        "models": [],
         "lister": PhabricatorLister,
         "task_modules": ["%s.tasks" % __name__],
     }
diff --git a/swh/lister/pypi/__init__.py b/swh/lister/pypi/__init__.py
index 29a9f87..3637c00 100644
--- a/swh/lister/pypi/__init__.py
+++ b/swh/lister/pypi/__init__.py
@@ -1,13 +1,12 @@
-# Copyright (C) 2019 the Software Heritage developers
+# Copyright (C) 2019-2021 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 def register():
     from .lister import PyPILister
     return {
-        "models": [],
         "lister": PyPILister,
         "task_modules": ["%s.tasks" % __name__],
     }
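Dropping the `"models"` key from these `register()` entry points reflects the move away from SQLAlchemy-backed listers: a lister package now only advertises its lister class and its Celery task modules. For a hypothetical new lister package the entry point reduces to the same two keys (module and class names below are illustrative only, not an existing module):

```lang=python
# swh/lister/example/__init__.py -- hypothetical package, for illustration


def register():
    # imported lazily, as in the phabricator and pypi packages above
    from .lister import ExampleLister

    return {
        "lister": ExampleLister,
        "task_modules": ["%s.tasks" % __name__],
    }
```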
logger.debug("lister db_url: %s", db_url) - return db_url - - -@pytest.fixture -def lister_under_test(): - """Fixture to determine which lister to test""" - return "core" - - -@pytest.fixture -def swh_lister_config(lister_db_url, swh_scheduler_config): - return { - "scheduler": {"cls": "local", **swh_scheduler_config}, - "lister": {"cls": "local", "args": {"db": lister_db_url},}, - "credentials": {}, - "cache_responses": False, - } - - -@pytest.fixture(autouse=True) -def swh_config(swh_lister_config, monkeypatch, tmp_path): - conf_path = os.path.join(str(tmp_path), "lister.yml") - with open(conf_path, "w") as f: - f.write(yaml.dump(swh_lister_config)) - monkeypatch.setenv("SWH_CONFIG_FILENAME", conf_path) - return conf_path - - -@pytest.fixture -def engine(lister_db_url): - engine = create_engine(lister_db_url) - initialize(engine, drop_tables=True) - return engine - - -@pytest.fixture -def swh_lister(engine, lister_db_url, lister_under_test, swh_config): - assert lister_under_test in SUPPORTED_LISTERS - return get_lister(lister_under_test, db_url=lister_db_url) diff --git a/swh/lister/tests/test_cli.py b/swh/lister/tests/test_cli.py index 6066dc6..53ec7f2 100644 --- a/swh/lister/tests/test_cli.py +++ b/swh/lister/tests/test_cli.py @@ -1,46 +1,42 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from swh.lister.cli import SUPPORTED_LISTERS, get_lister -from .test_utils import init_db - lister_args = { "cgit": {"url": "https://git.eclipse.org/c/",}, "phabricator": { "instance": "softwareheritage", "url": "https://forge.softwareheritage.org/api/diffusion.repository.search", "api_token": "bogus", }, "gitea": {"url": "https://try.gitea.io/api/v1/",}, "gitlab": {"url": "https://gitlab.ow2.org/api/v4", "instance": "ow2",}, } def test_get_lister_wrong_input(): """Unsupported lister should raise""" with pytest.raises(ValueError) as e: get_lister("unknown", "db-url") assert "Invalid lister" in str(e.value) def test_get_lister(swh_scheduler_config): """Instantiating a supported lister should be ok """ - db_url = init_db().url() # Drop launchpad lister from the lister to check, its test setup is more involved # than the other listers and it's not currently done here for lister_name in SUPPORTED_LISTERS: lst = get_lister( lister_name, - db_url, scheduler={"cls": "local", **swh_scheduler_config}, **lister_args.get(lister_name, {}), ) assert hasattr(lst, "run") diff --git a/swh/lister/tests/test_utils.py b/swh/lister/tests/test_utils.py index 68aed42..763f743 100644 --- a/swh/lister/tests/test_utils.py +++ b/swh/lister/tests/test_utils.py @@ -1,133 +1,120 @@ # Copyright (C) 2018-2020 the Software Heritage developers # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest import requests from requests.status_codes import codes from tenacity.wait import wait_fixed -from testing.postgresql import Postgresql from swh.lister.utils import ( MAX_NUMBER_ATTEMPTS, WAIT_EXP_BASE, split_range, throttling_retry, ) @pytest.mark.parametrize( "total_pages,nb_pages,expected_ranges", [ (14, 5, [(0, 4), (5, 9), (10, 14)]), (19, 10, [(0, 9), (10, 19)]), (20, 3, [(0, 2), (3, 5), (6, 8), (9, 11), (12, 14), (15, 17), (18, 20)]), (21, 3, [(0, 2), (3, 5), (6, 8), (9, 11), (12, 
diff --git a/swh/lister/tests/test_utils.py b/swh/lister/tests/test_utils.py
index 68aed42..763f743 100644
--- a/swh/lister/tests/test_utils.py
+++ b/swh/lister/tests/test_utils.py
@@ -1,133 +1,120 @@
 # Copyright (C) 2018-2020 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 import pytest
 import requests
 from requests.status_codes import codes
 from tenacity.wait import wait_fixed
-from testing.postgresql import Postgresql
 from swh.lister.utils import (
     MAX_NUMBER_ATTEMPTS,
     WAIT_EXP_BASE,
     split_range,
     throttling_retry,
 )
 @pytest.mark.parametrize(
     "total_pages,nb_pages,expected_ranges",
     [
         (14, 5, [(0, 4), (5, 9), (10, 14)]),
         (19, 10, [(0, 9), (10, 19)]),
         (20, 3, [(0, 2), (3, 5), (6, 8), (9, 11), (12, 14), (15, 17), (18, 20)]),
         (21, 3, [(0, 2), (3, 5), (6, 8), (9, 11), (12, 14), (15, 17), (18, 21),],),
     ],
 )
 def test_split_range(total_pages, nb_pages, expected_ranges):
     actual_ranges = list(split_range(total_pages, nb_pages))
     assert actual_ranges == expected_ranges
 @pytest.mark.parametrize("total_pages,nb_pages", [(None, 1), (100, None)])
 def test_split_range_errors(total_pages, nb_pages):
     for total_pages, nb_pages in [(None, 1), (100, None)]:
         with pytest.raises(TypeError):
             next(split_range(total_pages, nb_pages))
-def init_db():
-    """Factorize the db_url instantiation
-
-    Returns:
-        db object to ease db manipulation
-
-    """
-    initdb_args = Postgresql.DEFAULT_SETTINGS["initdb_args"]
-    initdb_args = " ".join([initdb_args, "-E UTF-8"])
-    return Postgresql(initdb_args=initdb_args)
-
-
 TEST_URL = "https://example.og/api/repositories"
 @throttling_retry()
 def make_request():
     response = requests.get(TEST_URL)
     response.raise_for_status()
     return response
 def assert_sleep_calls(mocker, mock_sleep, sleep_params):
     try:
         mock_sleep.assert_has_calls([mocker.call(param) for param in sleep_params])
     except AssertionError:
         # tenacity < 5.1 has a different behavior for wait_exponential
         # https://github.com/jd/tenacity/commit/aac4307a0aa30d7befd0ebe4212ee4fc69083a95
         mock_sleep.assert_has_calls(
             [mocker.call(param * WAIT_EXP_BASE) for param in sleep_params]
         )
 def test_throttling_retry(requests_mock, mocker):
     data = {"result": {}}
     requests_mock.get(
         TEST_URL,
         [
             {"status_code": codes.too_many_requests},
             {"status_code": codes.too_many_requests},
             {"status_code": codes.ok, "json": data},
         ],
     )
     mock_sleep = mocker.patch.object(make_request.retry, "sleep")
     response = make_request()
     assert_sleep_calls(mocker, mock_sleep, [1, WAIT_EXP_BASE])
     assert response.json() == data
 def test_throttling_retry_max_attemps(requests_mock, mocker):
     requests_mock.get(
         TEST_URL, [{"status_code": codes.too_many_requests}] * (MAX_NUMBER_ATTEMPTS),
     )
     mock_sleep = mocker.patch.object(make_request.retry, "sleep")
     with pytest.raises(requests.exceptions.HTTPError) as e:
         make_request()
     assert e.value.response.status_code == codes.too_many_requests
     assert_sleep_calls(
         mocker,
         mock_sleep,
         [float(WAIT_EXP_BASE ** i) for i in range(MAX_NUMBER_ATTEMPTS - 1)],
     )
 @throttling_retry(wait=wait_fixed(WAIT_EXP_BASE))
 def make_request_wait_fixed():
     response = requests.get(TEST_URL)
     response.raise_for_status()
     return response
 def test_throttling_retry_wait_fixed(requests_mock, mocker):
     requests_mock.get(
         TEST_URL,
         [
             {"status_code": codes.too_many_requests},
             {"status_code": codes.too_many_requests},
             {"status_code": codes.ok},
         ],
     )
     mock_sleep = mocker.patch.object(make_request_wait_fixed.retry, "sleep")
     make_request_wait_fixed()
     assert_sleep_calls(mocker, mock_sleep, [WAIT_EXP_BASE] * 2)
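With the PostgreSQL helper removed, `test_utils.py` is left exercising `throttling_retry` alone: the decorator retries a callable whose HTTP request was answered with `429 Too Many Requests`, sleeping between attempts (exponential backoff by default, or a fixed wait when passed explicitly) and giving up after `MAX_NUMBER_ATTEMPTS` by re-raising the last error. A usage sketch mirroring the `make_request` helper tested above:

```lang=python
import requests

from swh.lister.utils import throttling_retry


@throttling_retry()
def list_repositories(url):
    """GET a listing page, retrying when the server answers HTTP 429."""
    response = requests.get(url)
    response.raise_for_status()  # a 429 answer here triggers a retried attempt
    return response


if __name__ == "__main__":
    # After MAX_NUMBER_ATTEMPTS consecutive 429 answers the last HTTPError is
    # re-raised, which is what test_throttling_retry_max_attemps checks.
    list_repositories("https://example.og/api/repositories")
```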