diff --git a/swh/lister/bitbucket/lister.py b/swh/lister/bitbucket/lister.py
index 0c78af0..a480532 100644
--- a/swh/lister/bitbucket/lister.py
+++ b/swh/lister/bitbucket/lister.py
@@ -1,81 +1,82 @@
 # Copyright (C) 2017-2019 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import logging
 import iso8601

 from datetime import datetime, timezone
 from typing import Any
 from urllib import parse
+
 from swh.lister.bitbucket.models import BitBucketModel
 from swh.lister.core.indexing_lister import IndexingHttpLister

 logger = logging.getLogger(__name__)


 class BitBucketLister(IndexingHttpLister):
     PATH_TEMPLATE = '/repositories?after=%s'
     MODEL = BitBucketModel
     LISTER_NAME = 'bitbucket'
     DEFAULT_URL = 'https://api.bitbucket.org/2.0'
     instance = 'bitbucket'
     default_min_bound = datetime.fromtimestamp(0, timezone.utc)  # type: Any

     def __init__(self, url=None, override_config=None, per_page=100):
         super().__init__(url=url, override_config=override_config)
         per_page = self.config.get('per_page', per_page)

         self.PATH_TEMPLATE = '%s&pagelen=%s' % (
             self.PATH_TEMPLATE, per_page)

     def get_model_from_repo(self, repo):
         return {
             'uid': repo['uuid'],
             'indexable': iso8601.parse_date(repo['created_on']),
             'name': repo['name'],
             'full_name': repo['full_name'],
             'html_url': repo['links']['html']['href'],
             'origin_url': repo['links']['clone'][0]['href'],
             'origin_type': repo['scm'],
         }

     def get_next_target_from_response(self, response):
         """This will read the 'next' link from the api response if any
            and return it as a datetime.

         Args:
             response (Response): requests' response from api call

         Returns:
             next date as a datetime

         """
         body = response.json()
         next_ = body.get('next')
         if next_ is not None:
             next_ = parse.urlparse(next_)
             return iso8601.parse_date(parse.parse_qs(next_.query)['after'][0])

     def transport_response_simplified(self, response):
         repos = response.json()['values']
         return [self.get_model_from_repo(repo) for repo in repos]

     def request_uri(self, identifier):
         identifier = parse.quote(identifier.isoformat())
         return super().request_uri(identifier or '1970-01-01')

     def is_within_bounds(self, inner, lower=None, upper=None):
         # values are expected to be datetimes
         if lower is None and upper is None:
             ret = True
         elif lower is None:
             ret = inner <= upper
         elif upper is None:
             ret = inner >= lower
         else:
             ret = lower <= inner <= upper
         return ret
diff --git a/swh/lister/bitbucket/tests/data/https_api.bitbucket.org/2.0_repositories,after=1970-01-01T00:00:00+00:00,pagelen=100 b/swh/lister/bitbucket/tests/data/https_api.bitbucket.org/2.0_repositories,after=1970-01-01T00:00:00+00:00,pagelen=100
index 080a2fe..9e64695 100644
--- a/swh/lister/bitbucket/tests/data/https_api.bitbucket.org/2.0_repositories,after=1970-01-01T00:00:00+00:00,pagelen=100
+++ b/swh/lister/bitbucket/tests/data/https_api.bitbucket.org/2.0_repositories,after=1970-01-01T00:00:00+00:00,pagelen=100
@@ -1,806 +1,806 @@
{ "pagelen": 10, "values": [ { "scm": "hg", "website": "", "has_wiki": true, "name": "app-template", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/commits" },
"clone": [ { "href": "https://bitbucket.org/bebac/app-template", "name": "https" }, { "href": "ssh://hg@bitbucket.org/bebac/app-template", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template" }, "html": { "href": "https://bitbucket.org/bebac/app-template" }, "avatar": { "href": "https://bitbucket.org/bebac/app-template/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/bebac/app-template/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{0cf80a6e-e91f-4a4c-a61b-8c8ff51ca3ec}", "language": "c++", "created_on": "2008-07-12T07:44:01.476818+00:00", "full_name": "bebac/app-template", "has_issues": true, "owner": { "username": "bebac", "display_name": "Benny Bach", "type": "user", "uuid": "{d1a83a2a-be1b-4034-8c1d-386a6690cddb}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/bebac" }, "html": { "href": "https://bitbucket.org/bebac/" }, "avatar": { "href": "https://bitbucket.org/account/bebac/avatar/32/" } } }, "updated_on": "2011-10-05T15:36:19.409008+00:00", "size": 71548, "type": "repository", "slug": "app-template", "is_private": false, "description": "Basic files and directory structure for a C++ project. Intended as a starting point for a new project. Includes a basic cross platform core library." }, { - "scm": "hg", + "scm": "git", "website": "", "has_wiki": true, "name": "mercurialeclipse", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/commits" }, "clone": [ { "href": "https://bitbucket.org/bastiand/mercurialeclipse", "name": "https" }, { "href": "ssh://hg@bitbucket.org/bastiand/mercurialeclipse", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse" }, "html": { "href": "https://bitbucket.org/bastiand/mercurialeclipse" }, "avatar": { "href": "https://bitbucket.org/bastiand/mercurialeclipse/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/bastiand/mercurialeclipse/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{f7a08670-bdd1-4465-aa97-7a5ce8d1a25b}", "language": "", "created_on": "2008-07-12T09:37:06.254721+00:00", "full_name": "bastiand/mercurialeclipse", "has_issues": false, "owner": { "username": "bastiand", "display_name": "Bastian Doetsch", "type": "user", "uuid": "{3742cd48-adad-4205-ab0d-04fc992a1728}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/bastiand" }, "html": { "href": "https://bitbucket.org/bastiand/" }, "avatar": { "href": "https://bitbucket.org/account/bastiand/avatar/32/" } } }, "updated_on": 
"2011-09-17T02:36:59.062596+00:00", "size": 6445145, "type": "repository", "slug": "mercurialeclipse", "is_private": false, "description": "my own repo for MercurialEclipse." }, { "scm": "hg", "website": "", "has_wiki": true, "name": "sandboxpublic", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/commits" }, "clone": [ { "href": "https://bitbucket.org/aleax/sandboxpublic", "name": "https" }, { "href": "ssh://hg@bitbucket.org/aleax/sandboxpublic", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic" }, "html": { "href": "https://bitbucket.org/aleax/sandboxpublic" }, "avatar": { "href": "https://bitbucket.org/aleax/sandboxpublic/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/aleax/sandboxpublic/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{452c716c-a1ce-42bc-a95b-d38da49cbb37}", "language": "", "created_on": "2008-07-14T01:59:23.568048+00:00", "full_name": "aleax/sandboxpublic", "has_issues": true, "owner": { "username": "aleax", "display_name": "Alex Martelli", "type": "user", "uuid": "{1155d94d-fb48-43fe-a431-ec07c900b636}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/aleax" }, "html": { "href": "https://bitbucket.org/aleax/" }, "avatar": { "href": "https://bitbucket.org/account/aleax/avatar/32/" } } }, "updated_on": "2012-06-22T21:55:28.753727+00:00", "size": 3120, "type": "repository", "slug": "sandboxpublic", "is_private": false, "description": "to help debug ACLs for private vs public bitbucket repos" }, { "scm": "hg", "website": "", "has_wiki": true, "name": "otrsfix-ng", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/commits" }, "clone": [ { "href": "https://bitbucket.org/adiakin/otrsfix-ng", "name": "https" }, { "href": "ssh://hg@bitbucket.org/adiakin/otrsfix-ng", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng" }, "html": { "href": "https://bitbucket.org/adiakin/otrsfix-ng" }, "avatar": { "href": "https://bitbucket.org/adiakin/otrsfix-ng/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/adiakin/otrsfix-ng/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{05b1b9dc-a7b6-46d6-ae1b-e66a17aa4f55}", "language": "", "created_on": 
"2008-07-15T06:14:39.306314+00:00", "full_name": "adiakin/otrsfix-ng", "has_issues": true, "owner": { "username": "adiakin", "display_name": "adiakin", "type": "user", "uuid": "{414012b5-1ac9-4096-9f46-8893cfa3cda5}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/adiakin" }, "html": { "href": "https://bitbucket.org/adiakin/" }, "avatar": { "href": "https://bitbucket.org/account/adiakin/avatar/32/" } } }, "updated_on": "2016-06-02T18:56:34.868302+00:00", "size": 211631, "type": "repository", "slug": "otrsfix-ng", "is_private": false, "description": "OTRS greasemonkey extension" }, { "scm": "hg", "website": "", "has_wiki": true, "name": "pida-pypaned", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/commits" }, "clone": [ { "href": "https://bitbucket.org/aafshar/pida-pypaned", "name": "https" }, { "href": "ssh://hg@bitbucket.org/aafshar/pida-pypaned", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned" }, "html": { "href": "https://bitbucket.org/aafshar/pida-pypaned" }, "avatar": { "href": "https://bitbucket.org/aafshar/pida-pypaned/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/aafshar/pida-pypaned/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{94cb830a-1784-4e51-9791-8f5cc93990a9}", "language": "", "created_on": "2008-07-16T22:47:38.682491+00:00", "full_name": "aafshar/pida-pypaned", "has_issues": true, "owner": { "username": "aafshar", "display_name": "Ali Afshar", "type": "user", "uuid": "{bcb87110-6a92-41fc-b95c-680feeea5512}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/aafshar" }, "html": { "href": "https://bitbucket.org/aafshar/" }, "avatar": { "href": "https://bitbucket.org/account/aafshar/avatar/32/" } } }, "updated_on": "2012-06-22T21:55:42.451431+00:00", "size": 4680652, "type": "repository", "slug": "pida-pypaned", "is_private": false, "description": "" }, { "scm": "hg", "website": "", "has_wiki": true, "name": "TLOMM-testing", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/commits" }, "clone": [ { "href": "https://bitbucket.org/tgrimley/tlomm-testing", "name": "https" }, { "href": "ssh://hg@bitbucket.org/tgrimley/tlomm-testing", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing" }, "html": { "href": "https://bitbucket.org/tgrimley/tlomm-testing" }, "avatar": { "href": "https://bitbucket.org/tgrimley/tlomm-testing/avatar/32/" }, "hooks": { "href": 
"https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/tgrimley/tlomm-testing/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{95283ca1-f77e-40d6-b3ed-5bfae6ed2d15}", "language": "", "created_on": "2008-07-18T21:05:17.750587+00:00", "full_name": "tgrimley/tlomm-testing", "has_issues": true, "owner": { "username": "tgrimley", "display_name": "Thomas Grimley", "type": "user", "uuid": "{c958a08f-4669-4c77-81ec-4e2faa8ebf35}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/tgrimley" }, "html": { "href": "https://bitbucket.org/tgrimley/" }, "avatar": { "href": "https://bitbucket.org/account/tgrimley/avatar/32/" } } }, "updated_on": "2012-06-22T21:55:46.627825+00:00", "size": 3128, "type": "repository", "slug": "tlomm-testing", "is_private": false, "description": "File related to testing functionality of TLOMM->TLOTTS transition" }, { "scm": "hg", "website": "", "has_wiki": true, "name": "test", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/commits" }, "clone": [ { "href": "https://bitbucket.org/tingle/test", "name": "https" }, { "href": "ssh://hg@bitbucket.org/tingle/test", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test" }, "html": { "href": "https://bitbucket.org/tingle/test" }, "avatar": { "href": "https://bitbucket.org/tingle/test/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/tingle/test/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{457953ec-fe87-41b9-b659-94756fb40ece}", "language": "", "created_on": "2008-07-18T22:24:31.984981+00:00", "full_name": "tingle/test", "has_issues": true, "owner": { "username": "tingle", "display_name": "tingle", "type": "user", "uuid": "{dddce42b-bd19-417b-90ff-72cdbfb6eb7d}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/tingle" }, "html": { "href": "https://bitbucket.org/tingle/" }, "avatar": { "href": "https://bitbucket.org/account/tingle/avatar/32/" } } }, "updated_on": "2012-06-22T21:55:49.860564+00:00", "size": 3090, "type": "repository", "slug": "test", "is_private": false, "description": "" }, { "scm": "hg", "website": "http://shaze.myopenid.com/", "has_wiki": true, "name": "Repository", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/commits" }, "clone": [ { "href": "https://bitbucket.org/Shaze/repository", "name": "https" }, 
{ "href": "ssh://hg@bitbucket.org/Shaze/repository", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository" }, "html": { "href": "https://bitbucket.org/Shaze/repository" }, "avatar": { "href": "https://bitbucket.org/Shaze/repository/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/Shaze/repository/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{3c0b8076-caef-465a-8d08-a184459f659b}", "language": "", "created_on": "2008-07-18T22:39:51.380134+00:00", "full_name": "Shaze/repository", "has_issues": true, "owner": { "username": "Shaze", "display_name": "Shaze", "type": "user", "uuid": "{f57817e9-bfe4-4c65-84dd-662152430323}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/Shaze" }, "html": { "href": "https://bitbucket.org/Shaze/" }, "avatar": { "href": "https://bitbucket.org/account/Shaze/avatar/32/" } } }, "updated_on": "2012-06-22T21:55:51.570502+00:00", "size": 3052, "type": "repository", "slug": "repository", "is_private": false, "description": "Mine, all mine!" }, { "scm": "hg", "website": "http://bitbucket.org/copiesofcopies/identifox/", "has_wiki": true, "name": "identifox", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/commits" }, "clone": [ { "href": "https://bitbucket.org/uncryptic/identifox", "name": "https" }, { "href": "ssh://hg@bitbucket.org/uncryptic/identifox", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox" }, "html": { "href": "https://bitbucket.org/uncryptic/identifox" }, "avatar": { "href": "https://bitbucket.org/uncryptic/identifox/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/uncryptic/identifox/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{78a1a080-a77e-4d0d-823a-b107484477a8}", "language": "", "created_on": "2008-07-19T00:33:14.065946+00:00", "full_name": "uncryptic/identifox", "has_issues": true, "owner": { "username": "uncryptic", "display_name": "Uncryptic Communications", "type": "user", "uuid": "{db87bb9a-9980-4840-bd4a-61f7748a56b4}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/uncryptic" }, "html": { "href": "https://bitbucket.org/uncryptic/" }, "avatar": { "href": "https://bitbucket.org/account/uncryptic/avatar/32/" } } }, "updated_on": "2008-07-19T00:33:14+00:00", "size": 1918, "type": "repository", "slug": "identifox", "is_private": false, "description": "TwitterFox, modified to work with Identi.ca, including cosmetic and subtle code changes. 
For the most part, the code is nearly identical to the TwitterFox base: http://www.naan.net/trac/wiki/TwitterFox" }, { "scm": "hg", "website": "http://rforce.rubyforge.org", "has_wiki": false, "name": "rforce", "links": { "watchers": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/watchers" }, "branches": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/refs/branches" }, "tags": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/refs/tags" }, "commits": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/commits" }, "clone": [ { "href": "https://bitbucket.org/undees/rforce", "name": "https" }, { "href": "ssh://hg@bitbucket.org/undees/rforce", "name": "ssh" } ], "self": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce" }, "html": { "href": "https://bitbucket.org/undees/rforce" }, "avatar": { "href": "https://bitbucket.org/undees/rforce/avatar/32/" }, "hooks": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/hooks" }, "forks": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/forks" }, "downloads": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/downloads" }, "pullrequests": { "href": "https://api.bitbucket.org/2.0/repositories/undees/rforce/pullrequests" } }, "fork_policy": "allow_forks", "uuid": "{ec2ffee7-bfcd-4e95-83c8-22ac31e69fa3}", "language": "", "created_on": "2008-07-19T06:16:43.044743+00:00", "full_name": "undees/rforce", "has_issues": false, "owner": { "username": "undees", "display_name": "Ian Dees", "type": "user", "uuid": "{6ff66a34-6412-4f28-bf57-707a2a5c6d7b}", "links": { "self": { "href": "https://api.bitbucket.org/2.0/users/undees" }, "html": { "href": "https://bitbucket.org/undees/" }, "avatar": { "href": "https://bitbucket.org/account/undees/avatar/32/" } } }, "updated_on": "2015-02-09T00:48:15.408680+00:00", "size": 338402, "type": "repository", "slug": "rforce", "is_private": false, "description": "A simple, usable binding to the SalesForce API." } ], "next": "https://api.bitbucket.org/2.0/repositories?after=2008-07-19T19%3A53%3A07.031845%2B00%3A00" }
diff --git a/swh/lister/bitbucket/tests/test_lister.py b/swh/lister/bitbucket/tests/test_lister.py
index 9e2378a..eda17d6 100644
--- a/swh/lister/bitbucket/tests/test_lister.py
+++ b/swh/lister/bitbucket/tests/test_lister.py
@@ -1,101 +1,113 @@
 # Copyright (C) 2017-2019 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import re
 import unittest

 from datetime import timedelta
 from urllib.parse import unquote

 import iso8601
 import requests_mock

 from swh.lister.bitbucket.lister import BitBucketLister
 from swh.lister.core.tests.test_lister import HttpListerTester


 def _convert_type(req_index):
     """Convert the req_index to its right type according to the model's
        "indexable" column.

     """
     return iso8601.parse_date(unquote(req_index))


 class BitBucketListerTester(HttpListerTester, unittest.TestCase):
     Lister = BitBucketLister
     test_re = re.compile(r'/repositories\?after=([^?&]+)')
     lister_subdir = 'bitbucket'
     good_api_response_file = 'data/https_api.bitbucket.org/response.json'
     bad_api_response_file = 'data/https_api.bitbucket.org/empty_response.json'
     first_index = _convert_type('2008-07-12T07:44:01.476818+00:00')
     last_index = _convert_type('2008-07-19T06:16:43.044743+00:00')
     entries_per_page = 10
     convert_type = _convert_type

     def request_index(self, request):
         """(Override) This is needed to emulate the listing bootstrap
            when no min_bound is provided to run
         """
         m = self.test_re.search(request.path_url)
         idx = _convert_type(m.group(1))
         if idx == self.Lister.default_min_bound:
             idx = self.first_index
         return idx

     @requests_mock.Mocker()
     def test_fetch_none_nodb(self, http_mocker):
         """Overridden because index is not an integer nor a string

         """
         http_mocker.get(self.test_re, text=self.mock_response)
         fl = self.get_fl()
         self.disable_scheduler(fl)
         self.disable_db(fl)

         # stores no results
         fl.run(min_bound=self.first_index - timedelta(days=3),
                max_bound=self.first_index)

     def test_is_within_bounds(self):
         fl = self.get_fl()
         self.assertTrue(fl.is_within_bounds(
             iso8601.parse_date('2008-07-15'),
             self.first_index, self.last_index))
         self.assertFalse(fl.is_within_bounds(
             iso8601.parse_date('2008-07-20'),
             self.first_index, self.last_index))
         self.assertFalse(fl.is_within_bounds(
             iso8601.parse_date('2008-07-11'),
             self.first_index, self.last_index))


 def test_lister_bitbucket(swh_listers, requests_mock_datadir):
-    """Simple bitbucket listing should create scheduled tasks
+    """Simple bitbucket listing should create scheduled tasks (git, hg)

     """
     lister = swh_listers['bitbucket']

     lister.run()

     r = lister.scheduler.search_tasks(task_type='load-hg')
-    assert len(r) == 10
+    assert len(r) == 9

     for row in r:
-        assert row['type'] == 'load-hg'
-        # arguments check
         args = row['arguments']['args']
-        assert len(args) == 1
+        kwargs = row['arguments']['kwargs']
+
+        assert len(args) == 0
+        assert len(kwargs) == 1
+        url = kwargs['url']

-        url = args[0]
         assert url.startswith('https://bitbucket.org')

-        # kwargs
+        assert row['policy'] == 'recurring'
+        assert row['priority'] is None
+
+    r = lister.scheduler.search_tasks(task_type='load-git')
+    assert len(r) == 1
+
+    for row in r:
+        args = row['arguments']['args']
         kwargs = row['arguments']['kwargs']
-        assert kwargs == {}
+        assert len(args) == 0
+        assert len(kwargs) == 1
+        url = kwargs['url']
+
+        assert url.startswith('https://bitbucket.org')

         assert row['policy'] == 'recurring'
         assert row['priority'] is None
diff --git a/swh/lister/cgit/tests/test_lister.py b/swh/lister/cgit/tests/test_lister.py
index a140cdd..ca8ddd5 100644
--- a/swh/lister/cgit/tests/test_lister.py
+++ b/swh/lister/cgit/tests/test_lister.py
@@ -1,82 +1,82 @@
 # Copyright (C) 2019 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 from swh.lister import __version__


 def test_lister_no_page(requests_mock_datadir, swh_listers):
     lister = swh_listers['cgit']

     assert lister.url == 'https://git.savannah.gnu.org/cgit/'

     repos = list(lister.get_repos())
     assert len(repos) == 977

     assert repos[0] == 'https://git.savannah.gnu.org/cgit/elisp-es.git/'
     # note the url below is NOT a subpath of /cgit/
     assert repos[-1] == 'https://git.savannah.gnu.org/path/to/yetris.git/'  # noqa
     # note the url below is NOT on the same server
     assert repos[-2] == 'http://example.org/cgit/xstarcastle.git/'


 def test_lister_model(requests_mock_datadir, swh_listers):
     lister = swh_listers['cgit']

     repo = next(lister.get_repos())

     model = lister.build_model(repo)
     assert model == {
         'uid': 'https://git.savannah.gnu.org/cgit/elisp-es.git/',
         'name': 'elisp-es.git',
         'origin_type': 'git',
         'instance': 'git.savannah.gnu.org',
         'origin_url': 'https://git.savannah.gnu.org/git/elisp-es.git'
     }


 def test_lister_with_pages(requests_mock_datadir, swh_listers):
     lister = swh_listers['cgit']
     lister.url = 'https://git.tizen/cgit/'

     repos = list(lister.get_repos())
     # we should have 16 repos (listed on 3 pages)
     assert len(repos) == 16


 def test_lister_run(requests_mock_datadir, swh_listers):
     lister = swh_listers['cgit']
     lister.url = 'https://git.tizen/cgit/'
     lister.run()

     r = lister.scheduler.search_tasks(task_type='load-git')
     assert len(r) == 16

     for row in r:
         assert row['type'] == 'load-git'
         # arguments check
         args = row['arguments']['args']
-        assert len(args) == 1
-
-        url = args[0]
-        assert url.startswith('https://git.tizen')
+        assert len(args) == 0

         # kwargs
         kwargs = row['arguments']['kwargs']
-        assert kwargs == {}
+        assert len(kwargs) == 1
+        url = kwargs['url']
+        assert url.startswith('https://git.tizen')
+
         assert row['policy'] == 'recurring'
         assert row['priority'] is None


 def test_lister_requests(requests_mock_datadir, swh_listers):
     lister = swh_listers['cgit']
     lister.url = 'https://git.tizen/cgit/'
     lister.run()

     assert len(requests_mock_datadir.request_history) != 0
     for request in requests_mock_datadir.request_history:
         assert 'User-Agent' in request.headers
         user_agent = request.headers['User-Agent']
         assert 'Software Heritage Lister' in user_agent
         assert __version__ in user_agent
diff --git a/swh/lister/core/lister_base.py b/swh/lister/core/lister_base.py
index a9f1e02..e4253af 100644
--- a/swh/lister/core/lister_base.py
+++ b/swh/lister/core/lister_base.py
@@ -1,521 +1,522 @@
-# Copyright (C) 2015-2018 the Software Heritage developers
+# Copyright (C) 2015-2019 the Software Heritage developers
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 import abc
 import datetime
 import gzip
 import json
 import logging
 import os
 import re
 import time

 from sqlalchemy import create_engine, func
 from sqlalchemy.orm import sessionmaker
-from typing import Any, Type, Union
+from typing import Any, Dict, Type, Union

 from swh.core import config
 from swh.scheduler import get_scheduler, utils

 from .abstractattribute import AbstractAttribute

 logger = logging.getLogger(__name__)


 def utcnow():
     return datetime.datetime.now(tz=datetime.timezone.utc)


 class FetchError(RuntimeError):
     def __init__(self, response):
         self.response = response

     def __str__(self):
         return repr(self.response)


 class ListerBase(abc.ABC, config.SWHConfig):
     """Lister core base class.

     Generally a source code hosting service provides an API endpoint
     for listing the set of stored repositories. A Lister is the discovery
     service responsible for finding this list, all at once or sequentially
     by parts, and queueing local tasks to fetch and ingest the referenced
     repositories.

     The core method in this class is ingest_data. Any subclasses should be
     calling this method one or more times to fetch and ingest data from API
     endpoints. See swh.lister.core.lister_base.IndexingLister for example
     usage.

     This class cannot be instantiated. Any instantiable Lister descending
     from ListerBase must provide at least the required overrides.
(see member docstrings for details): Required Overrides: MODEL def transport_request def transport_response_to_string def transport_response_simplified def transport_quota_check Optional Overrides: def filter_before_inject def is_within_bounds """ MODEL = AbstractAttribute( 'Subclass type (not instance) of swh.lister.core.models.ModelBase ' 'customized for a specific service.' ) # type: Union[AbstractAttribute, Type[Any]] LISTER_NAME = AbstractAttribute( "Lister's name") # type: Union[AbstractAttribute, str] def transport_request(self, identifier): """Given a target endpoint identifier to query, try once to request it. Implementation of this method determines the network request protocol. Args: identifier (string): unique identifier for an endpoint query. e.g. If the service indexes lists of repositories by date and time of creation, this might be that as a formatted string. Or it might be an integer UID. Or it might be nothing. It depends on what the service needs. Returns: the entire request response Raises: Will catch internal transport-dependent connection exceptions and raise swh.lister.core.lister_base.FetchError instead. Other non-connection exceptions should propagate unchanged. """ pass def transport_response_to_string(self, response): """Convert the server response into a formatted string for logging. Implementation of this method depends on the shape of the network response object returned by the transport_request method. Args: response: the server response Returns: a pretty string of the response """ pass def transport_response_simplified(self, response): """Convert the server response into list of a dict for each repo in the response, mapping columns in the lister's MODEL class to repo data. Implementation of this method depends on the server API spec and the shape of the network response object returned by the transport_request method. Args: response: response object from the server. Returns: list of repo MODEL dicts ( eg. [{'uid': r['id'], etc.} for r in response.json()] ) """ pass def transport_quota_check(self, response): """Check server response to see if we're hitting request rate limits. Implementation of this method depends on the server communication protocol and API spec and the shape of the network response object returned by the transport_request method. Args: response (session response): complete API query response Returns: 1) must retry request? True/False 2) seconds to delay if True """ pass def filter_before_inject(self, models_list): """Filter models_list entries prior to injection in the db. This is ran directly after `transport_response_simplified`. Default implementation is to have no filtering. Args: models_list: list of dicts returned by transport_response_simplified. Returns: models_list with entries changed according to custom logic. """ return models_list def do_additional_checks(self, models_list): """Execute some additional checks on the model list (after the filtering). Default implementation is to run no check at all and to return the input as is. Args: models_list: list of dicts returned by transport_response_simplified. Returns: models_list with entries if checks ok, False otherwise """ return models_list def is_within_bounds(self, inner, lower=None, upper=None): """See if a sortable value is inside the range [lower,upper]. MAY BE OVERRIDDEN, for example if the server indexable* key is technically sortable but not automatically so. 
* - ( see: swh.lister.core.indexing_lister.IndexingLister ) Args: inner (sortable type): the value being checked lower (sortable type): optional lower bound upper (sortable type): optional upper bound Returns: whether inner is confined by the optional lower and upper bounds """ try: if lower is None and upper is None: return True elif lower is None: ret = inner <= upper elif upper is None: ret = inner >= lower else: ret = lower <= inner <= upper self.string_pattern_check(inner, lower, upper) except Exception as e: logger.error(str(e) + ': %s, %s, %s' % (('inner=%s%s' % (type(inner), inner)), ('lower=%s%s' % (type(lower), lower)), ('upper=%s%s' % (type(upper), upper))) ) raise return ret # You probably don't need to override anything below this line. DEFAULT_CONFIG = { 'scheduler': ('dict', { 'cls': 'remote', 'args': { 'url': 'http://localhost:5008/' }, }), 'lister': ('dict', { 'cls': 'local', 'args': { 'db': 'postgresql:///lister', }, }), } @property def CONFIG_BASE_FILENAME(self): # noqa: N802 return 'lister_%s' % self.LISTER_NAME @property def ADDITIONAL_CONFIG(self): # noqa: N802 return { 'credentials': ('dict', {}), 'cache_responses': ('bool', False), 'cache_dir': ('str', '~/.cache/swh/lister/%s' % self.LISTER_NAME), } INITIAL_BACKOFF = 10 MAX_RETRIES = 7 CONN_SLEEP = 10 def __init__(self, override_config=None): self.backoff = self.INITIAL_BACKOFF logger.debug('Loading config from %s' % self.CONFIG_BASE_FILENAME) self.config = self.parse_config_file( base_filename=self.CONFIG_BASE_FILENAME, additional_configs=[self.ADDITIONAL_CONFIG] ) self.config['cache_dir'] = os.path.expanduser(self.config['cache_dir']) if self.config['cache_responses']: config.prepare_folders(self.config, 'cache_dir') if override_config: self.config.update(override_config) logger.debug('%s CONFIG=%s' % (self, self.config)) self.scheduler = get_scheduler(**self.config['scheduler']) self.db_engine = create_engine(self.config['lister']['args']['db']) self.mk_session = sessionmaker(bind=self.db_engine) self.db_session = self.mk_session() def reset_backoff(self): """Reset exponential backoff timeout to initial level.""" self.backoff = self.INITIAL_BACKOFF def back_off(self): """Get next exponential backoff timeout.""" ret = self.backoff self.backoff *= 10 return ret def safely_issue_request(self, identifier): """Make network request with retries, rate quotas, and response logs. Protocol is handled by the implementation of the transport_request method. 
Args: identifier: resource identifier Returns: server response """ retries_left = self.MAX_RETRIES do_cache = self.config['cache_responses'] r = None while retries_left > 0: try: r = self.transport_request(identifier) except FetchError: # network-level connection error, try again logger.warning( 'connection error on %s: sleep for %d seconds' % (identifier, self.CONN_SLEEP)) time.sleep(self.CONN_SLEEP) retries_left -= 1 continue if do_cache: self.save_response(r) # detect throttling must_retry, delay = self.transport_quota_check(r) if must_retry: logger.warning( 'rate limited on %s: sleep for %f seconds' % (identifier, delay)) time.sleep(delay) else: # request ok break retries_left -= 1 if not retries_left: logger.warning( 'giving up on %s: max retries exceeded' % identifier) return r def db_query_equal(self, key, value): """Look in the db for a row with key == value Args: key: column key to look at value: value to look for in that column Returns: sqlalchemy.ext.declarative.declarative_base object with the given key == value """ if isinstance(key, str): key = self.MODEL.__dict__[key] return self.db_session.query(self.MODEL) \ .filter(key == value).first() def winnow_models(self, mlist, key, to_remove): """Given a list of models, remove any with matching some member of a list of values. Args: mlist (list of model rows): the initial list of models key (column): the column to filter on to_remove (list): if anything in mlist has column equal to one of the values in to_remove, it will be removed from the result Returns: A list of model rows starting from mlist minus any matching rows """ if isinstance(key, str): key = self.MODEL.__dict__[key] if to_remove: return mlist.filter(~key.in_(to_remove)).all() else: return mlist.all() def db_num_entries(self): """Return the known number of entries in the lister db""" return self.db_session.query(func.count('*')).select_from(self.MODEL) \ .scalar() def db_inject_repo(self, model_dict): """Add/update a new repo to the db and mark it last_seen now. Args: model_dict: dictionary mapping model keys to values Returns: new or updated sqlalchemy.ext.declarative.declarative_base object associated with the injection """ sql_repo = self.db_query_equal('uid', model_dict['uid']) if not sql_repo: sql_repo = self.MODEL(**model_dict) self.db_session.add(sql_repo) else: for k in model_dict: setattr(sql_repo, k, model_dict[k]) sql_repo.last_seen = utcnow() return sql_repo - def task_dict(self, origin_type, origin_url, **kwargs): + def task_dict(self, origin_type: str, + origin_url: str, **kwargs) -> Dict[str, Any]: """Return special dict format for the tasks list Args: origin_type (string) origin_url (string) Returns: the same information in a different form """ logger.debug('origin-url: %s, type: %s', origin_url, origin_type) _type = 'load-%s' % origin_type _policy = kwargs.get('policy', 'recurring') priority = kwargs.get('priority') kw = {'priority': priority} if priority else {} - return utils.create_task_dict(_type, _policy, origin_url, **kw) + return utils.create_task_dict(_type, _policy, url=origin_url, **kw) def string_pattern_check(self, a, b, c=None): """When comparing indexable types in is_within_bounds, complex strings may not be allowed to differ in basic structure. If they do, it could be a sign of not understanding the data well. For instance, an ISO 8601 time string cannot be compared against its urlencoded equivalent, but this is an easy mistake to accidentally make. This method acts as a friendly sanity check. 
Args: a (string): inner component of the is_within_bounds method b (string): lower component of the is_within_bounds method c (string): upper component of the is_within_bounds method Returns: nothing Raises: TypeError if strings a, b, and c don't conform to the same basic pattern. """ if isinstance(a, str): a_pattern = re.sub('[a-zA-Z0-9]', '[a-zA-Z0-9]', re.escape(a)) if (isinstance(b, str) and (re.match(a_pattern, b) is None) or isinstance(c, str) and (re.match(a_pattern, c) is None)): logger.debug(a_pattern) raise TypeError('incomparable string patterns detected') def inject_repo_data_into_db(self, models_list): """Inject data into the db. Args: models_list: list of dicts mapping keys from the db model for each repo to be injected Returns: dict of uid:sql_repo pairs """ injected_repos = {} for m in models_list: injected_repos[m['uid']] = self.db_inject_repo(m) return injected_repos def schedule_missing_tasks(self, models_list, injected_repos): """Find any newly created db entries that do not have been scheduled yet. Args: models_list ([Model]): List of dicts mapping keys in the db model for each repo injected_repos ([dict]): Dict of uid:sql_repo pairs that have just been created Returns: Nothing. Modifies injected_repos. """ tasks = {} def _task_key(m): return '%s-%s' % ( m['type'], json.dumps(m['arguments'], sort_keys=True) ) for m in models_list: ir = injected_repos[m['uid']] if not ir.task_id: # Patching the model instance to add the policy/priority task # scheduling if 'policy' in self.config: m['policy'] = self.config['policy'] if 'priority' in self.config: m['priority'] = self.config['priority'] task_dict = self.task_dict(**m) tasks[_task_key(task_dict)] = (ir, m, task_dict) new_tasks = self.scheduler.create_tasks( (task_dicts for (_, _, task_dicts) in tasks.values())) for task in new_tasks: ir, m, _ = tasks[_task_key(task)] ir.task_id = task['id'] def ingest_data(self, identifier, checks=False): """The core data fetch sequence. Request server endpoint. Simplify and filter response list of repositories. Inject repo information into local db. Queue loader tasks for linked repositories. Args: identifier: Resource identifier. checks (bool): Additional checks required """ # Request (partial?) list of repositories info response = self.safely_issue_request(identifier) if not response: return response, [] models_list = self.transport_response_simplified(response) models_list = self.filter_before_inject(models_list) if checks: models_list = self.do_additional_checks(models_list) if not models_list: return response, [] # inject into local db injected = self.inject_repo_data_into_db(models_list) # queue workers self.schedule_missing_tasks(models_list, injected) return response, injected def save_response(self, response): """Log the response from a server request to a cache dir. 
Args: response: full server response cache_dir: system path for cache dir Returns: nothing """ datepath = utcnow().isoformat() fname = os.path.join( self.config['cache_dir'], datepath + '.gz', ) with gzip.open(fname, 'w') as f: f.write(bytes( self.transport_response_to_string(response), 'UTF-8' )) diff --git a/swh/lister/github/tests/test_lister.py b/swh/lister/github/tests/test_lister.py index db11c35..f2c085f 100644 --- a/swh/lister/github/tests/test_lister.py +++ b/swh/lister/github/tests/test_lister.py @@ -1,83 +1,81 @@ # Copyright (C) 2017-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import re import unittest import requests_mock from datetime import datetime, timedelta from swh.lister.core.tests.test_lister import HttpListerTester from swh.lister.github.lister import GitHubLister class GitHubListerTester(HttpListerTester, unittest.TestCase): Lister = GitHubLister test_re = re.compile(r'/repositories\?since=([^?&]+)') lister_subdir = 'github' good_api_response_file = 'data/https_api.github.com/first_response.json' bad_api_response_file = 'data/https_api.github.com/empty_response.json' first_index = 0 last_index = 369 entries_per_page = 100 convert_type = int def response_headers(self, request): headers = {'X-RateLimit-Remaining': '1'} if self.request_index(request) == self.first_index: headers.update({ 'Link': ';' ' rel="next",' ';' ' rel="first"' % self.last_index }) else: headers.update({ 'Link': ';' ' rel="first"' }) return headers def mock_rate_quota(self, n, request, context): self.rate_limit += 1 context.status_code = 403 context.headers['X-RateLimit-Remaining'] = '0' one_second = int((datetime.now() + timedelta(seconds=1.5)).timestamp()) context.headers['X-RateLimit-Reset'] = str(one_second) return '{"error":"dummy"}' @requests_mock.Mocker() def test_scheduled_tasks(self, http_mocker): self.scheduled_tasks_test( 'data/https_api.github.com/next_response.json', 876, http_mocker) def test_lister_github(swh_listers, requests_mock_datadir): """Simple github listing should create scheduled tasks """ lister = swh_listers['github'] lister.run() r = lister.scheduler.search_tasks(task_type='load-git') assert len(r) == 100 for row in r: assert row['type'] == 'load-git' # arguments check args = row['arguments']['args'] - assert len(args) == 1 - - url = args[0] - assert url.startswith('https://github.com') + assert len(args) == 0 # kwargs kwargs = row['arguments']['kwargs'] - assert kwargs == {} + url = kwargs['url'] + assert url.startswith('https://github.com') assert row['policy'] == 'recurring' assert row['priority'] is None diff --git a/swh/lister/gitlab/tests/test_lister.py b/swh/lister/gitlab/tests/test_lister.py index 2201bea..0d02423 100644 --- a/swh/lister/gitlab/tests/test_lister.py +++ b/swh/lister/gitlab/tests/test_lister.py @@ -1,68 +1,66 @@ # Copyright (C) 2017-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import re import unittest from datetime import datetime, timedelta from swh.lister.core.tests.test_lister import HttpListerTesterBase from swh.lister.gitlab.lister import GitLabLister logger = logging.getLogger(__name__) class GitLabListerTester(HttpListerTesterBase, unittest.TestCase): Lister = 
GitLabLister test_re = re.compile(r'^.*/projects.*page=(\d+).*') lister_subdir = 'gitlab' good_api_response_file = 'data/gitlab.com/api_response.json' bad_api_response_file = 'data/gitlab.com/api_empty_response.json' first_index = 1 entries_per_page = 10 convert_type = int def response_headers(self, request): headers = {'RateLimit-Remaining': '1'} if self.request_index(request) == self.first_index: headers.update({ 'x-next-page': '3', }) return headers def mock_rate_quota(self, n, request, context): self.rate_limit += 1 context.status_code = 403 context.headers['RateLimit-Remaining'] = '0' one_second = int((datetime.now() + timedelta(seconds=1.5)).timestamp()) context.headers['RateLimit-Reset'] = str(one_second) return '{"error":"dummy"}' def test_lister_gitlab(swh_listers, requests_mock_datadir): lister = swh_listers['gitlab'] lister.run() r = lister.scheduler.search_tasks(task_type='load-git') assert len(r) == 10 for row in r: assert row['type'] == 'load-git' # arguments check args = row['arguments']['args'] - assert len(args) == 1 - - url = args[0] - assert url.startswith('https://gitlab.com') + assert len(args) == 0 # kwargs kwargs = row['arguments']['kwargs'] - assert kwargs == {} + url = kwargs['url'] + assert url.startswith('https://gitlab.com') assert row['policy'] == 'recurring' assert row['priority'] is None diff --git a/swh/lister/packagist/lister.py b/swh/lister/packagist/lister.py index fa2d581..5461ed6 100644 --- a/swh/lister/packagist/lister.py +++ b/swh/lister/packagist/lister.py @@ -1,98 +1,98 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import random -from typing import Any, List, Mapping +from typing import Any, Dict, List, Mapping from swh.scheduler import utils from swh.lister.core.simple_lister import SimpleLister from swh.lister.core.lister_transports import ListerOnePageApiTransport from .models import PackagistModel logger = logging.getLogger(__name__) def compute_package_url(repo_name: str) -> str: """Compute packgist package url from repo name. """ return 'https://repo.packagist.org/p/%s.json' % repo_name class PackagistLister(ListerOnePageApiTransport, SimpleLister): """List packages available in the Packagist package manager. The lister sends the request to the url present in the class variable `PAGE`, to receive a list of all the package names present in the Packagist package manager. Iterates over all the packages and constructs the metadata url of the package from the name of the package and creates a loading task. Task: Type: load-packagist Policy: recurring Args: Example: Type: load-packagist Policy: recurring Args: 'hypejunction/hypegamemechanics' 'https://repo.packagist.org/p/hypejunction/hypegamemechanics.json' """ MODEL = PackagistModel LISTER_NAME = 'packagist' PAGE = 'https://packagist.org/packages/list.json' instance = 'packagist' def __init__(self, override_config=None): ListerOnePageApiTransport .__init__(self) SimpleLister.__init__(self, override_config=override_config) def task_dict(self, origin_type: str, origin_url: str, - **kwargs: Mapping[str, str]) -> Mapping[str, str]: + **kwargs: Mapping[str, str]) -> Dict[str, Any]: """Return task format dict This is overridden from the lister_base as more information is needed for the ingestion task creation. 
""" return utils.create_task_dict( 'load-%s' % origin_type, kwargs.get('policy', 'recurring'), kwargs.get('name'), origin_url, retries_left=3) def list_packages(self, response: Any) -> List[str]: """List the actual packagist origins from the response. """ response = json.loads(response.text) packages = [name for name in response['packageNames']] logger.debug('Number of packages: %s', len(packages)) random.shuffle(packages) return packages def get_model_from_repo(self, repo_name: str) -> Mapping[str, str]: """Transform from repository representation to model """ url = compute_package_url(repo_name) return { 'uid': repo_name, 'name': repo_name, 'full_name': repo_name, 'html_url': url, 'origin_url': url, 'origin_type': 'packagist', } diff --git a/swh/lister/phabricator/tests/test_lister.py b/swh/lister/phabricator/tests/test_lister.py index a433226..dcf76c0 100644 --- a/swh/lister/phabricator/tests/test_lister.py +++ b/swh/lister/phabricator/tests/test_lister.py @@ -1,142 +1,140 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import re import json import logging import unittest import requests_mock from swh.lister.core.tests.test_lister import HttpListerTester from swh.lister.phabricator.lister import PhabricatorLister from swh.lister.phabricator.lister import get_repo_url logger = logging.getLogger(__name__) class PhabricatorListerTester(HttpListerTester, unittest.TestCase): Lister = PhabricatorLister # first request will have the after parameter empty test_re = re.compile(r'\&after=([^?&]*)') lister_subdir = 'phabricator' good_api_response_file = 'data/api_first_response.json' good_api_response_undefined_protocol = \ 'data/api_response_undefined_protocol.json' bad_api_response_file = 'data/api_empty_response.json' # first_index must be retrieved through a bootstrap process for Phabricator first_index = None last_index = 12 entries_per_page = 10 convert_type = int def request_index(self, request): """(Override) This is needed to emulate the listing bootstrap when no min_bound is provided to run """ m = self.test_re.search(request.path_url) idx = m.group(1) if idx not in ('', 'None'): return int(idx) def get_fl(self, override_config=None): """(Override) Retrieve an instance of fake lister (fl). 
""" if override_config or self.fl is None: credentials = {'phabricator': {'fake': [ {'password': 'toto'} ]}} override_config = dict(credentials=credentials, **(override_config or {})) self.fl = self.Lister(url='https://fakeurl', instance='fake', override_config=override_config) self.fl.INITIAL_BACKOFF = 1 self.fl.reset_backoff() return self.fl def test_get_repo_url(self): f = open('swh/lister/%s/tests/%s' % (self.lister_subdir, self.good_api_response_file)) api_response = json.load(f) repos = api_response['result']['data'] for repo in repos: self.assertEqual( 'https://forge.softwareheritage.org/source/%s.git' % (repo['fields']['shortName']), get_repo_url(repo['attachments']['uris']['uris'])) f = open('swh/lister/%s/tests/%s' % (self.lister_subdir, self.good_api_response_undefined_protocol)) repo = json.load(f) self.assertEqual( 'https://svn.blender.org/svnroot/bf-blender/', get_repo_url(repo['attachments']['uris']['uris'])) @requests_mock.Mocker() def test_scheduled_tasks(self, http_mocker): self.scheduled_tasks_test('data/api_next_response.json', 23, http_mocker) @requests_mock.Mocker() def test_scheduled_tasks_multiple_instances(self, http_mocker): fl = self.create_fl_with_db(http_mocker) # list first Phabricator instance fl.run() fl.instance = 'other_fake' fl.config['credentials'] = { 'phabricator': { 'other_fake': [{ 'password': 'foo' }] } } # list second Phabricator instance hosting repositories having # same ids as those listed from the first instance self.good_api_response_file = \ 'data/api_first_response_other_instance.json' self.last_index = 13 fl.run() # check expected number of loading tasks self.assertEqual(len(self.scheduler_tasks), 2 * self.entries_per_page) # check tasks are not disabled for task in self.scheduler_tasks: self.assertTrue(task['status'] != 'disabled') def test_phabricator_lister(lister_phabricator, requests_mock_datadir): lister = lister_phabricator assert lister.url == lister.DEFAULT_URL assert lister.instance == 'forge.softwareheritage.org' lister.run() r = lister.scheduler.search_tasks(task_type='load-git') assert len(r) == 10 for row in r: assert row['type'] == 'load-git' # arguments check args = row['arguments']['args'] - assert len(args) == 1 - - url = args[0] - assert lister.instance in url + assert len(args) == 0 # kwargs kwargs = row['arguments']['kwargs'] - assert kwargs == {} + url = kwargs['url'] + assert lister.instance in url assert row['policy'] == 'recurring' assert row['priority'] is None