diff --git a/swh/lister/gitlab/lister.py b/swh/lister/gitlab/lister.py
index 3032835..5937256 100644
--- a/swh/lister/gitlab/lister.py
+++ b/swh/lister/gitlab/lister.py
@@ -1,239 +1,258 @@
 # Copyright (C) 2018-2021 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information

 from dataclasses import asdict, dataclass
 import logging
 import random
 from typing import Any, Dict, Iterator, Optional, Tuple
 from urllib.parse import parse_qs, urlencode, urlparse

 import iso8601
 import requests
 from requests.exceptions import HTTPError
 from requests.status_codes import codes
 from tenacity.before_sleep import before_sleep_log

 from swh.lister import USER_AGENT
 from swh.lister.pattern import CredentialsType, Lister
 from swh.lister.utils import is_retryable_exception, retry_attempt, throttling_retry
 from swh.scheduler.model import ListedOrigin

 logger = logging.getLogger(__name__)


 @dataclass
 class GitLabListerState:
     """State of the GitLabLister"""

     last_seen_next_link: Optional[str] = None
     """Last link header (not visited yet) during an incremental pass"""


 Repository = Dict[str, Any]


 @dataclass
 class PageResult:
     """Result from a query to a gitlab project api page."""

     repositories: Optional[Tuple[Repository, ...]] = None
     next_page: Optional[str] = None


 def _if_rate_limited(retry_state) -> bool:
     """Custom tenacity retry predicate for handling HTTP responses with
     status code 403 and a specific ratelimit header.

     """
     attempt = retry_attempt(retry_state)
     if attempt.failed:
         exc = attempt.exception()
         return (
             isinstance(exc, HTTPError)
             and exc.response.status_code == codes.forbidden
             and int(exc.response.headers.get("RateLimit-Remaining", "0")) == 0
         ) or is_retryable_exception(exc)
     return False


 def _parse_id_after(url: Optional[str]) -> Optional[int]:
     """Given a url, extract and return the value of the 'id_after' query
     parameter, or None. This is the repository id used for pagination purposes.

     """
     if not url:
         return None
     # link: https://${project-api}/?...&id_after=2x...
     query_data = parse_qs(urlparse(url).query)
     page = query_data.get("id_after")
     if page and len(page) > 0:
         return int(page[0])
     return None


 class GitLabLister(Lister[GitLabListerState, PageResult]):
     """List origins for a gitlab instance.

     When running in incremental mode, the lister lists all repositories,
     starting with the `last_seen_next_link` stored in the scheduler backend.

     Args:
         scheduler: a scheduler instance
         url: the api v4 url of the gitlab instance to visit
             (e.g. https://gitlab.com/api/v4/)
         instance: a specific instance name (e.g. gitlab, tor, git-kernel, ...),
             url network location will be used if not provided
         incremental: defines if incremental listing is activated or not

     """

     LISTER_NAME = "gitlab"

     def __init__(
         self,
         scheduler,
         url: str,
         instance: Optional[str] = None,
         credentials: Optional[CredentialsType] = None,
         incremental: bool = False,
     ):
         super().__init__(
             scheduler=scheduler,
             url=url.rstrip("/"),
             instance=instance,
             credentials=credentials,
         )
         self.incremental = incremental
         self.last_page: Optional[str] = None
+        self.per_page = 100

         self.session = requests.Session()
         self.session.headers.update(
             {"Accept": "application/json", "User-Agent": USER_AGENT}
         )

         if len(self.credentials) > 0:
             cred = random.choice(self.credentials)
             logger.info(
                 "Using %s credentials from user %s", self.instance, cred["username"]
             )
             api_token = cred["password"]
             if api_token:
                 self.session.headers["Authorization"] = f"Bearer {api_token}"

     def state_from_dict(self, d: Dict[str, Any]) -> GitLabListerState:
         return GitLabListerState(**d)

     def state_to_dict(self, state: GitLabListerState) -> Dict[str, Any]:
         return asdict(state)

     @throttling_retry(
         retry=_if_rate_limited, before_sleep=before_sleep_log(logger, logging.WARNING)
     )
     def get_page_result(self, url: str) -> PageResult:
         logger.debug("Fetching URL %s", url)
         response = self.session.get(url)
         if response.status_code != 200:
             logger.warning(
                 "Unexpected HTTP status code %s on %s: %s",
                 response.status_code,
                 response.url,
                 response.content,
             )
-        response.raise_for_status()
+
+        # GitLab API can return errors 500 when listing projects.
+        # https://gitlab.com/gitlab-org/gitlab/-/issues/262629
+        # To avoid ending the listing prematurely, skip buggy URLs and move
+        # to next pages.
+        if response.status_code == 500:
+            id_after = _parse_id_after(url)
+            assert id_after is not None
+            while True:
+                next_id_after = id_after + self.per_page
+                url = url.replace(f"id_after={id_after}", f"id_after={next_id_after}")
+                response = self.session.get(url)
+                if response.status_code == 200:
+                    break
+                else:
+                    id_after = next_id_after
+        else:
+            response.raise_for_status()
+
         repositories: Tuple[Repository, ...] = tuple(response.json())
         if hasattr(response, "links") and response.links.get("next"):
             next_page = response.links["next"]["url"]
         else:
             next_page = None

         return PageResult(repositories, next_page)

     def page_url(self, id_after: Optional[int] = None) -> str:
         parameters = {
             "pagination": "keyset",
             "order_by": "id",
             "sort": "asc",
             "simple": "true",
-            "per_page": "100",
+            "per_page": f"{self.per_page}",
         }
         if id_after is not None:
             parameters["id_after"] = str(id_after)
         return f"{self.url}/projects?{urlencode(parameters)}"

     def get_pages(self) -> Iterator[PageResult]:
         next_page: Optional[str]
         if self.incremental and self.state and self.state.last_seen_next_link:
             next_page = self.state.last_seen_next_link
         else:
             next_page = self.page_url()

         while next_page:
             self.last_page = next_page
             page_result = self.get_page_result(next_page)
             yield page_result
             next_page = page_result.next_page

     def get_origins_from_page(self, page_result: PageResult) -> Iterator[ListedOrigin]:
         assert self.lister_obj.id is not None

         repositories = page_result.repositories if page_result.repositories else []
         for repo in repositories:
             yield ListedOrigin(
                 lister_id=self.lister_obj.id,
                 url=repo["http_url_to_repo"],
                 visit_type="git",
                 last_update=iso8601.parse_date(repo["last_activity_at"]),
             )

     def commit_page(self, page_result: PageResult) -> None:
         """Update currently stored state using the latest listed "next" page if
         relevant.

         Relevancy is determined by the next_page link whose 'page' id must be
         strictly greater than the currently stored one.

         Note: this is a noop for full listing mode

         """
         if self.incremental:
             # link: https://${project-api}/?...&page=2x...
             next_page = page_result.next_page
             if not next_page and self.last_page:
                 next_page = self.last_page

             if next_page:
                 id_after = _parse_id_after(next_page)
                 previous_next_page = self.state.last_seen_next_link
                 previous_id_after = _parse_id_after(previous_next_page)

                 if previous_next_page is None or (
                     previous_id_after and id_after and previous_id_after < id_after
                 ):
                     self.state.last_seen_next_link = next_page

     def finalize(self) -> None:
         """finalize the lister state when relevant (see `fn:commit_page` for details)

         Note: this is a noop for full listing mode

         """
         next_page = self.state.last_seen_next_link
         if self.incremental and next_page:
             # link: https://${project-api}/?...&page=2x...
             next_id_after = _parse_id_after(next_page)
             scheduler_state = self.get_state_from_scheduler()
             previous_next_id_after = _parse_id_after(
                 scheduler_state.last_seen_next_link
             )

             if (not previous_next_id_after and next_id_after) or (
                 previous_next_id_after
                 and next_id_after
                 and previous_next_id_after < next_id_after
             ):
                 self.updated = True
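For illustration, a minimal sketch of the skip-ahead strategy that the new
get_page_result code above applies when GitLab answers HTTP 500 on a
keyset-paginated page: the id_after cursor is advanced by per_page until a
page responds 200. The names _id_after, skip_buggy_page, fetch and fake_fetch,
as well as the example URL, are hypothetical and only serve this sketch.

# Hypothetical, self-contained sketch of the skip-ahead loop; not the patched
# lister itself.
from typing import Callable, Tuple
from urllib.parse import parse_qs, urlparse


def _id_after(url: str) -> int:
    # Extract the keyset cursor from the URL, in the spirit of _parse_id_after.
    return int(parse_qs(urlparse(url).query)["id_after"][0])


def skip_buggy_page(
    url: str, per_page: int, fetch: Callable[[str], int]
) -> Tuple[str, int]:
    """Advance id_after by per_page until ``fetch`` reports HTTP 200."""
    id_after = _id_after(url)
    while True:
        next_id_after = id_after + per_page
        url = url.replace(f"id_after={id_after}", f"id_after={next_id_after}")
        status = fetch(url)
        if status == 200:
            return url, status
        id_after = next_id_after


if __name__ == "__main__":
    # Pages at id_after=100 and id_after=200 keep failing; id_after=300 is fine.
    statuses = {100: 500, 200: 500, 300: 200}

    def fake_fetch(url: str) -> int:
        return statuses.get(_id_after(url), 200)

    url, status = skip_buggy_page(
        "https://gitlab.example.org/api/v4/projects?id_after=100", 100, fake_fetch
    )
    print(url, status)  # URL now ends with id_after=300, status is 200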
"gite.lirmm.fr" lister = GitLabLister(swh_scheduler, url=api_url(instance)) response1 = gitlab_page_response(datadir, instance, 1) response2 = gitlab_page_response(datadir, instance, 2) requests_mock.get( lister.page_url(), [{"json": response1, "headers": {"Link": f"<{lister.page_url(2)}>; rel=next"}}], additional_matcher=_match_request, ) requests_mock.get( lister.page_url(2), [{"json": response2}], additional_matcher=_match_request, ) listed_result = lister.run() expected_nb_origins = len(response1) + len(response2) assert listed_result == ListerStats(pages=2, origins=expected_nb_origins) scheduler_origins = lister.scheduler.get_listed_origins( lister.lister_obj.id ).results assert len(scheduler_origins) == expected_nb_origins for listed_origin in scheduler_origins: assert listed_origin.visit_type == "git" assert listed_origin.url.startswith(f"https://{instance}") assert listed_origin.last_update is not None def test_lister_gitlab_incremental(swh_scheduler, requests_mock, datadir): """Gitlab lister supports incremental visits """ instance = "gite.lirmm.fr" url = api_url(instance) lister = GitLabLister(swh_scheduler, url=url, instance=instance, incremental=True) url_page1 = lister.page_url() response1 = gitlab_page_response(datadir, instance, 1) url_page2 = lister.page_url(2) response2 = gitlab_page_response(datadir, instance, 2) url_page3 = lister.page_url(3) response3 = gitlab_page_response(datadir, instance, 3) requests_mock.get( url_page1, [{"json": response1, "headers": {"Link": f"<{url_page2}>; rel=next"}}], additional_matcher=_match_request, ) requests_mock.get( url_page2, [{"json": response2}], additional_matcher=_match_request, ) listed_result = lister.run() expected_nb_origins = len(response1) + len(response2) assert listed_result == ListerStats(pages=2, origins=expected_nb_origins) assert lister.state.last_seen_next_link == url_page2 lister2 = GitLabLister(swh_scheduler, url=url, instance=instance, incremental=True) # Lister will start back at the last stop requests_mock.get( url_page2, [{"json": response2, "headers": {"Link": f"<{url_page3}>; rel=next"}}], additional_matcher=_match_request, ) requests_mock.get( url_page3, [{"json": response3}], additional_matcher=_match_request, ) listed_result2 = lister2.run() assert listed_result2 == ListerStats( pages=2, origins=len(response2) + len(response3) ) assert lister2.state.last_seen_next_link == url_page3 assert lister.lister_obj.id == lister2.lister_obj.id scheduler_origins = lister2.scheduler.get_listed_origins( lister2.lister_obj.id ).results assert len(scheduler_origins) == len(response1) + len(response2) + len(response3) for listed_origin in scheduler_origins: assert listed_origin.visit_type == "git" assert listed_origin.url.startswith(f"https://{instance}") assert listed_origin.last_update is not None def test_lister_gitlab_rate_limit(swh_scheduler, requests_mock, datadir, mocker): """Gitlab lister supports rate-limit """ instance = "gite.lirmm.fr" url = api_url(instance) lister = GitLabLister(swh_scheduler, url=url, instance=instance) url_page1 = lister.page_url() response1 = gitlab_page_response(datadir, instance, 1) url_page2 = lister.page_url(2) response2 = gitlab_page_response(datadir, instance, 2) requests_mock.get( url_page1, [{"json": response1, "headers": {"Link": f"<{url_page2}>; rel=next"}}], additional_matcher=_match_request, ) requests_mock.get( url_page2, [ # rate limited twice {"status_code": codes.forbidden, "headers": {"RateLimit-Remaining": "0"}}, {"status_code": codes.forbidden, "headers": 
{"RateLimit-Remaining": "0"}}, # ok {"json": response2}, ], additional_matcher=_match_request, ) # To avoid this test being too slow, we mock sleep within the retry behavior mock_sleep = mocker.patch.object(lister.get_page_result.retry, "sleep") listed_result = lister.run() expected_nb_origins = len(response1) + len(response2) assert listed_result == ListerStats(pages=2, origins=expected_nb_origins) assert_sleep_calls(mocker, mock_sleep, [1, WAIT_EXP_BASE]) @pytest.mark.parametrize("status_code", [502, 503, 520]) def test_lister_gitlab_http_errors( swh_scheduler, requests_mock, datadir, mocker, status_code ): """Gitlab lister should retry requests when encountering HTTP 50x errors """ instance = "gite.lirmm.fr" url = api_url(instance) lister = GitLabLister(swh_scheduler, url=url, instance=instance) url_page1 = lister.page_url() response1 = gitlab_page_response(datadir, instance, 1) url_page2 = lister.page_url(2) response2 = gitlab_page_response(datadir, instance, 2) requests_mock.get( url_page1, [{"json": response1, "headers": {"Link": f"<{url_page2}>; rel=next"}}], additional_matcher=_match_request, ) requests_mock.get( url_page2, [ # first request ends up with error {"status_code": status_code}, # second request is ok {"json": response2}, ], additional_matcher=_match_request, ) # To avoid this test being too slow, we mock sleep within the retry behavior mock_sleep = mocker.patch.object(lister.get_page_result.retry, "sleep") listed_result = lister.run() expected_nb_origins = len(response1) + len(response2) assert listed_result == ListerStats(pages=2, origins=expected_nb_origins) assert_sleep_calls(mocker, mock_sleep, [1]) +def test_lister_gitlab_http_error_500(swh_scheduler, requests_mock, datadir): + """Gitlab lister should skip buggy URL and move to next page. 
+ + """ + instance = "gite.lirmm.fr" + url = api_url(instance) + lister = GitLabLister(swh_scheduler, url=url, instance=instance) + + url_page1 = lister.page_url() + response1 = gitlab_page_response(datadir, instance, 1) + url_page2 = lister.page_url(lister.per_page) + url_page3 = lister.page_url(2 * lister.per_page) + response3 = gitlab_page_response(datadir, instance, 3) + + requests_mock.get( + url_page1, + [{"json": response1, "headers": {"Link": f"<{url_page2}>; rel=next"}}], + additional_matcher=_match_request, + ) + requests_mock.get( + url_page2, [{"status_code": 500},], additional_matcher=_match_request, + ) + + requests_mock.get( + url_page3, [{"json": response3}], additional_matcher=_match_request, + ) + + listed_result = lister.run() + + expected_nb_origins = len(response1) + len(response3) + assert listed_result == ListerStats(pages=2, origins=expected_nb_origins) + + def test_lister_gitlab_credentials(swh_scheduler): """Gitlab lister supports credentials configuration """ instance = "gitlab" credentials = { "gitlab": {instance: [{"username": "user", "password": "api-token"}]} } url = api_url(instance) lister = GitLabLister( scheduler=swh_scheduler, url=url, instance=instance, credentials=credentials ) assert lister.session.headers["Authorization"] == "Bearer api-token" @pytest.mark.parametrize("url", [api_url("gitlab").rstrip("/"), api_url("gitlab"),]) def test_lister_gitlab_url_computation(url, swh_scheduler): lister = GitLabLister(scheduler=swh_scheduler, url=url) assert not lister.url.endswith("/") page_url = lister.page_url() # ensure the generated url contains the separated / assert page_url.startswith(f"{lister.url}/projects") @pytest.mark.parametrize( "url,expected_result", [ (None, None), ("http://dummy/?query=1", None), ("http://dummy/?foo=bar&id_after=1&some=result", 1), ("http://dummy/?foo=bar&id_after=&some=result", None), ], ) def test__parse_id_after(url, expected_result): assert _parse_id_after(url) == expected_result