diff --git a/swh/web/config.py b/swh/web/config.py index 4c675cef..86e32bb1 100644 --- a/swh/web/config.py +++ b/swh/web/config.py @@ -1,241 +1,242 @@ # Copyright (C) 2017-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import os from typing import Any, Dict from swh.core import config from swh.counters import get_counters from swh.indexer.storage import get_indexer_storage from swh.scheduler import get_scheduler from swh.search import get_search from swh.storage import get_storage from swh.vault import get_vault from swh.web import settings SWH_WEB_SERVER_NAME = "archive.softwareheritage.org" SWH_WEB_INTERNAL_SERVER_NAME = "archive.internal.softwareheritage.org" SWH_WEB_STAGING_SERVER_NAMES = [ "webapp.staging.swh.network", "webapp.internal.staging.swh.network", ] SETTINGS_DIR = os.path.dirname(settings.__file__) DEFAULT_CONFIG = { "allowed_hosts": ("list", []), "storage": ( "dict", { "cls": "remote", "url": "http://127.0.0.1:5002/", "timeout": 10, }, ), "indexer_storage": ( "dict", { "cls": "remote", "url": "http://127.0.0.1:5007/", "timeout": 1, }, ), "counters": ( "dict", { "cls": "remote", "url": "http://127.0.0.1:5011/", "timeout": 1, }, ), "search": ( "dict", { "cls": "remote", "url": "http://127.0.0.1:5010/", "timeout": 10, }, ), "search_config": ( "dict", { "metadata_backend": "swh-indexer-storage", }, # or "swh-search" ), "log_dir": ("string", "/tmp/swh/log"), "debug": ("bool", False), "serve_assets": ("bool", False), "host": ("string", "127.0.0.1"), "port": ("int", 5004), "secret_key": ("string", "development key"), # do not display code highlighting for content > 1MB "content_display_max_size": ("int", 5 * 1024 * 1024), "snapshot_content_max_size": ("int", 1000), "throttling": ( "dict", { "cache_uri": None, # production: memcached as cache (127.0.0.1:11211) # 
development: in-memory cache so None "scopes": { "swh_api": { "limiter_rate": {"default": "120/h"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_api_origin_search": { "limiter_rate": {"default": "10/m"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_vault_cooking": { "limiter_rate": {"default": "120/h", "GET": "60/m"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_save_origin": { "limiter_rate": {"default": "120/h", "POST": "10/h"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_api_origin_visit_latest": { "limiter_rate": {"default": "700/m"}, "exempted_networks": ["127.0.0.0/8"], }, }, }, ), "vault": ( "dict", { "cls": "remote", "args": { "url": "http://127.0.0.1:5005/", }, }, ), "scheduler": ("dict", {"cls": "remote", "url": "http://127.0.0.1:5008/"}), "development_db": ("string", os.path.join(SETTINGS_DIR, "db.sqlite3")), "test_db": ("dict", {"name": "swh-web-test"}), "production_db": ("dict", {"name": "swh-web"}), "deposit": ( "dict", { "private_api_url": "https://deposit.softwareheritage.org/1/private/", "private_api_user": "swhworker", "private_api_password": "some-password", }, ), "e2e_tests_mode": ("bool", False), "es_workers_index_url": ("string", ""), "history_counters_url": ( "string", ( "http://counters1.internal.softwareheritage.org:5011" "/counters_history/history.json" ), ), "client_config": ("dict", {}), "keycloak": ("dict", {"server_url": "", "realm_name": ""}), "graph": ( "dict", { "server_url": "http://graph.internal.softwareheritage.org:5009/graph/", "max_edges": {"staff": 0, "user": 100000, "anonymous": 1000}, }, ), "status": ( "dict", { "server_url": "https://status.softwareheritage.org/", "json_path": "1.0/status/578e5eddcdc0cc7951000520", }, ), "counters_backend": ("string", "swh-storage"), # or "swh-counters" "staging_server_names": ("list", SWH_WEB_STAGING_SERVER_NAMES), "instance_name": ("str", "archive-test.softwareheritage.org"), "give": ("dict", {"public_key": "", "token": ""}), "features": ("dict", {"add_forge_now": True}), 
"add_forge_now": ("dict", {"email_address": "add-forge-now@example.com"}), "swh_extra_django_apps": ( "list", [ "swh.web.add_forge_now", "swh.web.archive_coverage", "swh.web.badges", "swh.web.banners", "swh.web.deposit", "swh.web.inbound_email", "swh.web.jslicenses", "swh.web.mailmap", "swh.web.metrics", "swh.web.save_code_now", + "swh.web.save_origin_webhooks", "swh.web.vault", ], ), } swhweb_config: Dict[str, Any] = {} def get_config(config_file="web/web"): """Read the configuration file `config_file`. If an environment variable SWH_CONFIG_FILENAME is defined, this takes precedence over the config_file parameter. In any case, update the app with parameters (secret_key, conf) and return the parsed configuration as a dict. If no configuration file is provided, return a default configuration. """ if not swhweb_config: config_filename = os.environ.get("SWH_CONFIG_FILENAME") if config_filename: config_file = config_filename cfg = config.load_named_config(config_file, DEFAULT_CONFIG) swhweb_config.update(cfg) config.prepare_folders(swhweb_config, "log_dir") if swhweb_config.get("search"): swhweb_config["search"] = get_search(**swhweb_config["search"]) else: swhweb_config["search"] = None swhweb_config["storage"] = get_storage(**swhweb_config["storage"]) swhweb_config["vault"] = get_vault(**swhweb_config["vault"]) swhweb_config["indexer_storage"] = get_indexer_storage( **swhweb_config["indexer_storage"] ) swhweb_config["scheduler"] = get_scheduler(**swhweb_config["scheduler"]) swhweb_config["counters"] = get_counters(**swhweb_config["counters"]) return swhweb_config def search(): """Return the current application's search.""" return get_config()["search"] def storage(): """Return the current application's storage.""" return get_config()["storage"] def vault(): """Return the current application's vault.""" return get_config()["vault"] def indexer_storage(): """Return the current application's indexer storage.""" return get_config()["indexer_storage"] def scheduler(): 
"""Return the current application's scheduler.""" return get_config()["scheduler"] def counters(): """Return the current application's counters.""" return get_config()["counters"] diff --git a/swh/web/save_origin_webhooks/__init__.py b/swh/web/save_origin_webhooks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/swh/web/save_origin_webhooks/bitbucket.py b/swh/web/save_origin_webhooks/bitbucket.py new file mode 100644 index 00000000..21430ff6 --- /dev/null +++ b/swh/web/save_origin_webhooks/bitbucket.py @@ -0,0 +1,44 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from typing import Tuple + +from rest_framework.request import Request + +from swh.web.save_origin_webhooks.generic_receiver import OriginSaveWebhookReceiver + + +class BitbucketOriginSaveWebhookReceiver(OriginSaveWebhookReceiver): + FORGE_TYPE = "Bitbucket" + WEBHOOK_GUIDE_URL = ( + "https://support.atlassian.com/bitbucket-cloud/docs/manage-webhooks/" + ) + REPO_TYPES = "git" + + def is_forge_request(self, request: Request) -> bool: + return ( + request.headers.get("User-Agent", "").startswith( + f"{self.FORGE_TYPE}-Webhooks/" + ) + and "X-Event-Key" in request.headers + ) + + def is_push_event(self, request: Request) -> bool: + return request.headers["X-Event-Key"] == "repo:push" + + def extract_repo_url_and_visit_type(self, request: Request) -> Tuple[str, str]: + repo_url = ( + request.data.get("repository", {}) + .get("links", {}) + .get("html", {}) + .get("href", "") + ) + if repo_url: + repo_url += ".git" + + return repo_url, "git" + + +api_origin_save_webhook_bitbucket = BitbucketOriginSaveWebhookReceiver() diff --git a/swh/web/save_origin_webhooks/generic_receiver.py b/swh/web/save_origin_webhooks/generic_receiver.py new file mode 100644 index 00000000..474eede7 --- 
class OriginSaveWebhookReceiver(abc.ABC):
    """Base class for forge webhook receivers that turn push events into
    Software Heritage "save origin" requests.

    Concrete subclasses must set the class attributes below and implement the
    three abstract request-inspection methods. Instantiating a subclass
    registers it as a documented Web API endpoint under
    ``/api/1/origin/save/webhook/<forge>/``.
    """

    # display name of the forge (e.g. "GitHub"), also used to build the
    # endpoint URL and name (lowercased)
    FORGE_TYPE: str
    # link to the forge's documentation explaining how to set up webhooks
    WEBHOOK_GUIDE_URL: str
    # human-readable list of repository types hosted by the forge (e.g. "git")
    REPO_TYPES: str

    @abc.abstractmethod
    def is_forge_request(self, request: Request) -> bool:
        """Check that the POST request was emitted by the expected forge."""
        ...

    @abc.abstractmethod
    def is_push_event(self, request: Request) -> bool:
        """Check that the webhook payload describes a push event."""
        ...

    @abc.abstractmethod
    def extract_repo_url_and_visit_type(self, request: Request) -> Tuple[str, str]:
        """Extract the repository URL and visit type from the webhook payload.

        Either returned string may be empty when extraction fails.
        """
        ...

    def __init__(self):
        self.__doc__ = f"""
        .. http:post:: /api/1/origin/save/webhook/{self.FORGE_TYPE.lower()}/

            Webhook receiver for {self.FORGE_TYPE} to request or update the
            archival of a repository when new commits are pushed to it.

            To add such webhook to one of your {self.REPO_TYPES} repository hosted on
            {self.FORGE_TYPE}, please follow `{self.FORGE_TYPE}'s webhooks guide
            <{self.WEBHOOK_GUIDE_URL}>`_.

            The expected content type for the webhook payload must be
            ``application/json``.

            :>json string origin_url: the url of the origin to save
            :>json string visit_type: the type of visit to perform
            :>json string save_request_date: the date (in iso format) the save
                request was issued
            :>json string save_request_status: the status of the save request,
                either **accepted**, **rejected** or **pending**

            :statuscode 200: save request for repository has been successfully
                created from the webhook payload.
            :statuscode 400: no save request has been created due to invalid POST
                request or missing data in webhook payload
        """
        # BUGFIX: this string was missing its "f" prefix, so every receiver
        # shared the same literal, unformatted __name__
        # ("api_origin_save_webhook_{self.FORGE_TYPE.lower()}") instead of a
        # per-forge endpoint name.
        self.__name__ = f"api_origin_save_webhook_{self.FORGE_TYPE.lower()}"
        # register this callable as a documented API endpoint
        api_doc(
            f"/origin/save/webhook/{self.FORGE_TYPE.lower()}/",
            category="Request archival",
        )(self)
        api_route(
            f"/origin/save/webhook/{self.FORGE_TYPE.lower()}/",
            f"api-1-origin-save-webhook-{self.FORGE_TYPE.lower()}",
            methods=["POST"],
        )(self)

    def __call__(
        self,
        request: Request,
    ) -> Dict[str, Any]:
        """Validate the webhook request then create a save origin request.

        Args:
            request: the POST request sent by the forge's webhook

        Returns:
            A dict with the origin URL, visit type, save request date and
            save request status.

        Raises:
            BadInputExc: when the request does not come from the expected
                forge, does not describe a push event, has a content type
                other than ``application/json``, or its payload does not
                allow extracting a repository URL and a visit type.
        """
        if not self.is_forge_request(request):
            raise BadInputExc(
                f"POST request was not sent by a {self.FORGE_TYPE} webhook and "
                "has not been processed."
            )

        if not self.is_push_event(request):
            raise BadInputExc(
                f"Event sent by {self.FORGE_TYPE} webhook is not a push one, request "
                "has not been processed."
            )

        content_type = request.headers.get("Content-Type")
        if content_type != "application/json":
            raise BadInputExc(
                f"Invalid content type '{content_type}' for the POST request sent by "
                f"{self.FORGE_TYPE} webhook, it should be 'application/json'."
            )

        repo_url, visit_type = self.extract_repo_url_and_visit_type(request)
        if not repo_url:
            raise BadInputExc(
                f"Repository URL could not be extracted from {self.FORGE_TYPE} webhook "
                "payload."
            )
        if not visit_type:
            raise BadInputExc(
                f"Visit type could not be determined for repository {repo_url}."
            )

        save_request = create_save_origin_request(
            visit_type=visit_type, origin_url=repo_url
        )

        # expose only the public subset of the save request data
        return {
            "origin_url": save_request["origin_url"],
            "visit_type": save_request["visit_type"],
            "save_request_date": save_request["save_request_date"],
            "save_request_status": save_request["save_request_status"],
        }
class GitHubOriginSaveWebhookReceiver(OriginSaveWebhookReceiver):
    """Webhook receiver handling push events sent by GitHub."""

    FORGE_TYPE = "GitHub"
    WEBHOOK_GUIDE_URL = (
        "https://docs.github.com/en/developers/webhooks-and-events/"
        "webhooks/creating-webhooks#setting-up-a-webhook"
    )
    REPO_TYPES = "git"

    def is_forge_request(self, request: Request) -> bool:
        # GitHub webhook deliveries advertise a "GitHub-Hookshot/" User-Agent
        # and carry the event kind in the X-GitHub-Event header.
        user_agent = request.headers.get("User-Agent", "")
        if not user_agent.startswith(f"{self.FORGE_TYPE}-Hookshot/"):
            return False
        return f"X-{self.FORGE_TYPE}-Event" in request.headers

    def is_push_event(self, request: Request) -> bool:
        # "push" is the event name GitHub uses for push notifications
        return request.headers[f"X-{self.FORGE_TYPE}-Event"] == "push"

    def extract_repo_url_and_visit_type(self, request: Request) -> Tuple[str, str]:
        # on GitHub, the repository's html_url doubles as its git clone URL
        repository = request.data.get("repository", {})
        return repository.get("html_url", ""), "git"


api_origin_save_webhook_github = GitHubOriginSaveWebhookReceiver()
class SourceforgeOriginSaveWebhookReceiver(OriginSaveWebhookReceiver):
    """Webhook receiver handling push events sent by SourceForge (Allura)."""

    FORGE_TYPE = "SourceForge"
    WEBHOOK_GUIDE_URL = (
        "https://sourceforge.net/blog/"
        "how-to-use-webhooks-for-git-mercurial-and-svn-repositories/"
    )
    REPO_TYPES = "git, hg or svn"

    SOURCE_FORGE_API_PROJECT_URL_PATTERN = (
        "https://sourceforge.net/rest/p/{project_name}"
    )

    # maximum time (in seconds) to wait for the SourceForge API to answer
    SOURCE_FORGE_API_TIMEOUT = 30

    def is_forge_request(self, request: Request) -> bool:
        # SourceForge runs the Allura forge software, whose webhook requests
        # identify themselves with this exact User-Agent value.
        return (
            request.headers.get("User-Agent", "")
            == "Allura Webhook (https://allura.apache.org/)"
        )

    def is_push_event(self, request: Request) -> bool:
        # SourceForge only support webhooks for push events
        return True

    def extract_repo_url_and_visit_type(self, request: Request) -> Tuple[str, str]:
        """Resolve the clone URL and visit type through the SourceForge REST API.

        The webhook payload does not contain the clone URL, so the project
        metadata is fetched from the SourceForge API and the repository
        ("code") tool whose URL matches the payload's full name is looked up.

        Returns:
            A (repo_url, visit_type) pair; both strings are empty when
            resolution fails.
        """
        repo_url = ""
        visit_type = ""
        # full_name is expected to look like "/p/<project_name>/<tool>"
        # (Allura path) — TODO confirm against real payloads
        project_full_name = request.data.get("repository", {}).get("full_name")
        if project_full_name:
            # BUGFIX: filter out empty path components so a full name without
            # a leading slash no longer raises IndexError (the previous code
            # indexed split("/")[2] directly)
            path_parts = [part for part in project_full_name.split("/") if part]
            if len(path_parts) < 2:
                return repo_url, visit_type
            project_name = path_parts[1]
            project_api_url = self.SOURCE_FORGE_API_PROJECT_URL_PATTERN.format(
                project_name=project_name
            )
            # BUGFIX: a timeout is mandatory here, otherwise a stalled
            # SourceForge API call would hang the webhook endpoint forever
            response = requests.get(
                project_api_url, timeout=self.SOURCE_FORGE_API_TIMEOUT
            )
            if response.ok:
                project_data = response.json()
                for tool in project_data.get("tools", []):
                    if tool.get("mount_point") == "code" and tool.get(
                        "url", ""
                    ).endswith(project_full_name):
                        repo_url = tool.get(
                            "clone_url_https_anon", tool.get("clone_url_ro", "")
                        )
                        # the tool name is the VCS type (git, hg or svn)
                        visit_type = tool.get("name", "")

        return repo_url, visit_type


api_origin_save_webhook_sourceforge = SourceforgeOriginSaveWebhookReceiver()
""" import os import sys from swh.web.config import get_config scope1_limiter_rate = 3 scope1_limiter_rate_post = 1 scope2_limiter_rate = 5 scope2_limiter_rate_post = 2 scope3_limiter_rate = 1 scope3_limiter_rate_post = 1 save_origin_rate_post = 5 api_raw_object_rate = 5 swh_web_config = get_config() _pytest = "pytest" in sys.argv[0] or "PYTEST_XDIST_WORKER" in os.environ swh_web_config.update( { # enable django debug mode only when running pytest "debug": _pytest, "secret_key": "test", "history_counters_url": "", "throttling": { "cache_uri": None, "scopes": { "swh_api": { "limiter_rate": {"default": "60/min"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_api_origin_search": { "limiter_rate": {"default": "100/min"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_api_origin_visit_latest": { "limiter_rate": {"default": "6000/min"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_vault_cooking": { "limiter_rate": {"default": "120/h", "GET": "60/m"}, "exempted_networks": ["127.0.0.0/8"], }, "swh_save_origin": { "limiter_rate": { "default": "120/h", "POST": "%s/h" % save_origin_rate_post, } }, "swh_raw_object": { "limiter_rate": {"default": f"{api_raw_object_rate}/h"}, }, "scope1": { "limiter_rate": { "default": "%s/min" % scope1_limiter_rate, "POST": "%s/min" % scope1_limiter_rate_post, } }, "scope2": { "limiter_rate": { "default": "%s/min" % scope2_limiter_rate, "POST": "%s/min" % scope2_limiter_rate_post, } }, "scope3": { "limiter_rate": { "default": "%s/min" % scope3_limiter_rate, "POST": "%s/min" % scope3_limiter_rate_post, }, "exempted_networks": ["127.0.0.0/8"], }, }, }, "keycloak": { # disable keycloak use when not running pytest "server_url": "http://localhost:8080/auth/" if _pytest else "", "realm_name": "SoftwareHeritage", }, "swh_extra_django_apps": [ "swh.web.add_forge_now", "swh.web.archive_coverage", "swh.web.badges", "swh.web.banners", "swh.web.deposit", "swh.web.inbound_email", "swh.web.jslicenses", "swh.web.mailmap", "swh.web.metrics", 
"swh.web.save_code_now", + "swh.web.save_origin_webhooks", "swh.web.vault", ], } ) from .common import * # noqa from .common import LOGGING # noqa, isort: skip ALLOWED_HOSTS = ["*"] DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql", "NAME": swh_web_config["test_db"]["name"], } } # when running cypress tests, make the webapp fetch data from memory storages if not _pytest: swh_web_config.update( { "debug": True, "e2e_tests_mode": True, # ensure scheduler not available to avoid side effects in cypress tests "scheduler": {"cls": "remote", "url": ""}, } ) from django.conf import settings from swh.web.tests.data import get_tests_data, override_storages test_data = get_tests_data() override_storages( test_data["storage"], test_data["idx_storage"], test_data["search"], test_data["counters"], ) # using sqlite3 for frontend tests build_id = os.environ.get("CYPRESS_PARALLEL_BUILD_ID", "") settings.DATABASES["default"].update( { "ENGINE": "django.db.backends.sqlite3", "NAME": f"swh-web-test{build_id}.sqlite3", } ) # to prevent "database is locked" error when running cypress tests from django.db.backends.signals import connection_created def activate_wal_journal_mode(sender, connection, **kwargs): cursor = connection.cursor() cursor.execute("PRAGMA journal_mode = WAL;") connection_created.connect(activate_wal_journal_mode) else: # Silent DEBUG output when running unit tests LOGGING["handlers"]["console"]["level"] = "INFO" # type: ignore LOGIN_URL = "login" if not _pytest else "oidc-login" LOGOUT_URL = "logout" if not _pytest else "oidc-logout" diff --git a/swh/web/tests/conftest.py b/swh/web/tests/conftest.py index 245b7c99..d6eacb3d 100644 --- a/swh/web/tests/conftest.py +++ b/swh/web/tests/conftest.py @@ -1,1261 +1,1255 @@ # Copyright (C) 2018-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file 
for more information from collections import defaultdict from datetime import timedelta import functools from importlib import import_module, reload import json import os import random import shutil import sys import time from typing import Any, Dict, List, Optional from _pytest.python import Function from hypothesis import HealthCheck from hypothesis import settings as hypothesis_settings import pytest from pytest_django.fixtures import SettingsWrapper from django.conf import settings from django.contrib.auth.models import User from django.core.cache import cache from django.test.utils import setup_databases from django.urls import clear_url_caches from rest_framework.test import APIClient, APIRequestFactory from swh.model.hashutil import ( ALGORITHMS, DEFAULT_ALGORITHMS, hash_to_bytes, hash_to_hex, ) from swh.model.model import Content, Directory from swh.model.swhids import CoreSWHID, ObjectType from swh.scheduler.tests.common import TASK_TYPES from swh.storage.algos.origin import origin_get_latest_visit_status from swh.storage.algos.revisions_walker import get_revisions_walker from swh.storage.algos.snapshot import snapshot_get_all_branches, snapshot_get_latest from swh.web.auth.utils import ( ADD_FORGE_MODERATOR_PERMISSION, MAILMAP_ADMIN_PERMISSION, MAILMAP_PERMISSION, ) from swh.web.config import get_config from swh.web.save_code_now.origin_save import get_scheduler_load_task_types from swh.web.tests.data import ( get_tests_data, override_storages, random_content, random_sha1, random_sha1_bytes, random_sha256, ) from swh.web.tests.helpers import create_django_permission from swh.web.utils import browsers_supported_image_mimes, converters from swh.web.utils.typing import OriginVisitInfo os.environ["LC_ALL"] = "C.UTF-8" fossology_missing = shutil.which("nomossa") is None # Register some hypothesis profiles hypothesis_settings.register_profile("default", hypothesis_settings()) # we use getattr here to keep mypy happy regardless hypothesis version 
# we use getattr here to keep mypy happy regardless hypothesis version
function_scoped_fixture_check = (
    [getattr(HealthCheck, "function_scoped_fixture")]
    if hasattr(HealthCheck, "function_scoped_fixture")
    else []
)

suppress_health_check = [
    HealthCheck.too_slow,
    HealthCheck.filter_too_much,
] + function_scoped_fixture_check

hypothesis_settings.register_profile(
    "swh-web",
    hypothesis_settings(
        deadline=None,
        suppress_health_check=suppress_health_check,
    ),
)

hypothesis_settings.register_profile(
    "swh-web-fast",
    hypothesis_settings(
        deadline=None,
        max_examples=5,
        suppress_health_check=suppress_health_check,
    ),
)


def pytest_addoption(parser):
    # option allowing to replay a test failure that depends on random inputs
    parser.addoption("--swh-web-random-seed", action="store", default=None)


def pytest_configure(config):
    """Select the hypothesis profile and generate a dummy webpack-stats.json.

    Small hack in order to be able to run the unit tests without static
    assets generated by webpack: those assets are not really needed for the
    Python tests, but the django templates fail to load due to the missing
    generated webpack-stats.json file describing the js and css files to
    include. So a dummy webpack-stats.json file referencing one js bundle
    per django application is generated to overcome that issue.
    """
    # Use fast hypothesis profile by default if none has been
    # explicitly specified in pytest option
    if config.getoption("--hypothesis-profile") is None:
        hypothesis_settings.load_profile("swh-web-fast")

    test_dir = os.path.dirname(__file__)
    # location of the static folder when running tests through tox
    data_dir = os.path.join(sys.prefix, "share/swh/web")
    static_dir = os.path.join(data_dir, "static")

    if not os.path.exists(static_dir):
        # location of the static folder when running tests locally with pytest
        static_dir = os.path.join(test_dir, "../../../static")

    webpack_stats = os.path.join(static_dir, "webpack-stats.json")
    if os.path.exists(webpack_stats):
        return

    django_apps_dir = os.path.join(test_dir, "../../../swh/web")
    if not os.path.exists(django_apps_dir):
        # location of the applications folder when running tests with tox
        django_apps_dir = os.path.join(data_dir, "swh/web")

    # collect the name of every js bundle: either one per application
    # (app/assets/index.js) or one per sub-directory of the assets folder
    bundles = []
    _, apps, _ = next(os.walk(django_apps_dir))
    for app in apps:
        app_assets_dir = os.path.join(django_apps_dir, app, "assets")
        if os.path.exists(app_assets_dir):
            if os.path.exists(os.path.join(app_assets_dir, "index.js")):
                bundles.append(app)
            else:
                _, app_bundles, _ = next(os.walk(app_assets_dir))
                for app_bundle in app_bundles:
                    if os.path.exists(
                        os.path.join(app_assets_dir, app_bundle, "index.js")
                    ):
                        bundles.append(app_bundle)

    # NOTE: a leftover debug "print(bundles)" statement that polluted pytest
    # output was removed here.

    mock_webpack_stats = {
        "status": "done",
        "publicPath": "/static",
        "chunks": {},
        "assets": {},
    }
    for bundle in bundles:
        asset = f"js/{bundle}.js"
        mock_webpack_stats["chunks"][bundle] = [asset]
        mock_webpack_stats["assets"][asset] = {
            "name": asset,
            "publicPath": f"/static/{asset}",
        }

    with open(webpack_stats, "w") as outfile:
        json.dump(mock_webpack_stats, outfile)


_swh_web_custom_section = "swh-web custom section"

_random_seed_cache_key = "swh-web/random-seed"


@pytest.fixture(scope="function", autouse=True)
def random_seed(pytestconfig):
    """Seed the random module for each test, storing the seed in the cache
    so a failing test can be replayed with --swh-web-random-seed."""
    state = random.getstate()
    seed = pytestconfig.getoption("--swh-web-random-seed")
    if seed is None:
        seed = time.time()
    seed = int(seed)
    cache.set(_random_seed_cache_key, seed)
    random.seed(seed)
    yield seed
    random.setstate(state)
"failed": seed = cache.get(_random_seed_cache_key, None) line = ( f'FAILED {report.nodeid}: Use "pytest --swh-web-random-seed={seed} ' f'{report.nodeid}" to reproduce that test failure with same inputs' ) report.sections.append((_swh_web_custom_section, line)) def pytest_terminal_summary(terminalreporter, *args): reports = terminalreporter.getreports("failed") content = os.linesep.join( text for report in reports for secname, text in report.sections if secname == _swh_web_custom_section ) if content: terminalreporter.ensure_newline() terminalreporter.section(_swh_web_custom_section, sep="-", blue=True, bold=True) terminalreporter.line(content) # Clear Django cache before each test @pytest.fixture(autouse=True) def django_cache_cleared(): cache.clear() # Alias rf fixture from pytest-django @pytest.fixture def request_factory(rf): return rf # Fixture to get test client from Django REST Framework @pytest.fixture def api_client(): return APIClient() # Fixture to get API request factory from Django REST Framework @pytest.fixture def api_request_factory(): return APIRequestFactory() # Initialize tests data @pytest.fixture(scope="function", autouse=True) def tests_data(): data = get_tests_data(reset=True) # Update swh-web configuration to use the in-memory storages # instantiated in the tests.data module override_storages( data["storage"], data["idx_storage"], data["search"], data["counters"] ) return data @pytest.fixture(scope="function") def sha1(): """Fixture returning a valid hexadecimal sha1 value.""" return random_sha1() @pytest.fixture(scope="function") def invalid_sha1(): """Fixture returning an invalid sha1 representation.""" return hash_to_hex(bytes(random.randint(0, 255) for _ in range(50))) @pytest.fixture(scope="function") def sha256(): """Fixture returning a valid hexadecimal sha256 value.""" return random_sha256() def _known_swh_objects(tests_data, object_type): return tests_data[object_type] @pytest.fixture(scope="function") def content(tests_data): 
"""Fixture returning a random content ingested into the test archive.""" return random.choice(_known_swh_objects(tests_data, "contents")) @pytest.fixture(scope="function") def contents(tests_data): """Fixture returning random contents ingested into the test archive.""" return random.choices( _known_swh_objects(tests_data, "contents"), k=random.randint(2, 8) ) def _new_content(tests_data): while True: new_content = random_content() sha1_bytes = hash_to_bytes(new_content["sha1"]) if tests_data["storage"].content_get_data(sha1_bytes) is None: return new_content @pytest.fixture(scope="function") def unknown_content(tests_data): """Fixture returning a random content not ingested into the test archive.""" return _new_content(tests_data) @pytest.fixture(scope="function") def unknown_contents(tests_data): """Fixture returning random contents not ingested into the test archive.""" new_contents = [] new_content_ids = set() nb_contents = random.randint(2, 8) while len(new_contents) != nb_contents: new_content = _new_content(tests_data) if new_content["sha1"] not in new_content_ids: new_contents.append(new_content) new_content_ids.add(new_content["sha1"]) return list(new_contents) @pytest.fixture(scope="function") def empty_content(): """Fixture returning the empty content ingested into the test archive.""" empty_content = Content.from_data(data=b"").to_dict() for algo in DEFAULT_ALGORITHMS: empty_content[algo] = hash_to_hex(empty_content[algo]) return empty_content @functools.lru_cache(maxsize=None) def _content_text(): return list( filter( lambda c: c["mimetype"].startswith("text/"), _known_swh_objects(get_tests_data(), "contents"), ) ) @pytest.fixture(scope="function") def content_text(): """ Fixture returning a random textual content ingested into the test archive. 
""" return random.choice(_content_text()) @functools.lru_cache(maxsize=None) def _content_text_non_utf8(): return list( filter( lambda c: c["mimetype"].startswith("text/") and c["encoding"] not in ("utf-8", "us-ascii"), _known_swh_objects(get_tests_data(), "contents"), ) ) @pytest.fixture(scope="function") def content_text_non_utf8(): """Fixture returning a random textual content not encoded to UTF-8 ingested into the test archive. """ return random.choice(_content_text_non_utf8()) @functools.lru_cache(maxsize=None) def _content_application_no_highlight(): return list( filter( lambda c: c["mimetype"].startswith("application/") and c["hljs_language"] == "plaintext", _known_swh_objects(get_tests_data(), "contents"), ) ) @pytest.fixture(scope="function") def content_application_no_highlight(): """Fixture returning a random textual content with mimetype starting with application/ and no detected programming language to highlight ingested into the test archive. """ return random.choice(_content_application_no_highlight()) @functools.lru_cache(maxsize=None) def _content_text_no_highlight(): return list( filter( lambda c: c["mimetype"].startswith("text/") and c["hljs_language"] == "plaintext", _known_swh_objects(get_tests_data(), "contents"), ) ) @pytest.fixture(scope="function") def content_text_no_highlight(): """Fixture returning a random textual content with no detected programming language to highlight ingested into the test archive. 
""" return random.choice(_content_text_no_highlight()) @functools.lru_cache(maxsize=None) def _content_image_type(): return list( filter( lambda c: c["mimetype"] in browsers_supported_image_mimes, _known_swh_objects(get_tests_data(), "contents"), ) ) @pytest.fixture(scope="function") def content_image_type(): """Fixture returning a random image content ingested into the test archive.""" return random.choice(_content_image_type()) @functools.lru_cache(maxsize=None) def _content_unsupported_image_type_rendering(): return list( filter( lambda c: c["mimetype"].startswith("image/") and c["mimetype"] not in browsers_supported_image_mimes, _known_swh_objects(get_tests_data(), "contents"), ) ) @pytest.fixture(scope="function") def content_unsupported_image_type_rendering(): """Fixture returning a random image content ingested into the test archive that can not be rendered by browsers. """ return random.choice(_content_unsupported_image_type_rendering()) @functools.lru_cache(maxsize=None) def _content_utf8_detected_as_binary(): def utf8_binary_detected(content): if content["encoding"] != "binary": return False try: content["raw_data"].decode("utf-8") except Exception: return False else: return True return list( filter(utf8_binary_detected, _known_swh_objects(get_tests_data(), "contents")) ) @pytest.fixture(scope="function") def content_utf8_detected_as_binary(): """Fixture returning a random textual content detected as binary by libmagic while they are valid UTF-8 encoded files. 
""" return random.choice(_content_utf8_detected_as_binary()) @pytest.fixture(scope="function") def directory(tests_data): """Fixture returning a random directory ingested into the test archive.""" return random.choice(_known_swh_objects(tests_data, "directories")) @functools.lru_cache(maxsize=None) def _directory_with_entry_type(type_): tests_data = get_tests_data() return list( filter( lambda d: any( [ e["type"] == type_ for e in list(tests_data["storage"].directory_ls(hash_to_bytes(d))) ] ), _known_swh_objects(tests_data, "directories"), ) ) @pytest.fixture(scope="function") def directory_with_subdirs(): """Fixture returning a random directory containing sub directories ingested into the test archive. """ return random.choice(_directory_with_entry_type("dir")) @pytest.fixture(scope="function") def directory_with_files(): """Fixture returning a random directory containing at least one regular file.""" return random.choice(_directory_with_entry_type("file")) @pytest.fixture(scope="function") def unknown_directory(tests_data): """Fixture returning a random directory not ingested into the test archive.""" while True: new_directory = random_sha1() sha1_bytes = hash_to_bytes(new_directory) if list(tests_data["storage"].directory_missing([sha1_bytes])): return new_directory @pytest.fixture(scope="function") def empty_directory(): """Fixture returning the empty directory ingested into the test archive.""" return Directory(entries=()).id.hex() @pytest.fixture(scope="function") def revision(tests_data): """Fixturereturning a random revision ingested into the test archive.""" return random.choice(_known_swh_objects(tests_data, "revisions")) @pytest.fixture(scope="function") def revisions(tests_data): """Fixture returning random revisions ingested into the test archive.""" return random.choices( _known_swh_objects(tests_data, "revisions"), k=random.randint(2, 8), ) @pytest.fixture(scope="function") def revisions_list(tests_data): """Fixture returning random revisions 
ingested into the test archive.""" def gen_revisions_list(size): return random.choices( _known_swh_objects(tests_data, "revisions"), k=size, ) return gen_revisions_list @pytest.fixture(scope="function") def unknown_revision(tests_data): """Fixture returning a random revision not ingested into the test archive.""" while True: new_revision = random_sha1() sha1_bytes = hash_to_bytes(new_revision) if tests_data["storage"].revision_get([sha1_bytes])[0] is None: return new_revision def _get_origin_dfs_revisions_walker(tests_data): storage = tests_data["storage"] origin = random.choice(tests_data["origins"][:-1]) snapshot = snapshot_get_latest(storage, origin["url"]) if snapshot.branches[b"HEAD"].target_type.value == "alias": target = snapshot.branches[b"HEAD"].target head = snapshot.branches[target].target else: head = snapshot.branches[b"HEAD"].target return get_revisions_walker("dfs", storage, head) @functools.lru_cache(maxsize=None) def _ancestor_revisions_data(): # get a dfs revisions walker for one of the origins # loaded into the test archive revisions_walker = _get_origin_dfs_revisions_walker(get_tests_data()) master_revisions = [] children = defaultdict(list) init_rev_found = False # get revisions only authored in the master branch for rev in revisions_walker: for rev_p in rev["parents"]: children[rev_p].append(rev["id"]) if not init_rev_found: master_revisions.append(rev) if not rev["parents"]: init_rev_found = True return master_revisions, children @pytest.fixture(scope="function") def ancestor_revisions(): """Fixture returning a pair of revisions ingested into the test archive with an ancestor relation. 
""" master_revisions, children = _ancestor_revisions_data() # head revision root_rev = master_revisions[0] # pick a random revision, different from head, only authored # in the master branch ancestor_rev_idx = random.choice(list(range(1, len(master_revisions) - 1))) ancestor_rev = master_revisions[ancestor_rev_idx] ancestor_child_revs = children[ancestor_rev["id"]] return { "sha1_git_root": hash_to_hex(root_rev["id"]), "sha1_git": hash_to_hex(ancestor_rev["id"]), "children": [hash_to_hex(r) for r in ancestor_child_revs], } @functools.lru_cache(maxsize=None) def _non_ancestor_revisions_data(): # get a dfs revisions walker for one of the origins # loaded into the test archive revisions_walker = _get_origin_dfs_revisions_walker(get_tests_data()) merge_revs = [] children = defaultdict(list) # get all merge revisions for rev in revisions_walker: if len(rev["parents"]) > 1: merge_revs.append(rev) for rev_p in rev["parents"]: children[rev_p].append(rev["id"]) return merge_revs, children @pytest.fixture(scope="function") def non_ancestor_revisions(): """Fixture returning a pair of revisions ingested into the test archive with no ancestor relation. 
""" merge_revs, children = _non_ancestor_revisions_data() # find a merge revisions whose parents have a unique child revision random.shuffle(merge_revs) selected_revs = None for merge_rev in merge_revs: if all(len(children[rev_p]) == 1 for rev_p in merge_rev["parents"]): selected_revs = merge_rev["parents"] return { "sha1_git_root": hash_to_hex(selected_revs[0]), "sha1_git": hash_to_hex(selected_revs[1]), } @pytest.fixture(scope="function") def revision_with_submodules(): """Fixture returning a revision that is known to point to a directory with revision entries (aka git submodules) """ return { "rev_sha1_git": "ffcb69001f3f6745dfd5b48f72ab6addb560e234", "rev_dir_sha1_git": "d92a21446387fa28410e5a74379c934298f39ae2", "rev_dir_rev_path": "libtess2", } @pytest.fixture(scope="function") def release(tests_data): """Fixture returning a random release ingested into the test archive.""" return random.choice(_known_swh_objects(tests_data, "releases")) @pytest.fixture(scope="function") def releases(tests_data): """Fixture returning random releases ingested into the test archive.""" return random.choices( _known_swh_objects(tests_data, "releases"), k=random.randint(2, 8) ) @pytest.fixture(scope="function") def unknown_release(tests_data): """Fixture returning a random release not ingested into the test archive.""" while True: new_release = random_sha1() sha1_bytes = hash_to_bytes(new_release) if tests_data["storage"].release_get([sha1_bytes])[0] is None: return new_release @pytest.fixture(scope="function") def snapshot(tests_data): """Fixture returning a random snapshot ingested into the test archive.""" return random.choice(_known_swh_objects(tests_data, "snapshots")) @pytest.fixture(scope="function") def unknown_snapshot(tests_data): """Fixture returning a random snapshot not ingested into the test archive.""" while True: new_snapshot = random_sha1() sha1_bytes = hash_to_bytes(new_snapshot) if tests_data["storage"].snapshot_get_branches(sha1_bytes) is None: return 
new_snapshot @pytest.fixture(scope="function") def origin(tests_data): """Fixture returning a random origin ingested into the test archive.""" return random.choice(_known_swh_objects(tests_data, "origins")) @functools.lru_cache(maxsize=None) def _origin_with_multiple_visits(): tests_data = get_tests_data() origins = [] storage = tests_data["storage"] for origin in tests_data["origins"]: visit_page = storage.origin_visit_get(origin["url"]) if len(visit_page.results) > 1: origins.append(origin) return origins @pytest.fixture(scope="function") def origin_with_multiple_visits(): """Fixture returning a random origin with multiple visits ingested into the test archive. """ return random.choice(_origin_with_multiple_visits()) @functools.lru_cache(maxsize=None) def _origin_with_releases(): tests_data = get_tests_data() origins = [] for origin in tests_data["origins"]: snapshot = snapshot_get_latest(tests_data["storage"], origin["url"]) if any([b.target_type.value == "release" for b in snapshot.branches.values()]): origins.append(origin) return origins @pytest.fixture(scope="function") def origin_with_releases(): """Fixture returning a random origin with releases ingested into the test archive.""" return random.choice(_origin_with_releases()) @functools.lru_cache(maxsize=None) def _origin_with_pull_request_branches(): tests_data = get_tests_data() origins = [] storage = tests_data["storage"] for origin in storage.origin_list(limit=1000).results: snapshot = snapshot_get_latest(storage, origin.url) if any([b"refs/pull/" in b for b in snapshot.branches]): origins.append(origin) return origins @pytest.fixture(scope="function") def origin_with_pull_request_branches(): """Fixture returning a random origin with pull request branches ingested into the test archive. 
""" return random.choice(_origin_with_pull_request_branches()) @functools.lru_cache(maxsize=None) def _object_type_swhid(object_type): return list( filter( lambda swhid: swhid.object_type == object_type, _known_swh_objects(get_tests_data(), "swhids"), ) ) @pytest.fixture(scope="function") def content_swhid(): """Fixture returning a qualified SWHID for a random content object ingested into the test archive. """ return random.choice(_object_type_swhid(ObjectType.CONTENT)) @pytest.fixture(scope="function") def directory_swhid(): """Fixture returning a qualified SWHID for a random directory object ingested into the test archive. """ return random.choice(_object_type_swhid(ObjectType.DIRECTORY)) @pytest.fixture(scope="function") def release_swhid(): """Fixture returning a qualified SWHID for a random release object ingested into the test archive. """ return random.choice(_object_type_swhid(ObjectType.RELEASE)) @pytest.fixture(scope="function") def revision_swhid(): """Fixture returning a qualified SWHID for a random revision object ingested into the test archive. """ return random.choice(_object_type_swhid(ObjectType.REVISION)) @pytest.fixture(scope="function") def snapshot_swhid(): """Fixture returning a qualified SWHID for a snapshot object ingested into the test archive. """ return random.choice(_object_type_swhid(ObjectType.SNAPSHOT)) @pytest.fixture(scope="function", params=list(ObjectType)) def unknown_core_swhid(request) -> CoreSWHID: """Fixture returning an unknown core SWHID. Tests using this will be called once per object type. 
""" return CoreSWHID( object_type=request.param, object_id=random_sha1_bytes(), ) # Fixture to manipulate data from a sample archive used in the tests @pytest.fixture(scope="function") def archive_data(tests_data): return _ArchiveData(tests_data) # Fixture to manipulate indexer data from a sample archive used in the tests @pytest.fixture(scope="function") def indexer_data(tests_data): return _IndexerData(tests_data) -# Custom data directory for requests_mock -@pytest.fixture -def datadir(): - return os.path.join(os.path.abspath(os.path.dirname(__file__)), "resources") - - class _ArchiveData: """ Helper class to manage data from a sample test archive. It is initialized with a reference to an in-memory storage containing raw tests data. It is basically a proxy to Storage interface but it overrides some methods to retrieve those tests data in a json serializable format in order to ease tests implementation. """ def __init__(self, tests_data): self.storage = tests_data["storage"] def __getattr__(self, key): if key == "storage": raise AttributeError(key) # Forward calls to non overridden Storage methods to wrapped # storage instance return getattr(self.storage, key) def content_find(self, content: Dict[str, Any]) -> Dict[str, Any]: cnt_ids_bytes = { algo_hash: hash_to_bytes(content[algo_hash]) for algo_hash in ALGORITHMS if content.get(algo_hash) } cnt = self.storage.content_find(cnt_ids_bytes) return converters.from_content(cnt[0].to_dict()) if cnt else cnt def content_get(self, cnt_id: str) -> Dict[str, Any]: cnt_id_bytes = hash_to_bytes(cnt_id) content = self.storage.content_get([cnt_id_bytes])[0] if content: content_d = content.to_dict() content_d.pop("ctime", None) else: content_d = None return converters.from_swh( content_d, hashess={"sha1", "sha1_git", "sha256", "blake2s256"} ) def content_get_data(self, cnt_id: str) -> Optional[Dict[str, Any]]: cnt_id_bytes = hash_to_bytes(cnt_id) cnt_data = self.storage.content_get_data(cnt_id_bytes) if cnt_data is None: return 
None return converters.from_content({"data": cnt_data, "sha1": cnt_id_bytes}) def directory_get(self, dir_id): return {"id": dir_id, "content": self.directory_ls(dir_id)} def directory_ls(self, dir_id): cnt_id_bytes = hash_to_bytes(dir_id) dir_content = map( converters.from_directory_entry, self.storage.directory_ls(cnt_id_bytes) ) return list(dir_content) def release_get(self, rel_id: str) -> Optional[Dict[str, Any]]: rel_id_bytes = hash_to_bytes(rel_id) rel_data = self.storage.release_get([rel_id_bytes])[0] return converters.from_release(rel_data) if rel_data else None def revision_get(self, rev_id: str) -> Optional[Dict[str, Any]]: rev_id_bytes = hash_to_bytes(rev_id) rev_data = self.storage.revision_get([rev_id_bytes])[0] return converters.from_revision(rev_data) if rev_data else None def revision_log(self, rev_id, limit=None): rev_id_bytes = hash_to_bytes(rev_id) return list( map( converters.from_revision, self.storage.revision_log([rev_id_bytes], limit=limit), ) ) def snapshot_get_latest(self, origin_url): snp = snapshot_get_latest(self.storage, origin_url) return converters.from_snapshot(snp.to_dict()) def origin_get(self, origin_urls): origins = self.storage.origin_get(origin_urls) return [converters.from_origin(o.to_dict()) for o in origins] def origin_visit_get(self, origin_url): next_page_token = None visits = [] while True: visit_page = self.storage.origin_visit_get( origin_url, page_token=next_page_token ) next_page_token = visit_page.next_page_token for visit in visit_page.results: visit_status = self.storage.origin_visit_status_get_latest( origin_url, visit.visit ) visits.append( converters.from_origin_visit( {**visit_status.to_dict(), "type": visit.type} ) ) if not next_page_token: break return visits def origin_visit_get_by(self, origin_url: str, visit_id: int) -> OriginVisitInfo: visit = self.storage.origin_visit_get_by(origin_url, visit_id) assert visit is not None visit_status = self.storage.origin_visit_status_get_latest(origin_url, visit_id) 
assert visit_status is not None return converters.from_origin_visit( {**visit_status.to_dict(), "type": visit.type} ) def origin_visit_status_get_latest( self, origin_url, type: Optional[str] = None, allowed_statuses: Optional[List[str]] = None, require_snapshot: bool = False, ): visit_status = origin_get_latest_visit_status( self.storage, origin_url, type=type, allowed_statuses=allowed_statuses, require_snapshot=require_snapshot, ) return ( converters.from_origin_visit(visit_status.to_dict()) if visit_status else None ) def snapshot_get(self, snapshot_id): snp = snapshot_get_all_branches(self.storage, hash_to_bytes(snapshot_id)) return converters.from_snapshot(snp.to_dict()) def snapshot_get_branches( self, snapshot_id, branches_from="", branches_count=1000, target_types=None ): partial_branches = self.storage.snapshot_get_branches( hash_to_bytes(snapshot_id), branches_from.encode(), branches_count, target_types, ) return converters.from_partial_branches(partial_branches) def snapshot_get_head(self, snapshot): if snapshot["branches"]["HEAD"]["target_type"] == "alias": target = snapshot["branches"]["HEAD"]["target"] head = snapshot["branches"][target]["target"] else: head = snapshot["branches"]["HEAD"]["target"] return head def snapshot_count_branches(self, snapshot_id): counts = dict.fromkeys(("alias", "release", "revision"), 0) counts.update(self.storage.snapshot_count_branches(hash_to_bytes(snapshot_id))) counts.pop(None, None) return counts class _IndexerData: """ Helper class to manage indexer tests data It is initialized with a reference to an in-memory indexer storage containing raw tests data. It also defines class methods to retrieve those tests data in a json serializable format in order to ease tests implementation. 
""" def __init__(self, tests_data): self.idx_storage = tests_data["idx_storage"] self.mimetype_indexer = tests_data["mimetype_indexer"] self.license_indexer = tests_data["license_indexer"] def content_add_mimetype(self, cnt_id): self.mimetype_indexer.run([hash_to_bytes(cnt_id)]) def content_get_mimetype(self, cnt_id): mimetype = self.idx_storage.content_mimetype_get([hash_to_bytes(cnt_id)])[ 0 ].to_dict() return converters.from_filetype(mimetype) def content_add_license(self, cnt_id): self.license_indexer.run([hash_to_bytes(cnt_id)]) def content_get_license(self, cnt_id): cnt_id_bytes = hash_to_bytes(cnt_id) licenses = self.idx_storage.content_fossology_license_get([cnt_id_bytes]) for license in licenses: yield converters.from_swh(license.to_dict(), hashess={"id"}) @pytest.fixture def keycloak_oidc(keycloak_oidc, mocker): keycloak_config = get_config()["keycloak"] keycloak_oidc.server_url = keycloak_config["server_url"] keycloak_oidc.realm_name = keycloak_config["realm_name"] keycloak_oidc.client_id = settings.OIDC_SWH_WEB_CLIENT_ID keycloak_oidc_client = mocker.patch("swh.web.auth.views.keycloak_oidc_client") keycloak_oidc_client.return_value = keycloak_oidc return keycloak_oidc @pytest.fixture def subtest(request): """A hack to explicitly set up and tear down fixtures. This fixture allows you to set up and tear down fixtures within the test function itself. This is useful (necessary!) for using Hypothesis inside pytest, as hypothesis will call the test function multiple times, without setting up or tearing down fixture state as it is normally the case. Copied from the pytest-subtesthack project, public domain license (https://github.com/untitaker/pytest-subtesthack). 
""" parent_test = request.node def inner(func): if hasattr(Function, "from_parent"): item = Function.from_parent( parent_test, name=request.function.__name__ + "[]", originalname=request.function.__name__, callobj=func, ) else: item = Function( name=request.function.__name__ + "[]", parent=parent_test, callobj=func ) nextitem = parent_test # prevents pytest from tearing down module fixtures item.ihook.pytest_runtest_setup(item=item) try: item.ihook.pytest_runtest_call(item=item) finally: item.ihook.pytest_runtest_teardown(item=item, nextitem=nextitem) return inner @pytest.fixture def swh_scheduler(swh_scheduler): config = get_config() scheduler = config["scheduler"] config["scheduler"] = swh_scheduler # create load-git and load-hg task types for task_type in TASK_TYPES.values(): # see https://forge.softwareheritage.org/rDSCHc46ffadf7adf24c7eb3ffce062e8ade3818c79cc # noqa task_type["type"] = task_type["type"].replace("load-test-", "load-", 1) swh_scheduler.create_task_type(task_type) # create load-svn task type swh_scheduler.create_task_type( { "type": "load-svn", "description": "Update a Subversion repository", "backend_name": "swh.loader.svn.tasks.DumpMountAndLoadSvnRepository", "default_interval": timedelta(days=64), "min_interval": timedelta(hours=12), "max_interval": timedelta(days=64), "backoff_factor": 2, "max_queue_length": None, "num_retries": 7, "retry_delay": timedelta(hours=2), } ) # create load-cvs task type swh_scheduler.create_task_type( { "type": "load-cvs", "description": "Update a CVS repository", "backend_name": "swh.loader.cvs.tasks.DumpMountAndLoadSvnRepository", "default_interval": timedelta(days=64), "min_interval": timedelta(hours=12), "max_interval": timedelta(days=64), "backoff_factor": 2, "max_queue_length": None, "num_retries": 7, "retry_delay": timedelta(hours=2), } ) # create load-bzr task type swh_scheduler.create_task_type( { "type": "load-bzr", "description": "Update a Bazaar repository", "backend_name": 
"swh.loader.bzr.tasks.LoadBazaar", "default_interval": timedelta(days=64), "min_interval": timedelta(hours=12), "max_interval": timedelta(days=64), "backoff_factor": 2, "max_queue_length": None, "num_retries": 7, "retry_delay": timedelta(hours=2), } ) # add method to add load-archive-files task type during tests def add_load_archive_task_type(): swh_scheduler.create_task_type( { "type": "load-archive-files", "description": "Load tarballs", "backend_name": "swh.loader.package.archive.tasks.LoadArchive", "default_interval": timedelta(days=64), "min_interval": timedelta(hours=12), "max_interval": timedelta(days=64), "backoff_factor": 2, "max_queue_length": None, "num_retries": 7, "retry_delay": timedelta(hours=2), } ) swh_scheduler.add_load_archive_task_type = add_load_archive_task_type yield swh_scheduler config["scheduler"] = scheduler get_scheduler_load_task_types.cache_clear() @pytest.fixture(scope="session") def django_db_setup(request, django_db_blocker, postgresql_proc): from django.conf import settings settings.DATABASES["default"].update( { ("ENGINE", "django.db.backends.postgresql"), ("NAME", get_config()["test_db"]["name"]), ("USER", postgresql_proc.user), ("HOST", postgresql_proc.host), ("PORT", postgresql_proc.port), } ) with django_db_blocker.unblock(): setup_databases( verbosity=request.config.option.verbose, interactive=False, keepdb=False ) @pytest.fixture def staff_user(): return User.objects.create_user(username="admin", password="", is_staff=True) @pytest.fixture def regular_user(): return User.objects.create_user(username="johndoe", password="") @pytest.fixture def regular_user2(): return User.objects.create_user(username="janedoe", password="") @pytest.fixture def add_forge_moderator(): moderator = User.objects.create_user(username="add-forge moderator", password="") moderator.user_permissions.add( create_django_permission(ADD_FORGE_MODERATOR_PERMISSION) ) return moderator @pytest.fixture def mailmap_admin(): mailmap_admin = 
User.objects.create_user(username="mailmap-admin", password="") mailmap_admin.user_permissions.add( create_django_permission(MAILMAP_ADMIN_PERMISSION) ) return mailmap_admin @pytest.fixture def mailmap_user(): mailmap_user = User.objects.create_user(username="mailmap-user", password="") mailmap_user.user_permissions.add(create_django_permission(MAILMAP_PERMISSION)) return mailmap_user def reload_urlconf(): from django.conf import settings clear_url_caches() # force reloading of all URLs as they depend on django settings # and swh-web configuration urlconfs = [settings.ROOT_URLCONF] urlconfs += [f"{app}.urls" for app in settings.SWH_DJANGO_APPS] for urlconf in urlconfs: try: if urlconf in sys.modules: reload(sys.modules[urlconf]) else: import_module(urlconf) except ModuleNotFoundError: pass class SwhSettingsWrapper(SettingsWrapper): def __setattr__(self, attr: str, value) -> None: super().__setattr__(attr, value) reload_urlconf() def finalize(self) -> None: super().finalize() reload_urlconf() @pytest.fixture def django_settings(): """Override pytest-django settings fixture in order to reload URLs when modifying settings in test and after test execution as most of them depend on installed django apps in swh-web. 
""" settings = SwhSettingsWrapper() yield settings settings.finalize() diff --git a/swh/web/tests/helpers.py b/swh/web/tests/helpers.py index fca57fdc..5651a946 100644 --- a/swh/web/tests/helpers.py +++ b/swh/web/tests/helpers.py @@ -1,254 +1,267 @@ # Copyright (C) 2020-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Dict, Optional, cast from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType from django.http.response import HttpResponse, HttpResponseBase, StreamingHttpResponse from django.test.client import Client from rest_framework.response import Response from rest_framework.test import APIClient from swh.web.tests.django_asserts import assert_template_used def _assert_http_response( response: HttpResponseBase, status_code: int, content_type: str ) -> HttpResponseBase: if isinstance(response, Response): drf_response = cast(Response, response) error_context = ( drf_response.data.pop("traceback") if isinstance(drf_response.data, dict) and "traceback" in drf_response.data else drf_response.data ) elif isinstance(response, StreamingHttpResponse): error_context = getattr(response, "traceback", response.streaming_content) elif isinstance(response, HttpResponse): error_context = getattr(response, "traceback", response.content) assert response.status_code == status_code, error_context if content_type != "*/*": assert response["Content-Type"].startswith(content_type) return response def check_http_get_response( client: Client, url: str, status_code: int, content_type: str = "*/*", http_origin: Optional[str] = None, server_name: Optional[str] = None, ) -> HttpResponseBase: """Helper function to check HTTP response for a GET request. 
    Args:
        client: Django test client
        url: URL to check response
        status_code: expected HTTP status code
        content_type: expected response content type
        http_origin: optional HTTP_ORIGIN header value
        server_name: optional SERVER_NAME header value
            ("testserver" is used when not provided)

    Returns:
        The HTTP response
    """
    return _assert_http_response(
        response=client.get(
            url,
            HTTP_ACCEPT=content_type,
            HTTP_ORIGIN=http_origin,
            SERVER_NAME=server_name if server_name else "testserver",
        ),
        status_code=status_code,
        content_type=content_type,
    )


def check_http_post_response(
    client: Client,
    url: str,
    status_code: int,
    content_type: str = "*/*",
    request_content_type="application/json",
    data: Optional[Dict[str, Any]] = None,
    http_origin: Optional[str] = None,
) -> HttpResponseBase:
    """Helper function to check HTTP response for a POST request.

    Args:
        client: Django test client
        url: URL to check response
        status_code: expected HTTP status code
        content_type: expected response content type
        request_content_type: content type of request body
        data: optional POST data
        http_origin: optional HTTP_ORIGIN header value

    Returns:
        The HTTP response
    """
    return _assert_http_response(
        response=client.post(
            url,
            data=data,
            content_type=request_content_type,
            HTTP_ACCEPT=content_type,
            HTTP_ORIGIN=http_origin,
        ),
        status_code=status_code,
        content_type=content_type,
    )


def check_api_get_responses(
    api_client: APIClient, url: str, status_code: int
) -> Response:
    """Helper function to check Web API responses for GET requests
    for all accepted content types (JSON, YAML, HTML).
Args: api_client: DRF test client url: Web API URL to check responses status_code: expected HTTP status code Returns: The Web API JSON response """ # check JSON response response_json = check_http_get_response( api_client, url, status_code, content_type="application/json" ) # check HTML response (API Web UI) check_http_get_response(api_client, url, status_code, content_type="text/html") # check YAML response check_http_get_response( api_client, url, status_code, content_type="application/yaml" ) return cast(Response, response_json) def check_api_post_response( api_client: APIClient, url: str, status_code: int, content_type: str = "*/*", data: Optional[Dict[str, Any]] = None, + **headers, ) -> HttpResponseBase: """Helper function to check Web API response for a POST request for all accepted content types. Args: api_client: DRF test client url: Web API URL to check response status_code: expected HTTP status code Returns: The HTTP response """ return _assert_http_response( response=api_client.post( url, data=data, format="json", HTTP_ACCEPT=content_type, + **headers, ), status_code=status_code, content_type=content_type, ) def check_api_post_responses( api_client: APIClient, url: str, status_code: int, data: Optional[Dict[str, Any]] = None, + **headers, ) -> Response: """Helper function to check Web API responses for POST requests for all accepted content types (JSON, YAML). 
Args: api_client: DRF test client url: Web API URL to check responses status_code: expected HTTP status code Returns: The Web API JSON response """ # check JSON response response_json = check_api_post_response( - api_client, url, status_code, content_type="application/json", data=data + api_client, + url, + status_code, + content_type="application/json", + data=data, + **headers, ) # check YAML response check_api_post_response( - api_client, url, status_code, content_type="application/yaml", data=data + api_client, + url, + status_code, + content_type="application/yaml", + data=data, + **headers, ) return cast(Response, response_json) def check_html_get_response( client: Client, url: str, status_code: int, template_used: Optional[str] = None, http_origin: Optional[str] = None, server_name: Optional[str] = None, ) -> HttpResponseBase: """Helper function to check HTML responses for a GET request. Args: client: Django test client url: URL to check responses status_code: expected HTTP status code template_used: optional used Django template to check Returns: The HTML response """ response = check_http_get_response( client, url, status_code, content_type="text/html", http_origin=http_origin, server_name=server_name, ) if template_used is not None: assert_template_used(response, template_used) return response def create_django_permission(perm_name: str) -> Permission: """Create permission out of a permission name string Args: perm_name: Permission name (e.g. swh.web.api.throttling_exempted, swh.ambassador, ...) 
Returns: The persisted permission """ perm_splitted = perm_name.split(".") app_label = ".".join(perm_splitted[:-1]) perm_name = perm_splitted[-1] content_type = ContentType.objects.create( id=1000 + ContentType.objects.count(), app_label=app_label, model=perm_splitted[-1], ) return Permission.objects.create( codename=perm_name, name=perm_name, content_type=content_type, id=1000 + Permission.objects.count(), ) diff --git a/swh/web/tests/resources/http_esnode1.internal.softwareheritage.org/swh_workers-*__search b/swh/web/tests/save_code_now/data/http_esnode1.internal.softwareheritage.org/swh_workers-*__search similarity index 100% rename from swh/web/tests/resources/http_esnode1.internal.softwareheritage.org/swh_workers-*__search rename to swh/web/tests/save_code_now/data/http_esnode1.internal.softwareheritage.org/swh_workers-*__search diff --git a/swh/web/tests/save_origin_webhooks/__init__.py b/swh/web/tests/save_origin_webhooks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/swh/web/tests/save_origin_webhooks/data/bitbucket_webhook_payload.json b/swh/web/tests/save_origin_webhooks/data/bitbucket_webhook_payload.json new file mode 100644 index 00000000..0d02466c --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/bitbucket_webhook_payload.json @@ -0,0 +1,333 @@ +{ + "push": { + "changes": [ + { + "old": { + "name": "main", + "target": { + "type": "commit", + "hash": "f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72", + "date": "2022-10-26T09:57:22+00:00", + "author": { + "type": "author", + "raw": "John Doe ", + "user": { + "display_name": "John Doe", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/users/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D" + }, + "avatar": { + "href": "https://secure.gravatar.com/avatar/d522e4a403af0605784d0f7936a506a3?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FAL-1.png" + }, + "html": { + "href": 
"https://bitbucket.org/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D/" + } + }, + "type": "user", + "uuid": "{65f18e6c-2e1c-4a89-99c1-f7bad58d5a39}", + "account_id": "5d1483f6f46aa30c271c968e", + "nickname": "John Doe" + } + }, + "message": "Initial commit", + "summary": { + "type": "rendered", + "raw": "Initial commit", + "markup": "markdown", + "html": "

Initial commit

" + }, + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/commits/f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + } + }, + "parents": [], + "rendered": {}, + "properties": {} + }, + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/refs/branches/main" + }, + "commits": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commits/main" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/branch/main" + } + }, + "type": "branch", + "merge_strategies": [ + "merge_commit", + "squash", + "fast_forward" + ], + "default_merge_strategy": "merge_commit" + }, + "new": { + "name": "main", + "target": { + "type": "commit", + "hash": "45ed1aeeb43008b0ec5666ef67242f66639920d7", + "date": "2022-10-26T10:03:24+00:00", + "author": { + "type": "author", + "raw": "John Doe ", + "user": { + "display_name": "John Doe", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/users/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D" + }, + "avatar": { + "href": "https://secure.gravatar.com/avatar/d522e4a403af0605784d0f7936a506a3?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FAL-1.png" + }, + "html": { + "href": "https://bitbucket.org/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D/" + } + }, + "type": "user", + "uuid": "{65f18e6c-2e1c-4a89-99c1-f7bad58d5a39}", + "account_id": "5d1483f6f46aa30c271c968e", + "nickname": "John Doe" + } + }, + "message": "Add new line in README to trigger webhook when pushing commit\n", + "summary": { + "type": "rendered", + "raw": "Add new line in README to trigger webhook when pushing commit\n", + "markup": "markdown", + "html": "

Add new line in README to trigger webhook when pushing commit

" + }, + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/45ed1aeeb43008b0ec5666ef67242f66639920d7" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/commits/45ed1aeeb43008b0ec5666ef67242f66639920d7" + } + }, + "parents": [ + { + "type": "commit", + "hash": "f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/commits/f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + } + } + } + ], + "rendered": {}, + "properties": {} + }, + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/refs/branches/main" + }, + "commits": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commits/main" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/branch/main" + } + }, + "type": "branch", + "merge_strategies": [ + "merge_commit", + "squash", + "fast_forward" + ], + "default_merge_strategy": "merge_commit" + }, + "truncated": false, + "created": false, + "forced": false, + "closed": false, + "links": { + "commits": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commits?include=45ed1aeeb43008b0ec5666ef67242f66639920d7&exclude=f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + }, + "diff": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/diff/45ed1aeeb43008b0ec5666ef67242f66639920d7..f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/branches/compare/45ed1aeeb43008b0ec5666ef67242f66639920d7..f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + } + }, + "commits": [ + { + "type": "commit", + "hash": "45ed1aeeb43008b0ec5666ef67242f66639920d7", + "date": "2022-10-26T10:03:24+00:00", + "author": { + "type": "author", + 
"raw": "John Doe ", + "user": { + "display_name": "John Doe", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/users/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D" + }, + "avatar": { + "href": "https://secure.gravatar.com/avatar/d522e4a403af0605784d0f7936a506a3?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FAL-1.png" + }, + "html": { + "href": "https://bitbucket.org/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D/" + } + }, + "type": "user", + "uuid": "{65f18e6c-2e1c-4a89-99c1-f7bad58d5a39}", + "account_id": "5d1483f6f46aa30c271c968e", + "nickname": "John Doe" + } + }, + "message": "Add new line in README to trigger webhook when pushing commit\n", + "summary": { + "type": "rendered", + "raw": "Add new line in README to trigger webhook when pushing commit\n", + "markup": "markdown", + "html": "

Add new line in README to trigger webhook when pushing commit

" + }, + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/45ed1aeeb43008b0ec5666ef67242f66639920d7" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/commits/45ed1aeeb43008b0ec5666ef67242f66639920d7" + }, + "diff": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/diff/45ed1aeeb43008b0ec5666ef67242f66639920d7" + }, + "approve": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/45ed1aeeb43008b0ec5666ef67242f66639920d7/approve" + }, + "comments": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/45ed1aeeb43008b0ec5666ef67242f66639920d7/comments" + }, + "statuses": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/45ed1aeeb43008b0ec5666ef67242f66639920d7/statuses" + }, + "patch": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/patch/45ed1aeeb43008b0ec5666ef67242f66639920d7" + } + }, + "parents": [ + { + "type": "commit", + "hash": "f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test/commit/f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test/commits/f069b8b3a68b043cdab1cabd2b3415eaf1b0ef72" + } + } + } + ], + "rendered": {}, + "properties": {} + } + ] + } + ] + }, + "repository": { + "type": "repository", + "full_name": "johndoe/webhook-test", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/johndoe/webhook-test" + }, + "html": { + "href": "https://bitbucket.org/johndoe/webhook-test" + }, + "avatar": { + "href": "https://bytebucket.org/ravatar/%7Bb5d2882b-effe-4cf8-bb5a-8a757bdbfe28%7D?ts=default" + } + }, + "name": "webhook-test", + "scm": "git", + "website": null, + "owner": { + "display_name": "John Doe", + "links": { + "self": { + "href": 
"https://api.bitbucket.org/2.0/users/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D" + }, + "avatar": { + "href": "https://secure.gravatar.com/avatar/d522e4a403af0605784d0f7936a506a3?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FAL-1.png" + }, + "html": { + "href": "https://bitbucket.org/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D/" + } + }, + "type": "user", + "uuid": "{65f18e6c-2e1c-4a89-99c1-f7bad58d5a39}", + "account_id": "5d1483f6f46aa30c271c968e", + "nickname": "John Doe" + }, + "workspace": { + "type": "workspace", + "uuid": "{65f18e6c-2e1c-4a89-99c1-f7bad58d5a39}", + "name": "John Doe", + "slug": "johndoe", + "links": { + "avatar": { + "href": "https://bitbucket.org/workspaces/johndoe/avatar/?ts=1561625644" + }, + "html": { + "href": "https://bitbucket.org/johndoe/" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/workspaces/johndoe" + } + } + }, + "is_private": false, + "project": { + "type": "project", + "key": "WEB", + "uuid": "{4c11da16-a545-45c4-84c2-d0ba937a2e23}", + "name": "webhook-test", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/workspaces/johndoe/projects/WEB" + }, + "html": { + "href": "https://bitbucket.org/johndoe/workspace/projects/WEB" + }, + "avatar": { + "href": "https://bitbucket.org/account/user/johndoe/projects/WEB/avatar/32?ts=1666778241" + } + } + }, + "uuid": "{b5d2882b-effe-4cf8-bb5a-8a757bdbfe28}" + }, + "actor": { + "display_name": "John Doe", + "links": { + "self": { + "href": "https://api.bitbucket.org/2.0/users/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D" + }, + "avatar": { + "href": "https://secure.gravatar.com/avatar/d522e4a403af0605784d0f7936a506a3?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FAL-1.png" + }, + "html": { + "href": "https://bitbucket.org/%7B65f18e6c-2e1c-4a89-99c1-f7bad58d5a39%7D/" + } + }, + "type": "user", + "uuid": "{65f18e6c-2e1c-4a89-99c1-f7bad58d5a39}", + "account_id": 
"5d1483f6f46aa30c271c968e", + "nickname": "John Doe" + } +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/gitea_webhook_payload.json b/swh/web/tests/save_origin_webhooks/data/gitea_webhook_payload.json new file mode 100644 index 00000000..f15b5888 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/gitea_webhook_payload.json @@ -0,0 +1,179 @@ +{ + "ref": "refs/heads/main", + "before": "d74a0d9989b1642e82e5f7ab3ba395f4034b1fcd", + "after": "fc0cf34aac1443223c9dd55f3a901e6b38bd25b0", + "compare_url": "https://try.gitea.io/johndoe/webhook-test/compare/d74a0d9989b1642e82e5f7ab3ba395f4034b1fcd...fc0cf34aac1443223c9dd55f3a901e6b38bd25b0", + "commits": [ + { + "id": "fc0cf34aac1443223c9dd55f3a901e6b38bd25b0", + "message": "Add a new line to trigger webhook when pushing commit\n", + "url": "https://try.gitea.io/johndoe/webhook-test/commit/fc0cf34aac1443223c9dd55f3a901e6b38bd25b0", + "author": { + "name": "John Doe", + "email": "john.doe@example.org", + "username": "" + }, + "committer": { + "name": "John Doe", + "email": "john.doe@example.org", + "username": "" + }, + "verification": null, + "timestamp": "2022-10-25T17:34:08+02:00", + "added": [], + "removed": [], + "modified": [ + "README.md" + ] + } + ], + "total_commits": 1, + "head_commit": { + "id": "fc0cf34aac1443223c9dd55f3a901e6b38bd25b0", + "message": "Add a new line to trigger webhook when pushing commit\n", + "url": "https://try.gitea.io/johndoe/webhook-test/commit/fc0cf34aac1443223c9dd55f3a901e6b38bd25b0", + "author": { + "name": "John Doe", + "email": "john.doe@example.org", + "username": "" + }, + "committer": { + "name": "John Doe", + "email": "john.doe@example.org", + "username": "" + }, + "verification": null, + "timestamp": "2022-10-25T17:34:08+02:00", + "added": [], + "removed": [], + "modified": [ + "README.md" + ] + }, + "repository": { + "id": 38171, + "owner": { + "id": 520163, + "login": "johndoe", + "login_name": "", + "full_name": "", + "email": 
"johndoe@noreply.try.gitea.io", + "avatar_url": "https://try.gitea.io/avatars/1c41d79af82e53f20334d1cf9a3dea7e", + "language": "", + "is_admin": false, + "last_login": "0001-01-01T00:00:00Z", + "created": "2022-10-25T15:30:24Z", + "restricted": false, + "active": false, + "prohibit_login": false, + "location": "", + "website": "", + "description": "", + "visibility": "public", + "followers_count": 0, + "following_count": 0, + "starred_repos_count": 0, + "username": "johndoe" + }, + "name": "webhook-test", + "full_name": "johndoe/webhook-test", + "description": "", + "empty": false, + "private": false, + "fork": false, + "template": false, + "parent": null, + "mirror": false, + "size": 88, + "language": "", + "languages_url": "https://try.gitea.io/api/v1/repos/johndoe/webhook-test/languages", + "html_url": "https://try.gitea.io/johndoe/webhook-test", + "ssh_url": "git@try.gitea.io:johndoe/webhook-test.git", + "clone_url": "https://try.gitea.io/johndoe/webhook-test.git", + "original_url": "", + "website": "", + "stars_count": 0, + "forks_count": 0, + "watchers_count": 1, + "open_issues_count": 0, + "open_pr_counter": 0, + "release_counter": 0, + "default_branch": "main", + "archived": false, + "created_at": "2022-10-25T15:31:27Z", + "updated_at": "2022-10-25T15:31:28Z", + "permissions": { + "admin": true, + "push": true, + "pull": true + }, + "has_issues": true, + "internal_tracker": { + "enable_time_tracker": false, + "allow_only_contributors_to_track_time": true, + "enable_issue_dependencies": true + }, + "has_wiki": true, + "has_pull_requests": true, + "has_projects": true, + "ignore_whitespace_conflicts": false, + "allow_merge_commits": true, + "allow_rebase": true, + "allow_rebase_explicit": true, + "allow_squash_merge": true, + "allow_rebase_update": true, + "default_delete_branch_after_merge": false, + "default_merge_style": "merge", + "avatar_url": "", + "internal": false, + "mirror_interval": "", + "mirror_updated": "0001-01-01T00:00:00Z", + "repo_transfer": 
null + }, + "pusher": { + "id": 520163, + "login": "johndoe", + "login_name": "", + "full_name": "", + "email": "johndoe@noreply.try.gitea.io", + "avatar_url": "https://try.gitea.io/avatars/1c41d79af82e53f20334d1cf9a3dea7e", + "language": "", + "is_admin": false, + "last_login": "0001-01-01T00:00:00Z", + "created": "2022-10-25T15:30:24Z", + "restricted": false, + "active": false, + "prohibit_login": false, + "location": "", + "website": "", + "description": "", + "visibility": "public", + "followers_count": 0, + "following_count": 0, + "starred_repos_count": 0, + "username": "johndoe" + }, + "sender": { + "id": 520163, + "login": "johndoe", + "login_name": "", + "full_name": "", + "email": "johndoe@noreply.try.gitea.io", + "avatar_url": "https://try.gitea.io/avatars/1c41d79af82e53f20334d1cf9a3dea7e", + "language": "", + "is_admin": false, + "last_login": "0001-01-01T00:00:00Z", + "created": "2022-10-25T15:30:24Z", + "restricted": false, + "active": false, + "prohibit_login": false, + "location": "", + "website": "", + "description": "", + "visibility": "public", + "followers_count": 0, + "following_count": 0, + "starred_repos_count": 0, + "username": "johndoe" + } +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/github_webhook_payload.json b/swh/web/tests/save_origin_webhooks/data/github_webhook_payload.json new file mode 100644 index 00000000..ed956f87 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/github_webhook_payload.json @@ -0,0 +1,185 @@ +{ + "ref": "refs/heads/main", + "before": "63c72ea620a8d93aa14f0d60b9320881631c83ca", + "after": "366675b3a4017eb3ace4452d8288ae664c4f0af4", + "repository": { + "id": 557370172, + "node_id": "R_kgDOITjLPA", + "name": "webhook-test", + "full_name": "johndoe/webhook-test", + "private": false, + "owner": { + "name": "johndoe", + "email": "john.doe@example.org", + "login": "johndoe", + "id": 5493543, + "node_id": "MDQ6VXNlcjU0OTM1NDM=", + "avatar_url": "", + "gravatar_id": "", + 
"url": "https://api.github.com/users/johndoe", + "html_url": "https://github.com/johndoe", + "followers_url": "https://api.github.com/users/johndoe/followers", + "following_url": "https://api.github.com/users/johndoe/following{/other_user}", + "gists_url": "https://api.github.com/users/johndoe/gists{/gist_id}", + "starred_url": "https://api.github.com/users/johndoe/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/johndoe/subscriptions", + "organizations_url": "https://api.github.com/users/johndoe/orgs", + "repos_url": "https://api.github.com/users/johndoe/repos", + "events_url": "https://api.github.com/users/johndoe/events{/privacy}", + "received_events_url": "https://api.github.com/users/johndoe/received_events", + "type": "User", + "site_admin": false + }, + "html_url": "https://github.com/johndoe/webhook-test", + "description": "Sample repository to test webhooks delivery", + "fork": false, + "url": "https://github.com/johndoe/webhook-test", + "forks_url": "https://api.github.com/repos/johndoe/webhook-test/forks", + "keys_url": "https://api.github.com/repos/johndoe/webhook-test/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/johndoe/webhook-test/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/johndoe/webhook-test/teams", + "hooks_url": "https://api.github.com/repos/johndoe/webhook-test/hooks", + "issue_events_url": "https://api.github.com/repos/johndoe/webhook-test/issues/events{/number}", + "events_url": "https://api.github.com/repos/johndoe/webhook-test/events", + "assignees_url": "https://api.github.com/repos/johndoe/webhook-test/assignees{/user}", + "branches_url": "https://api.github.com/repos/johndoe/webhook-test/branches{/branch}", + "tags_url": "https://api.github.com/repos/johndoe/webhook-test/tags", + "blobs_url": "https://api.github.com/repos/johndoe/webhook-test/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/johndoe/webhook-test/git/tags{/sha}", + 
"git_refs_url": "https://api.github.com/repos/johndoe/webhook-test/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/johndoe/webhook-test/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/johndoe/webhook-test/statuses/{sha}", + "languages_url": "https://api.github.com/repos/johndoe/webhook-test/languages", + "stargazers_url": "https://api.github.com/repos/johndoe/webhook-test/stargazers", + "contributors_url": "https://api.github.com/repos/johndoe/webhook-test/contributors", + "subscribers_url": "https://api.github.com/repos/johndoe/webhook-test/subscribers", + "subscription_url": "https://api.github.com/repos/johndoe/webhook-test/subscription", + "commits_url": "https://api.github.com/repos/johndoe/webhook-test/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/johndoe/webhook-test/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/johndoe/webhook-test/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/johndoe/webhook-test/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/johndoe/webhook-test/contents/{+path}", + "compare_url": "https://api.github.com/repos/johndoe/webhook-test/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/johndoe/webhook-test/merges", + "archive_url": "https://api.github.com/repos/johndoe/webhook-test/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/johndoe/webhook-test/downloads", + "issues_url": "https://api.github.com/repos/johndoe/webhook-test/issues{/number}", + "pulls_url": "https://api.github.com/repos/johndoe/webhook-test/pulls{/number}", + "milestones_url": "https://api.github.com/repos/johndoe/webhook-test/milestones{/number}", + "notifications_url": "https://api.github.com/repos/johndoe/webhook-test/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/johndoe/webhook-test/labels{/name}", + "releases_url": 
"https://api.github.com/repos/johndoe/webhook-test/releases{/id}", + "deployments_url": "https://api.github.com/repos/johndoe/webhook-test/deployments", + "created_at": 1666710385, + "updated_at": "2022-10-25T15:06:25Z", + "pushed_at": 1666710739, + "git_url": "git://github.com/johndoe/webhook-test.git", + "ssh_url": "git@github.com:johndoe/webhook-test.git", + "clone_url": "https://github.com/johndoe/webhook-test.git", + "svn_url": "https://github.com/johndoe/webhook-test", + "homepage": null, + "size": 0, + "stargazers_count": 0, + "watchers_count": 0, + "language": null, + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 0, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 0, + "license": null, + "allow_forking": true, + "is_template": false, + "web_commit_signoff_required": false, + "topics": [], + "visibility": "public", + "forks": 0, + "open_issues": 0, + "watchers": 0, + "default_branch": "main", + "stargazers": 0, + "master_branch": "main" + }, + "pusher": { + "name": "johndoe", + "email": "john.doe@example.org" + }, + "sender": { + "login": "johndoe", + "id": 5493543, + "node_id": "MDQ6VXNlcjU0OTM1NDM=", + "avatar_url": "", + "gravatar_id": "", + "url": "https://api.github.com/users/johndoe", + "html_url": "https://github.com/johndoe", + "followers_url": "https://api.github.com/users/johndoe/followers", + "following_url": "https://api.github.com/users/johndoe/following{/other_user}", + "gists_url": "https://api.github.com/users/johndoe/gists{/gist_id}", + "starred_url": "https://api.github.com/users/johndoe/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/johndoe/subscriptions", + "organizations_url": "https://api.github.com/users/johndoe/orgs", + "repos_url": "https://api.github.com/users/johndoe/repos", + "events_url": "https://api.github.com/users/johndoe/events{/privacy}", + "received_events_url": 
"https://api.github.com/users/johndoe/received_events", + "type": "User", + "site_admin": false + }, + "created": false, + "deleted": false, + "forced": false, + "base_ref": null, + "compare": "https://github.com/johndoe/webhook-test/compare/63c72ea620a8...366675b3a401", + "commits": [ + { + "id": "366675b3a4017eb3ace4452d8288ae664c4f0af4", + "tree_id": "e824f22f6528dfae5489e6a08888f460eb74f975", + "distinct": true, + "message": "Add a new line to trigger webhook when pushing commit", + "timestamp": "2022-10-25T17:12:09+02:00", + "url": "https://github.com/johndoe/webhook-test/commit/366675b3a4017eb3ace4452d8288ae664c4f0af4", + "author": { + "name": "John Doe", + "email": "johndoe@softwareheritage.org", + "username": "johndoe" + }, + "committer": { + "name": "John Doe", + "email": "johndoe@softwareheritage.org", + "username": "johndoe" + }, + "added": [], + "removed": [], + "modified": [ + "README.md" + ] + } + ], + "head_commit": { + "id": "366675b3a4017eb3ace4452d8288ae664c4f0af4", + "tree_id": "e824f22f6528dfae5489e6a08888f460eb74f975", + "distinct": true, + "message": "Add a new line to trigger webhook when pushing commit", + "timestamp": "2022-10-25T17:12:09+02:00", + "url": "https://github.com/johndoe/webhook-test/commit/366675b3a4017eb3ace4452d8288ae664c4f0af4", + "author": { + "name": "John Doe", + "email": "johndoe@softwareheritage.org", + "username": "johndoe" + }, + "committer": { + "name": "John Doe", + "email": "johndoe@softwareheritage.org", + "username": "johndoe" + }, + "added": [], + "removed": [], + "modified": [ + "README.md" + ] + } +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/gitlab_webhook_payload.json b/swh/web/tests/save_origin_webhooks/data/gitlab_webhook_payload.json new file mode 100644 index 00000000..98c17921 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/gitlab_webhook_payload.json @@ -0,0 +1,62 @@ +{ + "object_kind": "push", + "event_name": "push", + "before": 
"1b0392a66f1439bac26c0845a083fe64f01a00fe", + "after": "87b5530bd21f10330bc1437296295205112088c5", + "ref": "refs/heads/main", + "checkout_sha": "87b5530bd21f10330bc1437296295205112088c5", + "message": null, + "user_id": 4775591, + "user_name": "John Doe", + "user_username": "johndoe", + "user_email": "", + "user_avatar": "", + "project_id": 39639957, + "project": { + "id": 39639957, + "name": "webhook-test", + "description": "", + "web_url": "https://gitlab.com/johndoe/test", + "avatar_url": null, + "git_ssh_url": "git@gitlab.com:johndoe/test.git", + "git_http_url": "https://gitlab.com/johndoe/test.git", + "namespace": "John Doe", + "visibility_level": 20, + "path_with_namespace": "johndoe/test", + "default_branch": "main", + "ci_config_path": "", + "homepage": "https://gitlab.com/johndoe/test", + "url": "git@gitlab.com:johndoe/test.git", + "ssh_url": "git@gitlab.com:johndoe/test.git", + "http_url": "https://gitlab.com/johndoe/test.git" + }, + "commits": [ + { + "id": "87b5530bd21f10330bc1437296295205112088c5", + "message": "Remove line to trigger webhook when pushing commit\n", + "title": "Remove line to trigger webhook when pushing commit", + "timestamp": "2022-10-25T17:24:50+02:00", + "url": "https://gitlab.com/johndoe/test/-/commit/87b5530bd21f10330bc1437296295205112088c5", + "author": { + "name": "John Doe", + "email": "[REDACTED]" + }, + "added": [], + "modified": [ + "README.md" + ], + "removed": [] + } + ], + "total_commits_count": 1, + "push_options": {}, + "repository": { + "name": "webhook-test", + "url": "git@gitlab.com:johndoe/test.git", + "description": "", + "homepage": "https://gitlab.com/johndoe/test", + "git_http_url": "https://gitlab.com/johndoe/test.git", + "git_ssh_url": "git@gitlab.com:johndoe/test.git", + "visibility_level": 20 + } +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-git 
b/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-git new file mode 100644 index 00000000..b491c705 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-git @@ -0,0 +1,81 @@ +{ + "shortname": "webhook-test-git", + "name": "webhook-test-git", + "_id": "635802e25fe6a2c790df6731", + "url": "https://sourceforge.net/p/webhook-test-git/", + "private": false, + "short_description": "", + "creation_date": "2022-10-25", + "summary": "", + "external_homepage": "https://webhook-test-git.sourceforge.io", + "video_url": "", + "socialnetworks": [], + "status": "active", + "moved_to_url": "", + "preferred_support_tool": "", + "preferred_support_url": "", + "developers": [ + { + "username": "johndoe", + "name": "John Doe", + "url": "https://sourceforge.net/u/johndoe/" + } + ], + "tools": [ + { + "name": "activity", + "mount_point": "activity", + "url": "https://sourceforge.net/p/webhook-test-git/activity/", + "mount_label": "Activity", + "api_url": "https://sourceforge.net/rest/p/webhook-test-git/activity/" + }, + { + "name": "summary", + "mount_point": "summary", + "url": "https://sourceforge.net/p/webhook-test-git/summary/", + "mount_label": "Summary", + "sourceforge_group_id": 3568118 + }, + { + "name": "files-sf", + "mount_point": "files-sf", + "url": "https://sourceforge.net/p/webhook-test-git/files-sf/", + "mount_label": "Files" + }, + { + "name": "reviews", + "mount_point": "reviews", + "url": "https://sourceforge.net/p/webhook-test-git/reviews/", + "mount_label": "Reviews" + }, + { + "name": "support", + "mount_point": "support", + "url": "https://sourceforge.net/p/webhook-test-git/support/", + "mount_label": "Support" + }, + { + "name": "git", + "mount_point": "code", + "url": "https://sourceforge.net/p/webhook-test-git/code/", + "mount_label": "Code", + "api_url": "https://sourceforge.net/rest/p/webhook-test-git/code/", + "clone_url_https_anon": 
"https://git.code.sf.net/p/webhook-test-git/code", + "clone_url_ro": "git://git.code.sf.net/p/webhook-test-git/code" + } + ], + "labels": [], + "categories": { + "audience": [], + "developmentstatus": [], + "environment": [], + "language": [], + "license": [], + "translation": [], + "os": [], + "database": [], + "topic": [] + }, + "icon_url": null, + "screenshots": [] +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-hg b/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-hg new file mode 100644 index 00000000..c5b9f32e --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-hg @@ -0,0 +1,80 @@ +{ + "shortname": "webhook-test-hg", + "name": "webhook-test-hg", + "_id": "63580743079e46f00071122b", + "url": "https://sourceforge.net/p/webhook-test-hg/", + "private": false, + "short_description": "", + "creation_date": "2022-10-25", + "summary": "", + "external_homepage": "https://webhook-test-hg.sourceforge.io", + "video_url": "", + "socialnetworks": [], + "status": "active", + "moved_to_url": "", + "preferred_support_tool": "", + "preferred_support_url": "", + "developers": [ + { + "username": "johndoe", + "name": "John Doe", + "url": "https://sourceforge.net/u/johndoe/" + } + ], + "tools": [ + { + "name": "activity", + "mount_point": "activity", + "url": "https://sourceforge.net/p/webhook-test-hg/activity/", + "mount_label": "Activity", + "api_url": "https://sourceforge.net/rest/p/webhook-test-hg/activity/" + }, + { + "name": "summary", + "mount_point": "summary", + "url": "https://sourceforge.net/p/webhook-test-hg/summary/", + "mount_label": "Summary", + "sourceforge_group_id": 3568128 + }, + { + "name": "files-sf", + "mount_point": "files-sf", + "url": "https://sourceforge.net/p/webhook-test-hg/files-sf/", + "mount_label": "Files" + }, + { + "name": "reviews", + "mount_point": "reviews", + "url": 
"https://sourceforge.net/p/webhook-test-hg/reviews/", + "mount_label": "Reviews" + }, + { + "name": "support", + "mount_point": "support", + "url": "https://sourceforge.net/p/webhook-test-hg/support/", + "mount_label": "Support" + }, + { + "name": "hg", + "mount_point": "code", + "url": "https://sourceforge.net/p/webhook-test-hg/code/", + "mount_label": "Code", + "api_url": "https://sourceforge.net/rest/p/webhook-test-hg/code/", + "clone_url_ro": "http://hg.code.sf.net/p/webhook-test-hg/code" + } + ], + "labels": [], + "categories": { + "audience": [], + "developmentstatus": [], + "environment": [], + "language": [], + "license": [], + "translation": [], + "os": [], + "database": [], + "topic": [] + }, + "icon_url": null, + "screenshots": [] +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-svn b/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-svn new file mode 100644 index 00000000..a0ea742d --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/https_sourceforge.net/rest_p_webhook-test-svn @@ -0,0 +1,81 @@ +{ + "shortname": "webhook-test-svn", + "name": "webhook-test-svn", + "_id": "635805ebe3c1ec1e7d0eab36", + "url": "https://sourceforge.net/p/webhook-test-svn/", + "private": false, + "short_description": "", + "creation_date": "2022-10-25", + "summary": "", + "external_homepage": "https://webhook-test-svn.sourceforge.io", + "video_url": "", + "socialnetworks": [], + "status": "active", + "moved_to_url": "", + "preferred_support_tool": "", + "preferred_support_url": "", + "developers": [ + { + "username": "johndoe", + "name": "John Doe", + "url": "https://sourceforge.net/u/johndoe/" + } + ], + "tools": [ + { + "name": "activity", + "mount_point": "activity", + "url": "https://sourceforge.net/p/webhook-test-svn/activity/", + "mount_label": "Activity", + "api_url": "https://sourceforge.net/rest/p/webhook-test-svn/activity/" + }, + { + "name": 
"summary", + "mount_point": "summary", + "url": "https://sourceforge.net/p/webhook-test-svn/summary/", + "mount_label": "Summary", + "sourceforge_group_id": 3568124 + }, + { + "name": "files-sf", + "mount_point": "files-sf", + "url": "https://sourceforge.net/p/webhook-test-svn/files-sf/", + "mount_label": "Files" + }, + { + "name": "reviews", + "mount_point": "reviews", + "url": "https://sourceforge.net/p/webhook-test-svn/reviews/", + "mount_label": "Reviews" + }, + { + "name": "support", + "mount_point": "support", + "url": "https://sourceforge.net/p/webhook-test-svn/support/", + "mount_label": "Support" + }, + { + "name": "svn", + "mount_point": "code", + "url": "https://sourceforge.net/p/webhook-test-svn/code/", + "mount_label": "Code", + "api_url": "https://sourceforge.net/rest/p/webhook-test-svn/code/", + "clone_url_https_anon": "https://svn.code.sf.net/p/webhook-test-svn/code/", + "clone_url_ro": "svn://svn.code.sf.net/p/webhook-test-svn/code/" + } + ], + "labels": [], + "categories": { + "audience": [], + "developmentstatus": [], + "environment": [], + "language": [], + "license": [], + "translation": [], + "os": [], + "database": [], + "topic": [] + }, + "icon_url": null, + "screenshots": [] +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_git.json b/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_git.json new file mode 100644 index 00000000..94bf0bed --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_git.json @@ -0,0 +1,36 @@ +{ + "size": 1, + "commits": [ + { + "id": "42157ad6b5d5dee256ba450c9d7a1e70a31bb701", + "url": "https://sourceforge.net/p/webhook-test-git/code/ci/42157ad6b5d5dee256ba450c9d7a1e70a31bb701/", + "timestamp": "2022-10-25T15:43:40Z", + "message": "Add REAME.md", + "author": { + "name": "John Doe", + "email": "john.doe@example.org", + "username": "" + }, + "committer": { + "name": "John Doe", + "email": 
"john.doe@example.org", + "username": "" + }, + "added": [ + "README.md" + ], + "removed": [], + "modified": [], + "copied": [], + "renamed": [] + } + ], + "before": "", + "after": "42157ad6b5d5dee256ba450c9d7a1e70a31bb701", + "repository": { + "name": "Code", + "full_name": "/p/webhook-test-git/code/", + "url": "https://sourceforge.net/p/webhook-test-git/code/" + }, + "ref": "refs/heads/master" +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_hg.json b/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_hg.json new file mode 100644 index 00000000..cd6f4c5b --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_hg.json @@ -0,0 +1,36 @@ +{ + "size": 1, + "commits": [ + { + "id": "c4e59883381e38e79e9c0418e759392da565629d", + "url": "https://sourceforge.net/p/webhook-test-hg/code/ci/c4e59883381e38e79e9c0418e759392da565629d/", + "timestamp": "2022-10-25T15:59:02Z", + "message": "Initial commit", + "author": { + "name": "johndoe", + "email": "john.doe@example.org", + "username": "" + }, + "committer": { + "name": "johndoe", + "email": "john.doe@example.org", + "username": "" + }, + "added": [ + "README" + ], + "removed": [], + "modified": [], + "copied": [], + "renamed": [] + } + ], + "before": "", + "after": "c4e59883381e38e79e9c0418e759392da565629d", + "repository": { + "name": "Code", + "full_name": "/p/webhook-test-hg/code/", + "url": "https://sourceforge.net/p/webhook-test-hg/code/" + }, + "ref": "refs/heads/default" +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_svn.json b/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_svn.json new file mode 100644 index 00000000..9107a797 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/data/sourceforge_webhook_payload_svn.json @@ -0,0 +1,37 @@ +{ + "size": 1, + "commits": [ + { + "id": "r1", + "url": 
"https://sourceforge.net/p/webhook-test-svn/code/1/", + "timestamp": "2022-10-25T15:53:04Z", + "message": "Add initial directories", + "author": { + "name": "johndoe", + "email": "", + "username": "" + }, + "committer": { + "name": "johndoe", + "email": "", + "username": "" + }, + "added": [ + "/branches", + "/tags", + "/trunk" + ], + "removed": [], + "modified": [], + "copied": [], + "renamed": [] + } + ], + "before": "", + "after": "r1", + "repository": { + "name": "Code", + "full_name": "/p/webhook-test-svn/code/", + "url": "https://sourceforge.net/p/webhook-test-svn/code/" + } +} \ No newline at end of file diff --git a/swh/web/tests/save_origin_webhooks/test_bitbucket.py b/swh/web/tests/save_origin_webhooks/test_bitbucket.py new file mode 100644 index 00000000..07db03eb --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/test_bitbucket.py @@ -0,0 +1,88 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import json +import os + +import pytest + +from .utils import ( + origin_save_webhook_receiver_invalid_content_type_test, + origin_save_webhook_receiver_invalid_event_test, + origin_save_webhook_receiver_invalid_request_test, + origin_save_webhook_receiver_no_repo_url_test, + origin_save_webhook_receiver_test, +) + + +@pytest.mark.django_db +def test_origin_save_bitbucket_webhook_receiver(api_client, swh_scheduler, datadir): + with open(os.path.join(datadir, "bitbucket_webhook_payload.json"), "rb") as payload: + origin_save_webhook_receiver_test( + forge_type="Bitbucket", + http_headers={ + "User-Agent": "Bitbucket-Webhooks/2.0", + "X-Event-Key": "repo:push", + }, + payload=json.load(payload), + expected_origin_url="https://bitbucket.org/johndoe/webhook-test.git", + expected_visit_type="git", + api_client=api_client, + swh_scheduler=swh_scheduler, + ) 
+ + +def test_origin_save_bitbucket_webhook_receiver_invalid_request( + api_client, +): + origin_save_webhook_receiver_invalid_request_test( + forge_type="Bitbucket", + http_headers={}, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_bitbucket_webhook_receiver_invalid_event( + api_client, +): + origin_save_webhook_receiver_invalid_event_test( + forge_type="Bitbucket", + http_headers={ + "User-Agent": "Bitbucket-Webhooks/2.0", + "X-Event-Key": "repo:fork", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_bitbucket_webhook_receiver_invalid_content_type( + api_client, +): + origin_save_webhook_receiver_invalid_content_type_test( + forge_type="Bitbucket", + http_headers={ + "User-Agent": "Bitbucket-Webhooks/2.0", + "X-Event-Key": "repo:push", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_bitbucket_webhook_receiver_no_repo_url(api_client, datadir): + with open(os.path.join(datadir, "bitbucket_webhook_payload.json"), "rb") as payload: + payload = json.load(payload) + del payload["repository"] + origin_save_webhook_receiver_no_repo_url_test( + forge_type="Bitbucket", + http_headers={ + "User-Agent": "Bitbucket-Webhooks/2.0", + "X-Event-Key": "repo:push", + }, + payload=payload, + api_client=api_client, + ) diff --git a/swh/web/tests/save_origin_webhooks/test_gitea.py b/swh/web/tests/save_origin_webhooks/test_gitea.py new file mode 100644 index 00000000..05eae0ee --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/test_gitea.py @@ -0,0 +1,87 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import json +import os + +import pytest + +from swh.web.save_code_now.models import SaveAuthorizedOrigin + +from .utils import ( + origin_save_webhook_receiver_invalid_content_type_test, + 
origin_save_webhook_receiver_invalid_event_test, + origin_save_webhook_receiver_invalid_request_test, + origin_save_webhook_receiver_no_repo_url_test, + origin_save_webhook_receiver_test, +) + + +@pytest.mark.django_db +def test_origin_save_gitea_webhook_receiver(api_client, swh_scheduler, datadir): + SaveAuthorizedOrigin.objects.create(url="https://try.gitea.io/") + with open(os.path.join(datadir, "gitea_webhook_payload.json"), "rb") as payload: + origin_save_webhook_receiver_test( + forge_type="Gitea", + http_headers={ + "X-Gitea-Event": "push", + }, + payload=json.load(payload), + expected_origin_url="https://try.gitea.io/johndoe/webhook-test.git", + expected_visit_type="git", + api_client=api_client, + swh_scheduler=swh_scheduler, + ) + + +def test_origin_save_gitea_webhook_receiver_invalid_request( + api_client, +): + origin_save_webhook_receiver_invalid_request_test( + forge_type="Gitea", + http_headers={}, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_gitea_webhook_receiver_invalid_event( + api_client, +): + origin_save_webhook_receiver_invalid_event_test( + forge_type="Gitea", + http_headers={ + "X-Gitea-Event": "issues", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_gitea_webhook_receiver_invalid_content_type( + api_client, +): + origin_save_webhook_receiver_invalid_content_type_test( + forge_type="Gitea", + http_headers={ + "X-Gitea-Event": "push", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_gitea_webhook_receiver_no_repo_url(api_client, datadir): + with open(os.path.join(datadir, "gitea_webhook_payload.json"), "rb") as payload: + payload = json.load(payload) + del payload["repository"] + origin_save_webhook_receiver_no_repo_url_test( + forge_type="Gitea", + http_headers={ + "X-Gitea-Event": "push", + }, + payload=payload, + api_client=api_client, + ) diff --git a/swh/web/tests/save_origin_webhooks/test_github.py b/swh/web/tests/save_origin_webhooks/test_github.py new file 
mode 100644 index 00000000..2cc88dcf --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/test_github.py @@ -0,0 +1,88 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import json +import os + +import pytest + +from .utils import ( + origin_save_webhook_receiver_invalid_content_type_test, + origin_save_webhook_receiver_invalid_event_test, + origin_save_webhook_receiver_invalid_request_test, + origin_save_webhook_receiver_no_repo_url_test, + origin_save_webhook_receiver_test, +) + + +@pytest.mark.django_db +def test_origin_save_github_webhook_receiver(api_client, swh_scheduler, datadir): + with open(os.path.join(datadir, "github_webhook_payload.json"), "rb") as payload: + origin_save_webhook_receiver_test( + forge_type="GitHub", + http_headers={ + "User-Agent": "GitHub-Hookshot/ede37db", + "X-GitHub-Event": "push", + }, + payload=json.load(payload), + expected_origin_url="https://github.com/johndoe/webhook-test", + expected_visit_type="git", + api_client=api_client, + swh_scheduler=swh_scheduler, + ) + + +def test_origin_save_github_webhook_receiver_invalid_request( + api_client, +): + origin_save_webhook_receiver_invalid_request_test( + forge_type="GitHub", + http_headers={}, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_github_webhook_receiver_invalid_event( + api_client, +): + origin_save_webhook_receiver_invalid_event_test( + forge_type="GitHub", + http_headers={ + "User-Agent": "GitHub-Hookshot/ede37db", + "X-GitHub-Event": "issues", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_github_webhook_receiver_invalid_content_type( + api_client, +): + origin_save_webhook_receiver_invalid_content_type_test( + forge_type="GitHub", + http_headers={ + "User-Agent": "GitHub-Hookshot/ede37db", + "X-GitHub-Event": 
"push", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_github_webhook_receiver_no_repo_url(api_client, datadir): + with open(os.path.join(datadir, "github_webhook_payload.json"), "rb") as payload: + payload = json.load(payload) + del payload["repository"] + origin_save_webhook_receiver_no_repo_url_test( + forge_type="GitHub", + http_headers={ + "User-Agent": "GitHub-Hookshot/ede37db", + "X-GitHub-Event": "push", + }, + payload=payload, + api_client=api_client, + ) diff --git a/swh/web/tests/save_origin_webhooks/test_gitlab.py b/swh/web/tests/save_origin_webhooks/test_gitlab.py new file mode 100644 index 00000000..2497d872 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/test_gitlab.py @@ -0,0 +1,88 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import json +import os + +import pytest + +from .utils import ( + origin_save_webhook_receiver_invalid_content_type_test, + origin_save_webhook_receiver_invalid_event_test, + origin_save_webhook_receiver_invalid_request_test, + origin_save_webhook_receiver_no_repo_url_test, + origin_save_webhook_receiver_test, +) + + +@pytest.mark.django_db +def test_origin_save_gitlab_webhook_receiver(api_client, swh_scheduler, datadir): + with open(os.path.join(datadir, "gitlab_webhook_payload.json"), "rb") as payload: + origin_save_webhook_receiver_test( + forge_type="GitLab", + http_headers={ + "User-Agent": "GitLab/15.6.0-pre", + "X-Gitlab-Event": "Push Hook", + }, + payload=json.load(payload), + expected_origin_url="https://gitlab.com/johndoe/test.git", + expected_visit_type="git", + api_client=api_client, + swh_scheduler=swh_scheduler, + ) + + +def test_origin_save_gitlab_webhook_receiver_invalid_request( + api_client, +): + origin_save_webhook_receiver_invalid_request_test( + 
forge_type="GitLab", + http_headers={}, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_gitlab_webhook_receiver_invalid_event( + api_client, +): + origin_save_webhook_receiver_invalid_event_test( + forge_type="GitLab", + http_headers={ + "User-Agent": "GitLab/15.6.0-pre", + "X-Gitlab-Event": "Issue Hook", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_gitlab_webhook_receiver_invalid_content_type( + api_client, +): + origin_save_webhook_receiver_invalid_content_type_test( + forge_type="GitLab", + http_headers={ + "User-Agent": "GitLab/15.6.0-pre", + "X-Gitlab-Event": "Push Hook", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_gitlab_webhook_receiver_no_repo_url(api_client, datadir): + with open(os.path.join(datadir, "gitlab_webhook_payload.json"), "rb") as payload: + payload = json.load(payload) + del payload["repository"] + origin_save_webhook_receiver_no_repo_url_test( + forge_type="GitLab", + http_headers={ + "User-Agent": "GitLab/15.6.0-pre", + "X-Gitlab-Event": "Push Hook", + }, + payload=payload, + api_client=api_client, + ) diff --git a/swh/web/tests/save_origin_webhooks/test_sourceforge.py b/swh/web/tests/save_origin_webhooks/test_sourceforge.py new file mode 100644 index 00000000..1c71a2f9 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/test_sourceforge.py @@ -0,0 +1,100 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import json +import os + +import pytest + +from .utils import ( + origin_save_webhook_receiver_invalid_content_type_test, + origin_save_webhook_receiver_invalid_request_test, + origin_save_webhook_receiver_no_repo_url_test, + origin_save_webhook_receiver_test, +) + + +@pytest.mark.django_db +@pytest.mark.parametrize( + 
"payload_file,expected_origin_url,expected_visit_type", + [ + ( + "sourceforge_webhook_payload_hg.json", + "http://hg.code.sf.net/p/webhook-test-hg/code", + "hg", + ), + ( + "sourceforge_webhook_payload_git.json", + "https://git.code.sf.net/p/webhook-test-git/code", + "git", + ), + ( + "sourceforge_webhook_payload_svn.json", + "https://svn.code.sf.net/p/webhook-test-svn/code/", + "svn", + ), + ], +) +def test_origin_save_sourceforge_webhook_receiver( + api_client, + swh_scheduler, + datadir, + requests_mock_datadir, + payload_file, + expected_origin_url, + expected_visit_type, +): + with open(os.path.join(datadir, payload_file), "rb") as payload: + origin_save_webhook_receiver_test( + forge_type="SourceForge", + http_headers={ + "User-Agent": "Allura Webhook (https://allura.apache.org/)", + }, + payload=json.load(payload), + expected_origin_url=expected_origin_url, + expected_visit_type=expected_visit_type, + api_client=api_client, + swh_scheduler=swh_scheduler, + ) + + +def test_origin_save_sourceforge_webhook_receiver_invalid_request( + api_client, +): + origin_save_webhook_receiver_invalid_request_test( + forge_type="SourceForge", + http_headers={}, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_sourceforge_webhook_receiver_invalid_content_type( + api_client, +): + origin_save_webhook_receiver_invalid_content_type_test( + forge_type="SourceForge", + http_headers={ + "User-Agent": "Allura Webhook (https://allura.apache.org/)", + }, + payload={}, + api_client=api_client, + ) + + +def test_origin_save_sourceforge_webhook_receiver_no_repo_url(api_client, datadir): + with open( + os.path.join(datadir, "sourceforge_webhook_payload_git.json"), "rb" + ) as payload: + payload = json.load(payload) + del payload["repository"] + origin_save_webhook_receiver_no_repo_url_test( + forge_type="SourceForge", + http_headers={ + "User-Agent": "Allura Webhook (https://allura.apache.org/)", + }, + payload=payload, + api_client=api_client, + ) diff --git 
a/swh/web/tests/save_origin_webhooks/utils.py b/swh/web/tests/save_origin_webhooks/utils.py new file mode 100644 index 00000000..3322ab29 --- /dev/null +++ b/swh/web/tests/save_origin_webhooks/utils.py @@ -0,0 +1,143 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from typing import Any, Dict + +from swh.web.tests.helpers import check_api_post_responses +from swh.web.utils import reverse + + +def _django_http_headers(http_headers: Dict[str, Any]): + return {f"HTTP_{k.upper().replace('-', '_')}": v for k, v in http_headers.items()} + + +def origin_save_webhook_receiver_test( + forge_type: str, + http_headers: Dict[str, Any], + payload: Dict[str, Any], + expected_origin_url: str, + expected_visit_type: str, + api_client, + swh_scheduler, +): + url = reverse(f"api-1-origin-save-webhook-{forge_type.lower()}") + + resp = check_api_post_responses( + api_client, + url, + status_code=200, + data=payload, + **_django_http_headers(http_headers), + ) + + assert resp.data["origin_url"] == expected_origin_url + assert resp.data["visit_type"] == expected_visit_type + + tasks = swh_scheduler.search_tasks(task_type=f"load-{expected_visit_type}") + assert tasks + task = dict(tasks[0].items()) + assert task["arguments"]["kwargs"]["url"] == expected_origin_url + + +def origin_save_webhook_receiver_invalid_request_test( + forge_type: str, + http_headers: Dict[str, Any], + payload: Dict[str, Any], + api_client, +): + url = reverse(f"api-1-origin-save-webhook-{forge_type.lower()}") + + resp = check_api_post_responses( + api_client, + url, + status_code=400, + data=payload, + **_django_http_headers(http_headers), + ) + + assert resp.data == { + "exception": "BadInputExc", + "reason": ( + f"POST request was not sent by a {forge_type} webhook " + "and has not been processed." 
+ ), + } + + +def origin_save_webhook_receiver_invalid_event_test( + forge_type: str, + http_headers: Dict[str, Any], + payload: Dict[str, Any], + api_client, +): + url = reverse(f"api-1-origin-save-webhook-{forge_type.lower()}") + + resp = check_api_post_responses( + api_client, + url, + status_code=400, + data=payload, + **_django_http_headers(http_headers), + ) + + assert resp.data == { + "exception": "BadInputExc", + "reason": ( + f"Event sent by {forge_type} webhook is not a push one, request has " + "not been processed." + ), + } + + +def origin_save_webhook_receiver_invalid_content_type_test( + forge_type: str, + http_headers: Dict[str, Any], + payload: Dict[str, Any], + api_client, +): + url = reverse(f"api-1-origin-save-webhook-{forge_type.lower()}") + + bad_content_type = "application/x-www-form-urlencoded" + http_headers["Content-Type"] = bad_content_type + + resp = check_api_post_responses( + api_client, + url, + status_code=400, + data=payload, + **_django_http_headers(http_headers), + ) + + assert resp.data == { + "exception": "BadInputExc", + "reason": ( + f"Invalid content type '{bad_content_type}' for the POST request sent by " + f"{forge_type} webhook, it should be 'application/json'." + ), + } + + +def origin_save_webhook_receiver_no_repo_url_test( + forge_type: str, + http_headers: Dict[str, Any], + payload: Dict[str, Any], + api_client, +): + url = reverse(f"api-1-origin-save-webhook-{forge_type.lower()}") + + resp = check_api_post_responses( + api_client, + url, + status_code=400, + data=payload, + **_django_http_headers(http_headers), + ) + + assert resp.data == { + "exception": "BadInputExc", + "reason": ( + f"Repository URL could not be extracted from {forge_type} webhook payload." + ), + }