diff --git a/swh/web/api/views/origin_save.py b/swh/web/api/views/origin_save.py index a2d3366f..d9370bdc 100644 --- a/swh/web/api/views/origin_save.py +++ b/swh/web/api/views/origin_save.py @@ -1,111 +1,113 @@ # Copyright (C) 2018-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.auth.utils import privileged_user from swh.web.common.origin_save import ( create_save_origin_request, get_savable_visit_types, get_save_origin_requests, ) def _savable_visit_types(): visit_types = sorted(get_savable_visit_types()) docstring = "" for visit_type in visit_types[:-1]: docstring += f"**{visit_type}**, " docstring += f"and **{visit_types[-1]}**" return docstring @api_route( r"/origin/save/(?P.+)/url/(?P.+)/", "api-1-save-origin", methods=["GET", "POST"], throttle_scope="swh_save_origin", never_cache=True, ) @api_doc("/origin/save/") @format_docstring(visit_types=_savable_visit_types()) def api_save_origin(request, visit_type, origin_url): """ .. http:get:: /api/1/origin/save/(visit_type)/url/(origin_url)/ .. http:post:: /api/1/origin/save/(visit_type)/url/(origin_url)/ Request the saving of a software origin into the archive or check the status of previously created save requests. That endpoint enables to create a saving task for a software origin through a POST request. 
Depending of the provided origin url, the save request can either be: * immediately **accepted**, for well known code hosting providers like for instance GitHub or GitLab * **rejected**, in case the url is blacklisted by Software Heritage * **put in pending state** until a manual check is done in order to determine if it can be loaded or not Once a saving request has been accepted, its associated saving task status can then be checked through a GET request on the same url. Returned status can either be: * **not created**: no saving task has been created * **not yet scheduled**: saving task has been created but its execution has not yet been scheduled * **scheduled**: the task execution has been scheduled * **succeeded**: the saving task has been successfully executed * **failed**: the saving task has been executed but it failed When issuing a POST request an object will be returned while a GET request will return an array of objects (as multiple save requests might have been submitted for the same origin). :param string visit_type: the type of visit to perform (currently the supported types are {visit_types}) :param string origin_url: the url of the origin to save {common_headers} :>json string origin_url: the url of the origin to save :>json string visit_type: the type of visit to perform :>json string save_request_date: the date (in iso format) the save request was issued :>json string save_request_status: the status of the save request, either **accepted**, **rejected** or **pending** :>json string save_task_status: the status of the origin saving task, either **not created**, **not yet scheduled**, **scheduled**, **succeeded** or **failed** :>json string visit_date: the date (in iso format) of the visit if a visit occurred, null otherwise. :>json string visit_status: the status of the visit, either **full**, **partial**, **not_found** or **failed** if a visit occurred, null otherwise. 
+ :>json string note: optional note giving details about the save request, + for instance why it has been rejected :statuscode 200: no error :statuscode 400: an invalid visit type or origin url has been provided :statuscode 403: the provided origin url is blacklisted :statuscode 404: no save requests have been found for a given origin """ data = request.data or {} if request.method == "POST": sor = create_save_origin_request( visit_type, origin_url, privileged_user(request), user_id=request.user.id, **data, ) del sor["id"] else: sor = get_save_origin_requests(visit_type, origin_url) for s in sor: del s["id"] return sor diff --git a/swh/web/common/migrations/0012_saveoriginrequest_note.py b/swh/web/common/migrations/0012_saveoriginrequest_note.py new file mode 100644 index 00000000..6df1582f --- /dev/null +++ b/swh/web/common/migrations/0012_saveoriginrequest_note.py @@ -0,0 +1,21 @@ +# Copyright (C) 2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("swh_web_common", "0011_saveoriginrequest_user_ids"), + ] + + operations = [ + migrations.AddField( + model_name="saveoriginrequest", + name="note", + field=models.TextField(null=True), + ), + ] diff --git a/swh/web/common/models.py b/swh/web/common/models.py index 380ecd77..fc2738aa 100644 --- a/swh/web/common/models.py +++ b/swh/web/common/models.py @@ -1,133 +1,135 @@ # Copyright (C) 2018-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.db import models from swh.web.common.typing import SaveOriginRequestInfo class 
SaveAuthorizedOrigin(models.Model): """ Model table holding origin urls authorized to be loaded into the archive. """ url = models.CharField(max_length=200, null=False) class Meta: app_label = "swh_web_common" db_table = "save_authorized_origin" indexes = [models.Index(fields=["url"])] def __str__(self): return self.url class SaveUnauthorizedOrigin(models.Model): """ Model table holding origin urls not authorized to be loaded into the archive. """ url = models.CharField(max_length=200, null=False) class Meta: app_label = "swh_web_common" db_table = "save_unauthorized_origin" indexes = [models.Index(fields=["url"])] def __str__(self): return self.url SAVE_REQUEST_ACCEPTED = "accepted" SAVE_REQUEST_REJECTED = "rejected" SAVE_REQUEST_PENDING = "pending" SAVE_REQUEST_STATUS = [ (SAVE_REQUEST_ACCEPTED, SAVE_REQUEST_ACCEPTED), (SAVE_REQUEST_REJECTED, SAVE_REQUEST_REJECTED), (SAVE_REQUEST_PENDING, SAVE_REQUEST_PENDING), ] SAVE_TASK_NOT_CREATED = "not created" SAVE_TASK_NOT_YET_SCHEDULED = "not yet scheduled" SAVE_TASK_SCHEDULED = "scheduled" SAVE_TASK_SUCCEEDED = "succeeded" SAVE_TASK_FAILED = "failed" SAVE_TASK_RUNNING = "running" SAVE_TASK_STATUS = [ (SAVE_TASK_NOT_CREATED, SAVE_TASK_NOT_CREATED), (SAVE_TASK_NOT_YET_SCHEDULED, SAVE_TASK_NOT_YET_SCHEDULED), (SAVE_TASK_SCHEDULED, SAVE_TASK_SCHEDULED), (SAVE_TASK_SUCCEEDED, SAVE_TASK_SUCCEEDED), (SAVE_TASK_FAILED, SAVE_TASK_FAILED), (SAVE_TASK_RUNNING, SAVE_TASK_RUNNING), ] VISIT_STATUS_CREATED = "created" VISIT_STATUS_ONGOING = "ongoing" VISIT_STATUS_FULL = "full" VISIT_STATUS_PARTIAL = "partial" VISIT_STATUS_NOT_FOUND = "not_found" VISIT_STATUS_FAILED = "failed" VISIT_STATUSES = [ (VISIT_STATUS_CREATED, VISIT_STATUS_CREATED), (VISIT_STATUS_ONGOING, VISIT_STATUS_ONGOING), (VISIT_STATUS_FULL, VISIT_STATUS_FULL), (VISIT_STATUS_PARTIAL, VISIT_STATUS_PARTIAL), (VISIT_STATUS_NOT_FOUND, VISIT_STATUS_NOT_FOUND), (VISIT_STATUS_FAILED, VISIT_STATUS_FAILED), ] class SaveOriginRequest(models.Model): """ Model table holding all the 
save origin requests issued by users. """ id = models.BigAutoField(primary_key=True) request_date = models.DateTimeField(auto_now_add=True) visit_type = models.CharField(max_length=200, null=False) visit_status = models.TextField(choices=VISIT_STATUSES, null=True) origin_url = models.CharField(max_length=200, null=False) status = models.TextField(choices=SAVE_REQUEST_STATUS, default=SAVE_REQUEST_PENDING) loading_task_id = models.IntegerField(default=-1) visit_date = models.DateTimeField(null=True) loading_task_status = models.TextField( choices=SAVE_TASK_STATUS, default=SAVE_TASK_NOT_CREATED ) # store ids of users that submitted the request as string list user_ids = models.TextField(null=True) + note = models.TextField(null=True) class Meta: app_label = "swh_web_common" db_table = "save_origin_request" ordering = ["-id"] indexes = [models.Index(fields=["origin_url", "status"])] def to_dict(self) -> SaveOriginRequestInfo: """Map the request save model object to a json serializable dict. Returns: The corresponding SaveOriginRequetsInfo json serializable dict. 
""" visit_date = self.visit_date return SaveOriginRequestInfo( id=self.id, origin_url=self.origin_url, visit_type=self.visit_type, save_request_date=self.request_date.isoformat(), save_request_status=self.status, save_task_status=self.loading_task_status, visit_status=self.visit_status, visit_date=visit_date.isoformat() if visit_date else None, loading_task_id=self.loading_task_id, + note=self.note, ) def __str__(self) -> str: return str(self.to_dict()) diff --git a/swh/web/common/typing.py b/swh/web/common/typing.py index 46866201..e9cad745 100644 --- a/swh/web/common/typing.py +++ b/swh/web/common/typing.py @@ -1,259 +1,261 @@ # Copyright (C) 2020-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Dict, List, Optional, TypeVar, Union from typing_extensions import TypedDict from django.http import QueryDict from swh.core.api.classes import PagedResult as CorePagedResult from swh.model.identifiers import ObjectType QueryParameters = Union[Dict[str, Any], QueryDict] class OriginInfo(TypedDict): url: str """URL of the origin""" class OriginMetadataInfo(TypedDict): url: str """URL of the origin""" metadata: Dict[str, Any] """Origin metadata associated to the origin""" class OriginVisitInfo(TypedDict): date: str """date of the visit in iso format""" formatted_date: str """formatted date of the visit""" metadata: Dict[str, Any] """metadata associated to the visit""" origin: str """visited origin URL""" snapshot: str """snapshot identifier computed during the visit""" status: str """status of the visit ("ongoing", "full" or "partial") """ type: str """visit type (git, hg, debian, ...)""" url: str """URL to browse the snapshot""" visit: int """visit identifier""" class SnapshotBranchInfo(TypedDict): date: Optional[str] """"author date of branch heading revision""" 
directory: Optional[str] """directory associated to branch heading revision""" message: Optional[str] """message of branch heading revision""" name: str """branch name""" alias: bool """define if the branch is an alias""" revision: str """branch heading revision""" url: Optional[str] """optional browse URL (content, directory, ...) scoped to branch""" class SnapshotReleaseInfo(TypedDict): branch_name: str """branch name associated to release in snapshot""" date: str """release date""" directory: Optional[str] """optional directory associatd to the release""" id: str """release identifier""" message: str """release message""" name: str """release name""" alias: bool """define if the branch is an alias""" target: str """release target""" target_type: str """release target_type""" url: Optional[str] """optional browse URL (content, directory, ...) scoped to release""" class SnapshotContext(TypedDict): branch: Optional[str] """optional branch name set when browsing snapshot in that scope""" branch_alias: bool """indicates if the focused branch is an alias""" branches: List[SnapshotBranchInfo] """list of snapshot branches (possibly truncated)""" branches_url: str """snapshot branches list browse URL""" is_empty: bool """indicates if the snapshot is empty""" origin_info: Optional[OriginInfo] """optional origin info associated to the snapshot""" origin_visits_url: Optional[str] """optional origin visits URL""" query_params: QueryParameters """common query parameters when browsing snapshot content""" release: Optional[str] """optional release name set when browsing snapshot in that scope""" release_alias: bool """indicates if the focused release is an alias""" release_id: Optional[str] """optional release identifier set when browsing snapshot in that scope""" releases: List[SnapshotReleaseInfo] """list of snapshot releases (possibly truncated)""" releases_url: str """snapshot releases list browse URL""" revision_id: Optional[str] """optional revision identifier set when 
browsing snapshot in that scope""" revision_info: Optional[Dict[str, Any]] """optional revision info set when browsing snapshot in that scope""" root_directory: Optional[str] """optional root directory identifier set when browsing snapshot content""" snapshot_id: str """snapshot identifier""" snapshot_sizes: Dict[str, int] """snapshot sizes grouped by branch target type""" snapshot_swhid: str """snapshot SWHID""" url_args: Dict[str, Any] """common URL arguments when browsing snapshot content""" visit_info: Optional[OriginVisitInfo] """optional origin visit info associated to the snapshot""" class SWHObjectInfo(TypedDict): object_type: ObjectType object_id: str class SWHIDContext(TypedDict, total=False): origin: str anchor: str visit: str path: str lines: str class SWHIDInfo(SWHObjectInfo): swhid: str swhid_url: str context: SWHIDContext swhid_with_context: Optional[str] swhid_with_context_url: Optional[str] class SWHObjectInfoMetadata(TypedDict, total=False): origin_url: Optional[str] visit_date: Optional[str] visit_type: Optional[str] class ContentMetadata(SWHObjectInfo, SWHObjectInfoMetadata): sha1: str sha1_git: str sha256: str blake2s256: str content_url: str mimetype: str encoding: str size: int language: str path: Optional[str] filename: Optional[str] directory: Optional[str] root_directory: Optional[str] revision: Optional[str] release: Optional[str] snapshot: Optional[str] class DirectoryMetadata(SWHObjectInfo, SWHObjectInfoMetadata): directory: str nb_files: int nb_dirs: int sum_file_sizes: int root_directory: Optional[str] path: str revision: Optional[str] revision_found: Optional[bool] release: Optional[str] snapshot: Optional[str] class ReleaseMetadata(SWHObjectInfo, SWHObjectInfoMetadata): release: str author: str author_url: str date: str name: str synthetic: bool target: str target_type: str snapshot: Optional[str] class RevisionMetadata(SWHObjectInfo, SWHObjectInfoMetadata): revision: str author: str author_url: str committer: str committer_url: str 
date: str committer_date: str directory: str merge: bool metadata: str parents: List[str] synthetic: bool type: str snapshot: Optional[str] TResult = TypeVar("TResult") PagedResult = CorePagedResult[TResult, str] class SaveOriginRequestInfo(TypedDict): id: int """Unique key""" save_request_date: str """Date of the creation request""" visit_type: str """Type of the visit""" visit_status: Optional[str] """Status of the visit""" origin_url: str """Origin to ingest""" save_request_status: str """Status of the request""" loading_task_id: Optional[int] """Identifier of the loading task in the scheduler if scheduled""" visit_date: Optional[str] """End of the visit if terminated""" save_task_status: str """Status of the scheduled task""" + note: Optional[str] + """Optional note associated to the request, for instance rejection reason""" class OriginExistenceCheckInfo(TypedDict): origin_url: str """Origin to check""" exists: bool """Does the url exist?""" content_length: Optional[int] """content length of the artifact""" last_modified: Optional[str] """Last modification time reported by the server (as iso8601 string)""" diff --git a/swh/web/tests/common/test_django_command.py b/swh/web/tests/common/test_django_command.py index 3d64bb99..2c975210 100644 --- a/swh/web/tests/common/test_django_command.py +++ b/swh/web/tests/common/test_django_command.py @@ -1,177 +1,178 @@ # Copyright (C) 2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime, timedelta, timezone from io import StringIO import pytest from django.core.management import call_command from swh.core.api.classes import stream_results from swh.web.common.models import ( SAVE_REQUEST_ACCEPTED, SAVE_TASK_FAILED, SAVE_TASK_SCHEDULED, SAVE_TASK_SUCCEEDED, VISIT_STATUS_FAILED, VISIT_STATUS_FULL, 
VISIT_STATUS_PARTIAL, ) from swh.web.common.typing import SaveOriginRequestInfo from swh.web.config import get_config MODULE_FQDN = "swh.web.common.management.commands" COMMAND_NAME = "refresh_savecodenow_statuses" AUTHORIZED_ORIGIN_URL = "https://scm.ourproject.org/anonscm/%s" @pytest.fixture def mock_refresh(mocker): return mocker.patch( f"{MODULE_FQDN}.{COMMAND_NAME}.refresh_save_origin_request_statuses" ) @pytest.fixture def mock_scheduler(mocker, swh_scheduler): mock_scheduler = mocker.patch(f"{MODULE_FQDN}.{COMMAND_NAME}.get_scheduler") mock_scheduler.return_value = swh_scheduler return mock_scheduler @pytest.mark.parametrize("nb_results", [0, 10, 20]) def test_command_refresh__with_statuses_refreshed( mock_scheduler, mock_refresh, nb_results ): """Refresh status command reports non-terminal statuses updates. """ # fake returned refreshed status for 'archives' visit type mock_refresh.return_value = [{"visit_type": "archives",}] * nb_results out = StringIO() call_command(COMMAND_NAME, stdout=out) actual_output = out.getvalue() if nb_results > 0: assert f"updated {nb_results}" in actual_output else: assert "Nothing" in actual_output assert mock_scheduler.called assert mock_refresh.called @pytest.fixture def fake_refreshed_data(): """Prepare test data within the scheduler and the swh-web model db """ duplicated_origin_url = AUTHORIZED_ORIGIN_URL % "specific-origin" entries = ( [ { "visit_type": "archives", # ignored from recurring task scheduling "visit_status": VISIT_STATUS_FULL, "task_status": SAVE_TASK_SUCCEEDED, }, { "visit_type": "hg", # scheduled as recurring task "visit_status": VISIT_STATUS_PARTIAL, "task_status": SAVE_TASK_SUCCEEDED, }, { "visit_type": "svn", # scheduled as recurring task "visit_status": VISIT_STATUS_PARTIAL, "task_status": SAVE_TASK_SCHEDULED, }, { "visit_type": "svn", # ignored from recurring task scheduling "visit_status": VISIT_STATUS_FAILED, "task_status": SAVE_TASK_FAILED, }, { "visit_type": "hg", # ignored from recurring task 
scheduling "visit_status": "created", "task_status": SAVE_TASK_SCHEDULED, }, ] + [ { "visit_type": "git", "visit_status": VISIT_STATUS_FULL, "task_status": SAVE_TASK_SUCCEEDED, "origin": duplicated_origin_url, } ] * 3 ) # only 1 of the origin duplicates will be scheduled as recurring task time_now = datetime.now(tz=timezone.utc) - timedelta(days=len(entries)) return [ SaveOriginRequestInfo( visit_type=meta["visit_type"], visit_status=meta["visit_status"], origin_url=( meta["origin"] if "origin" in meta else AUTHORIZED_ORIGIN_URL % i ), save_request_date=time_now + timedelta(days=i - 1), save_request_status=SAVE_REQUEST_ACCEPTED, visit_date=time_now + timedelta(days=i), save_task_status=meta["task_status"], id=i, loading_task_id=i, + note=None, ) for i, meta in enumerate(entries) ] def test_command_refresh__with_recurrent_tasks_scheduling( mock_scheduler, mock_refresh, fake_refreshed_data, swh_scheduler ): """Refresh status command report updates of statuses. The successful ones without the type 'archived' are also scheduled recurringly. """ mock_refresh.return_value = fake_refreshed_data # only visit types (git, hg, svn) types with status (full, partial) are taken into # account for scheduling, so only 3 of those matches in the fake data set. 
expected_nb_scheduled = 0 origins = set() expected_nb_scheduled = 0 for entry in fake_refreshed_data: visit_type = entry["visit_type"] if visit_type == "archives": # only deal with git, svn, hg continue if entry["visit_status"] not in ("partial", "full"): continue origin = entry["origin_url"] if (visit_type, origin) in origins: continue origins.add((visit_type, origin)) expected_nb_scheduled += 1 assert expected_nb_scheduled == 3 out = StringIO() call_command(COMMAND_NAME, stdout=out) actual_output = out.getvalue() assert f"Successfully updated {len(fake_refreshed_data)}" in actual_output lister = swh_scheduler.get_or_create_lister( name="save-code-now", instance_name=get_config()["instance_name"] ) result = list(stream_results(swh_scheduler.get_listed_origins, lister.id)) assert len(result) == expected_nb_scheduled assert mock_scheduler.called assert mock_refresh.called diff --git a/swh/web/tests/common/test_origin_save.py b/swh/web/tests/common/test_origin_save.py index d9faf684..3eef778f 100644 --- a/swh/web/tests/common/test_origin_save.py +++ b/swh/web/tests/common/test_origin_save.py @@ -1,762 +1,765 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime, timedelta, timezone from functools import partial import re from typing import Optional import uuid import iso8601 import pytest import requests from swh.core.pytest_plugin import get_response_cb from swh.scheduler.utils import create_oneshot_task_dict from swh.web.common.exc import BadInputExc from swh.web.common.models import ( SAVE_REQUEST_ACCEPTED, SAVE_TASK_FAILED, SAVE_TASK_RUNNING, SAVE_TASK_SCHEDULED, SAVE_TASK_SUCCEEDED, VISIT_STATUS_CREATED, VISIT_STATUS_FULL, VISIT_STATUS_ONGOING, VISIT_STATUS_PARTIAL, SaveOriginRequest, ) from swh.web.common.origin_save import ( 
_check_origin_exists, _check_visit_type_savable, _visit_type_task, _visit_type_task_privileged, get_savable_visit_types, get_save_origin_requests, get_save_origin_task_info, origin_exists, refresh_save_origin_request_statuses, ) from swh.web.common.typing import ( OriginExistenceCheckInfo, OriginVisitInfo, SaveOriginRequestInfo, ) from swh.web.config import get_config _es_url = "http://esnode1.internal.softwareheritage.org:9200" _es_workers_index_url = "%s/swh_workers-*" % _es_url _origin_url = "https://gitlab.com/inkscape/inkscape" _visit_type = "git" _task_id = 1 @pytest.fixture(autouse=True) def requests_mock_datadir(datadir, requests_mock_datadir): """Override default behavior to deal with post method""" cb = partial(get_response_cb, datadir=datadir) requests_mock_datadir.post(re.compile("https?://"), body=cb) return requests_mock_datadir @pytest.mark.django_db def test_get_save_origin_archived_task_info(swh_scheduler): _get_save_origin_task_info_test(swh_scheduler, task_archived=True) @pytest.mark.django_db def test_get_save_origin_task_info_without_es(swh_scheduler): _get_save_origin_task_info_test(swh_scheduler, es_available=False) def _fill_scheduler_db( swh_scheduler, task_status="completed", task_run_status="eventful", task_archived=False, visit_started_date=None, ): task = task_run = None if not task_archived: task = swh_scheduler.create_tasks( [create_oneshot_task_dict("load-git", repo_url=_origin_url)] )[0] backend_id = str(uuid.uuid4()) if task_status != "next_run_not_scheduled": swh_scheduler.schedule_task_run(task["id"], backend_id) if task_run_status is not None: swh_scheduler.start_task_run(backend_id) task_run = dict( swh_scheduler.end_task_run(backend_id, task_run_status).items() ) return task, task_run @pytest.mark.parametrize( "wrong_type,privileged_user", [ ("dummy", True), ("dumb", False), ("archives", False), # when no privilege, this is rejected ], ) def test_check_visit_type_savable(wrong_type, privileged_user, swh_scheduler): 
swh_scheduler.add_load_archive_task_type() with pytest.raises(BadInputExc, match="Allowed types"): _check_visit_type_savable(wrong_type, privileged_user) # when privileged_user, the following is accepted though _check_visit_type_savable("archives", True) def test_get_savable_visit_types(swh_scheduler): swh_scheduler.add_load_archive_task_type() default_list = list(_visit_type_task.keys()) assert set(get_savable_visit_types()) == set(default_list) privileged_list = default_list.copy() privileged_list += list(_visit_type_task_privileged.keys()) assert set(get_savable_visit_types(privileged_user=True)) == set(privileged_list) def _get_save_origin_task_info_test( swh_scheduler, task_archived=False, es_available=True, full_info=True ): swh_web_config = get_config() if es_available: swh_web_config.update({"es_workers_index_url": _es_workers_index_url}) else: swh_web_config.update({"es_workers_index_url": ""}) sor = SaveOriginRequest.objects.create( request_date=datetime.now(tz=timezone.utc), visit_type=_visit_type, origin_url="https://gitlab.com/inkscape/inkscape", status=SAVE_REQUEST_ACCEPTED, visit_date=datetime.now(tz=timezone.utc) + timedelta(hours=1), loading_task_id=_task_id, ) task, task_run = _fill_scheduler_db(swh_scheduler, task_archived=task_archived) es_response = requests.post("%s/_search" % _es_workers_index_url).json() task_exec_data = es_response["hits"]["hits"][-1]["_source"] sor_task_info = get_save_origin_task_info(sor.id, full_info=full_info) expected_result = ( { "type": task["type"], "arguments": task["arguments"], "id": task["id"], "backend_id": task_run["backend_id"], "scheduled": task_run["scheduled"], "started": task_run["started"], "ended": task_run["ended"], "status": task_run["status"], "visit_status": sor.visit_status, } if not task_archived else {} ) if es_available and not task_archived: expected_result.update( { "message": task_exec_data["message"], "name": task_exec_data["swh_task_name"], "worker": task_exec_data["hostname"], } ) if not 
full_info: expected_result.pop("id", None) expected_result.pop("backend_id", None) expected_result.pop("worker", None) if "message" in expected_result: message = "" message_lines = expected_result["message"].split("\n") for line in message_lines: if line.startswith("Traceback"): break message += f"{line}\n" message += message_lines[-1] expected_result["message"] = message assert sor_task_info == expected_result @pytest.mark.django_db def test_get_save_origin_requests_find_visit_date(mocker, swh_scheduler): # create a save request SaveOriginRequest.objects.create( request_date=datetime.now(tz=timezone.utc), visit_type=_visit_type, origin_url=_origin_url, status=SAVE_REQUEST_ACCEPTED, visit_date=None, loading_task_id=_task_id, ) # mock scheduler and archive _fill_scheduler_db(swh_scheduler) mock_archive = mocker.patch("swh.web.common.origin_save.archive") mock_archive.lookup_origin.return_value = {"url": _origin_url} mock_get_origin_visits = mocker.patch( "swh.web.common.origin_save.get_origin_visits" ) # create a visit for the save request visit_date = datetime.now(tz=timezone.utc).isoformat() visit_info = OriginVisitInfo( date=visit_date, formatted_date="", metadata={}, origin=_origin_url, snapshot="", status=VISIT_STATUS_FULL, type=_visit_type, url="", visit=34, ) mock_get_origin_visits.return_value = [visit_info] # check visit date has been correctly found sors = get_save_origin_requests(_visit_type, _origin_url) assert len(sors) == 1 assert sors[0]["save_task_status"] == SAVE_TASK_SUCCEEDED assert sors[0]["visit_date"] == visit_date mock_get_origin_visits.assert_called_once() # check visit is not searched again when it has been found get_save_origin_requests(_visit_type, _origin_url) mock_get_origin_visits.assert_called_once() # check visit date are not searched for save requests older than # one month sor = SaveOriginRequest.objects.create( visit_type=_visit_type, origin_url=_origin_url, status=SAVE_REQUEST_ACCEPTED, loading_task_id=_task_id, visit_date=None, ) 
sor.request_date = datetime.now(tz=timezone.utc) - timedelta(days=31) sor.save() _fill_scheduler_db(swh_scheduler, task_status="disabled", task_run_status="failed") sors = get_save_origin_requests(_visit_type, _origin_url) assert len(sors) == 2 assert sors[0]["save_task_status"] == SAVE_TASK_FAILED assert sors[0]["visit_date"] is None mock_get_origin_visits.assert_called_once() def _get_save_origin_requests( mocker, swh_scheduler, load_status, visit_status, request_date: Optional[datetime] = None, ): """Wrapper around the get_origin_save_origin_request call. """ SaveOriginRequest.objects.create( request_date=datetime.now(tz=timezone.utc), visit_type=_visit_type, visit_status=visit_status, origin_url=_origin_url, status=SAVE_REQUEST_ACCEPTED, visit_date=None, loading_task_id=_task_id, ) # mock scheduler and archives _fill_scheduler_db( swh_scheduler, task_status="next_run_scheduled", task_run_status=load_status ) mock_archive = mocker.patch("swh.web.common.origin_save.archive") mock_archive.lookup_origin.return_value = {"url": _origin_url} mock_get_origin_visits = mocker.patch( "swh.web.common.origin_save.get_origin_visits" ) # create a visit for the save request with status created visit_date = datetime.now(tz=timezone.utc).isoformat() visit_info = OriginVisitInfo( date=visit_date, formatted_date="", metadata={}, origin=_origin_url, snapshot="", # make mypy happy status=visit_status, type=_visit_type, url="", visit=34, ) mock_get_origin_visits.return_value = [visit_info] sors = get_save_origin_requests(_visit_type, _origin_url) mock_get_origin_visits.assert_called_once() return sors @pytest.mark.parametrize("visit_date", [None, "some-date"]) def test_from_save_origin_request_to_save_request_info_dict(visit_date): """Ensure save request to json serializable dict is fine """ request_date = datetime.now(tz=timezone.utc) _visit_date = request_date + timedelta(minutes=5) if visit_date else None request_date = datetime.now(tz=timezone.utc) + note = "request succeeded" 
sor = SaveOriginRequest( request_date=request_date, visit_type=_visit_type, visit_status=VISIT_STATUS_FULL, origin_url=_origin_url, status=SAVE_REQUEST_ACCEPTED, loading_task_status=None, visit_date=_visit_date, loading_task_id=1, + note=note, ) assert sor.to_dict() == SaveOriginRequestInfo( id=sor.id, origin_url=sor.origin_url, visit_type=sor.visit_type, save_request_date=sor.request_date.isoformat(), save_request_status=sor.status, save_task_status=sor.loading_task_status, visit_status=sor.visit_status, visit_date=_visit_date.isoformat() if _visit_date else None, loading_task_id=sor.loading_task_id, + note=note, ) def test__check_origin_exists_404(requests_mock): url_ko = "https://example.org/some-inexistant-url" requests_mock.head(url_ko, status_code=404) with pytest.raises(BadInputExc, match="not exist"): _check_origin_exists(url_ko) def test__check_origin_exists_200(requests_mock): url = "https://example.org/url" requests_mock.head(url, status_code=200) # passes the check actual_metadata = _check_origin_exists(url) # and we actually may have retrieved some metadata on the origin assert actual_metadata == origin_exists(url) def test_origin_exists_404(requests_mock): """Origin which does not exist should be reported as inexistent""" url_ko = "https://example.org/some-inexistant-url" requests_mock.head(url_ko, status_code=404) actual_result = origin_exists(url_ko) assert actual_result == OriginExistenceCheckInfo( origin_url=url_ko, exists=False, last_modified=None, content_length=None, ) def test_origin_exists_200_no_data(requests_mock): """Existing origin should be reported as such (no extra information)""" url = "http://example.org/real-url" requests_mock.head( url, status_code=200, ) actual_result = origin_exists(url) assert actual_result == OriginExistenceCheckInfo( origin_url=url, exists=True, last_modified=None, content_length=None, ) def test_origin_exists_200_with_data(requests_mock): """Existing origin should be reported as such (+ extra information)""" 
url = "http://example.org/real-url" requests_mock.head( url, status_code=200, headers={ "content-length": "10", "last-modified": "Sun, 21 Aug 2011 16:26:32 GMT", }, ) actual_result = origin_exists(url) assert actual_result == OriginExistenceCheckInfo( origin_url=url, exists=True, content_length=10, last_modified="2011-08-21T16:26:32", ) def test_origin_exists_internet_archive(requests_mock): """Edge case where an artifact URL to check existence is hosted on the Internet Archive""" url = ( "https://web.archive.org/web/20100705043309/" "http://www.cs.unm.edu/~mccune/old-ftp/eqp-09e.tar.gz" ) redirect_url = ( "https://web.archive.org/web/20100610004108/" "http://www.cs.unm.edu/~mccune/old-ftp/eqp-09e.tar.gz" ) requests_mock.head( url, status_code=302, headers={"Location": redirect_url,}, ) requests_mock.head( redirect_url, status_code=200, headers={ "X-Archive-Orig-Last-Modified": "Tue, 12 May 2009 22:09:43 GMT", "X-Archive-Orig-Content-Length": "121421", }, ) actual_result = origin_exists(url) assert actual_result == OriginExistenceCheckInfo( origin_url=url, exists=True, content_length=121421, last_modified="2009-05-12T22:09:43", ) def test_origin_exists_200_with_data_unexpected_date_format(requests_mock): """Existing origin should be ok, unexpected last modif time result in no time""" url = "http://example.org/real-url2" # this is parsable but not as expected unexpected_format_date = "Sun, 21 Aug 2021 16:26:32" requests_mock.head( url, status_code=200, headers={"last-modified": unexpected_format_date,}, ) actual_result = origin_exists(url) # so the resulting date is None assert actual_result == OriginExistenceCheckInfo( origin_url=url, exists=True, content_length=None, last_modified=None, ) @pytest.mark.django_db @pytest.mark.parametrize("visit_status", [VISIT_STATUS_CREATED, VISIT_STATUS_ONGOING,]) def test_get_save_origin_requests_no_visit_date_found( mocker, swh_scheduler, visit_status ): """Uneventful visits with failed visit status are marked as failed """ sors 
= _get_save_origin_requests( mocker, swh_scheduler, load_status="scheduled", visit_status=visit_status, ) # check no visit date has been found assert len(sors) == 1 assert sors[0]["save_task_status"] == SAVE_TASK_RUNNING assert sors[0]["visit_date"] is not None assert sors[0]["visit_status"] == visit_status @pytest.mark.django_db @pytest.mark.parametrize("visit_status", ["not_found", "failed",]) def test_get_save_origin_requests_no_failed_status_override( mocker, swh_scheduler, visit_status ): """Uneventful visits with failed statuses (failed, not found) are marked as failed """ sors = _get_save_origin_requests( mocker, swh_scheduler, load_status="uneventful", visit_status=visit_status ) assert len(sors) == 1 assert sors[0]["save_task_status"] == SAVE_TASK_FAILED visit_date = sors[0]["visit_date"] assert visit_date is not None sors = get_save_origin_requests(_visit_type, _origin_url) assert len(sors) == 1 assert sors[0]["save_task_status"] == SAVE_TASK_FAILED assert sors[0]["visit_status"] == visit_status @pytest.mark.django_db @pytest.mark.parametrize( "load_status,visit_status", [ ("eventful", VISIT_STATUS_FULL), ("eventful", VISIT_STATUS_PARTIAL), ("uneventful", VISIT_STATUS_PARTIAL), ], ) def test_get_visit_info_for_save_request_succeeded( mocker, swh_scheduler, load_status, visit_status ): """Nominal scenario, below 30 days, returns something""" sors = _get_save_origin_requests( mocker, swh_scheduler, load_status=load_status, visit_status=visit_status ) assert len(sors) == 1 assert sors[0]["save_task_status"] == SAVE_TASK_SUCCEEDED assert sors[0]["visit_date"] is not None assert sors[0]["visit_status"] == visit_status sors = get_save_origin_requests(_visit_type, _origin_url) assert sors[0]["save_task_status"] == SAVE_TASK_SUCCEEDED assert sors[0]["visit_status"] == visit_status @pytest.mark.django_db @pytest.mark.parametrize("load_status", ["eventful", "uneventful",]) def test_get_visit_info_incomplete_visit_still_successful( mocker, swh_scheduler, load_status 
): """Incomplete visit information, yet the task is updated partially """ sors = _get_save_origin_requests( mocker, swh_scheduler, load_status=load_status, visit_status=None, ) assert len(sors) == 1 assert sors[0]["save_task_status"] == SAVE_TASK_SUCCEEDED # As the entry is missing the following information though assert sors[0]["visit_date"] is not None assert sors[0]["visit_status"] is None # It's still detected as to be updated by the refresh routine sors = refresh_save_origin_request_statuses() assert len(sors) == 1 assert sors[0]["save_task_status"] == SAVE_TASK_SUCCEEDED assert sors[0]["visit_date"] is not None assert sors[0]["visit_status"] is None @pytest.mark.django_db def test_refresh_in_progress_save_request_statuses( mocker, swh_scheduler, api_client, archive_data ): """Refresh a pending save origins requests and update if the status changes """ date_now = datetime.now(tz=timezone.utc) date_pivot = date_now - timedelta(days=30) visit_started_date = date_now - timedelta(minutes=1) # returned visit status SaveOriginRequest.objects.create( request_date=datetime.now(tz=timezone.utc), visit_type=_visit_type, visit_status=VISIT_STATUS_CREATED, origin_url=_origin_url, status=SAVE_REQUEST_ACCEPTED, visit_date=None, loading_task_id=_task_id, ) # mock scheduler and archives _fill_scheduler_db( swh_scheduler, task_status="next_run_scheduled", task_run_status=SAVE_TASK_SCHEDULED, ) mock_archive = mocker.patch("swh.web.common.origin_save.archive") mock_archive.lookup_origin.return_value = {"url": _origin_url} mock_get_origin_visits = mocker.patch( "swh.web.common.origin_save.get_origin_visits" ) # create a visit for the save request with status created visit_date = datetime.now(tz=timezone.utc).isoformat() visit_info = OriginVisitInfo( date=visit_date, formatted_date="", metadata={}, origin=_origin_url, snapshot="", # make mypy happy status=VISIT_STATUS_CREATED, type=_visit_type, url="", visit=34, ) mock_get_origin_visits.return_value = [visit_info] # make the 
scheduler return a running event _fill_scheduler_db( swh_scheduler, task_status="next_run_scheduled", task_run_status="started", visit_started_date=visit_started_date, ) # The visit is detected but still running sors = refresh_save_origin_request_statuses() assert mock_get_origin_visits.called and mock_get_origin_visits.call_count == 1 assert len(sors) == 1 for sor in sors: assert iso8601.parse_date(sor["save_request_date"]) >= date_pivot # The status is updated assert sor["save_task_status"] == SAVE_TASK_RUNNING # but the following entries are missing so it's not updated assert sor["visit_date"] is not None assert sor["visit_status"] == VISIT_STATUS_CREATED # make the visit status completed # make the scheduler return a running event _fill_scheduler_db( swh_scheduler, task_status="completed", task_run_status="eventful", visit_started_date=visit_started_date, ) # This time around, the origin returned will have all required information updated # (visit date and visit status in final state) visit_date = datetime.now(tz=timezone.utc).isoformat() visit_info.update({"date": visit_date, "status": VISIT_STATUS_FULL}) mock_get_origin_visits.return_value = [visit_info] # Detected entry, this time it should be updated sors = refresh_save_origin_request_statuses() assert len(sors) == 1 assert mock_get_origin_visits.called and mock_get_origin_visits.call_count == 1 + 1 for sor in sors: assert iso8601.parse_date(sor["save_request_date"]) >= date_pivot # as it turns out, in this test, this won't update anything as no new status got # returned by the scheduler assert sor["save_task_status"] == SAVE_TASK_SUCCEEDED assert sor["visit_date"] == visit_date assert sor["visit_status"] == VISIT_STATUS_FULL # Once in final state, a sor should not be updated anymore sors = refresh_save_origin_request_statuses() assert len(sors) == 0 @pytest.mark.django_db def test_refresh_save_request_statuses(mocker, swh_scheduler, api_client, archive_data): """Refresh filters save origins requests and 
update if changes """ date_now = datetime.now(tz=timezone.utc) date_pivot = date_now - timedelta(days=30) # returned visit status SaveOriginRequest.objects.create( request_date=datetime.now(tz=timezone.utc), visit_type=_visit_type, visit_status=None, origin_url=_origin_url, status=SAVE_REQUEST_ACCEPTED, visit_date=None, loading_task_id=_task_id, ) # mock scheduler and archives _fill_scheduler_db( swh_scheduler, task_status="next_run_scheduled", task_run_status=SAVE_TASK_SCHEDULED, ) mock_archive = mocker.patch("swh.web.common.origin_save.archive") mock_archive.lookup_origin.return_value = {"url": _origin_url} mock_get_origin_visits = mocker.patch( "swh.web.common.origin_save.get_origin_visits" ) # create a visit for the save request with status created visit_date = datetime.now(tz=timezone.utc).isoformat() visit_info = OriginVisitInfo( date=visit_date, formatted_date="", metadata={}, origin=_origin_url, snapshot="", # make mypy happy status=VISIT_STATUS_CREATED, type=_visit_type, url="", visit=34, ) mock_get_origin_visits.return_value = [visit_info] # no changes so refresh does detect the entry but does nothing sors = refresh_save_origin_request_statuses() assert len(sors) == 1 for sor in sors: assert iso8601.parse_date(sor["save_request_date"]) >= date_pivot # as it turns out, in this test, this won't update anything as no new status got # returned by the scheduler assert sor["save_task_status"] == SAVE_TASK_RUNNING # Information is empty assert sor["visit_date"] == visit_date assert sor["visit_status"] == VISIT_STATUS_CREATED # A save code now entry is detected for update, but as nothing changes, the entry # remains in the same state sors = refresh_save_origin_request_statuses() assert len(sors) == 1 for sor in sors: assert iso8601.parse_date(sor["save_request_date"]) >= date_pivot # Status is not updated as no new information is available on the visit status # and the task status has not moved assert sor["save_task_status"] == SAVE_TASK_RUNNING # Information is 
empty assert sor["visit_date"] == visit_date assert sor["visit_status"] == VISIT_STATUS_CREATED # This time around, the origin returned will have all information updated # create a visit for the save request with status created visit_date = datetime.now(tz=timezone.utc).isoformat() visit_info = OriginVisitInfo( date=visit_date, formatted_date="", metadata={}, origin=_origin_url, snapshot="", # make mypy happy status=VISIT_STATUS_FULL, type=_visit_type, url="", visit=34, ) mock_get_origin_visits.return_value = [visit_info] # Detected entry, this time it should be updated sors = refresh_save_origin_request_statuses() assert len(sors) == 1 for sor in sors: assert iso8601.parse_date(sor["save_request_date"]) >= date_pivot # as it turns out, in this test, this won't update anything as no new status got # returned by the scheduler assert sor["save_task_status"] == SAVE_TASK_SUCCEEDED assert sor["visit_date"] == visit_date assert sor["visit_status"] == VISIT_STATUS_FULL # This time, nothing left to update sors = refresh_save_origin_request_statuses() assert len(sors) == 0 diff --git a/swh/web/tests/test_migrations.py b/swh/web/tests/test_migrations.py index 3cb68884..572d77f2 100644 --- a/swh/web/tests/test_migrations.py +++ b/swh/web/tests/test_migrations.py @@ -1,53 +1,54 @@ # Copyright (C) 2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information APP_NAME = "swh_web_common" MIGRATION_0008 = "0008_save-code-now_indexes_20210106_1327" MIGRATION_0009 = "0009_saveoriginrequest_visit_status" MIGRATION_0010 = "0010_saveoriginrequest_user_id" MIGRATION_0011 = "0011_saveoriginrequest_user_ids" +MIGRATION_0012 = "0012_saveoriginrequest_note" def test_migrations_09_add_visit_status_to_sor_model(migrator): """Ensures the migration adds the visit_status field to SaveOriginRequest table""" old_state = 
migrator.apply_initial_migration((APP_NAME, MIGRATION_0008),) old_model = old_state.apps.get_model(APP_NAME, "SaveOriginRequest") assert hasattr(old_model, "visit_status") is False new_state = migrator.apply_tested_migration((APP_NAME, MIGRATION_0009)) new_model = new_state.apps.get_model(APP_NAME, "SaveOriginRequest") assert hasattr(new_model, "visit_status") is True def test_migrations_10_add_user_id_to_sor_model(migrator): """Ensures the migration adds the user_id field to SaveOriginRequest table""" old_state = migrator.apply_initial_migration((APP_NAME, MIGRATION_0009),) old_model = old_state.apps.get_model(APP_NAME, "SaveOriginRequest") assert hasattr(old_model, "user_id") is False new_state = migrator.apply_tested_migration((APP_NAME, MIGRATION_0010)) new_model = new_state.apps.get_model(APP_NAME, "SaveOriginRequest") assert hasattr(new_model, "user_id") is True -def test_migrations_11_add_user_ids_to_sor_model(migrator): +def test_migrations_12_add_note_to_sor_model(migrator): """Ensures the migration adds the user_id field to SaveOriginRequest table""" - old_state = migrator.apply_initial_migration((APP_NAME, MIGRATION_0010),) + old_state = migrator.apply_initial_migration((APP_NAME, MIGRATION_0011),) old_model = old_state.apps.get_model(APP_NAME, "SaveOriginRequest") - assert hasattr(old_model, "user_ids") is False + assert hasattr(old_model, "note") is False - new_state = migrator.apply_tested_migration((APP_NAME, MIGRATION_0011)) + new_state = migrator.apply_tested_migration((APP_NAME, MIGRATION_0012)) new_model = new_state.apps.get_model(APP_NAME, "SaveOriginRequest") - assert hasattr(new_model, "user_ids") is True + assert hasattr(new_model, "note") is True