diff --git a/swh/web/common/service.py b/swh/web/common/service.py
--- a/swh/web/common/service.py
+++ b/swh/web/common/service.py
@@ -899,7 +899,8 @@
     """
     visits = _lookup_origin_visits(origin, last_visit=last_visit, limit=per_page)
     for visit in visits:
-        yield converters.from_origin_visit(visit)
+        visit_status = storage.origin_visit_status_get_latest(origin, visit["visit"])
+        yield converters.from_origin_visit({**visit, **visit_status.to_dict()})
 
 
 def lookup_origin_visit_latest(
@@ -953,12 +954,13 @@
     """
     visit = storage.origin_visit_get_by(origin_url, visit_id)
+    visit_status = storage.origin_visit_status_get_latest(origin_url, visit_id)
     if not visit:
         raise NotFoundExc(
             "Origin %s or its visit " "with id %s not found!" % (origin_url, visit_id)
        )
     visit["origin"] = origin_url
-    return converters.from_origin_visit(visit)
+    return converters.from_origin_visit({**visit, **visit_status.to_dict()})
 
 
 def lookup_snapshot_sizes(snapshot_id):
diff --git a/swh/web/tests/api/views/test_origin.py b/swh/web/tests/api/views/test_origin.py
--- a/swh/web/tests/api/views/test_origin.py
+++ b/swh/web/tests/api/views/test_origin.py
@@ -3,6 +3,8 @@
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
+from datetime import timedelta
+
 from hypothesis import given
 import pytest
 from requests.utils import parse_header_links
@@ -205,7 +207,7 @@
         visit_status = OriginVisitStatus(
             origin=new_origin.url,
             visit=origin_visit.visit,
-            date=visit_date,
+            date=visit_date + timedelta(minutes=5),
             status="full",
             snapshot=new_snapshots[i].id,
         )
diff --git a/swh/web/tests/conftest.py b/swh/web/tests/conftest.py
--- a/swh/web/tests/conftest.py
+++ b/swh/web/tests/conftest.py
@@ -227,8 +227,15 @@
         return converters.from_origin(origin)
 
     def origin_visit_get(self, origin_url):
-        visits = self.storage.origin_visit_get(origin_url)
-        return list(map(converters.from_origin_visit, visits))
+        visits = list(self.storage.origin_visit_get(origin_url))
+        for i in range(len(visits)):
+            visit_status = self.storage.origin_visit_status_get_latest(
+                origin_url, visits[i]["visit"]
+            )
+            visits[i] = converters.from_origin_visit(
+                {**visits[i], **visit_status.to_dict()}
+            )
+        return visits
 
     def origin_visit_get_by(self, origin_url, visit_id):
         visit = self.storage.origin_visit_get_by(origin_url, visit_id)
diff --git a/swh/web/tests/data.py b/swh/web/tests/data.py
--- a/swh/web/tests/data.py
+++ b/swh/web/tests/data.py
@@ -3,9 +3,10 @@
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
-import datetime
+from datetime import timedelta
 import os
 import random
+import time
 
 from copy import deepcopy
 
@@ -20,6 +21,7 @@
 from swh.search import get_search
 from swh.storage.algos.dir_iterators import dir_iterator
 from swh.storage.algos.snapshot import snapshot_get_latest
+from swh.storage.utils import now
 from swh.web import config
 from swh.web.browse.utils import (
     get_mimetype_and_encoding_for_content,
@@ -121,26 +123,16 @@
             "highlightjs-line-numbers.js.zip",
             "highlightjs-line-numbers.js_visit2.zip",
         ],
-        "visit_date": [
-            datetime.datetime(2018, 12, 1, 1, 0, 0, tzinfo=datetime.timezone.utc),
-            datetime.datetime(2019, 1, 20, 15, 0, 0, tzinfo=datetime.timezone.utc),
-        ],
     },
     {
         "type": "git",
         "url": "https://github.com/memononen/libtess2",
         "archives": ["libtess2.zip"],
-        "visit_date": [
-            datetime.datetime(2018, 5, 25, 1, 0, 0, tzinfo=datetime.timezone.utc),
-        ],
     },
     {
         "type": "git",
         "url": "repo_with_submodules",
         "archives": ["repo_with_submodules.tgz"],
-        "visit_date": [
-            datetime.datetime(2019, 1, 1, 1, 0, 0, tzinfo=datetime.timezone.utc),
-        ],
     },
 ]
 
@@ -178,6 +170,10 @@
     # Load git repositories from archives
     for origin in _TEST_ORIGINS:
         for i, archive in enumerate(origin["archives"]):
+            if i > 0:
+                # ensure visit dates will be different when simulating
+                # multiple visits of an origin
+                time.sleep(1)
             origin_repo_archive = os.path.join(
                 os.path.dirname(__file__), "resources/repos/%s" % archive
             )
@@ -185,7 +181,6 @@
                 origin["url"],
                 archive_path=origin_repo_archive,
                 config=_TEST_LOADER_CONFIG,
-                visit_date=origin["visit_date"][i],
             )
         if storage is None:
             storage = loader.storage
@@ -201,13 +196,13 @@
         # storage.origin_add([{'url': url}])
         storage.origin_add([Origin(url=url)])
         search.origin_update([{"url": url, "has_visits": True}])
-        date = datetime.datetime(2019, 12, 3, 13, 55, 5, tzinfo=datetime.timezone.utc)
+        date = now()
         visit = OriginVisit(origin=url, date=date, type="tar")
         visit = storage.origin_visit_add([visit])[0]
         visit_status = OriginVisitStatus(
             origin=url,
             visit=visit.visit,
-            date=date,
+            date=date + timedelta(minutes=1),
             status="full",
             snapshot=hash_to_bytes("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"),
         )