diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py --- a/swh/web/browse/snapshot_context.py +++ b/swh/web/browse/snapshot_context.py @@ -430,6 +430,12 @@ origin_visits_url = None if origin_url: + + if visit_id is not None: + query_params["visit_id"] = visit_id + elif snapshot_id is not None: + query_params["snapshot"] = snapshot_id + origin_info = service.lookup_origin({"url": origin_url}) visit_info = get_origin_visit(origin_info, timestamp, visit_id, snapshot_id) @@ -455,10 +461,9 @@ query_params["origin_url"] = origin_info["url"] - origin_visits_url = reverse("browse-origin-visits", query_params=query_params) - - if visit_id is not None: - query_params["visit_id"] = visit_id + origin_visits_url = reverse( + "browse-origin-visits", query_params={"origin_url": origin_info["url"]} + ) if timestamp is not None: query_params["timestamp"] = format_utc_iso_date( @@ -729,6 +734,7 @@ history_url = None if snapshot_id != _empty_snapshot_id: + query_params.pop("path", None) history_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) @@ -1223,18 +1229,16 @@ return handle_view_exception(request, exc) for branch in displayed_branches: - if snapshot_id: - revision_url = reverse( - "browse-revision", - url_args={"sha1_git": branch["revision"]}, - query_params={"snapshot_id": snapshot_id}, - ) - else: - revision_url = reverse( - "browse-revision", - url_args={"sha1_git": branch["revision"]}, - query_params={"origin_url": origin_info["url"]}, - ) + rev_query_params = {} + if origin_info: + rev_query_params["origin_url"] = origin_info["url"] + + revision_url = reverse( + "browse-revision", + url_args={"sha1_git": branch["revision"]}, + query_params=query_params, + ) + query_params["branch"] = branch["name"] directory_url = reverse( browse_view_name, url_args=url_args, query_params=query_params @@ -1334,10 +1338,10 @@ return handle_view_exception(request, exc) for release in displayed_releases: - if snapshot_id: - query_params_tgt = {"snapshot_id": snapshot_id} - else: - query_params_tgt = {"origin_url": origin_info["url"]} + query_params_tgt = {"snapshot": snapshot_id} + if origin_info: + query_params_tgt["origin_url"] = origin_info["url"] + release_url = reverse( "browse-release", url_args={"sha1_git": release["id"]}, diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py --- a/swh/web/browse/utils.py +++ b/swh/web/browse/utils.py @@ -315,16 +315,28 @@ def _snapshot_context_query_params(snapshot_context): - query_params = None + query_params = {} + if not snapshot_context: + return query_params if snapshot_context and snapshot_context["origin_info"]: origin_info = snapshot_context["origin_info"] + snp_query_params = snapshot_context["query_params"] query_params = {"origin_url": origin_info["url"]} - if "timestamp" in snapshot_context["query_params"]: - query_params["timestamp"] = snapshot_context["query_params"]["timestamp"] - if "visit_id" in snapshot_context["query_params"]: - query_params["visit_id"] = snapshot_context["query_params"]["visit_id"] + if "timestamp" in snp_query_params: + query_params["timestamp"] = snp_query_params["timestamp"] + if "visit_id" in snp_query_params: + query_params["visit_id"] = snp_query_params["visit_id"] + if "snapshot" in snp_query_params and "visit_id" not in query_params: + query_params["snapshot"] = snp_query_params["snapshot"] elif snapshot_context: - query_params = {"snapshot_id": snapshot_context["snapshot_id"]} + query_params = {"snapshot": snapshot_context["snapshot_id"]} + + if 
snapshot_context["release"]: + query_params["release"] = snapshot_context["release"] + elif snapshot_context["branch"] and snapshot_context["branch"] != "HEAD": + query_params["branch"] = snapshot_context["branch"] + elif snapshot_context["revision_id"]: + query_params["revision"] = snapshot_context["revision_id"] return query_params @@ -342,6 +354,7 @@ """ query_params = _snapshot_context_query_params(snapshot_context) + query_params.pop("revision", None) return reverse( "browse-revision", url_args={"sha1_git": revision_id}, query_params=query_params @@ -504,17 +517,16 @@ Returns: The revision log view URL """ - query_params = {"revision": revision_id} + query_params = {} + if snapshot_context: + query_params = _snapshot_context_query_params(snapshot_context) + + query_params["revision"] = revision_id if snapshot_context and snapshot_context["origin_info"]: - origin_info = snapshot_context["origin_info"] - query_params["origin_url"] = origin_info["url"] - if "timestamp" in snapshot_context["query_params"]: - query_params["timestamp"] = snapshot_context["query_params"]["timestamp"] - if "visit_id" in snapshot_context["query_params"]: - query_params["visit_id"] = snapshot_context["query_params"]["visit_id"] revision_log_url = reverse("browse-origin-log", query_params=query_params) elif snapshot_context: url_args = {"snapshot_id": snapshot_context["snapshot_id"]} + del query_params["snapshot"] revision_log_url = reverse( "browse-snapshot-log", url_args=url_args, query_params=query_params ) diff --git a/swh/web/browse/views/content.py b/swh/web/browse/views/content.py --- a/swh/web/browse/views/content.py +++ b/swh/web/browse/views/content.py @@ -194,37 +194,44 @@ algo, checksum = query.parse_hash(query_string) checksum = hash_to_hex(checksum) content_data = request_content(query_string, raise_if_unavailable=False) - origin_url = request.GET.get("origin_url", None) - selected_language = request.GET.get("language", None) - + origin_url = request.GET.get("origin_url") + selected_language = request.GET.get("language") if not origin_url: - origin_url = request.GET.get("origin", None) + origin_url = request.GET.get("origin") + snapshot_id = request.GET.get("snapshot") + path = request.GET.get("path") snapshot_context = None - if origin_url: + if origin_url is not None or snapshot_id is not None: try: - snapshot_context = get_snapshot_context(origin_url=origin_url) - except NotFoundExc: - raw_cnt_url = reverse( - "browse-content", url_args={"query_string": query_string} - ) - error_message = ( - "The Software Heritage archive has a content " - "with the hash you provided but the origin " - "mentioned in your request appears broken: %s. " - "Please check the URL and try again.\n\n" - "Nevertheless, you can still browse the content " - "without origin information: %s" - % (gen_link(origin_url), gen_link(raw_cnt_url)) + snapshot_context = get_snapshot_context( + origin_url=origin_url, + snapshot_id=snapshot_id, + branch_name=request.GET.get("branch"), + release_name=request.GET.get("release"), + revision_id=request.GET.get("revision"), + path=path, + browse_context=CONTENT, ) - - raise NotFoundExc(error_message) - if snapshot_context: - snapshot_context["visit_info"] = None + except NotFoundExc as e: + if str(e).startswith("Origin"): + raw_cnt_url = reverse( + "browse-content", url_args={"query_string": query_string} + ) + error_message = ( + "The Software Heritage archive has a content " + "with the hash you provided but the origin " + "mentioned in your request appears broken: %s. 
" + "Please check the URL and try again.\n\n" + "Nevertheless, you can still browse the content " + "without origin information: %s" + % (gen_link(origin_url), gen_link(raw_cnt_url)) + ) + raise NotFoundExc(error_message) + else: + raise e except Exception as exc: return handle_view_exception(request, exc) - path = request.GET.get("path", None) - content = None language = None mimetype = None @@ -245,24 +252,28 @@ if mimetype and "text/" in mimetype: available_languages = highlightjs.get_supported_languages() - root_dir = None filename = None path_info = None directory_id = None directory_url = None - query_params = {"origin_url": origin_url} + root_dir = None + if snapshot_context: + root_dir = snapshot_context.get("root_directory") + + query_params = snapshot_context["query_params"] if snapshot_context else {} breadcrumbs = [] if path: split_path = path.split("/") - root_dir = split_path[0] + root_dir = root_dir or split_path[0] filename = split_path[-1] if root_dir != path: path = path.replace(root_dir + "/", "") path = path[: -len(filename)] path_info = gen_path_info(path) + query_params.pop("path", None) dir_url = reverse( "browse-directory", url_args={"sha1_git": root_dir}, diff --git a/swh/web/browse/views/directory.py b/swh/web/browse/views/directory.py --- a/swh/web/browse/views/directory.py +++ b/swh/web/browse/views/directory.py @@ -23,7 +23,7 @@ from swh.web.common.exc import handle_view_exception, NotFoundExc from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import DirectoryMetadata, SWHObjectInfo -from swh.web.common.utils import reverse, gen_path_info +from swh.web.common.utils import reverse, gen_path_info, swh_object_icons def _directory_browse(request, sha1_git, path=None): @@ -34,36 +34,44 @@ sha1_git = dir_info["target"] dirs, files = get_directory_entries(sha1_git) - origin_url = request.GET.get("origin_url", None) + origin_url = request.GET.get("origin_url") if not origin_url: - origin_url = request.GET.get("origin", None) + origin_url = request.GET.get("origin") + snapshot_id = request.GET.get("snapshot") snapshot_context = None - if origin_url: + if origin_url is not None or snapshot_id is not None: try: - snapshot_context = get_snapshot_context(origin_url=origin_url) - except NotFoundExc: - raw_dir_url = reverse( - "browse-directory", url_args={"sha1_git": sha1_git} + snapshot_context = get_snapshot_context( + snapshot_id=snapshot_id, + origin_url=origin_url, + branch_name=request.GET.get("branch"), + release_name=request.GET.get("release"), + revision_id=request.GET.get("revision"), + path=path, ) - error_message = ( - "The Software Heritage archive has a directory " - "with the hash you provided but the origin " - "mentioned in your request appears broken: %s. " - "Please check the URL and try again.\n\n" - "Nevertheless, you can still browse the directory " - "without origin information: %s" - % (gen_link(origin_url), gen_link(raw_dir_url)) - ) - - raise NotFoundExc(error_message) - if snapshot_context: - snapshot_context["visit_info"] = None + except NotFoundExc as e: + if str(e).startswith("Origin"): + raw_dir_url = reverse( + "browse-directory", url_args={"sha1_git": sha1_git} + ) + error_message = ( + "The Software Heritage archive has a directory " + "with the hash you provided but the origin " + "mentioned in your request appears broken: %s. 
" + "Please check the URL and try again.\n\n" + "Nevertheless, you can still browse the directory " + "without origin information: %s" + % (gen_link(origin_url), gen_link(raw_dir_url)) + ) + raise NotFoundExc(error_message) + else: + raise e except Exception as exc: return handle_view_exception(request, exc) path_info = gen_path_info(path) - query_params = {"origin_url": origin_url} + query_params = snapshot_context["query_params"] if snapshot_context else {} breadcrumbs = [] breadcrumbs.append( @@ -115,7 +123,7 @@ url_args={"query_string": query_string}, query_params={ "path": root_sha1_git + "/" + path + f["name"], - "origin_url": origin_url, + **query_params, }, ) if f["length"] is not None: @@ -159,6 +167,19 @@ dir_path = "/".join([bc["name"] for bc in breadcrumbs]) + "/" heading += " - %s" % dir_path + top_right_link = None + if snapshot_context is not None and not snapshot_context["is_empty"]: + history_url = reverse( + "browse-revision-log", + url_args={"sha1_git": snapshot_context["revision_id"]}, + query_params=query_params, + ) + top_right_link = { + "url": history_url, + "icon": swh_object_icons["revisions history"], + "text": "History", + } + return render( request, "browse/directory.html", @@ -170,7 +191,7 @@ "dirs": dirs, "files": files, "breadcrumbs": breadcrumbs, - "top_right_link": None, + "top_right_link": top_right_link, "readme_name": readme_name, "readme_url": readme_url, "readme_html": readme_html, diff --git a/swh/web/browse/views/origin.py b/swh/web/browse/views/origin.py --- a/swh/web/browse/views/origin.py +++ b/swh/web/browse/views/origin.py @@ -33,6 +33,7 @@ return browse_snapshot_directory( request, origin_url=request.GET.get("origin_url"), + snapshot_id=request.GET.get("snapshot"), timestamp=request.GET.get("timestamp"), path=request.GET.get("path"), ) @@ -54,7 +55,11 @@ :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/directory/[(path)/]` """ return browse_snapshot_directory( - request, origin_url=origin_url, timestamp=timestamp, path=path + request, + origin_url=origin_url, + snapshot_id=request.GET.get("snapshot"), + timestamp=timestamp, + path=path, ) @@ -71,6 +76,7 @@ return browse_snapshot_content( request, origin_url=request.GET.get("origin_url"), + snapshot_id=request.GET.get("snapshot"), timestamp=request.GET.get("timestamp"), path=request.GET.get("path"), selected_language=request.GET.get("language"), @@ -95,6 +101,7 @@ return browse_snapshot_content( request, origin_url=origin_url, + snapshot_id=request.GET.get("snapshot"), timestamp=timestamp, path=path, selected_language=request.GET.get("language"), @@ -113,6 +120,7 @@ return browse_snapshot_log( request, origin_url=request.GET.get("origin_url"), + snapshot_id=request.GET.get("snapshot"), timestamp=request.GET.get("timestamp"), ) @@ -131,7 +139,12 @@ :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/log/` """ - return browse_snapshot_log(request, origin_url=origin_url, timestamp=timestamp) + return browse_snapshot_log( + request, + origin_url=origin_url, + snapshot_id=request.GET.get("snapshot"), + timestamp=timestamp, + ) @browse_route( @@ -147,6 +160,7 @@ return browse_snapshot_branches( request, origin_url=request.GET.get("origin_url"), + snapshot_id=request.GET.get("snapshot"), timestamp=request.GET.get("timestamp"), ) @@ -165,7 +179,12 @@ :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/branches/` """ - return browse_snapshot_branches(request, origin_url=origin_url, timestamp=timestamp) + return browse_snapshot_branches( + request, + origin_url=origin_url, + 
snapshot_id=request.GET.get("snapshot"), + timestamp=timestamp, + ) @browse_route( @@ -181,6 +200,7 @@ return browse_snapshot_releases( request, origin_url=request.GET.get("origin_url"), + snapshot_id=request.GET.get("snapshot"), timestamp=request.GET.get("timestamp"), ) @@ -199,7 +219,12 @@ :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/releases/` """ - return browse_snapshot_releases(request, origin_url=origin_url, timestamp=timestamp) + return browse_snapshot_releases( + request, + origin_url=origin_url, + snapshot_id=request.GET.get("snapshot"), + timestamp=timestamp, + ) def _origin_visits_browse(request, origin_url): diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py --- a/swh/web/browse/views/release.py +++ b/swh/web/browse/views/release.py @@ -41,18 +41,20 @@ release = service.lookup_release(sha1_git) snapshot_context = {} origin_info = None - snapshot_id = request.GET.get("snapshot_id", None) - origin_url = request.GET.get("origin_url", None) + snapshot_id = request.GET.get("snapshot_id") + if not snapshot_id: + snapshot_id = request.GET.get("snapshot") + origin_url = request.GET.get("origin_url") if not origin_url: - origin_url = request.GET.get("origin", None) - timestamp = request.GET.get("timestamp", None) - visit_id = request.GET.get("visit_id", None) + origin_url = request.GET.get("origin") + timestamp = request.GET.get("timestamp") + visit_id = request.GET.get("visit_id") if origin_url: try: snapshot_context = get_snapshot_context( snapshot_id, origin_url, timestamp, visit_id ) - except NotFoundExc: + except NotFoundExc as e: raw_rel_url = reverse("browse-release", url_args={"sha1_git": sha1_git}) error_message = ( "The Software Heritage archive has a release " @@ -63,8 +65,10 @@ "without origin information: %s" % (gen_link(origin_url), gen_link(raw_rel_url)) ) - - raise NotFoundExc(error_message) + if str(e).startswith("Origin"): + raise NotFoundExc(error_message) + else: + raise e origin_info = snapshot_context["origin_info"] elif snapshot_id: snapshot_context = get_snapshot_context(snapshot_id) @@ -174,6 +178,7 @@ query_params={ "origin_url": origin_info["url"], "release": release["name"], + "snapshot": snapshot_id, }, ) elif snapshot_id: diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py --- a/swh/web/browse/views/revision.py +++ b/swh/web/browse/views/revision.py @@ -208,6 +208,19 @@ The url that points to it is :http:get:`/browse/revision/(sha1_git)/log/` """ try: + origin_url = request.GET.get("origin_url") + snapshot_id = request.GET.get("snapshot") + snapshot_context = None + if origin_url or snapshot_id: + snapshot_context = get_snapshot_context( + snapshot_id=snapshot_id, + origin_url=origin_url, + timestamp=request.GET.get("timestamp"), + visit_id=request.GET.get("visit_id"), + branch_name=request.GET.get("branch"), + release_name=request.GET.get("release"), + revision_id=sha1_git, + ) per_page = int(request.GET.get("per_page", NB_LOG_ENTRIES)) offset = int(request.GET.get("offset", 0)) revs_ordering = request.GET.get("revs_ordering", "committer_date") @@ -284,7 +297,7 @@ "prev_log_url": prev_log_url, "breadcrumbs": None, "top_right_link": None, - "snapshot_context": None, + "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions_menu": True, "swhids_info": None, @@ -308,22 +321,30 @@ revision = service.lookup_revision(sha1_git) origin_info = None snapshot_context = None - origin_url = request.GET.get("origin_url", None) + origin_url = request.GET.get("origin_url") if not 
origin_url: - origin_url = request.GET.get("origin", None) - timestamp = request.GET.get("timestamp", None) - visit_id = request.GET.get("visit_id", None) - snapshot_id = request.GET.get("snapshot_id", None) - path = request.GET.get("path", None) + origin_url = request.GET.get("origin") + timestamp = request.GET.get("timestamp") + visit_id = request.GET.get("visit_id") + snapshot_id = request.GET.get("snapshot_id") + if not snapshot_id: + snapshot_id = request.GET.get("snapshot") + path = request.GET.get("path") dir_id = None dirs, files = None, None content_data = {} if origin_url: try: snapshot_context = get_snapshot_context( - origin_url=origin_url, timestamp=timestamp, visit_id=visit_id + snapshot_id=snapshot_id, + origin_url=origin_url, + timestamp=timestamp, + visit_id=visit_id, + branch_name=request.GET.get("branch"), + release_name=request.GET.get("release"), + revision_id=request.GET.get("revision"), ) - except NotFoundExc: + except NotFoundExc as e: raw_rev_url = reverse( "browse-revision", url_args={"sha1_git": sha1_git} ) @@ -336,11 +357,15 @@ "without origin information: %s" % (gen_link(origin_url), gen_link(raw_rev_url)) ) - raise NotFoundExc(error_message) + if str(e).startswith("Origin"): + raise NotFoundExc(error_message) + else: + raise e origin_info = snapshot_context["origin_info"] snapshot_id = snapshot_context["snapshot_id"] elif snapshot_id: snapshot_context = get_snapshot_context(snapshot_id) + if path: file_info = service.lookup_directory_with_path(revision["directory"], path) if file_info["type"] == "dir": @@ -399,12 +424,7 @@ path_info = gen_path_info(path) - query_params = { - "snapshot_id": snapshot_id, - "origin_url": origin_url, - "timestamp": timestamp, - "visit_id": visit_id, - } + query_params = snapshot_context["query_params"] if snapshot_context else {} breadcrumbs = [] breadcrumbs.append( @@ -466,17 +486,15 @@ content = content_display_data["content_data"] language = content_display_data["language"] mimetype = content_display_data["mimetype"] - query_params = {} if path: filename = path_info[-1]["name"] - query_params["filename"] = filename extra_context["filename"] = filename top_right_link = { "url": reverse( "browse-content-raw", url_args={"query_string": query_string}, - query_params=query_params, + query_params={"filename": filename}, ), "icon": swh_object_icons["content"], "text": "Raw File", @@ -527,14 +545,10 @@ swh_objects.append(SWHObjectInfo(object_type=DIRECTORY, object_id=dir_id)) + query_params.pop("path", None) + diff_revision_url = reverse( - "diff-revision", - url_args={"sha1_git": sha1_git}, - query_params={ - "origin_url": origin_url, - "timestamp": timestamp, - "visit_id": visit_id, - }, + "diff-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) if snapshot_id: diff --git a/swh/web/browse/views/snapshot.py b/swh/web/browse/views/snapshot.py --- a/swh/web/browse/views/snapshot.py +++ b/swh/web/browse/views/snapshot.py @@ -51,7 +51,7 @@ request, snapshot_id=snapshot_id, path=request.GET.get("path"), - origin_url=request.GET.get("origin"), + origin_url=request.GET.get("origin_url"), ) diff --git a/swh/web/tests/browse/views/test_content.py b/swh/web/tests/browse/views/test_content.py --- a/swh/web/tests/browse/views/test_content.py +++ b/swh/web/tests/browse/views/test_content.py @@ -3,6 +3,7 @@ # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information +import random import textwrap from django.utils.html import escape @@ -10,6 +11,7 @@ from 
hypothesis import given from swh.model.identifiers import CONTENT, DIRECTORY +from swh.web.browse.snapshot_context import process_snapshot_branches from swh.web.browse.utils import ( get_mimetype_and_encoding_for_content, prepare_content_for_display, @@ -33,6 +35,7 @@ invalid_sha1, unknown_content, content_utf8_detected_as_binary, + origin_with_multiple_visits, ) @@ -420,6 +423,112 @@ assert_contains(resp, escape(content_display["content_data"])) +@given(origin_with_multiple_visits()) +def test_content_origin_snapshot_branch_browse(client, archive_data, origin): + visits = archive_data.origin_visit_get(origin["url"]) + visit = random.choice(visits) + snapshot = archive_data.snapshot_get(visit["snapshot"]) + branches, releases = process_snapshot_branches(snapshot) + branch_info = random.choice(branches) + + directory = archive_data.revision_get(branch_info["revision"])["directory"] + directory_content = archive_data.directory_ls(directory) + directory_file = random.choice( + [e for e in directory_content if e["type"] == "file"] + ) + + url = reverse( + "browse-content", + url_args={"query_string": directory_file["checksums"]["sha1"]}, + query_params={ + "origin_url": origin["url"], + "snapshot": snapshot["id"], + "branch": branch_info["name"], + "path": directory_file["name"], + }, + ) + + resp = client.get(url) + assert resp.status_code == 200 + assert_template_used(resp, "browse/content.html") + _check_origin_snapshot_related_html(resp, origin, snapshot, branches, releases) + assert_contains(resp, directory_file["name"]) + assert_contains(resp, f"Branch: {branch_info['name']}") + + +@given(origin_with_multiple_visits()) +def test_content_origin_snapshot_release_browse(client, archive_data, origin): + visits = archive_data.origin_visit_get(origin["url"]) + visit = random.choice(visits) + snapshot = archive_data.snapshot_get(visit["snapshot"]) + branches, releases = process_snapshot_branches(snapshot) + release_info = random.choice(releases) + + directory_content = archive_data.directory_ls(release_info["directory"]) + directory_file = random.choice( + [e for e in directory_content if e["type"] == "file"] + ) + + url = reverse( + "browse-content", + url_args={"query_string": directory_file["checksums"]["sha1"]}, + query_params={ + "origin_url": origin["url"], + "snapshot": snapshot["id"], + "release": release_info["name"], + "path": directory_file["name"], + }, + ) + + resp = client.get(url) + assert resp.status_code == 200 + assert_template_used(resp, "browse/content.html") + _check_origin_snapshot_related_html(resp, origin, snapshot, branches, releases) + assert_contains(resp, directory_file["name"]) + assert_contains(resp, f"Release: {release_info['name']}") + + +def _check_origin_snapshot_related_html(resp, origin, snapshot, branches, releases): + browse_origin_url = reverse( + "browse-origin", query_params={"origin_url": origin["url"]} + ) + assert_contains( + resp, + textwrap.indent( + ( + "Browse archived content for origin\n" + f'\n' + f" {origin['url']}\n" + f"" + ), + " " * 6, + ), + ) + + origin_branches_url = reverse( + "browse-origin-branches", + query_params={"origin_url": origin["url"], "snapshot": snapshot["id"]}, + ) + + assert_contains( + resp, + 'Branches (%s)' % (escape(origin_branches_url), len(branches)), + ) + + origin_releases_url = reverse( + "browse-origin-releases", + query_params={"origin_url": origin["url"], "snapshot": snapshot["id"]}, + ) + + assert_contains( + resp, + 'Releases (%s)' % (escape(origin_releases_url), len(releases)), + ) + + 
assert_contains(resp, '<li class="swh-branch">', count=len(branches)) + assert_contains(resp, '
  • ', count=len(releases)) + + def _process_content_for_display(archive_data, content): content_data = archive_data.content_get(content["sha1"]) diff --git a/swh/web/tests/browse/views/test_directory.py b/swh/web/tests/browse/views/test_directory.py --- a/swh/web/tests/browse/views/test_directory.py +++ b/swh/web/tests/browse/views/test_directory.py @@ -6,9 +6,11 @@ import random import textwrap +from django.utils.html import escape from hypothesis import given from swh.model.identifiers import DIRECTORY +from swh.web.browse.snapshot_context import process_snapshot_branches from swh.web.common.identifiers import get_swh_persistent_id from swh.web.common.utils import gen_path_info, reverse from swh.web.tests.django_asserts import assert_contains, assert_template_used @@ -17,16 +19,17 @@ directory_with_subdirs, invalid_sha1, unknown_directory, + origin_with_multiple_visits, ) @given(directory()) -def test_root_directory_view_checks(client, archive_data, directory): +def test_root_directory_view(client, archive_data, directory): _directory_view_checks(client, directory, archive_data.directory_ls(directory)) @given(directory_with_subdirs()) -def test_sub_directory_view_checks(client, archive_data, directory): +def test_sub_directory_view(client, archive_data, directory): dir_content = archive_data.directory_ls(directory) subdir = random.choice([e for e in dir_content if e["type"] == "dir"]) subdir_content = archive_data.directory_ls(subdir["target"]) @@ -77,14 +80,126 @@ assert_contains(resp, 'id="swh-id-context-option-directory"') -def _directory_view_checks(client, root_directory_sha1, directory_entries, path=None): +@given(origin_with_multiple_visits()) +def test_directory_origin_snapshot_branch_browse(client, archive_data, origin): + visits = archive_data.origin_visit_get(origin["url"]) + visit = random.choice(visits) + snapshot = archive_data.snapshot_get(visit["snapshot"]) + branches, releases = process_snapshot_branches(snapshot) + branch_info = random.choice(branches) + + directory = archive_data.revision_get(branch_info["revision"])["directory"] + directory_content = archive_data.directory_ls(directory) + directory_subdir = random.choice( + [e for e in directory_content if e["type"] == "dir"] + ) + + url = reverse( + "browse-directory", + url_args={"sha1_git": directory}, + query_params={ + "origin_url": origin["url"], + "snapshot": snapshot["id"], + "branch": branch_info["name"], + "path": directory_subdir["name"], + }, + ) + + resp = client.get(url) + assert resp.status_code == 200 + assert_template_used(resp, "browse/directory.html") + _check_origin_snapshot_related_html(resp, origin, snapshot, branches, releases) + assert_contains(resp, directory_subdir["name"]) + assert_contains(resp, f"Branch: {branch_info['name']}") + + +@given(origin_with_multiple_visits()) +def test_content_origin_snapshot_release_browse(client, archive_data, origin): + visits = archive_data.origin_visit_get(origin["url"]) + visit = random.choice(visits) + snapshot = archive_data.snapshot_get(visit["snapshot"]) + branches, releases = process_snapshot_branches(snapshot) + release_info = random.choice(releases) + + directory = release_info["directory"] + directory_content = archive_data.directory_ls(directory) + directory_subdir = random.choice( + [e for e in directory_content if e["type"] == "dir"] + ) + + url = reverse( + "browse-directory", + url_args={"sha1_git": directory}, + query_params={ + "origin_url": origin["url"], + "snapshot": snapshot["id"], + "release": release_info["name"], + "path": 
directory_subdir["name"], + }, + ) + + resp = client.get(url) + assert resp.status_code == 200 + assert_template_used(resp, "browse/directory.html") + _check_origin_snapshot_related_html(resp, origin, snapshot, branches, releases) + assert_contains(resp, directory_subdir["name"]) + assert_contains(resp, f"Release: {release_info['name']}") + + +def _check_origin_snapshot_related_html(resp, origin, snapshot, branches, releases): + browse_origin_url = reverse( + "browse-origin", query_params={"origin_url": origin["url"]} + ) + assert_contains( + resp, + textwrap.indent( + ( + "Browse archived directory for origin\n" + f'\n' + f" {origin['url']}\n" + f"" + ), + " " * 6, + ), + ) + + origin_branches_url = reverse( + "browse-origin-branches", + query_params={"origin_url": origin["url"], "snapshot": snapshot["id"]}, + ) + + assert_contains( + resp, + 'Branches (%s)' % (escape(origin_branches_url), len(branches)), + ) + + origin_releases_url = reverse( + "browse-origin-releases", + query_params={"origin_url": origin["url"], "snapshot": snapshot["id"]}, + ) + + assert_contains( + resp, + 'Releases (%s)' % (escape(origin_releases_url), len(releases)), + ) + + assert_contains(resp, '
<li class="swh-branch">', count=len(branches)) + assert_contains(resp, '
  • ', count=len(releases)) + + +def _directory_view_checks( + client, + root_directory_sha1, + directory_entries, + path=None, + origin_url=None, + snapshot_id=None, +): dirs = [e for e in directory_entries if e["type"] in ("dir", "rev")] files = [e for e in directory_entries if e["type"] == "file"] url_args = {"sha1_git": root_directory_sha1} - query_params = {} - if path: - query_params["path"] = path + query_params = {"path": path, "origin_url": origin_url, "snapshot": snapshot_id} url = reverse("browse-directory", url_args=url_args, query_params=query_params) diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py --- a/swh/web/tests/browse/views/test_origin.py +++ b/swh/web/tests/browse/views/test_origin.py @@ -131,6 +131,18 @@ timestamp=visit_unix_ts, ) + _origin_content_view_test_helper( + client, + archive_data, + origin, + origin_visits[-1], + tdata["branches"], + tdata["releases"], + tdata["root_dir_sha1"], + tdata["content"], + snapshot_id=tdata["visit"]["snapshot"], + ) + tdata = _get_archive_data(0) _origin_content_view_test_helper( @@ -145,6 +157,18 @@ visit_id=tdata["visit"]["visit"], ) + _origin_content_view_test_helper( + client, + archive_data, + origin, + origin_visits[0], + tdata["branches"], + tdata["releases"], + tdata["root_dir_sha1"], + tdata["content"], + snapshot_id=tdata["visit"]["snapshot"], + ) + @given(origin()) def test_origin_root_directory_view(client, archive_data, origin): @@ -207,8 +231,17 @@ timestamp=visit["date"], ) - origin = dict(origin) - del origin["type"] + _origin_directory_view_test_helper( + client, + archive_data, + origin, + visit, + branches, + releases, + root_dir_sha1, + dir_content, + snapshot_id=visit["snapshot"], + ) _origin_directory_view_test_helper( client, @@ -257,6 +290,18 @@ timestamp=visit["date"], ) + _origin_directory_view_test_helper( + client, + archive_data, + origin, + visit, + branches, + releases, + root_dir_sha1, + dir_content, + snapshot_id=visit["snapshot"], + ) + @given(origin()) def test_origin_sub_directory_view(client, archive_data, origin): @@ -332,8 +377,18 @@ timestamp=visit["date"], ) - origin = dict(origin) - del origin["type"] + _origin_directory_view_test_helper( + client, + archive_data, + origin, + visit, + branches, + releases, + root_dir_sha1, + subdir_content, + path=subdir_path, + snapshot_id=visit["snapshot"], + ) _origin_directory_view_test_helper( client, @@ -386,6 +441,19 @@ timestamp=visit["date"], ) + _origin_directory_view_test_helper( + client, + archive_data, + origin, + visit, + branches, + releases, + root_dir_sha1, + subdir_content, + path=subdir_path, + snapshot_id=visit["snapshot"], + ) + @given(origin()) def test_origin_branches(client, archive_data, origin): @@ -397,10 +465,9 @@ _origin_branches_test_helper(client, origin, snapshot_content) - origin = dict(origin) - origin["type"] = None - - _origin_branches_test_helper(client, origin, snapshot_content) + _origin_branches_test_helper( + client, origin, snapshot_content, snapshot_id=visit["snapshot"] + ) @given(origin()) @@ -413,10 +480,9 @@ _origin_releases_test_helper(client, origin, snapshot_content) - origin = dict(origin) - origin["type"] = None - - _origin_releases_test_helper(client, origin, snapshot_content) + _origin_releases_test_helper( + client, origin, snapshot_content, snapshot_id=visit["snapshot"] + ) @given( @@ -773,10 +839,11 @@ content, visit_id=None, timestamp=None, + snapshot_id=None, ): content_path = "/".join(content["path"].split("/")[1:]) - if not visit_id: + if not 
visit_id and not snapshot_id: visit_id = origin_visit["visit"] query_params = {"origin_url": origin_info["url"], "path": content_path} @@ -786,6 +853,8 @@ if visit_id: query_params["visit_id"] = visit_id + elif snapshot_id: + query_params["snapshot"] = snapshot_id url = reverse("browse-origin-content", query_params=query_params) @@ -916,20 +985,23 @@ directory_entries, visit_id=None, timestamp=None, + snapshot_id=None, path=None, ): dirs = [e for e in directory_entries if e["type"] in ("dir", "rev")] files = [e for e in directory_entries if e["type"] == "file"] - if not visit_id: + if not visit_id and not snapshot_id: visit_id = origin_visit["visit"] query_params = {"origin_url": origin_info["url"]} if timestamp: query_params["timestamp"] = timestamp - else: + elif visit_id: query_params["visit_id"] = visit_id + else: + query_params["snapshot"] = snapshot_id if path: query_params["path"] = path @@ -1052,8 +1124,10 @@ _check_origin_view_title(resp, origin_info["url"], "directory") -def _origin_branches_test_helper(client, origin_info, origin_snapshot): - query_params = {"origin_url": origin_info["url"]} +def _origin_branches_test_helper( + client, origin_info, origin_snapshot, snapshot_id=None +): + query_params = {"origin_url": origin_info["url"], "snapshot": snapshot_id} url = reverse("browse-origin-branches", query_params=query_params) @@ -1069,7 +1143,8 @@ assert_contains( resp, - 'Branches (%s)' % (origin_branches_url, len(origin_branches)), + 'Branches (%s)' + % (escape(origin_branches_url), len(origin_branches)), ) origin_releases_url = reverse("browse-origin-releases", query_params=query_params) @@ -1077,7 +1152,9 @@ nb_releases = len(origin_releases) if nb_releases > 0: assert_contains( - resp, 'Releases (%s)' % (origin_releases_url, nb_releases) + resp, + 'Releases (%s)' + % (escape(origin_releases_url), nb_releases), ) assert_contains(resp, '' % escape(browse_branch_url)) browse_revision_url = reverse( "browse-revision", url_args={"sha1_git": branch["revision"]}, - query_params={"origin_url": origin_info["url"]}, + query_params=query_params, ) assert_contains(resp, '' % escape(browse_revision_url)) _check_origin_view_title(resp, origin_info["url"], "branches") -def _origin_releases_test_helper(client, origin_info, origin_snapshot): - query_params = {"origin_url": origin_info["url"]} +def _origin_releases_test_helper( + client, origin_info, origin_snapshot, snapshot_id=None +): + query_params = {"origin_url": origin_info["url"], "snapshot": snapshot_id} url = reverse("browse-origin-releases", query_params=query_params) @@ -1115,7 +1194,8 @@ assert_contains( resp, - 'Branches (%s)' % (origin_branches_url, len(origin_branches)), + 'Branches (%s)' + % (escape(origin_branches_url), len(origin_branches)), ) origin_releases_url = reverse("browse-origin-releases", query_params=query_params) @@ -1123,7 +1203,9 @@ nb_releases = len(origin_releases) if nb_releases > 0: assert_contains( - resp, 'Releases (%s)' % (origin_releases_url, nb_releases) + resp, + 'Releases (%s)' + % (escape(origin_releases_url), nb_releases), ) assert_contains(resp, '' % escape(browse_release_url)) diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py --- a/swh/web/tests/browse/views/test_release.py +++ b/swh/web/tests/browse/views/test_release.py @@ -6,6 +6,7 @@ import random import textwrap +from django.utils.html import escape from hypothesis import given from swh.web.common.identifiers import get_swh_persistent_id @@ -16,32 +17,29 @@ @given(release()) def 
test_release_browse(client, archive_data, release): - url = reverse("browse-release", url_args={"sha1_git": release}) - - release_data = archive_data.release_get(release) - - resp = client.get(url) - - _release_browse_checks(resp, release_data, archive_data) + _release_browse_checks(client, release, archive_data) @given(origin_with_releases()) -def test_release_browse_with_origin(client, archive_data, origin): +def test_release_browse_with_origin_snapshot(client, archive_data, origin): snapshot = archive_data.snapshot_get_latest(origin["url"]) release = random.choice( - [b for b in snapshot["branches"].values() if b["target_type"] == "release"] - ) - url = reverse( - "browse-release", - url_args={"sha1_git": release["target"]}, - query_params={"origin_url": origin["url"]}, + [ + b["target"] + for b in snapshot["branches"].values() + if b["target_type"] == "release" + ] ) - release_data = archive_data.release_get(release["target"]) - - resp = client.get(url) - - _release_browse_checks(resp, release_data, archive_data, origin) + _release_browse_checks(client, release, archive_data, origin_url=origin["url"]) + _release_browse_checks(client, release, archive_data, snapshot_id=snapshot["id"]) + _release_browse_checks( + client, + release, + archive_data, + origin_url=origin["url"], + snapshot_id=snapshot["id"], + ) @given(unknown_release()) @@ -68,10 +66,18 @@ assert resp["location"] == redirect_url -def _release_browse_checks(resp, release_data, archive_data, origin_info=None): - query_params = {} - if origin_info: - query_params["origin_url"] = origin_info["url"] +def _release_browse_checks( + client, release, archive_data, origin_url=None, snapshot_id=None +): + query_params = {"origin_url": origin_url, "snapshot": snapshot_id} + + url = reverse( + "browse-release", url_args={"sha1_git": release}, query_params=query_params + ) + + release_data = archive_data.release_get(release) + + resp = client.get(url) release_id = release_data["id"] release_name = release_data["name"] @@ -98,21 +104,31 @@ assert_contains(resp, release_id) assert_contains(resp, release_name) assert_contains(resp, target_type) - assert_contains(resp, '%s' % (target_url, target)) + assert_contains(resp, '%s' % (escape(target_url), target)) swh_rel_id = get_swh_persistent_id("release", release_id) swh_rel_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_rel_id}) assert_contains(resp, swh_rel_id) assert_contains(resp, swh_rel_id_url) - if origin_info: + if origin_url: browse_origin_url = reverse( - "browse-origin", query_params={"origin_url": origin_info["url"]} + "browse-origin", query_params={"origin_url": origin_url} ) title = ( f"Browse archived release for origin\n" f'\n' - f' {origin_info["url"]}\n' + f" {origin_url}\n" + f"" + ) + indent = " " * 6 + elif snapshot_id: + swh_snp_id = get_swh_persistent_id("snapshot", snapshot_id) + swh_snp_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_snp_id}) + title = ( + f"Browse archived release for snapshot\n" + f'\n' + f" {swh_snp_id}\n" f"" ) indent = " " * 6 @@ -130,17 +146,24 @@ ) if release_data["target_type"] == "revision": - if origin_info: + if origin_url: directory_url = reverse( "browse-origin-directory", query_params={ - "origin_url": origin_info["url"], + "origin_url": origin_url, "release": release_data["name"], + "snapshot": snapshot_id, }, ) + elif snapshot_id: + directory_url = reverse( + "browse-snapshot-directory", + url_args={"snapshot_id": snapshot_id}, + query_params={"release": release_data["name"],}, + ) else: rev = 
archive_data.revision_get(release_data["target"]) directory_url = reverse( "browse-directory", url_args={"sha1_git": rev["directory"]} ) - assert_contains(resp, directory_url) + assert_contains(resp, escape(directory_url)) diff --git a/swh/web/tests/browse/views/test_revision.py b/swh/web/tests/browse/views/test_revision.py --- a/swh/web/tests/browse/views/test_revision.py +++ b/swh/web/tests/browse/views/test_revision.py @@ -17,119 +17,20 @@ @given(revision()) def test_revision_browse(client, archive_data, revision): - url = reverse("browse-revision", url_args={"sha1_git": revision}) - - revision_data = archive_data.revision_get(revision) - - author_name = revision_data["author"]["name"] - committer_name = revision_data["committer"]["name"] - dir_id = revision_data["directory"] - - directory_url = reverse("browse-directory", url_args={"sha1_git": dir_id}) - - history_url = reverse("browse-revision-log", url_args={"sha1_git": revision}) - - resp = client.get(url) - - assert resp.status_code == 200 - assert_template_used(resp, "browse/revision.html") - assert_contains(resp, author_name) - assert_contains(resp, committer_name) - assert_contains(resp, directory_url) - assert_contains(resp, history_url) - - for parent in revision_data["parents"]: - parent_url = reverse("browse-revision", url_args={"sha1_git": parent}) - assert_contains(resp, '%s' % (parent_url, parent[:7])) - - author_date = revision_data["date"] - committer_date = revision_data["committer_date"] - - message_lines = revision_data["message"].split("\n") - - assert_contains(resp, format_utc_iso_date(author_date)) - assert_contains(resp, format_utc_iso_date(committer_date)) - assert_contains(resp, escape(message_lines[0])) - assert_contains(resp, escape("\n".join(message_lines[1:]))) - - swh_rev_id = get_swh_persistent_id(REVISION, revision) - swh_rev_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_rev_id}) - - assert_contains( - resp, - textwrap.indent( - ( - f"Browse archived revision\n" - f'\n' - f" {swh_rev_id}\n" - f"" - ), - " " * 4, - ), - ) - - swhid_context = {"anchor": swh_rev_id, "path": "/"} - - swh_dir_id = get_swh_persistent_id(DIRECTORY, dir_id, metadata=swhid_context) - swh_dir_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_dir_id}) - - assert_contains(resp, swh_dir_id) - assert_contains(resp, swh_dir_id_url) + _revision_browse_checks(client, archive_data, revision) @given(origin()) -def test_revision_origin_browse(client, archive_data, origin): +def test_revision_origin_snapshot_browse(client, archive_data, origin): snapshot = archive_data.snapshot_get_latest(origin["url"]) revision = archive_data.snapshot_get_head(snapshot) - revision_data = archive_data.revision_get(revision) - dir_id = revision_data["directory"] - - origin_revision_log_url = reverse( - "browse-origin-log", - query_params={"origin_url": origin["url"], "revision": revision}, - ) - url = reverse( - "browse-revision", - url_args={"sha1_git": revision}, - query_params={"origin_url": origin["url"]}, + _revision_browse_checks(client, archive_data, revision, origin_url=origin["url"]) + _revision_browse_checks(client, archive_data, revision, snapshot=snapshot) + _revision_browse_checks( + client, archive_data, revision, origin_url=origin["url"], snapshot=snapshot, ) - resp = client.get(url) - - assert_contains(resp, origin_revision_log_url) - - for parent in revision_data["parents"]: - parent_url = reverse( - "browse-revision", - url_args={"sha1_git": parent}, - query_params={"origin_url": origin["url"]}, - ) - 
assert_contains(resp, '%s' % (parent_url, parent[:7])) - - assert_contains(resp, "vault-cook-directory") - assert_contains(resp, "vault-cook-revision") - - swhid_context = { - "origin": origin["url"], - "visit": get_swh_persistent_id(SNAPSHOT, snapshot["id"]), - } - - swh_rev_id = get_swh_persistent_id(REVISION, revision, metadata=swhid_context) - swh_rev_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_rev_id}) - assert_contains(resp, swh_rev_id) - assert_contains(resp, swh_rev_id_url) - - swhid_context["anchor"] = get_swh_persistent_id(REVISION, revision) - swhid_context["path"] = "/" - - swh_dir_id = get_swh_persistent_id(DIRECTORY, dir_id, metadata=swhid_context) - swh_dir_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_dir_id}) - assert_contains(resp, swh_dir_id) - assert_contains(resp, swh_dir_id_url) - - assert_contains(resp, "swh-take-new-snapshot") - @given(revision()) def test_revision_log_browse(client, archive_data, revision): @@ -297,3 +198,134 @@ redirect_url = reverse("browse-revision", url_args={"sha1_git": revision}) assert resp["location"] == redirect_url + + +def _revision_browse_checks( + client, archive_data, revision, origin_url=None, snapshot=None +): + + query_params = {} + if origin_url: + query_params["origin_url"] = origin_url + if snapshot: + query_params["snapshot"] = snapshot["id"] + + url = reverse( + "browse-revision", url_args={"sha1_git": revision}, query_params=query_params + ) + + revision_data = archive_data.revision_get(revision) + + author_name = revision_data["author"]["name"] + committer_name = revision_data["committer"]["name"] + dir_id = revision_data["directory"] + + if origin_url: + snapshot = archive_data.snapshot_get_latest(origin_url) + history_url = reverse( + "browse-origin-log", query_params={"revision": revision, **query_params}, + ) + elif snapshot: + history_url = reverse( + "browse-snapshot-log", + url_args={"snapshot_id": snapshot["id"]}, + query_params={"revision": revision}, + ) + else: + history_url = reverse("browse-revision-log", url_args={"sha1_git": revision}) + + resp = client.get(url) + + assert resp.status_code == 200 + assert_template_used(resp, "browse/revision.html") + assert_contains(resp, author_name) + assert_contains(resp, committer_name) + assert_contains(resp, history_url) + + for parent in revision_data["parents"]: + parent_url = reverse( + "browse-revision", url_args={"sha1_git": parent}, query_params=query_params + ) + assert_contains(resp, '%s' % (escape(parent_url), parent[:7])) + + author_date = revision_data["date"] + committer_date = revision_data["committer_date"] + + message_lines = revision_data["message"].split("\n") + + assert_contains(resp, format_utc_iso_date(author_date)) + assert_contains(resp, format_utc_iso_date(committer_date)) + assert_contains(resp, escape(message_lines[0])) + assert_contains(resp, escape("\n".join(message_lines[1:]))) + + assert_contains(resp, "vault-cook-directory") + assert_contains(resp, "vault-cook-revision") + + swh_rev_id = get_swh_persistent_id("revision", revision) + swh_rev_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_rev_id}) + assert_contains(resp, swh_rev_id) + assert_contains(resp, swh_rev_id_url) + + swh_dir_id = get_swh_persistent_id("directory", dir_id) + swh_dir_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_dir_id}) + assert_contains(resp, swh_dir_id) + assert_contains(resp, swh_dir_id_url) + + if origin_url: + assert_contains(resp, "swh-take-new-snapshot") + + swh_rev_id = get_swh_persistent_id(REVISION, revision) + 
swh_rev_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_rev_id}) + + if origin_url: + browse_origin_url = reverse( + "browse-origin", query_params={"origin_url": origin_url} + ) + title = ( + f"Browse archived revision for origin\n" + f'\n' + f" {origin_url}\n" + f"" + ) + indent = " " * 6 + elif snapshot: + swh_snp_id = get_swh_persistent_id("snapshot", snapshot["id"]) + swh_snp_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_snp_id}) + title = ( + f"Browse archived revision for snapshot\n" + f'\n' + f" {swh_snp_id}\n" + f"" + ) + indent = " " * 6 + else: + title = ( + f"Browse archived revision\n" + f'\n' + f" {swh_rev_id}\n" + f"" + ) + indent = " " * 4 + + assert_contains( + resp, textwrap.indent(title, indent), + ) + + swhid_context = {} + if origin_url: + swhid_context["origin"] = origin_url + if snapshot: + swhid_context["visit"] = get_swh_persistent_id(SNAPSHOT, snapshot["id"]) + + swh_rev_id = get_swh_persistent_id(REVISION, revision, metadata=swhid_context) + swh_rev_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_rev_id}) + assert_contains(resp, swh_rev_id) + assert_contains(resp, swh_rev_id_url) + + swhid_context["anchor"] = get_swh_persistent_id(REVISION, revision) + swhid_context["path"] = "/" + + swh_dir_id = get_swh_persistent_id(DIRECTORY, dir_id, metadata=swhid_context) + swh_dir_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_dir_id}) + assert_contains(resp, swh_dir_id) + assert_contains(resp, swh_dir_id_url)
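
A condensed, runnable sketch of the query-parameter resolution this patch centralizes in _snapshot_context_query_params() (swh/web/browse/utils.py): origin-centric parameters first (origin_url, then timestamp / visit_id / snapshot), otherwise a bare snapshot id, plus a single pointer into the snapshot (release, else non-HEAD branch, else revision). The sample snapshot_context dict below is hypothetical illustration data and only carries the keys the patched helper actually reads.

def snapshot_context_query_params(snapshot_context):
    # Mirrors the logic added in this patch; not the swh-web implementation itself.
    query_params = {}
    if not snapshot_context:
        return query_params
    if snapshot_context["origin_info"]:
        # Origin-centric URL: keep origin_url, plus timestamp / visit_id when
        # present; fall back to an explicit snapshot id otherwise.
        query_params = {"origin_url": snapshot_context["origin_info"]["url"]}
        snp_query_params = snapshot_context["query_params"]
        if "timestamp" in snp_query_params:
            query_params["timestamp"] = snp_query_params["timestamp"]
        if "visit_id" in snp_query_params:
            query_params["visit_id"] = snp_query_params["visit_id"]
        if "snapshot" in snp_query_params and "visit_id" not in query_params:
            query_params["snapshot"] = snp_query_params["snapshot"]
    else:
        query_params = {"snapshot": snapshot_context["snapshot_id"]}
    # A single pointer into the snapshot: release > non-HEAD branch > revision.
    if snapshot_context["release"]:
        query_params["release"] = snapshot_context["release"]
    elif snapshot_context["branch"] and snapshot_context["branch"] != "HEAD":
        query_params["branch"] = snapshot_context["branch"]
    elif snapshot_context["revision_id"]:
        query_params["revision"] = snapshot_context["revision_id"]
    return query_params


# Hypothetical example: an origin visit browsed through a named branch.
ctx = {
    "origin_info": {"url": "https://example.org/repo.git"},
    "query_params": {"snapshot": "a" * 40},
    "snapshot_id": "a" * 40,
    "release": None,
    "branch": "refs/heads/dev",
    "revision_id": "b" * 40,
}
assert snapshot_context_query_params(ctx) == {
    "origin_url": "https://example.org/repo.git",
    "snapshot": "a" * 40,
    "branch": "refs/heads/dev",
}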