D3089: browse/content: Refactor metadata handling and add typing
D3089.diff
diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py
--- a/swh/web/browse/snapshot_context.py
+++ b/swh/web/browse/snapshot_context.py
@@ -16,7 +16,11 @@
from django.utils.html import escape
import sentry_sdk
-from swh.model.identifiers import persistent_identifier, snapshot_identifier
+from swh.model.identifiers import (
+ persistent_identifier,
+ snapshot_identifier,
+ CONTENT,
+)
from swh.web.browse.utils import (
get_directory_entries,
@@ -42,6 +46,7 @@
SnapshotBranchInfo,
SnapshotReleaseInfo,
SnapshotContext,
+ ContentMetadata,
)
from swh.web.common.utils import (
reverse,
@@ -857,7 +862,7 @@
root_directory = snapshot_context["root_directory"]
sha1_git = None
query_string = None
- content_data = None
+ content_data = {}
directory_id = None
split_path = path.split("/")
filename = split_path[-1]
@@ -882,24 +887,19 @@
visit_info = snapshot_context["visit_info"]
snapshot_id = snapshot_context["snapshot_id"]
- content = None
- language = None
- mimetype = None
- if content_data and content_data["raw_data"] is not None:
+ if content_data.get("raw_data") is not None:
content_display_data = prepare_content_for_display(
content_data["raw_data"], content_data["mimetype"], path
)
- content = content_display_data["content_data"]
- language = content_display_data["language"]
- mimetype = content_display_data["mimetype"]
+ content_data.update(content_display_data)
# Override language with user-selected language
if selected_language is not None:
- language = selected_language
+ content_data["language"] = selected_language
available_languages = None
- if mimetype and "text/" in mimetype:
+ if content_data.get("mimetype") is not None and "text/" in content_data["mimetype"]:
available_languages = highlightjs.get_supported_languages()
breadcrumbs = _build_breadcrumbs(snapshot_context, filepath)
@@ -920,59 +920,53 @@
browse_dir_link = gen_directory_link(directory_id)
- content_metadata = {
- "context-independent content": browse_content_link,
- "path": None,
- "filename": None,
- "directory": directory_id,
- "context-independent directory": browse_dir_link,
- "revision": revision_id,
- "context-independent revision": browse_rev_link,
- "snapshot": snapshot_id,
- }
-
- cnt_sha1_git = None
- content_size = None
- error_code = 200
- error_description = ""
- error_message = ""
- if content_data:
- for checksum in content_data["checksums"].keys():
- content_metadata[checksum] = content_data["checksums"][checksum]
- content_metadata["mimetype"] = content_data["mimetype"]
- content_metadata["encoding"] = content_data["encoding"]
- content_metadata["size"] = filesizeformat(content_data["length"])
- content_metadata["language"] = content_data["language"]
- content_metadata["licenses"] = content_data["licenses"]
- content_metadata["path"] = "/" + filepath
- content_metadata["filename"] = filename
-
- cnt_sha1_git = content_data["checksums"]["sha1_git"]
- content_size = content_data["length"]
- error_code = content_data["error_code"]
- error_message = content_data["error_message"]
- error_description = content_data["error_description"]
-
- if origin_info:
- content_metadata["origin url"] = origin_info["url"]
- content_metadata["origin visit date"] = format_utc_iso_date(visit_info["date"])
- content_metadata["origin visit type"] = visit_info["type"]
- browse_snapshot_link = gen_snapshot_link(snapshot_id)
- content_metadata["context-independent snapshot"] = browse_snapshot_link
+ content_checksums = content_data.get("checksums", {})
swh_objects = [
- {"type": "content", "id": cnt_sha1_git},
+ {"type": "content", "id": content_checksums.get("sha1_git")},
{"type": "directory", "id": directory_id},
{"type": "revision", "id": revision_id},
{"type": "snapshot", "id": snapshot_id},
]
+ visit_date = None
+ visit_type = None
+ if visit_info:
+ visit_date = format_utc_iso_date(visit_info["date"])
+ visit_type = visit_info["type"]
+
release_id = snapshot_context["release_id"]
+ browse_rel_link = None
if release_id:
swh_objects.append({"type": "release", "id": release_id})
browse_rel_link = gen_release_link(release_id)
- content_metadata["release"] = release_id
- content_metadata["context-independent release"] = browse_rel_link
+
+ content_metadata = ContentMetadata(
+ object_type=CONTENT,
+ sha1=content_checksums.get("sha1"),
+ sha1_git=content_checksums.get("sha1_git"),
+ sha256=content_checksums.get("sha256"),
+ blake2s256=content_checksums.get("blake2s256"),
+ content_url=browse_content_link,
+ mimetype=content_data.get("mimetype"),
+ encoding=content_data.get("encoding"),
+ size=filesizeformat(content_data.get("length", 0)),
+ language=content_data.get("language"),
+ licenses=content_data.get("licenses"),
+ path=f"/{filepath}",
+ filename=filename,
+ directory=directory_id,
+ directory_url=browse_dir_link,
+ revision=revision_id,
+ revision_url=browse_rev_link,
+ release=release_id,
+ release_url=browse_rel_link,
+ snapshot=snapshot_id,
+ snapshot_url=gen_snapshot_link(snapshot_id),
+ origin_url=origin_url,
+ visit_date=visit_date,
+ visit_type=visit_type,
+ )
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
@@ -1001,11 +995,11 @@
"heading": heading,
"swh_object_name": "Content",
"swh_object_metadata": content_metadata,
- "content": content,
- "content_size": content_size,
+ "content": content_data.get("content_data"),
+ "content_size": content_data.get("length"),
"max_content_size": content_display_max_size,
- "mimetype": mimetype,
- "language": language,
+ "mimetype": content_data.get("mimetype"),
+ "language": content_data.get("language"),
"available_languages": available_languages,
"breadcrumbs": breadcrumbs if root_directory else [],
"top_right_link": top_right_link,
@@ -1013,11 +1007,11 @@
"vault_cooking": None,
"show_actions_menu": True,
"swh_ids": swh_ids,
- "error_code": error_code,
- "error_message": error_message,
- "error_description": error_description,
+ "error_code": content_data.get("error_code"),
+ "error_message": content_data.get("error_message"),
+ "error_description": content_data.get("error_description"),
},
- status=error_code,
+ status=content_data.get("error_code", 200),
)
diff --git a/swh/web/browse/views/content.py b/swh/web/browse/views/content.py
--- a/swh/web/browse/views/content.py
+++ b/swh/web/browse/views/content.py
@@ -14,6 +14,7 @@
import sentry_sdk
from swh.model.hashutil import hash_to_hex
+from swh.model.identifiers import CONTENT
from swh.web.browse.browseurls import browse_route
from swh.web.browse.snapshot_context import get_snapshot_context
@@ -27,6 +28,7 @@
)
from swh.web.common import query, service, highlightjs
from swh.web.common.exc import NotFoundExc, handle_view_exception
+from swh.web.common.typing import ContentMetadata
from swh.web.common.utils import reverse, gen_path_info, swh_object_icons
@@ -291,34 +293,46 @@
query_params = {"filename": filename}
+ content_checksums = content_data["checksums"]
+
+ content_url = reverse(
+ "browse-content",
+ url_args={"query_string": f'sha1_git:{content_checksums["sha1_git"]}'},
+ )
+
content_raw_url = reverse(
"browse-content-raw",
url_args={"query_string": query_string},
query_params=query_params,
)
- content_metadata = {
- "sha1": content_data["checksums"]["sha1"],
- "sha1_git": content_data["checksums"]["sha1_git"],
- "sha256": content_data["checksums"]["sha256"],
- "blake2s256": content_data["checksums"]["blake2s256"],
- "mimetype": content_data["mimetype"],
- "encoding": content_data["encoding"],
- "size": filesizeformat(content_data["length"]),
- "language": content_data["language"],
- "licenses": content_data["licenses"],
- "filename": filename,
- "directory": directory_id,
- "context-independent directory": directory_url,
- }
-
- if filename:
- content_metadata["filename"] = filename
-
- sha1_git = content_data["checksums"]["sha1_git"]
- swh_ids = get_swh_persistent_ids([{"type": "content", "id": sha1_git}])
-
- heading = "Content - %s" % sha1_git
+ content_metadata = ContentMetadata(
+ object_type=CONTENT,
+ sha1=content_checksums["sha1"],
+ sha1_git=content_checksums["sha1_git"],
+ sha256=content_checksums["sha256"],
+ blake2s256=content_checksums["blake2s256"],
+ content_url=content_url,
+ mimetype=content_data["mimetype"],
+ encoding=content_data["encoding"],
+ size=filesizeformat(content_data["length"]),
+ language=content_data["language"],
+ licenses=content_data["licenses"],
+ path=path,
+ filename=filename,
+ directory=directory_id,
+ directory_url=directory_url,
+ revision=None,
+ release=None,
+ snapshot=None,
+ origin_url=origin_url,
+ )
+
+ swh_ids = get_swh_persistent_ids(
+ [{"type": "content", "id": content_checksums["sha1_git"]}]
+ )
+
+ heading = "Content - %s" % content_checksums["sha1_git"]
if breadcrumbs:
content_path = "/".join([bc["name"] for bc in breadcrumbs])
heading += " - %s" % content_path
diff --git a/swh/web/common/typing.py b/swh/web/common/typing.py
--- a/swh/web/common/typing.py
+++ b/swh/web/common/typing.py
@@ -110,3 +110,33 @@
"""common URL arguments when browsing snapshot content"""
visit_info: Optional[OriginVisitInfo]
"""optional origin visit info associated to the snapshot"""
+
+
+class SWHObjectMetadata(TypedDict, total=False):
+ object_type: str
+ origin_url: Optional[str]
+ visit_date: Optional[str]
+ visit_type: Optional[str]
+ directory_url: Optional[str]
+ revision_url: Optional[str]
+ release_url: Optional[str]
+ snapshot_url: Optional[str]
+
+
+class ContentMetadata(SWHObjectMetadata):
+ sha1: str
+ sha1_git: str
+ sha256: str
+ blake2s256: str
+ content_url: str
+ mimetype: str
+ encoding: str
+ size: str
+ language: str
+ licenses: str
+ path: Optional[str]
+ filename: Optional[str]
+ directory: Optional[str]
+ revision: Optional[str]
+ release: Optional[str]
+ snapshot: Optional[str]
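For reference, a minimal sketch of how the ContentMetadata TypedDict added in swh/web/common/typing.py can be populated and consumed, assuming the patched swh.web and swh.model packages are importable; the checksums, paths and URLs below are illustrative placeholders, not values taken from this diff.

from swh.model.identifiers import CONTENT
from swh.web.common.typing import ContentMetadata

# Placeholder checksums standing in for the values normally returned by the
# content lookup.
content_checksums = {
    "sha1": "94a9ed024d3859793618152ea559a168bbcbb5e2",
    "sha1_git": "d670460b4b4aece5915caf5c68d12f560a9fe3e4",
    "sha256": "50e721e49c013f00c62cf59f2163542a9d8df02464efeb615d31051b0fddc326",
    "blake2s256": "9f64a747e1b97f131fabb6b447296c9b6f0201e79fb3c5356e6c77e89b6a806a",
}

# ContentMetadata is a TypedDict: at runtime it is a plain dict, but mypy can
# check the field names and types declared in swh/web/common/typing.py.
metadata = ContentMetadata(
    object_type=CONTENT,
    sha1=content_checksums["sha1"],
    sha1_git=content_checksums["sha1_git"],
    sha256=content_checksums["sha256"],
    blake2s256=content_checksums["blake2s256"],
    content_url="/browse/content/sha1_git:%s/" % content_checksums["sha1_git"],
    mimetype="text/x-python",
    encoding="us-ascii",
    size="11 KB",
    language="python",
    licenses="GPL-3.0-or-later",
    path="/swh/web/browse/snapshot_context.py",
    filename="snapshot_context.py",
    directory=None,
    revision=None,
    release=None,
    snapshot=None,
    origin_url=None,
)

# Keys declared on the total=False base class SWHObjectMetadata (visit_date,
# visit_type, revision_url, release_url, snapshot_url, ...) can simply be
# omitted, as the views/content.py hunk above does.
assert metadata["object_type"] == CONTENT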