\ No newline at end of file
+
diff --git a/assets/src/bundles/vault/vault-ui.js b/assets/src/bundles/vault/vault-ui.js
--- a/assets/src/bundles/vault/vault-ui.js
+++ b/assets/src/bundles/vault/vault-ui.js
@@ -72,10 +72,10 @@
clearTimeout(checkVaultId);
// build cook request url
let cookingUrl;
- if (recookTask.object_type === 'directory') {
- cookingUrl = Urls.api_1_vault_cook_directory(recookTask.object_id);
+ if (recookTask.bundle_type === 'flat') {
+ cookingUrl = Urls.api_1_vault_cook_flat(recookTask.swhid);
} else {
- cookingUrl = Urls.api_1_vault_cook_revision_gitfast(recookTask.object_id);
+ cookingUrl = Urls.api_1_vault_cook_gitfast(recookTask.swhid);
}
if (recookTask.email) {
cookingUrl += '?email=' + recookTask.email;
@@ -89,21 +89,21 @@
recookTask.status = 'new';
const vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks'));
for (let i = 0; i < vaultCookingTasks.length; ++i) {
- if (vaultCookingTasks[i].object_id === recookTask.object_id) {
+ if (vaultCookingTasks[i].swhid === recookTask.swhid) {
vaultCookingTasks[i] = recookTask;
break;
}
}
// save updated tasks to local storage
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
- // restart cooking tasks status polling
- checkVaultCookingTasks();
// hide recook archive modal
$('#vault-recook-object-modal').modal('hide');
+ // restart cooking tasks status polling
+ await checkVaultCookingTasks();
} catch (_) {
// something went wrong
- checkVaultCookingTasks();
$('#vault-recook-object-modal').modal('hide');
+ await checkVaultCookingTasks();
}
}
}
@@ -121,48 +121,50 @@
for (let i = 0; i < vaultCookingTasks.length; ++i) {
const cookingTask = vaultCookingTasks[i];
- currentObjectIds.push(cookingTask.object_id);
- tasks[cookingTask.object_id] = cookingTask;
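+ // index tasks by SWHID so the cooking status responses fetched below can
+ // be matched back to their localStorage entries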
+ currentObjectIds.push(cookingTask.swhid);
+ tasks[cookingTask.swhid] = cookingTask;
let cookingUrl;
- if (cookingTask.object_type === 'directory') {
- cookingUrl = Urls.api_1_vault_cook_directory(cookingTask.object_id);
+ if (cookingTask.bundle_type === 'flat') {
+ cookingUrl = Urls.api_1_vault_cook_flat(cookingTask.swhid);
} else {
- cookingUrl = Urls.api_1_vault_cook_revision_gitfast(cookingTask.object_id);
+ cookingUrl = Urls.api_1_vault_cook_gitfast(cookingTask.swhid);
}
if (cookingTask.status !== 'done' && cookingTask.status !== 'failed') {
cookingTaskRequests.push(fetch(cookingUrl));
}
}
$('.swh-vault-table tbody tr').each((i, row) => {
- const objectId = $(row).find('.vault-object-info').data('object-id');
- if ($.inArray(objectId, currentObjectIds) === -1) {
+ const swhid = $(row).find('.vault-object-info').data('swhid');
+ if ($.inArray(swhid, currentObjectIds) === -1) {
$(row).remove();
}
});
try {
const responses = await Promise.all(cookingTaskRequests);
handleFetchErrors(responses);
const cookingTasks = await Promise.all(responses.map(r => r.json()));
const table = $('#vault-cooking-tasks tbody');
for (let i = 0; i < cookingTasks.length; ++i) {
- const cookingTask = tasks[cookingTasks[i].obj_id];
+ const cookingTask = tasks[cookingTasks[i].swhid];
cookingTask.status = cookingTasks[i].status;
cookingTask.fetch_url = cookingTasks[i].fetch_url;
cookingTask.progress_message = cookingTasks[i].progress_message;
}
for (let i = 0; i < vaultCookingTasks.length; ++i) {
const cookingTask = vaultCookingTasks[i];
- const rowTask = $(`#vault-task-${cookingTask.object_id}`);
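+ // SWHIDs contain ':' characters, so escape them with CSS.escape() before
+ // interpolating the SWHID into the id selector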
+ const rowTask = $(`#vault-task-${CSS.escape(cookingTask.swhid)}`);
if (!rowTask.length) {
let browseUrl = cookingTask.browse_url;
if (!browseUrl) {
- if (cookingTask.object_type === 'directory') {
- browseUrl = Urls.browse_directory(cookingTask.object_id);
+ if (cookingTask.bundle_type === 'flat') {
+ browseUrl = Urls.browse_flat(cookingTask.swhid);
} else {
- browseUrl = Urls.browse_revision(cookingTask.object_id);
+ browseUrl = Urls.browse_gitfast(cookingTask.swhid);
}
}
@@ -193,8 +195,8 @@
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval);
} catch (error) {
console.log('Error when fetching vault cooking tasks:', error);
}
}
@@ -204,7 +206,7 @@
return;
}
vaultCookingTasks = $.grep(vaultCookingTasks, task => {
- return $.inArray(task.object_id, tasksToRemove) === -1;
+ return $.inArray(task.swhid, tasksToRemove) === -1;
});
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
}
@@ -221,8 +223,8 @@
$('.swh-vault-table tbody tr').each((i, row) => {
const taskSelected = $(row).find('.vault-task-toggle-selection').prop('checked');
if (taskSelected) {
- const objectId = $(row).find('.vault-object-info').data('object-id');
- tasksToRemove.push(objectId);
+ const swhid = $(row).find('.vault-object-info').data('swhid');
+ tasksToRemove.push(swhid);
$(row).remove();
}
});
diff --git a/swh/web/api/views/vault.py b/swh/web/api/views/vault.py
--- a/swh/web/api/views/vault.py
+++ b/swh/web/api/views/vault.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2015-2020 The Software Heritage developers
+# Copyright (C) 2015-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
@@ -8,36 +8,38 @@
from django.http import HttpResponse
from django.shortcuts import redirect
-from swh.model import hashutil
+from swh.model.identifiers import CoreSWHID, ObjectType
from swh.web.api.apidoc import api_doc, format_docstring
from swh.web.api.apiurls import api_route
from swh.web.api.views.utils import api_lookup
from swh.web.common import archive, query
+from swh.web.common.exc import BadInputExc
from swh.web.common.utils import reverse
+######################################################
+# Common
+
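+# Matches a bare core SWHID (e.g. "swh:1:dir:" followed by a 40-character
+# hash); stricter validation is left to CoreSWHID.from_string() in the views.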
+SWHID_RE = "swh:1:[a-z]{3}:[0-9a-z]{40}"
+
# XXX: a bit spaghetti. Would be better with class-based views.
-def _dispatch_cook_progress(request, obj_type, obj_id):
- hex_id = hashutil.hash_to_hex(obj_id)
- object_name = obj_type.split("_")[0]
+def _dispatch_cook_progress(request, bundle_type: str, swhid: CoreSWHID):
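+ """Shared GET/POST handling for the cooking endpoints: GET returns the
+ progress of the cooking task, POST requests the cooking of the bundle,
+ optionally with an email to notify once it is ready."""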
if request.method == "GET":
return api_lookup(
archive.vault_progress,
- obj_type,
- obj_id,
- notfound_msg=(
- "Cooking of {} '{}' was never requested.".format(object_name, hex_id)
- ),
+ bundle_type,
+ swhid,
+ notfound_msg=f"Cooking of {swhid} was never requested.",
request=request,
)
elif request.method == "POST":
email = request.POST.get("email", request.GET.get("email", None))
return api_lookup(
archive.vault_cook,
- obj_type,
- obj_id,
+ bundle_type,
+ swhid,
email,
- notfound_msg=("{} '{}' not found.".format(object_name.title(), hex_id)),
+ notfound_msg=f"{swhid} not found.",
request=request,
)
@@ -45,31 +47,32 @@
def _vault_response(vault_response: Dict[str, Any]) -> Dict[str, Any]:
return {
"fetch_url": vault_response["fetch_url"],
- "obj_type": vault_response["type"],
"progress_message": vault_response["progress_msg"],
"id": vault_response["task_id"],
"status": vault_response["task_status"],
- "obj_id": vault_response["object_id"],
+ "swhid": str(vault_response["swhid"]),
}
+######################################################
+# Flat bundles
+
+
@api_route(
- r"/vault/directory/(?P<dir_id>[0-9a-f]+)/",
- "api-1-vault-cook-directory",
+ f"/vault/flat/(?P<swhid>{SWHID_RE})/",
+ "api-1-vault-cook-flat",
methods=["GET", "POST"],
- checksum_args=["dir_id"],
throttle_scope="swh_vault_cooking",
never_cache=True,
)
-@api_doc("/vault/directory/")
+@api_doc("/vault/flat/")
@format_docstring()
-def api_vault_cook_directory(request, dir_id):
+def api_vault_cook_flat(request, swhid):
"""
- .. http:get:: /api/1/vault/directory/(dir_id)/
- .. http:post:: /api/1/vault/directory/(dir_id)/
+ .. http:get:: /api/1/vault/flat/(swhid)/
+ .. http:post:: /api/1/vault/flat/(swhid)/
- Request the cooking of an archive for a directory or check
- its cooking status.
+ Request the cooking of a simple archive, typically for a directory.
That endpoint enables to create a vault cooking task for a directory
through a POST request or check the status of a previously created one
@@ -77,13 +80,13 @@
Once the cooking task has been executed, the resulting archive can
be downloaded using the dedicated endpoint
- :http:get:`/api/1/vault/directory/(dir_id)/raw/`.
+ :http:get:`/api/1/vault/flat/(swhid)/raw/`.
Then to extract the cooked directory in the current one, use::
- $ tar xvf path/to/directory.tar.gz
+ $ tar xvf path/to/swh:1:*.tar.gz
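+ For reference, a cooking session driven with ``curl`` could look like
+ the following (hypothetical directory SWHID, shown truncated; the main
+ archive is used as example base URL)::
+
+ $ curl -X POST https://archive.softwareheritage.org/api/1/vault/flat/swh:1:dir:.../
+ $ curl https://archive.softwareheritage.org/api/1/vault/flat/swh:1:dir:.../
+ $ curl -o bundle.tar.gz https://archive.softwareheritage.org/api/1/vault/flat/swh:1:dir:.../raw/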
- :param string dir_id: the directory's sha1 identifier
+ :param string swhid: the object's SWHID
:query string email: e-mail to notify when the archive is ready
@@ -91,15 +94,13 @@
:>json string fetch_url: the url from which to download the archive
once it has been cooked
- (see :http:get:`/api/1/vault/directory/(dir_id)/raw/`)
- :>json string obj_type: the type of object to cook
- (directory or revision)
+ (see :http:get:`/api/1/vault/flat/(swhid)/raw/`)
:>json string progress_message: message describing the cooking task
progress
:>json number id: the cooking task id
:>json string status: the cooking task status
(either **new**, **pending**, **done** or **failed**)
- :>json string obj_id: the identifier of the object to cook
+ :>json string swhid: the identifier of the object to cook
:statuscode 200: no error
:statuscode 400: an invalid directory identifier has been provided
@@ -107,72 +108,125 @@
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
+ swhid = CoreSWHID.from_string(swhid)
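+ # Only directory SWHIDs can be cooked as flat bundles for now; raise
+ # targeted errors for the other core object types.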
+ if swhid.object_type == ObjectType.DIRECTORY:
+ res = _dispatch_cook_progress(request, "flat", swhid)
+ res["fetch_url"] = reverse(
+ "api-1-vault-fetch-flat", url_args={"swhid": str(swhid)}, request=request,
+ )
+ return _vault_response(res)
+ elif swhid.object_type == ObjectType.CONTENT:
+ raise BadInputExc(
+ "Content objects do not need to be cooked, "
+ "use `/api/1/content/raw/` instead."
+ )
+ elif swhid.object_type == ObjectType.REVISION:
+ # TODO: support revisions too? (the vault allows it)
+ raise BadInputExc(
+ "Only directories can be cooked as 'flat' bundles. "
+ "Use `/api/1/vault/gitfast/` to cook revisions, as gitfast bundles."
+ )
+ else:
+ raise BadInputExc("Only directories can be cooked as 'flat' bundles.")
+
+
+@api_route(
+ r"/vault/directory/(?P<dir_id>[0-9a-f]+)/",
+ "api-1-vault-cook-directory",
+ methods=["GET", "POST"],
+ checksum_args=["dir_id"],
+ throttle_scope="swh_vault_cooking",
+ never_cache=True,
+)
+@api_doc("/vault/directory/", tags=["hidden"])
+@format_docstring()
+def api_vault_cook_directory(request, dir_id):
+ """
+ Replaced by :http:get:`/api/1/vault/flat/(swhid)/`
+ """
_, obj_id = query.parse_hash_with_algorithms_or_throws(
dir_id, ["sha1"], "Only sha1_git is supported."
)
- res = _dispatch_cook_progress(request, "directory", obj_id)
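+ # Legacy endpoint: wrap the bare sha1_git into a directory SWHID and
+ # delegate to the new "flat" bundle cooking machinery.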
+ swhid = f"swh:1:dir:{obj_id.hex()}"
+ res = _dispatch_cook_progress(request, "flat", CoreSWHID.from_string(swhid))
res["fetch_url"] = reverse(
- "api-1-vault-fetch-directory", url_args={"dir_id": dir_id}, request=request,
+ "api-1-vault-fetch-flat", url_args={"swhid": swhid}, request=request,
)
return _vault_response(res)
@api_route(
- r"/vault/directory/(?P<dir_id>[0-9a-f]+)/raw/",
- "api-1-vault-fetch-directory",
- checksum_args=["dir_id"],
+ f"/vault/flat/(?P<swhid>{SWHID_RE})/raw/", "api-1-vault-fetch-flat",
)
-@api_doc("/vault/directory/raw/")
+@api_doc("/vault/flat/raw/")
-def api_vault_fetch_directory(request, dir_id):
+def api_vault_fetch_flat(request, swhid):
"""
- .. http:get:: /api/1/vault/directory/(dir_id)/raw/
+ .. http:get:: /api/1/vault/flat/(swhid)/raw/
- Fetch the cooked archive for a directory.
+ Fetch the cooked archive for a flat bundle.
- See :http:get:`/api/1/vault/directory/(dir_id)/` to get more
- details on directory cooking.
+ See :http:get:`/api/1/vault/flat/(swhid)/` to get more
+ details on 'flat' bundle cooking.
- :param string dir_id: the directory's sha1 identifier
+ :param string swhid: the SWHID of the object to cook
:resheader Content-Type: application/octet-stream
:statuscode 200: no error
- :statuscode 400: an invalid directory identifier has been provided
:statuscode 404: requested directory did not receive any cooking
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
- _, obj_id = query.parse_hash_with_algorithms_or_throws(
- dir_id, ["sha1"], "Only sha1_git is supported."
- )
res = api_lookup(
archive.vault_fetch,
- "directory",
- obj_id,
- notfound_msg="Cooked archive for directory '{}' not found.".format(dir_id),
+ "flat",
+ CoreSWHID.from_string(swhid),
+ notfound_msg=f"Cooked archive for {swhid} not found.",
request=request,
)
- fname = "{}.tar.gz".format(dir_id)
+ fname = "{}.tar.gz".format(swhid)
response = HttpResponse(res, content_type="application/gzip")
response["Content-disposition"] = "attachment; filename={}".format(fname)
return response
@api_route(
- r"/vault/revision/(?P<rev_id>[0-9a-f]+)/gitfast/",
- "api-1-vault-cook-revision_gitfast",
+ r"/vault/directory/(?P[0-9a-f]+)/raw/",
+ "api-1-vault-fetch-directory",
+ checksum_args=["dir_id"],
+)
+@api_doc("/vault/directory/raw/", tags=["hidden"])
+def api_vault_fetch_directory(request, dir_id):
+ """
+ Replaced by :http:get:`/api/1/vault/flat/(swhid)/raw/`
+ """
+ _, obj_id = query.parse_hash_with_algorithms_or_throws(
+ dir_id, ["sha1"], "Only sha1_git is supported."
+ )
+ dir_flat_raw_url = reverse(
+ "api-1-vault-fetch-flat", url_args={"swhid": f"swh:1:dir:{dir_id}"}
+ )
+ return redirect(dir_flat_raw_url)
+
+
+######################################################
+# gitfast bundles
+
+
+@api_route(
+ f"/vault/gitfast/(?P<swhid>{SWHID_RE})/",
+ "api-1-vault-cook-gitfast",
methods=["GET", "POST"],
- checksum_args=["rev_id"],
throttle_scope="swh_vault_cooking",
never_cache=True,
)
-@api_doc("/vault/revision/gitfast/")
+@api_doc("/vault/gitfast/", tags=["hidden"])
@format_docstring()
-def api_vault_cook_revision_gitfast(request, rev_id):
+def api_vault_cook_gitfast(request, swhid):
"""
- .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/
- .. http:post:: /api/1/vault/revision/(rev_id)/gitfast/
+ .. http:get:: /api/1/vault/gitfast/(swhid)/
+ .. http:post:: /api/1/vault/gitfast/(swhid)/
Request the cooking of a gitfast archive for a revision or check
its cooking status.
@@ -188,7 +242,7 @@
Then to import the revision in the current directory, use::
$ git init
- $ zcat path/to/revision.gitfast.gz | git fast-import
+ $ zcat path/to/swh:1:rev:*.gitfast.gz | git fast-import
$ git checkout HEAD
:param string rev_id: the revision's sha1 identifier
@@ -199,88 +253,113 @@
:>json string fetch_url: the url from which to download the archive
once it has been cooked
- (see :http:get:`/api/1/vault/revision/(rev_id)/gitfast/raw/`)
- :>json string obj_type: the type of object to cook
- (directory or revision)
+ (see :http:get:`/api/1/vault/gitfast/(swhid)/raw/`)
:>json string progress_message: message describing the cooking task
progress
:>json number id: the cooking task id
:>json string status: the cooking task status (new/pending/done/failed)
- :>json string obj_id: the identifier of the object to cook
+ :>json string swhid: the identifier of the object to cook
:statuscode 200: no error
- :statuscode 400: an invalid revision identifier has been provided
:statuscode 404: requested directory did not receive any cooking
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
+ swhid = CoreSWHID.from_string(swhid)
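+ # Only revision SWHIDs can be cooked as gitfast bundles; raise targeted
+ # errors for the other core object types.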
+ if swhid.object_type == ObjectType.REVISION:
+ res = _dispatch_cook_progress(request, "gitfast", swhid)
+ res["fetch_url"] = reverse(
+ "api-1-vault-fetch-gitfast",
+ url_args={"swhid": str(swhid)},
+ request=request,
+ )
+ return _vault_response(res)
+ elif swhid.object_type == ObjectType.CONTENT:
+ raise BadInputExc(
+ "Content objects do not need to be cooked, "
+ "use `/api/1/content/raw/` instead."
+ )
+ elif swhid.object_type == ObjectType.DIRECTORY:
+ raise BadInputExc(
+ "Only revisions can be cooked as 'gitfast' bundles. "
+ "Use `/api/1/vault/flat/` to cook directories, as flat bundles."
+ )
+ else:
+ raise BadInputExc("Only revisions can be cooked as 'gitfast' bundles.")
+
+
+@api_route(
+ r"/vault/revision/(?P<rev_id>[0-9a-f]+)/gitfast/",
+ "api-1-vault-cook-revision_gitfast",
+ methods=["GET", "POST"],
+ checksum_args=["rev_id"],
+ throttle_scope="swh_vault_cooking",
+ never_cache=True,
+)
+@api_doc("/vault/revision/gitfast/", tags=["hidden"])
+@format_docstring()
+def api_vault_cook_revision_gitfast(request, rev_id):
+ """
+ Replaced by :http:get:`/api/1/vault/gitfast/(swhid)/`
+ """
_, obj_id = query.parse_hash_with_algorithms_or_throws(
rev_id, ["sha1"], "Only sha1_git is supported."
)
- res = _dispatch_cook_progress(request, "revision_gitfast", obj_id)
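+ # Legacy endpoint: wrap the bare sha1_git into a revision SWHID and
+ # delegate to the new "gitfast" bundle cooking machinery.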
+ swhid = f"swh:1:rev:{obj_id.hex()}"
+ res = _dispatch_cook_progress(request, "gitfast", CoreSWHID.from_string(swhid))
res["fetch_url"] = reverse(
- "api-1-vault-fetch-revision_gitfast",
- url_args={"rev_id": rev_id},
- request=request,
+ "api-1-vault-fetch-gitfast", url_args={"swhid": swhid}, request=request,
)
return _vault_response(res)
@api_route(
- r"/vault/revision/(?P<rev_id>[0-9a-f]+)/gitfast/raw/",
- "api-1-vault-fetch-revision_gitfast",
- checksum_args=["rev_id"],
+ f"/vault/gitfast/(?P<swhid>{SWHID_RE})/raw/", "api-1-vault-fetch-gitfast",
)
-@api_doc("/vault/revision/gitfast/raw/")
-def api_vault_fetch_revision_gitfast(request, rev_id):
+@api_doc("/vault/gitfast/raw/")
+def api_vault_fetch_revision_gitfast(request, swhid):
"""
- .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/raw/
+ .. http:get:: /api/1/vault/gitfast/(swhid)/raw/
Fetch the cooked gitfast archive for a revision.
- See :http:get:`/api/1/vault/revision/(rev_id)/gitfast/` to get more
- details on directory cooking.
+ See :http:get:`/api/1/vault/gitfast/(swhid)/` to get more
+ details on gitfast cooking.
:param string rev_id: the revision's sha1 identifier
:resheader Content-Type: application/octet-stream
:statuscode 200: no error
- :statuscode 400: an invalid revision identifier has been provided
:statuscode 404: requested directory did not receive any cooking
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
- _, obj_id = query.parse_hash_with_algorithms_or_throws(
- rev_id, ["sha1"], "Only sha1_git is supported."
- )
res = api_lookup(
archive.vault_fetch,
- "revision_gitfast",
- obj_id,
- notfound_msg="Cooked archive for revision '{}' not found.".format(rev_id),
+ "gitfast",
+ CoreSWHID.from_string(swhid),
+ notfound_msg="Cooked archive for {} not found.".format(swhid),
request=request,
)
- fname = "{}.gitfast.gz".format(rev_id)
+ fname = "{}.gitfast.gz".format(swhid)
response = HttpResponse(res, content_type="application/gzip")
response["Content-disposition"] = "attachment; filename={}".format(fname)
return response
@api_route(
- r"/vault/revision_gitfast/(?P<rev_id>[0-9a-f]+)/raw/",
- "api-1-vault-revision_gitfast-raw",
+ r"/vault/revision/(?P<rev_id>[0-9a-f]+)/gitfast/raw/",
+ "api-1-vault-fetch-revision_gitfast",
checksum_args=["rev_id"],
)
@api_doc("/vault/revision_gitfast/raw/", tags=["hidden"])
def _api_vault_revision_gitfast_raw(request, rev_id):
"""
- The vault backend sends an email containing an invalid url to fetch a
- gitfast archive. So setup a redirection to the correct one as a temporary
- workaround.
+ Replaced by :http:get:`/api/1/vault/gitfast/(swhid)/raw/`
"""
rev_gitfast_raw_url = reverse(
- "api-1-vault-fetch-revision_gitfast", url_args={"rev_id": rev_id}
+ "api-1-vault-fetch-gitfast", url_args={"swhid": f"swh:1:rev:{rev_id}"}
)
return redirect(rev_gitfast_raw_url)
diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py
--- a/swh/web/browse/snapshot_context.py
+++ b/swh/web/browse/snapshot_context.py
@@ -839,9 +839,9 @@
vault_cooking = {
"directory_context": True,
- "directory_id": sha1_git,
+ "directory_swhid": f"swh:1:dir:{sha1_git}",
"revision_context": True,
- "revision_id": revision_id,
+ "revision_swhid": f"swh:1:rev:{revision_id}",
}
swhids_info = get_swhids_info(swh_objects, snapshot_context, dir_metadata)
diff --git a/swh/web/browse/views/directory.py b/swh/web/browse/views/directory.py
--- a/swh/web/browse/views/directory.py
+++ b/swh/web/browse/views/directory.py
@@ -155,9 +155,9 @@
vault_cooking = {
"directory_context": True,
- "directory_id": sha1_git,
+ "directory_swhid": f"swh:1:dir:{sha1_git}",
"revision_context": False,
- "revision_id": None,
+ "revision_swhid": None,
}
swh_objects = [SWHObjectInfo(object_type=DIRECTORY, object_id=sha1_git)]
diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py
--- a/swh/web/browse/views/release.py
+++ b/swh/web/browse/views/release.py
@@ -119,9 +119,9 @@
rev_directory = revision["directory"]
vault_cooking = {
"directory_context": True,
- "directory_id": rev_directory,
+ "directory_swhid": f"swh:1:dir:{rev_directory}",
"revision_context": True,
- "revision_id": release["target"],
+ "revision_swhid": f"swh:1:rev:{release['target']}",
}
swh_objects.append(
SWHObjectInfo(object_type=REVISION, object_id=release["target"])
@@ -143,9 +143,9 @@
archive.lookup_directory(release["target"])
vault_cooking = {
"directory_context": True,
- "directory_id": release["target"],
+ "directory_swhid": f"swh:1:dir:{release['target']}",
"revision_context": False,
- "revision_id": None,
+ "revision_swhid": None,
}
swh_objects.append(
SWHObjectInfo(object_type=DIRECTORY, object_id=release["target"])
diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py
--- a/swh/web/browse/views/revision.py
+++ b/swh/web/browse/views/revision.py
@@ -444,9 +444,9 @@
vault_cooking = {
"directory_context": False,
- "directory_id": None,
+ "directory_swhid": None,
"revision_context": True,
- "revision_id": sha1_git,
+ "revision_swhid": f"swh:1:rev:{sha1_git}",
}
swh_objects = [SWHObjectInfo(object_type=REVISION, object_id=sha1_git)]
@@ -529,7 +529,7 @@
}
vault_cooking["directory_context"] = True
- vault_cooking["directory_id"] = dir_id
+ vault_cooking["directory_swhid"] = f"swh:1:dir:{dir_id}"
swh_objects.append(SWHObjectInfo(object_type=DIRECTORY, object_id=dir_id))
diff --git a/swh/web/common/archive.py b/swh/web/common/archive.py
--- a/swh/web/common/archive.py
+++ b/swh/web/common/archive.py
@@ -11,7 +11,14 @@
from urllib.parse import urlparse
from swh.model import hashutil
-from swh.model.identifiers import CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT
+from swh.model.identifiers import (
+ CONTENT,
+ DIRECTORY,
+ RELEASE,
+ REVISION,
+ SNAPSHOT,
+ CoreSWHID,
+)
from swh.model.model import OriginVisit, Revision
from swh.storage.algos import diff, revisions_walker
from swh.storage.algos.origin import origin_get_latest_visit_status
@@ -1234,29 +1241,29 @@
return (rev["id"], lookup_directory_with_revision(rev["id"], path, with_data))
-def _vault_request(vault_fn, *args, **kwargs):
+def _vault_request(vault_fn, bundle_type: str, swhid: CoreSWHID, **kwargs):
try:
- return vault_fn(*args, **kwargs)
+ return vault_fn(bundle_type, swhid, **kwargs)
except VaultNotFoundExc:
return None
-def vault_cook(obj_type, obj_id, email=None):
+def vault_cook(bundle_type: str, swhid: CoreSWHID, email=None):
"""Cook a vault bundle.
"""
- return _vault_request(vault.cook, obj_type, obj_id, email=email)
+ return _vault_request(vault.cook, bundle_type, swhid, email=email)
-def vault_fetch(obj_type, obj_id):
+def vault_fetch(bundle_type: str, swhid: CoreSWHID):
"""Fetch a vault bundle.
"""
- return _vault_request(vault.fetch, obj_type, obj_id)
+ return _vault_request(vault.fetch, bundle_type, swhid)
-def vault_progress(obj_type, obj_id):
+def vault_progress(bundle_type: str, swhid: CoreSWHID):
"""Get the current progress of a vault bundle.
"""
- return _vault_request(vault.progress, obj_type, obj_id)
+ return _vault_request(vault.progress, bundle_type, swhid)
def diff_revision(rev_id):
diff --git a/swh/web/templates/includes/vault-create-tasks.html b/swh/web/templates/includes/vault-create-tasks.html
--- a/swh/web/templates/includes/vault-create-tasks.html
+++ b/swh/web/templates/includes/vault-create-tasks.html
@@ -15,20 +15,20 @@
{% if vault_cooking.directory_context %}
-
{% else %}
-
+
Download
@@ -48,7 +48,7 @@
- You have requested the cooking of the directory with identifier {{ vault_cooking.directory_id }}
+ You have requested the cooking of the directory with identifier {{ vault_cooking.directory_swhid }}
into a standard tar.gz archive.
@@ -63,7 +63,7 @@
@@ -80,7 +80,7 @@
- You have requested the download of the directory with identifier {{ vault_cooking.directory_id }}
+ You have requested the download of the directory with identifier {{ vault_cooking.directory_swhid }}
as a standard tar.gz archive.
@@ -89,7 +89,7 @@
@@ -106,7 +106,7 @@
- You have requested the cooking of the history heading to revision with identifier {{ vault_cooking.revision_id }}
+ You have requested the cooking of the history heading to revision with identifier {{ vault_cooking.revision_swhid }}
into a git fast-import archive.
@@ -121,7 +121,7 @@
@@ -138,7 +138,7 @@
- You have requested the download of the history heading to revision with identifier {{ vault_cooking.revision_id }}
+ You have requested the download of the history heading to revision with identifier {{ vault_cooking.revision_swhid }}
as a git fast-import archive.