\ No newline at end of file
+
diff --git a/assets/src/bundles/vault/vault-ui.js b/assets/src/bundles/vault/vault-ui.js
--- a/assets/src/bundles/vault/vault-ui.js
+++ b/assets/src/bundles/vault/vault-ui.js
@@ -72,10 +72,10 @@
clearTimeout(checkVaultId);
// build cook request url
let cookingUrl;
- if (recookTask.object_type === 'directory') {
- cookingUrl = Urls.api_1_vault_cook_directory(recookTask.object_id);
+ if (recookTask.bundle_type === 'flat') {
+ cookingUrl = Urls.api_1_vault_cook_flat(recookTask.swhid);
} else {
- cookingUrl = Urls.api_1_vault_cook_revision_gitfast(recookTask.object_id);
+ cookingUrl = Urls.api_1_vault_cook_gitfast(recookTask.swhid);
}
if (recookTask.email) {
cookingUrl += '?email=' + recookTask.email;
@@ -89,21 +89,21 @@
recookTask.status = 'new';
const vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks'));
for (let i = 0; i < vaultCookingTasks.length; ++i) {
- if (vaultCookingTasks[i].object_id === recookTask.object_id) {
+ if (vaultCookingTasks[i].swhid === recookTask.swhid) {
vaultCookingTasks[i] = recookTask;
break;
}
}
// save updated tasks to local storage
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
- // restart cooking tasks status polling
- checkVaultCookingTasks();
// hide recook archive modal
$('#vault-recook-object-modal').modal('hide');
+ // restart cooking tasks status polling
+ await checkVaultCookingTasks();
} catch (_) {
// something went wrong
- checkVaultCookingTasks();
$('#vault-recook-object-modal').modal('hide');
+ await checkVaultCookingTasks();
}
}
}
@@ -121,21 +121,40 @@
for (let i = 0; i < vaultCookingTasks.length; ++i) {
const cookingTask = vaultCookingTasks[i];
- currentObjectIds.push(cookingTask.object_id);
- tasks[cookingTask.object_id] = cookingTask;
+
+ if (typeof cookingTask.object_type !== 'undefined') {
+ // Legacy cooking task, upgrade it to the new schema
+ if (cookingTask.object_type === 'directory') {
+ cookingTask.swhid = `swh:1:dir:${cookingTask.object_id}`;
+ cookingTask.bundle_type = 'flat';
+ } else if (cookingTask.object_type === 'revision') {
+ cookingTask.swhid = `swh:1:rev:${cookingTask.object_id}`;
+ cookingTask.bundle_type = 'gitfast';
+ } else {
+ // Log to the console + Sentry
+ console.error(`Unexpected cookingTask.object_type: ${cookingTask.object_type}`);
+ // Ignore it for now and hope a future version will fix it
+ continue;
+ }
+ delete cookingTask.object_type;
+ delete cookingTask.object_id;
+ }
+
+ currentObjectIds.push(cookingTask.swhid);
+ tasks[cookingTask.swhid] = cookingTask;
let cookingUrl;
- if (cookingTask.object_type === 'directory') {
- cookingUrl = Urls.api_1_vault_cook_directory(cookingTask.object_id);
+ if (cookingTask.bundle_type === 'flat') {
+ cookingUrl = Urls.api_1_vault_cook_flat(cookingTask.swhid);
} else {
- cookingUrl = Urls.api_1_vault_cook_revision_gitfast(cookingTask.object_id);
+ cookingUrl = Urls.api_1_vault_cook_gitfast(cookingTask.swhid);
}
if (cookingTask.status !== 'done' && cookingTask.status !== 'failed') {
cookingTaskRequests.push(fetch(cookingUrl));
}
}
$('.swh-vault-table tbody tr').each((i, row) => {
- const objectId = $(row).find('.vault-object-info').data('object-id');
- if ($.inArray(objectId, currentObjectIds) === -1) {
+ const swhid = $(row).find('.vault-object-info').data('swhid');
+ if ($.inArray(swhid, currentObjectIds) === -1) {
$(row).remove();
}
});
@@ -146,24 +165,20 @@
const table = $('#vault-cooking-tasks tbody');
for (let i = 0; i < cookingTasks.length; ++i) {
- const cookingTask = tasks[cookingTasks[i].obj_id];
+ const cookingTask = tasks[cookingTasks[i].swhid];
cookingTask.status = cookingTasks[i].status;
cookingTask.fetch_url = cookingTasks[i].fetch_url;
cookingTask.progress_message = cookingTasks[i].progress_message;
}
for (let i = 0; i < vaultCookingTasks.length; ++i) {
const cookingTask = vaultCookingTasks[i];
- const rowTask = $(`#vault-task-${cookingTask.object_id}`);
+ const rowTask = $(`#vault-task-${CSS.escape(cookingTask.swhid)}`);
if (!rowTask.length) {
let browseUrl = cookingTask.browse_url;
if (!browseUrl) {
- if (cookingTask.object_type === 'directory') {
- browseUrl = Urls.browse_directory(cookingTask.object_id);
- } else {
- browseUrl = Urls.browse_revision(cookingTask.object_id);
- }
+ browseUrl = Urls.browse_swhid(cookingTask.swhid);
}
const progressBar = $.parseHTML(progress)[0];
@@ -204,7 +219,7 @@
return;
}
vaultCookingTasks = $.grep(vaultCookingTasks, task => {
- return $.inArray(task.object_id, tasksToRemove) === -1;
+ return $.inArray(task.swhid, tasksToRemove) === -1;
});
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
}
@@ -221,8 +236,8 @@
$('.swh-vault-table tbody tr').each((i, row) => {
const taskSelected = $(row).find('.vault-task-toggle-selection').prop('checked');
if (taskSelected) {
- const objectId = $(row).find('.vault-object-info').data('object-id');
- tasksToRemove.push(objectId);
+ const swhid = $(row).find('.vault-object-info').data('swhid');
+ tasksToRemove.push(swhid);
$(row).remove();
}
});
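
A minimal sketch (not from this patch) of the localStorage migration the hunks above perform, using the revision id from the Cypress fixtures; the field values are illustrative. The upgrade path only rewrites the identifying fields, while status and fetch_url are refreshed by the next polling round:

    // legacy entry under the 'swh-vault-cooking-tasks' key
    const legacyTask = {
      object_type: 'revision',
      object_id: '1c480a4573d2a003fc2630c21c2b25829de49972',
      email: '',
      status: 'done',
      fetch_url: '/api/1/vault/revision/1c480a4573d2a003fc2630c21c2b25829de49972/gitfast/raw/',
      progress_message: null
    };

    // the same task after the upgrade path added above
    const upgradedTask = {
      bundle_type: 'gitfast',
      swhid: 'swh:1:rev:1c480a4573d2a003fc2630c21c2b25829de49972',
      email: '',
      status: 'done',
      fetch_url: '/api/1/vault/revision/1c480a4573d2a003fc2630c21c2b25829de49972/gitfast/raw/',
      progress_message: null
    };
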
diff --git a/cypress/fixtures/cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz b/cypress/fixtures/swh:1:dir:cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz
rename from cypress/fixtures/cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz
rename to cypress/fixtures/swh:1:dir:cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz
diff --git a/cypress/fixtures/1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz b/cypress/fixtures/swh:1:rev:1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz
rename from cypress/fixtures/1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz
rename to cypress/fixtures/swh:1:rev:1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz
diff --git a/cypress/integration/vault.spec.js b/cypress/integration/vault.spec.js
--- a/cypress/integration/vault.spec.js
+++ b/cypress/integration/vault.spec.js
@@ -21,6 +21,10 @@
cy.wait('@checkVaultCookingTask');
}
+function getVaultItemList() {
+ return JSON.parse(window.localStorage.getItem('swh-vault-cooking-tasks'));
+}
+
function updateVaultItemList(vaultUrl, vaultItems) {
cy.visit(vaultUrl)
.then(() => {
@@ -30,15 +34,15 @@
});
}
-// Mocks API response : /api/1/vault/(:objectType)/(:hash)
-// objectType : {'directory', 'revision'}
-function genVaultCookingResponse(objectType, objectId, status, message, fetchUrl) {
+// Mocks API response : /api/1/vault/(:bundleType)/(:swhid)
+// bundleType : {'flat', 'gitfast'}
+function genVaultCookingResponse(bundleType, swhid, status, message, fetchUrl) {
return {
- 'obj_type': objectType,
+ 'bundle_type': bundleType,
'id': 1,
'progress_message': message,
'status': status,
- 'obj_id': objectId,
+ 'swhid': swhid,
'fetch_url': fetchUrl
};
};
@@ -46,7 +50,7 @@
// Tests progressbar color, status
// And status in localStorage
function testStatus(taskId, color, statusMsg, status) {
- cy.get(`.swh-vault-table #vault-task-${taskId}`)
+ cy.get(`.swh-vault-table #vault-task-${CSS.escape(taskId)}`)
.should('be.visible')
.find('.progress-bar')
.should('be.visible')
@@ -54,8 +58,8 @@
.and('contain', statusMsg)
.then(() => {
// Vault item with object_id as taskId should exist in localStorage
- const currentVaultItems = JSON.parse(window.localStorage.getItem('swh-vault-cooking-tasks'));
- const vaultItem = currentVaultItems.find(obj => obj.object_id === taskId);
+ const currentVaultItems = getVaultItemList();
+ const vaultItem = currentVaultItems.find(obj => obj.swhid === taskId);
assert.isNotNull(vaultItem);
assert.strictEqual(vaultItem.status, status);
@@ -66,43 +70,54 @@
before(function() {
const dirInfo = this.origin[0].directory[0];
- this.directory = dirInfo.id;
+ this.directory = `swh:1:dir:${dirInfo.id}`;
this.directoryUrl = this.Urls.browse_origin_directory() +
`?origin_url=${this.origin[0].url}&path=${dirInfo.path}`;
- this.vaultDirectoryUrl = this.Urls.api_1_vault_cook_directory(this.directory);
- this.vaultFetchDirectoryUrl = this.Urls.api_1_vault_fetch_directory(this.directory);
+ this.vaultDirectoryUrl = this.Urls.api_1_vault_cook_flat(this.directory);
+ this.vaultFetchDirectoryUrl = this.Urls.api_1_vault_fetch_flat(this.directory);
- this.revision = this.origin[1].revisions[0];
- this.revisionUrl = this.Urls.browse_revision(this.revision);
- this.vaultRevisionUrl = this.Urls.api_1_vault_cook_revision_gitfast(this.revision);
- this.vaultFetchRevisionUrl = this.Urls.api_1_vault_fetch_revision_gitfast(this.revision);
+ this.revisionId = this.origin[1].revisions[0];
+ this.revision = `swh:1:rev:${this.revisionId}`;
+ this.revisionUrl = this.Urls.browse_revision(this.revisionId);
+ this.vaultRevisionUrl = this.Urls.api_1_vault_cook_gitfast(this.revision);
+ this.vaultFetchRevisionUrl = this.Urls.api_1_vault_fetch_gitfast(this.revision);
const release = this.origin[1].release;
this.releaseUrl = this.Urls.browse_release(release.id) + `?origin_url=${this.origin[1].url}`;
- this.vaultReleaseDirectoryUrl = this.Urls.api_1_vault_cook_directory(release.directory);
+ this.vaultReleaseDirectoryUrl = this.Urls.api_1_vault_cook_flat(`swh:1:dir:${release.directory}`);
});
beforeEach(function() {
// For some reason, this gets reset if we define it in the before() hook,
// so we need to define it here
this.vaultItems = [
+ {
+ 'bundle_type': 'gitfast',
+ 'swhid': this.revision,
+ 'email': '',
+ 'status': 'done',
+ 'fetch_url': `/api/1/vault/gitfast/${this.revision}/raw/`,
+ 'progress_message': null
+ }
+ ];
+ this.legacyVaultItems = [
{
'object_type': 'revision',
- 'object_id': this.revision,
+ 'object_id': this.revisionId,
'email': '',
'status': 'done',
- 'fetch_url': `/api/1/vault/revision/${this.revision}/gitfast/raw/`,
+ 'fetch_url': `/api/1/vault/revision/${this.revisionId}/gitfast/raw/`,
'progress_message': null
}
];
this.genVaultDirCookingResponse = (status, message = null) => {
- return genVaultCookingResponse('directory', this.directory, status,
+ return genVaultCookingResponse('flat', this.directory, status,
message, this.vaultFetchDirectoryUrl);
};
this.genVaultRevCookingResponse = (status, message = null) => {
- return genVaultCookingResponse('revision', this.revision, status,
+ return genVaultCookingResponse('gitfast', this.revision, status,
message, this.vaultFetchRevisionUrl);
};
@@ -166,19 +181,24 @@
cy.visit(this.Urls.browse_vault());
- // trick to override the response of an intercepted request
- // https://github.com/cypress-io/cypress/issues/9302
- cy.intercept('GET', this.vaultDirectoryUrl, req => this.genVaultDirCookingResponse('done'))
- .as('checkVaultCookingTask');
+ cy.contains(`#vault-task-${CSS.escape(this.revision)} button`, 'Download')
+ .click();
+ });
- // Stub responses when requesting the vault API to simulate
- // a task has been created
- cy.intercept('POST', this.vaultDirectoryUrl, {
- body: this.genVaultDirCookingResponse('new')
- }).as('createVaultCookingTask');
+ it('should display and upgrade previous cooking tasks from the legacy format', function() {
+ updateVaultItemList(this.Urls.browse_vault(), this.legacyVaultItems);
+
+    // updateVaultItemList does not seem to take effect in this test, so set localStorage directly
+ window.localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(this.vaultItems));
- cy.contains(`#vault-task-${this.revision} button`, 'Download')
+ cy.visit(this.Urls.browse_vault());
+
+ // Check it is displayed
+ cy.contains(`#vault-task-${CSS.escape(this.revision)} button`, 'Download')
.click();
+
+ // Check the LocalStorage was upgraded
+ expect(getVaultItemList()).to.deep.equal(this.vaultItems);
});
it('should create a directory cooking task and report the success', function() {
@@ -248,15 +268,15 @@
testStatus(this.directory, progressbarColors['done'], 'done', 'done');
});
- cy.get(`#vault-task-${this.directory} .vault-origin a`)
+ cy.get(`#vault-task-${CSS.escape(this.directory)} .vault-origin a`)
.should('contain', this.origin[0].url)
.should('have.attr', 'href', `${this.Urls.browse_origin()}?origin_url=${this.origin[0].url}`);
- cy.get(`#vault-task-${this.directory} .vault-object-info a`)
+ cy.get(`#vault-task-${CSS.escape(this.directory)} .vault-object-info a`)
.should('have.text', this.directory)
.should('have.attr', 'href', browseDirectoryUrl);
- cy.get(`#vault-task-${this.directory} .vault-dl-link button`)
+ cy.get(`#vault-task-${CSS.escape(this.directory)} .vault-dl-link button`)
.click();
cy.wait('@fetchCookedArchive').then((xhr) => {
@@ -333,14 +353,14 @@
testStatus(this.revision, progressbarColors['done'], 'done', 'done');
});
- cy.get(`#vault-task-${this.revision} .vault-origin`)
+ cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-origin`)
.should('have.text', 'unknown');
- cy.get(`#vault-task-${this.revision} .vault-object-info a`)
+ cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-object-info a`)
.should('have.text', this.revision)
.should('have.attr', 'href', browseRevisionUrl);
- cy.get(`#vault-task-${this.revision} .vault-dl-link button`)
+ cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-dl-link button`)
.click();
cy.wait('@fetchCookedArchive').then((xhr) => {
@@ -403,7 +423,7 @@
}
}).as('fetchCookedArchive');
- cy.get(`#vault-task-${this.revision} .vault-dl-link button`)
+ cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-dl-link button`)
.click();
cy.wait('@fetchCookedArchive').then(() => {
@@ -431,13 +451,13 @@
updateVaultItemList(this.Urls.browse_vault(), this.vaultItems);
- cy.get(`#vault-task-${this.revision}`)
+ cy.get(`#vault-task-${CSS.escape(this.revision)}`)
.find('input[type="checkbox"]')
.click({force: true});
cy.contains('button', 'Remove selected tasks')
.click();
- cy.get(`#vault-task-${this.revision}`)
+ cy.get(`#vault-task-${CSS.escape(this.revision)}`)
.should('not.exist');
});
diff --git a/swh/web/api/views/vault.py b/swh/web/api/views/vault.py
--- a/swh/web/api/views/vault.py
+++ b/swh/web/api/views/vault.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2015-2020 The Software Heritage developers
+# Copyright (C) 2015-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
@@ -8,36 +8,38 @@
from django.http import HttpResponse
from django.shortcuts import redirect
-from swh.model import hashutil
+from swh.model.identifiers import CoreSWHID, ObjectType
from swh.web.api.apidoc import api_doc, format_docstring
from swh.web.api.apiurls import api_route
from swh.web.api.views.utils import api_lookup
from swh.web.common import archive, query
+from swh.web.common.exc import BadInputExc
from swh.web.common.utils import reverse
+######################################################
+# Common
+
+SWHID_RE = "swh:1:[a-z]{3}:[0-9a-z]{40}"
+
# XXX: a bit spaghetti. Would be better with class-based views.
-def _dispatch_cook_progress(request, obj_type, obj_id):
- hex_id = hashutil.hash_to_hex(obj_id)
- object_name = obj_type.split("_")[0]
+def _dispatch_cook_progress(request, bundle_type: str, swhid: CoreSWHID):
if request.method == "GET":
return api_lookup(
archive.vault_progress,
- obj_type,
- obj_id,
- notfound_msg=(
- "Cooking of {} '{}' was never requested.".format(object_name, hex_id)
- ),
+ bundle_type,
+ swhid,
+ notfound_msg=f"Cooking of {swhid} was never requested.",
request=request,
)
elif request.method == "POST":
email = request.POST.get("email", request.GET.get("email", None))
return api_lookup(
archive.vault_cook,
- obj_type,
- obj_id,
+ bundle_type,
+ swhid,
email,
- notfound_msg=("{} '{}' not found.".format(object_name.title(), hex_id)),
+ notfound_msg=f"{swhid} not found.",
request=request,
)
@@ -45,31 +47,32 @@
def _vault_response(vault_response: Dict[str, Any]) -> Dict[str, Any]:
return {
"fetch_url": vault_response["fetch_url"],
- "obj_type": vault_response["type"],
"progress_message": vault_response["progress_msg"],
"id": vault_response["task_id"],
"status": vault_response["task_status"],
- "obj_id": vault_response["object_id"],
+ "swhid": str(vault_response["swhid"]),
}
+######################################################
+# Flat bundles
+
+
@api_route(
- r"/vault/directory/(?P[0-9a-f]+)/",
- "api-1-vault-cook-directory",
+ f"/vault/flat/(?P{SWHID_RE})/",
+ "api-1-vault-cook-flat",
methods=["GET", "POST"],
- checksum_args=["dir_id"],
throttle_scope="swh_vault_cooking",
never_cache=True,
)
-@api_doc("/vault/directory/")
+@api_doc("/vault/flat/")
@format_docstring()
-def api_vault_cook_directory(request, dir_id):
+def api_vault_cook_flat(request, swhid):
"""
- .. http:get:: /api/1/vault/directory/(dir_id)/
- .. http:post:: /api/1/vault/directory/(dir_id)/
+ .. http:get:: /api/1/vault/flat/(swhid)/
+ .. http:post:: /api/1/vault/flat/(swhid)/
- Request the cooking of an archive for a directory or check
- its cooking status.
+ Request the cooking of a simple archive, typically for a directory.
That endpoint enables to create a vault cooking task for a directory
through a POST request or check the status of a previously created one
@@ -77,13 +80,13 @@
Once the cooking task has been executed, the resulting archive can
be downloaded using the dedicated endpoint
- :http:get:`/api/1/vault/directory/(dir_id)/raw/`.
+ :http:get:`/api/1/vault/flat/(swhid)/raw/`.
Then to extract the cooked directory in the current one, use::
- $ tar xvf path/to/directory.tar.gz
+ $ tar xvf path/to/swh:1:*.tar.gz
- :param string dir_id: the directory's sha1 identifier
+ :param string swhid: the object's SWHID
:query string email: e-mail to notify when the archive is ready
@@ -91,15 +94,13 @@
:>json string fetch_url: the url from which to download the archive
once it has been cooked
- (see :http:get:`/api/1/vault/directory/(dir_id)/raw/`)
- :>json string obj_type: the type of object to cook
- (directory or revision)
+ (see :http:get:`/api/1/vault/flat/(swhid)/raw/`)
:>json string progress_message: message describing the cooking task
progress
:>json number id: the cooking task id
:>json string status: the cooking task status
(either **new**, **pending**, **done** or **failed**)
- :>json string obj_id: the identifier of the object to cook
+ :>json string swhid: the identifier of the object to cook
:statuscode 200: no error
:statuscode 400: an invalid directory identifier has been provided
@@ -107,72 +108,129 @@
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
+ swhid = CoreSWHID.from_string(swhid)
+ if swhid.object_type == ObjectType.DIRECTORY:
+ res = _dispatch_cook_progress(request, "flat", swhid)
+ res["fetch_url"] = reverse(
+ "api-1-vault-fetch-flat", url_args={"swhid": str(swhid)}, request=request,
+ )
+ return _vault_response(res)
+ elif swhid.object_type == ObjectType.CONTENT:
+ raise BadInputExc(
+ "Content objects do not need to be cooked, "
+ "use `/api/1/content/raw/` instead."
+ )
+ elif swhid.object_type == ObjectType.REVISION:
+ # TODO: support revisions too? (the vault allows it)
+ raise BadInputExc(
+ "Only directories can be cooked as 'flat' bundles. "
+ "Use `/api/1/vault/gitfast/` to cook revisions, as gitfast bundles."
+ )
+ else:
+ raise BadInputExc("Only directories can be cooked as 'flat' bundles.")
+
+
+@api_route(
+ r"/vault/directory/(?P[0-9a-f]+)/",
+ "api-1-vault-cook-directory",
+ methods=["GET", "POST"],
+ checksum_args=["dir_id"],
+ throttle_scope="swh_vault_cooking",
+ never_cache=True,
+)
+@api_doc("/vault/directory/", tags=["hidden"])
+@format_docstring()
+def api_vault_cook_directory(request, dir_id):
+ """
+ .. http:get:: /api/1/vault/directory/(dir_id)/
+
+ This endpoint was replaced by :http:get:`/api/1/vault/flat/(swhid)/`
+ """
_, obj_id = query.parse_hash_with_algorithms_or_throws(
dir_id, ["sha1"], "Only sha1_git is supported."
)
- res = _dispatch_cook_progress(request, "directory", obj_id)
+ swhid = f"swh:1:dir:{obj_id.hex()}"
+ res = _dispatch_cook_progress(request, "flat", CoreSWHID.from_string(swhid))
res["fetch_url"] = reverse(
- "api-1-vault-fetch-directory", url_args={"dir_id": dir_id}, request=request,
+ "api-1-vault-fetch-flat", url_args={"swhid": swhid}, request=request,
)
return _vault_response(res)
@api_route(
- r"/vault/directory/(?P[0-9a-f]+)/raw/",
- "api-1-vault-fetch-directory",
- checksum_args=["dir_id"],
+ f"/vault/flat/(?P{SWHID_RE})/raw/", "api-1-vault-fetch-flat",
)
-@api_doc("/vault/directory/raw/")
-def api_vault_fetch_directory(request, dir_id):
+@api_doc("/vault/flat/raw/")
+def api_vault_fetch_flat(request, swhid):
"""
- .. http:get:: /api/1/vault/directory/(dir_id)/raw/
+ .. http:get:: /api/1/vault/flat/(swhid)/raw/
- Fetch the cooked archive for a directory.
+ Fetch the cooked archive for a flat bundle.
- See :http:get:`/api/1/vault/directory/(dir_id)/` to get more
- details on directory cooking.
+ See :http:get:`/api/1/vault/flat/(swhid)/` to get more
+ details on 'flat' bundle cooking.
- :param string dir_id: the directory's sha1 identifier
+ :param string swhid: the SWHID of the object to cook
:resheader Content-Type: application/octet-stream
:statuscode 200: no error
- :statuscode 400: an invalid directory identifier has been provided
:statuscode 404: requested directory did not receive any cooking
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
- _, obj_id = query.parse_hash_with_algorithms_or_throws(
- dir_id, ["sha1"], "Only sha1_git is supported."
- )
res = api_lookup(
archive.vault_fetch,
- "directory",
- obj_id,
- notfound_msg="Cooked archive for directory '{}' not found.".format(dir_id),
+ "flat",
+ CoreSWHID.from_string(swhid),
+ notfound_msg=f"Cooked archive for {swhid} not found.",
request=request,
)
- fname = "{}.tar.gz".format(dir_id)
+ fname = "{}.tar.gz".format(swhid)
response = HttpResponse(res, content_type="application/gzip")
response["Content-disposition"] = "attachment; filename={}".format(fname)
return response
@api_route(
- r"/vault/revision/(?P[0-9a-f]+)/gitfast/",
- "api-1-vault-cook-revision_gitfast",
+ r"/vault/directory/(?P[0-9a-f]+)/raw/",
+ "api-1-vault-fetch-directory",
+ checksum_args=["dir_id"],
+)
+@api_doc("/vault/directory/raw/", tags=["hidden"])
+def api_vault_fetch_directory(request, dir_id):
+ """
+ .. http:get:: /api/1/vault/directory/(dir_id)/raw/
+
+ This endpoint was replaced by :http:get:`/api/1/vault/flat/(swhid)/raw/`
+ """
+ _, obj_id = query.parse_hash_with_algorithms_or_throws(
+ dir_id, ["sha1"], "Only sha1_git is supported."
+ )
+ rev_flat_raw_url = reverse(
+ "api-1-vault-fetch-flat", url_args={"swhid": f"swh:1:dir:{dir_id}"}
+ )
+ return redirect(rev_flat_raw_url)
+
+
+######################################################
+# gitfast bundles
+
+
+@api_route(
+ f"/vault/gitfast/(?P{SWHID_RE})/",
+ "api-1-vault-cook-gitfast",
methods=["GET", "POST"],
- checksum_args=["rev_id"],
throttle_scope="swh_vault_cooking",
never_cache=True,
)
-@api_doc("/vault/revision/gitfast/")
+@api_doc("/vault/gitfast/", tags=["hidden"])
@format_docstring()
-def api_vault_cook_revision_gitfast(request, rev_id):
+def api_vault_cook_gitfast(request, swhid):
"""
- .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/
- .. http:post:: /api/1/vault/revision/(rev_id)/gitfast/
+ .. http:get:: /api/1/vault/gitfast/(swhid)/
+ .. http:post:: /api/1/vault/gitfast/(swhid)/
Request the cooking of a gitfast archive for a revision or check
its cooking status.
@@ -188,7 +246,7 @@
Then to import the revision in the current directory, use::
$ git init
- $ zcat path/to/revision.gitfast.gz | git fast-import
+ $ zcat path/to/swh:1:rev:*.gitfast.gz | git fast-import
$ git checkout HEAD
:param string rev_id: the revision's sha1 identifier
@@ -199,88 +257,117 @@
:>json string fetch_url: the url from which to download the archive
once it has been cooked
- (see :http:get:`/api/1/vault/revision/(rev_id)/gitfast/raw/`)
- :>json string obj_type: the type of object to cook
- (directory or revision)
+ (see :http:get:`/api/1/vault/gitfast/(swhid)/raw/`)
:>json string progress_message: message describing the cooking task
progress
:>json number id: the cooking task id
:>json string status: the cooking task status (new/pending/done/failed)
- :>json string obj_id: the identifier of the object to cook
+ :>json string swhid: the identifier of the object to cook
:statuscode 200: no error
- :statuscode 400: an invalid revision identifier has been provided
:statuscode 404: requested directory did not receive any cooking
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
+ swhid = CoreSWHID.from_string(swhid)
+ if swhid.object_type == ObjectType.REVISION:
+ res = _dispatch_cook_progress(request, "gitfast", swhid)
+ res["fetch_url"] = reverse(
+ "api-1-vault-fetch-gitfast",
+ url_args={"swhid": str(swhid)},
+ request=request,
+ )
+ return _vault_response(res)
+ elif swhid.object_type == ObjectType.CONTENT:
+ raise BadInputExc(
+ "Content objects do not need to be cooked, "
+ "use `/api/1/content/raw/` instead."
+ )
+ elif swhid.object_type == ObjectType.DIRECTORY:
+ raise BadInputExc(
+ "Only revisions can be cooked as 'gitfast' bundles. "
+ "Use `/api/1/vault/flat/` to cook directories, as flat bundles."
+ )
+ else:
+ raise BadInputExc("Only revisions can be cooked as 'gitfast' bundles.")
+
+
+@api_route(
+ r"/vault/revision/(?P[0-9a-f]+)/gitfast/",
+ "api-1-vault-cook-revision_gitfast",
+ methods=["GET", "POST"],
+ checksum_args=["rev_id"],
+ throttle_scope="swh_vault_cooking",
+ never_cache=True,
+)
+@api_doc("/vault/revision/gitfast/", tags=["hidden"])
+@format_docstring()
+def api_vault_cook_revision_gitfast(request, rev_id):
+ """
+ .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/
+
+ This endpoint was replaced by :http:get:`/api/1/vault/gitfast/(swhid)/`
+ """
_, obj_id = query.parse_hash_with_algorithms_or_throws(
rev_id, ["sha1"], "Only sha1_git is supported."
)
- res = _dispatch_cook_progress(request, "revision_gitfast", obj_id)
+ swhid = f"swh:1:rev:{obj_id.hex()}"
+ res = _dispatch_cook_progress(request, "gitfast", CoreSWHID.from_string(swhid))
res["fetch_url"] = reverse(
- "api-1-vault-fetch-revision_gitfast",
- url_args={"rev_id": rev_id},
- request=request,
+ "api-1-vault-fetch-gitfast", url_args={"swhid": swhid}, request=request,
)
return _vault_response(res)
@api_route(
- r"/vault/revision/(?P[0-9a-f]+)/gitfast/raw/",
- "api-1-vault-fetch-revision_gitfast",
- checksum_args=["rev_id"],
+ f"/vault/gitfast/(?P{SWHID_RE})/raw/", "api-1-vault-fetch-gitfast",
)
-@api_doc("/vault/revision/gitfast/raw/")
-def api_vault_fetch_revision_gitfast(request, rev_id):
+@api_doc("/vault/gitfast/raw/")
+def api_vault_fetch_revision_gitfast(request, swhid):
"""
- .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/raw/
+ .. http:get:: /api/1/vault/gitfast/(swhid)/raw/
Fetch the cooked gitfast archive for a revision.
- See :http:get:`/api/1/vault/revision/(rev_id)/gitfast/` to get more
- details on directory cooking.
+ See :http:get:`/api/1/vault/gitfast/(swhid)/` to get more
+ details on gitfast cooking.
:param string rev_id: the revision's sha1 identifier
:resheader Content-Type: application/octet-stream
:statuscode 200: no error
- :statuscode 400: an invalid revision identifier has been provided
:statuscode 404: requested directory did not receive any cooking
request yet (in case of GET) or can not be found in the archive
(in case of POST)
"""
- _, obj_id = query.parse_hash_with_algorithms_or_throws(
- rev_id, ["sha1"], "Only sha1_git is supported."
- )
res = api_lookup(
archive.vault_fetch,
- "revision_gitfast",
- obj_id,
- notfound_msg="Cooked archive for revision '{}' not found.".format(rev_id),
+ "gitfast",
+ CoreSWHID.from_string(swhid),
+ notfound_msg="Cooked archive for {} not found.".format(swhid),
request=request,
)
- fname = "{}.gitfast.gz".format(rev_id)
+ fname = "{}.gitfast.gz".format(swhid)
response = HttpResponse(res, content_type="application/gzip")
response["Content-disposition"] = "attachment; filename={}".format(fname)
return response
@api_route(
- r"/vault/revision_gitfast/(?P[0-9a-f]+)/raw/",
- "api-1-vault-revision_gitfast-raw",
+ r"/vault/revision/(?P[0-9a-f]+)/gitfast/raw/",
+ "api-1-vault-fetch-revision_gitfast",
checksum_args=["rev_id"],
)
@api_doc("/vault/revision_gitfast/raw/", tags=["hidden"])
def _api_vault_revision_gitfast_raw(request, rev_id):
"""
- The vault backend sends an email containing an invalid url to fetch a
- gitfast archive. So setup a redirection to the correct one as a temporary
- workaround.
+ .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/raw/
+
+ This endpoint was replaced by :http:get:`/api/1/vault/gitfast/(swhid)/raw/`
"""
rev_gitfast_raw_url = reverse(
- "api-1-vault-fetch-revision_gitfast", url_args={"rev_id": rev_id}
+ "api-1-vault-fetch-gitfast", url_args={"swhid": f"swh:1:rev:{rev_id}"}
)
return redirect(rev_gitfast_raw_url)
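
A client-side sketch (not from this patch) of how the new SWHID-based cooking endpoints defined above can be driven; the helper name and polling interval are illustrative, and error/CSRF handling is omitted:

    // request cooking of a flat bundle and poll until it finishes
    async function cookFlatBundle(swhid) {
      // e.g. swhid = 'swh:1:dir:cd19126d815470b28919d64b2a8e6a3e37f900dd'
      const cookUrl = `/api/1/vault/flat/${swhid}/`;
      // POST creates (or re-creates) the cooking task
      let task = await (await fetch(cookUrl, {method: 'POST'})).json();
      // GET on the same URL reports progress: new / pending / done / failed
      while (task.status !== 'done' && task.status !== 'failed') {
        await new Promise(resolve => setTimeout(resolve, 2000));
        task = await (await fetch(cookUrl)).json();
      }
      // once done, the archive is available at task.fetch_url
      // (i.e. /api/1/vault/flat/(swhid)/raw/); gitfast bundles work the same
      // way via /api/1/vault/gitfast/(swhid)/.
      return task;
    }
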
diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py
--- a/swh/web/browse/snapshot_context.py
+++ b/swh/web/browse/snapshot_context.py
@@ -839,9 +839,9 @@
vault_cooking = {
"directory_context": True,
- "directory_id": sha1_git,
+ "directory_swhid": f"swh:1:dir:{sha1_git}",
"revision_context": True,
- "revision_id": revision_id,
+ "revision_swhid": f"swh:1:rev:{revision_id}",
}
swhids_info = get_swhids_info(swh_objects, snapshot_context, dir_metadata)
diff --git a/swh/web/browse/urls.py b/swh/web/browse/urls.py
--- a/swh/web/browse/urls.py
+++ b/swh/web/browse/urls.py
@@ -54,7 +54,11 @@
url(r"^vault/$", _browse_vault_view, name="browse-vault"),
# for backward compatibility
url(r"^origin/save/$", _browse_origin_save_view, name="browse-origin-save"),
- url(r"^(?Pswh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$", swhid_browse),
+ url(
+ r"^(?Pswh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$",
+ swhid_browse,
+ name="browse-swhid",
+ ),
]
urlpatterns += BrowseUrls.get_url_patterns()
diff --git a/swh/web/browse/views/directory.py b/swh/web/browse/views/directory.py
--- a/swh/web/browse/views/directory.py
+++ b/swh/web/browse/views/directory.py
@@ -155,9 +155,9 @@
vault_cooking = {
"directory_context": True,
- "directory_id": sha1_git,
+ "directory_swhid": f"swh:1:dir:{sha1_git}",
"revision_context": False,
- "revision_id": None,
+ "revision_swhid": None,
}
swh_objects = [SWHObjectInfo(object_type=DIRECTORY, object_id=sha1_git)]
diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py
--- a/swh/web/browse/views/release.py
+++ b/swh/web/browse/views/release.py
@@ -119,9 +119,9 @@
rev_directory = revision["directory"]
vault_cooking = {
"directory_context": True,
- "directory_id": rev_directory,
+ "directory_swhid": f"swh:1:dir:{rev_directory}",
"revision_context": True,
- "revision_id": release["target"],
+ "revision_swhid": f"swh:1:rev:{release['target']}",
}
swh_objects.append(
SWHObjectInfo(object_type=REVISION, object_id=release["target"])
@@ -143,9 +143,9 @@
archive.lookup_directory(release["target"])
vault_cooking = {
"directory_context": True,
- "directory_id": release["target"],
+ "directory_swhid": f"swh:1:dir:{release['target']}",
"revision_context": False,
- "revision_id": None,
+ "revision_swhid": None,
}
swh_objects.append(
SWHObjectInfo(object_type=DIRECTORY, object_id=release["target"])
diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py
--- a/swh/web/browse/views/revision.py
+++ b/swh/web/browse/views/revision.py
@@ -444,9 +444,9 @@
vault_cooking = {
"directory_context": False,
- "directory_id": None,
+ "directory_swhid": None,
"revision_context": True,
- "revision_id": sha1_git,
+ "revision_swhid": f"swh:1:rev:{sha1_git}",
}
swh_objects = [SWHObjectInfo(object_type=REVISION, object_id=sha1_git)]
@@ -529,7 +529,7 @@
}
vault_cooking["directory_context"] = True
- vault_cooking["directory_id"] = dir_id
+ vault_cooking["directory_swhid"] = f"swh:1:dir:{dir_id}"
swh_objects.append(SWHObjectInfo(object_type=DIRECTORY, object_id=dir_id))
diff --git a/swh/web/common/archive.py b/swh/web/common/archive.py
--- a/swh/web/common/archive.py
+++ b/swh/web/common/archive.py
@@ -11,7 +11,14 @@
from urllib.parse import urlparse
from swh.model import hashutil
-from swh.model.identifiers import CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT
+from swh.model.identifiers import (
+ CONTENT,
+ DIRECTORY,
+ RELEASE,
+ REVISION,
+ SNAPSHOT,
+ CoreSWHID,
+)
from swh.model.model import OriginVisit, Revision
from swh.storage.algos import diff, revisions_walker
from swh.storage.algos.origin import origin_get_latest_visit_status
@@ -1234,29 +1241,29 @@
return (rev["id"], lookup_directory_with_revision(rev["id"], path, with_data))
-def _vault_request(vault_fn, *args, **kwargs):
+def _vault_request(vault_fn, bundle_type: str, swhid: CoreSWHID, **kwargs):
try:
- return vault_fn(*args, **kwargs)
+ return vault_fn(bundle_type, swhid, **kwargs)
except VaultNotFoundExc:
return None
-def vault_cook(obj_type, obj_id, email=None):
+def vault_cook(bundle_type: str, swhid: CoreSWHID, email=None):
"""Cook a vault bundle.
"""
- return _vault_request(vault.cook, obj_type, obj_id, email=email)
+ return _vault_request(vault.cook, bundle_type, swhid, email=email)
-def vault_fetch(obj_type, obj_id):
+def vault_fetch(bundle_type: str, swhid: CoreSWHID):
"""Fetch a vault bundle.
"""
- return _vault_request(vault.fetch, obj_type, obj_id)
+ return _vault_request(vault.fetch, bundle_type, swhid)
-def vault_progress(obj_type, obj_id):
+def vault_progress(bundle_type: str, swhid: CoreSWHID):
"""Get the current progress of a vault bundle.
"""
- return _vault_request(vault.progress, obj_type, obj_id)
+ return _vault_request(vault.progress, bundle_type, swhid)
def diff_revision(rev_id):
diff --git a/swh/web/templates/includes/vault-create-tasks.html b/swh/web/templates/includes/vault-create-tasks.html
--- a/swh/web/templates/includes/vault-create-tasks.html
+++ b/swh/web/templates/includes/vault-create-tasks.html
@@ -15,20 +15,20 @@
{% if vault_cooking.directory_context %}
-
{% else %}
-
+
Download
@@ -48,7 +48,7 @@
- You have requested the cooking of the directory with identifier {{ vault_cooking.directory_id }}
+ You have requested the cooking of the directory with identifier {{ vault_cooking.directory_swhid }}
into a standard tar.gz archive.
@@ -63,7 +63,7 @@
@@ -80,7 +80,7 @@
- You have requested the download of the directory with identifier {{ vault_cooking.directory_id }}
+ You have requested the download of the directory with identifier {{ vault_cooking.directory_swhid }}
as a standard tar.gz archive.
@@ -89,7 +89,7 @@
@@ -106,7 +106,7 @@
- You have requested the cooking of the history heading to revision with identifier {{ vault_cooking.revision_id }}
+ You have requested the cooking of the history heading to revision with identifier {{ vault_cooking.revision_swhid }}
into a git fast-import archive.
@@ -121,7 +121,7 @@
@@ -138,7 +138,7 @@
- You have requested the download of the history heading to revision with identifier {{ vault_cooking.revision_id }}
+ You have requested the download of the history heading to revision with identifier {{ vault_cooking.revision_swhid }}
as a git fast-import archive.