diff --git a/assets/src/bundles/vault/vault-create-tasks.js b/assets/src/bundles/vault/vault-create-tasks.js index e1aa4062..420b92cf 100644 --- a/assets/src/bundles/vault/vault-create-tasks.js +++ b/assets/src/bundles/vault/vault-create-tasks.js @@ -1,155 +1,155 @@ /** * Copyright (C) 2018-2019 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError, csrfPost, htmlAlert} from 'utils/functions'; const alertStyle = { 'position': 'fixed', 'left': '1rem', 'bottom': '1rem', 'z-index': '100000' }; -export async function vaultRequest(objectType, objectId) { +export async function vaultRequest(objectType, swhid) { let vaultUrl; if (objectType === 'directory') { - vaultUrl = Urls.api_1_vault_cook_directory(objectId); + vaultUrl = Urls.api_1_vault_cook_flat(swhid); } else { - vaultUrl = Urls.api_1_vault_cook_revision_gitfast(objectId); + vaultUrl = Urls.api_1_vault_cook_gitfast(swhid); } // check if object has already been cooked const response = await fetch(vaultUrl); const data = await response.json(); // object needs to be cooked if (data.exception === 'NotFoundExc' || data.status === 'failed') { // if last cooking has failed, remove previous task info from localStorage // in order to force the recooking of the object - swh.vault.removeCookingTaskInfo([objectId]); + swh.vault.removeCookingTaskInfo([swhid]); $(`#vault-cook-${objectType}-modal`).modal('show'); // object has been cooked and should be in the vault cache, // it will be asked to cook it again if it is not } else if (data.status === 'done') { $(`#vault-fetch-${objectType}-modal`).modal('show'); } else { const cookingServiceDownAlert = $(htmlAlert('danger', 'Archive cooking service is currently experiencing issues.
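<br/>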
' + 'Please try again later.', true)); cookingServiceDownAlert.css(alertStyle); $('body').append(cookingServiceDownAlert); } } -async function addVaultCookingTask(cookingTask) { +async function addVaultCookingTask(objectType, cookingTask) { const swhidsContext = swh.webapp.getSwhIdsContext(); - cookingTask.origin = swhidsContext[cookingTask.object_type].context.origin; - cookingTask.path = swhidsContext[cookingTask.object_type].context.path; - cookingTask.browse_url = swhidsContext[cookingTask.object_type].swhid_with_context_url; + cookingTask.origin = swhidsContext[objectType].context.origin; + cookingTask.path = swhidsContext[objectType].context.path; + cookingTask.browse_url = swhidsContext[objectType].swhid_with_context_url; if (!cookingTask.browse_url) { - cookingTask.browse_url = swhidsContext[cookingTask.object_type].swhid_url; + cookingTask.browse_url = swhidsContext[objectType].swhid_url; } let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); if (!vaultCookingTasks) { vaultCookingTasks = []; } if (vaultCookingTasks.find(val => { - return val.object_type === cookingTask.object_type && - val.object_id === cookingTask.object_id; + return val.bundle_type === cookingTask.bundle_type && + val.swhid === cookingTask.swhid; }) === undefined) { let cookingUrl; - if (cookingTask.object_type === 'directory') { - cookingUrl = Urls.api_1_vault_cook_directory(cookingTask.object_id); + if (cookingTask.bundle_type === 'flat') { + cookingUrl = Urls.api_1_vault_cook_flat(cookingTask.swhid); } else { - cookingUrl = Urls.api_1_vault_cook_revision_gitfast(cookingTask.object_id); + cookingUrl = Urls.api_1_vault_cook_gitfast(cookingTask.swhid); } if (cookingTask.email) { cookingUrl += '?email=' + cookingTask.email; } try { const response = await csrfPost(cookingUrl); handleFetchError(response); vaultCookingTasks.push(cookingTask); localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); $('#vault-cook-directory-modal').modal('hide'); $('#vault-cook-revision-modal').modal('hide'); const cookingTaskCreatedAlert = $(htmlAlert('success', 'Archive cooking request successfully submitted.
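<br/>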
' + `Go to the Downloads page ` + 'to get the download link once it is ready.', true)); cookingTaskCreatedAlert.css(alertStyle); $('body').append(cookingTaskCreatedAlert); } catch (_) { $('#vault-cook-directory-modal').modal('hide'); $('#vault-cook-revision-modal').modal('hide'); const cookingTaskFailedAlert = $(htmlAlert('danger', 'Archive cooking request submission failed.', true)); cookingTaskFailedAlert.css(alertStyle); $('body').append(cookingTaskFailedAlert); } } } function validateEmail(email) { const re = /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; return re.test(String(email).toLowerCase()); } -export function cookDirectoryArchive(directoryId) { +export function cookDirectoryArchive(swhid) { const email = $('#swh-vault-directory-email').val().trim(); if (!email || validateEmail(email)) { const cookingTask = { - 'object_type': 'directory', - 'object_id': directoryId, + 'bundle_type': 'flat', + 'swhid': swhid, 'email': email, 'status': 'new' }; - addVaultCookingTask(cookingTask); + addVaultCookingTask('directory', cookingTask); } else { $('#invalid-email-modal').modal('show'); } } -export async function fetchDirectoryArchive(directoryId) { +export async function fetchDirectoryArchive(directorySwhid) { $('#vault-fetch-directory-modal').modal('hide'); - const vaultUrl = Urls.api_1_vault_cook_directory(directoryId); + const vaultUrl = Urls.api_1_vault_cook_flat(directorySwhid); const response = await fetch(vaultUrl); const data = await response.json(); swh.vault.fetchCookedObject(data.fetch_url); } export function cookRevisionArchive(revisionId) { const email = $('#swh-vault-revision-email').val().trim(); if (!email || validateEmail(email)) { const cookingTask = { - 'object_type': 'revision', - 'object_id': revisionId, + 'bundle_type': 'gitfast', + 'swhid': revisionId, 'email': email, 'status': 'new' }; - addVaultCookingTask(cookingTask); + addVaultCookingTask('revision', cookingTask); } else { $('#invalid-email-modal').modal('show'); } } -export async function fetchRevisionArchive(revisionId) { +export async function fetchRevisionArchive(revisionSwhid) { $('#vault-fetch-directory-modal').modal('hide'); - const vaultUrl = Urls.api_1_vault_cook_revision_gitfast(revisionId); + const vaultUrl = Urls.api_1_vault_cook_gitfast(revisionSwhid); const response = await fetch(vaultUrl); const data = await response.json(); swh.vault.fetchCookedObject(data.fetch_url); } diff --git a/assets/src/bundles/vault/vault-table-row.ejs b/assets/src/bundles/vault/vault-table-row.ejs index fccf9b3b..e40d2e5d 100644 --- a/assets/src/bundles/vault/vault-table-row.ejs +++ b/assets/src/bundles/vault/vault-table-row.ejs @@ -1,56 +1,56 @@ <%# Copyright (C) 2020 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information %> <% if (cookingTask.object_type === 'directory') { %> - +$ tar xvzf <%= cookingTask.swhid %>.tar.gz"> <% } else { %> - +$ zcat <%= cookingTask.swhid %>.gitfast.gz | git fast-import"> <% } %>
+ id="vault-task-toggle-selection-<%= cookingTask.swhid %>"/> <% if (cookingTask.origin) { %> <%= decodeURIComponent(cookingTask.origin) %> <% } else { %> unknown <% } %> <%= cookingTask.object_type %> - - id: <%= cookingTask.object_id %> + + id: <%= cookingTask.swhid %> <% if (cookingTask.path) { %>
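<br/>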
path: <%= cookingTask.path %> <% } %> <%- progressBar.outerHTML %> <% if (cookingTask.status === 'done') { %> <% } %> - \ No newline at end of file + diff --git a/assets/src/bundles/vault/vault-ui.js b/assets/src/bundles/vault/vault-ui.js index 9cd669c7..10373703 100644 --- a/assets/src/bundles/vault/vault-ui.js +++ b/assets/src/bundles/vault/vault-ui.js @@ -1,241 +1,256 @@ /** * Copyright (C) 2018-2019 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError, handleFetchErrors, csrfPost} from 'utils/functions'; import vaultTableRowTemplate from './vault-table-row.ejs'; const progress = `
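<div class="progress">
  <div class="progress-bar progress-bar-success progress-bar-striped"
       role="progressbar" aria-valuenow="100" aria-valuemin="0"
       aria-valuemax="100" style="width: 100%;height: 100%;">
  </div>
</div>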
;`; const pollingInterval = 5000; let checkVaultId; function updateProgressBar(progressBar, cookingTask) { if (cookingTask.status === 'new') { progressBar.css('background-color', 'rgba(128, 128, 128, 0.5)'); } else if (cookingTask.status === 'pending') { progressBar.css('background-color', 'rgba(0, 0, 255, 0.5)'); } else if (cookingTask.status === 'done') { progressBar.css('background-color', '#5cb85c'); } else if (cookingTask.status === 'failed') { progressBar.css('background-color', 'rgba(255, 0, 0, 0.5)'); progressBar.css('background-image', 'none'); } progressBar.text(cookingTask.progress_message || cookingTask.status); if (cookingTask.status === 'new' || cookingTask.status === 'pending') { progressBar.addClass('progress-bar-animated'); } else { progressBar.removeClass('progress-bar-striped'); } } let recookTask; // called when the user wants to download a cooked archive export async function fetchCookedObject(fetchUrl) { recookTask = null; // first, check if the link is still available from the vault const response = await fetch(fetchUrl); // link is still alive, proceed to download if (response.ok) { $('#vault-fetch-iframe').attr('src', fetchUrl); // link is dead } else { // get the associated cooking task const vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); for (let i = 0; i < vaultCookingTasks.length; ++i) { if (vaultCookingTasks[i].fetch_url === fetchUrl) { recookTask = vaultCookingTasks[i]; break; } } // display a modal asking the user if he wants to recook the archive $('#vault-recook-object-modal').modal('show'); } } // called when the user wants to recook an archive // for which the download link is not available anymore export async function recookObject() { if (recookTask) { // stop cooking tasks status polling clearTimeout(checkVaultId); // build cook request url let cookingUrl; - if (recookTask.object_type === 'directory') { - cookingUrl = Urls.api_1_vault_cook_directory(recookTask.object_id); + if (recookTask.bundle_type === 'flat') { + cookingUrl = Urls.api_1_vault_cook_flat(recookTask.swhid); } else { - cookingUrl = Urls.api_1_vault_cook_revision_gitfast(recookTask.object_id); + cookingUrl = Urls.api_1_vault_cook_gitfast(recookTask.swhid); } if (recookTask.email) { cookingUrl += '?email=' + recookTask.email; } try { // request archive cooking const response = await csrfPost(cookingUrl); handleFetchError(response); // update task status recookTask.status = 'new'; const vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); for (let i = 0; i < vaultCookingTasks.length; ++i) { - if (vaultCookingTasks[i].object_id === recookTask.object_id) { + if (vaultCookingTasks[i].swhid === recookTask.swhid) { vaultCookingTasks[i] = recookTask; break; } } // save updated tasks to local storage localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); - // restart cooking tasks status polling - checkVaultCookingTasks(); // hide recook archive modal $('#vault-recook-object-modal').modal('hide'); + // restart cooking tasks status polling + await checkVaultCookingTasks(); } catch (_) { // something went wrong - checkVaultCookingTasks(); $('#vault-recook-object-modal').modal('hide'); + await checkVaultCookingTasks(); } } } async function checkVaultCookingTasks() { const vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); if (!vaultCookingTasks || vaultCookingTasks.length === 0) { $('.swh-vault-table tbody tr').remove(); checkVaultId = setTimeout(checkVaultCookingTasks, 
pollingInterval); return; } const cookingTaskRequests = []; const tasks = {}; const currentObjectIds = []; for (let i = 0; i < vaultCookingTasks.length; ++i) { const cookingTask = vaultCookingTasks[i]; - currentObjectIds.push(cookingTask.object_id); - tasks[cookingTask.object_id] = cookingTask; + + if (typeof cookingTask.object_type !== 'undefined') { + // Legacy cooking task, upgrade it to the new schema + if (cookingTask.object_type === 'directory') { + cookingTask.swhid = `swh:1:dir:${cookingTask.object_id}`; + cookingTask.bundle_type = 'flat'; + } else if (cookingTask.object_type === 'revision') { + cookingTask.swhid = `swh:1:rev:${cookingTask.object_id}`; + cookingTask.bundle_type = 'gitfast'; + } else { + // Log to the console + Sentry + console.error(`Unexpected cookingTask.object_type: ${cookingTask.object_type}`); + // Ignore it for now and hope a future version will fix it + continue; + } + delete cookingTask.object_type; + delete cookingTask.object_id; + } + + currentObjectIds.push(cookingTask.swhid); + tasks[cookingTask.swhid] = cookingTask; let cookingUrl; - if (cookingTask.object_type === 'directory') { - cookingUrl = Urls.api_1_vault_cook_directory(cookingTask.object_id); + if (cookingTask.bundle_type === 'flat') { + cookingUrl = Urls.api_1_vault_cook_flat(cookingTask.swhid); } else { - cookingUrl = Urls.api_1_vault_cook_revision_gitfast(cookingTask.object_id); + cookingUrl = Urls.api_1_vault_cook_gitfast(cookingTask.swhid); } if (cookingTask.status !== 'done' && cookingTask.status !== 'failed') { cookingTaskRequests.push(fetch(cookingUrl)); } } $('.swh-vault-table tbody tr').each((i, row) => { - const objectId = $(row).find('.vault-object-info').data('object-id'); - if ($.inArray(objectId, currentObjectIds) === -1) { + const swhid = $(row).find('.vault-object-info').data('swhid'); + if ($.inArray(swhid, currentObjectIds) === -1) { $(row).remove(); } }); try { const responses = await Promise.all(cookingTaskRequests); handleFetchErrors(responses); const cookingTasks = await Promise.all(responses.map(r => r.json())); const table = $('#vault-cooking-tasks tbody'); for (let i = 0; i < cookingTasks.length; ++i) { - const cookingTask = tasks[cookingTasks[i].obj_id]; + const cookingTask = tasks[cookingTasks[i].swhid]; cookingTask.status = cookingTasks[i].status; cookingTask.fetch_url = cookingTasks[i].fetch_url; cookingTask.progress_message = cookingTasks[i].progress_message; } for (let i = 0; i < vaultCookingTasks.length; ++i) { const cookingTask = vaultCookingTasks[i]; - const rowTask = $(`#vault-task-${cookingTask.object_id}`); + const rowTask = $(`#vault-task-${CSS.escape(cookingTask.swhid)}`); if (!rowTask.length) { let browseUrl = cookingTask.browse_url; if (!browseUrl) { - if (cookingTask.object_type === 'directory') { - browseUrl = Urls.browse_directory(cookingTask.object_id); - } else { - browseUrl = Urls.browse_revision(cookingTask.object_id); - } + browseUrl = Urls.browse_swhid(cookingTask.swhid); } const progressBar = $.parseHTML(progress)[0]; const progressBarContent = $(progressBar).find('.progress-bar'); updateProgressBar(progressBarContent, cookingTask); table.prepend(vaultTableRowTemplate({ browseUrl: browseUrl, cookingTask: cookingTask, progressBar: progressBar, Urls: Urls, swh: swh })); } else { const progressBar = rowTask.find('.progress-bar'); updateProgressBar(progressBar, cookingTask); const downloadLink = rowTask.find('.vault-dl-link'); if (cookingTask.status === 'done') { downloadLink[0].innerHTML = ''; } else { downloadLink[0].innerHTML = ''; } } } 
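// persist the updated task statuses and schedule the next polling round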
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval); } catch (error) { console.log('Error when fetching vault cooking tasks:', error); } } export function removeCookingTaskInfo(tasksToRemove) { let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); if (!vaultCookingTasks) { return; } vaultCookingTasks = $.grep(vaultCookingTasks, task => { - return $.inArray(task.object_id, tasksToRemove) === -1; + return $.inArray(task.swhid, tasksToRemove) === -1; }); localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); } export function initUi() { $('#vault-tasks-toggle-selection').change(event => { $('.vault-task-toggle-selection').prop('checked', event.currentTarget.checked); }); $('#vault-remove-tasks').click(() => { clearTimeout(checkVaultId); const tasksToRemove = []; $('.swh-vault-table tbody tr').each((i, row) => { const taskSelected = $(row).find('.vault-task-toggle-selection').prop('checked'); if (taskSelected) { - const objectId = $(row).find('.vault-object-info').data('object-id'); - tasksToRemove.push(objectId); + const swhid = $(row).find('.vault-object-info').data('swhid'); + tasksToRemove.push(swhid); $(row).remove(); } }); removeCookingTaskInfo(tasksToRemove); $('#vault-tasks-toggle-selection').prop('checked', false); checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval); }); checkVaultCookingTasks(); window.onfocus = () => { clearTimeout(checkVaultId); checkVaultCookingTasks(); }; } diff --git a/cypress/fixtures/cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz b/cypress/fixtures/swh:1:dir:cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz similarity index 100% rename from cypress/fixtures/cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz rename to cypress/fixtures/swh:1:dir:cd19126d815470b28919d64b2a8e6a3e37f900dd.tar.gz diff --git a/cypress/fixtures/1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz b/cypress/fixtures/swh:1:rev:1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz similarity index 100% rename from cypress/fixtures/1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz rename to cypress/fixtures/swh:1:rev:1c480a4573d2a003fc2630c21c2b25829de49972.gitfast.gz diff --git a/cypress/integration/vault.spec.js b/cypress/integration/vault.spec.js index 982897cc..f83388d0 100644 --- a/cypress/integration/vault.spec.js +++ b/cypress/integration/vault.spec.js @@ -1,527 +1,547 @@ /** * Copyright (C) 2019-2021 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ const progressbarColors = { 'new': 'rgba(128, 128, 128, 0.5)', 'pending': 'rgba(0, 0, 255, 0.5)', 'done': 'rgb(92, 184, 92)' }; function checkVaultCookingTask(objectType) { cy.contains('button', 'Download') .click(); cy.contains('.dropdown-item', objectType) .click(); cy.wait('@checkVaultCookingTask'); } +function getVaultItemList() { + return JSON.parse(window.localStorage.getItem('swh-vault-cooking-tasks')); +} + function updateVaultItemList(vaultUrl, vaultItems) { cy.visit(vaultUrl) .then(() => { // Add uncooked task to localStorage // which updates it in vault items list window.localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultItems)); }); } -// Mocks API response : /api/1/vault/(:objectType)/(:hash) -// objectType : {'directory', 'revision'} -function 
genVaultCookingResponse(objectType, objectId, status, message, fetchUrl) { +// Mocks API response : /api/1/vault/(:bundleType)/(:swhid) +// bundleType : {'flat', 'gitfast'} +function genVaultCookingResponse(bundleType, swhid, status, message, fetchUrl) { return { - 'obj_type': objectType, + 'bundle_type': bundleType, 'id': 1, 'progress_message': message, 'status': status, - 'obj_id': objectId, + 'swhid': swhid, 'fetch_url': fetchUrl }; }; // Tests progressbar color, status // And status in localStorage function testStatus(taskId, color, statusMsg, status) { - cy.get(`.swh-vault-table #vault-task-${taskId}`) + cy.get(`.swh-vault-table #vault-task-${CSS.escape(taskId)}`) .should('be.visible') .find('.progress-bar') .should('be.visible') .and('have.css', 'background-color', color) .and('contain', statusMsg) .then(() => { // Vault item with object_id as taskId should exist in localStorage - const currentVaultItems = JSON.parse(window.localStorage.getItem('swh-vault-cooking-tasks')); - const vaultItem = currentVaultItems.find(obj => obj.object_id === taskId); + const currentVaultItems = getVaultItemList(); + const vaultItem = currentVaultItems.find(obj => obj.swhid === taskId); assert.isNotNull(vaultItem); assert.strictEqual(vaultItem.status, status); }); } describe('Vault Cooking User Interface Tests', function() { before(function() { const dirInfo = this.origin[0].directory[0]; - this.directory = dirInfo.id; + this.directory = `swh:1:dir:${dirInfo.id}`; this.directoryUrl = this.Urls.browse_origin_directory() + `?origin_url=${this.origin[0].url}&path=${dirInfo.path}`; - this.vaultDirectoryUrl = this.Urls.api_1_vault_cook_directory(this.directory); - this.vaultFetchDirectoryUrl = this.Urls.api_1_vault_fetch_directory(this.directory); + this.vaultDirectoryUrl = this.Urls.api_1_vault_cook_flat(this.directory); + this.vaultFetchDirectoryUrl = this.Urls.api_1_vault_fetch_flat(this.directory); - this.revision = this.origin[1].revisions[0]; - this.revisionUrl = this.Urls.browse_revision(this.revision); - this.vaultRevisionUrl = this.Urls.api_1_vault_cook_revision_gitfast(this.revision); - this.vaultFetchRevisionUrl = this.Urls.api_1_vault_fetch_revision_gitfast(this.revision); + this.revisionId = this.origin[1].revisions[0]; + this.revision = `swh:1:rev:${this.revisionId}`; + this.revisionUrl = this.Urls.browse_revision(this.revisionId); + this.vaultRevisionUrl = this.Urls.api_1_vault_cook_gitfast(this.revision); + this.vaultFetchRevisionUrl = this.Urls.api_1_vault_fetch_gitfast(this.revision); const release = this.origin[1].release; this.releaseUrl = this.Urls.browse_release(release.id) + `?origin_url=${this.origin[1].url}`; - this.vaultReleaseDirectoryUrl = this.Urls.api_1_vault_cook_directory(release.directory); + this.vaultReleaseDirectoryUrl = this.Urls.api_1_vault_cook_flat(`swh:1:dir:${release.directory}`); }); beforeEach(function() { // For some reason, this gets reset if we define it in the before() hook, // so we need to define it here this.vaultItems = [ + { + 'bundle_type': 'gitfast', + 'swhid': this.revision, + 'email': '', + 'status': 'done', + 'fetch_url': `/api/1/vault/gitfast/${this.revision}/raw/`, + 'progress_message': null + } + ]; + this.legacyVaultItems = [ { 'object_type': 'revision', - 'object_id': this.revision, + 'object_id': this.revisionId, 'email': '', 'status': 'done', - 'fetch_url': `/api/1/vault/revision/${this.revision}/gitfast/raw/`, + 'fetch_url': `/api/1/vault/revision/${this.revisionId}/gitfast/raw/`, 'progress_message': null } ]; this.genVaultDirCookingResponse = 
(status, message = null) => { - return genVaultCookingResponse('directory', this.directory, status, + return genVaultCookingResponse('flat', this.directory, status, message, this.vaultFetchDirectoryUrl); }; this.genVaultRevCookingResponse = (status, message = null) => { - return genVaultCookingResponse('revision', this.revision, status, + return genVaultCookingResponse('gitfast', this.revision, status, message, this.vaultFetchRevisionUrl); }; }); it('should report an error when vault service is experiencing issues', function() { // Browse a directory cy.visit(this.directoryUrl); // Stub responses when requesting the vault API to simulate // an internal server error cy.intercept(this.vaultDirectoryUrl, { body: {'exception': 'APIError'}, statusCode: 500 }).as('checkVaultCookingTask'); cy.contains('button', 'Download') .click(); // Check error alert is displayed cy.get('.alert-danger') .should('be.visible') .should('contain', 'Archive cooking service is currently experiencing issues.'); }); it('should report an error when a cooking task creation failed', function() { // Browse a directory cy.visit(this.directoryUrl); // Stub responses when requesting the vault API to simulate // a task can not be created cy.intercept('GET', this.vaultDirectoryUrl, { body: {'exception': 'NotFoundExc'} }).as('checkVaultCookingTask'); cy.intercept('POST', this.vaultDirectoryUrl, { body: {'exception': 'ValueError'}, statusCode: 500 }).as('createVaultCookingTask'); cy.contains('button', 'Download') .click(); // Create a vault cooking task through the GUI cy.get('.modal-dialog') .contains('button:visible', 'Ok') .click(); cy.wait('@createVaultCookingTask'); // Check error alert is displayed cy.get('.alert-danger') .should('be.visible') .should('contain', 'Archive cooking request submission failed.'); }); it('should display previous cooking tasks', function() { updateVaultItemList(this.Urls.browse_vault(), this.vaultItems); cy.visit(this.Urls.browse_vault()); - // trick to override the response of an intercepted request - // https://github.com/cypress-io/cypress/issues/9302 - cy.intercept('GET', this.vaultDirectoryUrl, req => this.genVaultDirCookingResponse('done')) - .as('checkVaultCookingTask'); + cy.contains(`#vault-task-${CSS.escape(this.revision)} button`, 'Download') + .click(); + }); - // Stub responses when requesting the vault API to simulate - // a task has been created - cy.intercept('POST', this.vaultDirectoryUrl, { - body: this.genVaultDirCookingResponse('new') - }).as('createVaultCookingTask'); + it('should display and upgrade previous cooking tasks from the legacy format', function() { + updateVaultItemList(this.Urls.browse_vault(), this.legacyVaultItems); + + // updateVaultItemList doesn't work in this test?!?! 
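+ // work around it by writing to localStorage directly before visiting the page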
+ window.localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(this.vaultItems)); - cy.contains(`#vault-task-${this.revision} button`, 'Download') + cy.visit(this.Urls.browse_vault()); + + // Check it is displayed + cy.contains(`#vault-task-${CSS.escape(this.revision)} button`, 'Download') .click(); + + // Check the LocalStorage was upgraded + expect(getVaultItemList()).to.deep.equal(this.vaultItems); }); it('should create a directory cooking task and report the success', function() { // Browse a directory cy.visit(this.directoryUrl); // Stub response to the vault API to simulate archive download cy.intercept('GET', this.vaultFetchDirectoryUrl, { fixture: `${this.directory}.tar.gz`, headers: { 'Content-disposition': `attachment; filename=${this.directory}.tar.gz`, 'Content-Type': 'application/gzip' } }).as('fetchCookedArchive'); // Stub responses when checking vault task status const checkVaulResponses = [ {'exception': 'NotFoundExc'}, this.genVaultDirCookingResponse('new'), this.genVaultDirCookingResponse('pending', 'Processing...'), this.genVaultDirCookingResponse('done') ]; // trick to override the response of an intercepted request // https://github.com/cypress-io/cypress/issues/9302 cy.intercept('GET', this.vaultDirectoryUrl, req => req.reply(checkVaulResponses.shift())) .as('checkVaultCookingTask'); // Stub responses when requesting the vault API to simulate // a task has been created cy.intercept('POST', this.vaultDirectoryUrl, { body: this.genVaultDirCookingResponse('new') }).as('createVaultCookingTask'); cy.contains('button', 'Download') .click(); cy.window().then(win => { const swhIdsContext = win.swh.webapp.getSwhIdsContext(); const browseDirectoryUrl = swhIdsContext.directory.swhid_with_context_url; // Create a vault cooking task through the GUI cy.get('.modal-dialog') .contains('button:visible', 'Ok') .click(); cy.wait('@createVaultCookingTask'); // Check success alert is displayed cy.get('.alert-success') .should('be.visible') .should('contain', 'Archive cooking request successfully submitted.'); // Go to Downloads page cy.visit(this.Urls.browse_vault()); cy.wait('@checkVaultCookingTask').then(() => { testStatus(this.directory, progressbarColors['new'], 'new', 'new'); }); cy.wait('@checkVaultCookingTask').then(() => { testStatus(this.directory, progressbarColors['pending'], 'Processing...', 'pending'); }); cy.wait('@checkVaultCookingTask').then(() => { testStatus(this.directory, progressbarColors['done'], 'done', 'done'); }); - cy.get(`#vault-task-${this.directory} .vault-origin a`) + cy.get(`#vault-task-${CSS.escape(this.directory)} .vault-origin a`) .should('contain', this.origin[0].url) .should('have.attr', 'href', `${this.Urls.browse_origin()}?origin_url=${this.origin[0].url}`); - cy.get(`#vault-task-${this.directory} .vault-object-info a`) + cy.get(`#vault-task-${CSS.escape(this.directory)} .vault-object-info a`) .should('have.text', this.directory) .should('have.attr', 'href', browseDirectoryUrl); - cy.get(`#vault-task-${this.directory} .vault-dl-link button`) + cy.get(`#vault-task-${CSS.escape(this.directory)} .vault-dl-link button`) .click(); cy.wait('@fetchCookedArchive').then((xhr) => { assert.isNotNull(xhr.response.body); }); }); }); it('should create a revision cooking task and report its status', function() { cy.adminLogin(); // Browse a revision cy.visit(this.revisionUrl); // Stub response to the vault API indicating to simulate archive download cy.intercept({url: this.vaultFetchRevisionUrl}, { fixture: `${this.revision}.gitfast.gz`, headers: { 
'Content-disposition': `attachment; filename=${this.revision}.gitfast.gz`, 'Content-Type': 'application/gzip' } }).as('fetchCookedArchive'); // Stub responses when checking vault task status const checkVaultResponses = [ {'exception': 'NotFoundExc'}, this.genVaultRevCookingResponse('new'), this.genVaultRevCookingResponse('pending', 'Processing...'), this.genVaultRevCookingResponse('done') ]; // trick to override the response of an intercepted request // https://github.com/cypress-io/cypress/issues/9302 cy.intercept('GET', this.vaultRevisionUrl, req => req.reply(checkVaultResponses.shift())) .as('checkVaultCookingTask'); // Stub responses when requesting the vault API to simulate // a task has been created cy.intercept('POST', this.vaultRevisionUrl, { body: this.genVaultRevCookingResponse('new') }).as('createVaultCookingTask'); // Create a vault cooking task through the GUI checkVaultCookingTask('as git'); cy.window().then(win => { const swhIdsContext = win.swh.webapp.getSwhIdsContext(); const browseRevisionUrl = swhIdsContext.revision.swhid_url; // Create a vault cooking task through the GUI cy.get('.modal-dialog') .contains('button:visible', 'Ok') .click(); cy.wait('@createVaultCookingTask'); // Check success alert is displayed cy.get('.alert-success') .should('be.visible') .should('contain', 'Archive cooking request successfully submitted.'); // Go to Downloads page cy.visit(this.Urls.browse_vault()); cy.wait('@checkVaultCookingTask').then(() => { testStatus(this.revision, progressbarColors['new'], 'new', 'new'); }); cy.wait('@checkVaultCookingTask').then(() => { testStatus(this.revision, progressbarColors['pending'], 'Processing...', 'pending'); }); cy.wait('@checkVaultCookingTask').then(() => { testStatus(this.revision, progressbarColors['done'], 'done', 'done'); }); - cy.get(`#vault-task-${this.revision} .vault-origin`) + cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-origin`) .should('have.text', 'unknown'); - cy.get(`#vault-task-${this.revision} .vault-object-info a`) + cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-object-info a`) .should('have.text', this.revision) .should('have.attr', 'href', browseRevisionUrl); - cy.get(`#vault-task-${this.revision} .vault-dl-link button`) + cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-dl-link button`) .click(); cy.wait('@fetchCookedArchive').then((xhr) => { assert.isNotNull(xhr.response.body); }); }); }); it('should create a directory cooking task from the release view', function() { // Browse a directory cy.visit(this.releaseUrl); // Stub responses when checking vault task status const checkVaultResponses = [ {'exception': 'NotFoundExc'}, this.genVaultDirCookingResponse('new') ]; // trick to override the response of an intercepted request // https://github.com/cypress-io/cypress/issues/9302 cy.intercept('GET', this.vaultReleaseDirectoryUrl, req => req.reply(checkVaultResponses.shift())) .as('checkVaultCookingTask'); // Stub responses when requesting the vault API to simulate // a task has been created cy.intercept('POST', this.vaultReleaseDirectoryUrl, { body: this.genVaultDirCookingResponse('new') }).as('createVaultCookingTask'); cy.contains('button', 'Download') .click(); // Create a vault cooking task through the GUI cy.get('.modal-dialog') .contains('button:visible', 'Ok') .click(); cy.wait('@createVaultCookingTask'); // Check success alert is displayed cy.get('.alert-success') .should('be.visible') .should('contain', 'Archive cooking request successfully submitted.'); }); it('should offer to recook an archive 
if no more available to download', function() { updateVaultItemList(this.Urls.browse_vault(), this.vaultItems); // Send 404 when fetching vault item cy.intercept({url: this.vaultFetchRevisionUrl}, { statusCode: 404, body: { 'exception': 'NotFoundExc', 'reason': `Revision with ID '${this.revision}' not found.` }, headers: { 'Content-Type': 'json' } }).as('fetchCookedArchive'); - cy.get(`#vault-task-${this.revision} .vault-dl-link button`) + cy.get(`#vault-task-${CSS.escape(this.revision)} .vault-dl-link button`) .click(); cy.wait('@fetchCookedArchive').then(() => { cy.intercept('POST', this.vaultRevisionUrl, { body: this.genVaultRevCookingResponse('new') }).as('createVaultCookingTask'); cy.intercept(this.vaultRevisionUrl, { body: this.genVaultRevCookingResponse('new') }).as('checkVaultCookingTask'); cy.get('#vault-recook-object-modal > .modal-dialog') .should('be.visible') .contains('button:visible', 'Ok') .click(); cy.wait('@checkVaultCookingTask') .then(() => { testStatus(this.revision, progressbarColors['new'], 'new', 'new'); }); }); }); it('should remove selected vault items', function() { updateVaultItemList(this.Urls.browse_vault(), this.vaultItems); - cy.get(`#vault-task-${this.revision}`) + cy.get(`#vault-task-${CSS.escape(this.revision)}`) .find('input[type="checkbox"]') .click({force: true}); cy.contains('button', 'Remove selected tasks') .click(); - cy.get(`#vault-task-${this.revision}`) + cy.get(`#vault-task-${CSS.escape(this.revision)}`) .should('not.exist'); }); it('should offer to immediately download a directory tarball if already cooked', function() { // Browse a directory cy.visit(this.directoryUrl); // Stub response to the vault API to simulate archive download cy.intercept({url: this.vaultFetchDirectoryUrl}, { fixture: `${this.directory}.tar.gz`, headers: { 'Content-disposition': `attachment; filename=${this.directory}.tar.gz`, 'Content-Type': 'application/gzip' } }).as('fetchCookedArchive'); // Stub responses when requesting the vault API to simulate // the directory tarball has already been cooked cy.intercept(this.vaultDirectoryUrl, { body: this.genVaultDirCookingResponse('done') }).as('checkVaultCookingTask'); // Create a vault cooking task through the GUI cy.contains('button', 'Download') .click(); // Start archive download through the GUI cy.get('.modal-dialog') .contains('button:visible', 'Ok') .click(); cy.wait('@fetchCookedArchive'); }); it('should offer to immediately download a revision gitfast archive if already cooked', function() { cy.adminLogin(); // Browse a directory cy.visit(this.revisionUrl); // Stub response to the vault API to simulate archive download cy.intercept({url: this.vaultFetchRevisionUrl}, { fixture: `${this.revision}.gitfast.gz`, headers: { 'Content-disposition': `attachment; filename=${this.revision}.gitfast.gz`, 'Content-Type': 'application/gzip' } }).as('fetchCookedArchive'); // Stub responses when requesting the vault API to simulate // the directory tarball has already been cooked cy.intercept(this.vaultRevisionUrl, { body: this.genVaultRevCookingResponse('done') }).as('checkVaultCookingTask'); checkVaultCookingTask('as git'); // Start archive download through the GUI cy.get('.modal-dialog') .contains('button:visible', 'Ok') .click(); cy.wait('@fetchCookedArchive'); }); it('should offer to recook an object if previous vault task failed', function() { cy.visit(this.directoryUrl); // Stub responses when requesting the vault API to simulate // the last cooking of the directory tarball has failed cy.intercept(this.vaultDirectoryUrl, { 
body: this.genVaultDirCookingResponse('failed') }).as('checkVaultCookingTask'); cy.contains('button', 'Download') .click(); // Check that recooking the directory is offered to user cy.get('.modal-dialog') .contains('button:visible', 'Ok') .should('be.visible'); }); }); diff --git a/swh/web/api/views/vault.py b/swh/web/api/views/vault.py index 9789b2a9..422ec345 100644 --- a/swh/web/api/views/vault.py +++ b/swh/web/api/views/vault.py @@ -1,286 +1,373 @@ -# Copyright (C) 2015-2020 The Software Heritage developers +# Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Any, Dict from django.http import HttpResponse from django.shortcuts import redirect -from swh.model import hashutil +from swh.model.identifiers import CoreSWHID, ObjectType from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup from swh.web.common import archive, query +from swh.web.common.exc import BadInputExc from swh.web.common.utils import reverse +###################################################### +# Common + +SWHID_RE = "swh:1:[a-z]{3}:[0-9a-z]{40}" + # XXX: a bit spaghetti. Would be better with class-based views. -def _dispatch_cook_progress(request, obj_type, obj_id): - hex_id = hashutil.hash_to_hex(obj_id) - object_name = obj_type.split("_")[0] +def _dispatch_cook_progress(request, bundle_type: str, swhid: CoreSWHID): if request.method == "GET": return api_lookup( archive.vault_progress, - obj_type, - obj_id, - notfound_msg=( - "Cooking of {} '{}' was never requested.".format(object_name, hex_id) - ), + bundle_type, + swhid, + notfound_msg=f"Cooking of {swhid} was never requested.", request=request, ) elif request.method == "POST": email = request.POST.get("email", request.GET.get("email", None)) return api_lookup( archive.vault_cook, - obj_type, - obj_id, + bundle_type, + swhid, email, - notfound_msg=("{} '{}' not found.".format(object_name.title(), hex_id)), + notfound_msg=f"{swhid} not found.", request=request, ) def _vault_response(vault_response: Dict[str, Any]) -> Dict[str, Any]: return { "fetch_url": vault_response["fetch_url"], - "obj_type": vault_response["type"], "progress_message": vault_response["progress_msg"], "id": vault_response["task_id"], "status": vault_response["task_status"], - "obj_id": vault_response["object_id"], + "swhid": str(vault_response["swhid"]), } +###################################################### +# Flat bundles + + @api_route( - r"/vault/directory/(?P[0-9a-f]+)/", - "api-1-vault-cook-directory", + f"/vault/flat/(?P{SWHID_RE})/", + "api-1-vault-cook-flat", methods=["GET", "POST"], - checksum_args=["dir_id"], throttle_scope="swh_vault_cooking", never_cache=True, ) -@api_doc("/vault/directory/") +@api_doc("/vault/flat/") @format_docstring() -def api_vault_cook_directory(request, dir_id): +def api_vault_cook_flat(request, swhid): """ - .. http:get:: /api/1/vault/directory/(dir_id)/ - .. http:post:: /api/1/vault/directory/(dir_id)/ + .. http:get:: /api/1/vault/flat/(swhid)/ + .. http:post:: /api/1/vault/flat/(swhid)/ - Request the cooking of an archive for a directory or check - its cooking status. + Request the cooking of a simple archive, typically for a directory. 
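+ A 'flat' bundle is a single tarball containing the requested directory tree.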
That endpoint enables to create a vault cooking task for a directory through a POST request or check the status of a previously created one through a GET request. Once the cooking task has been executed, the resulting archive can be downloaded using the dedicated endpoint - :http:get:`/api/1/vault/directory/(dir_id)/raw/`. + :http:get:`/api/1/vault/flat/(swhid)/raw/`. Then to extract the cooked directory in the current one, use:: - $ tar xvf path/to/directory.tar.gz + $ tar xvf path/to/swh:1:*.tar.gz - :param string dir_id: the directory's sha1 identifier + :param string swhid: the object's SWHID :query string email: e-mail to notify when the archive is ready {common_headers} :>json string fetch_url: the url from which to download the archive once it has been cooked - (see :http:get:`/api/1/vault/directory/(dir_id)/raw/`) - :>json string obj_type: the type of object to cook - (directory or revision) + (see :http:get:`/api/1/vault/flat/(swhid)/raw/`) :>json string progress_message: message describing the cooking task progress :>json number id: the cooking task id :>json string status: the cooking task status (either **new**, **pending**, **done** or **failed**) - :>json string obj_id: the identifier of the object to cook + :>json string swhid: the identifier of the object to cook :statuscode 200: no error :statuscode 400: an invalid directory identifier has been provided :statuscode 404: requested directory did not receive any cooking request yet (in case of GET) or can not be found in the archive (in case of POST) """ + swhid = CoreSWHID.from_string(swhid) + if swhid.object_type == ObjectType.DIRECTORY: + res = _dispatch_cook_progress(request, "flat", swhid) + res["fetch_url"] = reverse( + "api-1-vault-fetch-flat", url_args={"swhid": str(swhid)}, request=request, + ) + return _vault_response(res) + elif swhid.object_type == ObjectType.CONTENT: + raise BadInputExc( + "Content objects do not need to be cooked, " + "use `/api/1/content/raw/` instead." + ) + elif swhid.object_type == ObjectType.REVISION: + # TODO: support revisions too? (the vault allows it) + raise BadInputExc( + "Only directories can be cooked as 'flat' bundles. " + "Use `/api/1/vault/gitfast/` to cook revisions, as gitfast bundles." + ) + else: + raise BadInputExc("Only directories can be cooked as 'flat' bundles.") + + +@api_route( + r"/vault/directory/(?P[0-9a-f]+)/", + "api-1-vault-cook-directory", + methods=["GET", "POST"], + checksum_args=["dir_id"], + throttle_scope="swh_vault_cooking", + never_cache=True, +) +@api_doc("/vault/directory/", tags=["hidden"]) +@format_docstring() +def api_vault_cook_directory(request, dir_id): + """ + .. http:get:: /api/1/vault/directory/(dir_id)/ + + This endpoint was replaced by :http:get:`/api/1/vault/flat/(swhid)/` + """ _, obj_id = query.parse_hash_with_algorithms_or_throws( dir_id, ["sha1"], "Only sha1_git is supported." 
) - res = _dispatch_cook_progress(request, "directory", obj_id) + swhid = f"swh:1:dir:{obj_id.hex()}" + res = _dispatch_cook_progress(request, "flat", CoreSWHID.from_string(swhid)) res["fetch_url"] = reverse( - "api-1-vault-fetch-directory", url_args={"dir_id": dir_id}, request=request, + "api-1-vault-fetch-flat", url_args={"swhid": swhid}, request=request, ) return _vault_response(res) @api_route( - r"/vault/directory/(?P[0-9a-f]+)/raw/", - "api-1-vault-fetch-directory", - checksum_args=["dir_id"], + f"/vault/flat/(?P{SWHID_RE})/raw/", "api-1-vault-fetch-flat", ) -@api_doc("/vault/directory/raw/") -def api_vault_fetch_directory(request, dir_id): +@api_doc("/vault/flat/raw/") +def api_vault_fetch_flat(request, swhid): """ - .. http:get:: /api/1/vault/directory/(dir_id)/raw/ + .. http:get:: /api/1/vault/flat/(swhid)/raw/ - Fetch the cooked archive for a directory. + Fetch the cooked archive for a flat bundle. - See :http:get:`/api/1/vault/directory/(dir_id)/` to get more - details on directory cooking. + See :http:get:`/api/1/vault/flat/(swhid)/` to get more + details on 'flat' bundle cooking. - :param string dir_id: the directory's sha1 identifier + :param string swhid: the SWHID of the object to cook :resheader Content-Type: application/octet-stream :statuscode 200: no error - :statuscode 400: an invalid directory identifier has been provided :statuscode 404: requested directory did not receive any cooking request yet (in case of GET) or can not be found in the archive (in case of POST) """ - _, obj_id = query.parse_hash_with_algorithms_or_throws( - dir_id, ["sha1"], "Only sha1_git is supported." - ) res = api_lookup( archive.vault_fetch, - "directory", - obj_id, - notfound_msg="Cooked archive for directory '{}' not found.".format(dir_id), + "flat", + CoreSWHID.from_string(swhid), + notfound_msg=f"Cooked archive for {swhid} not found.", request=request, ) - fname = "{}.tar.gz".format(dir_id) + fname = "{}.tar.gz".format(swhid) response = HttpResponse(res, content_type="application/gzip") response["Content-disposition"] = "attachment; filename={}".format(fname) return response @api_route( - r"/vault/revision/(?P[0-9a-f]+)/gitfast/", - "api-1-vault-cook-revision_gitfast", + r"/vault/directory/(?P[0-9a-f]+)/raw/", + "api-1-vault-fetch-directory", + checksum_args=["dir_id"], +) +@api_doc("/vault/directory/raw/", tags=["hidden"]) +def api_vault_fetch_directory(request, dir_id): + """ + .. http:get:: /api/1/vault/directory/(dir_id)/raw/ + + This endpoint was replaced by :http:get:`/api/1/vault/flat/raw/` + """ + _, obj_id = query.parse_hash_with_algorithms_or_throws( + dir_id, ["sha1"], "Only sha1_git is supported." + ) + rev_flat_raw_url = reverse( + "api-1-vault-fetch-flat", url_args={"swhid": f"swh:1:dir:{dir_id}"} + ) + return redirect(rev_flat_raw_url) + + +###################################################### +# gitfast bundles + + +@api_route( + f"/vault/gitfast/(?P{SWHID_RE})/", + "api-1-vault-cook-gitfast", methods=["GET", "POST"], - checksum_args=["rev_id"], throttle_scope="swh_vault_cooking", never_cache=True, ) -@api_doc("/vault/revision/gitfast/") +@api_doc("/vault/gitfast/", tags=["hidden"]) @format_docstring() -def api_vault_cook_revision_gitfast(request, rev_id): +def api_vault_cook_gitfast(request, swhid): """ - .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/ - .. http:post:: /api/1/vault/revision/(rev_id)/gitfast/ + .. http:get:: /api/1/vault/gitfast/(swhid)/ + .. 
http:post:: /api/1/vault/gitfast/(swhid)/ Request the cooking of a gitfast archive for a revision or check its cooking status. That endpoint enables to create a vault cooking task for a revision through a POST request or check the status of a previously created one through a GET request. Once the cooking task has been executed, the resulting gitfast archive can be downloaded using the dedicated endpoint :http:get:`/api/1/vault/revision/(rev_id)/gitfast/raw/`. Then to import the revision in the current directory, use:: $ git init - $ zcat path/to/revision.gitfast.gz | git fast-import + $ zcat path/to/swh:1:rev:*.gitfast.gz | git fast-import $ git checkout HEAD :param string rev_id: the revision's sha1 identifier :query string email: e-mail to notify when the gitfast archive is ready {common_headers} :>json string fetch_url: the url from which to download the archive once it has been cooked - (see :http:get:`/api/1/vault/revision/(rev_id)/gitfast/raw/`) - :>json string obj_type: the type of object to cook - (directory or revision) + (see :http:get:`/api/1/vault/gitfast/(rev_id)/raw/`) :>json string progress_message: message describing the cooking task progress :>json number id: the cooking task id :>json string status: the cooking task status (new/pending/done/failed) - :>json string obj_id: the identifier of the object to cook + :>json string swhid: the identifier of the object to cook :statuscode 200: no error - :statuscode 400: an invalid revision identifier has been provided :statuscode 404: requested directory did not receive any cooking request yet (in case of GET) or can not be found in the archive (in case of POST) """ + swhid = CoreSWHID.from_string(swhid) + if swhid.object_type == ObjectType.REVISION: + res = _dispatch_cook_progress(request, "gitfast", swhid) + res["fetch_url"] = reverse( + "api-1-vault-fetch-gitfast", + url_args={"swhid": str(swhid)}, + request=request, + ) + return _vault_response(res) + elif swhid.object_type == ObjectType.CONTENT: + raise BadInputExc( + "Content objects do not need to be cooked, " + "use `/api/1/content/raw/` instead." + ) + elif swhid.object_type == ObjectType.DIRECTORY: + raise BadInputExc( + "Only revisions can be cooked as 'gitfast' bundles. " + "Use `/api/1/vault/flat/` to cook directories, as flat bundles." + ) + else: + raise BadInputExc("Only revisions can be cooked as 'gitfast' bundles.") + + +@api_route( + r"/vault/revision/(?P[0-9a-f]+)/gitfast/", + "api-1-vault-cook-revision_gitfast", + methods=["GET", "POST"], + checksum_args=["rev_id"], + throttle_scope="swh_vault_cooking", + never_cache=True, +) +@api_doc("/vault/revision/gitfast/", tags=["hidden"]) +@format_docstring() +def api_vault_cook_revision_gitfast(request, rev_id): + """ + .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/ + + This endpoint was replaced by :http:get:`/api/1/vault/gitfast/` + """ _, obj_id = query.parse_hash_with_algorithms_or_throws( rev_id, ["sha1"], "Only sha1_git is supported." 
) - res = _dispatch_cook_progress(request, "revision_gitfast", obj_id) + swhid = f"swh:1:rev:{obj_id.hex()}" + res = _dispatch_cook_progress(request, "gitfast", CoreSWHID.from_string(swhid)) res["fetch_url"] = reverse( - "api-1-vault-fetch-revision_gitfast", - url_args={"rev_id": rev_id}, - request=request, + "api-1-vault-fetch-gitfast", url_args={"swhid": swhid}, request=request, ) return _vault_response(res) @api_route( - r"/vault/revision/(?P[0-9a-f]+)/gitfast/raw/", - "api-1-vault-fetch-revision_gitfast", - checksum_args=["rev_id"], + f"/vault/gitfast/(?P{SWHID_RE})/raw/", "api-1-vault-fetch-gitfast", ) -@api_doc("/vault/revision/gitfast/raw/") -def api_vault_fetch_revision_gitfast(request, rev_id): +@api_doc("/vault/gitfast/raw/") +def api_vault_fetch_revision_gitfast(request, swhid): """ - .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/raw/ + .. http:get:: /api/1/vault/gitfast/(swhid)/raw/ Fetch the cooked gitfast archive for a revision. - See :http:get:`/api/1/vault/revision/(rev_id)/gitfast/` to get more - details on directory cooking. + See :http:get:`/api/1/vault/gitfast/(swhid)/` to get more + details on gitfast cooking. :param string rev_id: the revision's sha1 identifier :resheader Content-Type: application/octet-stream :statuscode 200: no error - :statuscode 400: an invalid revision identifier has been provided :statuscode 404: requested directory did not receive any cooking request yet (in case of GET) or can not be found in the archive (in case of POST) """ - _, obj_id = query.parse_hash_with_algorithms_or_throws( - rev_id, ["sha1"], "Only sha1_git is supported." - ) res = api_lookup( archive.vault_fetch, - "revision_gitfast", - obj_id, - notfound_msg="Cooked archive for revision '{}' not found.".format(rev_id), + "gitfast", + CoreSWHID.from_string(swhid), + notfound_msg="Cooked archive for {} not found.".format(swhid), request=request, ) - fname = "{}.gitfast.gz".format(rev_id) + fname = "{}.gitfast.gz".format(swhid) response = HttpResponse(res, content_type="application/gzip") response["Content-disposition"] = "attachment; filename={}".format(fname) return response @api_route( - r"/vault/revision_gitfast/(?P[0-9a-f]+)/raw/", - "api-1-vault-revision_gitfast-raw", + r"/vault/revision/(?P[0-9a-f]+)/gitfast/raw/", + "api-1-vault-fetch-revision_gitfast", checksum_args=["rev_id"], ) @api_doc("/vault/revision_gitfast/raw/", tags=["hidden"]) def _api_vault_revision_gitfast_raw(request, rev_id): """ - The vault backend sends an email containing an invalid url to fetch a - gitfast archive. So setup a redirection to the correct one as a temporary - workaround. + .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/raw/ + + This endpoint was replaced by :http:get:`/api/1/vault/gitfast/raw/` """ rev_gitfast_raw_url = reverse( - "api-1-vault-fetch-revision_gitfast", url_args={"rev_id": rev_id} + "api-1-vault-fetch-gitfast", url_args={"swhid": f"swh:1:rev:{rev_id}"} ) return redirect(rev_gitfast_raw_url) diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py index 76c64fc3..db5b2260 100644 --- a/swh/web/browse/snapshot_context.py +++ b/swh/web/browse/snapshot_context.py @@ -1,1470 +1,1470 @@ # Copyright (C) 2018-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information # Utility module for browsing the archive in a snapshot context. 
from collections import defaultdict from typing import Any, Dict, List, Optional, Tuple from django.core.cache import cache from django.shortcuts import render from django.template.defaultfilters import filesizeformat from django.utils.html import escape from swh.model.hashutil import hash_to_bytes from swh.model.identifiers import ( CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT, CoreSWHID, ObjectType, ) from swh.model.model import Snapshot from swh.web.browse.utils import ( content_display_max_size, format_log_entries, gen_content_link, gen_release_link, gen_revision_link, gen_revision_log_link, gen_revision_url, gen_snapshot_link, get_directory_entries, get_readme_to_display, prepare_content_for_display, request_content, ) from swh.web.common import archive, highlightjs from swh.web.common.exc import BadInputExc, NotFoundExc, http_status_code_message from swh.web.common.identifiers import get_swhids_info from swh.web.common.origin_visits import get_origin_visit from swh.web.common.typing import ( ContentMetadata, DirectoryMetadata, OriginInfo, SnapshotBranchInfo, SnapshotContext, SnapshotReleaseInfo, SWHObjectInfo, ) from swh.web.common.utils import ( format_utc_iso_date, gen_path_info, reverse, swh_object_icons, ) from swh.web.config import get_config _empty_snapshot_id = Snapshot(branches={}).id.hex() def _get_branch(branches, branch_name, snapshot_id): """ Utility function to get a specific branch from a branches list. Its purpose is to get the default HEAD branch as some software origin (e.g those with svn type) does not have it. In that latter case, check if there is a master branch instead and returns it. """ filtered_branches = [b for b in branches if b["name"] == branch_name] if filtered_branches: return filtered_branches[0] elif branch_name == "HEAD": filtered_branches = [b for b in branches if b["name"].endswith("master")] if filtered_branches: return filtered_branches[0] elif branches: return branches[0] else: # case where a large branches list has been truncated snp = archive.lookup_snapshot( snapshot_id, branches_from=branch_name, branches_count=1, target_types=["revision", "alias"], ) snp_branch, _, _ = process_snapshot_branches(snp) if snp_branch and snp_branch[0]["name"] == branch_name: branches.append(snp_branch[0]) return snp_branch[0] def _get_release(releases, release_name, snapshot_id): """ Utility function to get a specific release from a releases list. Returns None if the release can not be found in the list. """ filtered_releases = [r for r in releases if r["name"] == release_name] if filtered_releases: return filtered_releases[0] else: # case where a large branches list has been truncated try: # git origins have specific branches for releases snp = archive.lookup_snapshot( snapshot_id, branches_from=f"refs/tags/{release_name}", branches_count=1, target_types=["release"], ) except NotFoundExc: snp = archive.lookup_snapshot( snapshot_id, branches_from=release_name, branches_count=1, target_types=["release"], ) _, snp_release, _ = process_snapshot_branches(snp) if snp_release and snp_release[0]["name"] == release_name: releases.append(snp_release[0]) return snp_release[0] def _branch_not_found( branch_type, branch, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id ): """ Utility function to raise an exception when a specified branch/release can not be found. 
""" if branch_type == "branch": branch_type = "Branch" branch_type_plural = "branches" target_type = "revision" else: branch_type = "Release" branch_type_plural = "releases" target_type = "release" if snapshot_id and snapshot_sizes[target_type] == 0: msg = "Snapshot with id %s has an empty list" " of %s!" % ( snapshot_id, branch_type_plural, ) elif snapshot_id: msg = "%s %s for snapshot with id %s" " not found!" % ( branch_type, branch, snapshot_id, ) elif visit_id and snapshot_sizes[target_type] == 0: msg = ( "Origin with url %s" " for visit with id %s has an empty list" " of %s!" % (origin_info["url"], visit_id, branch_type_plural) ) elif visit_id: msg = ( "%s %s associated to visit with" " id %s for origin with url %s" " not found!" % (branch_type, branch, visit_id, origin_info["url"]) ) elif snapshot_sizes[target_type] == 0: msg = ( "Origin with url %s" " for visit with timestamp %s has an empty list" " of %s!" % (origin_info["url"], timestamp, branch_type_plural) ) else: msg = ( "%s %s associated to visit with" " timestamp %s for origin with " "url %s not found!" % (branch_type, branch, timestamp, origin_info["url"]) ) raise NotFoundExc(escape(msg)) def process_snapshot_branches( snapshot: Dict[str, Any] ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo], Dict[str, Any]]: """ Process a dictionary describing snapshot branches: extract those targeting revisions and releases, put them in two different lists, then sort those lists in lexicographical order of the branches' names. Args: snapshot: A dict describing a snapshot as returned for instance by :func:`swh.web.common.archive.lookup_snapshot` Returns: A tuple whose first member is the sorted list of branches targeting revisions, second member the sorted list of branches targeting releases and third member a dict mapping resolved branch aliases to their real target. 
""" snapshot_branches = snapshot["branches"] branches: Dict[str, SnapshotBranchInfo] = {} branch_aliases: Dict[str, str] = {} releases: Dict[str, SnapshotReleaseInfo] = {} revision_to_branch = defaultdict(set) revision_to_release = defaultdict(set) release_to_branch = defaultdict(set) for branch_name, target in snapshot_branches.items(): if not target: # FIXME: display branches with an unknown target anyway continue target_id = target["target"] target_type = target["target_type"] if target_type == "revision": branches[branch_name] = SnapshotBranchInfo( name=branch_name, alias=False, revision=target_id, date=None, directory=None, message=None, url=None, ) revision_to_branch[target_id].add(branch_name) elif target_type == "release": release_to_branch[target_id].add(branch_name) elif target_type == "alias": branch_aliases[branch_name] = target_id # FIXME: handle pointers to other object types def _add_release_info(branch, release, alias=False): releases[branch] = SnapshotReleaseInfo( name=release["name"], alias=alias, branch_name=branch, date=format_utc_iso_date(release["date"]), directory=None, id=release["id"], message=release["message"], target_type=release["target_type"], target=release["target"], url=None, ) def _add_branch_info(branch, revision, alias=False): branches[branch] = SnapshotBranchInfo( name=branch, alias=alias, revision=revision["id"], directory=revision["directory"], date=format_utc_iso_date(revision["date"]), message=revision["message"], url=None, ) releases_info = archive.lookup_release_multiple(release_to_branch.keys()) for release in releases_info: if release is None: continue branches_to_update = release_to_branch[release["id"]] for branch in branches_to_update: _add_release_info(branch, release) if release["target_type"] == "revision": revision_to_release[release["target"]].update(branches_to_update) revisions = archive.lookup_revision_multiple( set(revision_to_branch.keys()) | set(revision_to_release.keys()) ) for revision in revisions: if not revision: continue for branch in revision_to_branch[revision["id"]]: _add_branch_info(branch, revision) for release_id in revision_to_release[revision["id"]]: releases[release_id]["directory"] = revision["directory"] resolved_aliases = {} for branch_alias, branch_target in branch_aliases.items(): resolved_alias = archive.lookup_snapshot_alias(snapshot["id"], branch_alias) resolved_aliases[branch_alias] = resolved_alias if resolved_alias is None: continue target_type = resolved_alias["target_type"] target = resolved_alias["target"] if target_type == "revision": revision = archive.lookup_revision(target) _add_branch_info(branch_alias, revision, alias=True) elif target_type == "release": release = archive.lookup_release(target) _add_release_info(branch_alias, release, alias=True) if branch_alias in branches: branches[branch_alias]["name"] = branch_alias ret_branches = list(sorted(branches.values(), key=lambda b: b["name"])) ret_releases = list(sorted(releases.values(), key=lambda b: b["name"])) return ret_branches, ret_releases, resolved_aliases def get_snapshot_content( snapshot_id: str, ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo], Dict[str, Any]]: """Returns the lists of branches and releases associated to a swh snapshot. That list is put in cache in order to speedup the navigation in the swh-web/browse ui. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. 
Args: snapshot_id: hexadecimal representation of the snapshot identifier Returns: A tuple with three members. The first one is a list of dict describing the snapshot branches. The second one is a list of dict describing the snapshot releases. The third one is a dict mapping resolved branch aliases to their real target. Raises: NotFoundExc if the snapshot does not exist """ cache_entry_id = "swh_snapshot_%s" % snapshot_id cache_entry = cache.get(cache_entry_id) if cache_entry: return ( cache_entry["branches"], cache_entry["releases"], cache_entry.get("aliases", {}), ) branches: List[SnapshotBranchInfo] = [] releases: List[SnapshotReleaseInfo] = [] aliases: Dict[str, Any] = {} snapshot_content_max_size = get_config()["snapshot_content_max_size"] if snapshot_id: snapshot = archive.lookup_snapshot( snapshot_id, branches_count=snapshot_content_max_size ) branches, releases, aliases = process_snapshot_branches(snapshot) cache.set( cache_entry_id, {"branches": branches, "releases": releases, "aliases": aliases} ) return branches, releases, aliases def get_origin_visit_snapshot( origin_info: OriginInfo, visit_ts: Optional[str] = None, visit_id: Optional[int] = None, snapshot_id: Optional[str] = None, ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo], Dict[str, Any]]: """Returns the lists of branches and releases associated to an origin for a given visit. The visit is expressed by either: * a snapshot identifier * a timestamp, if no visit with that exact timestamp is found, the closest one from the provided timestamp will be used. If no visit parameter is provided, it returns the list of branches found for the latest visit. That list is put in cache in order to speedup the navigation in the swh-web/browse ui. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. Args: origin_info: a dict filled with origin information visit_ts: an ISO 8601 datetime string to parse visit_id: visit id for disambiguation in case several visits have the same timestamp snapshot_id: if provided, visit associated to the snapshot will be processed Returns: A tuple with three members. The first one is a list of dict describing the origin branches for the given visit. The second one is a list of dict describing the origin releases for the given visit. The third one is a dict mapping resolved branch aliases to their real target. Raises: NotFoundExc if the origin or its visit are not found """ visit_info = get_origin_visit(origin_info, visit_ts, visit_id, snapshot_id) return get_snapshot_content(visit_info["snapshot"]) def get_snapshot_context( snapshot_id: Optional[str] = None, origin_url: Optional[str] = None, timestamp: Optional[str] = None, visit_id: Optional[int] = None, branch_name: Optional[str] = None, release_name: Optional[str] = None, revision_id: Optional[str] = None, path: Optional[str] = None, browse_context: str = "directory", ) -> SnapshotContext: """ Utility function to compute relevant information when navigating the archive in a snapshot context. The snapshot is either referenced by its id or it will be retrieved from an origin visit. 
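    For instance, the context for the latest visit of an origin (hypothetical
    URL below) can be retrieved with::

        snapshot_context = get_snapshot_context(
            origin_url="https://git.example.org/project.git"
        )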
    Args:
        snapshot_id: hexadecimal representation of a snapshot identifier
        origin_url: an origin_url
        timestamp: a datetime string for retrieving the closest
            visit of the origin
        visit_id: optional visit id for disambiguation in case
            of several visits with the same timestamp
        branch_name: optional branch name set when browsing the snapshot in
            that scope (will default to "HEAD" if not provided)
        release_name: optional release name set when browsing the snapshot in
            that scope
        revision_id: optional revision identifier set when browsing the
            snapshot in that scope
        path: optional path of the object currently browsed in the snapshot
        browse_context: indicates which type of object is currently browsed

    Returns:
        A dict filled with snapshot context information.

    Raises:
        swh.web.common.exc.NotFoundExc: if no snapshot is found for the visit
            of an origin.
    """
    assert origin_url is not None or snapshot_id is not None
    origin_info = None
    visit_info = None
    url_args = {}
    query_params: Dict[str, Any] = {}
    origin_visits_url = None

    if origin_url:
        if visit_id is not None:
            query_params["visit_id"] = visit_id
        elif snapshot_id is not None:
            query_params["snapshot"] = snapshot_id

        origin_info = archive.lookup_origin({"url": origin_url})
        visit_info = get_origin_visit(origin_info, timestamp, visit_id, snapshot_id)
        formatted_date = format_utc_iso_date(visit_info["date"])
        visit_info["formatted_date"] = formatted_date
        snapshot_id = visit_info["snapshot"]

        if not snapshot_id:
            raise NotFoundExc(
                "No snapshot associated to the visit of origin "
                "%s on %s" % (escape(origin_url), formatted_date)
            )

        # the provided timestamp is not necessarily equal to the one
        # of the retrieved visit, so get the exact one in order
        # to use it in the urls generated below
        if timestamp:
            timestamp = visit_info["date"]

        branches, releases, aliases = get_origin_visit_snapshot(
            origin_info, timestamp, visit_id, snapshot_id
        )

        query_params["origin_url"] = origin_info["url"]

        origin_visits_url = reverse(
            "browse-origin-visits", query_params={"origin_url": origin_info["url"]}
        )

        if timestamp is not None:
            query_params["timestamp"] = format_utc_iso_date(
                timestamp, "%Y-%m-%dT%H:%M:%SZ"
            )

        visit_url = reverse("browse-origin-directory", query_params=query_params)
        visit_info["url"] = visit_url

        branches_url = reverse("browse-origin-branches", query_params=query_params)

        releases_url = reverse("browse-origin-releases", query_params=query_params)
    else:
        assert snapshot_id is not None
        branches, releases, aliases = get_snapshot_content(snapshot_id)
        url_args = {"snapshot_id": snapshot_id}
        branches_url = reverse("browse-snapshot-branches", url_args=url_args)
        releases_url = reverse("browse-snapshot-releases", url_args=url_args)

    releases = list(reversed(releases))

    snapshot_sizes_cache_id = f"swh_snapshot_{snapshot_id}_sizes"
    snapshot_sizes = cache.get(snapshot_sizes_cache_id)
    if snapshot_sizes is None:
        snapshot_sizes = archive.lookup_snapshot_sizes(snapshot_id)
        cache.set(snapshot_sizes_cache_id, snapshot_sizes)

    is_empty = (snapshot_sizes["release"] + snapshot_sizes["revision"]) == 0

    swh_snp_id = str(
        CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=hash_to_bytes(snapshot_id))
    )

    if visit_info:
        timestamp = format_utc_iso_date(visit_info["date"])

    if origin_info:
        browse_view_name = f"browse-origin-{browse_context}"
    else:
        browse_view_name = f"browse-snapshot-{browse_context}"

    release_id = None
    root_directory = None

    snapshot_total_size = snapshot_sizes["release"] + snapshot_sizes["revision"]

    if path is not None:
        query_params["path"] = path

    if snapshot_total_size and revision_id is not None:
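        # an explicit revision is browsed: look it up and register it as a
        # synthetic branch entry alongside the snapshot branches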
revision = archive.lookup_revision(revision_id) root_directory = revision["directory"] branches.append( SnapshotBranchInfo( name=revision_id, alias=False, revision=revision_id, directory=root_directory, date=revision["date"], message=revision["message"], url=None, ) ) query_params["revision"] = revision_id elif snapshot_total_size and release_name: release = _get_release(releases, release_name, snapshot_id) if release is None: _branch_not_found( "release", release_name, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id, ) else: root_directory = release["directory"] revision_id = release["target"] release_id = release["id"] query_params["release"] = release_name elif snapshot_total_size: if branch_name: query_params["branch"] = branch_name branch = _get_branch(branches, branch_name or "HEAD", snapshot_id) if branch is None: _branch_not_found( "branch", branch_name, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id, ) else: branch_name = branch["name"] revision_id = branch["revision"] root_directory = branch["directory"] for b in branches: branch_query_params = dict(query_params) branch_query_params.pop("release", None) if b["name"] != b["revision"]: branch_query_params.pop("revision", None) branch_query_params["branch"] = b["name"] b["url"] = reverse( browse_view_name, url_args=url_args, query_params=branch_query_params ) for r in releases: release_query_params = dict(query_params) release_query_params.pop("branch", None) release_query_params.pop("revision", None) release_query_params["release"] = r["name"] r["url"] = reverse( browse_view_name, url_args=url_args, query_params=release_query_params, ) revision_info = None if revision_id: try: revision_info = archive.lookup_revision(revision_id) except NotFoundExc: pass else: revision_info["date"] = format_utc_iso_date(revision_info["date"]) revision_info["committer_date"] = format_utc_iso_date( revision_info["committer_date"] ) if revision_info["message"]: message_lines = revision_info["message"].split("\n") revision_info["message_header"] = message_lines[0] else: revision_info["message_header"] = "" snapshot_context = SnapshotContext( branch=branch_name, branch_alias=branch_name in aliases, branches=branches, branches_url=branches_url, is_empty=is_empty, origin_info=origin_info, origin_visits_url=origin_visits_url, release=release_name, release_alias=release_name in aliases, release_id=release_id, query_params=query_params, releases=releases, releases_url=releases_url, revision_id=revision_id, revision_info=revision_info, root_directory=root_directory, snapshot_id=snapshot_id, snapshot_sizes=snapshot_sizes, snapshot_swhid=swh_snp_id, url_args=url_args, visit_info=visit_info, ) if revision_info: revision_info["revision_url"] = gen_revision_url(revision_id, snapshot_context) return snapshot_context def _build_breadcrumbs(snapshot_context: SnapshotContext, path: str): origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = dict(snapshot_context["query_params"]) root_directory = snapshot_context["root_directory"] path_info = gen_path_info(path) if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" breadcrumbs = [] if root_directory: query_params.pop("path", None) breadcrumbs.append( { "name": root_directory[:7], "url": reverse( browse_view_name, url_args=url_args, query_params=query_params ), } ) for pi in path_info: query_params["path"] = pi["path"] breadcrumbs.append( { "name": pi["name"], "url": reverse( 
browse_view_name, url_args=url_args, query_params=query_params ), } ) return breadcrumbs def _check_origin_url(snapshot_id, origin_url): if snapshot_id is None and origin_url is None: raise BadInputExc("An origin URL must be provided as query parameter.") def browse_snapshot_directory( request, snapshot_id=None, origin_url=None, timestamp=None, path=None ): """ Django view implementation for browsing a directory in a snapshot context. """ _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), path=path, browse_context="directory", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) root_directory = snapshot_context["root_directory"] sha1_git = root_directory error_info = { "status_code": 200, "description": None, } if root_directory and path: try: dir_info = archive.lookup_directory_with_path(root_directory, path) sha1_git = dir_info["target"] except NotFoundExc as e: sha1_git = None error_info["status_code"] = 404 error_info["description"] = f"NotFoundExc: {str(e)}" dirs = [] files = [] if sha1_git: dirs, files = get_directory_entries(sha1_git) origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] url_args = snapshot_context["url_args"] query_params = dict(snapshot_context["query_params"]) revision_id = snapshot_context["revision_id"] snapshot_id = snapshot_context["snapshot_id"] if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" breadcrumbs = _build_breadcrumbs(snapshot_context, path) path = "" if path is None else (path + "/") for d in dirs: if d["type"] == "rev": d["url"] = reverse("browse-revision", url_args={"sha1_git": d["target"]}) else: query_params["path"] = path + d["name"] d["url"] = reverse( browse_view_name, url_args=url_args, query_params=query_params ) sum_file_sizes = 0 readmes = {} if origin_info: browse_view_name = "browse-origin-content" else: browse_view_name = "browse-snapshot-content" for f in files: query_params["path"] = path + f["name"] f["url"] = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if f["length"] is not None: sum_file_sizes += f["length"] f["length"] = filesizeformat(f["length"]) if f["name"].lower().startswith("readme"): readmes[f["name"]] = f["checksums"]["sha1"] readme_name, readme_url, readme_html = get_readme_to_display(readmes) if origin_info: browse_view_name = "browse-origin-log" else: browse_view_name = "browse-snapshot-log" history_url = None if snapshot_id != _empty_snapshot_id: query_params.pop("path", None) history_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) nb_files = None nb_dirs = None dir_path = None if root_directory: nb_files = len(files) nb_dirs = len(dirs) sum_file_sizes = filesizeformat(sum_file_sizes) dir_path = "/" + path revision_found = True if sha1_git is None and revision_id is not None: try: archive.lookup_revision(revision_id) except NotFoundExc: revision_found = False swh_objects = [ SWHObjectInfo(object_type=DIRECTORY, object_id=sha1_git), SWHObjectInfo(object_type=REVISION, object_id=revision_id), SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id), ] visit_date = None visit_type = None if visit_info: visit_date = format_utc_iso_date(visit_info["date"]) visit_type = visit_info["type"] release_id = snapshot_context["release_id"] if release_id: 
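        # a release is being browsed: advertise its SWHID along with the
        # directory, revision and snapshot ones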
swh_objects.append(SWHObjectInfo(object_type=RELEASE, object_id=release_id)) dir_metadata = DirectoryMetadata( object_type=DIRECTORY, object_id=sha1_git, directory=sha1_git, nb_files=nb_files, nb_dirs=nb_dirs, sum_file_sizes=sum_file_sizes, root_directory=root_directory, path=dir_path, revision=revision_id, revision_found=revision_found, release=release_id, snapshot=snapshot_id, origin_url=origin_url, visit_date=visit_date, visit_type=visit_type, ) vault_cooking = { "directory_context": True, - "directory_id": sha1_git, + "directory_swhid": f"swh:1:dir:{sha1_git}", "revision_context": True, - "revision_id": revision_id, + "revision_swhid": f"swh:1:rev:{revision_id}", } swhids_info = get_swhids_info(swh_objects, snapshot_context, dir_metadata) dir_path = "/".join([bc["name"] for bc in breadcrumbs]) + "/" context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Directory - %s - %s - %s" % ( dir_path, snapshot_context["branch"], context_found, ) top_right_link = None if not snapshot_context["is_empty"]: top_right_link = { "url": history_url, "icon": swh_object_icons["revisions history"], "text": "History", } return render( request, "browse/directory.html", { "heading": heading, "swh_object_name": "Directory", "swh_object_metadata": dir_metadata, "dirs": dirs, "files": files, "breadcrumbs": breadcrumbs if root_directory else [], "top_right_link": top_right_link, "readme_name": readme_name, "readme_url": readme_url, "readme_html": readme_html, "snapshot_context": snapshot_context, "vault_cooking": vault_cooking, "show_actions": True, "swhids_info": swhids_info, "error_code": error_info["status_code"], "error_message": http_status_code_message.get(error_info["status_code"]), "error_description": error_info["description"], }, status=error_info["status_code"], ) def browse_snapshot_content( request, snapshot_id=None, origin_url=None, timestamp=None, path=None, selected_language=None, ): """ Django view implementation for browsing a content in a snapshot context. 
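    The content path must be provided through the ``path`` query parameter,
    otherwise a ``BadInputExc`` is raised.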
""" _check_origin_url(snapshot_id, origin_url) if path is None: raise BadInputExc("The path of a content must be given as query parameter.") snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), path=path, browse_context="content", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) root_directory = snapshot_context["root_directory"] sha1_git = None query_string = None content_data = {} directory_id = None split_path = path.split("/") filename = split_path[-1] filepath = path[: -len(filename)] error_info = { "status_code": 200, "description": None, } if root_directory: try: content_info = archive.lookup_directory_with_path(root_directory, path) sha1_git = content_info["target"] query_string = "sha1_git:" + sha1_git content_data = request_content(query_string) if filepath: dir_info = archive.lookup_directory_with_path(root_directory, filepath) directory_id = dir_info["target"] else: directory_id = root_directory except NotFoundExc as e: error_info["status_code"] = 404 error_info["description"] = f"NotFoundExc: {str(e)}" revision_id = snapshot_context["revision_id"] origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] snapshot_id = snapshot_context["snapshot_id"] if content_data.get("raw_data") is not None: content_display_data = prepare_content_for_display( content_data["raw_data"], content_data["mimetype"], path ) content_data.update(content_display_data) # Override language with user-selected language if selected_language is not None: content_data["language"] = selected_language available_languages = None if content_data.get("mimetype") is not None and "text/" in content_data["mimetype"]: available_languages = highlightjs.get_supported_languages() breadcrumbs = _build_breadcrumbs(snapshot_context, filepath) breadcrumbs.append({"name": filename, "url": None}) browse_content_link = gen_content_link(sha1_git) content_raw_url = None if query_string: content_raw_url = reverse( "browse-content-raw", url_args={"query_string": query_string}, query_params={"filename": filename}, ) content_checksums = content_data.get("checksums", {}) swh_objects = [ SWHObjectInfo(object_type=CONTENT, object_id=content_checksums.get("sha1_git")), SWHObjectInfo(object_type=DIRECTORY, object_id=directory_id), SWHObjectInfo(object_type=REVISION, object_id=revision_id), SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id), ] visit_date = None visit_type = None if visit_info: visit_date = format_utc_iso_date(visit_info["date"]) visit_type = visit_info["type"] release_id = snapshot_context["release_id"] if release_id: swh_objects.append(SWHObjectInfo(object_type=RELEASE, object_id=release_id)) content_metadata = ContentMetadata( object_type=CONTENT, object_id=content_checksums.get("sha1_git"), sha1=content_checksums.get("sha1"), sha1_git=content_checksums.get("sha1_git"), sha256=content_checksums.get("sha256"), blake2s256=content_checksums.get("blake2s256"), content_url=browse_content_link, mimetype=content_data.get("mimetype"), encoding=content_data.get("encoding"), size=filesizeformat(content_data.get("length", 0)), language=content_data.get("language"), root_directory=root_directory, path=f"/{filepath}", filename=filename, directory=directory_id, revision=revision_id, release=release_id, snapshot=snapshot_id, origin_url=origin_url, visit_date=visit_date, visit_type=visit_type, ) swhids_info = 
get_swhids_info(swh_objects, snapshot_context, content_metadata) content_path = "/".join([bc["name"] for bc in breadcrumbs]) context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Content - %s - %s - %s" % ( content_path, snapshot_context["branch"], context_found, ) top_right_link = None if not snapshot_context["is_empty"]: top_right_link = { "url": content_raw_url, "icon": swh_object_icons["content"], "text": "Raw File", } return render( request, "browse/content.html", { "heading": heading, "swh_object_name": "Content", "swh_object_metadata": content_metadata, "content": content_data.get("content_data"), "content_size": content_data.get("length"), "max_content_size": content_display_max_size, "filename": filename, "encoding": content_data.get("encoding"), "mimetype": content_data.get("mimetype"), "language": content_data.get("language"), "available_languages": available_languages, "breadcrumbs": breadcrumbs if root_directory else [], "top_right_link": top_right_link, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": True, "swhids_info": swhids_info, "error_code": error_info["status_code"], "error_message": http_status_code_message.get(error_info["status_code"]), "error_description": error_info["description"], }, status=error_info["status_code"], ) PER_PAGE = 100 def browse_snapshot_log(request, snapshot_id=None, origin_url=None, timestamp=None): """ Django view implementation for browsing a revision history in a snapshot context. """ _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), browse_context="log", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) revision_id = snapshot_context["revision_id"] per_page = int(request.GET.get("per_page", PER_PAGE)) offset = int(request.GET.get("offset", 0)) revs_ordering = request.GET.get("revs_ordering", "committer_date") session_key = "rev_%s_log_ordering_%s" % (revision_id, revs_ordering) rev_log_session = request.session.get(session_key, None) rev_log = [] revs_walker_state = None if rev_log_session: rev_log = rev_log_session["rev_log"] revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: revs_walker = archive.get_revisions_walker( revs_ordering, revision_id, max_revs=offset + per_page + 1, state=revs_walker_state, ) rev_log += [rev["id"] for rev in revs_walker] revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] revision_log = archive.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, "revs_walker_state": revs_walker_state, } origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] snapshot_id = snapshot_context["snapshot_id"] query_params["per_page"] = per_page revs_ordering = request.GET.get("revs_ordering", "") query_params["revs_ordering"] = revs_ordering or None if origin_info: browse_view_name = "browse-origin-log" else: browse_view_name = "browse-snapshot-log" prev_log_url = None if len(rev_log) > offset + per_page: query_params["offset"] = offset + per_page prev_log_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) next_log_url = None if offset != 0: 
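        # not on the first page: also link back toward the beginning of the log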
query_params["offset"] = offset - per_page next_log_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) revision_log_data = format_log_entries(revision_log, per_page, snapshot_context) browse_rev_link = gen_revision_link(revision_id) browse_log_link = gen_revision_log_link(revision_id) browse_snp_link = gen_snapshot_link(snapshot_id) revision_metadata = { "context-independent revision": browse_rev_link, "context-independent revision history": browse_log_link, "context-independent snapshot": browse_snp_link, "snapshot": snapshot_id, } if origin_info: revision_metadata["origin url"] = origin_info["url"] revision_metadata["origin visit date"] = format_utc_iso_date(visit_info["date"]) revision_metadata["origin visit type"] = visit_info["type"] swh_objects = [ SWHObjectInfo(object_type=REVISION, object_id=revision_id), SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id), ] release_id = snapshot_context["release_id"] if release_id: swh_objects.append(SWHObjectInfo(object_type=RELEASE, object_id=release_id)) browse_rel_link = gen_release_link(release_id) revision_metadata["release"] = release_id revision_metadata["context-independent release"] = browse_rel_link swhids_info = get_swhids_info(swh_objects, snapshot_context) context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Revision history - %s - %s" % (snapshot_context["branch"], context_found) return render( request, "browse/revision-log.html", { "heading": heading, "swh_object_name": "Revisions history", "swh_object_metadata": revision_metadata, "revision_log": revision_log_data, "revs_ordering": revs_ordering, "next_log_url": next_log_url, "prev_log_url": prev_log_url, "breadcrumbs": None, "top_right_link": None, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": True, "swhids_info": swhids_info, }, ) def browse_snapshot_branches( request, snapshot_id=None, origin_url=None, timestamp=None, branch_name_include=None ): """ Django view implementation for browsing a list of branches in a snapshot context. 
""" _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), ) branches_bc = request.GET.get("branches_breadcrumbs", "") branches_bc = branches_bc.split(",") if branches_bc else [] branches_from = branches_bc[-1] if branches_bc else "" origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" snapshot = archive.lookup_snapshot( snapshot_context["snapshot_id"], branches_from, PER_PAGE + 1, target_types=["revision", "alias"], branch_name_include_substring=branch_name_include, ) displayed_branches = [] if snapshot: displayed_branches, _, _ = process_snapshot_branches(snapshot) for branch in displayed_branches: rev_query_params = {} if origin_info: rev_query_params["origin_url"] = origin_info["url"] revision_url = reverse( "browse-revision", url_args={"sha1_git": branch["revision"]}, query_params=query_params, ) query_params["branch"] = branch["name"] directory_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) del query_params["branch"] branch["revision_url"] = revision_url branch["directory_url"] = directory_url if origin_info: browse_view_name = "browse-origin-branches" else: browse_view_name = "browse-snapshot-branches" prev_branches_url = None next_branches_url = None if branches_bc: query_params_prev = dict(query_params) query_params_prev["branches_breadcrumbs"] = ",".join(branches_bc[:-1]) prev_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_prev ) elif branches_from: prev_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if snapshot and snapshot["next_branch"] is not None: query_params_next = dict(query_params) next_branch = displayed_branches[-1]["name"] del displayed_branches[-1] branches_bc.append(next_branch) query_params_next["branches_breadcrumbs"] = ",".join(branches_bc) next_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_next ) heading = "Branches - " if origin_info: heading += "origin: %s" % origin_info["url"] else: heading += "snapshot: %s" % snapshot_id return render( request, "browse/branches.html", { "heading": heading, "swh_object_name": "Branches", "swh_object_metadata": {}, "top_right_link": None, "displayed_branches": displayed_branches, "prev_branches_url": prev_branches_url, "next_branches_url": next_branches_url, "snapshot_context": snapshot_context, "search_string": branch_name_include or "", }, ) def browse_snapshot_releases( request, snapshot_id=None, origin_url=None, timestamp=None, release_name_include=None, ): """ Django view implementation for browsing a list of releases in a snapshot context. 
""" _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), ) rel_bc = request.GET.get("releases_breadcrumbs", "") rel_bc = rel_bc.split(",") if rel_bc else [] rel_from = rel_bc[-1] if rel_bc else "" origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] snapshot = archive.lookup_snapshot( snapshot_context["snapshot_id"], rel_from, PER_PAGE + 1, target_types=["release", "alias"], branch_name_include_substring=release_name_include, ) displayed_releases = [] if snapshot: _, displayed_releases, _ = process_snapshot_branches(snapshot) for release in displayed_releases: query_params_tgt = {"snapshot": snapshot_id} if origin_info: query_params_tgt["origin_url"] = origin_info["url"] release_url = reverse( "browse-release", url_args={"sha1_git": release["id"]}, query_params=query_params_tgt, ) target_url = "" tooltip = ( f"The release {release['name']} targets " f"{release['target_type']} {release['target']}" ) if release["target_type"] == "revision": target_url = reverse( "browse-revision", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "directory": target_url = reverse( "browse-directory", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "content": target_url = reverse( "browse-content", url_args={"query_string": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "release": target_url = reverse( "browse-release", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) tooltip = ( f"The release {release['name']} " f"is an alias for release {release['target']}" ) release["release_url"] = release_url release["target_url"] = target_url release["tooltip"] = tooltip if origin_info: browse_view_name = "browse-origin-releases" else: browse_view_name = "browse-snapshot-releases" prev_releases_url = None next_releases_url = None if rel_bc: query_params_prev = dict(query_params) query_params_prev["releases_breadcrumbs"] = ",".join(rel_bc[:-1]) prev_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_prev ) elif rel_from: prev_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if snapshot and snapshot["next_branch"] is not None: query_params_next = dict(query_params) next_rel = displayed_releases[-1]["branch_name"] del displayed_releases[-1] rel_bc.append(next_rel) query_params_next["releases_breadcrumbs"] = ",".join(rel_bc) next_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_next ) heading = "Releases - " if origin_info: heading += "origin: %s" % origin_info["url"] else: heading += "snapshot: %s" % snapshot_id return render( request, "browse/releases.html", { "heading": heading, "top_panel_visible": False, "top_panel_collapsible": False, "swh_object_name": "Releases", "swh_object_metadata": {}, "top_right_link": None, "displayed_releases": displayed_releases, "prev_releases_url": prev_releases_url, "next_releases_url": next_releases_url, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": False, "search_string": release_name_include or "", }, ) diff --git a/swh/web/browse/urls.py b/swh/web/browse/urls.py index 0b3f19f2..b7118949 100644 --- a/swh/web/browse/urls.py +++ b/swh/web/browse/urls.py @@ 
-1,60 +1,64 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information

from django.conf.urls import url
from django.shortcuts import redirect, render

from swh.web import config
from swh.web.browse.browseurls import BrowseUrls
from swh.web.browse.identifiers import swhid_browse
import swh.web.browse.views.content  # noqa
import swh.web.browse.views.directory  # noqa
import swh.web.browse.views.origin  # noqa
import swh.web.browse.views.release  # noqa
import swh.web.browse.views.revision  # noqa
import swh.web.browse.views.snapshot  # noqa
from swh.web.common.utils import reverse


def _browse_help_view(request):
    return render(
        request, "browse/help.html", {"heading": "How to browse the archive ?"}
    )


def _browse_search_view(request):
    return render(
        request,
        "browse/search.html",
        {
            "heading": "Search software origins to browse",
            "enable_ql": config.get_config()["search_config"].get("enable_ql", False),
        },
    )


def _browse_vault_view(request):
    return render(
        request,
        "browse/vault-ui.html",
        {"heading": "Download archive content from the Vault"},
    )


def _browse_origin_save_view(request):
    return redirect(reverse("origin-save"))


urlpatterns = [
    url(r"^$", _browse_search_view),
    url(r"^help/$", _browse_help_view, name="browse-help"),
    url(r"^search/$", _browse_search_view, name="browse-search"),
    url(r"^vault/$", _browse_vault_view, name="browse-vault"),
    # for backward compatibility
    url(r"^origin/save/$", _browse_origin_save_view, name="browse-origin-save"),
-    url(r"^(?P<swhid>swh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$", swhid_browse),
+    url(
+        r"^(?P<swhid>swh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$",
+        swhid_browse,
+        name="browse-swhid",
+    ),
]

urlpatterns += BrowseUrls.get_url_patterns()

diff --git a/swh/web/browse/views/directory.py b/swh/web/browse/views/directory.py
index e12ef47e..7a032326 100644
--- a/swh/web/browse/views/directory.py
+++ b/swh/web/browse/views/directory.py
@@ -1,282 +1,282 @@
# Copyright (C) 2017-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information

import os

import sentry_sdk

from django.http import HttpResponse
from django.shortcuts import redirect, render
from django.template.defaultfilters import filesizeformat

from swh.model.identifiers import DIRECTORY, RELEASE, REVISION, SNAPSHOT
from swh.web.browse.browseurls import browse_route
from swh.web.browse.snapshot_context import get_snapshot_context
from swh.web.browse.utils import gen_link, get_directory_entries, get_readme_to_display
from swh.web.common import archive
from swh.web.common.exc import NotFoundExc, http_status_code_message
from swh.web.common.identifiers import get_swhids_info
from swh.web.common.typing import DirectoryMetadata, SWHObjectInfo
from swh.web.common.utils import gen_path_info, reverse, swh_object_icons


def _directory_browse(request, sha1_git, path=None):
    root_sha1_git = sha1_git
    error_info = {"status_code": 200, "description": None}
    if path:
        try:
            dir_info = archive.lookup_directory_with_path(sha1_git, path)
            sha1_git = dir_info["target"]
        except NotFoundExc as e:
            error_info["status_code"] = 404
            error_info["description"] = f"NotFoundExc: {str(e)}"
            sha1_git = None

    dirs, files = [], []
    if sha1_git is not None:
        dirs, files = get_directory_entries(sha1_git)

    origin_url = 
request.GET.get("origin_url") if not origin_url: origin_url = request.GET.get("origin") snapshot_id = request.GET.get("snapshot") snapshot_context = None if origin_url is not None or snapshot_id is not None: try: snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), path=path, ) except NotFoundExc as e: if str(e).startswith("Origin"): raw_dir_url = reverse( "browse-directory", url_args={"sha1_git": sha1_git} ) error_message = ( "The Software Heritage archive has a directory " "with the hash you provided but the origin " "mentioned in your request appears broken: %s. " "Please check the URL and try again.\n\n" "Nevertheless, you can still browse the directory " "without origin information: %s" % (gen_link(origin_url), gen_link(raw_dir_url)) ) raise NotFoundExc(error_message) else: raise e path_info = gen_path_info(path) query_params = snapshot_context["query_params"] if snapshot_context else {} breadcrumbs = [] breadcrumbs.append( { "name": root_sha1_git[:7], "url": reverse( "browse-directory", url_args={"sha1_git": root_sha1_git}, query_params={**query_params, "path": None}, ), } ) for pi in path_info: breadcrumbs.append( { "name": pi["name"], "url": reverse( "browse-directory", url_args={"sha1_git": root_sha1_git}, query_params={**query_params, "path": pi["path"],}, ), } ) path = "" if path is None else (path + "/") for d in dirs: if d["type"] == "rev": d["url"] = reverse( "browse-revision", url_args={"sha1_git": d["target"]}, query_params=query_params, ) else: d["url"] = reverse( "browse-directory", url_args={"sha1_git": root_sha1_git}, query_params={**query_params, "path": path + d["name"],}, ) sum_file_sizes = 0 readmes = {} for f in files: query_string = "sha1_git:" + f["target"] f["url"] = reverse( "browse-content", url_args={"query_string": query_string}, query_params={ **query_params, "path": root_sha1_git + "/" + path + f["name"], }, ) if f["length"] is not None: sum_file_sizes += f["length"] f["length"] = filesizeformat(f["length"]) if f["name"].lower().startswith("readme"): readmes[f["name"]] = f["checksums"]["sha1"] readme_name, readme_url, readme_html = get_readme_to_display(readmes) sum_file_sizes = filesizeformat(sum_file_sizes) dir_metadata = DirectoryMetadata( object_type=DIRECTORY, object_id=sha1_git, directory=root_sha1_git, nb_files=len(files), nb_dirs=len(dirs), sum_file_sizes=sum_file_sizes, root_directory=root_sha1_git, path=f"/{path}" if path else None, revision=None, revision_found=None, release=None, snapshot=None, ) vault_cooking = { "directory_context": True, - "directory_id": sha1_git, + "directory_swhid": f"swh:1:dir:{sha1_git}", "revision_context": False, - "revision_id": None, + "revision_swhid": None, } swh_objects = [SWHObjectInfo(object_type=DIRECTORY, object_id=sha1_git)] if snapshot_context: swh_objects.append( SWHObjectInfo( object_type=REVISION, object_id=snapshot_context["revision_id"] ) ) swh_objects.append( SWHObjectInfo( object_type=SNAPSHOT, object_id=snapshot_context["snapshot_id"] ) ) if snapshot_context["release_id"]: swh_objects.append( SWHObjectInfo( object_type=RELEASE, object_id=snapshot_context["release_id"] ) ) swhids_info = get_swhids_info(swh_objects, snapshot_context, dir_metadata) heading = "Directory - %s" % sha1_git if breadcrumbs: dir_path = "/".join([bc["name"] for bc in breadcrumbs]) + "/" heading += " - %s" % dir_path top_right_link = None if snapshot_context is not None and 
not snapshot_context["is_empty"]:
        history_url = reverse(
            "browse-revision-log",
            url_args={"sha1_git": snapshot_context["revision_id"]},
            query_params=query_params,
        )
        top_right_link = {
            "url": history_url,
            "icon": swh_object_icons["revisions history"],
            "text": "History",
        }

    return render(
        request,
        "browse/directory.html",
        {
            "heading": heading,
            "swh_object_id": swhids_info[0]["swhid"],
            "swh_object_name": "Directory",
            "swh_object_metadata": dir_metadata,
            "dirs": dirs,
            "files": files,
            "breadcrumbs": breadcrumbs,
            "top_right_link": top_right_link,
            "readme_name": readme_name,
            "readme_url": readme_url,
            "readme_html": readme_html,
            "snapshot_context": snapshot_context,
            "vault_cooking": vault_cooking,
            "show_actions": True,
            "swhids_info": swhids_info,
            "error_code": error_info["status_code"],
            "error_message": http_status_code_message.get(error_info["status_code"]),
            "error_description": error_info["description"],
        },
        status=error_info["status_code"],
    )


@browse_route(
    r"directory/(?P<sha1_git>[0-9a-f]+)/",
    view_name="browse-directory",
    checksum_args=["sha1_git"],
)
def directory_browse(request, sha1_git):
    """Django view for browsing the content of a directory identified
    by its sha1_git value.

    The url that points to it is
    :http:get:`/browse/directory/(sha1_git)/`
    """
    return _directory_browse(request, sha1_git, request.GET.get("path"))


@browse_route(
    r"directory/(?P<sha1_git>[0-9a-f]+)/(?P<path>.+)/",
    view_name="browse-directory-legacy",
    checksum_args=["sha1_git"],
)
def directory_browse_legacy(request, sha1_git, path):
    """Django view for browsing the content of a directory identified
    by its sha1_git value.

    The url that points to it is
    :http:get:`/browse/directory/(sha1_git)/(path)/`
    """
    return _directory_browse(request, sha1_git, path)


@browse_route(
    r"directory/resolve/content-path/(?P<sha1_git>[0-9a-f]+)/",
    view_name="browse-directory-resolve-content-path",
    checksum_args=["sha1_git"],
)
def _directory_resolve_content_path(request, sha1_git):
    """
    Internal endpoint redirecting to data url for a specific file path
    relative to a root directory.
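    Returns an HTTP 404 response if the path cannot be resolved to a file.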
""" try: path = os.path.normpath(request.GET.get("path")) if not path.startswith("../"): dir_info = archive.lookup_directory_with_path(sha1_git, path) if dir_info["type"] == "file": sha1 = dir_info["checksums"]["sha1"] data_url = reverse( "browse-content-raw", url_args={"query_string": sha1} ) return redirect(data_url) except Exception as exc: sentry_sdk.capture_exception(exc) return HttpResponse(status=404) diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py index 7a985607..8edc000c 100644 --- a/swh/web/browse/views/release.py +++ b/swh/web/browse/views/release.py @@ -1,239 +1,239 @@ # Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import sentry_sdk from django.shortcuts import render from swh.model.identifiers import CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT from swh.web.browse.browseurls import browse_route from swh.web.browse.snapshot_context import get_snapshot_context from swh.web.browse.utils import ( gen_content_link, gen_directory_link, gen_link, gen_person_mail_link, gen_release_link, gen_revision_link, ) from swh.web.common import archive from swh.web.common.exc import NotFoundExc from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import ReleaseMetadata, SWHObjectInfo from swh.web.common.utils import format_utc_iso_date, reverse @browse_route( r"release/(?P[0-9a-f]+)/", view_name="browse-release", checksum_args=["sha1_git"], ) def release_browse(request, sha1_git): """ Django view that produces an HTML display of a release identified by its id. The url that points to it is :http:get:`/browse/release/(sha1_git)/`. """ release = archive.lookup_release(sha1_git) snapshot_context = {} origin_info = None snapshot_id = request.GET.get("snapshot_id") if not snapshot_id: snapshot_id = request.GET.get("snapshot") origin_url = request.GET.get("origin_url") if not origin_url: origin_url = request.GET.get("origin") timestamp = request.GET.get("timestamp") visit_id = request.GET.get("visit_id") if origin_url: try: snapshot_context = get_snapshot_context( snapshot_id, origin_url, timestamp, visit_id, release_name=release["name"], ) except NotFoundExc as e: raw_rel_url = reverse("browse-release", url_args={"sha1_git": sha1_git}) error_message = ( "The Software Heritage archive has a release " "with the hash you provided but the origin " "mentioned in your request appears broken: %s. 
" "Please check the URL and try again.\n\n" "Nevertheless, you can still browse the release " "without origin information: %s" % (gen_link(origin_url), gen_link(raw_rel_url)) ) if str(e).startswith("Origin"): raise NotFoundExc(error_message) else: raise e origin_info = snapshot_context["origin_info"] elif snapshot_id: snapshot_context = get_snapshot_context( snapshot_id, release_name=release["name"] ) snapshot_id = snapshot_context.get("snapshot_id", None) release_metadata = ReleaseMetadata( object_type=RELEASE, object_id=sha1_git, release=sha1_git, author=release["author"]["fullname"] if release["author"] else "None", author_url=gen_person_mail_link(release["author"]) if release["author"] else "None", date=format_utc_iso_date(release["date"]), name=release["name"], synthetic=release["synthetic"], target=release["target"], target_type=release["target_type"], snapshot=snapshot_id, origin_url=origin_url, ) release_note_lines = [] if release["message"]: release_note_lines = release["message"].split("\n") swh_objects = [SWHObjectInfo(object_type=RELEASE, object_id=sha1_git)] vault_cooking = None rev_directory = None target_link = None if release["target_type"] == REVISION: target_link = gen_revision_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) try: revision = archive.lookup_revision(release["target"]) rev_directory = revision["directory"] vault_cooking = { "directory_context": True, - "directory_id": rev_directory, + "directory_swhid": f"swh:1:dir:{rev_directory}", "revision_context": True, - "revision_id": release["target"], + "revision_swhid": f"swh:1:rev:{release['target']}", } swh_objects.append( SWHObjectInfo(object_type=REVISION, object_id=release["target"]) ) swh_objects.append( SWHObjectInfo(object_type=DIRECTORY, object_id=rev_directory) ) except Exception as exc: sentry_sdk.capture_exception(exc) elif release["target_type"] == DIRECTORY: target_link = gen_directory_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) try: # check directory exists archive.lookup_directory(release["target"]) vault_cooking = { "directory_context": True, - "directory_id": release["target"], + "directory_swhid": f"swh:1:dir:{release['target']}", "revision_context": False, - "revision_id": None, + "revision_swhid": None, } swh_objects.append( SWHObjectInfo(object_type=DIRECTORY, object_id=release["target"]) ) except Exception as exc: sentry_sdk.capture_exception(exc) elif release["target_type"] == CONTENT: target_link = gen_content_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) swh_objects.append( SWHObjectInfo(object_type=CONTENT, object_id=release["target"]) ) elif release["target_type"] == RELEASE: target_link = gen_release_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) rev_directory_url = None if rev_directory is not None: if origin_info: rev_directory_url = reverse( "browse-origin-directory", query_params={ "origin_url": origin_info["url"], "release": release["name"], "snapshot": snapshot_id, }, ) elif snapshot_id: rev_directory_url = reverse( "browse-snapshot-directory", url_args={"snapshot_id": snapshot_id}, query_params={"release": release["name"]}, ) else: rev_directory_url = reverse( "browse-directory", url_args={"sha1_git": rev_directory} ) directory_link = None if rev_directory_url is not None: directory_link = gen_link(rev_directory_url, rev_directory) release["directory_link"] = directory_link 
release["target_link"] = target_link if snapshot_context: snapshot_id = snapshot_context["snapshot_id"] if snapshot_id: swh_objects.append(SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id)) swhids_info = get_swhids_info(swh_objects, snapshot_context) note_header = "None" if len(release_note_lines) > 0: note_header = release_note_lines[0] release["note_header"] = note_header release["note_body"] = "\n".join(release_note_lines[1:]) heading = "Release - %s" % release["name"] if snapshot_context: context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading += " - %s" % context_found return render( request, "browse/release.html", { "heading": heading, "swh_object_id": swhids_info[0]["swhid"], "swh_object_name": "Release", "swh_object_metadata": release_metadata, "release": release, "snapshot_context": snapshot_context, "show_actions": True, "breadcrumbs": None, "vault_cooking": vault_cooking, "top_right_link": None, "swhids_info": swhids_info, }, ) diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py index 9d9266a8..447eebb6 100644 --- a/swh/web/browse/views/revision.py +++ b/swh/web/browse/views/revision.py @@ -1,592 +1,592 @@ # Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import hashlib import json import textwrap from django.http import JsonResponse from django.shortcuts import render from django.template.defaultfilters import filesizeformat from django.utils.safestring import mark_safe from swh.model.hashutil import hash_to_bytes from swh.model.identifiers import ( CONTENT, DIRECTORY, REVISION, SNAPSHOT, CoreSWHID, ObjectType, ) from swh.web.browse.browseurls import browse_route from swh.web.browse.snapshot_context import get_snapshot_context from swh.web.browse.utils import ( content_display_max_size, format_log_entries, gen_link, gen_person_mail_link, gen_revision_url, get_directory_entries, get_readme_to_display, get_revision_log_url, prepare_content_for_display, request_content, ) from swh.web.common import archive from swh.web.common.exc import NotFoundExc, http_status_code_message from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import RevisionMetadata, SWHObjectInfo from swh.web.common.utils import ( format_utc_iso_date, gen_path_info, reverse, swh_object_icons, ) def _gen_content_url(revision, query_string, path, snapshot_context): if snapshot_context: query_params = snapshot_context["query_params"] query_params["path"] = path query_params["revision"] = revision["id"] content_url = reverse("browse-origin-content", query_params=query_params) else: content_path = "%s/%s" % (revision["directory"], path) content_url = reverse( "browse-content", url_args={"query_string": query_string}, query_params={"path": content_path}, ) return content_url def _gen_diff_link(idx, diff_anchor, link_text): if idx < _max_displayed_file_diffs: return gen_link(diff_anchor, link_text) else: return link_text # TODO: put in conf _max_displayed_file_diffs = 1000 def _gen_revision_changes_list(revision, changes, snapshot_context): """ Returns a HTML string describing the file changes introduced in a revision. As this string will be displayed in the browse revision view, links to adequate file diffs are also generated. 
    Args:
        revision (str): hexadecimal representation of a revision identifier
        changes (list): list of file changes in the revision
        snapshot_context (dict): optional origin context used to reverse
            the content urls

    Returns:
        A string to insert in a revision HTML view.

    """
    changes_msg = []
    for i, change in enumerate(changes):
        hasher = hashlib.sha1()
        from_query_string = ""
        to_query_string = ""
        diff_id = "diff-"
        if change["from"]:
            from_query_string = "sha1_git:" + change["from"]["target"]
            diff_id += change["from"]["target"] + "-" + change["from_path"]
        diff_id += "-"
        if change["to"]:
            to_query_string = "sha1_git:" + change["to"]["target"]
            diff_id += change["to"]["target"] + change["to_path"]
        change["path"] = change["to_path"] or change["from_path"]
        url_args = {
            "from_query_string": from_query_string,
            "to_query_string": to_query_string,
        }
        query_params = {"path": change["path"]}
        change["diff_url"] = reverse(
            "diff-contents", url_args=url_args, query_params=query_params
        )
        hasher.update(diff_id.encode("utf-8"))
        diff_id = hasher.hexdigest()
        change["id"] = diff_id
        diff_link = "#diff_" + diff_id

        if change["type"] == "modify":
            change["content_url"] = _gen_content_url(
                revision, to_query_string, change["to_path"], snapshot_context
            )
            changes_msg.append(
                "modified: %s" % _gen_diff_link(i, diff_link, change["to_path"])
            )
        elif change["type"] == "insert":
            change["content_url"] = _gen_content_url(
                revision, to_query_string, change["to_path"], snapshot_context
            )
            changes_msg.append(
                "new file: %s" % _gen_diff_link(i, diff_link, change["to_path"])
            )
        elif change["type"] == "delete":
            parent = archive.lookup_revision(revision["parents"][0])
            change["content_url"] = _gen_content_url(
                parent, from_query_string, change["from_path"], snapshot_context
            )
            changes_msg.append(
                "deleted: %s" % _gen_diff_link(i, diff_link, change["from_path"])
            )
        elif change["type"] == "rename":
            change["content_url"] = _gen_content_url(
                revision, to_query_string, change["to_path"], snapshot_context
            )
            link_text = change["from_path"] + " → " + change["to_path"]
            changes_msg.append(
                "renamed: %s" % _gen_diff_link(i, diff_link, link_text)
            )
    if not changes:
        changes_msg.append("No changes")
    return mark_safe("\n".join(changes_msg))


@browse_route(
    r"revision/(?P<sha1_git>[0-9a-f]+)/diff/",
    view_name="diff-revision",
    checksum_args=["sha1_git"],
)
def _revision_diff(request, sha1_git):
    """
    Browse internal endpoint to compute revision diff
    """
    revision = archive.lookup_revision(sha1_git)
    snapshot_context = None
    origin_url = request.GET.get("origin_url", None)
    if not origin_url:
        origin_url = request.GET.get("origin", None)
    timestamp = request.GET.get("timestamp", None)
    visit_id = request.GET.get("visit_id", None)
    if origin_url:
        snapshot_context = get_snapshot_context(
            origin_url=origin_url, timestamp=timestamp, visit_id=visit_id
        )

    changes = archive.diff_revision(sha1_git)
    changes_msg = _gen_revision_changes_list(revision, changes, snapshot_context)

    diff_data = {
        "total_nb_changes": len(changes),
        "changes": changes[:_max_displayed_file_diffs],
        "changes_msg": changes_msg,
    }
    return JsonResponse(diff_data)


NB_LOG_ENTRIES = 100


@browse_route(
    r"revision/(?P<sha1_git>[0-9a-f]+)/log/",
    view_name="browse-revision-log",
    checksum_args=["sha1_git"],
)
def revision_log_browse(request, sha1_git):
    """
    Django view that produces an HTML display of the history
    log for a revision identified by its id.
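    The log is paginated (``NB_LOG_ENTRIES`` revisions per page) and its
    ordering can be changed through the ``revs_ordering`` query parameter.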
The url that points to it is :http:get:`/browse/revision/(sha1_git)/log/` """ origin_url = request.GET.get("origin_url") snapshot_id = request.GET.get("snapshot") snapshot_context = None if origin_url or snapshot_id: snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=request.GET.get("timestamp"), visit_id=request.GET.get("visit_id"), branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=sha1_git, ) per_page = int(request.GET.get("per_page", NB_LOG_ENTRIES)) offset = int(request.GET.get("offset", 0)) revs_ordering = request.GET.get("revs_ordering", "committer_date") session_key = "rev_%s_log_ordering_%s" % (sha1_git, revs_ordering) rev_log_session = request.session.get(session_key, None) rev_log = [] revs_walker_state = None if rev_log_session: rev_log = rev_log_session["rev_log"] revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: revs_walker = archive.get_revisions_walker( revs_ordering, sha1_git, max_revs=offset + per_page + 1, state=revs_walker_state, ) rev_log += [rev["id"] for rev in revs_walker] revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] revision_log = archive.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, "revs_walker_state": revs_walker_state, } revs_ordering = request.GET.get("revs_ordering", "") prev_log_url = None if len(rev_log) > offset + per_page: prev_log_url = reverse( "browse-revision-log", url_args={"sha1_git": sha1_git}, query_params={ "per_page": per_page, "offset": offset + per_page, "revs_ordering": revs_ordering or None, }, ) next_log_url = None if offset != 0: next_log_url = reverse( "browse-revision-log", url_args={"sha1_git": sha1_git}, query_params={ "per_page": per_page, "offset": offset - per_page, "revs_ordering": revs_ordering or None, }, ) revision_log_data = format_log_entries(revision_log, per_page) swh_rev_id = str( CoreSWHID(object_type=ObjectType.REVISION, object_id=hash_to_bytes(sha1_git)) ) return render( request, "browse/revision-log.html", { "heading": "Revision history", "swh_object_id": swh_rev_id, "swh_object_name": "Revisions history", "swh_object_metadata": None, "revision_log": revision_log_data, "revs_ordering": revs_ordering, "next_log_url": next_log_url, "prev_log_url": prev_log_url, "breadcrumbs": None, "top_right_link": None, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": True, "swhids_info": None, }, ) @browse_route( r"revision/(?P[0-9a-f]+)/", view_name="browse-revision", checksum_args=["sha1_git"], ) def revision_browse(request, sha1_git): """ Django view that produces an HTML display of a revision identified by its id. The url that points to it is :http:get:`/browse/revision/(sha1_git)/`. 
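A note on the pagination above: the walked revision ids are cached in the session, and the prev/next links just slide an offset window over that cache. The slightly counter-intuitive part is that "previous" means older revisions, i.e. a larger offset. A toy illustration, assuming a pre-computed id list:

per_page, offset = 20, 40
rev_log = [f"rev{i:02d}" for i in range(100)]       # ids cached in the session
page = rev_log[offset:offset + per_page]            # entries rendered on this page
prev_available = len(rev_log) > offset + per_page   # "previous" -> offset + per_page
next_available = offset != 0                        # "next" -> offset - per_page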
""" revision = archive.lookup_revision(sha1_git) origin_info = None snapshot_context = None origin_url = request.GET.get("origin_url") if not origin_url: origin_url = request.GET.get("origin") timestamp = request.GET.get("timestamp") visit_id = request.GET.get("visit_id") snapshot_id = request.GET.get("snapshot_id") if not snapshot_id: snapshot_id = request.GET.get("snapshot") path = request.GET.get("path") dir_id = None dirs, files = [], [] content_data = {} if origin_url: try: snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=visit_id, branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=sha1_git, path=path, ) except NotFoundExc as e: raw_rev_url = reverse("browse-revision", url_args={"sha1_git": sha1_git}) error_message = ( "The Software Heritage archive has a revision " "with the hash you provided but the origin " "mentioned in your request appears broken: %s. " "Please check the URL and try again.\n\n" "Nevertheless, you can still browse the revision " "without origin information: %s" % (gen_link(origin_url), gen_link(raw_rev_url)) ) if str(e).startswith("Origin"): raise NotFoundExc(error_message) else: raise e origin_info = snapshot_context["origin_info"] snapshot_id = snapshot_context["snapshot_id"] elif snapshot_id: snapshot_context = get_snapshot_context(snapshot_id) error_info = {"status_code": 200, "description": None} if path: try: file_info = archive.lookup_directory_with_path(revision["directory"], path) if file_info["type"] == "dir": dir_id = file_info["target"] else: query_string = "sha1_git:" + file_info["target"] content_data = request_content(query_string) except NotFoundExc as e: error_info["status_code"] = 404 error_info["description"] = f"NotFoundExc: {str(e)}" else: dir_id = revision["directory"] if dir_id: path = "" if path is None else (path + "/") dirs, files = get_directory_entries(dir_id) revision_metadata = RevisionMetadata( object_type=REVISION, object_id=sha1_git, revision=sha1_git, author=revision["author"]["fullname"] if revision["author"] else "None", author_url=gen_person_mail_link(revision["author"]) if revision["author"] else "None", committer=revision["committer"]["fullname"] if revision["committer"] else "None", committer_url=gen_person_mail_link(revision["committer"]) if revision["committer"] else "None", committer_date=format_utc_iso_date(revision["committer_date"]), date=format_utc_iso_date(revision["date"]), directory=revision["directory"], merge=revision["merge"], metadata=json.dumps( revision["metadata"], sort_keys=True, indent=4, separators=(",", ": ") ), parents=revision["parents"], synthetic=revision["synthetic"], type=revision["type"], snapshot=snapshot_id, origin_url=origin_url, ) message_lines = ["None"] if revision["message"]: message_lines = revision["message"].split("\n") parents = [] for p in revision["parents"]: parent_url = gen_revision_url(p, snapshot_context) parents.append({"id": p, "url": parent_url}) path_info = gen_path_info(path) query_params = snapshot_context["query_params"] if snapshot_context else {} breadcrumbs = [] breadcrumbs.append( { "name": revision["directory"][:7], "url": reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ), } ) for pi in path_info: query_params["path"] = pi["path"] breadcrumbs.append( { "name": pi["name"], "url": reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ), } ) vault_cooking = { "directory_context": False, - 
"directory_id": None, + "directory_swhid": None, "revision_context": True, - "revision_id": sha1_git, + "revision_swhid": f"swh:1:rev:{sha1_git}", } swh_objects = [SWHObjectInfo(object_type=REVISION, object_id=sha1_git)] content = None content_size = None filename = None mimetype = None language = None readme_name = None readme_url = None readme_html = None readmes = {} extra_context = dict(revision_metadata) extra_context["path"] = f"/{path}" if path else None if content_data: breadcrumbs[-1]["url"] = None content_size = content_data["length"] mimetype = content_data["mimetype"] if content_data["raw_data"]: content_display_data = prepare_content_for_display( content_data["raw_data"], content_data["mimetype"], path ) content = content_display_data["content_data"] language = content_display_data["language"] mimetype = content_display_data["mimetype"] if path: filename = path_info[-1]["name"] query_params["filename"] = filename filepath = "/".join(pi["name"] for pi in path_info[:-1]) extra_context["path"] = f"/{filepath}/" if filepath else "/" extra_context["filename"] = filename top_right_link = { "url": reverse( "browse-content-raw", url_args={"query_string": query_string}, query_params={"filename": filename}, ), "icon": swh_object_icons["content"], "text": "Raw File", } swh_objects.append( SWHObjectInfo(object_type=CONTENT, object_id=file_info["target"]) ) else: for d in dirs: if d["type"] == "rev": d["url"] = reverse( "browse-revision", url_args={"sha1_git": d["target"]} ) else: query_params["path"] = path + d["name"] d["url"] = reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) for f in files: query_params["path"] = path + f["name"] f["url"] = reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) if f["length"] is not None: f["length"] = filesizeformat(f["length"]) if f["name"].lower().startswith("readme"): readmes[f["name"]] = f["checksums"]["sha1"] readme_name, readme_url, readme_html = get_readme_to_display(readmes) top_right_link = { "url": get_revision_log_url(sha1_git, snapshot_context), "icon": swh_object_icons["revisions history"], "text": "History", } vault_cooking["directory_context"] = True - vault_cooking["directory_id"] = dir_id + vault_cooking["directory_swhid"] = f"swh:1:dir:{dir_id}" swh_objects.append(SWHObjectInfo(object_type=DIRECTORY, object_id=dir_id)) query_params.pop("path", None) diff_revision_url = reverse( "diff-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) if snapshot_id: swh_objects.append(SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id)) swhids_info = get_swhids_info(swh_objects, snapshot_context, extra_context) heading = "Revision - %s - %s" % ( sha1_git[:7], textwrap.shorten(message_lines[0], width=70), ) if snapshot_context: context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading += " - %s" % context_found return render( request, "browse/revision.html", { "heading": heading, "swh_object_id": swhids_info[0]["swhid"], "swh_object_name": "Revision", "swh_object_metadata": revision_metadata, "message_header": message_lines[0], "message_body": "\n".join(message_lines[1:]), "parents": parents, "snapshot_context": snapshot_context, "dirs": dirs, "files": files, "content": content, "content_size": content_size, "max_content_size": content_display_max_size, "filename": filename, "encoding": content_data.get("encoding"), "mimetype": mimetype, "language": language, 
"readme_name": readme_name, "readme_url": readme_url, "readme_html": readme_html, "breadcrumbs": breadcrumbs, "top_right_link": top_right_link, "vault_cooking": vault_cooking, "diff_revision_url": diff_revision_url, "show_actions": True, "swhids_info": swhids_info, "error_code": error_info["status_code"], "error_message": http_status_code_message.get(error_info["status_code"]), "error_description": error_info["description"], }, status=error_info["status_code"], ) diff --git a/swh/web/common/archive.py b/swh/web/common/archive.py index a45ede34..34b356fc 100644 --- a/swh/web/common/archive.py +++ b/swh/web/common/archive.py @@ -1,1408 +1,1415 @@ # Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from collections import defaultdict import itertools import os import re from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union from urllib.parse import urlparse from swh.model import hashutil -from swh.model.identifiers import CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT +from swh.model.identifiers import ( + CONTENT, + DIRECTORY, + RELEASE, + REVISION, + SNAPSHOT, + CoreSWHID, +) from swh.model.model import OriginVisit, Revision from swh.storage.algos import diff, revisions_walker from swh.storage.algos.origin import origin_get_latest_visit_status from swh.storage.algos.snapshot import snapshot_get_latest, snapshot_resolve_alias from swh.vault.exc import NotFoundExc as VaultNotFoundExc from swh.web import config from swh.web.common import converters, query from swh.web.common.exc import BadInputExc, NotFoundExc from swh.web.common.typing import ( OriginInfo, OriginMetadataInfo, OriginVisitInfo, PagedResult, ) search = config.search() storage = config.storage() vault = config.vault() idx_storage = config.indexer_storage() counters = config.counters() MAX_LIMIT = 50 # Top limit the users can ask for def _first_element(lst): """Returns the first element in the provided list or None if it is empty or None""" return next(iter(lst or []), None) def lookup_multiple_hashes(hashes): """Lookup the passed hashes in a single DB connection, using batch processing. Args: An array of {filename: X, sha1: Y}, string X, hex sha1 string Y. Returns: The same array with elements updated with elem['found'] = true if the hash is present in storage, elem['found'] = false if not. """ hashlist = [hashutil.hash_to_bytes(elem["sha1"]) for elem in hashes] content_missing = storage.content_missing_per_sha1(hashlist) missing = [hashutil.hash_to_hex(x) for x in content_missing] for x in hashes: x.update({"found": True}) for h in hashes: if h["sha1"] in missing: h["found"] = False return hashes def lookup_expression(expression, last_sha1, per_page): """Lookup expression in raw content. Args: expression (str): An expression to lookup through raw indexed content last_sha1 (str): Last sha1 seen per_page (int): Number of results per page Yields: ctags whose content match the expression """ limit = min(per_page, MAX_LIMIT) ctags = idx_storage.content_ctags_search( expression, last_sha1=last_sha1, limit=limit ) for ctag in ctags: ctag = converters.from_swh(ctag, hashess={"id"}) ctag["sha1"] = ctag["id"] ctag.pop("id") yield ctag def lookup_hash(q: str) -> Dict[str, Any]: """Check if the storage contains a given content checksum and return it if found. 
    Args:
        q: query string of the form <hash_algo:hash>

    Returns:
        Dict with key found containing the hash info if the
        hash is present, None if not.

    """
    algo, hash_ = query.parse_hash(q)
    found = _first_element(storage.content_find({algo: hash_}))
    if found:
        content = converters.from_content(found.to_dict())
    else:
        content = None
    return {"found": content, "algo": algo}


def search_hash(q: str) -> Dict[str, bool]:
    """Search storage for a given content checksum.

    Args:
        q: query string of the form <hash_algo:hash>

    Returns:
        Dict with key found set to True or False, according to
        whether the checksum is present or not

    """
    algo, hash_ = query.parse_hash(q)
    found = _first_element(storage.content_find({algo: hash_}))
    return {"found": found is not None}


def _lookup_content_sha1(q: str) -> Optional[bytes]:
    """Given a possible input, query for the content's sha1.

    Args:
        q: query string of the form <hash_algo:hash>

    Returns:
        binary sha1 if found or None

    """
    algo, hash_ = query.parse_hash(q)
    if algo != "sha1":
        hashes = _first_element(storage.content_find({algo: hash_}))
        if not hashes:
            return None
        return hashes.sha1
    return hash_


def lookup_content_ctags(q):
    """Return ctags information from a specified content.

    Args:
        q: query string of the form <hash_algo:hash>

    Yields:
        ctags information (dict) if the content is found.

    """
    sha1 = _lookup_content_sha1(q)
    if not sha1:
        return None
    ctags = list(idx_storage.content_ctags_get([sha1]))
    if not ctags:
        return None
    for ctag in ctags:
        yield converters.from_swh(ctag, hashess={"id"})


def lookup_content_filetype(q):
    """Return filetype information from a specified content.

    Args:
        q: query string of the form <hash_algo:hash>

    Returns:
        filetype information (dict) if the content is found.

    """
    sha1 = _lookup_content_sha1(q)
    if not sha1:
        return None
    filetype = _first_element(list(idx_storage.content_mimetype_get([sha1])))
    if not filetype:
        return None
    return converters.from_filetype(filetype.to_dict())


def lookup_content_language(q):
    """Always returns None.

    This used to return language information from a specified
    content, but this is currently disabled.

    Args:
        q: query string of the form <hash_algo:hash>

    Returns:
        None, unconditionally.

    """
    return None


def lookup_content_license(q):
    """Return license information from a specified content.

    Args:
        q: query string of the form <hash_algo:hash>

    Returns:
        license information (dict) if the content is found.

    """
    sha1 = _lookup_content_sha1(q)
    if not sha1:
        return None
    licenses = list(idx_storage.content_fossology_license_get([sha1]))
    if not licenses:
        return None
    license_dicts = [license.to_dict() for license in licenses]
    for license_dict in license_dicts:
        del license_dict["id"]
    lic = {
        "id": sha1,
        "facts": license_dicts,
    }
    return converters.from_swh(lic, hashess={"id"})


def lookup_origin(origin: OriginInfo) -> OriginInfo:
    """Return information about the origin matching dict origin.

    Args:
        origin: origin's dict with 'url' key

    Returns:
        origin information as dict.

    """
    origin_urls = [origin["url"]]
    if origin["url"]:
        # handle case when user provided an origin url with a trailing
        # slash while the url in storage does not have it (e.g. GitHub)
        if origin["url"].endswith("/"):
            origin_urls.append(origin["url"][:-1])
        # handle case when user provided an origin url without a trailing
        # slash while the url in storage has it (e.g.
Debian source package) else: origin_urls.append(f"{origin['url']}/") try: # handle case where the "://" character sequence was mangled into ":/" parsed_url = urlparse(origin["url"]) if ( parsed_url.scheme and not parsed_url.netloc and origin["url"].startswith(f"{parsed_url.scheme}:/") and not origin["url"].startswith(f"{parsed_url.scheme}://") ): origin_urls.append( origin["url"].replace( f"{parsed_url.scheme}:/", f"{parsed_url.scheme}://" ) ) except Exception: pass origins = [o for o in storage.origin_get(origin_urls) if o is not None] if not origins: msg = "Origin with url %s not found!" % origin["url"] raise NotFoundExc(msg) return converters.from_origin(origins[0].to_dict()) def lookup_origins( page_token: Optional[str], limit: int = 100 ) -> PagedResult[OriginInfo]: """Get list of archived software origins in a paginated way. Origins are sorted by id before returning them Args: origin_from (int): The minimum id of the origins to return origin_count (int): The maximum number of origins to return Returns: Page of OriginInfo """ page = storage.origin_list(page_token=page_token, limit=limit) return PagedResult( [converters.from_origin(o.to_dict()) for o in page.results], next_page_token=page.next_page_token, ) def search_origin( url_pattern: str, use_ql: bool = False, limit: int = 50, with_visit: bool = False, visit_types: Optional[List[str]] = None, page_token: Optional[str] = None, ) -> Tuple[List[OriginInfo], Optional[str]]: """Search for origins whose urls contain a provided string pattern or match a provided regular expression. Args: url_pattern: the string pattern to search for in origin urls use_ql: whether to use swh search query language or not limit: the maximum number of found origins to return with_visit: Whether origins with no visit are to be filtered out visit_types: Only origins having any of the provided visit types (e.g. git, svn, pypi) will be returned page_token: opaque string used to get the next results of a search Returns: list of origin information as dict. """ if page_token: assert isinstance(page_token, str) if search: if config.get_config()["search_config"].get("enable_ql") and use_ql: page_result = search.origin_search( query=url_pattern, page_token=page_token, with_visit=with_visit, visit_types=visit_types, limit=limit, ) else: page_result = search.origin_search( url_pattern=url_pattern, page_token=page_token, with_visit=with_visit, visit_types=visit_types, limit=limit, ) origins = [converters.from_origin(ori_dict) for ori_dict in page_result.results] else: # Fallback to swh-storage if swh-search is not configured search_words = [re.escape(word) for word in url_pattern.split()] if len(search_words) >= 7: url_pattern = ".*".join(search_words) else: pattern_parts = [] for permut in itertools.permutations(search_words): pattern_parts.append(".*".join(permut)) url_pattern = "|".join(pattern_parts) page_result = storage.origin_search( url_pattern, page_token=page_token, with_visit=with_visit, limit=limit, visit_types=visit_types, regexp=True, ) origins = [converters.from_origin(ori.to_dict()) for ori in page_result.results] return (origins, page_result.next_page_token) def search_origin_metadata( fulltext: str, limit: int = 50 ) -> Iterable[OriginMetadataInfo]: """Search for origins whose metadata match a provided string pattern. 
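To make the swh-storage fallback above concrete: for fewer than seven search words, the regexp matches the words in any order by joining every permutation with ".*" and or-ing the results. A small sketch:

import itertools
import re

search_words = [re.escape(word) for word in "gnu hello".split()]
pattern_parts = [".*".join(p) for p in itertools.permutations(search_words)]
url_pattern = "|".join(pattern_parts)
assert url_pattern == "gnu.*hello|hello.*gnu"

With seven or more words, the permutation count would explode, which is why the code falls back to a single in-order ".*" join in that case.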
Args: fulltext: the string pattern to search for in origin metadata limit: the maximum number of found origins to return Returns: Iterable of origin metadata information for existing origins """ results = [] if search and config.get_config()["search_config"]["backend"] == "swh-search": page_result = search.origin_search(metadata_pattern=fulltext, limit=limit,) matches = idx_storage.origin_intrinsic_metadata_get( [r["url"] for r in page_result.results] ) else: matches = idx_storage.origin_intrinsic_metadata_search_fulltext( conjunction=[fulltext], limit=limit ) matches = [match.to_dict() for match in matches] origins = storage.origin_get([match["id"] for match in matches]) for origin, match in zip(origins, matches): if not origin: continue match["from_revision"] = hashutil.hash_to_hex(match["from_revision"]) del match["id"] results.append(OriginMetadataInfo(url=origin.url, metadata=match)) return results def lookup_origin_intrinsic_metadata(origin_url: str) -> Dict[str, Any]: """Return intrinsic metadata for origin whose origin matches given origin. Args: origin_url: origin url Raises: NotFoundExc when the origin is not found Returns: origin metadata. """ origins = [origin_url] origin_info = storage.origin_get(origins)[0] if not origin_info: raise NotFoundExc(f"Origin with url {origin_url} not found!") match = _first_element(idx_storage.origin_intrinsic_metadata_get(origins)) result = {} if match: result = match.metadata return result def _to_sha1_bin(sha1_hex): _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_hex, ["sha1"], "Only sha1_git is supported." # HACK: sha1_git really ) return sha1_git_bin def _check_directory_exists(sha1_git, sha1_git_bin): if len(list(storage.directory_missing([sha1_git_bin]))): raise NotFoundExc("Directory with sha1_git %s not found" % sha1_git) def lookup_directory(sha1_git): """Return information about the directory with id sha1_git. Args: sha1_git as string Returns: directory information as dict. """ empty_dir_sha1 = "4b825dc642cb6eb9a060e54bf8d69288fbee4904" if sha1_git == empty_dir_sha1: return [] sha1_git_bin = _to_sha1_bin(sha1_git) _check_directory_exists(sha1_git, sha1_git_bin) directory_entries = storage.directory_ls(sha1_git_bin) return map(converters.from_directory_entry, directory_entries) def lookup_directory_with_path(sha1_git: str, path: str) -> Dict[str, Any]: """Return directory information for entry with specified path w.r.t. root directory pointed by sha1_git Args: sha1_git: sha1_git corresponding to the directory to which we append paths to (hopefully) find the entry path: the relative path to the entry starting from the root directory pointed by sha1_git Returns: Directory entry information as dict. Raises: NotFoundExc if the directory entry is not found """ sha1_git_bin = _to_sha1_bin(sha1_git) _check_directory_exists(sha1_git, sha1_git_bin) paths = path.strip(os.path.sep).split(os.path.sep) queried_dir = storage.directory_entry_get_by_path( sha1_git_bin, [p.encode("utf-8") for p in paths] ) if not queried_dir: raise NotFoundExc( f"Directory entry with path {path} from root directory {sha1_git} not found" ) return converters.from_directory_entry(queried_dir) def lookup_release(release_sha1_git: str) -> Dict[str, Any]: """Return information about the release with sha1 release_sha1_git. Args: release_sha1_git: The release's sha1 as hexadecimal Returns: Release information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. 
""" sha1_git_bin = _to_sha1_bin(release_sha1_git) release = _first_element(storage.release_get([sha1_git_bin])) if not release: raise NotFoundExc(f"Release with sha1_git {release_sha1_git} not found.") return converters.from_release(release) def lookup_release_multiple(sha1_git_list) -> Iterator[Optional[Dict[str, Any]]]: """Return information about the releases identified with their sha1_git identifiers. Args: sha1_git_list: A list of release sha1_git identifiers Returns: Iterator of Release metadata information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ sha1_bin_list = [_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list] releases = storage.release_get(sha1_bin_list) for r in releases: if r is not None: yield converters.from_release(r) else: yield None def lookup_revision(rev_sha1_git) -> Dict[str, Any]: """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. NotFoundExc if there is no revision with the provided sha1_git. """ sha1_git_bin = _to_sha1_bin(rev_sha1_git) revision = storage.revision_get([sha1_git_bin])[0] if not revision: raise NotFoundExc(f"Revision with sha1_git {rev_sha1_git} not found.") return converters.from_revision(revision) def lookup_revision_multiple(sha1_git_list) -> Iterator[Optional[Dict[str, Any]]]: """Return information about the revisions identified with their sha1_git identifiers. Args: sha1_git_list: A list of revision sha1_git identifiers Yields: revision information as dict if the revision exists, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ sha1_bin_list = [_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list] revisions = storage.revision_get(sha1_bin_list) for revision in revisions: if revision is not None: yield converters.from_revision(revision) else: yield None def lookup_revision_message(rev_sha1_git) -> Dict[str, bytes]: """Return the raw message of the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Decoded revision message as dict {'message': } Raises: ValueError if the identifier provided is not of sha1 nature. 
NotFoundExc if the revision is not found, or if it has no message """ sha1_git_bin = _to_sha1_bin(rev_sha1_git) revision = storage.revision_get([sha1_git_bin])[0] if not revision: raise NotFoundExc(f"Revision with sha1_git {rev_sha1_git} not found.") if not revision.message: raise NotFoundExc(f"No message for revision with sha1_git {rev_sha1_git}.") return {"message": revision.message} def _lookup_revision_id_by(origin, branch_name, timestamp): def _get_snapshot_branch(snapshot, branch_name): snapshot = lookup_snapshot( visit["snapshot"], branches_from=branch_name, branches_count=10, branch_name_exclude_prefix=None, ) branch = None if branch_name in snapshot["branches"]: branch = snapshot["branches"][branch_name] return branch if isinstance(origin, int): origin = {"id": origin} elif isinstance(origin, str): origin = {"url": origin} else: raise TypeError('"origin" must be an int or a string.') from swh.web.common.origin_visits import get_origin_visit visit = get_origin_visit(origin, visit_ts=timestamp) branch = _get_snapshot_branch(visit["snapshot"], branch_name) rev_id = None if branch and branch["target_type"] == "revision": rev_id = branch["target"] elif branch and branch["target_type"] == "alias": branch = _get_snapshot_branch(visit["snapshot"], branch["target"]) if branch and branch["target_type"] == "revision": rev_id = branch["target"] if not rev_id: raise NotFoundExc( "Revision for origin %s and branch %s not found." % (origin.get("url"), branch_name) ) return rev_id def lookup_revision_by(origin, branch_name="HEAD", timestamp=None): """Lookup revision by origin, snapshot branch name and visit timestamp. If branch_name is not provided, lookup using 'HEAD' as default. If timestamp is not provided, use the most recent. Args: origin (Union[int,str]): origin of the revision branch_name (str): snapshot branch name timestamp (str/int): origin visit time frame Returns: dict: The revision matching the criterions Raises: NotFoundExc if no revision corresponds to the criterion """ rev_id = _lookup_revision_id_by(origin, branch_name, timestamp) return lookup_revision(rev_id) def lookup_revision_log(rev_sha1_git, limit): """Lookup revision log by revision id. Args: rev_sha1_git (str): The revision's sha1 as hexadecimal limit (int): the maximum number of revisions returned Returns: list: Revision log as list of revision dicts Raises: ValueError: if the identifier provided is not of sha1 nature. swh.web.common.exc.NotFoundExc: if there is no revision with the provided sha1_git. """ lookup_revision(rev_sha1_git) sha1_git_bin = _to_sha1_bin(rev_sha1_git) revision_entries = storage.revision_log([sha1_git_bin], limit) return map(converters.from_revision, revision_entries) def lookup_revision_log_by(origin, branch_name, timestamp, limit): """Lookup revision by origin, snapshot branch name and visit timestamp. Args: origin (Union[int,str]): origin of the revision branch_name (str): snapshot branch timestamp (str/int): origin visit time frame limit (int): the maximum number of revisions returned Returns: list: Revision log as list of revision dicts Raises: swh.web.common.exc.NotFoundExc: if no revision corresponds to the criterion """ rev_id = _lookup_revision_id_by(origin, branch_name, timestamp) return lookup_revision_log(rev_id, limit) def lookup_revision_with_context_by( origin, branch_name, timestamp, sha1_git, limit=100 ): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. 
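To make the alias handling in _lookup_revision_id_by above concrete, here is a toy snapshot fragment (branch names and ids are invented for illustration); note that exactly one level of alias indirection is followed:

branches = {
    "HEAD": {"target_type": "alias", "target": "refs/heads/main"},
    "refs/heads/main": {"target_type": "revision", "target": "f" * 40},
}

branch = branches.get("HEAD")
rev_id = None
if branch and branch["target_type"] == "revision":
    rev_id = branch["target"]
elif branch and branch["target_type"] == "alias":
    # a single hop through the alias, exactly as above
    branch = branches.get(branch["target"])
    if branch and branch["target_type"] == "revision":
        rev_id = branch["target"]
assert rev_id == "f" * 40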
sha1_git_root being resolved through the lookup of a revision by origin, branch_name and ts. In other words, sha1_git is an ancestor of sha1_git_root. Args: - origin: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. - sha1_git: one of sha1_git_root's ancestors. - limit: limit the lookup to 100 revisions back. Returns: Pair of (root_revision, revision). Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root. """ rev_root_id = _lookup_revision_id_by(origin, branch_name, timestamp) rev_root_id_bin = hashutil.hash_to_bytes(rev_root_id) rev_root = storage.revision_get([rev_root_id_bin])[0] return ( converters.from_revision(rev_root) if rev_root else None, lookup_revision_with_context(rev_root, sha1_git, limit), ) def lookup_revision_with_context( sha1_git_root: Union[str, Dict[str, Any], Revision], sha1_git: str, limit: int = 100 ) -> Dict[str, Any]: """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. In other words, sha1_git is an ancestor of sha1_git_root. Args: sha1_git_root: latest revision. The type is either a sha1 (as an hex string) or a non converted dict. sha1_git: one of sha1_git_root's ancestors limit: limit the lookup to 100 revisions back Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root """ sha1_git_bin = _to_sha1_bin(sha1_git) revision = storage.revision_get([sha1_git_bin])[0] if not revision: raise NotFoundExc(f"Revision {sha1_git} not found") if isinstance(sha1_git_root, str): sha1_git_root_bin = _to_sha1_bin(sha1_git_root) revision_root = storage.revision_get([sha1_git_root_bin])[0] if not revision_root: raise NotFoundExc(f"Revision root {sha1_git_root} not found") elif isinstance(sha1_git_root, Revision): sha1_git_root_bin = sha1_git_root.id else: sha1_git_root_bin = sha1_git_root["id"] revision_log = storage.revision_log([sha1_git_root_bin], limit) parents: Dict[str, List[str]] = {} children = defaultdict(list) for rev in revision_log: rev_id = rev["id"] parents[rev_id] = [] for parent_id in rev["parents"]: parents[rev_id].append(parent_id) children[parent_id].append(rev_id) if revision.id not in parents: raise NotFoundExc(f"Revision {sha1_git} is not an ancestor of {sha1_git_root}") revision_d = revision.to_dict() revision_d["children"] = children[revision.id] return converters.from_revision(revision_d) def lookup_directory_with_revision(sha1_git, dir_path=None, with_data=False): """Return information on directory pointed by revision with sha1_git. If dir_path is not provided, display top level directory. Otherwise, display the directory pointed by dir_path (if it exists). Args: sha1_git: revision's hash. dir_path: optional directory pointed to by that revision. with_data: boolean that indicates to retrieve the raw data if the path resolves to a content. Default to False (for the api) Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc either if the revision is not found or the path referenced does not exist. 
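The ancestor test in lookup_revision_with_context above reduces to a membership check in the parents map built while walking the log, with the children map recording the reverse edges; a toy sketch on a three-revision chain:

from collections import defaultdict

revision_log = [                      # toy log, newest first
    {"id": "c", "parents": ["b"]},
    {"id": "b", "parents": ["a"]},
    {"id": "a", "parents": []},
]
parents, children = {}, defaultdict(list)
for rev in revision_log:
    parents[rev["id"]] = list(rev["parents"])
    for parent_id in rev["parents"]:
        children[parent_id].append(rev["id"])

assert "a" in parents                 # "a" is an ancestor of the root "c"
assert children["a"] == ["b"]         # the child edge leading back toward "c"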
NotImplementedError in case of dir_path exists but do not reference a type 'dir' or 'file'. """ sha1_git_bin = _to_sha1_bin(sha1_git) revision = storage.revision_get([sha1_git_bin])[0] if not revision: raise NotFoundExc(f"Revision {sha1_git} not found") dir_sha1_git_bin = revision.directory if dir_path: paths = dir_path.strip(os.path.sep).split(os.path.sep) entity = storage.directory_entry_get_by_path( dir_sha1_git_bin, list(map(lambda p: p.encode("utf-8"), paths)) ) if not entity: raise NotFoundExc( "Directory or File '%s' pointed to by revision %s not found" % (dir_path, sha1_git) ) else: entity = {"type": "dir", "target": dir_sha1_git_bin} if entity["type"] == "dir": directory_entries = storage.directory_ls(entity["target"]) or [] return { "type": "dir", "path": "." if not dir_path else dir_path, "revision": sha1_git, "content": list(map(converters.from_directory_entry, directory_entries)), } elif entity["type"] == "file": # content content = _first_element(storage.content_find({"sha1_git": entity["target"]})) if not content: raise NotFoundExc(f"Content not found for revision {sha1_git}") content_d = content.to_dict() if with_data: data = storage.content_get_data(content.sha1) if data: content_d["data"] = data return { "type": "file", "path": "." if not dir_path else dir_path, "revision": sha1_git, "content": converters.from_content(content_d), } elif entity["type"] == "rev": # revision revision = storage.revision_get([entity["target"]])[0] return { "type": "rev", "path": "." if not dir_path else dir_path, "revision": sha1_git, "content": converters.from_revision(revision) if revision else None, } else: raise NotImplementedError("Entity of type %s not implemented." % entity["type"]) def lookup_content(q: str) -> Dict[str, Any]: """Lookup the content designed by q. Args: q: The release's sha1 as hexadecimal Raises: NotFoundExc if the requested content is not found """ algo, hash_ = query.parse_hash(q) c = _first_element(storage.content_find({algo: hash_})) if not c: hhex = hashutil.hash_to_hex(hash_) raise NotFoundExc(f"Content with {algo} checksum equals to {hhex} not found!") return converters.from_content(c.to_dict()) def lookup_content_raw(q: str) -> Dict[str, Any]: """Lookup the content defined by q. Args: q: query string of the form Returns: dict with 'sha1' and 'data' keys. data representing its raw data decoded. Raises: NotFoundExc if the requested content is not found or if the content bytes are not available in the storage """ c = lookup_content(q) content_sha1_bytes = hashutil.hash_to_bytes(c["checksums"]["sha1"]) content_data = storage.content_get_data(content_sha1_bytes) if content_data is None: algo, hash_ = query.parse_hash(q) raise NotFoundExc( f"Bytes of content with {algo} checksum equals " f"to {hashutil.hash_to_hex(hash_)} are not available!" ) return converters.from_content({"sha1": content_sha1_bytes, "data": content_data}) def stat_counters(): """Return the stat counters for Software Heritage Returns: A dict mapping textual labels to integer values. """ res = {} if counters and config.get_config()["counters_backend"] == "swh-counters": res = counters.get_counts( ["origin", "revision", "content", "directory", "release", "person"] ) else: res = storage.stat_counters() return res def _lookup_origin_visits( origin_url: str, last_visit: Optional[int] = None, limit: int = 10 ) -> Iterator[OriginVisit]: """Yields the origin origins' visits. 
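The content lookups above accept any algo:hex pair understood by query.parse_hash; a hypothetical usage sketch (the checksum is a placeholder and must reference an archived content for the calls to succeed):

info = lookup_content("sha1_git:" + "0" * 40)   # placeholder checksum
raw = lookup_content_raw("sha1:" + info["checksums"]["sha1"])
assert raw["data"] is not None                  # bytes via storage.content_get_data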
Args: origin_url (str): origin to list visits for last_visit (int): last visit to lookup from limit (int): Number of elements max to display Yields: OriginVisit for that origin """ limit = min(limit, MAX_LIMIT) page_token: Optional[str] if last_visit is not None: page_token = str(last_visit) else: page_token = None visit_page = storage.origin_visit_get( origin_url, page_token=page_token, limit=limit ) yield from visit_page.results def lookup_origin_visits( origin: str, last_visit: Optional[int] = None, per_page: int = 10 ) -> Iterator[OriginVisitInfo]: """Yields the origin origins' visits. Args: origin: origin to list visits for Yields: Dictionaries of origin_visit for that origin """ for visit in _lookup_origin_visits(origin, last_visit=last_visit, limit=per_page): visit_status = storage.origin_visit_status_get_latest(origin, visit.visit) yield converters.from_origin_visit( {**visit_status.to_dict(), "type": visit.type} ) def lookup_origin_visit_latest( origin_url: str, require_snapshot: bool = False, type: Optional[str] = None, allowed_statuses: Optional[List[str]] = None, ) -> Optional[OriginVisitInfo]: """Return the origin's latest visit Args: origin_url: origin to list visits for type: Optional visit type to filter on (e.g git, tar, dsc, svn, hg, npm, pypi, ...) allowed_statuses: list of visit statuses considered to find the latest visit. For instance, ``allowed_statuses=['full']`` will only consider visits that have successfully run to completion. require_snapshot: filter out origins without a snapshot Returns: The origin visit info as dict if found """ visit_status = origin_get_latest_visit_status( storage, origin_url, type=type, allowed_statuses=allowed_statuses, require_snapshot=require_snapshot, ) return ( converters.from_origin_visit(visit_status.to_dict()) if visit_status else None ) def lookup_origin_visit(origin_url: str, visit_id: int) -> OriginVisitInfo: """Return information about visit visit_id with origin origin. Args: origin: origin concerned by the visit visit_id: the visit identifier to lookup Yields: The dict origin_visit concerned """ visit = storage.origin_visit_get_by(origin_url, visit_id) visit_status = storage.origin_visit_status_get_latest(origin_url, visit_id) if not visit: raise NotFoundExc( f"Origin {origin_url} or its visit with id {visit_id} not found!" 
) return converters.from_origin_visit({**visit_status.to_dict(), "type": visit.type}) def lookup_snapshot_sizes( snapshot_id: str, branch_name_exclude_prefix: Optional[str] = "refs/pull/" ) -> Dict[str, int]: """Count the number of branches in the snapshot with the given id Args: snapshot_id (str): sha1 identifier of the snapshot Returns: dict: A dict whose keys are the target types of branches and values their corresponding amount """ snapshot_id_bin = _to_sha1_bin(snapshot_id) snapshot_sizes = dict.fromkeys(("alias", "release", "revision"), 0) branch_counts = storage.snapshot_count_branches( snapshot_id_bin, branch_name_exclude_prefix.encode() if branch_name_exclude_prefix else None, ) # remove possible None key returned by snapshot_count_branches # when null branches are present in the snapshot branch_counts.pop(None, None) snapshot_sizes.update(branch_counts) return snapshot_sizes def lookup_snapshot( snapshot_id: str, branches_from: str = "", branches_count: int = 1000, target_types: Optional[List[str]] = None, branch_name_include_substring: Optional[str] = None, branch_name_exclude_prefix: Optional[str] = "refs/pull/", ) -> Dict[str, Any]: """Return information about a snapshot, aka the list of named branches found during a specific visit of an origin. Args: snapshot_id: sha1 identifier of the snapshot branches_from: optional parameter used to skip branches whose name is lesser than it before returning them branches_count: optional parameter used to restrain the amount of returned branches target_types: optional parameter used to filter the target types of branch to return (possible values that can be contained in that list are `'content', 'directory', 'revision', 'release', 'snapshot', 'alias'`) branch_name_include_substring: if provided, only return branches whose name contains given substring branch_name_exclude_prefix: if provided, do not return branches whose name starts with given pattern Raises: NotFoundExc if the given snapshot_id is missing Returns: A dict filled with the snapshot content. """ snapshot_id_bin = _to_sha1_bin(snapshot_id) if storage.snapshot_missing([snapshot_id_bin]): raise NotFoundExc(f"Snapshot with id {snapshot_id} not found!") partial_branches = storage.snapshot_get_branches( snapshot_id_bin, branches_from.encode(), branches_count, target_types, branch_name_include_substring.encode() if branch_name_include_substring else None, branch_name_exclude_prefix.encode() if branch_name_exclude_prefix else None, ) return ( converters.from_partial_branches(partial_branches) if partial_branches else None ) def lookup_latest_origin_snapshot( origin: str, allowed_statuses: List[str] = None ) -> Optional[Dict[str, Any]]: """Return information about the latest snapshot of an origin. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. Args: origin: URL or integer identifier of the origin allowed_statuses: list of visit statuses considered to find the latest snapshot for the visit. For instance, ``allowed_statuses=['full']`` will only consider visits that have successfully run to completion. Returns: A dict filled with the snapshot content. """ snp = snapshot_get_latest( storage, origin, allowed_statuses=allowed_statuses, branches_count=1000 ) return converters.from_snapshot(snp.to_dict()) if snp is not None else None def lookup_snapshot_alias( snapshot_id: str, alias_name: str ) -> Optional[Dict[str, Any]]: """Try to resolve a branch alias in a snapshot. 
    Args:
        snapshot_id: hexadecimal representation of a snapshot id
        alias_name: name of the branch alias to resolve

    Returns:
        Target branch information or None if the alias does not exist
        or targets a dangling branch.

    """
    resolved_alias = snapshot_resolve_alias(
        storage, _to_sha1_bin(snapshot_id), alias_name.encode()
    )
    return (
        converters.from_swh(resolved_alias.to_dict(), hashess={"target"})
        if resolved_alias is not None
        else None
    )


def lookup_revision_through(revision, limit=100):
    """Retrieve a revision from the criteria stored in the revision dictionary.

    Args:
        revision: Dictionary of criteria to lookup the revision with.
            The supported combinations of values are:

            - origin_url, branch_name, ts, sha1_git
            - origin_url, branch_name, ts
            - sha1_git_root, sha1_git
            - sha1_git

    Returns:
        None if the revision is not found or the actual revision.

    """
    if (
        "origin_url" in revision
        and "branch_name" in revision
        and "ts" in revision
        and "sha1_git" in revision
    ):
        return lookup_revision_with_context_by(
            revision["origin_url"],
            revision["branch_name"],
            revision["ts"],
            revision["sha1_git"],
            limit,
        )
    if "origin_url" in revision and "branch_name" in revision and "ts" in revision:
        return lookup_revision_by(
            revision["origin_url"], revision["branch_name"], revision["ts"]
        )
    if "sha1_git_root" in revision and "sha1_git" in revision:
        return lookup_revision_with_context(
            revision["sha1_git_root"], revision["sha1_git"], limit
        )
    if "sha1_git" in revision:
        return lookup_revision(revision["sha1_git"])

    # this should not happen
    raise NotImplementedError("Should not happen!")


def lookup_directory_through_revision(revision, path=None, limit=100, with_data=False):
    """Retrieve the directory information from the revision.

    Args:
        revision: dictionary of criteria representing a revision to lookup
        path: directory's path to lookup.
        limit: optional query parameter to limit the revisions log
            (default to 100). For now, note that this limit could impede
            the transitivity conclusion about sha1_git not being an
            ancestor of.
        with_data: indicate to retrieve the content's raw data if path
            resolves to a content.

    Returns:
        The directory pointed to by the revision criteria at path.

    """
    rev = lookup_revision_through(revision, limit)
    if not rev:
        raise NotFoundExc("Revision with criterion %s not found!" % revision)
    return (rev["id"], lookup_directory_with_revision(rev["id"], path, with_data))


-def _vault_request(vault_fn, *args, **kwargs):
+def _vault_request(vault_fn, bundle_type: str, swhid: CoreSWHID, **kwargs):
     try:
-        return vault_fn(*args, **kwargs)
+        return vault_fn(bundle_type, swhid, **kwargs)
     except VaultNotFoundExc:
         return None


-def vault_cook(obj_type, obj_id, email=None):
+def vault_cook(bundle_type: str, swhid: CoreSWHID, email=None):
     """Cook a vault bundle.
     """
-    return _vault_request(vault.cook, obj_type, obj_id, email=email)
+    return _vault_request(vault.cook, bundle_type, swhid, email=email)


-def vault_fetch(obj_type, obj_id):
+def vault_fetch(bundle_type: str, swhid: CoreSWHID):
     """Fetch a vault bundle.
     """
-    return _vault_request(vault.fetch, obj_type, obj_id)
+    return _vault_request(vault.fetch, bundle_type, swhid)


-def vault_progress(obj_type, obj_id):
+def vault_progress(bundle_type: str, swhid: CoreSWHID):
     """Get the current progress of a vault bundle.
     """
-    return _vault_request(vault.progress, obj_type, obj_id)
+    return _vault_request(vault.progress, bundle_type, swhid)


def diff_revision(rev_id):
    """Get the list of file changes (insertion / deletion / modification /
    renaming) for a particular revision.
""" rev_sha1_git_bin = _to_sha1_bin(rev_id) changes = diff.diff_revision(storage, rev_sha1_git_bin, track_renaming=True) for change in changes: change["from"] = converters.from_directory_entry(change["from"]) change["to"] = converters.from_directory_entry(change["to"]) if change["from_path"]: change["from_path"] = change["from_path"].decode("utf-8") if change["to_path"]: change["to_path"] = change["to_path"].decode("utf-8") return changes class _RevisionsWalkerProxy(object): """ Proxy class wrapping a revisions walker iterator from swh-storage and performing needed conversions. """ def __init__(self, rev_walker_type, rev_start, *args, **kwargs): rev_start_bin = hashutil.hash_to_bytes(rev_start) self.revisions_walker = revisions_walker.get_revisions_walker( rev_walker_type, storage, rev_start_bin, *args, **kwargs ) def export_state(self): return self.revisions_walker.export_state() def __next__(self): return converters.from_revision(next(self.revisions_walker)) def __iter__(self): return self def get_revisions_walker(rev_walker_type, rev_start, *args, **kwargs): """ Utility function to instantiate a revisions walker of a given type, see :mod:`swh.storage.algos.revisions_walker`. Args: rev_walker_type (str): the type of revisions walker to return, possible values are: ``committer_date``, ``dfs``, ``dfs_post``, ``bfs`` and ``path`` rev_start (str): hexadecimal representation of a revision identifier args (list): position arguments to pass to the revisions walker constructor kwargs (dict): keyword arguments to pass to the revisions walker constructor """ # first check if the provided revision is valid lookup_revision(rev_start) return _RevisionsWalkerProxy(rev_walker_type, rev_start, *args, **kwargs) def lookup_object(object_type: str, object_id: str) -> Dict[str, Any]: """ Utility function for looking up an object in the archive by its type and id. Args: object_type (str): the type of object to lookup, either *content*, *directory*, *release*, *revision* or *snapshot* object_id (str): the *sha1_git* checksum identifier in hexadecimal form of the object to lookup Returns: Dict[str, Any]: A dictionary describing the object or a list of dictionary for the directory object type. Raises: swh.web.common.exc.NotFoundExc: if the object could not be found in the archive BadInputExc: if the object identifier is invalid """ if object_type == CONTENT: return lookup_content(f"sha1_git:{object_id}") elif object_type == DIRECTORY: return {"id": object_id, "content": list(lookup_directory(object_id))} elif object_type == RELEASE: return lookup_release(object_id) elif object_type == REVISION: return lookup_revision(object_id) elif object_type == SNAPSHOT: return lookup_snapshot(object_id) raise BadInputExc( ( "Invalid swh object type! Valid types are " f"{CONTENT}, {DIRECTORY}, {RELEASE} " f"{REVISION} or {SNAPSHOT}." ) ) def lookup_missing_hashes(grouped_swhids: Dict[str, List[bytes]]) -> Set[str]: """Lookup missing Software Heritage persistent identifier hash, using batch processing. 
Args: A dictionary with: keys: object types values: object hashes Returns: A set(hexadecimal) of the hashes not found in the storage """ missing_hashes = [] for obj_type, obj_ids in grouped_swhids.items(): if obj_type == CONTENT: missing_hashes.append(storage.content_missing_per_sha1_git(obj_ids)) elif obj_type == DIRECTORY: missing_hashes.append(storage.directory_missing(obj_ids)) elif obj_type == REVISION: missing_hashes.append(storage.revision_missing(obj_ids)) elif obj_type == RELEASE: missing_hashes.append(storage.release_missing(obj_ids)) elif obj_type == SNAPSHOT: missing_hashes.append(storage.snapshot_missing(obj_ids)) missing = set( map(lambda x: hashutil.hash_to_hex(x), itertools.chain(*missing_hashes)) ) return missing def lookup_origins_by_sha1s(sha1s: List[str]) -> Iterator[Optional[OriginInfo]]: """Lookup origins from the sha1 hash values of their URLs. Args: sha1s: list of sha1s hexadecimal representation Yields: origin information as dict """ sha1s_bytes = [hashutil.hash_to_bytes(sha1) for sha1 in sha1s] origins = storage.origin_get_by_sha1(sha1s_bytes) for origin in origins: yield converters.from_origin(origin) diff --git a/swh/web/templates/includes/vault-create-tasks.html b/swh/web/templates/includes/vault-create-tasks.html index 0d5dda24..e7bc0f3e 100644 --- a/swh/web/templates/includes/vault-create-tasks.html +++ b/swh/web/templates/includes/vault-create-tasks.html @@ -1,175 +1,175 @@ {% comment %} Copyright (C) 2017-2021 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %} {% load swh_templatetags %} {% if vault_cooking %} {% if user.is_staff %} {% else %} - {% endif %} {% include "includes/vault-common.html" %} {% endif %} diff --git a/swh/web/tests/api/views/test_vault.py b/swh/web/tests/api/views/test_vault.py index 870029b3..7fdff652 100644 --- a/swh/web/tests/api/views/test_vault.py +++ b/swh/web/tests/api/views/test_vault.py @@ -1,168 +1,332 @@ -# Copyright (C) 2017-2020 The Software Heritage developers +# Copyright (C) 2017-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information +import re + from hypothesis import given +import pytest -from swh.model import hashutil +from swh.model.identifiers import CoreSWHID from swh.vault.exc import NotFoundExc from swh.web.common.utils import reverse from swh.web.tests.strategies import ( directory, revision, unknown_directory, unknown_revision, ) from swh.web.tests.utils import ( check_api_get_responses, check_api_post_responses, check_http_get_response, check_http_post_response, ) +##################### +# Current API: + @given(directory(), revision()) def test_api_vault_cook(api_client, mocker, directory, revision): mock_archive = mocker.patch("swh.web.api.views.vault.archive") - for obj_type, obj_id in ( - ("directory", directory), - ("revision_gitfast", revision), + for bundle_type, swhid in ( + ("flat", f"swh:1:dir:{directory}"), + ("gitfast", f"swh:1:rev:{revision}"), ): fetch_url = reverse( - f"api-1-vault-fetch-{obj_type}", url_args={f"{obj_type[:3]}_id": obj_id}, + f"api-1-vault-fetch-{bundle_type}", url_args={"swhid": swhid}, + ) + stub_cook = { + "type": bundle_type, + "progress_msg": None, + "task_id": 1, + "task_status": "done", + "swhid": swhid, + 
} + stub_fetch = b"content" + + mock_archive.vault_cook.return_value = stub_cook + mock_archive.vault_fetch.return_value = stub_fetch + + email = "test@test.mail" + url = reverse( + f"api-1-vault-cook-{bundle_type}", + url_args={"swhid": swhid}, + query_params={"email": email}, + ) + + rv = check_api_post_responses(api_client, url, data=None, status_code=200) + assert rv.data == { + "fetch_url": rv.wsgi_request.build_absolute_uri(fetch_url), + "progress_message": None, + "id": 1, + "status": "done", + "swhid": swhid, + } + mock_archive.vault_cook.assert_called_with( + bundle_type, CoreSWHID.from_string(swhid), email + ) + + rv = check_http_get_response(api_client, fetch_url, status_code=200) + assert rv["Content-Type"] == "application/gzip" + assert rv.content == stub_fetch + mock_archive.vault_fetch.assert_called_with( + bundle_type, CoreSWHID.from_string(swhid) + ) + + +@given(directory(), revision(), unknown_directory(), unknown_revision()) +def test_api_vault_cook_notfound( + api_client, mocker, directory, revision, unknown_directory, unknown_revision +): + mock_vault = mocker.patch("swh.web.common.archive.vault") + mock_vault.cook.side_effect = NotFoundExc("object not found") + mock_vault.fetch.side_effect = NotFoundExc("cooked archive not found") + mock_vault.progress.side_effect = NotFoundExc("cooking request not found") + + for bundle_type, swhid in ( + ("flat", f"swh:1:dir:{directory}"), + ("gitfast", f"swh:1:rev:{revision}"), + ): + + url = reverse(f"api-1-vault-cook-{bundle_type}", url_args={"swhid": swhid}) + + rv = check_api_get_responses(api_client, url, status_code=404) + + assert rv.data["exception"] == "NotFoundExc" + assert rv.data["reason"] == f"Cooking of {swhid} was never requested." + mock_vault.progress.assert_called_with( + bundle_type, CoreSWHID.from_string(swhid) + ) + + for bundle_type, swhid in ( + ("flat", f"swh:1:dir:{unknown_directory}"), + ("gitfast", f"swh:1:rev:{unknown_revision}"), + ): + url = reverse(f"api-1-vault-cook-{bundle_type}", url_args={"swhid": swhid}) + rv = check_api_post_responses(api_client, url, data=None, status_code=404) + + assert rv.data["exception"] == "NotFoundExc" + assert rv.data["reason"] == f"{swhid} not found." + mock_vault.cook.assert_called_with( + bundle_type, CoreSWHID.from_string(swhid), email=None + ) + + fetch_url = reverse( + f"api-1-vault-fetch-{bundle_type}", url_args={"swhid": swhid}, + ) + + rv = check_api_get_responses(api_client, fetch_url, status_code=404) + assert rv.data["exception"] == "NotFoundExc" + assert rv.data["reason"] == f"Cooked archive for {swhid} not found." + mock_vault.fetch.assert_called_with(bundle_type, CoreSWHID.from_string(swhid)) + + +@pytest.mark.parametrize("bundle_type", ["flat", "gitfast"]) +def test_api_vault_cook_error_content(api_client, mocker, bundle_type): + swhid = "swh:1:cnt:" + "0" * 40 + + email = "test@test.mail" + url = reverse( + f"api-1-vault-cook-{bundle_type}", + url_args={"swhid": swhid}, + query_params={"email": email}, + ) + + rv = check_api_post_responses(api_client, url, data=None, status_code=400) + assert rv.data == { + "exception": "BadInputExc", + "reason": ( + "Content objects do not need to be cooked, " + "use `/api/1/content/raw/` instead." 
+ ), + } + + +@pytest.mark.parametrize( + "bundle_type,swhid_type,hint", + [ + ("flat", "rev", True), + ("flat", "rel", False), + ("flat", "snp", False), + ("gitfast", "dir", True), + ("gitfast", "rel", False), + ("gitfast", "snp", False), + ], +) +def test_api_vault_cook_error(api_client, mocker, bundle_type, swhid_type, hint): + swhid = f"swh:1:{swhid_type}:" + "0" * 40 + + email = "test@test.mail" + url = reverse( + f"api-1-vault-cook-{bundle_type}", + url_args={"swhid": swhid}, + query_params={"email": email}, + ) + + rv = check_api_post_responses(api_client, url, data=None, status_code=400) + assert rv.data["exception"] == "BadInputExc" + if hint: + assert re.match( + r"Only .* can be cooked as .* bundles\. Use .*", rv.data["reason"] + ) + else: + assert re.match(r"Only .* can be cooked as .* bundles\.", rv.data["reason"]) + + +##################### +# Legacy API: + + +@given(directory(), revision()) +def test_api_vault_cook_legacy(api_client, mocker, directory, revision): + mock_archive = mocker.patch("swh.web.api.views.vault.archive") + + for obj_type, bundle_type, obj_id in ( + ("directory", "flat", directory), + ("revision_gitfast", "gitfast", revision), + ): + swhid = f"swh:1:{obj_type[:3]}:{obj_id}" + + fetch_url = reverse( + f"api-1-vault-fetch-{bundle_type}", url_args={"swhid": swhid}, ) stub_cook = { "type": obj_type, "progress_msg": None, "task_id": 1, "task_status": "done", - "object_id": obj_id, + "swhid": swhid, } stub_fetch = b"content" mock_archive.vault_cook.return_value = stub_cook mock_archive.vault_fetch.return_value = stub_fetch email = "test@test.mail" url = reverse( f"api-1-vault-cook-{obj_type}", url_args={f"{obj_type[:3]}_id": obj_id}, query_params={"email": email}, ) rv = check_api_post_responses(api_client, url, data=None, status_code=200) assert rv.data == { "fetch_url": rv.wsgi_request.build_absolute_uri(fetch_url), - "obj_type": obj_type, "progress_message": None, "id": 1, "status": "done", - "obj_id": obj_id, + "swhid": swhid, } mock_archive.vault_cook.assert_called_with( - obj_type, hashutil.hash_to_bytes(obj_id), email + bundle_type, CoreSWHID.from_string(swhid), email ) rv = check_http_get_response(api_client, fetch_url, status_code=200) assert rv["Content-Type"] == "application/gzip" assert rv.content == stub_fetch mock_archive.vault_fetch.assert_called_with( - obj_type, hashutil.hash_to_bytes(obj_id) + bundle_type, CoreSWHID.from_string(swhid) ) @given(directory(), revision()) -def test_api_vault_cook_uppercase_hash(api_client, directory, revision): +def test_api_vault_cook_uppercase_hash_legacy(api_client, directory, revision): for obj_type, obj_id in ( ("directory", directory), ("revision_gitfast", revision), ): url = reverse( f"api-1-vault-cook-{obj_type}-uppercase-checksum", url_args={f"{obj_type[:3]}_id": obj_id.upper()}, ) rv = check_http_post_response( api_client, url, data={"email": "test@test.mail"}, status_code=302 ) redirect_url = reverse( f"api-1-vault-cook-{obj_type}", url_args={f"{obj_type[:3]}_id": obj_id} ) assert rv["location"] == redirect_url fetch_url = reverse( f"api-1-vault-fetch-{obj_type}-uppercase-checksum", url_args={f"{obj_type[:3]}_id": obj_id.upper()}, ) rv = check_http_get_response(api_client, fetch_url, status_code=302) redirect_url = reverse( f"api-1-vault-fetch-{obj_type}", url_args={f"{obj_type[:3]}_id": obj_id}, ) assert rv["location"] == redirect_url @given(directory(), revision(), unknown_directory(), unknown_revision()) -def test_api_vault_cook_notfound( +def test_api_vault_cook_notfound_legacy( api_client, 
mocker, directory, revision, unknown_directory, unknown_revision ): mock_vault = mocker.patch("swh.web.common.archive.vault") mock_vault.cook.side_effect = NotFoundExc("object not found") mock_vault.fetch.side_effect = NotFoundExc("cooked archive not found") mock_vault.progress.side_effect = NotFoundExc("cooking request not found") - for obj_type, obj_id in ( - ("directory", directory), - ("revision_gitfast", revision), + for obj_type, bundle_type, obj_id in ( + ("directory", "flat", directory), + ("revision_gitfast", "gitfast", revision), ): - obj_name = obj_type.split("_")[0] - url = reverse( f"api-1-vault-cook-{obj_type}", url_args={f"{obj_type[:3]}_id": obj_id}, ) + swhid = f"swh:1:{obj_type[:3]}:{obj_id}" + rv = check_api_get_responses(api_client, url, status_code=404) assert rv.data["exception"] == "NotFoundExc" - assert ( - rv.data["reason"] - == f"Cooking of {obj_name} '{obj_id}' was never requested." + assert rv.data["reason"] == f"Cooking of {swhid} was never requested." + mock_vault.progress.assert_called_with( + bundle_type, CoreSWHID.from_string(swhid) ) - mock_vault.progress.assert_called_with(obj_type, hashutil.hash_to_bytes(obj_id)) - for obj_type, obj_id in ( - ("directory", unknown_directory), - ("revision_gitfast", unknown_revision), + for obj_type, bundle_type, obj_id in ( + ("directory", "flat", unknown_directory), + ("revision_gitfast", "gitfast", unknown_revision), ): - obj_name = obj_type.split("_")[0] + swhid = f"swh:1:{obj_type[:3]}:{obj_id}" url = reverse( f"api-1-vault-cook-{obj_type}", url_args={f"{obj_type[:3]}_id": obj_id} ) rv = check_api_post_responses(api_client, url, data=None, status_code=404) assert rv.data["exception"] == "NotFoundExc" - assert rv.data["reason"] == f"{obj_name.title()} '{obj_id}' not found." + assert rv.data["reason"] == f"{swhid} not found." mock_vault.cook.assert_called_with( - obj_type, hashutil.hash_to_bytes(obj_id), email=None + bundle_type, CoreSWHID.from_string(swhid), email=None ) fetch_url = reverse( f"api-1-vault-fetch-{obj_type}", url_args={f"{obj_type[:3]}_id": obj_id}, ) - rv = check_api_get_responses(api_client, fetch_url, status_code=404) - assert rv.data["exception"] == "NotFoundExc" - assert ( - rv.data["reason"] == f"Cooked archive for {obj_name} '{obj_id}' not found." + # Redirected to the current 'fetch' url + rv = check_http_get_response(api_client, fetch_url, status_code=302) + redirect_url = reverse( + f"api-1-vault-fetch-{bundle_type}", url_args={"swhid": swhid}, ) - mock_vault.fetch.assert_called_with(obj_type, hashutil.hash_to_bytes(obj_id)) + assert rv["location"] == redirect_url + + rv = check_api_get_responses(api_client, redirect_url, status_code=404) + assert rv.data["exception"] == "NotFoundExc" + assert rv.data["reason"] == f"Cooked archive for {swhid} not found." + mock_vault.fetch.assert_called_with(bundle_type, CoreSWHID.from_string(swhid))
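Taken together, the migration means vault interactions are keyed on (bundle_type, CoreSWHID) pairs throughout the stack; a closing sketch of the new archive-layer calls, as exercised by the tests above (the directory id is a placeholder and a configured vault backend is assumed):

from swh.model.identifiers import CoreSWHID

from swh.web.common import archive

swhid = CoreSWHID.from_string("swh:1:dir:" + "0" * 40)  # placeholder id

cooking = archive.vault_cook("flat", swhid, email=None)  # enqueue/query a cooking
if cooking and cooking["task_status"] == "done":
    bundle = archive.vault_fetch("flat", swhid)          # gzipped bundle bytes
progress = archive.vault_progress("flat", swhid)         # None if never requested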