diff --git a/cypress/integration/persistent-identifiers.spec.js b/cypress/integration/persistent-identifiers.spec.js
index 9d32be25..a4d57cfd 100644
--- a/cypress/integration/persistent-identifiers.spec.js
+++ b/cypress/integration/persistent-identifiers.spec.js
@@ -1,228 +1,228 @@
/**
* Copyright (C) 2019-2020 The Software Heritage developers
* See the AUTHORS file at the top-level directory of this distribution
* License: GNU Affero General Public License version 3, or any later version
* See top-level LICENSE file for more information
*/
let origin, originBadgeUrl, originBrowseUrl;
let url, urlPrefix;
let cntSWHID, cntSWHIDWithContext;
let dirSWHID, dirSWHIDWithContext;
let relSWHID, relSWHIDWithContext;
let revSWHID, revSWHIDWithContext;
let snpSWHID, snpSWHIDWithContext;
let testsData;
const firstSelLine = 6;
const lastSelLine = 12;
describe('Persistent Identifiers Tests', function() {
before(function() {
origin = this.origin[1];
url = `${this.Urls.browse_origin_content()}?origin_url=${origin.url}&path=${origin.content[0].path}`;
- url = `${url}&release=${origin.release}#L${firstSelLine}-L${lastSelLine}`;
+ url = `${url}&release=${origin.release.name}#L${firstSelLine}-L${lastSelLine}`;
originBadgeUrl = this.Urls.swh_badge('origin', origin.url);
originBrowseUrl = `${this.Urls.browse_origin()}?origin_url=${origin.url}`;
cy.visit(url).window().then(win => {
urlPrefix = `${win.location.protocol}//${win.location.hostname}`;
if (win.location.port) {
urlPrefix += `:${win.location.port}`;
}
const swhids = win.swh.webapp.getSwhIdsContext();
cntSWHID = swhids.content.swhid;
cntSWHIDWithContext = swhids.content.swhid_with_context;
cntSWHIDWithContext += `;lines=${firstSelLine}-${lastSelLine}`;
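// For illustration, a content SWHID with context built this way resembles
// (abridged, with hypothetical hash and origin values):
//   swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2;origin=https://example.org/repo;lines=6-12
// i.e. the core SWHID followed by ';'-separated qualifiers, ending with the selected lines.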
dirSWHID = swhids.directory.swhid;
dirSWHIDWithContext = swhids.directory.swhid_with_context;
revSWHID = swhids.revision.swhid;
revSWHIDWithContext = swhids.revision.swhid_with_context;
relSWHID = swhids.release.swhid;
relSWHIDWithContext = swhids.release.swhid_with_context;
snpSWHID = swhids.snapshot.swhid;
snpSWHIDWithContext = swhids.snapshot.swhid_with_context;
testsData = [
{
'objectType': 'content',
'objectSWHIDs': [cntSWHIDWithContext, cntSWHID],
'badgeUrl': this.Urls.swh_badge('content', swhids.content.object_id),
'badgeSWHIDUrl': this.Urls.swh_badge_swhid(cntSWHID),
'browseUrl': this.Urls.browse_swhid(cntSWHIDWithContext)
},
{
'objectType': 'directory',
'objectSWHIDs': [dirSWHIDWithContext, dirSWHID],
'badgeUrl': this.Urls.swh_badge('directory', swhids.directory.object_id),
'badgeSWHIDUrl': this.Urls.swh_badge_swhid(dirSWHID),
'browseUrl': this.Urls.browse_swhid(dirSWHIDWithContext)
},
{
'objectType': 'release',
'objectSWHIDs': [relSWHIDWithContext, relSWHID],
'badgeUrl': this.Urls.swh_badge('release', swhids.release.object_id),
'badgeSWHIDUrl': this.Urls.swh_badge_swhid(relSWHID),
'browseUrl': this.Urls.browse_swhid(relSWHIDWithContext)
},
{
'objectType': 'revision',
'objectSWHIDs': [revSWHIDWithContext, revSWHID],
'badgeUrl': this.Urls.swh_badge('revision', swhids.revision.object_id),
'badgeSWHIDUrl': this.Urls.swh_badge_swhid(revSWHID),
'browseUrl': this.Urls.browse_swhid(revSWHIDWithContext)
},
{
'objectType': 'snapshot',
'objectSWHIDs': [snpSWHIDWithContext, snpSWHID],
'badgeUrl': this.Urls.swh_badge('snapshot', swhids.snapshot.object_id),
'badgeSWHIDUrl': this.Urls.swh_badge_swhid(snpSWHID),
'browseUrl': this.Urls.browse_swhid(snpSWHIDWithContext)
}
];
});
});
beforeEach(function() {
cy.visit(url);
});
it('should open and close identifiers tab when clicking on handle', function() {
cy.get('#swh-identifiers')
.should('have.class', 'ui-slideouttab-ready');
cy.get('.ui-slideouttab-handle')
.click();
cy.get('#swh-identifiers')
.should('have.class', 'ui-slideouttab-open');
cy.get('.ui-slideouttab-handle')
.click();
cy.get('#swh-identifiers')
.should('not.have.class', 'ui-slideouttab-open');
});
it('should display identifiers with permalinks for browsed objects', function() {
cy.get('.ui-slideouttab-handle')
.click();
for (let td of testsData) {
cy.get(`a[href="#swhid-tab-${td.objectType}"]`)
.click();
cy.get(`#swhid-tab-${td.objectType}`)
.should('be.visible');
cy.get(`#swhid-tab-${td.objectType} .swhid`)
.should('have.text', td.objectSWHIDs[0].replace(/;/g, ';\n'))
.should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[0]));
}
});
it('should update other object identifiers contextual info when toggling context checkbox', function() {
cy.get('.ui-slideouttab-handle')
.click();
for (let td of testsData) {
cy.get(`a[href="#swhid-tab-${td.objectType}"]`)
.click();
cy.get(`#swhid-tab-${td.objectType} .swhid`)
.should('have.text', td.objectSWHIDs[0].replace(/;/g, ';\n'))
.should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[0]));
cy.get(`#swhid-tab-${td.objectType} .swhid-option`)
.click();
cy.get(`#swhid-tab-${td.objectType} .swhid`)
.contains(td.objectSWHIDs[1])
.should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[1]));
cy.get(`#swhid-tab-${td.objectType} .swhid-option`)
.click();
cy.get(`#swhid-tab-${td.objectType} .swhid`)
.should('have.text', td.objectSWHIDs[0].replace(/;/g, ';\n'))
.should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[0]));
}
});
it('should display swh badges in identifiers tab for browsed objects', function() {
cy.get('.ui-slideouttab-handle')
.click();
const originBadgeUrl = this.Urls.swh_badge('origin', origin.url);
for (let td of testsData) {
cy.get(`a[href="#swhid-tab-${td.objectType}"]`)
.click();
cy.get(`#swhid-tab-${td.objectType} .swh-badge-origin`)
.should('have.attr', 'src', originBadgeUrl);
cy.get(`#swhid-tab-${td.objectType} .swh-badge-${td.objectType}`)
.should('have.attr', 'src', td.badgeUrl);
}
});
it('should display badge integration info when clicking on it', function() {
cy.get('.ui-slideouttab-handle')
.click();
for (let td of testsData) {
cy.get(`a[href="#swhid-tab-${td.objectType}"]`)
.click();
cy.get(`#swhid-tab-${td.objectType} .swh-badge-origin`)
.click()
.wait(500);
for (let badgeType of ['html', 'md', 'rst']) {
cy.get(`.modal .swh-badge-${badgeType}`)
.contains(`${urlPrefix}${originBrowseUrl}`)
.contains(`${urlPrefix}${originBadgeUrl}`);
}
cy.get('.modal.show .close')
.click()
.wait(500);
cy.get(`#swhid-tab-${td.objectType} .swh-badge-${td.objectType}`)
.click()
.wait(500);
for (let badgeType of ['html', 'md', 'rst']) {
cy.get(`.modal .swh-badge-${badgeType}`)
.contains(`${urlPrefix}${td.browseUrl}`)
.contains(`${urlPrefix}${td.badgeSWHIDUrl}`);
}
cy.get('.modal.show .close')
.click()
.wait(500);
}
});
it('should be possible to retrieve SWHIDs context from JavaScript', function() {
cy.window().then(win => {
const swhIdsContext = win.swh.webapp.getSwhIdsContext();
for (let testData of testsData) {
assert.isTrue(swhIdsContext.hasOwnProperty(testData.objectType));
assert.equal(swhIdsContext[testData.objectType].swhid,
testData.objectSWHIDs.slice(-1)[0]);
}
});
});
});
diff --git a/cypress/integration/vault.spec.js b/cypress/integration/vault.spec.js
index 5bce6f4d..1b589fcc 100644
--- a/cypress/integration/vault.spec.js
+++ b/cypress/integration/vault.spec.js
@@ -1,521 +1,566 @@
/**
* Copyright (C) 2019-2020 The Software Heritage developers
* See the AUTHORS file at the top-level directory of this distribution
* License: GNU Affero General Public License version 3, or any later version
* See top-level LICENSE file for more information
*/
let vaultItems = [];
const progressbarColors = {
'new': 'rgba(128, 128, 128, 0.5)',
'pending': 'rgba(0, 0, 255, 0.5)',
'done': 'rgb(92, 184, 92)'
};
function checkVaultCookingTask(objectType) {
cy.contains('button', 'Download')
.click();
cy.contains('.dropdown-item', objectType)
.click();
cy.wait('@checkVaultCookingTask');
}
function updateVaultItemList(vaultUrl, vaultItems) {
cy.visit(vaultUrl)
.then(() => {
// Add uncooked task to localStorage
// which updates it in vault items list
window.localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultItems));
});
}
// Mocks API response : /api/1/vault/(:objectType)/(:hash)
// objectType : {'directory', 'revision'}
function genVaultCookingResponse(objectType, objectId, status, message, fetchUrl) {
return {
'obj_type': objectType,
'id': 1,
'progress_message': message,
'status': status,
'obj_id': objectId,
'fetch_url': fetchUrl
};
};
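// Example (reusing the 'Source' directory id from the test fixtures):
//   genVaultCookingResponse('directory', 'cd19126d815470b28919d64b2a8e6a3e37f900dd',
//                           'pending', 'Processing...',
//                           '/api/1/vault/directory/cd19126d815470b28919d64b2a8e6a3e37f900dd/raw/')
// returns an object shaped like the /api/1/vault/directory/(hash)/ payload
// that the cy.route stubs below hand back to the UI.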
// Tests the progress bar color and status message,
// and the task status stored in localStorage
function testStatus(taskId, color, statusMsg, status) {
cy.get(`.swh-vault-table #vault-task-${taskId}`)
.should('be.visible')
.find('.progress-bar')
.should('be.visible')
.and('have.css', 'background-color', color)
.and('contain', statusMsg)
.then(() => {
// Vault item with object_id as taskId should exist in localStorage
const currentVaultItems = JSON.parse(window.localStorage.getItem('swh-vault-cooking-tasks'));
const vaultItem = currentVaultItems.find(obj => obj.object_id === taskId);
assert.isNotNull(vaultItem);
assert.strictEqual(vaultItem.status, status);
});
}
describe('Vault Cooking User Interface Tests', function() {
before(function() {
const dirInfo = this.origin[0].directory[0];
this.directory = dirInfo.id;
this.directoryUrl = this.Urls.browse_origin_directory() +
`?origin_url=${this.origin[0].url}&path=${dirInfo.path}`;
this.vaultDirectoryUrl = this.Urls.api_1_vault_cook_directory(this.directory);
this.vaultFetchDirectoryUrl = this.Urls.api_1_vault_fetch_directory(this.directory);
this.revision = this.origin[1].revisions[0];
this.revisionUrl = this.Urls.browse_revision(this.revision);
this.vaultRevisionUrl = this.Urls.api_1_vault_cook_revision_gitfast(this.revision);
this.vaultFetchRevisionUrl = this.Urls.api_1_vault_fetch_revision_gitfast(this.revision);
+ const release = this.origin[1].release;
+ this.releaseUrl = this.Urls.browse_release(release.id) + `?origin_url=${this.origin[1].url}`;
+ this.vaultReleaseDirectoryUrl = this.Urls.api_1_vault_cook_directory(release.directory);
+
vaultItems[0] = {
'object_type': 'revision',
'object_id': this.revision,
'email': '',
'status': 'done',
'fetch_url': `/api/1/vault/revision/${this.revision}/gitfast/raw/`,
'progress_message': null
};
});
beforeEach(function() {
this.genVaultDirCookingResponse = (status, message = null) => {
return genVaultCookingResponse('directory', this.directory, status,
message, this.vaultFetchDirectoryUrl);
};
this.genVaultRevCookingResponse = (status, message = null) => {
return genVaultCookingResponse('revision', this.revision, status,
message, this.vaultFetchRevisionUrl);
};
cy.server();
});
it('should report an error when vault service is experiencing issues', function() {
// Browse a directory
cy.visit(this.directoryUrl);
// Stub responses when requesting the vault API to simulate
// an internal server error
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: {'exception': 'APIError'},
status: 500
}).as('checkVaultCookingTask');
cy.contains('button', 'Download')
.click();
// Check error alert is displayed
cy.get('.alert-danger')
.should('be.visible')
.should('contain', 'Archive cooking service is currently experiencing issues.');
});
it('should report an error when a cooking task creation failed', function() {
// Browse a directory
cy.visit(this.directoryUrl);
// Stub responses when requesting the vault API to simulate
// a task can not be created
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: {'exception': 'NotFoundExc'}
}).as('checkVaultCookingTask');
cy.route({
method: 'POST',
url: this.vaultDirectoryUrl,
response: {'exception': 'ValueError'},
status: 500
}).as('createVaultCookingTask');
cy.contains('button', 'Download')
.click();
// Create a vault cooking task through the GUI
cy.get('.modal-dialog')
.contains('button:visible', 'Ok')
.click();
cy.wait('@createVaultCookingTask');
// Check error alert is displayed
cy.get('.alert-danger')
.should('be.visible')
.should('contain', 'Archive cooking request submission failed.');
});
it('should create a directory cooking task and report the success', function() {
// Browse a directory
cy.visit(this.directoryUrl);
// Stub responses when requesting the vault API to simulate
// a task has been created
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: {'exception': 'NotFoundExc'}
}).as('checkVaultCookingTask');
cy.route({
method: 'POST',
url: this.vaultDirectoryUrl,
response: this.genVaultDirCookingResponse('new')
}).as('createVaultCookingTask');
cy.contains('button', 'Download')
.click();
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: this.genVaultDirCookingResponse('new')
}).as('checkVaultCookingTask');
cy.window().then(win => {
const swhIdsContext = win.swh.webapp.getSwhIdsContext();
const browseDirectoryUrl = swhIdsContext.directory.swhid_with_context_url;
// Create a vault cooking task through the GUI
cy.get('.modal-dialog')
.contains('button:visible', 'Ok')
.click();
cy.wait('@createVaultCookingTask');
// Check success alert is displayed
cy.get('.alert-success')
.should('be.visible')
.should('contain', 'Archive cooking request successfully submitted.');
// Go to Downloads page
cy.visit(this.Urls.browse_vault());
cy.wait('@checkVaultCookingTask').then(() => {
testStatus(this.directory, progressbarColors['new'], 'new', 'new');
});
// Stub response to the vault API indicating the task is processing
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: this.genVaultDirCookingResponse('pending', 'Processing...')
}).as('checkVaultCookingTask');
cy.wait('@checkVaultCookingTask').then(() => {
testStatus(this.directory, progressbarColors['pending'], 'Processing...', 'pending');
});
// Stub response to the vault API indicating the task is finished
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: this.genVaultDirCookingResponse('done')
}).as('checkVaultCookingTask');
cy.wait('@checkVaultCookingTask').then(() => {
testStatus(this.directory, progressbarColors['done'], 'done', 'done');
});
// Stub response to the vault API to simulate archive download
cy.route({
method: 'GET',
url: this.vaultFetchDirectoryUrl,
response: `fx:${this.directory}.tar.gz`,
headers: {
'Content-disposition': `attachment; filename=${this.directory}.tar.gz`,
'Content-Type': 'application/gzip'
}
}).as('fetchCookedArchive');
cy.get(`#vault-task-${this.directory} .vault-origin a`)
.should('contain', this.origin[0].url)
.should('have.attr', 'href', `${this.Urls.browse_origin()}?origin_url=${this.origin[0].url}`);
cy.get(`#vault-task-${this.directory} .vault-object-info a`)
.should('have.text', this.directory)
.should('have.attr', 'href', browseDirectoryUrl);
cy.get(`#vault-task-${this.directory} .vault-dl-link button`)
.click();
cy.wait('@fetchCookedArchive').then((xhr) => {
assert.isNotNull(xhr.response.body);
});
});
});
it('should create a revision cooking task and report its status', function() {
cy.adminLogin();
// Browse a revision
cy.visit(this.revisionUrl);
// Stub responses when requesting the vault API to simulate
// a task has been created
cy.route({
method: 'GET',
url: this.vaultRevisionUrl,
response: {'exception': 'NotFoundExc'}
}).as('checkVaultCookingTask');
cy.route({
method: 'POST',
url: this.vaultRevisionUrl,
response: this.genVaultRevCookingResponse('new')
}).as('createVaultCookingTask');
// Create a vault cooking task through the GUI
checkVaultCookingTask('as git');
cy.route({
method: 'GET',
url: this.vaultRevisionUrl,
response: this.genVaultRevCookingResponse('new')
}).as('checkVaultCookingTask');
cy.window().then(win => {
const swhIdsContext = win.swh.webapp.getSwhIdsContext();
const browseRevisionUrl = swhIdsContext.revision.swhid_url;
// Create a vault cooking task through the GUI
cy.get('.modal-dialog')
.contains('button:visible', 'Ok')
.click();
cy.wait('@createVaultCookingTask');
// Check success alert is displayed
cy.get('.alert-success')
.should('be.visible')
.should('contain', 'Archive cooking request successfully submitted.');
// Go to Downloads page
cy.visit(this.Urls.browse_vault());
cy.wait('@checkVaultCookingTask').then(() => {
testStatus(this.revision, progressbarColors['new'], 'new', 'new');
});
// Stub response to the vault API indicating the task is processing
cy.route({
method: 'GET',
url: this.vaultRevisionUrl,
response: this.genVaultRevCookingResponse('pending', 'Processing...')
}).as('checkVaultCookingTask');
cy.wait('@checkVaultCookingTask').then(() => {
testStatus(this.revision, progressbarColors['pending'], 'Processing...', 'pending');
});
// Stub response to the vault API indicating the task is finished
cy.route({
method: 'GET',
url: this.vaultRevisionUrl,
response: this.genVaultRevCookingResponse('done')
}).as('checkVaultCookingTask');
cy.wait('@checkVaultCookingTask').then(() => {
testStatus(this.revision, progressbarColors['done'], 'done', 'done');
});
// Stub response to the vault API to simulate archive
// download
cy.route({
method: 'GET',
url: this.vaultFetchRevisionUrl,
response: `fx:${this.revision}.gitfast.gz`,
headers: {
'Content-disposition': `attachment; filename=${this.revision}.gitfast.gz`,
'Content-Type': 'application/gzip'
}
}).as('fetchCookedArchive');
cy.get(`#vault-task-${this.revision} .vault-origin`)
.should('have.text', 'unknown');
cy.get(`#vault-task-${this.revision} .vault-object-info a`)
.should('have.text', this.revision)
.should('have.attr', 'href', browseRevisionUrl);
cy.get(`#vault-task-${this.revision} .vault-dl-link button`)
.click();
cy.wait('@fetchCookedArchive').then((xhr) => {
assert.isNotNull(xhr.response.body);
});
});
});
+ it('should create a directory cooking task from the release view', function() {
+
+ // Browse a release
+ cy.visit(this.releaseUrl);
+
+ // Stub responses when requesting the vault API to simulate
+ // a task has been created
+ cy.route({
+ method: 'GET',
+ url: this.vaultReleaseDirectoryUrl,
+ response: {'exception': 'NotFoundExc'}
+ }).as('checkVaultCookingTask');
+
+ cy.route({
+ method: 'POST',
+ url: this.vaultReleaseDirectoryUrl,
+ response: this.genVaultDirCookingResponse('new')
+ }).as('createVaultCookingTask');
+
+ cy.contains('button', 'Download')
+ .click();
+
+ cy.route({
+ method: 'GET',
+ url: this.vaultReleaseDirectoryUrl,
+ response: this.genVaultDirCookingResponse('new')
+ }).as('checkVaultCookingTask');
+
+ // Create a vault cooking task through the GUI
+ cy.get('.modal-dialog')
+ .contains('button:visible', 'Ok')
+ .click();
+
+ cy.wait('@createVaultCookingTask');
+
+ // Check success alert is displayed
+ cy.get('.alert-success')
+ .should('be.visible')
+ .should('contain', 'Archive cooking request successfully submitted.');
+ });
+
it('should offer to recook an archive if no more available to download', function() {
updateVaultItemList(this.Urls.browse_vault(), vaultItems);
// Send 404 when fetching vault item
cy.route({
method: 'GET',
status: 404,
url: this.vaultFetchRevisionUrl,
response: {
'exception': 'NotFoundExc',
'reason': `Revision with ID '${this.revision}' not found.`
},
headers: {
'Content-Type': 'json'
}
}).as('fetchCookedArchive');
cy.get(`#vault-task-${this.revision} .vault-dl-link button`)
.click();
cy.wait('@fetchCookedArchive').then(() => {
cy.route({
method: 'POST',
url: this.vaultRevisionUrl,
response: this.genVaultRevCookingResponse('new')
}).as('createVaultCookingTask');
cy.route({
method: 'GET',
url: this.vaultRevisionUrl,
response: this.genVaultRevCookingResponse('new')
}).as('checkVaultCookingTask');
cy.get('#vault-recook-object-modal > .modal-dialog')
.should('be.visible')
.contains('button:visible', 'Ok')
.click();
cy.wait('@createVaultCookingTask')
.wait('@checkVaultCookingTask')
.then(() => {
testStatus(this.revision, progressbarColors['new'], 'new', 'new');
});
});
});
it('should remove selected vault items', function() {
updateVaultItemList(this.Urls.browse_vault(), vaultItems);
cy.get(`#vault-task-${this.revision}`)
.find('input[type="checkbox"]')
.click({force: true});
cy.contains('button', 'Remove selected tasks')
.click();
cy.get(`#vault-task-${this.revision}`)
.should('not.exist');
});
it('should offer to immediately download a directory tarball if already cooked', function() {
// Browse a directory
cy.visit(this.directoryUrl);
// Stub responses when requesting the vault API to simulate
// the directory tarball has already been cooked
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: this.genVaultDirCookingResponse('done')
}).as('checkVaultCookingTask');
// Stub response to the vault API to simulate archive download
cy.route({
method: 'GET',
url: this.vaultFetchDirectoryUrl,
response: `fx:${this.directory}.tar.gz`,
headers: {
'Content-disposition': `attachment; filename=${this.directory}.tar.gz`,
'Content-Type': 'application/gzip'
}
}).as('fetchCookedArchive');
// Create a vault cooking task through the GUI
cy.contains('button', 'Download')
.click();
// Start archive download through the GUI
cy.get('.modal-dialog')
.contains('button:visible', 'Ok')
.click();
cy.wait('@fetchCookedArchive');
});
it('should offer to immediately download a revision gitfast archive if already cooked', function() {
cy.adminLogin();
// Browse a revision
cy.visit(this.revisionUrl);
// Stub responses when requesting the vault API to simulate
// the revision gitfast archive has already been cooked
cy.route({
method: 'GET',
url: this.vaultRevisionUrl,
response: this.genVaultRevCookingResponse('done')
}).as('checkVaultCookingTask');
// Stub response to the vault API to simulate archive download
cy.route({
method: 'GET',
url: this.vaultFetchRevisionUrl,
response: `fx:${this.revision}.gitfast.gz`,
headers: {
'Content-disposition': `attachment; filename=${this.revision}.gitfast.gz`,
'Content-Type': 'application/gzip'
}
}).as('fetchCookedArchive');
checkVaultCookingTask('as git');
// Start archive download through the GUI
cy.get('.modal-dialog')
.contains('button:visible', 'Ok')
.click();
cy.wait('@fetchCookedArchive');
});
it('should offer to recook an object if previous vault task failed', function() {
cy.visit(this.directoryUrl);
// Stub responses when requesting the vault API to simulate
// the last cooking of the directory tarball has failed
cy.route({
method: 'GET',
url: this.vaultDirectoryUrl,
response: this.genVaultDirCookingResponse('failed')
}).as('checkVaultCookingTask');
cy.contains('button', 'Download')
.click();
// Check that recooking the directory is offered to user
cy.get('.modal-dialog')
.contains('button:visible', 'Ok')
.should('be.visible');
});
});
diff --git a/cypress/support/index.js b/cypress/support/index.js
index 5d2b99c5..ffee5d29 100644
--- a/cypress/support/index.js
+++ b/cypress/support/index.js
@@ -1,148 +1,152 @@
/**
* Copyright (C) 2019-2020 The Software Heritage developers
* See the AUTHORS file at the top-level directory of this distribution
* License: GNU Affero General Public License version 3, or any later version
* See top-level LICENSE file for more information
*/
import '@cypress/code-coverage/support';
import {httpGetJson} from '../utils';
Cypress.Screenshot.defaults({
screenshotOnRunFailure: false
});
Cypress.Commands.add('xhrShouldBeCalled', (alias, timesCalled) => {
expect(
cy.state('requests').filter(call => call.alias === alias),
`${alias} should have been called ${timesCalled} times`
).to.have.length(timesCalled);
});
Cypress.Commands.add('adminLogin', () => {
const url = '/admin/login/';
return cy.request({
url: url,
method: 'GET'
}).then(() => {
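// The initial GET does not authenticate; it only primes the csrftoken
// cookie needed to POST Django's login form below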
cy.getCookie('sessionid').should('not.exist');
cy.getCookie('csrftoken').its('value').then((token) => {
cy.request({
url: url,
method: 'POST',
form: true,
followRedirect: false,
body: {
username: 'admin',
password: 'admin',
csrfmiddlewaretoken: token
}
}).then(() => {
cy.getCookie('sessionid').should('exist');
return cy.getCookie('csrftoken').its('value');
});
});
});
});
before(function() {
this.unarchivedRepo = {
url: 'https://github.com/SoftwareHeritage/swh-web',
type: 'git',
revision: '7bf1b2f489f16253527807baead7957ca9e8adde',
snapshot: 'd9829223095de4bb529790de8ba4e4813e38672d',
rootDirectory: '7d887d96c0047a77e2e8c4ee9bb1528463677663',
content: [{
sha1git: 'b203ec39300e5b7e97b6e20986183cbd0b797859'
}]
};
this.origin = [{
url: 'https://github.com/memononen/libtess2',
type: 'git',
content: [{
path: 'Source/tess.h'
}, {
path: 'premake4.lua'
}],
directory: [{
path: 'Source',
id: 'cd19126d815470b28919d64b2a8e6a3e37f900dd'
}],
revisions: [],
invalidSubDir: 'Source1'
}, {
url: 'https://github.com/wcoder/highlightjs-line-numbers.js',
type: 'git',
content: [{
path: 'src/highlightjs-line-numbers.js'
}],
directory: [],
revisions: ['1c480a4573d2a003fc2630c21c2b25829de49972'],
- release: 'v2.6.0'
+ release: {
+ name: 'v2.6.0',
+ id: '6877028d6e5412780517d0bfa81f07f6c51abb41',
+ directory: '5b61d50ef35ca9a4618a3572bde947b8cccf71ad'
+ }
}];
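// Note: the release object above is consumed by persistent-identifiers.spec.js
// (origin.release.name) and by vault.spec.js (origin.release.id and
// origin.release.directory) for the release view cooking test.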
const getMetadataForOrigin = async originUrl => {
const originVisitsApiUrl = this.Urls.api_1_origin_visits(originUrl);
const originVisits = await httpGetJson(originVisitsApiUrl);
const lastVisit = originVisits[0];
const snapshotApiUrl = this.Urls.api_1_snapshot(lastVisit.snapshot);
const lastOriginSnapshot = await httpGetJson(snapshotApiUrl);
let revision = lastOriginSnapshot.branches.HEAD.target;
if (lastOriginSnapshot.branches.HEAD.target_type === 'alias') {
revision = lastOriginSnapshot.branches[revision].target;
}
const revisionApiUrl = this.Urls.api_1_revision(revision);
const lastOriginHeadRevision = await httpGetJson(revisionApiUrl);
return {
'directory': lastOriginHeadRevision.directory,
'revision': lastOriginHeadRevision.id,
'snapshot': lastOriginSnapshot.id
};
};
cy.visit('/').window().then(async win => {
this.Urls = win.Urls;
for (let origin of this.origin) {
const metadata = await getMetadataForOrigin(origin.url);
const directoryApiUrl = this.Urls.api_1_directory(metadata.directory);
origin.dirContent = await httpGetJson(directoryApiUrl);
origin.rootDirectory = metadata.directory;
origin.revisions.push(metadata.revision);
origin.snapshot = metadata.snapshot;
for (let content of origin.content) {
const contentPathApiUrl = this.Urls.api_1_directory(origin.rootDirectory, content.path);
const contentMetaData = await httpGetJson(contentPathApiUrl);
content.name = contentMetaData.name.split('/').slice(-1)[0];
content.sha1git = contentMetaData.target;
content.directory = contentMetaData.dir_id;
content.rawFilePath = this.Urls.browse_content_raw(`sha1_git:${content.sha1git}`) +
`?filename=${encodeURIComponent(content.name)}`;
cy.request(content.rawFilePath)
.then((response) => {
const fileText = response.body;
const fileLines = fileText.split('\n');
content.numberLines = fileLines.length;
// If the last line is empty, it is not shown
if (!fileLines[content.numberLines - 1]) content.numberLines -= 1;
});
}
}
});
});
// force the use of the fetch polyfill wrapping XMLHttpRequest
// in order for Cypress to be able to intercept and stub those requests
Cypress.on('window:before:load', win => {
win.fetch = null;
});
diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py
index 319d09f7..2cfeb070 100644
--- a/swh/web/browse/utils.py
+++ b/swh/web/browse/utils.py
@@ -1,721 +1,723 @@
# Copyright (C) 2017-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import base64
import stat
import textwrap
from threading import Lock
import magic
import sentry_sdk
from django.core.cache import cache
from django.utils.html import escape
from django.utils.safestring import mark_safe
from swh.web.common import archive, highlightjs
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
browsers_supported_image_mimes,
format_utc_iso_date,
reverse,
rst_to_html,
)
from swh.web.config import get_config
def get_directory_entries(sha1_git):
"""Function that retrieves the content of a directory
from the archive.
The directory entries are first sorted in lexicographical order.
Sub-directories and regular files are then extracted.
Args:
sha1_git: sha1_git identifier of the directory
Returns:
A tuple whose first member corresponds to the sub-directories list
and second member the regular files list
Raises:
NotFoundExc if the directory is not found
"""
cache_entry_id = "directory_entries_%s" % sha1_git
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry
entries = list(archive.lookup_directory(sha1_git))
for e in entries:
e["perms"] = stat.filemode(e["perms"])
if e["type"] == "rev":
# modify dir entry name to explicitly show it points
# to a revision
e["name"] = "%s @ %s" % (e["name"], e["target"][:7])
dirs = [e for e in entries if e["type"] in ("dir", "rev")]
files = [e for e in entries if e["type"] == "file"]
dirs = sorted(dirs, key=lambda d: d["name"])
files = sorted(files, key=lambda f: f["name"])
cache.set(cache_entry_id, (dirs, files))
return dirs, files
_lock = Lock()
def get_mimetype_and_encoding_for_content(content):
"""Function that returns the mime type and the encoding associated to
a content buffer using the magic module under the hood.
Args:
content (bytes): a content buffer
Returns:
A tuple (mimetype, encoding), for instance ('text/plain', 'us-ascii'),
associated to the provided content.
"""
m = magic.Magic(mime=True, mime_encoding=True)
mime_encoding = m.from_buffer(content)
mime_type, encoding = mime_encoding.split(";")
encoding = encoding.replace(" charset=", "")
return mime_type, encoding
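# Illustrative sketch (exact values depend on the libmagic database in use):
#   get_mimetype_and_encoding_for_content(b"Hello world\n")
#   would typically return ("text/plain", "us-ascii")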
# maximum authorized content size in bytes for HTML display
# with code highlighting
content_display_max_size = get_config()["content_display_max_size"]
def _re_encode_content(mimetype, encoding, content_data):
# encode textual content to utf-8 if needed
if mimetype.startswith("text/"):
# probably a malformed UTF-8 content, re-encode it
# by replacing invalid chars with a substitution one
if encoding == "unknown-8bit":
content_data = content_data.decode("utf-8", "replace").encode("utf-8")
elif encoding not in ["utf-8", "binary"]:
content_data = content_data.decode(encoding, "replace").encode("utf-8")
elif mimetype.startswith("application/octet-stream"):
# file may detect a text content as binary
# so try to decode it for display
encodings = ["us-ascii", "utf-8"]
encodings += ["iso-8859-%s" % i for i in range(1, 17)]
for enc in encodings:
try:
content_data = content_data.decode(enc).encode("utf-8")
except Exception as exc:
sentry_sdk.capture_exception(exc)
else:
# ensure display in content view
encoding = enc
mimetype = "text/plain"
break
return mimetype, encoding, content_data
def request_content(
query_string, max_size=content_display_max_size, re_encode=True,
):
"""Function that retrieves a content from the archive.
Raw bytes content is first retrieved, then the content mime type.
If the mime type is not stored in the archive, it will be computed
using Python magic module.
Args:
query_string: a string of the form "[ALGO_HASH:]HASH" where
optional ALGO_HASH can be either ``sha1``, ``sha1_git``,
``sha256``, or ``blake2s256`` (default to ``sha1``) and HASH
the hexadecimal representation of the hash value
max_size: the maximum size for a content to retrieve (default to 1MB,
no size limit if None)
Returns:
A tuple whose first member corresponds to the content raw bytes
and second member the content mime type
Raises:
NotFoundExc if the content is not found
"""
content_data = archive.lookup_content(query_string)
filetype = None
language = None
# requests to the indexer db may fail so properly handle
# those cases in order to avoid content display errors
try:
filetype = archive.lookup_content_filetype(query_string)
language = archive.lookup_content_language(query_string)
except Exception as exc:
sentry_sdk.capture_exception(exc)
mimetype = "unknown"
encoding = "unknown"
if filetype:
mimetype = filetype["mimetype"]
encoding = filetype["encoding"]
# workaround when encountering corrupted data due to implicit
# conversion from bytea to text in the indexer db (see T818)
# TODO: Remove that code when all data have been correctly converted
if mimetype.startswith("\\"):
filetype = None
if not max_size or content_data["length"] < max_size:
try:
content_raw = archive.lookup_content_raw(query_string)
except Exception as exc:
sentry_sdk.capture_exception(exc)
raise NotFoundExc(
"The bytes of the content are currently not available "
"in the archive."
)
else:
content_data["raw_data"] = content_raw["data"]
if not filetype:
mimetype, encoding = get_mimetype_and_encoding_for_content(
content_data["raw_data"]
)
if re_encode:
mimetype, encoding, raw_data = _re_encode_content(
mimetype, encoding, content_data["raw_data"]
)
content_data["raw_data"] = raw_data
else:
content_data["raw_data"] = None
content_data["mimetype"] = mimetype
content_data["encoding"] = encoding
if language:
content_data["language"] = language["lang"]
else:
content_data["language"] = "not detected"
return content_data
def prepare_content_for_display(content_data, mime_type, path):
"""Function that prepares a content for HTML display.
The function tries to associate a programming language to a
content in order to perform syntax highlighting client-side
using highlightjs. The language is determined using either
the content filename or its mime type.
If the mime type corresponds to an image format supported
by web browsers, the content will be encoded in base64
for displaying the image.
Args:
content_data (bytes): raw bytes of the content
mime_type (string): mime type of the content
path (string): path of the content including filename
Returns:
A dict containing the content bytes (possibly different from the one
provided as parameter if it is an image) under the key 'content_data'
and the corresponding highlightjs language class under the
key 'language'.
"""
language = highlightjs.get_hljs_language_from_filename(path)
if not language:
language = highlightjs.get_hljs_language_from_mime_type(mime_type)
if not language:
language = "nohighlight"
elif mime_type.startswith("application/"):
mime_type = mime_type.replace("application/", "text/")
if mime_type.startswith("image/"):
if mime_type in browsers_supported_image_mimes:
content_data = base64.b64encode(content_data).decode("ascii")
if mime_type.startswith("image/svg"):
mime_type = "image/svg+xml"
if mime_type.startswith("text/"):
content_data = content_data.decode("utf-8", errors="replace")
return {"content_data": content_data, "language": language, "mimetype": mime_type}
def gen_link(url, link_text=None, link_attrs=None):
"""
Utility function for generating an HTML link to insert
in Django templates.
Args:
url (str): an url
link_text (str): optional text for the produced link,
if not provided the url will be used
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="url">link_text</a>'
"""
attrs = " "
if link_attrs:
for k, v in link_attrs.items():
attrs += '%s="%s" ' % (k, v)
if not link_text:
link_text = url
link = '<a%shref="%s">%s</a>' % (attrs, escape(url), escape(link_text))
return mark_safe(link)
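# Example derived from the code above (hypothetical url and attributes):
#   gen_link("https://example.org", "Example", {"class": "btn"})
#   returns the safe string '<a class="btn" href="https://example.org">Example</a>'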
def _snapshot_context_query_params(snapshot_context):
query_params = {}
if not snapshot_context:
return query_params
if snapshot_context and snapshot_context["origin_info"]:
origin_info = snapshot_context["origin_info"]
snp_query_params = snapshot_context["query_params"]
query_params = {"origin_url": origin_info["url"]}
if "timestamp" in snp_query_params:
query_params["timestamp"] = snp_query_params["timestamp"]
if "visit_id" in snp_query_params:
query_params["visit_id"] = snp_query_params["visit_id"]
if "snapshot" in snp_query_params and "visit_id" not in query_params:
query_params["snapshot"] = snp_query_params["snapshot"]
elif snapshot_context:
query_params = {"snapshot": snapshot_context["snapshot_id"]}
if snapshot_context["release"]:
query_params["release"] = snapshot_context["release"]
elif snapshot_context["branch"] and snapshot_context["branch"] not in (
"HEAD",
snapshot_context["revision_id"],
):
query_params["branch"] = snapshot_context["branch"]
elif snapshot_context["revision_id"]:
query_params["revision"] = snapshot_context["revision_id"]
return query_params
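# Minimal sketch of the mapping above (hypothetical values):
#   _snapshot_context_query_params({
#       "origin_info": {"url": "https://example.org/repo"},
#       "query_params": {"timestamp": "2020-01-01", "visit_id": "3"},
#   })
#   returns {"origin_url": "https://example.org/repo",
#            "timestamp": "2020-01-01", "visit_id": "3"}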
def gen_revision_url(revision_id, snapshot_context=None):
"""
Utility function for generating an url to a revision.
Args:
revision_id (str): a revision id
snapshot_context (dict): if provided, generate snapshot-dependent
browsing url
Returns:
str: The url to browse the revision
"""
query_params = _snapshot_context_query_params(snapshot_context)
+ # remove query parameters not needed for a revision view
query_params.pop("revision", None)
+ query_params.pop("release", None)
return reverse(
"browse-revision", url_args={"sha1_git": revision_id}, query_params=query_params
)
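# For instance (hypothetical values), a bare snapshot context whose query
# parameters resolve to {"snapshot": "<snapshot_id>", "release": "v1.0"}
# yields /browse/revision/<sha1_git>/?snapshot=<snapshot_id>, the "release"
# parameter being dropped as redundant for a revision view.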
def gen_revision_link(
revision_id,
shorten_id=False,
snapshot_context=None,
link_text="Browse",
link_attrs={"class": "btn btn-default btn-sm", "role": "button"},
):
"""
Utility function for generating a link to a revision HTML view
to insert in Django templates.
Args:
revision_id (str): a revision id
shorten_id (boolean): whether to shorten the revision id to 7
characters for the link text
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
link_text (str): optional text for the generated link
(the revision id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
str: An HTML link in the form '<a href="revision_url">revision_id</a>'
"""
if not revision_id:
return None
revision_url = gen_revision_url(revision_id, snapshot_context)
if shorten_id:
return gen_link(revision_url, revision_id[:7], link_attrs)
else:
if not link_text:
link_text = revision_id
return gen_link(revision_url, link_text, link_attrs)
def gen_directory_link(
sha1_git,
snapshot_context=None,
link_text="Browse",
link_attrs={"class": "btn btn-default btn-sm", "role": "button"},
):
"""
Utility function for generating a link to a directory HTML view
to insert in Django templates.
Args:
sha1_git (str): directory identifier
link_text (str): optional text for the generated link
(the directory id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="directory_url">link_text</a>'
"""
if not sha1_git:
return None
query_params = _snapshot_context_query_params(snapshot_context)
directory_url = reverse(
"browse-directory", url_args={"sha1_git": sha1_git}, query_params=query_params
)
if not link_text:
link_text = sha1_git
return gen_link(directory_url, link_text, link_attrs)
def gen_snapshot_link(
snapshot_id,
snapshot_context=None,
link_text="Browse",
link_attrs={"class": "btn btn-default btn-sm", "role": "button"},
):
"""
Utility function for generating a link to a snapshot HTML view
to insert in Django templates.
Args:
snapshot_id (str): snapshot identifier
link_text (str): optional text for the generated link
(the snapshot id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="snapshot_url">link_text</a>'
"""
query_params = _snapshot_context_query_params(snapshot_context)
snapshot_url = reverse(
"browse-snapshot",
url_args={"snapshot_id": snapshot_id},
query_params=query_params,
)
if not link_text:
link_text = snapshot_id
return gen_link(snapshot_url, link_text, link_attrs)
def gen_content_link(
sha1_git,
snapshot_context=None,
link_text="Browse",
link_attrs={"class": "btn btn-default btn-sm", "role": "button"},
):
"""
Utility function for generating a link to a content HTML view
to insert in Django templates.
Args:
sha1_git (str): content identifier
link_text (str): optional text for the generated link
(the content sha1_git will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="content_url">link_text</a>'
"""
if not sha1_git:
return None
query_params = _snapshot_context_query_params(snapshot_context)
content_url = reverse(
"browse-content",
url_args={"query_string": "sha1_git:" + sha1_git},
query_params=query_params,
)
if not link_text:
link_text = sha1_git
return gen_link(content_url, link_text, link_attrs)
def get_revision_log_url(revision_id, snapshot_context=None):
"""
Utility function for getting the URL for a revision log HTML view
(possibly in the context of an origin).
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
Returns:
The revision log view URL
"""
query_params = {}
if snapshot_context:
query_params = _snapshot_context_query_params(snapshot_context)
query_params["revision"] = revision_id
if snapshot_context and snapshot_context["origin_info"]:
revision_log_url = reverse("browse-origin-log", query_params=query_params)
elif snapshot_context:
url_args = {"snapshot_id": snapshot_context["snapshot_id"]}
del query_params["snapshot"]
revision_log_url = reverse(
"browse-snapshot-log", url_args=url_args, query_params=query_params
)
else:
revision_log_url = reverse(
"browse-revision-log", url_args={"sha1_git": revision_id}
)
return revision_log_url
def gen_revision_log_link(
revision_id,
snapshot_context=None,
link_text="Browse",
link_attrs={"class": "btn btn-default btn-sm", "role": "button"},
):
"""
Utility function for generating a link to a revision log HTML view
(possibly in the context of an origin) to insert in Django templates.
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
link_text (str): optional text to use for the generated link
(the revision id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form
'<a href="revision_log_url">link_text</a>'
"""
if not revision_id:
return None
revision_log_url = get_revision_log_url(revision_id, snapshot_context)
if not link_text:
link_text = revision_id
return gen_link(revision_log_url, link_text, link_attrs)
def gen_person_mail_link(person, link_text=None):
"""
Utility function for generating a mail link to a person to insert
in Django templates.
Args:
person (dict): dictionary containing person data
(*name*, *email*, *fullname*)
link_text (str): optional text to use for the generated mail link
(the person name will be used by default)
Returns:
str: A mail link to the person or the person name if no email is
present in person data
"""
person_name = person["name"] or person["fullname"] or "None"
if link_text is None:
link_text = person_name
person_email = person["email"] if person["email"] else None
if person_email is None and "@" in person_name and " " not in person_name:
person_email = person_name
if person_email:
return gen_link(url="mailto:%s" % person_email, link_text=link_text)
else:
return person_name
def gen_release_link(
sha1_git,
snapshot_context=None,
link_text="Browse",
link_attrs={"class": "btn btn-default btn-sm", "role": "button"},
):
"""
Utility function for generating a link to a release HTML view
to insert in Django templates.
Args:
sha1_git (str): release identifier
link_text (str): optional text for the generated link
(the release id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="release_url">link_text</a>'
"""
query_params = _snapshot_context_query_params(snapshot_context)
release_url = reverse(
"browse-release", url_args={"sha1_git": sha1_git}, query_params=query_params
)
if not link_text:
link_text = sha1_git
return gen_link(release_url, link_text, link_attrs)
def format_log_entries(revision_log, per_page, snapshot_context=None):
"""
Utility function that processes raw revision log data for HTML display.
Its purpose is to:
* add links to relevant browse views
* format date in human readable format
* truncate the message log
Args:
revision_log (list): raw revision log as returned by the swh-web api
per_page (int): number of log entries per page
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
"""
revision_log_data = []
for i, rev in enumerate(revision_log):
if i == per_page:
break
author_name = "None"
author_fullname = "None"
committer_fullname = "None"
if rev["author"]:
author_name = gen_person_mail_link(rev["author"])
author_fullname = rev["author"]["fullname"]
if rev["committer"]:
committer_fullname = rev["committer"]["fullname"]
author_date = format_utc_iso_date(rev["date"])
committer_date = format_utc_iso_date(rev["committer_date"])
tooltip = "revision %s\n" % rev["id"]
tooltip += "author: %s\n" % author_fullname
tooltip += "author date: %s\n" % author_date
tooltip += "committer: %s\n" % committer_fullname
tooltip += "committer date: %s\n\n" % committer_date
if rev["message"]:
tooltip += textwrap.indent(rev["message"], " " * 4)
revision_log_data.append(
{
"author": author_name,
"id": rev["id"][:7],
"message": rev["message"],
"date": author_date,
"commit_date": committer_date,
"url": gen_revision_url(rev["id"], snapshot_context),
"tooltip": tooltip,
}
)
return revision_log_data
# list of common readme names ordered by preference
# (lower indices have higher priority)
_common_readme_names = [
"readme.markdown",
"readme.md",
"readme.rst",
"readme.txt",
"readme",
]
def get_readme_to_display(readmes):
"""
Process a list of readme files found in a directory
in order to find the adequate one to display.
Args:
readmes: a dict where keys are readme file names and values
are readme sha1s
Returns:
A tuple (readme_name, readme_url, readme_html)
"""
readme_name = None
readme_url = None
readme_sha1 = None
readme_html = None
lc_readmes = {k.lower(): {"orig_name": k, "sha1": v} for k, v in readmes.items()}
# look for readme names according to the preference order
# defined by the _common_readme_names list
for common_readme_name in _common_readme_names:
if common_readme_name in lc_readmes:
readme_name = lc_readmes[common_readme_name]["orig_name"]
readme_sha1 = lc_readmes[common_readme_name]["sha1"]
readme_url = reverse(
"browse-content-raw",
url_args={"query_string": readme_sha1},
query_params={"re_encode": "true"},
)
break
# otherwise pick the first readme like file if any
if not readme_name and len(readmes.items()) > 0:
readme_name = next(iter(readmes))
readme_sha1 = readmes[readme_name]
readme_url = reverse(
"browse-content-raw",
url_args={"query_string": readme_sha1},
query_params={"re_encode": "true"},
)
# convert rst README to html server side as there is
# no viable solution to perform that task client side
if readme_name and readme_name.endswith(".rst"):
cache_entry_id = "readme_%s" % readme_sha1
cache_entry = cache.get(cache_entry_id)
if cache_entry:
readme_html = cache_entry
else:
try:
rst_doc = request_content(readme_sha1)
readme_html = rst_to_html(rst_doc["raw_data"])
cache.set(cache_entry_id, readme_html)
except Exception as exc:
sentry_sdk.capture_exception(exc)
readme_html = "Readme bytes are not available"
return readme_name, readme_url, readme_html
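# Illustrative call (hypothetical sha1 values):
#   get_readme_to_display({"README.rst": "<sha1>", "CHANGELOG": "<sha1>"})
#   matches "README.rst" case-insensitively against the preference list,
#   returns its name, its raw content URL and, since it is reStructuredText,
#   the HTML rendered server side (cached under "readme_<sha1>").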
diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py
index 6f1119c0..7a985607 100644
--- a/swh/web/browse/views/release.py
+++ b/swh/web/browse/views/release.py
@@ -1,221 +1,239 @@
# Copyright (C) 2017-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import sentry_sdk
from django.shortcuts import render
from swh.model.identifiers import CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT
from swh.web.browse.browseurls import browse_route
from swh.web.browse.snapshot_context import get_snapshot_context
from swh.web.browse.utils import (
gen_content_link,
gen_directory_link,
gen_link,
gen_person_mail_link,
gen_release_link,
gen_revision_link,
)
from swh.web.common import archive
from swh.web.common.exc import NotFoundExc
from swh.web.common.identifiers import get_swhids_info
from swh.web.common.typing import ReleaseMetadata, SWHObjectInfo
from swh.web.common.utils import format_utc_iso_date, reverse
@browse_route(
r"release/(?P[0-9a-f]+)/",
view_name="browse-release",
checksum_args=["sha1_git"],
)
def release_browse(request, sha1_git):
"""
Django view that produces an HTML display of a release
identified by its id.
The url that points to it is :http:get:`/browse/release/(sha1_git)/`.
"""
release = archive.lookup_release(sha1_git)
snapshot_context = {}
origin_info = None
snapshot_id = request.GET.get("snapshot_id")
if not snapshot_id:
snapshot_id = request.GET.get("snapshot")
origin_url = request.GET.get("origin_url")
if not origin_url:
origin_url = request.GET.get("origin")
timestamp = request.GET.get("timestamp")
visit_id = request.GET.get("visit_id")
if origin_url:
try:
snapshot_context = get_snapshot_context(
- snapshot_id, origin_url, timestamp, visit_id
+ snapshot_id,
+ origin_url,
+ timestamp,
+ visit_id,
+ release_name=release["name"],
)
except NotFoundExc as e:
raw_rel_url = reverse("browse-release", url_args={"sha1_git": sha1_git})
error_message = (
"The Software Heritage archive has a release "
"with the hash you provided but the origin "
"mentioned in your request appears broken: %s. "
"Please check the URL and try again.\n\n"
"Nevertheless, you can still browse the release "
"without origin information: %s"
% (gen_link(origin_url), gen_link(raw_rel_url))
)
if str(e).startswith("Origin"):
raise NotFoundExc(error_message)
else:
raise e
origin_info = snapshot_context["origin_info"]
elif snapshot_id:
- snapshot_context = get_snapshot_context(snapshot_id)
+ snapshot_context = get_snapshot_context(
+ snapshot_id, release_name=release["name"]
+ )
snapshot_id = snapshot_context.get("snapshot_id", None)
release_metadata = ReleaseMetadata(
object_type=RELEASE,
object_id=sha1_git,
release=sha1_git,
author=release["author"]["fullname"] if release["author"] else "None",
author_url=gen_person_mail_link(release["author"])
if release["author"]
else "None",
date=format_utc_iso_date(release["date"]),
name=release["name"],
synthetic=release["synthetic"],
target=release["target"],
target_type=release["target_type"],
snapshot=snapshot_id,
origin_url=origin_url,
)
release_note_lines = []
if release["message"]:
release_note_lines = release["message"].split("\n")
+ swh_objects = [SWHObjectInfo(object_type=RELEASE, object_id=sha1_git)]
+
vault_cooking = None
rev_directory = None
target_link = None
if release["target_type"] == REVISION:
target_link = gen_revision_link(
release["target"],
snapshot_context=snapshot_context,
link_text=None,
link_attrs=None,
)
try:
revision = archive.lookup_revision(release["target"])
rev_directory = revision["directory"]
vault_cooking = {
"directory_context": True,
"directory_id": rev_directory,
"revision_context": True,
"revision_id": release["target"],
}
+ swh_objects.append(
+ SWHObjectInfo(object_type=REVISION, object_id=release["target"])
+ )
+ swh_objects.append(
+ SWHObjectInfo(object_type=DIRECTORY, object_id=rev_directory)
+ )
except Exception as exc:
sentry_sdk.capture_exception(exc)
elif release["target_type"] == DIRECTORY:
target_link = gen_directory_link(
release["target"],
snapshot_context=snapshot_context,
link_text=None,
link_attrs=None,
)
try:
# check directory exists
archive.lookup_directory(release["target"])
vault_cooking = {
"directory_context": True,
"directory_id": release["target"],
"revision_context": False,
"revision_id": None,
}
+ swh_objects.append(
+ SWHObjectInfo(object_type=DIRECTORY, object_id=release["target"])
+ )
except Exception as exc:
sentry_sdk.capture_exception(exc)
elif release["target_type"] == CONTENT:
target_link = gen_content_link(
release["target"],
snapshot_context=snapshot_context,
link_text=None,
link_attrs=None,
)
+ swh_objects.append(
+ SWHObjectInfo(object_type=CONTENT, object_id=release["target"])
+ )
elif release["target_type"] == RELEASE:
target_link = gen_release_link(
release["target"],
snapshot_context=snapshot_context,
link_text=None,
link_attrs=None,
)
rev_directory_url = None
if rev_directory is not None:
if origin_info:
rev_directory_url = reverse(
"browse-origin-directory",
query_params={
"origin_url": origin_info["url"],
"release": release["name"],
"snapshot": snapshot_id,
},
)
elif snapshot_id:
rev_directory_url = reverse(
"browse-snapshot-directory",
url_args={"snapshot_id": snapshot_id},
query_params={"release": release["name"]},
)
else:
rev_directory_url = reverse(
"browse-directory", url_args={"sha1_git": rev_directory}
)
directory_link = None
if rev_directory_url is not None:
directory_link = gen_link(rev_directory_url, rev_directory)
release["directory_link"] = directory_link
release["target_link"] = target_link
- swh_objects = [SWHObjectInfo(object_type=RELEASE, object_id=sha1_git)]
-
if snapshot_context:
snapshot_id = snapshot_context["snapshot_id"]
if snapshot_id:
swh_objects.append(SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id))
swhids_info = get_swhids_info(swh_objects, snapshot_context)
note_header = "None"
if len(release_note_lines) > 0:
note_header = release_note_lines[0]
release["note_header"] = note_header
release["note_body"] = "\n".join(release_note_lines[1:])
heading = "Release - %s" % release["name"]
if snapshot_context:
context_found = "snapshot: %s" % snapshot_context["snapshot_id"]
if origin_info:
context_found = "origin: %s" % origin_info["url"]
heading += " - %s" % context_found
return render(
request,
"browse/release.html",
{
"heading": heading,
"swh_object_id": swhids_info[0]["swhid"],
"swh_object_name": "Release",
"swh_object_metadata": release_metadata,
"release": release,
"snapshot_context": snapshot_context,
"show_actions": True,
"breadcrumbs": None,
"vault_cooking": vault_cooking,
"top_right_link": None,
"swhids_info": swhids_info,
},
)
diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py
index 048bbf94..561407bb 100644
--- a/swh/web/tests/browse/views/test_release.py
+++ b/swh/web/tests/browse/views/test_release.py
@@ -1,145 +1,166 @@
# Copyright (C) 2018-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import random
from hypothesis import given
from django.utils.html import escape
+from swh.model.identifiers import DIRECTORY, RELEASE, REVISION, SNAPSHOT
from swh.web.common.identifiers import gen_swhid
from swh.web.common.utils import format_utc_iso_date, reverse
from swh.web.tests.django_asserts import assert_contains
from swh.web.tests.strategies import origin_with_releases, release, unknown_release
from swh.web.tests.utils import check_html_get_response
@given(release())
def test_release_browse(client, archive_data, release):
_release_browse_checks(client, release, archive_data)
@given(origin_with_releases())
def test_release_browse_with_origin_snapshot(client, archive_data, origin):
snapshot = archive_data.snapshot_get_latest(origin["url"])
release = random.choice(
[
b["target"]
for b in snapshot["branches"].values()
if b["target_type"] == "release"
]
)
_release_browse_checks(client, release, archive_data, origin_url=origin["url"])
_release_browse_checks(client, release, archive_data, snapshot_id=snapshot["id"])
_release_browse_checks(
client,
release,
archive_data,
origin_url=origin["url"],
snapshot_id=snapshot["id"],
)
@given(unknown_release())
def test_release_browse_not_found(client, archive_data, unknown_release):
url = reverse("browse-release", url_args={"sha1_git": unknown_release})
resp = check_html_get_response(
client, url, status_code=404, template_used="error.html"
)
err_msg = "Release with sha1_git %s not found" % unknown_release
assert_contains(resp, err_msg, status_code=404)
@given(release())
def test_release_uppercase(client, release):
url = reverse(
"browse-release-uppercase-checksum", url_args={"sha1_git": release.upper()}
)
resp = check_html_get_response(client, url, status_code=302)
redirect_url = reverse("browse-release", url_args={"sha1_git": release})
assert resp["location"] == redirect_url
def _release_browse_checks(
client, release, archive_data, origin_url=None, snapshot_id=None
):
query_params = {"origin_url": origin_url, "snapshot": snapshot_id}
url = reverse(
"browse-release", url_args={"sha1_git": release}, query_params=query_params
)
release_data = archive_data.release_get(release)
release_id = release_data["id"]
release_name = release_data["name"]
author_name = release_data["author"]["name"]
release_date = release_data["date"]
message = release_data["message"]
target_type = release_data["target_type"]
target = release_data["target"]
target_url = reverse(
"browse-revision", url_args={"sha1_git": target}, query_params=query_params
)
message_lines = message.split("\n")
resp = check_html_get_response(
client, url, status_code=200, template_used="browse/release.html"
)
assert_contains(resp, author_name)
assert_contains(resp, format_utc_iso_date(release_date))
assert_contains(
resp,
"%s
%s" % (message_lines[0] or "None", "\n".join(message_lines[1:])),
)
assert_contains(resp, release_id)
assert_contains(resp, release_name)
assert_contains(resp, target_type)
assert_contains(resp, '<a href="%s">%s</a>' % (escape(target_url), target))
- swh_rel_id = gen_swhid("release", release_id)
+ swh_rel_id = gen_swhid(RELEASE, release_id)
swh_rel_id_url = reverse("browse-swhid", url_args={"swhid": swh_rel_id})
assert_contains(resp, swh_rel_id)
assert_contains(resp, swh_rel_id_url)
if origin_url:
browse_origin_url = reverse(
"browse-origin", query_params={"origin_url": origin_url}
)
assert_contains(resp, f'href="{browse_origin_url}"')
elif snapshot_id:
- swh_snp_id = gen_swhid("snapshot", snapshot_id)
+ swh_snp_id = gen_swhid(SNAPSHOT, snapshot_id)
swh_snp_id_url = reverse("browse-swhid", url_args={"swhid": swh_snp_id})
assert_contains(resp, f'href="{swh_snp_id_url}"')
if release_data["target_type"] == "revision":
+ rev = archive_data.revision_get(release_data["target"])
+ rev_dir = rev["directory"]
+ rev_metadata = {}
+ dir_metadata = {}
if origin_url:
directory_url = reverse(
"browse-origin-directory",
query_params={
"origin_url": origin_url,
"release": release_data["name"],
"snapshot": snapshot_id,
},
)
+ rev_metadata["origin"] = dir_metadata["origin"] = origin_url
+ snapshot = archive_data.snapshot_get_latest(origin_url)
+ rev_metadata["visit"] = dir_metadata["visit"] = gen_swhid(
+ SNAPSHOT, snapshot["id"]
+ )
+ dir_metadata["anchor"] = gen_swhid(RELEASE, release_id)
+
elif snapshot_id:
directory_url = reverse(
"browse-snapshot-directory",
url_args={"snapshot_id": snapshot_id},
query_params={"release": release_data["name"],},
)
- else:
- rev = archive_data.revision_get(release_data["target"])
- directory_url = reverse(
- "browse-directory", url_args={"sha1_git": rev["directory"]}
+ rev_metadata["visit"] = dir_metadata["visit"] = gen_swhid(
+ SNAPSHOT, snapshot_id
)
+ dir_metadata["anchor"] = gen_swhid(RELEASE, release_id)
+ else:
+ directory_url = reverse("browse-directory", url_args={"sha1_git": rev_dir})
assert_contains(resp, escape(directory_url))
+
+ swh_rev_id = gen_swhid(REVISION, rev["id"], metadata=rev_metadata)
+ swh_rev_id_url = reverse("browse-swhid", url_args={"swhid": swh_rev_id})
+ assert_contains(resp, swh_rev_id_url)
+
+ swh_dir_id = gen_swhid(DIRECTORY, rev_dir, metadata=dir_metadata)
+ swh_dir_id_url = reverse("browse-swhid", url_args={"swhid": swh_dir_id})
+ assert_contains(resp, swh_dir_id_url)