diff --git a/cypress/integration/code-highlighting.spec.js b/cypress/integration/code-highlighting.spec.js index 069c875a..a5d95d43 100644 --- a/cypress/integration/code-highlighting.spec.js +++ b/cypress/integration/code-highlighting.spec.js @@ -1,94 +1,94 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {random} from '../utils'; const $ = Cypress.$; let origin; const lineStart = 32; const lineEnd = 42; let url; describe('Code highlighting tests', function() { before(function() { origin = this.origin[0]; - url = `${this.Urls.browse_origin_content(origin.url)}?path=${origin.content[0].path}`; + url = `${this.Urls.browse_origin_content()}?origin_url=${origin.url}&path=${origin.content[0].path}`; }); it('should highlight source code and add line numbers', function() { cy.visit(url); cy.get('.hljs-ln-numbers').then(lnNumbers => { cy.get('.hljs-ln-code') .should('have.length', lnNumbers.length); }); }); it('should emphasize source code lines based on url fragment', function() { cy.visit(`${url}/#L${lineStart}-L${lineEnd}`); cy.get('.hljs-ln-line').then(lines => { for (let line of lines) { const lineElt = $(line); const lineNumber = parseInt(lineElt.data('line-number')); if (lineNumber >= lineStart && lineNumber <= lineEnd) { assert.notEqual(lineElt.css('background-color'), 'rgba(0, 0, 0, 0)'); } else { assert.equal(lineElt.css('background-color'), 'rgba(0, 0, 0, 0)'); } } }); }); it('should emphasize a line by clicking on its number', function() { cy.visit(url); cy.get('.hljs-ln-numbers').then(lnNumbers => { const lnNumber = lnNumbers[random(0, lnNumbers.length)]; const lnNumberElt = $(lnNumber); assert.equal(lnNumberElt.css('background-color'), 'rgba(0, 0, 0, 0)'); const line = parseInt(lnNumberElt.data('line-number')); cy.get(`.hljs-ln-numbers[data-line-number="${line}"]`) .click() .then(() => { assert.notEqual(lnNumberElt.css('background-color'), 'rgba(0, 0, 0, 0)'); }); }); }); it('should emphasize a range of lines by clicking on two line numbers and holding shift', function() { cy.visit(url); cy.get(`.hljs-ln-numbers[data-line-number="${lineStart}"]`) .click() .get(`body`) .type(`{shift}`, {release: false}) .get(`.hljs-ln-numbers[data-line-number="${lineEnd}"]`) .click() .get('.hljs-ln-line') .then(lines => { for (let line of lines) { const lineElt = $(line); const lineNumber = parseInt(lineElt.data('line-number')); if (lineNumber >= lineStart && lineNumber <= lineEnd) { assert.notEqual(lineElt.css('background-color'), 'rgba(0, 0, 0, 0)'); } else { assert.equal(lineElt.css('background-color'), 'rgba(0, 0, 0, 0)'); } } }); }); it('should remove emphasized lines when clicking anywhere in code', function() { cy.visit(`${url}/#L${lineStart}-L${lineEnd}`); cy.get(`.hljs-ln-code[data-line-number="1"]`) .click() .get('.hljs-ln-line') .should('have.css', 'background-color', 'rgba(0, 0, 0, 0)'); }); }); diff --git a/cypress/integration/content-display.spec.js b/cypress/integration/content-display.spec.js index 4526cd6a..36e51341 100644 --- a/cypress/integration/content-display.spec.js +++ b/cypress/integration/content-display.spec.js @@ -1,66 +1,66 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory 
of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ let origin; let url; describe('Test File Rendering', function() { before(function() { origin = this.origin[0]; - url = `${this.Urls.browse_origin_content(origin.url)}?path=${origin.content[0].path}`; + url = `${this.Urls.browse_origin_content()}?origin_url=${origin.url}&path=${origin.content[0].path}`; }); beforeEach(function() { cy.visit(url); }); it('should display correct file name', function() { cy.get('.swh-content-filename') .should('be.visible') .and('contain', origin.content[0].name) .and('have.css', 'background-color', 'rgb(242, 244, 245)'); }); it('should display all lines', function() { cy.get('.hljs-ln-code') .should('have.length', origin.content[0].numberLines) .and('be.visible') .and('have.css', 'background-color', 'rgba(0, 0, 0, 0)'); }); it('should show correct path', function() { // Array containing names of all the ancestor directories of the file const filePathArr = origin.content[0].path.slice(1, -1).slice('/'); filePathArr.split('/').forEach(dirName => { cy.get('.swh-browse-bread-crumbs') .should('contain', dirName); }); }); it('should have links to all ancestor directories', function() { - const rootDirUrl = this.Urls.browse_origin_directory(origin.url); + const rootDirUrl = `${this.Urls.browse_origin_directory()}?origin_url=${origin.url}`; cy.get(`a[href='${rootDirUrl}']`) .should('be.visible'); const splittedPath = origin.content[0].path.split('/'); for (let i = 2; i < splittedPath.length; ++i) { const subDirPath = splittedPath.slice(1, i).join('/'); - const subDirUrl = `${this.Urls.browse_origin_directory(origin.url)}?path=${subDirPath}`; + const subDirUrl = `${this.Urls.browse_origin_directory()}?origin_url=${origin.url}&path=${subDirPath}`; cy.get(`a[href='${subDirUrl}']`) .should('be.visible'); } }); it('should have correct url to raw file', function() { cy.get(`a[href='${origin.content[0].rawFilePath}']`) .should('be.visible'); }); }); diff --git a/cypress/integration/directory.spec.js b/cypress/integration/directory.spec.js index 3094c64c..4b697e2c 100644 --- a/cypress/integration/directory.spec.js +++ b/cypress/integration/directory.spec.js @@ -1,75 +1,75 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ const $ = Cypress.$; let origin; let url; let dirs = []; let files = []; describe('Directory Tests', function() { before(function() { origin = this.origin[0]; - url = this.Urls.browse_origin_directory(origin.url); + url = `${this.Urls.browse_origin_directory()}?origin_url=${origin.url}`; for (let entry of origin.dirContent) { if (entry.type === 'file') { files.push(entry); } else { dirs.push(entry); } } }); beforeEach(function() { cy.visit(url); }); it('should display all files and directories', function() { cy.get('.swh-directory') .should('have.length', dirs.length) .and('be.visible'); cy.get('.swh-content') .should('have.length', files.length) .and('be.visible'); }); it('should display sizes for files', function() { cy.get('.swh-content') .parent('tr') .then((rows) => { for (let row of rows) { let text = $(row).children('td').eq(2).text(); expect(text.trim()).to.not.be.empty; } }); }); it('should display readme when it is present', 
function() { cy.get('#readme-panel > .card-body') .should('be.visible') .and('have.class', 'swh-showdown') .and('not.be.empty') .and('not.contain', 'Readme bytes are not available'); }); it('should open subdirectory when clicked', function() { cy.get('.swh-directory') .first() .children('a') .click(); cy.url() - .should('include', `${url}?path=${dirs[0]['name']}`); + .should('include', `${url}&path=${dirs[0]['name']}`); cy.get('.swh-directory-table') .should('be.visible'); }); }); diff --git a/cypress/integration/errors.spec.js b/cypress/integration/errors.spec.js index b1ef99c2..e945f301 100644 --- a/cypress/integration/errors.spec.js +++ b/cypress/integration/errors.spec.js @@ -1,148 +1,147 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ let origin; const invalidChecksum = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; const invalidPageUrl = '/invalidPath'; function urlShouldShowError(url, error) { cy.visit(url, { failOnStatusCode: false }); cy.get('.swh-http-error') .should('be.visible'); cy.get('.swh-http-error-code') .should('contain', error.code); cy.get('.swh-http-error-desc') .should('contain', error.msg); } describe('Test Errors', function() { before(function() { origin = this.origin[0]; }); it('should show navigation buttons on error page', function() { cy.visit(invalidPageUrl, { failOnStatusCode: false }); cy.get('a[onclick="window.history.back();"]') .should('be.visible'); cy.get('a[href="/"') .should('be.visible'); }); context('For unarchived repositories', function() { it('should display NotFoundExc for unarchived repo', function() { - const url = this.Urls.browse_origin_directory(this.unarchivedRepo.url); + const url = `${this.Urls.browse_origin_directory()}?origin_url=${this.unarchivedRepo.url}`; urlShouldShowError(url, { code: '404', msg: 'NotFoundExc: Origin with url ' + this.unarchivedRepo.url + ' not found!' }); }); it('should display NotFoundExc for unarchived content', function() { const url = this.Urls.browse_content(`sha1_git:${this.unarchivedRepo.content[0].sha1git}`); urlShouldShowError(url, { code: '404', msg: 'NotFoundExc: Content with sha1_git checksum equals to ' + this.unarchivedRepo.content[0].sha1git + ' not found!' }); }); it('should display NotFoundExc for unarchived directory sha1git', function() { const url = this.Urls.browse_directory(this.unarchivedRepo.rootDirectory); urlShouldShowError(url, { code: '404', msg: 'NotFoundExc: Directory with sha1_git ' + this.unarchivedRepo.rootDirectory + ' not found' }); }); it('should display NotFoundExc for unarchived revision sha1git', function() { const url = this.Urls.browse_revision(this.unarchivedRepo.revision); urlShouldShowError(url, { code: '404', msg: 'NotFoundExc: Revision with sha1_git ' + this.unarchivedRepo.revision + ' not found.' }); }); it('should display NotFoundExc for unarchived snapshot sha1git', function() { const url = this.Urls.browse_snapshot(this.unarchivedRepo.snapshot); urlShouldShowError(url, { code: '404', msg: 'Snapshot with id ' + this.unarchivedRepo.snapshot + ' not found!' 
}); }); }); context('For archived repositories', function() { before(function() { - const url = this.Urls.browse_origin_directory(origin.url); + const url = `${this.Urls.browse_origin_directory()}?origin_url=${origin.url}`; cy.visit(url); }); it('should display NotFoundExc for invalid directory from archived repo', function() { - const rootDir = this.Urls.browse_origin_directory(origin.url); - const subDir = rootDir + origin.invalidSubDir; + const subDir = `${this.Urls.browse_origin_directory()}?origin_url=${origin.url}&path=${origin.invalidSubDir}`; urlShouldShowError(subDir, { code: '404', msg: 'NotFoundExc: Directory entry with path ' + origin.invalidSubDir + ' from ' + origin.rootDirectory + ' not found' }); }); it(`should display NotFoundExc for incorrect origin_url with correct content hash`, function() { const url = this.Urls.browse_content(`sha1_git:${origin.content[0].sha1git}`) + `?origin_url=${this.unarchivedRepo.url}`; urlShouldShowError(url, { code: '404', msg: 'The Software Heritage archive has a content ' + 'with the hash you provided but the origin ' + 'mentioned in your request appears broken: ' + this.unarchivedRepo.url + '. ' + 'Please check the URL and try again.\n\n' + 'Nevertheless, you can still browse the content ' + 'without origin information: ' + '/browse/content/sha1_git:' + origin.content[0].sha1git + '/' }); }); }); context('For invalid data', function() { it(`should display 400 for invalid checksum for directory, snapshot, revision, content`, function() { const types = ['directory', 'snapshot', 'revision', 'content']; for (let type of types) { const url = this.Urls[`browse_${type}`](invalidChecksum); urlShouldShowError(url, { code: '400', msg: 'BadInputExc: Invalid checksum query string ' + invalidChecksum }); } }); it('should show 404 error for invalid path', function() { urlShouldShowError(invalidPageUrl, { code: '404', msg: 'The resource ' + invalidPageUrl + ' could not be found on the server.' 
}); }); }); }); diff --git a/cypress/integration/language-select.spec.js b/cypress/integration/language-select.spec.js index 1547a000..b1df65bd 100644 --- a/cypress/integration/language-select.spec.js +++ b/cypress/integration/language-select.spec.js @@ -1,73 +1,73 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {random, checkLanguageHighlighting} from '../utils'; const $ = Cypress.$; let origin; let contentWithLanguageInfo, contentWithoutLanguageInfo; const languageSelect = 'python'; describe('Test Content Language Select', function() { before(function() { origin = this.origin[0]; - contentWithLanguageInfo = `${this.Urls.browse_origin_content(origin.url)}?path=${origin.content[1].path}`; + contentWithLanguageInfo = `${this.Urls.browse_origin_content()}?origin_url=${origin.url}&path=${origin.content[1].path}`; contentWithoutLanguageInfo = this.Urls.browse_content(`sha1_git:${origin.content[1].sha1git}`); }); context('When Language is detected', function() { it('should display correct language in dropdown', function() { cy.visit(contentWithLanguageInfo) .then(() => { cy.get(`code.${$('.language-select').val()}`) .should('exist'); }); }); }); context('When Language is not detected', function() { it('should have no selected language in dropdown', function() { cy.visit(contentWithoutLanguageInfo).then(() => { assert.strictEqual($('.language-select').val(), null); }); }); }); context('When language is switched from dropdown', function() { before(function() { cy.visit(contentWithLanguageInfo); cy.get('.chosen-container') .click() .get('.chosen-results > li') .its('length') .then(numOptions => { const languageIndex = random(0, numOptions); cy.get('.chosen-results > li') .eq(languageIndex) .click(); }); }); it('should contain argument with language in url', function() { cy.location('search') .should('contain', `language=${$('.language-select').val()}`); }); it('should highlight according to new language', function() { checkLanguageHighlighting($('.language-select').val()); }); }); it('should highlight according to the language passed as argument in url', function() { cy.visit(`${contentWithLanguageInfo}&language=${languageSelect}`); checkLanguageHighlighting(languageSelect); }); }); diff --git a/cypress/integration/origin-visits.spec.js b/cypress/integration/origin-visits.spec.js index ef9ed891..f0b1b000 100644 --- a/cypress/integration/origin-visits.spec.js +++ b/cypress/integration/origin-visits.spec.js @@ -1,78 +1,78 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {getTime} from '../utils'; let origin; function checkTimeLink(element) { expect(element.text()).not.to.be.empty; const urlParams = new URLSearchParams(element.attr('href').split('?')[1]); const timeStringLink = urlParams.get('timestamp'); // time in link should be equal to that in text assert.deepEqual(getTime(timeStringLink), getTime(element.text())); } function searchInCalendar(date) { cy.contains('label', 'Show all visits') .click(); cy.get(`.year${date.year}`) 
.click({force: true}); cy.contains('.month', date.monthName) .find('.day-content') .eq(date.date - 1) .trigger('mouseenter') .get('.popover-body') .should('be.visible') .and('contain', `${date.hours}:${date.minutes} UTC`); } describe('Visits tests', function() { before(function() { origin = this.origin[1]; }); beforeEach(function() { - cy.visit(this.Urls.browse_origin_visits(origin.url)); + cy.visit(`${this.Urls.browse_origin_visits()}?origin_url=${origin.url}`); }); it('should display first full visit time', function() { cy.get('#swh-first-full-visit > .swh-visit-full') .then(($el) => { checkTimeLink($el); searchInCalendar(getTime($el.text())); }); }); it('should display last full visit time', function() { cy.get('#swh-last-full-visit > .swh-visit-full') .then(($el) => { checkTimeLink($el); searchInCalendar(getTime($el.text())); }); }); it('should display last visit time', function() { cy.get('#swh-last-visit > .swh-visit-full') .then(($el) => { checkTimeLink($el); searchInCalendar(getTime($el.text())); }); }); it('should display list of visits and mark them on calendar', function() { cy.get('.swh-visits-list-row .swh-visit-full') .should('be.visible') .each(($el) => { checkTimeLink($el); searchInCalendar(getTime($el.text())); }); }); }); diff --git a/cypress/integration/persistent-identifiers.spec.js b/cypress/integration/persistent-identifiers.spec.js index fa5266ed..afac0fd7 100644 --- a/cypress/integration/persistent-identifiers.spec.js +++ b/cypress/integration/persistent-identifiers.spec.js @@ -1,259 +1,259 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ let origin, originBadgeUrl, originBrowseUrl; let url, urlPrefix; let browsedObjectMetadata; let cntPid, cntPidWithOrigin, cntPidWithOriginAndLines; let dirPid, dirPidWithOrigin; let relPid, relPidWithOrigin; let revPid, revPidWithOrigin; let snpPid, snpPidWithOrigin; let testsData; const firstSelLine = 6; const lastSelLine = 12; describe('Persistent Identifiers Tests', function() { before(function() { origin = this.origin[1]; - url = `${this.Urls.browse_origin_content(origin.url)}?path=${origin.content[0].path}`; + url = `${this.Urls.browse_origin_content()}?origin_url=${origin.url}&path=${origin.content[0].path}`; url = `${url}&release=${origin.release}#L${firstSelLine}-L${lastSelLine}`; originBadgeUrl = this.Urls.swh_badge('origin', origin.url); - originBrowseUrl = this.Urls.browse_origin(origin.url); + originBrowseUrl = `${this.Urls.browse_origin()}?origin_url=${origin.url}`; cy.visit(url).window().then(win => { urlPrefix = `${win.location.protocol}//${win.location.hostname}`; if (win.location.port) { urlPrefix += `:${win.location.port}`; } browsedObjectMetadata = win.swh.webapp.getBrowsedSwhObjectMetadata(); cntPid = `swh:1:cnt:${browsedObjectMetadata.sha1_git}`; cntPidWithOrigin = `${cntPid};origin=${origin.url}`; cntPidWithOriginAndLines = `${cntPidWithOrigin};lines=${firstSelLine}-${lastSelLine}`; dirPid = `swh:1:dir:${browsedObjectMetadata.directory}`; dirPidWithOrigin = `${dirPid};origin=${origin.url}`; revPid = `swh:1:rev:${browsedObjectMetadata.revision}`; revPidWithOrigin = `${revPid};origin=${origin.url}`; relPid = `swh:1:rel:${browsedObjectMetadata.release}`; relPidWithOrigin = `${relPid};origin=${origin.url}`; snpPid = 
`swh:1:snp:${browsedObjectMetadata.snapshot}`; snpPidWithOrigin = `${snpPid};origin=${origin.url}`; testsData = [ { 'objectType': 'content', 'objectPids': [cntPidWithOriginAndLines, cntPidWithOrigin, cntPid], 'badgeUrl': this.Urls.swh_badge('content', browsedObjectMetadata.sha1_git), 'badgePidUrl': this.Urls.swh_badge_pid(cntPidWithOriginAndLines), 'browseUrl': this.Urls.browse_swh_id(cntPidWithOriginAndLines) }, { 'objectType': 'directory', 'objectPids': [dirPidWithOrigin, dirPid], 'badgeUrl': this.Urls.swh_badge('directory', browsedObjectMetadata.directory), 'badgePidUrl': this.Urls.swh_badge_pid(dirPidWithOrigin), 'browseUrl': this.Urls.browse_swh_id(dirPidWithOrigin) }, { 'objectType': 'release', 'objectPids': [relPidWithOrigin, relPid], 'badgeUrl': this.Urls.swh_badge('release', browsedObjectMetadata.release), 'badgePidUrl': this.Urls.swh_badge_pid(relPidWithOrigin), 'browseUrl': this.Urls.browse_swh_id(relPidWithOrigin) }, { 'objectType': 'revision', 'objectPids': [revPidWithOrigin, revPid], 'badgeUrl': this.Urls.swh_badge('revision', browsedObjectMetadata.revision), 'badgePidUrl': this.Urls.swh_badge_pid(revPidWithOrigin), 'browseUrl': this.Urls.browse_swh_id(revPidWithOrigin) }, { 'objectType': 'snapshot', 'objectPids': [snpPidWithOrigin, snpPid], 'badgeUrl': this.Urls.swh_badge('snapshot', browsedObjectMetadata.snapshot), 'badgePidUrl': this.Urls.swh_badge_pid(snpPidWithOrigin), 'browseUrl': this.Urls.browse_swh_id(snpPidWithOrigin) } ]; }); }); beforeEach(function() { cy.visit(url); }); it('should open and close identifiers tab when clicking on handle', function() { cy.get('#swh-identifiers') .should('have.class', 'ui-slideouttab-ready'); cy.get('.ui-slideouttab-handle') .click(); cy.get('#swh-identifiers') .should('have.class', 'ui-slideouttab-open'); cy.get('.ui-slideouttab-handle') .click(); cy.get('#swh-identifiers') .should('not.have.class', 'ui-slideouttab-open'); }); it('should display identifiers with permalinks for browsed objects', function() { cy.get('.ui-slideouttab-handle') .click(); for (let td of testsData) { cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) .click(); cy.get(`#swh-id-tab-${td.objectType}`) .should('be.visible'); cy.get(`#swh-id-tab-${td.objectType} .swh-id`) .contains(td.objectPids[0]) .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[0])); } }); it('should update content identifier metadata when toggling option checkboxes', function() { cy.get('.ui-slideouttab-handle') .click(); cy.get(`#swh-id-tab-content .swh-id`) .contains(cntPidWithOriginAndLines) .should('have.attr', 'href', this.Urls.browse_swh_id(cntPidWithOriginAndLines)); cy.get('#swh-id-tab-content .swh-id-option-lines') .click(); cy.get(`#swh-id-tab-content .swh-id`) .contains(cntPidWithOrigin) .should('have.attr', 'href', this.Urls.browse_swh_id(cntPidWithOrigin)); cy.get('#swh-id-tab-content .swh-id-option-origin') .click(); cy.get(`#swh-id-tab-content .swh-id`) .contains(cntPid) .should('have.attr', 'href', this.Urls.browse_swh_id(cntPid)); cy.get('#swh-id-tab-content .swh-id-option-origin') .click(); cy.get(`#swh-id-tab-content .swh-id`) .contains(cntPidWithOrigin) .should('have.attr', 'href', this.Urls.browse_swh_id(cntPidWithOrigin)); cy.get('#swh-id-tab-content .swh-id-option-lines') .click(); cy.get(`#swh-id-tab-content .swh-id`) .contains(cntPidWithOriginAndLines) .should('have.attr', 'href', this.Urls.browse_swh_id(cntPidWithOriginAndLines)); }); it('should update other object identifiers metadata when toggling option checkboxes', function() { 
cy.get('.ui-slideouttab-handle') .click(); for (let td of testsData) { // already tested if (td.objectType === 'content') continue; cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) .click(); cy.get(`#swh-id-tab-${td.objectType} .swh-id`) .contains(td.objectPids[0]) .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[0])); cy.get(`#swh-id-tab-${td.objectType} .swh-id-option-origin`) .click(); cy.get(`#swh-id-tab-${td.objectType} .swh-id`) .contains(td.objectPids[1]) .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[1])); cy.get(`#swh-id-tab-${td.objectType} .swh-id-option-origin`) .click(); cy.get(`#swh-id-tab-${td.objectType} .swh-id`) .contains(td.objectPids[0]) .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[0])); } }); it('should display swh badges in identifiers tab for browsed objects', function() { cy.get('.ui-slideouttab-handle') .click(); const originBadgeUrl = this.Urls.swh_badge('origin', origin.url); for (let td of testsData) { cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) .click(); cy.get(`#swh-id-tab-${td.objectType} .swh-badge-origin`) .should('have.attr', 'src', originBadgeUrl); cy.get(`#swh-id-tab-${td.objectType} .swh-badge-${td.objectType}`) .should('have.attr', 'src', td.badgeUrl); } }); it('should display badge integration info when clicking on it', function() { cy.get('.ui-slideouttab-handle') .click(); for (let td of testsData) { cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) .click(); cy.get(`#swh-id-tab-${td.objectType} .swh-badge-origin`) .click() .wait(500); for (let badgeType of ['html', 'md', 'rst']) { cy.get(`.modal .swh-badge-${badgeType}`) .contains(`${urlPrefix}${originBrowseUrl}`) .contains(`${urlPrefix}${originBadgeUrl}`); } cy.get('.modal.show .close') .click() .wait(500); cy.get(`#swh-id-tab-${td.objectType} .swh-badge-${td.objectType}`) .click() .wait(500); for (let badgeType of ['html', 'md', 'rst']) { cy.get(`.modal .swh-badge-${badgeType}`) .contains(`${urlPrefix}${td.browseUrl}`) .contains(`${urlPrefix}${td.badgePidUrl}`); } cy.get('.modal.show .close') .click() .wait(500); } }); }); diff --git a/docs/uri-scheme-browse-origin.rst b/docs/uri-scheme-browse-origin.rst index 59886ebf..eecc8ddf 100644 --- a/docs/uri-scheme-browse-origin.rst +++ b/docs/uri-scheme-browse-origin.rst @@ -1,627 +1,861 @@ Origin ^^^^^^ This describes the URI scheme when one wants to browse the Software Heritage archive in the context of an origin (for instance, a repository crawled from GitHub or a Debian source package). All the views pointed by that scheme offer quick links to browse objects as found during the associated crawls performed by Software Heritage: * the root directory of the origin * the list of branches of the origin * the list of releases of the origin Origin visits """"""""""""" +.. http:get:: /browse/origin/visits/ + + HTML view that displays visits reporting for a software origin identified by + its type and url. + + :query string origin_url: mandatory parameter providing the url of the origin + (e.g. https://github.com/(user)/(repo)) + :statuscode 200: no error + :statuscode 400: no origin url has been provided as parameter + :statuscode 404: requested origin can not be found in the archive + + **Examples:** + + .. 
parsed-literal:: + + :swh_web_browse:`origin/visits/?origin_url=https://github.com/torvalds/linux` + :swh_web_browse:`origin/visits/?origin_url=https://github.com/python/cpython` + :swh_web_browse:`origin/visits/?origin_url=deb://Debian-Security/packages/mediawiki` + :swh_web_browse:`origin/visits/?origin_url=https://gitorious.org/qt/qtbase.git` + + .. http:get:: /browse/origin/(origin_url)/visits/ + :deprecated: + + .. warning:: + That endpoint is deprecated, use :http:get:`/browse/origin/visits/` instead. HTML view that displays a visits reporting for a software origin identified by its type and url. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`origin/https://github.com/torvalds/linux/visits/` :swh_web_browse:`origin/https://github.com/python/cpython/visits/` :swh_web_browse:`origin/deb://Debian-Security/packages/mediawiki/visits/` :swh_web_browse:`origin/https://gitorious.org/qt/qtbase.git/visits/` Origin directory """""""""""""""" -.. http:get:: /browse/origin/(origin_url)/directory/ +.. http:get:: /browse/origin/directory/ HTML view for browsing the content of a directory reachable from the root directory (including itself) associated to the latest full visit of a software origin. The content of the directory is first sorted in lexicographical order and the sub-directories are displayed before the regular files. The view enables to navigate from the requested directory to directories reachable from it in a recursive way but also up to the origin root directory. A breadcrumb located in the top part of the view allows to keep track of the paths navigated so far. The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the directory + The origin branch (default to HEAD) from which to retrieve the directory content can also be specified by using the branch query parameter. - :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) + :query string origin_url: mandatory parameter providing the url of the origin + (e.g. https://github.com/(user)/(repo)) :query string path: optional parameter used to specify the path of a directory reachable from the origin root one :query string branch: specify the origin branch name from which to retrieve the root directory :query string release: specify the origin release name from which to retrieve the root directory :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the root directory :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :query int visit_id: specify a visit id to retrieve the directory from instead of using the latest full visit by default :statuscode 200: no error + :statuscode 400: no origin url has been provided as parameter :statuscode 404: requested origin can not be found in the archive or the provided path does not exist from the origin root directory **Examples:** .. 
parsed-literal:: - :swh_web_browse:`origin/https://github.com/torvalds/linux/directory/` - :swh_web_browse:`origin/https://github.com/torvalds/linux/directory/?path=net/ethernet` - :swh_web_browse:`origin/https://github.com/python/cpython/directory/` - :swh_web_browse:`origin/https://github.com/python/cpython/directory/?path=Python` - :swh_web_browse:`origin/https://github.com/python/cpython/directory/?branch=refs/heads/2.7` - :swh_web_browse:`origin/https://github.com/torvalds/linux/directory/?timestamp=1493926809` - :swh_web_browse:`origin/https://github.com/torvalds/linux/directory/?path=net/ethernet&timestamp=2016-09-14T10:36:21` - :swh_web_browse:`origin/https://github.com/python/cpython/directory/?timestamp=1474620651` - :swh_web_browse:`origin/https://github.com/python/cpython/directory/?path=Python&timestamp=2017-05-05` - :swh_web_browse:`origin/https://github.com/python/cpython/directory/?branch=refs/heads/2.7&/timestamp=2015-08` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/torvalds/linux` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/torvalds/linux&path=net/ethernet` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/python/cpython` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/python/cpython&path=Python` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/python/cpython&branch=refs/heads/2.7` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/torvalds/linux&timestamp=1493926809` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/torvalds/linux&path=net/ethernet&timestamp=2016-09-14T10:36:21Z` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/python/cpython&timestamp=1474620651` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/python/cpython&path=Python&timestamp=2017-05-05` + :swh_web_browse:`origin/directory/?origin_url=https://github.com/python/cpython&branch=refs/heads/2.7&timestamp=2015-08` -.. http:get:: /browse/origin/(origin_url)/directory/(path)/ +.. http:get:: /browse/origin/(origin_url)/directory/[(path)/] :deprecated: .. warning:: - That endpoint is deprecated, use :http:get:`/browse/origin/(origin_url)/directory/` instead. + That endpoint is deprecated, use :http:get:`/browse/origin/directory/` instead. HTML view for browsing the content of a directory reachable from the root directory (including itself) associated to the latest full visit of a software origin. The content of the directory is first sorted in lexicographical order and the sub-directories are displayed before the regular files. The view enables to navigate from the requested directory to directories reachable from it in a recursive way but also up to the origin root directory. A breadcrumb located in the top part of the view allows to keep track of the paths navigated so far. The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the directory + The origin branch (default to HEAD) from which to retrieve the directory content can also be specified by using the branch query parameter. :param string origin_url: the url of the origin (e.g.
https://github.com/(user)/(repo)/) :param string path: optional parameter used to specify the path of a directory reachable from the origin root one :query string branch: specify the origin branch name from which to retrieve the root directory :query string release: specify the origin release name from which to retrieve the root directory :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the root directory :query int visit_id: specify a visit id to retrieve the directory from instead of using the latest full visit by default :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive or the provided path does not exist from the origin root directory **Examples:** .. parsed-literal:: :swh_web_browse:`origin/https://github.com/torvalds/linux/directory/` :swh_web_browse:`origin/https://github.com/torvalds/linux/directory/net/ethernet/` :swh_web_browse:`origin/https://github.com/python/cpython/directory/` :swh_web_browse:`origin/https://github.com/python/cpython/directory/Python/` :swh_web_browse:`origin/https://github.com/python/cpython/directory/?branch=refs/heads/2.7` -.. http:get:: /browse/origin/(origin_url)/visit/(timestamp)/directory/(path)/ +.. http:get:: /browse/origin/(origin_url)/visit/(timestamp)/directory/[(path)/] :deprecated: .. warning:: - That endpoint is deprecated, use :http:get:`/browse/origin/(origin_url)/directory/` instead. + That endpoint is deprecated, use :http:get:`/browse/origin/directory/` instead. HTML view for browsing the content of a directory reachable from the root directory (including itself) associated to a visit of a software origin closest to a provided timestamp. The content of the directory is first sorted in lexicographical order and the sub-directories are displayed before the regular files. The view enables to navigate from the requested directory to directories reachable from it in a recursive way but also up to the origin root directory. A breadcrumb located in the top part of the view allows to keep track of the paths navigated so far. The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the directory + The origin branch (default to HEAD) from which to retrieve the directory content can also be specified by using the branch query parameter. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :param string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :param path: optional parameter used to specify the path of a directory reachable from the origin root one :type path: string :query string branch: specify the origin branch name from which to retrieve the root directory :query string release: specify the origin release name from which to retrieve the root directory :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the directory :query int visit_id: specify a visit id to retrieve the directory from instead of using the provided timestamp :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive, requested visit timestamp does not exist or the provided path does not exist from the origin root directory **Examples:** .. 
parsed-literal:: :swh_web_browse:`origin/https://github.com/torvalds/linux/visit/1493926809/directory/` - :swh_web_browse:`origin/https://github.com/torvalds/linux/visit/2016-09-14T10:36:21/directory/net/ethernet/` + :swh_web_browse:`origin/https://github.com/torvalds/linux/visit/2016-09-14T10:36:21Z/directory/net/ethernet/` :swh_web_browse:`origin/https://github.com/python/cpython/visit/1474620651/directory/` :swh_web_browse:`origin/https://github.com/python/cpython/visit/2017-05-05/directory/Python/` :swh_web_browse:`origin/https://github.com/python/cpython/visit/2015-08/directory/?branch=refs/heads/2.7` Origin content """""""""""""" +.. http:get:: /browse/origin/content/ + + HTML view that produces a display of a content + associated to the latest full visit of a software origin. + + If the content to display is textual, it will be highlighted client-side + if possible using highlightjs_. The procedure to perform that task is described + in :http:get:`/browse/content/[(algo_hash):](hash)/`. + + It is also possible to highlight specific lines of a textual + content (not in terms of syntax highlighting but to emphasize + some relevant content part) by either: + + * clicking on line numbers (holding shift to highlight a lines range) + + * using an url fragment in the form '#Ln' or '#Lm-Ln' + + The view displays a breadcrumb on top of the rendered + content in order to easily navigate up to the origin root directory. + + The view also enables to easily switch between the origin branches + and releases through a dropdown menu. + + The origin branch (default to HEAD) from which to retrieve the content + can also be specified by using the branch query parameter. + + :query string origin_url: mandatory parameter providing the url of the origin + (e.g. https://github.com/(user)/(repo)) + :query string path: path of a content reachable from the origin root directory + :query string branch: specify the origin branch name from which + to retrieve the content + :query string release: specify the origin release name from which + to retrieve the content + :query string revision: specify the origin revision, identified by the hexadecimal + representation of its **sha1_git** value, from which to retrieve the content + :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) + or Unix timestamp to parse in order to find the closest visit. + :query int visit_id: specify a visit id to retrieve the content from instead + of using the latest full visit by default + :statuscode 200: no error + :statuscode 400: no origin url has been provided as parameter + :statuscode 404: requested origin can not be found in the archive, + or the provided content path does not exist from the origin root directory + + **Examples:** + + .. 
parsed-literal:: + + :swh_web_browse:`origin/content/?origin_url=https://github.com/git/git&path=git.c` + :swh_web_browse:`origin/content/?origin_url=https://github.com/mozilla/gecko-dev&path=js/src/json.cpp` + :swh_web_browse:`origin/content/?origin_url=https://github.com/git/git&path=git.c&branch=refs/heads/next` + :swh_web_browse:`origin/content/?origin_url=https://github.com/git/git&path=git.c&timestamp=1473933564` + :swh_web_browse:`origin/content/?origin_url=https://github.com/git/git&path=git.c&timestamp=2016-05-05T00:0:00+00:00Z` + :swh_web_browse:`origin/content/?origin_url=https://github.com/mozilla/gecko-dev&path=js/src/json.cpp&timestamp=1490126182` + :swh_web_browse:`origin/content/?origin_url=https://github.com/mozilla/gecko-dev&path=js/src/json.cpp&timestamp=2017-03-21#L904-L931` + :swh_web_browse:`origin/content/?origin_url=https://github.com/git/git&path=git.c&branch=refs/heads/next&timestamp=2017-09-15` + + .. http:get:: /browse/origin/(origin_url)/content/ + :deprecated: + + .. warning:: + That endpoint is deprecated, use :http:get:`/browse/origin/content/` instead. HTML view that produces a display of a content associated to the latest full visit of a software origin. If the content to display is textual, it will be highlighted client-side if possible using highlightjs_. The procedure to perform that task is described in :http:get:`/browse/content/[(algo_hash):](hash)/`. It is also possible to highlight specific lines of a textual content (not in terms of syntax highlighting but to emphasize some relevant content part) by either: * clicking on line numbers (holding shift to highlight a lines range) * using an url fragment in the form '#Ln' or '#Lm-Ln' The view displays a breadcrumb on top of the rendered content in order to easily navigate up to the origin root directory. The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the content + The origin branch (default to HEAD) from which to retrieve the content can also be specified by using the branch query parameter. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :query string path: path of a content reachable from the origin root directory :query string branch: specify the origin branch name from which to retrieve the content :query string release: specify the origin release name from which to retrieve the content :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the content :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :query int visit_id: specify a visit id to retrieve the content from instead of using the latest full visit by default :statuscode 200: no error + :statuscode 400: no origin url has been provided as parameter :statuscode 404: requested origin can not be found in the archive, or the provided content path does not exist from the origin root directory **Examples:** ..
parsed-literal:: :swh_web_browse:`origin/https://github.com/git/git/content/?path=git.c` :swh_web_browse:`origin/https://github.com/mozilla/gecko-dev/content/?path=js/src/json.cpp` :swh_web_browse:`origin/https://github.com/git/git/content/?path=git.c&branch=refs/heads/next` :swh_web_browse:`origin/https://github.com/git/git/content/?path=git.c&timestamp=1473933564` - :swh_web_browse:`origin/https://github.com/git/git/content/?path=git.c&timestamp=2016-05-05T00:0:00+00:00` + :swh_web_browse:`origin/https://github.com/git/git/content/?path=git.c&timestamp=2016-05-05T00:0:00+00:00Z` :swh_web_browse:`origin/https://github.com/mozilla/gecko-dev/content?path=js/src/json.cpp&timestamp=1490126182` :swh_web_browse:`origin/https://github.com/mozilla/gecko-dev/content?path=js/src/json.cpp&timestamp=2017-03-21#L904-L931` :swh_web_browse:`origin/https://github.com/git/git/content/git.c/?branch=refs/heads/next&timestamp=2017-09-15` .. http:get:: /browse/origin/(origin_url)/content/(path)/ :deprecated: .. warning:: - That endpoint is deprecated, use :http:get:`/browse/origin/(origin_url)/content/` instead. + That endpoint is deprecated, use :http:get:`/browse/origin/content/` instead. HTML view that produces a display of a content associated to the latest full visit of a software origin. If the content to display is textual, it will be highlighted client-side if possible using highlightjs_. The procedure to perform that task is described in :http:get:`/browse/content/[(algo_hash):](hash)/`. It is also possible to highlight specific lines of a textual content (not in terms of syntax highlighting but to emphasize some relevant content part) by either: * clicking on line numbers (holding shift to highlight a lines range) * using an url fragment in the form '#Ln' or '#Lm-Ln' The view displays a breadcrumb on top of the rendered content in order to easily navigate up to the origin root directory. The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the content + The origin branch (default to HEAD) from which to retrieve the content can also be specified by using the branch query parameter. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :param string path: path of a content reachable from the origin root directory :query string branch: specify the origin branch name from which to retrieve the content :query string release: specify the origin release name from which to retrieve the content :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the content :query int visit_id: specify a visit id to retrieve the content from instead of using the latest full visit by default :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive, or the provided content path does not exist from the origin root directory **Examples:** .. parsed-literal:: :swh_web_browse:`origin/https://github.com/git/git/content/git.c/` :swh_web_browse:`origin/https://github.com/git/git/content/git.c/` :swh_web_browse:`origin/https://github.com/mozilla/gecko-dev/content/js/src/json.cpp/` :swh_web_browse:`origin/https://github.com/git/git/content/git.c/?branch=refs/heads/next` .. http:get:: /browse/origin/(origin_url)/visit/(timestamp)/content/(path)/ :deprecated: .. warning:: - That endpoint is deprecated, use :http:get:`/browse/origin/(origin_url)/content/` instead.
+ That endpoint is deprecated, use :http:get:`/browse/origin/content/` instead. HTML view that produces a display of a content associated to a visit of a software origin closest to a provided timestamp. If the content to display is textual, it will be highlighted client-side if possible using highlightjs_. The procedure to perform that task is described in :http:get:`/browse/content/[(algo_hash):](hash)/`. It is also possible to highlight specific lines of a textual content (not in terms of syntax highlighting but to emphasize some relevant content part) by either: * clicking on line numbers (holding shift to highlight a lines range) * using an url fragment in the form '#Ln' or '#Lm-Ln' The view displays a breadcrumb on top of the rendered content in order to easily navigate up to the origin root directory. The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the content + The origin branch (default to HEAD) from which to retrieve the content can also be specified by using the branch query parameter. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :param string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :param string path: path of a content reachable from the origin root directory :query string branch: specify the origin branch name from which to retrieve the content :query string release: specify the origin release name from which to retrieve the content :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the content :query int visit_id: specify a visit id to retrieve the content from instead of using the provided timestamp :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive, requested visit timestamp does not exist or the provided content path does not exist from the origin root directory **Examples:** .. parsed-literal:: :swh_web_browse:`origin/https://github.com/git/git/visit/1473933564/content/git.c/` - :swh_web_browse:`origin/https://github.com/git/git/visit/2016-05-05T00:0:00+00:00/content/git.c/` + :swh_web_browse:`origin/https://github.com/git/git/visit/2016-05-05T00:0:00+00:00Z/content/git.c/` :swh_web_browse:`origin/https://github.com/mozilla/gecko-dev/visit/1490126182/content/js/src/json.cpp/` :swh_web_browse:`origin/https://github.com/mozilla/gecko-dev/visit/2017-03-21/content/js/src/json.cpp/#L904-L931` :swh_web_browse:`origin/https://github.com/git/git/visit/2017-09-15/content/git.c/?branch=refs/heads/next` Origin history """""""""""""" +.. http:get:: /browse/origin/log/ + + HTML view that produces a display of revisions history heading + to the last revision found during the latest visit of a software origin. + In other words, it shows the commit log associated to the latest + full visit of a software origin. + + The following data are displayed for each log entry: + + * link to browse the associated revision in the origin context + * author of the revision + * date of the revision + * message associated the revision + * commit date of the revision + + By default, the revisions are ordered in reverse chronological order of + their commit date. + + N log entries are displayed per page (default is 100). 
In order to navigate + in a large history, two buttons are present at the bottom of the view: + + * **Newer**: fetch and display if available the N more recent log entries + than the ones currently displayed + * **Older**: fetch and display if available the N older log entries + than the ones currently displayed + + The view also enables to easily switch between the origin branches + and releases through a dropdown menu. + + The origin branch (default to HEAD) from which to retrieve the content + can also be specified by using the branch query parameter. + + :query string origin_url: mandatory parameter providing the url of the origin + (e.g. https://github.com/(user)/(repo)) + :query int per_page: the number of log entries to display per page + :query int offset: the number of revisions to skip before returning those to display + :query str revs_ordering: specify the revisions ordering, possible values are ``committer_date``, + ``dfs``, ``dfs_post`` and ``bfs`` + :query string branch: specify the origin branch name from which + to retrieve the commit log + :query string release: specify the origin release name from which + to retrieve the commit log + :query string revision: specify the origin revision, identified by the hexadecimal + representation of its **sha1_git** value, from which to retrieve the commit log + :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) + or Unix timestamp to parse in order to find the closest visit. + :query int visit_id: specify a visit id to retrieve the history log from instead + of using the latest visit by default + :statuscode 200: no error + :statuscode 400: no origin url has been provided as parameter + :statuscode 404: requested origin can not be found in the archive + + **Examples:** + + .. parsed-literal:: + + :swh_web_browse:`origin/log/?origin_url=https://github.com/videolan/vlc` + :swh_web_browse:`origin/log/?origin_url=https://github.com/Kitware/CMake` + :swh_web_browse:`origin/log/?origin_url=https://github.com/Kitware/CMake&branch=refs/heads/release` + :swh_web_browse:`origin/log/?origin_url=https://github.com/videolan/vlc&visit=1459651262` + :swh_web_browse:`origin/log/?origin_url=https://github.com/Kitware/CMake&timestamp=2016-04-01` + :swh_web_browse:`origin/log/?origin_url=https://github.com/Kitware/CMake&branch=refs/heads/release&timestamp=1438116814` + :swh_web_browse:`origin/log/?origin_url=https://github.com/Kitware/CMake&branch=refs/heads/release&timestamp=2017-05-05T03:14:23Z` + + .. http:get:: /browse/origin/(origin_url)/log/ + :deprecated: + + .. warning:: + That endpoint is deprecated, use :http:get:`/browse/origin/log/` instead. HTML view that produces a display of revisions history heading to the last revision found during the latest visit of a software origin. In other words, it shows the commit log associated to the latest full visit of a software origin. The following data are displayed for each log entry: * link to browse the associated revision in the origin context * author of the revision * date of the revision * message associated the revision * commit date of the revision By default, the revisions are ordered in reverse chronological order of their commit date. N log entries are displayed per page (default is 100).
In order to navigate in a large history, two buttons are present at the bottom of the view: * **Newer**: fetch and display if available the N more recent log entries than the ones currently displayed * **Older**: fetch and display if available the N older log entries than the ones currently displayed The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the content + The origin branch (default to HEAD) from which to retrieve the content can also be specified by using the branch query parameter. - :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) + :query string origin_url: mandatory parameter providing the url of the origin + (e.g. https://github.com/(user)/(repo)) :query int per_page: the number of log entries to display per page :query int offset: the number of revisions to skip before returning those to display :query str revs_ordering: specify the revisions ordering, possible values are ``committer_date``, ``dfs``, ``dfs_post`` and ``bfs`` :query string branch: specify the origin branch name from which to retrieve the commit log :query string release: specify the origin release name from which to retrieve the commit log :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the commit log :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :query int visit_id: specify a visit id to retrieve the history log from instead of using the latest visit by default :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`origin/https://github.com/videolan/vlc/log/` :swh_web_browse:`origin/https://github.com/Kitware/CMake/log/` :swh_web_browse:`origin/https://github.com/Kitware/CMake/log/?branch=refs/heads/release` :swh_web_browse:`origin/https://github.com/videolan/vlc/log/?visit=1459651262` :swh_web_browse:`origin/https://github.com/Kitware/CMake/log/?timestamp=2016-04-01` :swh_web_browse:`origin/https://github.com/Kitware/CMake/log/?branch=refs/heads/release&timestamp=1438116814` - :swh_web_browse:`origin/https://github.com/Kitware/CMake/log/?branch=refs/heads/release&timestamp=2017-05-05T03:14:23` + :swh_web_browse:`origin/https://github.com/Kitware/CMake/log/?branch=refs/heads/release&timestamp=2017-05-05T03:14:23Z` .. http:get:: /browse/origin/(origin_url)/visit/(timestamp)/log/ :deprecated: .. warning:: - That endpoint is deprecated, use :http:get:`/browse/origin/(origin_url)/log/` instead. + That endpoint is deprecated, use :http:get:`/browse/origin/log/` instead. HTML view that produces a display of revisions history heading to the last revision found during a visit of a software origin closest to the provided timestamp. In other words, it shows the commit log associated to a visit of a software origin closest to a provided timestamp. The following data are displayed for each log entry: * author of the revision * link to the revision metadata * message associated the revision * date of the revision * link to browse the associated source tree in the origin context N log entries are displayed per page (default is 20).
In order to navigate in a large history, two buttons are present at the bottom of the view: * **Newer**: fetch and display if available the N more recent log entries than the ones currently displayed * **Older**: fetch and display if available the N older log entries than the ones currently displayed The view also enables to easily switch between the origin branches and releases through a dropdown menu. - The origin branch (default to master) from which to retrieve the content + The origin branch (default to HEAD) from which to retrieve the content can also be specified by using the branch query parameter. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :param string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :query int per_page: the number of log entries to display per page (default is 20, max is 50) :query string branch: specify the origin branch name from which to retrieve the commit log :query string release: specify the origin release name from which to retrieve the commit log :query string revision: specify the origin revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the commit log :query int visit_id: specify a visit id to retrieve the history log from instead of using the provided timestamp :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`origin/https://github.com/videolan/vlc/visit/1459651262/log/` :swh_web_browse:`origin/https://github.com/Kitware/CMake/visit/2016-04-01/log/` :swh_web_browse:`origin/https://github.com/Kitware/CMake/visit/1438116814/log/?branch=refs/heads/release` - :swh_web_browse:`origin/https://github.com/Kitware/CMake/visit/2017-05-05T03:14:23/log/?branch=refs/heads/release` + :swh_web_browse:`origin/https://github.com/Kitware/CMake/visit/2017-05-05T03:14:23Z/log/?branch=refs/heads/release` Origin branches """"""""""""""" +.. http:get:: /browse/origin/branches/ + + HTML view that produces a display of the list of branches + found during the latest full visit of a software origin. + + The following data are displayed for each branch: + + * its name + * a link to browse the associated directory + * a link to browse the associated revision + * last commit message + * last commit date + + That list of branches is paginated, each page displaying a maximum of 100 branches. + + :query string origin_url: mandatory parameter providing the url of the origin + (e.g. https://github.com/(user)/(repo)) + :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) + or Unix timestamp to parse in order to find the closest visit. + :statuscode 200: no error + :statuscode 400: no origin url has been provided as parameter + :statuscode 404: requested origin can not be found in the archive + + **Examples:** + + .. parsed-literal:: + + :swh_web_browse:`origin/branches/?origin_url=deb://Debian/packages/linux` + :swh_web_browse:`origin/branches/?origin_url=https://github.com/webpack/webpack` + :swh_web_browse:`origin/branches/?origin_url=https://github.com/kripken/emscripten&timestamp=2017-05-05T12:02:03Z` + :swh_web_browse:`origin/branches/?origin_url=deb://Debian/packages/apache2-mod-xforward&timestamp=2017-11-15T05:15:09Z` + + .. http:get:: /browse/origin/(origin_url)/branches/ + :deprecated: + + ..
warning:: + That endpoint is deprecated, use :http:get:`/browse/origin/branches/` instead. HTML view that produces a display of the list of branches found during the latest full visit of a software origin. The following data are displayed for each branch: * its name * a link to browse the associated directory * a link to browse the associated revision * last commit message * last commit date That list of branches is paginated, each page displaying a maximum of 100 branches. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`origin/deb://Debian/packages/linux/branches/` :swh_web_browse:`origin/https://github.com/webpack/webpack/branches/` - :swh_web_browse:`origin/https://github.com/kripken/emscripten/branches/?timestamp=2017-05-05T12:02:03` + :swh_web_browse:`origin/https://github.com/kripken/emscripten/branches/?timestamp=2017-05-05T12:02:03Z` :swh_web_browse:`origin/deb://Debian/packages/apache2-mod-xforward/branches/?timestamp=2017-11-15T05:15:09` .. http:get:: /browse/origin/(origin_url)/visit/(timestamp)/branches/ :deprecated: .. warning:: - That endpoint is deprecated, use :http:get:`/browse/origin/(origin_url)/branches/` instead. + That endpoint is deprecated, use :http:get:`/browse/origin/branches/` instead. HTML view that produces a display of the list of branches found during a visit of a software origin closest to the provided timestamp. The following data are displayed for each branch: * its name * a link to browse the associated directory * a link to browse the associated revision * last commit message * last commit date That list of branches is paginated, each page displaying a maximum of 100 branches. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :param string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Examples:** .. parsed-literal:: - :swh_web_browse:`origin/https://github.com/kripken/emscripten/visit/2017-05-05T12:02:03/branches/` - :swh_web_browse:`origin/deb://Debian/packages/apache2-mod-xforward/visit/2017-11-15T05:15:09/branches/` + :swh_web_browse:`origin/https://github.com/kripken/emscripten/visit/2017-05-05T12:02:03Z/branches/` + :swh_web_browse:`origin/deb://Debian/packages/apache2-mod-xforward/visit/2017-11-15T05:15:09Z/branches/` Origin releases """"""""""""""" +.. http:get:: /browse/origin/releases/ + + HTML view that produces a display of the list of releases + found during the latest full visit of a software origin. + + The following data are displayed for each release: + + * its name + * a link to browse the release details + * its target type (revision, directory, content or release) + * its associated message + * its date + + That list of releases is paginated, each page displaying a maximum of 100 releases. + + :query string origin_url: mandatory parameter providing the url of the origin + (e.g. https://github.com/(user)/(repo)) + :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) + or Unix timestamp to parse in order to find the closest visit. 
+ :statuscode 200: no error + :statuscode 400: no origin url has been provided as parameter + :statuscode 404: requested origin can not be found in the archive + + **Examples:** + + .. parsed-literal:: + + :swh_web_browse:`origin/releases/?origin_url=https://github.com/git/git` + :swh_web_browse:`origin/releases/?origin_url=https://github.com/webpack/webpack` + :swh_web_browse:`origin/releases/?origin_url=https://github.com/torvalds/linux×tamp=2017-11-21T19:37:42Z` + :swh_web_browse:`origin/releases/?origin_url=https://github.com/Kitware/CMake×tamp=2016-09-23T14:06:35Z` + + .. http:get:: /browse/origin/(origin_url)/releases/ + :deprecated: + + .. warning:: + That endpoint is deprecated, use :http:get:`/browse/origin/releases/` instead. HTML view that produces a display of the list of releases found during the latest full visit of a software origin. The following data are displayed for each release: * its name * a link to browse the release details * its target type (revision, directory, content or release) * its associated message * its date That list of releases is paginated, each page displaying a maximum of 100 releases. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :query string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`origin/https://github.com/git/git/releases/` :swh_web_browse:`origin/https://github.com/webpack/webpack/releases/` - :swh_web_browse:`origin/https://github.com/torvalds/linux/releases/?timestamp=2017-11-21T19:37:42` - :swh_web_browse:`origin/https://github.com/Kitware/CMake/releases/?timestamp=2016-09-23T14:06:35` + :swh_web_browse:`origin/https://github.com/torvalds/linux/releases/?timestamp=2017-11-21T19:37:42Z` + :swh_web_browse:`origin/https://github.com/Kitware/CMake/releases/?timestamp=2016-09-23T14:06:35Z` .. http:get:: /browse/origin/(origin_url)/visit/(timestamp)/releases/ :deprecated: .. warning:: - That endpoint is deprecated, use :http:get:`/browse/origin/(origin_url)/releases/` instead. + That endpoint is deprecated, use :http:get:`/browse/origin/releases/` instead. HTML view that produces a display of the list of releases found during a visit of a software origin closest to the provided timestamp. The following data are displayed for each release: * its name * a link to browse the release details * its target type (revision, directory, content or release) * its associated message * its date That list of releases is paginated, each page displaying a maximum of 100 releases. :param string origin_url: the url of the origin (e.g. https://github.com/(user)/(repo)/) :param string timestamp: a date string (any format parsable by `dateutil.parser.parse`_) or Unix timestamp to parse in order to find the closest visit. :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Examples:** .. parsed-literal:: - :swh_web_browse:`origin/https://github.com/torvalds/linux/visit/2017-11-21T19:37:42/releases/` - :swh_web_browse:`origin/https://github.com/Kitware/CMake/visit/2016-09-23T14:06:35/releases/` + :swh_web_browse:`origin/https://github.com/torvalds/linux/visit/2017-11-21T19:37:42Z/releases/` + :swh_web_browse:`origin/https://github.com/Kitware/CMake/visit/2016-09-23T14:06:35Z/releases/` .. _highlightjs: https://highlightjs.org/ .. 
_dateutil.parser.parse: http://dateutil.readthedocs.io/en/stable/parser.html diff --git a/docs/uri-scheme-browse-snapshot.rst b/docs/uri-scheme-browse-snapshot.rst index 92612582..2ce74be0 100644 --- a/docs/uri-scheme-browse-snapshot.rst +++ b/docs/uri-scheme-browse-snapshot.rst @@ -1,258 +1,258 @@ Snapshot ^^^^^^^^ .. http:get:: /browse/snapshot/(snapshot_id)/ HTML view that displays the content of a snapshot from its identifier (see :func:`swh.model.identifiers.snapshot_identifier` in our data model module for details about how they are computed). A snapshot is a set of named branches, which are pointers to objects at any level of the Software Heritage DAG. It represents a full picture of an origin at a given time. Thus, multiple visits of different origins can point to the same snapshot (for instance, when several projects are forks of a common one). Currently, that endpoint simply performs a redirection to :http:get:`/browse/snapshot/(snapshot_id)/directory/` in order to display the root directory associated to the default snapshot branch (usually master). :param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/baebc2109e4a2ec22a1129a3859647e191d04df4/` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/` Snapshot directory """""""""""""""""" .. http:get:: /browse/snapshot/(snapshot_id)/directory/ HTML view that displays the content of a directory reachable from a snapshot. The features offered by the view are similar to the one for browsing a directory in an origin context - (see :http:get:`/browse/origin/(origin_url)/directory/`). + (see :http:get:`/browse/origin/(origin_url)/directory/[(path)/]`). :param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :query string path: optional parameter used to specify the path of a directory reachable from the snapshot root one :query string branch: specify the snapshot branch name from which to retrieve the root directory :query string release: specify the snapshot release name from which to retrieve the root directory :query string revision: specify the snapshot revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the root directory :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/baebc2109e4a2ec22a1129a3859647e191d04df4/directory/?path=drivers/gpu` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/directory/?path=src/opengl` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/directory/?release=v5.7.0` .. http:get:: /browse/snapshot/(snapshot_id)/directory/(path)/ :deprecated: .. warning:: That endpoint is deprecated, use :http:get:`/browse/snapshot/(snapshot_id)/directory/` instead. HTML view that displays the content of a directory reachable from a snapshot. The features offered by the view are similar to the one for browsing a directory in an origin context - (see :http:get:`/browse/origin/(origin_url)/directory/(path)/`). + (see :http:get:`/browse/origin/(origin_url)/directory/[(path)/]`). 
:param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :param string path: optional parameter used to specify the path of a directory reachable from the snapshot root one :query string branch: specify the snapshot branch name from which to retrieve the root directory :query string release: specify the snapshot release name from which to retrieve the root directory :query string revision: specify the snapshot revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the root directory :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/baebc2109e4a2ec22a1129a3859647e191d04df4/directory/drivers/gpu/` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/directory/src/opengl/` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/directory/?release=v5.7.0` Snapshot content """""""""""""""" .. http:get:: /browse/snapshot/(snapshot_id)/content/ HTML view that produces a display of a content reachable from a snapshot. The features offered by the view are similar to the one for browsing a content in an origin context (see :http:get:`/browse/origin/(origin_url)/content/`). :param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :query string path: path of a content reachable from the snapshot root directory :query string branch: specify the snapshot branch name from which to retrieve the content :query string release: specify the snapshot release name from which to retrieve the content :query string revision: specify the snapshot revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the content :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive, or the provided content path does not exist from the origin root directory **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/baebc2109e4a2ec22a1129a3859647e191d04df4/content/?path=init/initramfs.c` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/content/?path=src/opengl/qglbuffer.h` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/content/?path=src/opengl/qglbuffer.h&?release=v5.0.0` .. http:get:: /browse/snapshot/(snapshot_id)/content/(path)/ :deprecated: .. warning:: That endpoint is deprecated, use :http:get:`/browse/snapshot/(snapshot_id)/content/` instead. HTML view that produces a display of a content reachable from a snapshot. The features offered by the view are similar to the one for browsing a content in an origin context (see :http:get:`/browse/origin/(origin_url)/content/(path)/`). 
:param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :param string path: path of a content reachable from the snapshot root directory :query string branch: specify the snapshot branch name from which to retrieve the content :query string release: specify the snapshot release name from which to retrieve the content :query string revision: specify the snapshot revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the content :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive, or the provided content path does not exist from the origin root directory **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/baebc2109e4a2ec22a1129a3859647e191d04df4/content/init/initramfs.c` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/content/src/opengl/qglbuffer.h/` :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/content/src/opengl/qglbuffer.h/?release=v5.0.0` Snapshot history """""""""""""""" .. http:get:: /browse/snapshot/(snapshot_id)/log/ HTML view that produces a display of revisions history (aka the commit log) heading to the last revision collected in a snapshot. The features offered by the view are similar to the one for browsing the history in an origin context (see :http:get:`/browse/origin/(origin_url)/log/`). :param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :query int per_page: the number of log entries to display per page (default is 20, max is 50) :query string branch: specify the snapshot branch name from which to retrieve the commit log :query string release: specify the snapshot release name from which to retrieve the commit log :query string revision: specify the snapshot revision, identified by the hexadecimal representation of its **sha1_git** value, from which to retrieve the commit log :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/a274b44111f777209556e94920b7e71cf5c305cd/log/` :swh_web_browse:`snapshot/9ca9e75279df5f4e3fee19bf5190ed672dcdfb33/log/?branch=refs/heads/emacs-unicode` Snapshot branches """"""""""""""""" .. http:get:: /browse/snapshot/(snapshot_id)/branches/ HTML view that produces a display of the list of branches collected in a snapshot. The features offered by the view are similar to the one for browsing the list of branches in an origin context (see :http:get:`/browse/origin/(origin_url)/branches/`). :param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/03d7897352541e78ee7b13a580dc836778e8126a/branches/` :swh_web_browse:`snapshot/f37563b953327f8fd83e39af6ebb929ef85103d5/branches/` Snapshot releases """"""""""""""""" .. http:get:: /browse/snapshot/(snapshot_id)/releases/ HTML view that produces a display of the list of releases collected in a snapshot. The features offered by the view are similar to the one for browsing the list of releases in an origin context (see :http:get:`/browse/origin/(origin_url)/releases/`). 
:param string snapshot_id: hexadecimal representation of the snapshot **sha1** identifier :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive **Examples:** .. parsed-literal:: :swh_web_browse:`snapshot/673156c31a876c5b99b2fe3e89615529de9a3c44/releases/` :swh_web_browse:`snapshot/23e6fb084a60cc909b9e222d80d89fdb98756dee/releases/` diff --git a/docs/uri-scheme-browse.rst b/docs/uri-scheme-browse.rst index 193c3f0b..6f5b7668 100644 --- a/docs/uri-scheme-browse.rst +++ b/docs/uri-scheme-browse.rst @@ -1,93 +1,93 @@ URI scheme for swh-web Browse application ========================================= This web application aims to provide HTML views to easily navigate in the archive, thus it needs to be reached from a web browser. If you intend to query the archive programmatically through any HTTP client, please refer to the :ref:`swh-web-api-urls` section instead. Context-independent browsing ---------------------------- Context-independent URLs provide information about objects (e.g., revisions, directories, contents, person, ...), independently of the contexts where they have been found (e.g., specific repositories, branches, commits, ...). The following endpoints are the same of the API case (see below), and just render the corresponding information for user consumption. Where hyperlinks are created, they always point to other context-independent user URLs: * :http:get:`/browse/content/[(algo_hash):](hash)/`: Display a content * :http:get:`/browse/content/[(algo_hash):](hash)/raw/`: Get / Download content raw data * :http:get:`/browse/directory/(sha1_git)/[(path)/]`: Browse the content of a directory * :http:get:`/browse/person/(person_id)/`: Information on a person * :http:get:`/browse/revision/(sha1_git)/`: Browse a revision * :http:get:`/browse/revision/(sha1_git)/log/`: Browse history log heading to a revision Context-dependent browsing -------------------------- Context-dependent URLs provide information about objects, limited to specific contexts where the objects have been found. For instance, instead of having to specify a (root) revision by **sha1_git**, users might want to specify a place and a time. In Software Heritage a "place" is an origin, with an optional branch name; a "time" is a timestamp at which some place has been observed by Software Heritage crawlers. Wherever a revision context is expected in a path (i.e., a **/browse/revision/(sha1_git)/** path fragment) we can put in its stead a path fragment -of the form **/browse/origin/(origin_url)/?timestamp=(timestamp)&branch=(branch)**. +of the form **/browse/origin/?origin_url=(origin_url)&timestamp=(timestamp)&branch=(branch)**.
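As a minimal, purely illustrative sketch (the archive host and the origin URL below are placeholders, not values taken from this patch), such a context-dependent fragment can be assembled with standard URL utilities, which join the query parameters with ``&`` and take care of percent-encoding the origin URL::

    // Sketch: compose /browse/origin/?origin_url=...&timestamp=...&branch=...
    const browseUrl = new URL('/browse/origin/', 'https://archive.softwareheritage.org');
    browseUrl.searchParams.set('origin_url', 'https://github.com/python/cpython');
    browseUrl.searchParams.set('timestamp', '2020-01-01');      // optional: pick the closest visit
    browseUrl.searchParams.set('branch', 'refs/heads/master');  // optional: branch to resolve
    console.log(browseUrl.toString());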
Such a fragment is resolved, internally by the archive, to a revision **sha1_git** as follows: - if **timestamp** is not given as query parameter: look for the most recent crawl of origin identified by **origin_url** - if **timestamp** is given: look for the closest crawl of origin identified by **origin_url** from timestamp **timestamp** - if **branch** is given as a query parameter: look for the branch **branch** - if **branch** is absent: look for branch "HEAD" or "master" - return the revision **sha1_git** pointed by the chosen branch The already mentioned URLs for revision contexts can therefore be alternatively specified by users as: -* :http:get:`/browse/origin/(origin_url)/directory/` -* :http:get:`/browse/origin/(origin_url)/content/` -* :http:get:`/browse/origin/(origin_url)/log/` +* :http:get:`/browse/origin/directory/` +* :http:get:`/browse/origin/content/` +* :http:get:`/browse/origin/log/` Typing: - **origin_url** corresponds to the URL the origin was crawled from, for instance https://github.com/(user)/(repo)/ - **branch** name is given as per the corresponding VCS (e.g., Git) as a query parameter to the requested URL. - **timestamp** is given in a format as liberal as possible, to uphold the principle of least surprise. At the very minimum it is possible to enter timestamps as: - Unix epoch timestamp (see for instance the output of `date +%s`) - ISO 8601 timestamps (see for instance the output of `date -I`, `date -Is`) - YYYY[MM[DD[HH[MM[SS]]]]] ad-hoc format - YYYY[-MM[-DD[ HH:[MM:[SS:]]]]] ad-hoc format swh-web Browse Urls ------------------- .. include:: uri-scheme-browse-content.rst .. include:: uri-scheme-browse-directory.rst .. include:: uri-scheme-browse-origin.rst .. include:: uri-scheme-browse-person.rst .. include:: uri-scheme-browse-release.rst .. include:: uri-scheme-browse-revision.rst .. 
include:: uri-scheme-browse-snapshot.rst diff --git a/swh/web/assets/src/bundles/admin/origin-save.js b/swh/web/assets/src/bundles/admin/origin-save.js index 8afaa478..7801bbe9 100644 --- a/swh/web/assets/src/bundles/admin/origin-save.js +++ b/swh/web/assets/src/bundles/admin/origin-save.js @@ -1,450 +1,450 @@ /** - * Copyright (C) 2018-2019 The Software Heritage developers + * Copyright (C) 2018-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError, csrfPost, htmlAlert} from 'utils/functions'; import {swhSpinnerSrc} from 'utils/constants'; let authorizedOriginTable; let unauthorizedOriginTable; let pendingSaveRequestsTable; let acceptedSaveRequestsTable; let rejectedSaveRequestsTable; function enableRowSelection(tableSel) { $(`${tableSel} tbody`).on('click', 'tr', function() { if ($(this).hasClass('selected')) { $(this).removeClass('selected'); $(tableSel).closest('.tab-pane').find('.swh-action-need-selection').prop('disabled', true); } else { $(`${tableSel} tr.selected`).removeClass('selected'); $(this).addClass('selected'); $(tableSel).closest('.tab-pane').find('.swh-action-need-selection').prop('disabled', false); } }); } export function initOriginSaveAdmin() { $(document).ready(() => { $.fn.dataTable.ext.errMode = 'throw'; authorizedOriginTable = $('#swh-authorized-origin-urls').DataTable({ serverSide: true, ajax: Urls.admin_origin_save_authorized_urls_list(), columns: [{data: 'url', name: 'url'}], scrollY: '50vh', scrollCollapse: true, info: false }); enableRowSelection('#swh-authorized-origin-urls'); swh.webapp.addJumpToPagePopoverToDataTable(authorizedOriginTable); unauthorizedOriginTable = $('#swh-unauthorized-origin-urls').DataTable({ serverSide: true, ajax: Urls.admin_origin_save_unauthorized_urls_list(), columns: [{data: 'url', name: 'url'}], scrollY: '50vh', scrollCollapse: true, info: false }); enableRowSelection('#swh-unauthorized-origin-urls'); swh.webapp.addJumpToPagePopoverToDataTable(unauthorizedOriginTable); let columnsData = [ { data: 'id', name: 'id', visible: false, searchable: false }, { data: 'save_request_date', name: 'request_date', render: (data, type, row) => { if (type === 'display') { let date = new Date(data); return date.toLocaleString(); } return data; } }, { data: 'visit_type', name: 'visit_type' }, { data: 'origin_url', name: 'origin_url', render: (data, type, row) => { if (type === 'display') { const sanitizedURL = $.fn.dataTable.render.text().display(data); return `${sanitizedURL}`; } return data; } } ]; pendingSaveRequestsTable = $('#swh-origin-save-pending-requests').DataTable({ serverSide: true, processing: true, language: { processing: `` }, ajax: Urls.origin_save_requests_list('pending'), searchDelay: 1000, columns: columnsData, scrollY: '50vh', scrollCollapse: true, order: [[0, 'desc']], responsive: { details: { type: 'none' } } }); enableRowSelection('#swh-origin-save-pending-requests'); swh.webapp.addJumpToPagePopoverToDataTable(pendingSaveRequestsTable); rejectedSaveRequestsTable = $('#swh-origin-save-rejected-requests').DataTable({ serverSide: true, processing: true, language: { processing: `` }, ajax: Urls.origin_save_requests_list('rejected'), searchDelay: 1000, columns: columnsData, scrollY: '50vh', scrollCollapse: true, order: [[0, 'desc']], responsive: { details: { type: 'none' } } }); 
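// Illustrative sketch only, not part of the original patch: the browse links
// built in the render callbacks below now pass the origin URL to the reversed
// Urls.browse_origin() route as a query parameter instead of a path component.
// A small standalone helper doing the same thing (and letting URLSearchParams
// handle percent-encoding) could look like this:
function browseOriginLink(originUrl) {
  // Urls.browse_origin() reverses the route without arguments; the origin is
  // identified by the ?origin_url= query parameter.
  const query = new URLSearchParams({origin_url: originUrl});
  return `${Urls.browse_origin()}?${query.toString()}`;
}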
enableRowSelection('#swh-origin-save-rejected-requests'); swh.webapp.addJumpToPagePopoverToDataTable(rejectedSaveRequestsTable); columnsData.push({ data: 'save_task_status', name: 'save_task_status', render: (data, type, row) => { if (data === 'succeed' && row.visit_date) { - let browseOriginUrl = Urls.browse_origin(row.origin_url); + let browseOriginUrl = `${Urls.browse_origin()}?origin_url=${row.origin_url}`; browseOriginUrl += `visit/${row.visit_date}/`; return `${data}`; } return data; } }); columnsData.push({ name: 'info', render: (data, type, row) => { if (row.save_task_status === 'succeed' || row.save_task_status === 'failed') { return '`; } else { return ''; } } }); acceptedSaveRequestsTable = $('#swh-origin-save-accepted-requests').DataTable({ serverSide: true, processing: true, language: { processing: `` }, ajax: Urls.origin_save_requests_list('accepted'), searchDelay: 1000, columns: columnsData, scrollY: '50vh', scrollCollapse: true, order: [[0, 'desc']], responsive: { details: { type: 'none' } } }); enableRowSelection('#swh-origin-save-accepted-requests'); swh.webapp.addJumpToPagePopoverToDataTable(acceptedSaveRequestsTable); $('#swh-origin-save-requests-nav-item').on('shown.bs.tab', () => { pendingSaveRequestsTable.draw(); }); $('#swh-origin-save-url-filters-nav-item').on('shown.bs.tab', () => { authorizedOriginTable.draw(); }); $('#swh-authorized-origins-tab').on('shown.bs.tab', () => { authorizedOriginTable.draw(); }); $('#swh-unauthorized-origins-tab').on('shown.bs.tab', () => { unauthorizedOriginTable.draw(); }); $('#swh-save-requests-pending-tab').on('shown.bs.tab', () => { pendingSaveRequestsTable.draw(); }); $('#swh-save-requests-accepted-tab').on('shown.bs.tab', () => { acceptedSaveRequestsTable.draw(); }); $('#swh-save-requests-rejected-tab').on('shown.bs.tab', () => { rejectedSaveRequestsTable.draw(); }); $('#swh-save-requests-pending-tab').click(() => { pendingSaveRequestsTable.ajax.reload(null, false); }); $('#swh-save-requests-accepted-tab').click(() => { acceptedSaveRequestsTable.ajax.reload(null, false); }); $('#swh-save-requests-rejected-tab').click(() => { rejectedSaveRequestsTable.ajax.reload(null, false); }); $('body').on('click', e => { if ($(e.target).parents('.popover').length > 0) { event.stopPropagation(); } else if ($(e.target).parents('.swh-save-request-info').length === 0) { $('.swh-save-request-info').popover('dispose'); } }); }); } export function addAuthorizedOriginUrl() { let originUrl = $('#swh-authorized-url-prefix').val(); let addOriginUrl = Urls.admin_origin_save_add_authorized_url(originUrl); csrfPost(addOriginUrl) .then(handleFetchError) .then(() => { authorizedOriginTable.row.add({'url': originUrl}).draw(); $('.swh-add-authorized-origin-status').html( htmlAlert('success', 'The origin url prefix has been successfully added in the authorized list.', true) ); }) .catch(response => { $('.swh-add-authorized-origin-status').html( htmlAlert('warning', 'The provided origin url prefix is already registered in the authorized list.', true) ); }); } export function removeAuthorizedOriginUrl() { let originUrl = $('#swh-authorized-origin-urls tr.selected').text(); if (originUrl) { let removeOriginUrl = Urls.admin_origin_save_remove_authorized_url(originUrl); csrfPost(removeOriginUrl) .then(handleFetchError) .then(() => { authorizedOriginTable.row('.selected').remove().draw(); }) .catch(() => {}); } } export function addUnauthorizedOriginUrl() { let originUrl = $('#swh-unauthorized-url-prefix').val(); let addOriginUrl = 
Urls.admin_origin_save_add_unauthorized_url(originUrl); csrfPost(addOriginUrl) .then(handleFetchError) .then(() => { unauthorizedOriginTable.row.add({'url': originUrl}).draw(); $('.swh-add-unauthorized-origin-status').html( htmlAlert('success', 'The origin url prefix has been successfully added in the unauthorized list.', true) ); }) .catch(() => { $('.swh-add-unauthorized-origin-status').html( htmlAlert('warning', 'The provided origin url prefix is already registered in the unauthorized list.', true) ); }); } export function removeUnauthorizedOriginUrl() { let originUrl = $('#swh-unauthorized-origin-urls tr.selected').text(); if (originUrl) { let removeOriginUrl = Urls.admin_origin_save_remove_unauthorized_url(originUrl); csrfPost(removeOriginUrl) .then(handleFetchError) .then(() => { unauthorizedOriginTable.row('.selected').remove().draw(); }) .catch(() => {}); } } export function acceptOriginSaveRequest() { let selectedRow = pendingSaveRequestsTable.row('.selected'); if (selectedRow.length) { let acceptOriginSaveRequestCallback = () => { let rowData = selectedRow.data(); let acceptSaveRequestUrl = Urls.admin_origin_save_request_accept(rowData['visit_type'], rowData['origin_url']); csrfPost(acceptSaveRequestUrl) .then(() => { pendingSaveRequestsTable.ajax.reload(null, false); }); }; swh.webapp.showModalConfirm( 'Accept origin save request ?', 'Are you sure to accept this origin save request ?', acceptOriginSaveRequestCallback); } } export function rejectOriginSaveRequest() { let selectedRow = pendingSaveRequestsTable.row('.selected'); if (selectedRow.length) { let rejectOriginSaveRequestCallback = () => { let rowData = selectedRow.data(); let rejectSaveRequestUrl = Urls.admin_origin_save_request_reject(rowData['visit_type'], rowData['origin_url']); csrfPost(rejectSaveRequestUrl) .then(() => { pendingSaveRequestsTable.ajax.reload(null, false); }); }; swh.webapp.showModalConfirm( 'Reject origin save request ?', 'Are you sure to reject this origin save request ?', rejectOriginSaveRequestCallback); } } function removeOriginSaveRequest(requestTable) { let selectedRow = requestTable.row('.selected'); if (selectedRow.length) { let requestId = selectedRow.data()['id']; let removeOriginSaveRequestCallback = () => { let removeSaveRequestUrl = Urls.admin_origin_save_request_remove(requestId); csrfPost(removeSaveRequestUrl) .then(() => { requestTable.ajax.reload(null, false); }); }; swh.webapp.showModalConfirm( 'Remove origin save request ?', 'Are you sure to remove this origin save request ?', removeOriginSaveRequestCallback); } } export function removePendingOriginSaveRequest() { removeOriginSaveRequest(pendingSaveRequestsTable); } export function removeAcceptedOriginSaveRequest() { removeOriginSaveRequest(acceptedSaveRequestsTable); } export function removeRejectedOriginSaveRequest() { removeOriginSaveRequest(rejectedSaveRequestsTable); } export function displaySaveRequestInfo(event, saveRequestId) { event.stopPropagation(); const saveRequestTaskInfoUrl = Urls.admin_origin_save_task_info(saveRequestId); $('.swh-save-request-info').popover('dispose'); $(event.target).popover({ 'title': 'Save request task information', 'content': `

Fetching task information ...

`, 'html': true, 'placement': 'left', 'sanitizeFn': swh.webapp.filterXSS }); $(event.target).popover('show'); fetch(saveRequestTaskInfoUrl) .then(response => response.json()) .then(saveRequestTaskInfo => { let content; if ($.isEmptyObject(saveRequestTaskInfo)) { content = 'Not available'; } else { let saveRequestInfo = []; saveRequestInfo.push({ key: 'Task type', value: saveRequestTaskInfo.type }); if (saveRequestTaskInfo.hasOwnProperty('task_name')) { saveRequestInfo.push({ key: 'Task name', value: saveRequestTaskInfo.name }); } saveRequestInfo.push({ key: 'Task arguments', value: JSON.stringify(saveRequestTaskInfo.arguments, null, 2) }); saveRequestInfo.push({ key: 'Task id', value: saveRequestTaskInfo.id }); saveRequestInfo.push({ key: 'Task backend id', value: saveRequestTaskInfo.backend_id }); saveRequestInfo.push({ key: 'Task scheduling date', value: new Date(saveRequestTaskInfo.scheduled).toLocaleString() }); saveRequestInfo.push({ key: 'Task termination date', value: new Date(saveRequestTaskInfo.ended).toLocaleString() }); if (saveRequestTaskInfo.hasOwnProperty('duration')) { saveRequestInfo.push({ key: 'Task duration', value: saveRequestTaskInfo.duration + ' s' }); } if (saveRequestTaskInfo.hasOwnProperty('worker')) { saveRequestInfo.push({ key: 'Task executor', value: saveRequestTaskInfo.worker }); } if (saveRequestTaskInfo.hasOwnProperty('message')) { saveRequestInfo.push({ key: 'Task log', value: saveRequestTaskInfo.message }); } content = ''; for (let info of saveRequestInfo) { content += ``; } content += '
'; } $('.swh-popover').html(content); $(event.target).popover('update'); }); } diff --git a/swh/web/assets/src/bundles/browse/origin-search.js b/swh/web/assets/src/bundles/browse/origin-search.js index 925703ab..d6d15af6 100644 --- a/swh/web/assets/src/bundles/browse/origin-search.js +++ b/swh/web/assets/src/bundles/browse/origin-search.js @@ -1,229 +1,229 @@ /** * Copyright (C) 2018-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError} from 'utils/functions'; const limit = 100; let linksPrev = []; let linkNext = null; let linkCurrent = null; let inSearch = false; function parseLinkHeader(s) { let re = /<(.+)>; rel="next"/; return s.match(re)[1]; } function fixTableRowsStyle() { setTimeout(() => { $('#origin-search-results tbody tr').removeAttr('style'); }); } function clearOriginSearchResultsTable() { $('#origin-search-results tbody tr').remove(); } function populateOriginSearchResultsTable(origins) { if (origins.length > 0) { $('#swh-origin-search-results').show(); $('#swh-no-result').hide(); clearOriginSearchResultsTable(); let table = $('#origin-search-results tbody'); for (let [i, origin] of origins.entries()) { - let browseUrl = Urls.browse_origin(origin.url); + let browseUrl = `${Urls.browse_origin()}?origin_url=${origin.url}`; let tableRow = ``; tableRow += `${encodeURI(origin.url)}`; tableRow += ``; tableRow += ``; tableRow += ''; table.append(tableRow); // get async latest visit snapshot and update visit status icon let latestSnapshotUrl = Urls.api_1_origin_visit_latest(origin.url); latestSnapshotUrl += '?require_snapshot=true'; fetch(latestSnapshotUrl) .then(response => response.json()) .then(data => { $(`#visit-type-origin-${i}`).text(data.type); $(`#visit-status-origin-${i}`).children().remove(); if (data) { $(`#visit-status-origin-${i}`).append(''); } else { $(`#visit-status-origin-${i}`).append(''); if ($('#swh-filter-empty-visits').prop('checked')) { $(`#origin-${i}`).remove(); } } }); } fixTableRowsStyle(); } else { $('#swh-origin-search-results').hide(); $('#swh-no-result').text('No origins matching the search criteria were found.'); $('#swh-no-result').show(); } if (linkNext === null) { $('#origins-next-results-button').addClass('disabled'); } else { $('#origins-next-results-button').removeClass('disabled'); } if (linksPrev.length === 0) { $('#origins-prev-results-button').addClass('disabled'); } else { $('#origins-prev-results-button').removeClass('disabled'); } inSearch = false; setTimeout(() => { window.scrollTo(0, 0); }); } function searchOriginsFirst(searchQueryText, limit) { let baseSearchUrl; let searchMetadata = $('#swh-search-origin-metadata').prop('checked'); if (searchMetadata) { baseSearchUrl = new URL(Urls.api_1_origin_metadata_search(), window.location); baseSearchUrl.searchParams.append('fulltext', searchQueryText); } else { baseSearchUrl = new URL(Urls.api_1_origin_search(searchQueryText), window.location); } let withVisit = $('#swh-search-origins-with-visit').prop('checked'); baseSearchUrl.searchParams.append('limit', limit); baseSearchUrl.searchParams.append('with_visit', withVisit); let searchUrl = baseSearchUrl.toString(); searchOrigins(searchUrl); } function searchOrigins(searchUrl) { clearOriginSearchResultsTable(); $('.swh-loading').addClass('show'); let response = fetch(searchUrl) .then(handleFetchError) .then(resp => { response = resp; 
return response.json(); }) .then(data => { // Save link to the current results page linkCurrent = searchUrl; // Save link to the next results page. linkNext = null; if (response.headers.has('Link')) { let parsedLink = parseLinkHeader(response.headers.get('Link')); if (parsedLink !== undefined) { linkNext = parsedLink; } } // prevLinks is updated by the caller, which is the one to know if // we're going forward or backward in the pages. $('.swh-loading').removeClass('show'); populateOriginSearchResultsTable(data); }) .catch(response => { $('.swh-loading').removeClass('show'); inSearch = false; $('#swh-origin-search-results').hide(); $('#swh-no-result').text(`Error ${response.status}: ${response.statusText}`); $('#swh-no-result').show(); }); } function doSearch() { $('#swh-no-result').hide(); let searchQueryText = $('#origins-url-patterns').val(); inSearch = true; if (searchQueryText.startsWith('swh:')) { // searchQueryText may be a PID so sending search queries to PID resolve endpoint let resolvePidUrl = Urls.api_1_resolve_swh_pid(searchQueryText); fetch(resolvePidUrl) .then(handleFetchError) .then(response => response.json()) .then(data => { // pid has been successfully resolved, // so redirect to browse page window.location = data.browse_url; }) .catch(response => { // display a useful error message if the input // looks like a swh pid response.json().then(data => { $('#swh-origin-search-results').hide(); $('.swh-search-pagination').hide(); $('#swh-no-result').text(data.reason); $('#swh-no-result').show(); }); }); } else { // otherwise, proceed with origins search $('#swh-origin-search-results').show(); $('.swh-search-pagination').show(); searchOriginsFirst(searchQueryText, limit); } } export function initOriginSearch() { $(document).ready(() => { $('#swh-search-origins').submit(event => { event.preventDefault(); let searchQueryText = $('#origins-url-patterns').val().trim(); let withVisit = $('#swh-search-origins-with-visit').prop('checked'); let withContent = $('#swh-filter-empty-visits').prop('checked'); let searchMetadata = $('#swh-search-origin-metadata').prop('checked'); let queryParameters = new URLSearchParams(); queryParameters.append('q', searchQueryText); if (withVisit) { queryParameters.append('with_visit', withVisit); } if (withContent) { queryParameters.append('with_content', withContent); } if (searchMetadata) { queryParameters.append('search_metadata', searchMetadata); } // Update the url, triggering page reload and effective search window.location.search = `?${queryParameters.toString()}`; }); $('#origins-next-results-button').click(event => { if ($('#origins-next-results-button').hasClass('disabled') || inSearch) { return; } inSearch = true; linksPrev.push(linkCurrent); searchOrigins(linkNext); event.preventDefault(); }); $('#origins-prev-results-button').click(event => { if ($('#origins-prev-results-button').hasClass('disabled') || inSearch) { return; } inSearch = true; searchOrigins(linksPrev.pop()); event.preventDefault(); }); let urlParams = new URLSearchParams(window.location.search); let query = urlParams.get('q'); let withVisit = urlParams.has('with_visit'); let withContent = urlParams.has('with_content'); let searchMetadata = urlParams.has('search_metadata'); if (query) { $('#origins-url-patterns').val(query); $('#swh-search-origins-with-visit').prop('checked', withVisit); $('#swh-filter-empty-visits').prop('checked', withContent); $('#swh-search-origin-metadata').prop('checked', searchMetadata); doSearch(); } }); } diff --git 
a/swh/web/assets/src/bundles/save/index.js b/swh/web/assets/src/bundles/save/index.js index 2a9cf8e5..faa5ba8b 100644 --- a/swh/web/assets/src/bundles/save/index.js +++ b/swh/web/assets/src/bundles/save/index.js @@ -1,291 +1,291 @@ /** - * Copyright (C) 2018-2019 The Software Heritage developers + * Copyright (C) 2018-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError, csrfPost, isGitRepoUrl, htmlAlert, removeUrlFragment} from 'utils/functions'; import {swhSpinnerSrc} from 'utils/constants'; import {validate} from 'validate.js'; let saveRequestsTable; function originSaveRequest(originType, originUrl, acceptedCallback, pendingCallback, errorCallback) { let addSaveOriginRequestUrl = Urls.origin_save_request(originType, originUrl); let headers = { 'Accept': 'application/json', 'Content-Type': 'application/json' }; $('.swh-processing-save-request').css('display', 'block'); csrfPost(addSaveOriginRequestUrl, headers) .then(handleFetchError) .then(response => response.json()) .then(data => { $('.swh-processing-save-request').css('display', 'none'); if (data.save_request_status === 'accepted') { acceptedCallback(); } else { pendingCallback(); } }) .catch(response => { $('.swh-processing-save-request').css('display', 'none'); response.json().then(errorData => { errorCallback(response.status, errorData); }); }); } export function initOriginSave() { $(document).ready(() => { $.fn.dataTable.ext.errMode = 'none'; fetch(Urls.origin_save_types_list()) .then(response => response.json()) .then(data => { for (let originType of data) { $('#swh-input-visit-type').append(``); } }); saveRequestsTable = $('#swh-origin-save-requests') .on('error.dt', (e, settings, techNote, message) => { $('#swh-origin-save-request-list-error').text('An error occurred while retrieving the save requests list'); console.log(message); }) .DataTable({ serverSide: true, processing: true, language: { processing: `` }, ajax: Urls.origin_save_requests_list('all'), searchDelay: 1000, columns: [ { data: 'save_request_date', name: 'request_date', render: (data, type, row) => { if (type === 'display') { let date = new Date(data); return date.toLocaleString(); } return data; } }, { data: 'visit_type', name: 'visit_type' }, { data: 'origin_url', name: 'origin_url', render: (data, type, row) => { if (type === 'display') { const sanitizedURL = $.fn.dataTable.render.text().display(data); return `${sanitizedURL}`; } return data; } }, { data: 'save_request_status', name: 'status' }, { data: 'save_task_status', name: 'loading_task_status', render: (data, type, row) => { if (data === 'succeed' && row.visit_date) { - let browseOriginUrl = Urls.browse_origin(row.origin_url); + let browseOriginUrl = `${Urls.browse_origin()}?origin_url=${row.origin_url}`; browseOriginUrl += `visit/${row.visit_date}/`; return `${data}`; } return data; } } ], scrollY: '50vh', scrollCollapse: true, order: [[0, 'desc']], responsive: { details: { type: 'none' } } }); swh.webapp.addJumpToPagePopoverToDataTable(saveRequestsTable); $('#swh-origin-save-requests-list-tab').on('shown.bs.tab', () => { saveRequestsTable.draw(); window.location.hash = '#requests'; }); $('#swh-origin-save-request-create-tab').on('shown.bs.tab', () => { removeUrlFragment(); }); let saveRequestAcceptedAlert = htmlAlert( 'success', 'The "save code now" request has been accepted and will be 
processed as soon as possible.' ); let saveRequestPendingAlert = htmlAlert( 'warning', 'The "save code now" request has been put in pending state and may be accepted for processing after manual review.' ); let saveRequestRateLimitedAlert = htmlAlert( 'danger', 'The rate limit for "save code now" requests has been reached. Please try again later.' ); let saveRequestUnknownErrorAlert = htmlAlert( 'danger', 'An unexpected error happened when submitting the "save code now request".' ); $('#swh-save-origin-form').submit(event => { event.preventDefault(); event.stopPropagation(); $('.alert').alert('close'); if (event.target.checkValidity()) { $(event.target).removeClass('was-validated'); let originType = $('#swh-input-visit-type').val(); let originUrl = $('#swh-input-origin-url').val(); originSaveRequest(originType, originUrl, () => $('#swh-origin-save-request-status').html(saveRequestAcceptedAlert), () => $('#swh-origin-save-request-status').html(saveRequestPendingAlert), (statusCode, errorData) => { $('#swh-origin-save-request-status').css('color', 'red'); if (statusCode === 403) { const errorAlert = htmlAlert('danger', `Error: ${errorData['detail']}`); $('#swh-origin-save-request-status').html(errorAlert); } else if (statusCode === 429) { $('#swh-origin-save-request-status').html(saveRequestRateLimitedAlert); } else { $('#swh-origin-save-request-status').html(saveRequestUnknownErrorAlert); } }); } else { $(event.target).addClass('was-validated'); } }); $('#swh-show-origin-save-requests-list').on('click', (event) => { event.preventDefault(); $('.nav-tabs a[href="#swh-origin-save-requests-list"]').tab('show'); }); $('#swh-input-origin-url').on('input', function(event) { let originUrl = $(this).val().trim(); $(this).val(originUrl); $('#swh-input-visit-type option').each(function() { let val = $(this).val(); if (val && originUrl.includes(val)) { $(this).prop('selected', true); } }); }); if (window.location.hash === '#requests') { $('.nav-tabs a[href="#swh-origin-save-requests-list"]').tab('show'); } }); } export function validateSaveOriginUrl(input) { let originUrl = input.value.trim(); let validUrl = validate({website: originUrl}, { website: { url: { schemes: ['http', 'https', 'svn', 'git'] } } }) === undefined; let originType = $('#swh-input-visit-type').val(); if (originType === 'git' && validUrl) { // additional checks for well known code hosting providers let githubIdx = originUrl.indexOf('://github.com'); let gitlabIdx = originUrl.indexOf('://gitlab.'); let gitSfIdx = originUrl.indexOf('://git.code.sf.net'); let bitbucketIdx = originUrl.indexOf('://bitbucket.org'); if (githubIdx !== -1 && githubIdx <= 5) { validUrl = isGitRepoUrl(originUrl, 'github.com'); } else if (gitlabIdx !== -1 && gitlabIdx <= 5) { let startIdx = gitlabIdx + 3; let idx = originUrl.indexOf('/', startIdx); if (idx !== -1) { let gitlabDomain = originUrl.substr(startIdx, idx - startIdx); validUrl = isGitRepoUrl(originUrl, gitlabDomain); } else { validUrl = false; } } else if (gitSfIdx !== -1 && gitSfIdx <= 5) { validUrl = isGitRepoUrl(originUrl, 'git.code.sf.net/p'); } else if (bitbucketIdx !== -1 && bitbucketIdx <= 5) { validUrl = isGitRepoUrl(originUrl, 'bitbucket.org'); } } if (validUrl) { input.setCustomValidity(''); } else { input.setCustomValidity('The origin url is not valid or does not reference a code repository'); } } export function initTakeNewSnapshot() { let newSnapshotRequestAcceptedAlert = htmlAlert( 'success', 'The "take new snapshot" request has been accepted and will be processed as soon as possible.' 
); let newSnapshotRequestPendingAlert = htmlAlert( 'warning', 'The "take new snapshot" request has been put in pending state and may be accepted for processing after manual review.' ); let newSnapshotRequestRateLimitAlert = htmlAlert( 'danger', 'The rate limit for "take new snapshot" requests has been reached. Please try again later.' ); let newSnapshotRequestUnknownErrorAlert = htmlAlert( 'danger', 'An unexpected error happened when submitting the "save code now request".' ); $(document).ready(() => { $('#swh-take-new-snapshot-form').submit(event => { event.preventDefault(); event.stopPropagation(); let originType = $('#swh-input-visit-type').val(); let originUrl = $('#swh-input-origin-url').val(); originSaveRequest(originType, originUrl, () => $('#swh-take-new-snapshot-request-status').html(newSnapshotRequestAcceptedAlert), () => $('#swh-take-new-snapshot-request-status').html(newSnapshotRequestPendingAlert), (statusCode, errorData) => { $('#swh-take-new-snapshot-request-status').css('color', 'red'); if (statusCode === 403) { const errorAlert = htmlAlert('danger', `Error: ${errorData['detail']}`); $('#swh-take-new-snapshot-request-status').html(errorAlert); } else if (statusCode === 429) { $('#swh-take-new-snapshot-request-status').html(newSnapshotRequestRateLimitAlert); } else { $('#swh-take-new-snapshot-request-status').html(newSnapshotRequestUnknownErrorAlert); } }); }); }); } diff --git a/swh/web/assets/src/bundles/webapp/badges.js b/swh/web/assets/src/bundles/webapp/badges.js index 193bbad5..dd175490 100644 --- a/swh/web/assets/src/bundles/webapp/badges.js +++ b/swh/web/assets/src/bundles/webapp/badges.js @@ -1,44 +1,44 @@ /** - * Copyright (C) 2019 The Software Heritage developers + * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ export function showBadgeInfoModal(objectType, objectPid) { let badgeImageUrl; let badgeLinkUrl; if (objectType === 'origin') { badgeImageUrl = Urls.swh_badge(objectType, objectPid); - badgeLinkUrl = Urls.browse_origin(objectPid); + badgeLinkUrl = `${Urls.browse_origin()}?origin_url=${objectPid}`; } else { badgeImageUrl = Urls.swh_badge_pid(objectPid); badgeLinkUrl = Urls.browse_swh_id(objectPid); } let urlPrefix = `${window.location.protocol}//${window.location.hostname}`; if (window.location.port) { urlPrefix += `:${window.location.port}`; } const absoluteBadgeImageUrl = `${urlPrefix}${badgeImageUrl}`; const absoluteBadgeLinkUrl = `${urlPrefix}${badgeLinkUrl}`; const html = `
<a href="${absoluteBadgeLinkUrl}">
     <img src="${absoluteBadgeImageUrl}">
 </a>
[![SWH](${absoluteBadgeImageUrl})](${absoluteBadgeLinkUrl})
.. image:: ${absoluteBadgeImageUrl}
     :target: ${absoluteBadgeLinkUrl}
`; swh.webapp.showModalHtml('Software Heritage badge integration', html); } diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py index 92ab894c..8dd00afc 100644 --- a/swh/web/browse/snapshot_context.py +++ b/swh/web/browse/snapshot_context.py @@ -1,1407 +1,1417 @@ -# Copyright (C) 2018-2019 The Software Heritage developers +# Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information # Utility module for browsing the archive in a snapshot context. from collections import defaultdict from copy import copy from typing import Any, Dict, List, Optional, Union, Tuple from django.core.cache import cache from django.shortcuts import render from django.template.defaultfilters import filesizeformat from django.utils.html import escape import sentry_sdk from swh.model.identifiers import persistent_identifier, snapshot_identifier from swh.web.browse.utils import ( get_directory_entries, gen_directory_link, gen_revision_link, request_content, gen_content_link, prepare_content_for_display, content_display_max_size, format_log_entries, gen_revision_log_link, gen_release_link, get_readme_to_display, get_swh_persistent_ids, gen_snapshot_link, ) from swh.web.common import service, highlightjs from swh.web.common.exc import handle_view_exception, NotFoundExc, BadInputExc from swh.web.common.origin_visits import get_origin_visit from swh.web.common.typing import ( OriginInfo, SnapshotBranchInfo, SnapshotReleaseInfo, SnapshotContext, ) from swh.web.common.utils import ( reverse, gen_path_info, format_utc_iso_date, swh_object_icons, ) from swh.web.config import get_config _empty_snapshot_id = snapshot_identifier({"branches": {}}) def _get_branch(branches, branch_name, snapshot_id): """ Utility function to get a specific branch from a branches list. Its purpose is to get the default HEAD branch as some software origin (e.g those with svn type) does not have it. In that latter case, check if there is a master branch instead and returns it. """ filtered_branches = [b for b in branches if b["name"] == branch_name] if filtered_branches: return filtered_branches[0] elif branch_name == "HEAD": filtered_branches = [b for b in branches if b["name"].endswith("master")] if filtered_branches: return filtered_branches[0] elif branches: return branches[0] else: # case where a large branches list has been truncated snp = service.lookup_snapshot( snapshot_id, branches_from=branch_name, branches_count=1, target_types=["revision", "alias"], ) snp_branch, _ = process_snapshot_branches(snp) if snp_branch and snp_branch[0]["name"] == branch_name: branches.append(snp_branch[0]) return snp_branch[0] def _get_release(releases, release_name, snapshot_id): """ Utility function to get a specific release from a releases list. Returns None if the release can not be found in the list. 
""" filtered_releases = [r for r in releases if r["name"] == release_name] if filtered_releases: return filtered_releases[0] else: # case where a large branches list has been truncated for branch_name in (release_name, f"refs/tags/{release_name}"): snp = service.lookup_snapshot( snapshot_id, branches_from=branch_name, branches_count=1, target_types=["release"], ) _, snp_release = process_snapshot_branches(snp) if snp_release and snp_release[0]["name"] == release_name: releases.append(snp_release[0]) return snp_release[0] def _branch_not_found( branch_type, branch, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id ): """ Utility function to raise an exception when a specified branch/release can not be found. """ if branch_type == "branch": branch_type = "Branch" branch_type_plural = "branches" target_type = "revision" else: branch_type = "Release" branch_type_plural = "releases" target_type = "release" if snapshot_id and snapshot_sizes[target_type] == 0: msg = "Snapshot with id %s has an empty list" " of %s!" % ( snapshot_id, branch_type_plural, ) elif snapshot_id: msg = "%s %s for snapshot with id %s" " not found!" % ( branch_type, branch, snapshot_id, ) elif visit_id and snapshot_sizes[target_type] == 0: msg = ( "Origin with url %s" " for visit with id %s has an empty list" " of %s!" % (origin_info["url"], visit_id, branch_type_plural) ) elif visit_id: msg = ( "%s %s associated to visit with" " id %s for origin with url %s" " not found!" % (branch_type, branch, visit_id, origin_info["url"]) ) elif snapshot_sizes[target_type] == 0: msg = ( "Origin with url %s" " for visit with timestamp %s has an empty list" " of %s!" % (origin_info["url"], timestamp, branch_type_plural) ) else: msg = ( "%s %s associated to visit with" " timestamp %s for origin with " "url %s not found!" % (branch_type, branch, timestamp, origin_info["url"]) ) raise NotFoundExc(escape(msg)) def process_snapshot_branches( snapshot: Dict[str, Any] ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo]]: """ Process a dictionary describing snapshot branches: extract those targeting revisions and releases, put them in two different lists, then sort those lists in lexicographical order of the branches' names. 
Args: snapshot: A dict describing a snapshot as returned for instance by :func:`swh.web.common.service.lookup_snapshot` Returns: A tuple whose first member is the sorted list of branches targeting revisions and second member the sorted list of branches targeting releases """ snapshot_branches = snapshot["branches"] branches: Dict[str, SnapshotBranchInfo] = {} branch_aliases: Dict[str, str] = {} releases: Dict[str, SnapshotReleaseInfo] = {} revision_to_branch = defaultdict(set) revision_to_release = defaultdict(set) release_to_branch = defaultdict(set) for branch_name, target in snapshot_branches.items(): if not target: # FIXME: display branches with an unknown target anyway continue target_id = target["target"] target_type = target["target_type"] if target_type == "revision": branches[branch_name] = SnapshotBranchInfo( name=branch_name, revision=target_id, date=None, directory=None, message=None, url=None, ) revision_to_branch[target_id].add(branch_name) elif target_type == "release": release_to_branch[target_id].add(branch_name) elif target_type == "alias": branch_aliases[branch_name] = target_id # FIXME: handle pointers to other object types def _add_release_info(branch, release): releases[branch] = SnapshotReleaseInfo( name=release["name"], branch_name=branch, date=format_utc_iso_date(release["date"]), directory=None, id=release["id"], message=release["message"], target_type=release["target_type"], target=release["target"], url=None, ) def _add_branch_info(branch, revision): branches[branch] = SnapshotBranchInfo( name=branch, revision=revision["id"], directory=revision["directory"], date=format_utc_iso_date(revision["date"]), message=revision["message"], url=None, ) releases_info = service.lookup_release_multiple(release_to_branch.keys()) for release in releases_info: branches_to_update = release_to_branch[release["id"]] for branch in branches_to_update: _add_release_info(branch, release) if release["target_type"] == "revision": revision_to_release[release["target"]].update(branches_to_update) revisions = service.lookup_revision_multiple( set(revision_to_branch.keys()) | set(revision_to_release.keys()) ) for revision in revisions: if not revision: continue for branch in revision_to_branch[revision["id"]]: _add_branch_info(branch, revision) for release in revision_to_release[revision["id"]]: releases[release]["directory"] = revision["directory"] for branch_alias, branch_target in branch_aliases.items(): if branch_target in branches: branches[branch_alias] = copy(branches[branch_target]) else: snp = service.lookup_snapshot( snapshot["id"], branches_from=branch_target, branches_count=1 ) if snp and branch_target in snp["branches"]: if snp["branches"][branch_target] is None: continue target_type = snp["branches"][branch_target]["target_type"] target = snp["branches"][branch_target]["target"] if target_type == "revision": branches[branch_alias] = snp["branches"][branch_target] revision = service.lookup_revision(target) _add_branch_info(branch_alias, revision) elif target_type == "release": release = service.lookup_release(target) _add_release_info(branch_alias, release) if branch_alias in branches: branches[branch_alias]["name"] = branch_alias ret_branches = list(sorted(branches.values(), key=lambda b: b["name"])) ret_releases = list(sorted(releases.values(), key=lambda b: b["name"])) return ret_branches, ret_releases def get_snapshot_content( snapshot_id: str, ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo]]: """Returns the lists of branches and releases associated to a swh 
snapshot. That list is put in cache in order to speedup the navigation in the swh-web/browse ui. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. Args: snapshot_id: hexadecimal representation of the snapshot identifier Returns: A tuple with two members. The first one is a list of dict describing the snapshot branches. The second one is a list of dict describing the snapshot releases. Raises: NotFoundExc if the snapshot does not exist """ cache_entry_id = "swh_snapshot_%s" % snapshot_id cache_entry = cache.get(cache_entry_id) if cache_entry: return cache_entry["branches"], cache_entry["releases"] branches: List[SnapshotBranchInfo] = [] releases: List[SnapshotReleaseInfo] = [] snapshot_content_max_size = get_config()["snapshot_content_max_size"] if snapshot_id: snapshot = service.lookup_snapshot( snapshot_id, branches_count=snapshot_content_max_size ) branches, releases = process_snapshot_branches(snapshot) cache.set(cache_entry_id, {"branches": branches, "releases": releases,}) return branches, releases def get_origin_visit_snapshot( origin_info: OriginInfo, visit_ts: Optional[Union[int, str]] = None, visit_id: Optional[int] = None, snapshot_id: Optional[str] = None, ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo]]: """Returns the lists of branches and releases associated to an origin for a given visit. The visit is expressed by either: * a snapshot identifier * a timestamp, if no visit with that exact timestamp is found, the closest one from the provided timestamp will be used. If no visit parameter is provided, it returns the list of branches found for the latest visit. That list is put in cache in order to speedup the navigation in the swh-web/browse ui. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. Args: origin_info: a dict filled with origin information visit_ts: an ISO date string or Unix timestamp to parse visit_id: visit id for disambiguation in case several visits have the same timestamp snapshot_id: if provided, visit associated to the snapshot will be processed Returns: A tuple with two members. The first one is a list of dict describing the origin branches for the given visit. The second one is a list of dict describing the origin releases for the given visit. Raises: NotFoundExc if the origin or its visit are not found """ visit_info = get_origin_visit(origin_info, visit_ts, visit_id, snapshot_id) return get_snapshot_content(visit_info["snapshot"]) def get_snapshot_context( snapshot_id: Optional[str] = None, origin_url: Optional[str] = None, timestamp: Optional[str] = None, visit_id: Optional[int] = None, branch_name: Optional[str] = None, release_name: Optional[str] = None, revision_id: Optional[str] = None, path: Optional[str] = None, browse_context: str = "directory", ) -> SnapshotContext: """ Utility function to compute relevant information when navigating the archive in a snapshot context. The snapshot is either referenced by its id or it will be retrieved from an origin visit. 
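# Illustrative sketch, not part of the patch under review: the cache-or-compute
# pattern applied by get_snapshot_content() above. The real code uses Django's
# cache backend and service.lookup_snapshot(); a plain module-level dict and a
# caller-supplied lookup callable stand in for both here (assumptions).
from typing import Any, Callable, Dict

_snapshot_cache: Dict[str, Any] = {}


def cached_snapshot_content(snapshot_id: str, lookup: Callable[[str], Any]) -> Any:
    cache_key = "swh_snapshot_%s" % snapshot_id
    cached = _snapshot_cache.get(cache_key)
    if cached is not None:
        return cached  # cache hit: skip the expensive snapshot lookup
    result = lookup(snapshot_id)  # cache miss: compute branches/releases
    _snapshot_cache[cache_key] = result
    return result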
Args: snapshot_id: hexadecimal representation of a snapshot identifier origin_url: an origin_url timestamp: a datetime string for retrieving the closest visit of the origin visit_id: optional visit id for disambiguation in case of several visits with the same timestamp branch_name: optional branch name set when browsing the snapshot in that scope (will default to "HEAD" if not provided) release_name: optional release name set when browsing the snapshot in that scope revision_id: optional revision identifier set when browsing the snapshot in that scope path: optional path of the object currently browsed in the snapshot browse_context: indicates which type of object is currently browsed Returns: A dict filled with snapshot context information. Raises: swh.web.common.exc.NotFoundExc: if no snapshot is found for the visit of an origin. """ assert origin_url is not None or snapshot_id is not None origin_info = None visit_info = None url_args = {} query_params: Dict[str, Any] = {} origin_visits_url = None if origin_url: origin_info = service.lookup_origin({"url": origin_url}) visit_info = get_origin_visit(origin_info, timestamp, visit_id, snapshot_id) formatted_date = format_utc_iso_date(visit_info["date"]) visit_info["formatted_date"] = formatted_date snapshot_id = visit_info["snapshot"] if not snapshot_id: raise NotFoundExc( "No snapshot associated to the visit of origin " "%s on %s" % (escape(origin_url), formatted_date) ) # provided timestamp is not necessarily equals to the one # of the retrieved visit, so get the exact one in order # to use it in the urls generated below if timestamp: timestamp = visit_info["date"] branches, releases = get_origin_visit_snapshot( origin_info, timestamp, visit_id, snapshot_id ) - url_args = {"origin_url": origin_info["url"]} + query_params["origin_url"] = origin_info["url"] + + origin_visits_url = reverse("browse-origin-visits", query_params=query_params) if visit_id is not None: query_params["visit_id"] = visit_id - origin_visits_url = reverse("browse-origin-visits", url_args=url_args) - if timestamp is not None: query_params["timestamp"] = format_utc_iso_date( timestamp, "%Y-%m-%dT%H:%M:%SZ" ) - visit_url = reverse( - "browse-origin-directory", url_args=url_args, query_params=query_params - ) + visit_url = reverse("browse-origin-directory", query_params=query_params) visit_info["url"] = visit_url - branches_url = reverse( - "browse-origin-branches", url_args=url_args, query_params=query_params - ) + branches_url = reverse("browse-origin-branches", query_params=query_params) - releases_url = reverse( - "browse-origin-releases", url_args=url_args, query_params=query_params - ) + releases_url = reverse("browse-origin-releases", query_params=query_params) else: assert snapshot_id is not None branches, releases = get_snapshot_content(snapshot_id) url_args = {"snapshot_id": snapshot_id} branches_url = reverse("browse-snapshot-branches", url_args=url_args) releases_url = reverse("browse-snapshot-releases", url_args=url_args) releases = list(reversed(releases)) snapshot_sizes = service.lookup_snapshot_sizes(snapshot_id) is_empty = sum(snapshot_sizes.values()) == 0 swh_snp_id = persistent_identifier("snapshot", snapshot_id) if visit_info: timestamp = format_utc_iso_date(visit_info["date"]) if origin_info: browse_view_name = f"browse-origin-{browse_context}" else: browse_view_name = f"browse-snapshot-{browse_context}" release_id = None root_directory = None snapshot_total_size = sum(snapshot_sizes.values()) if path is not None: query_params["path"] = path if 
snapshot_total_size and revision_id is not None: revision = service.lookup_revision(revision_id) root_directory = revision["directory"] branches.append( SnapshotBranchInfo( name=revision_id, revision=revision_id, directory=root_directory, date=revision["date"], message=revision["message"], url=None, ) ) branch_name = revision_id query_params["revision"] = revision_id elif snapshot_total_size and release_name: release = _get_release(releases, release_name, snapshot_id) try: root_directory = release["directory"] revision_id = release["target"] release_id = release["id"] query_params["release"] = release_name except Exception as exc: sentry_sdk.capture_exception(exc) _branch_not_found( "release", release_name, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id, ) elif snapshot_total_size: if branch_name: query_params["branch"] = branch_name branch = _get_branch(branches, branch_name or "HEAD", snapshot_id) try: branch_name = branch["name"] revision_id = branch["revision"] root_directory = branch["directory"] except Exception as exc: sentry_sdk.capture_exception(exc) _branch_not_found( "branch", branch_name, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id, ) for b in branches: branch_query_params = dict(query_params) branch_query_params.pop("release", None) if b["name"] != b["revision"]: branch_query_params.pop("revision", None) branch_query_params["branch"] = b["name"] b["url"] = reverse( browse_view_name, url_args=url_args, query_params=branch_query_params ) for r in releases: release_query_params = dict(query_params) release_query_params.pop("branch", None) release_query_params.pop("revision", None) release_query_params["release"] = r["name"] r["url"] = reverse( browse_view_name, url_args=url_args, query_params=release_query_params, ) return SnapshotContext( branch=branch_name, branches=branches, branches_url=branches_url, is_empty=is_empty, origin_info=origin_info, origin_visits_url=origin_visits_url, release=release_name, release_id=release_id, query_params=query_params, releases=releases, releases_url=releases_url, revision_id=revision_id, root_directory=root_directory, snapshot_id=snapshot_id, snapshot_sizes=snapshot_sizes, snapshot_swhid=swh_snp_id, url_args=url_args, visit_info=visit_info, ) def _build_breadcrumbs(snapshot_context: SnapshotContext, path: str): origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = dict(snapshot_context["query_params"]) root_directory = snapshot_context["root_directory"] path_info = gen_path_info(path) if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" breadcrumbs = [] if root_directory: query_params.pop("path", None) breadcrumbs.append( { "name": root_directory[:7], "url": reverse( browse_view_name, url_args=url_args, query_params=query_params ), } ) for pi in path_info: query_params["path"] = pi["path"] breadcrumbs.append( { "name": pi["name"], "url": reverse( browse_view_name, url_args=url_args, query_params=query_params ), } ) return breadcrumbs +def _check_origin_url(snapshot_id, origin_url): + if snapshot_id is None and origin_url is None: + raise BadInputExc("An origin URL must be provided as query parameter.") + + def browse_snapshot_directory( request, snapshot_id=None, origin_url=None, timestamp=None, path=None ): """ Django view implementation for browsing a directory in a snapshot context. 
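# Illustrative sketch, not part of the patch under review: how the per-branch
# and per-release query strings built above in get_snapshot_context() differ
# from the base query parameters. The swh.web reverse() helper does the actual
# URL construction; a plain urlencode() stands in for it here (assumption).
# Simplification: the real loop keeps the "revision" parameter for the
# synthetic branch named after the browsed revision; that detail is dropped.
from typing import Dict
from urllib.parse import urlencode


def branch_query_string(base: Dict[str, str], branch_name: str) -> str:
    params = dict(base)
    params.pop("release", None)   # a branch link must not pin a release
    params.pop("revision", None)  # ...nor a specific revision
    params["branch"] = branch_name
    return urlencode(params)


def release_query_string(base: Dict[str, str], release_name: str) -> str:
    params = dict(base)
    params.pop("branch", None)
    params.pop("revision", None)
    params["release"] = release_name
    return urlencode(params)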
""" try: + _check_origin_url(snapshot_id, origin_url) + snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), path=path, browse_context="directory", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) root_directory = snapshot_context["root_directory"] sha1_git = root_directory if root_directory and path: dir_info = service.lookup_directory_with_path(root_directory, path) sha1_git = dir_info["target"] dirs = [] files = [] if sha1_git: dirs, files = get_directory_entries(sha1_git) except Exception as exc: return handle_view_exception(request, exc) origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] url_args = snapshot_context["url_args"] query_params = dict(snapshot_context["query_params"]) revision_id = snapshot_context["revision_id"] snapshot_id = snapshot_context["snapshot_id"] if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" breadcrumbs = _build_breadcrumbs(snapshot_context, path) path = "" if path is None else (path + "/") for d in dirs: if d["type"] == "rev": d["url"] = reverse("browse-revision", url_args={"sha1_git": d["target"]}) else: query_params["path"] = path + d["name"] d["url"] = reverse( browse_view_name, url_args=url_args, query_params=query_params ) sum_file_sizes = 0 readmes = {} if origin_info: browse_view_name = "browse-origin-content" else: browse_view_name = "browse-snapshot-content" for f in files: query_params["path"] = path + f["name"] f["url"] = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if f["length"] is not None: sum_file_sizes += f["length"] f["length"] = filesizeformat(f["length"]) if f["name"].lower().startswith("readme"): readmes[f["name"]] = f["checksums"]["sha1"] readme_name, readme_url, readme_html = get_readme_to_display(readmes) if origin_info: browse_view_name = "browse-origin-log" else: browse_view_name = "browse-snapshot-log" history_url = None if snapshot_id != _empty_snapshot_id: history_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) nb_files = None nb_dirs = None dir_path = None if root_directory: nb_files = len(files) nb_dirs = len(dirs) sum_file_sizes = filesizeformat(sum_file_sizes) dir_path = "/" + path browse_dir_link = gen_directory_link(sha1_git) browse_rev_link = gen_revision_link(revision_id) browse_snp_link = gen_snapshot_link(snapshot_id) revision_found = True if sha1_git is None and revision_id is not None: try: service.lookup_revision(revision_id) except NotFoundExc: revision_found = False dir_metadata = { "directory": sha1_git, "context-independent directory": browse_dir_link, "number of regular files": nb_files, "number of subdirectories": nb_dirs, "sum of regular file sizes": sum_file_sizes, "path": dir_path, "revision": revision_id, "revision_found": revision_found, "context-independent revision": browse_rev_link, "snapshot": snapshot_id, "context-independent snapshot": browse_snp_link, } if origin_info: dir_metadata["origin url"] = origin_info["url"] dir_metadata["origin visit date"] = format_utc_iso_date(visit_info["date"]) dir_metadata["origin visit type"] = visit_info["type"] vault_cooking = { "directory_context": True, "directory_id": sha1_git, "revision_context": True, "revision_id": revision_id, } swh_objects = [ {"type": "directory", "id": sha1_git}, {"type": "revision", "id": revision_id}, 
{"type": "snapshot", "id": snapshot_id}, ] release_id = snapshot_context["release_id"] if release_id: swh_objects.append({"type": "release", "id": release_id}) browse_rel_link = gen_release_link(release_id) dir_metadata["release"] = release_id dir_metadata["context-independent release"] = browse_rel_link swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context) dir_path = "/".join([bc["name"] for bc in breadcrumbs]) + "/" context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Directory - %s - %s - %s" % ( dir_path, snapshot_context["branch"], context_found, ) top_right_link = None if not snapshot_context["is_empty"]: top_right_link = { "url": history_url, "icon": swh_object_icons["revisions history"], "text": "History", } return render( request, "browse/directory.html", { "heading": heading, "swh_object_name": "Directory", "swh_object_metadata": dir_metadata, "dirs": dirs, "files": files, "breadcrumbs": breadcrumbs if root_directory else [], "top_right_link": top_right_link, "readme_name": readme_name, "readme_url": readme_url, "readme_html": readme_html, "snapshot_context": snapshot_context, "vault_cooking": vault_cooking, "show_actions_menu": True, "swh_ids": swh_ids, }, ) def browse_snapshot_content( request, snapshot_id=None, origin_url=None, timestamp=None, path=None, selected_language=None, ): """ Django view implementation for browsing a content in a snapshot context. """ try: + _check_origin_url(snapshot_id, origin_url) + if path is None: raise BadInputExc("The path of a content must be given as query parameter.") snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), path=path, browse_context="content", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) root_directory = snapshot_context["root_directory"] sha1_git = None query_string = None content_data = None directory_id = None split_path = path.split("/") filename = split_path[-1] filepath = path[: -len(filename)] if root_directory: content_info = service.lookup_directory_with_path(root_directory, path) sha1_git = content_info["target"] query_string = "sha1_git:" + sha1_git content_data = request_content(query_string, raise_if_unavailable=False) if filepath: dir_info = service.lookup_directory_with_path(root_directory, filepath) directory_id = dir_info["target"] else: directory_id = root_directory except Exception as exc: return handle_view_exception(request, exc) revision_id = snapshot_context["revision_id"] origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] snapshot_id = snapshot_context["snapshot_id"] content = None language = None mimetype = None if content_data and content_data["raw_data"] is not None: content_display_data = prepare_content_for_display( content_data["raw_data"], content_data["mimetype"], path ) content = content_display_data["content_data"] language = content_display_data["language"] mimetype = content_display_data["mimetype"] # Override language with user-selected language if selected_language is not None: language = selected_language available_languages = None if mimetype and "text/" in mimetype: available_languages = highlightjs.get_supported_languages() breadcrumbs = _build_breadcrumbs(snapshot_context, filepath) breadcrumbs.append({"name": filename, "url": None}) browse_content_link = gen_content_link(sha1_git) 
content_raw_url = None if query_string: content_raw_url = reverse( "browse-content-raw", url_args={"query_string": query_string}, query_params={"filename": filename}, ) browse_rev_link = gen_revision_link(revision_id) browse_dir_link = gen_directory_link(directory_id) content_metadata = { "context-independent content": browse_content_link, "path": None, "filename": None, "directory": directory_id, "context-independent directory": browse_dir_link, "revision": revision_id, "context-independent revision": browse_rev_link, "snapshot": snapshot_id, } cnt_sha1_git = None content_size = None error_code = 200 error_description = "" error_message = "" if content_data: for checksum in content_data["checksums"].keys(): content_metadata[checksum] = content_data["checksums"][checksum] content_metadata["mimetype"] = content_data["mimetype"] content_metadata["encoding"] = content_data["encoding"] content_metadata["size"] = filesizeformat(content_data["length"]) content_metadata["language"] = content_data["language"] content_metadata["licenses"] = content_data["licenses"] content_metadata["path"] = "/" + filepath content_metadata["filename"] = filename cnt_sha1_git = content_data["checksums"]["sha1_git"] content_size = content_data["length"] error_code = content_data["error_code"] error_message = content_data["error_message"] error_description = content_data["error_description"] if origin_info: content_metadata["origin url"] = origin_info["url"] content_metadata["origin visit date"] = format_utc_iso_date(visit_info["date"]) content_metadata["origin visit type"] = visit_info["type"] browse_snapshot_link = gen_snapshot_link(snapshot_id) content_metadata["context-independent snapshot"] = browse_snapshot_link swh_objects = [ {"type": "content", "id": cnt_sha1_git}, {"type": "directory", "id": directory_id}, {"type": "revision", "id": revision_id}, {"type": "snapshot", "id": snapshot_id}, ] release_id = snapshot_context["release_id"] if release_id: swh_objects.append({"type": "release", "id": release_id}) browse_rel_link = gen_release_link(release_id) content_metadata["release"] = release_id content_metadata["context-independent release"] = browse_rel_link swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context) content_path = "/".join([bc["name"] for bc in breadcrumbs]) context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Content - %s - %s - %s" % ( content_path, snapshot_context["branch"], context_found, ) top_right_link = None if not snapshot_context["is_empty"]: top_right_link = { "url": content_raw_url, "icon": swh_object_icons["content"], "text": "Raw File", } return render( request, "browse/content.html", { "heading": heading, "swh_object_name": "Content", "swh_object_metadata": content_metadata, "content": content, "content_size": content_size, "max_content_size": content_display_max_size, "mimetype": mimetype, "language": language, "available_languages": available_languages, "breadcrumbs": breadcrumbs if root_directory else [], "top_right_link": top_right_link, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions_menu": True, "swh_ids": swh_ids, "error_code": error_code, "error_message": error_message, "error_description": error_description, }, status=error_code, ) PER_PAGE = 100 def browse_snapshot_log(request, snapshot_id=None, origin_url=None, timestamp=None): """ Django view implementation for browsing a revision history in a snapshot context. 
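# Illustrative sketch, not part of the patch under review: the offset/per_page
# windowing used by the revision log view below. A plain list of revision ids
# stands in for the revisions walker and its exported state (assumption); the
# real view persists both in the Django session between requests.
from typing import List, Optional, Tuple


def log_page(
    rev_log: List[str], offset: int, per_page: int
) -> Tuple[List[str], Optional[int], Optional[int]]:
    page = rev_log[offset : offset + per_page]
    # "previous" points at older revisions, i.e. a larger offset, and is only
    # offered when more revisions than the current window have been fetched
    prev_offset = offset + per_page if len(rev_log) > offset + per_page else None
    next_offset = offset - per_page if offset != 0 else None
    return page, prev_offset, next_offset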
""" try: + _check_origin_url(snapshot_id, origin_url) + snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), browse_context="log", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) revision_id = snapshot_context["revision_id"] per_page = int(request.GET.get("per_page", PER_PAGE)) offset = int(request.GET.get("offset", 0)) revs_ordering = request.GET.get("revs_ordering", "committer_date") session_key = "rev_%s_log_ordering_%s" % (revision_id, revs_ordering) rev_log_session = request.session.get(session_key, None) rev_log = [] revs_walker_state = None if rev_log_session: rev_log = rev_log_session["rev_log"] revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: revs_walker = service.get_revisions_walker( revs_ordering, revision_id, max_revs=offset + per_page + 1, state=revs_walker_state, ) rev_log += [rev["id"] for rev in revs_walker] revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] revision_log = service.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, "revs_walker_state": revs_walker_state, } except Exception as exc: return handle_view_exception(request, exc) origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] snapshot_id = snapshot_context["snapshot_id"] query_params["per_page"] = per_page revs_ordering = request.GET.get("revs_ordering", "") query_params["revs_ordering"] = revs_ordering if origin_info: browse_view_name = "browse-origin-log" else: browse_view_name = "browse-snapshot-log" prev_log_url = None if len(rev_log) > offset + per_page: query_params["offset"] = offset + per_page prev_log_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) next_log_url = None if offset != 0: query_params["offset"] = offset - per_page next_log_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) revision_log_data = format_log_entries(revision_log, per_page, snapshot_context) browse_rev_link = gen_revision_link(revision_id) browse_log_link = gen_revision_log_link(revision_id) browse_snp_link = gen_snapshot_link(snapshot_id) revision_metadata = { "context-independent revision": browse_rev_link, "context-independent revision history": browse_log_link, "context-independent snapshot": browse_snp_link, "snapshot": snapshot_id, } if origin_info: revision_metadata["origin url"] = origin_info["url"] revision_metadata["origin visit date"] = format_utc_iso_date(visit_info["date"]) revision_metadata["origin visit type"] = visit_info["type"] swh_objects = [ {"type": "revision", "id": revision_id}, {"type": "snapshot", "id": snapshot_id}, ] release_id = snapshot_context["release_id"] if release_id: swh_objects.append({"type": "release", "id": release_id}) browse_rel_link = gen_release_link(release_id) revision_metadata["release"] = release_id revision_metadata["context-independent release"] = browse_rel_link swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context) context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Revision history - %s - %s" % (snapshot_context["branch"], context_found) return render( request, "browse/revision-log.html", { "heading": heading, 
"swh_object_name": "Revisions history", "swh_object_metadata": revision_metadata, "revision_log": revision_log_data, "revs_ordering": revs_ordering, "next_log_url": next_log_url, "prev_log_url": prev_log_url, "breadcrumbs": None, "top_right_link": None, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions_menu": True, "swh_ids": swh_ids, }, ) def browse_snapshot_branches( request, snapshot_id=None, origin_url=None, timestamp=None ): """ Django view implementation for browsing a list of branches in a snapshot context. """ try: + _check_origin_url(snapshot_id, origin_url) + snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), ) branches_bc = request.GET.get("branches_breadcrumbs", "") branches_bc = branches_bc.split(",") if branches_bc else [] branches_from = branches_bc[-1] if branches_bc else "" origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" snapshot = service.lookup_snapshot( snapshot_context["snapshot_id"], branches_from, PER_PAGE + 1, target_types=["revision", "alias"], ) displayed_branches, _ = process_snapshot_branches(snapshot) except Exception as exc: return handle_view_exception(request, exc) for branch in displayed_branches: if snapshot_id: revision_url = reverse( "browse-revision", url_args={"sha1_git": branch["revision"]}, query_params={"snapshot_id": snapshot_id}, ) else: revision_url = reverse( "browse-revision", url_args={"sha1_git": branch["revision"]}, query_params={"origin": origin_info["url"]}, ) query_params["branch"] = branch["name"] directory_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) del query_params["branch"] branch["revision_url"] = revision_url branch["directory_url"] = directory_url if origin_info: browse_view_name = "browse-origin-branches" else: browse_view_name = "browse-snapshot-branches" prev_branches_url = None next_branches_url = None if branches_bc: query_params_prev = dict(query_params) query_params_prev["branches_breadcrumbs"] = ",".join(branches_bc[:-1]) prev_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_prev ) elif branches_from: prev_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if snapshot["next_branch"] is not None: query_params_next = dict(query_params) next_branch = displayed_branches[-1]["name"] del displayed_branches[-1] branches_bc.append(next_branch) query_params_next["branches_breadcrumbs"] = ",".join(branches_bc) next_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_next ) heading = "Branches - " if origin_info: heading += "origin: %s" % origin_info["url"] else: heading += "snapshot: %s" % snapshot_id return render( request, "browse/branches.html", { "heading": heading, "swh_object_name": "Branches", "swh_object_metadata": {}, "top_right_link": None, "displayed_branches": displayed_branches, "prev_branches_url": prev_branches_url, "next_branches_url": next_branches_url, "snapshot_context": snapshot_context, }, ) def browse_snapshot_releases( request, snapshot_id=None, origin_url=None, timestamp=None ): """ Django view implementation for browsing a list of releases in a snapshot context. 
""" try: + + _check_origin_url(snapshot_id, origin_url) + snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), ) rel_bc = request.GET.get("releases_breadcrumbs", "") rel_bc = rel_bc.split(",") if rel_bc else [] rel_from = rel_bc[-1] if rel_bc else "" origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] snapshot = service.lookup_snapshot( snapshot_context["snapshot_id"], rel_from, PER_PAGE + 1, target_types=["release", "alias"], ) _, displayed_releases = process_snapshot_branches(snapshot) except Exception as exc: return handle_view_exception(request, exc) for release in displayed_releases: if snapshot_id: query_params_tgt = {"snapshot_id": snapshot_id} else: query_params_tgt = {"origin": origin_info["url"]} release_url = reverse( "browse-release", url_args={"sha1_git": release["id"]}, query_params=query_params_tgt, ) target_url = "" if release["target_type"] == "revision": target_url = reverse( "browse-revision", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "directory": target_url = reverse( "browse-directory", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "content": target_url = reverse( "browse-content", url_args={"query_string": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "release": target_url = reverse( "browse-release", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) release["release_url"] = release_url release["target_url"] = target_url if origin_info: browse_view_name = "browse-origin-releases" else: browse_view_name = "browse-snapshot-releases" prev_releases_url = None next_releases_url = None if rel_bc: query_params_prev = dict(query_params) query_params_prev["releases_breadcrumbs"] = ",".join(rel_bc[:-1]) prev_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_prev ) elif rel_from: prev_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if snapshot["next_branch"] is not None: query_params_next = dict(query_params) next_rel = displayed_releases[-1]["branch_name"] del displayed_releases[-1] rel_bc.append(next_rel) query_params_next["releases_breadcrumbs"] = ",".join(rel_bc) next_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_next ) heading = "Releases - " if origin_info: heading += "origin: %s" % origin_info["url"] else: heading += "snapshot: %s" % snapshot_id return render( request, "browse/releases.html", { "heading": heading, "top_panel_visible": False, "top_panel_collapsible": False, "swh_object_name": "Releases", "swh_object_metadata": {}, "top_right_link": None, "displayed_releases": displayed_releases, "prev_releases_url": prev_releases_url, "next_releases_url": next_releases_url, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions_menu": False, }, ) diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py index 008495da..9f074f1f 100644 --- a/swh/web/browse/utils.py +++ b/swh/web/browse/utils.py @@ -1,811 +1,809 @@ # Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import 
base64 import magic import stat import textwrap from threading import Lock from django.core.cache import cache from django.utils.safestring import mark_safe from django.utils.html import escape import sentry_sdk from swh.web.common import highlightjs, service from swh.web.common.exc import http_status_code_message from swh.web.common.identifiers import get_swh_persistent_id from swh.web.common.utils import ( reverse, format_utc_iso_date, swh_object_icons, rst_to_html, ) from swh.web.config import get_config def get_directory_entries(sha1_git): """Function that retrieves the content of a directory from the archive. The directories entries are first sorted in lexicographical order. Sub-directories and regular files are then extracted. Args: sha1_git: sha1_git identifier of the directory Returns: A tuple whose first member corresponds to the sub-directories list and second member the regular files list Raises: NotFoundExc if the directory is not found """ cache_entry_id = "directory_entries_%s" % sha1_git cache_entry = cache.get(cache_entry_id) if cache_entry: return cache_entry entries = list(service.lookup_directory(sha1_git)) for e in entries: e["perms"] = stat.filemode(e["perms"]) if e["type"] == "rev": # modify dir entry name to explicitly show it points # to a revision e["name"] = "%s @ %s" % (e["name"], e["target"][:7]) dirs = [e for e in entries if e["type"] in ("dir", "rev")] files = [e for e in entries if e["type"] == "file"] dirs = sorted(dirs, key=lambda d: d["name"]) files = sorted(files, key=lambda f: f["name"]) cache.set(cache_entry_id, (dirs, files)) return dirs, files _lock = Lock() def get_mimetype_and_encoding_for_content(content): """Function that returns the mime type and the encoding associated to a content buffer using the magic module under the hood. Args: content (bytes): a content buffer Returns: A tuple (mimetype, encoding), for instance ('text/plain', 'us-ascii'), associated to the provided content. 
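# Illustrative usage sketch, not part of the patch under review, of the
# python-magic API used just below: with mime=True and mime_encoding=True,
# from_buffer() returns a string such as "text/plain; charset=us-ascii",
# which is then split into the two values the browse views need.
# Requires the python-magic package (the "magic" module imported above).
import magic


def detect_mime_and_encoding(content: bytes):
    result = magic.Magic(mime=True, mime_encoding=True).from_buffer(content)
    mime_type, _, charset = result.partition(";")
    return mime_type.strip(), charset.replace("charset=", "").strip()


# e.g. detect_mime_and_encoding(b"hello\n") is expected to yield
# something like ("text/plain", "us-ascii")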
""" # https://pypi.org/project/python-magic/ # packaged as python3-magic in debian buster if hasattr(magic, "from_buffer"): m = magic.Magic(mime=True, mime_encoding=True) mime_encoding = m.from_buffer(content) mime_type, encoding = mime_encoding.split(";") encoding = encoding.replace(" charset=", "") # https://pypi.org/project/file-magic/ # packaged as python3-magic in debian stretch else: # TODO: Remove that code when production environment is upgraded # to debian buster # calls to the file-magic API are not thread-safe so they must # be protected with a Lock to guarantee they will succeed _lock.acquire() magic_result = magic.detect_from_content(content) _lock.release() mime_type = magic_result.mime_type encoding = magic_result.encoding return mime_type, encoding # maximum authorized content size in bytes for HTML display # with code highlighting content_display_max_size = get_config()["content_display_max_size"] def _re_encode_content(mimetype, encoding, content_data): # encode textual content to utf-8 if needed if mimetype.startswith("text/"): # probably a malformed UTF-8 content, re-encode it # by replacing invalid chars with a substitution one if encoding == "unknown-8bit": content_data = content_data.decode("utf-8", "replace").encode("utf-8") elif encoding not in ["utf-8", "binary"]: content_data = content_data.decode(encoding, "replace").encode("utf-8") elif mimetype.startswith("application/octet-stream"): # file may detect a text content as binary # so try to decode it for display encodings = ["us-ascii", "utf-8"] encodings += ["iso-8859-%s" % i for i in range(1, 17)] for enc in encodings: try: content_data = content_data.decode(enc).encode("utf-8") except Exception as exc: sentry_sdk.capture_exception(exc) else: # ensure display in content view encoding = enc mimetype = "text/plain" break return mimetype, encoding, content_data def request_content( query_string, max_size=content_display_max_size, raise_if_unavailable=True, re_encode=True, ): """Function that retrieves a content from the archive. Raw bytes content is first retrieved, then the content mime type. If the mime type is not stored in the archive, it will be computed using Python magic module. 
Args: query_string: a string of the form "[ALGO_HASH:]HASH" where optional ALGO_HASH can be either ``sha1``, ``sha1_git``, ``sha256``, or ``blake2s256`` (default to ``sha1``) and HASH the hexadecimal representation of the hash value max_size: the maximum size for a content to retrieve (default to 1MB, no size limit if None) Returns: A tuple whose first member corresponds to the content raw bytes and second member the content mime type Raises: NotFoundExc if the content is not found """ content_data = service.lookup_content(query_string) filetype = None language = None license = None # requests to the indexer db may fail so properly handle # those cases in order to avoid content display errors try: filetype = service.lookup_content_filetype(query_string) language = service.lookup_content_language(query_string) license = service.lookup_content_license(query_string) except Exception as exc: sentry_sdk.capture_exception(exc) mimetype = "unknown" encoding = "unknown" if filetype: mimetype = filetype["mimetype"] encoding = filetype["encoding"] # workaround when encountering corrupted data due to implicit # conversion from bytea to text in the indexer db (see T818) # TODO: Remove that code when all data have been correctly converted if mimetype.startswith("\\"): filetype = None content_data["error_code"] = 200 content_data["error_message"] = "" content_data["error_description"] = "" if not max_size or content_data["length"] < max_size: try: content_raw = service.lookup_content_raw(query_string) except Exception as exc: if raise_if_unavailable: raise exc else: sentry_sdk.capture_exception(exc) content_data["raw_data"] = None content_data["error_code"] = 404 content_data["error_description"] = ( "The bytes of the content are currently not available " "in the archive." ) content_data["error_message"] = http_status_code_message[ content_data["error_code"] ] else: content_data["raw_data"] = content_raw["data"] if not filetype: mimetype, encoding = get_mimetype_and_encoding_for_content( content_data["raw_data"] ) if re_encode: mimetype, encoding, raw_data = _re_encode_content( mimetype, encoding, content_data["raw_data"] ) content_data["raw_data"] = raw_data else: content_data["raw_data"] = None content_data["mimetype"] = mimetype content_data["encoding"] = encoding if language: content_data["language"] = language["lang"] else: content_data["language"] = "not detected" if license: content_data["licenses"] = ", ".join(license["facts"][0]["licenses"]) else: content_data["licenses"] = "not detected" return content_data _browsers_supported_image_mimes = set( [ "image/gif", "image/png", "image/jpeg", "image/bmp", "image/webp", "image/svg", "image/svg+xml", ] ) def prepare_content_for_display(content_data, mime_type, path): """Function that prepares a content for HTML display. The function tries to associate a programming language to a content in order to perform syntax highlighting client-side using highlightjs. The language is determined using either the content filename or its mime type. If the mime type corresponds to an image format supported by web browsers, the content will be encoded in base64 for displaying the image. Args: content_data (bytes): raw bytes of the content mime_type (string): mime type of the content path (string): path of the content including filename Returns: A dict containing the content bytes (possibly different from the one provided as parameter if it is an image) under the key 'content_data and the corresponding highlightjs language class under the key 'language'. 
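# Illustrative sketch, not part of the patch under review: the base64 step
# described above, which lets a browser-supported image be embedded in the
# HTML page. Building a data: URI is an assumption made for this example; the
# view itself only stores the base64 payload and leaves rendering to the
# template.
import base64


def image_to_data_uri(raw_bytes: bytes, mime_type: str) -> str:
    payload = base64.b64encode(raw_bytes).decode("ascii")
    return "data:%s;base64,%s" % (mime_type, payload)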
""" language = highlightjs.get_hljs_language_from_filename(path) if not language: language = highlightjs.get_hljs_language_from_mime_type(mime_type) if not language: language = "nohighlight" elif mime_type.startswith("application/"): mime_type = mime_type.replace("application/", "text/") if mime_type.startswith("image/"): if mime_type in _browsers_supported_image_mimes: content_data = base64.b64encode(content_data).decode("ascii") else: content_data = None if mime_type.startswith("image/svg"): mime_type = "image/svg+xml" if mime_type.startswith("text/"): content_data = content_data.decode("utf-8", errors="replace") return {"content_data": content_data, "language": language, "mimetype": mime_type} def gen_link(url, link_text=None, link_attrs=None): """ Utility function for generating an HTML link to insert in Django templates. Args: url (str): an url link_text (str): optional text for the produced link, if not provided the url will be used link_attrs (dict): optional attributes (e.g. class) to add to the link Returns: An HTML link in the form 'link_text' """ attrs = " " if link_attrs: for k, v in link_attrs.items(): attrs += '%s="%s" ' % (k, v) if not link_text: link_text = url link = '%s' % (attrs, escape(url), escape(link_text)) return mark_safe(link) def _snapshot_context_query_params(snapshot_context): query_params = None if snapshot_context and snapshot_context["origin_info"]: origin_info = snapshot_context["origin_info"] query_params = {"origin": origin_info["url"]} if "timestamp" in snapshot_context["query_params"]: query_params["timestamp"] = snapshot_context["query_params"]["timestamp"] if "visit_id" in snapshot_context["query_params"]: query_params["visit_id"] = snapshot_context["query_params"]["visit_id"] elif snapshot_context: query_params = {"snapshot_id": snapshot_context["snapshot_id"]} return query_params def gen_revision_url(revision_id, snapshot_context=None): """ Utility function for generating an url to a revision. Args: revision_id (str): a revision id snapshot_context (dict): if provided, generate snapshot-dependent browsing url Returns: str: The url to browse the revision """ query_params = _snapshot_context_query_params(snapshot_context) return reverse( "browse-revision", url_args={"sha1_git": revision_id}, query_params=query_params ) def gen_revision_link( revision_id, shorten_id=False, snapshot_context=None, link_text="Browse", link_attrs={"class": "btn btn-default btn-sm", "role": "button"}, ): """ Utility function for generating a link to a revision HTML view to insert in Django templates. Args: revision_id (str): a revision id shorten_id (boolean): whether to shorten the revision id to 7 characters for the link text snapshot_context (dict): if provided, generate snapshot-dependent browsing link link_text (str): optional text for the generated link (the revision id will be used by default) link_attrs (dict): optional attributes (e.g. class) to add to the link Returns: str: An HTML link in the form 'revision_id' """ if not revision_id: return None revision_url = gen_revision_url(revision_id, snapshot_context) if shorten_id: return gen_link(revision_url, revision_id[:7], link_attrs) else: if not link_text: link_text = revision_id return gen_link(revision_url, link_text, link_attrs) def gen_directory_link( sha1_git, snapshot_context=None, link_text="Browse", link_attrs={"class": "btn btn-default btn-sm", "role": "button"}, ): """ Utility function for generating a link to a directory HTML view to insert in Django templates. 
Args: sha1_git (str): directory identifier link_text (str): optional text for the generated link (the directory id will be used by default) link_attrs (dict): optional attributes (e.g. class) to add to the link Returns: An HTML link in the form 'link_text' """ if not sha1_git: return None query_params = _snapshot_context_query_params(snapshot_context) directory_url = reverse( "browse-directory", url_args={"sha1_git": sha1_git}, query_params=query_params ) if not link_text: link_text = sha1_git return gen_link(directory_url, link_text, link_attrs) def gen_snapshot_link( snapshot_id, snapshot_context=None, link_text="Browse", link_attrs={"class": "btn btn-default btn-sm", "role": "button"}, ): """ Utility function for generating a link to a snapshot HTML view to insert in Django templates. Args: snapshot_id (str): snapshot identifier link_text (str): optional text for the generated link (the snapshot id will be used by default) link_attrs (dict): optional attributes (e.g. class) to add to the link Returns: An HTML link in the form 'link_text' """ query_params = _snapshot_context_query_params(snapshot_context) snapshot_url = reverse( "browse-snapshot", url_args={"snapshot_id": snapshot_id}, query_params=query_params, ) if not link_text: link_text = snapshot_id return gen_link(snapshot_url, link_text, link_attrs) def gen_content_link( sha1_git, snapshot_context=None, link_text="Browse", link_attrs={"class": "btn btn-default btn-sm", "role": "button"}, ): """ Utility function for generating a link to a content HTML view to insert in Django templates. Args: sha1_git (str): content identifier link_text (str): optional text for the generated link (the content sha1_git will be used by default) link_attrs (dict): optional attributes (e.g. class) to add to the link Returns: An HTML link in the form 'link_text' """ if not sha1_git: return None query_params = _snapshot_context_query_params(snapshot_context) content_url = reverse( "browse-content", url_args={"query_string": "sha1_git:" + sha1_git}, query_params=query_params, ) if not link_text: link_text = sha1_git return gen_link(content_url, link_text, link_attrs) def get_revision_log_url(revision_id, snapshot_context=None): """ Utility function for getting the URL for a revision log HTML view (possibly in the context of an origin). 
Args: revision_id (str): revision identifier the history heads to snapshot_context (dict): if provided, generate snapshot-dependent browsing link Returns: The revision log view URL """ query_params = {"revision": revision_id} if snapshot_context and snapshot_context["origin_info"]: origin_info = snapshot_context["origin_info"] - url_args = {"origin_url": origin_info["url"]} + query_params["origin_url"] = origin_info["url"] if "timestamp" in snapshot_context["query_params"]: query_params["timestamp"] = snapshot_context["query_params"]["timestamp"] if "visit_id" in snapshot_context["query_params"]: query_params["visit_id"] = snapshot_context["query_params"]["visit_id"] - revision_log_url = reverse( - "browse-origin-log", url_args=url_args, query_params=query_params - ) + revision_log_url = reverse("browse-origin-log", query_params=query_params) elif snapshot_context: url_args = {"snapshot_id": snapshot_context["snapshot_id"]} revision_log_url = reverse( "browse-snapshot-log", url_args=url_args, query_params=query_params ) else: revision_log_url = reverse( "browse-revision-log", url_args={"sha1_git": revision_id} ) return revision_log_url def gen_revision_log_link( revision_id, snapshot_context=None, link_text="Browse", link_attrs={"class": "btn btn-default btn-sm", "role": "button"}, ): """ Utility function for generating a link to a revision log HTML view (possibly in the context of an origin) to insert in Django templates. Args: revision_id (str): revision identifier the history heads to snapshot_context (dict): if provided, generate snapshot-dependent browsing link link_text (str): optional text to use for the generated link (the revision id will be used by default) link_attrs (dict): optional attributes (e.g. class) to add to the link Returns: An HTML link in the form 'link_text' """ if not revision_id: return None revision_log_url = get_revision_log_url(revision_id, snapshot_context) if not link_text: link_text = revision_id return gen_link(revision_log_url, link_text, link_attrs) def gen_person_mail_link(person, link_text=None): """ Utility function for generating a mail link to a person to insert in Django templates. Args: person (dict): dictionary containing person data (*name*, *email*, *fullname*) link_text (str): optional text to use for the generated mail link (the person name will be used by default) Returns: str: A mail link to the person or the person name if no email is present in person data """ person_name = person["name"] or person["fullname"] or "None" if link_text is None: link_text = person_name person_email = person["email"] if person["email"] else None if person_email is None and "@" in person_name and " " not in person_name: person_email = person_name if person_email: return gen_link(url="mailto:%s" % person_email, link_text=link_text) else: return person_name def gen_release_link( sha1_git, snapshot_context=None, link_text="Browse", link_attrs={"class": "btn btn-default btn-sm", "role": "button"}, ): """ Utility function for generating a link to a release HTML view to insert in Django templates. Args: sha1_git (str): release identifier link_text (str): optional text for the generated link (the release id will be used by default) link_attrs (dict): optional attributes (e.g. 
class) to add to the link Returns: An HTML link in the form 'link_text' """ query_params = _snapshot_context_query_params(snapshot_context) release_url = reverse( "browse-release", url_args={"sha1_git": sha1_git}, query_params=query_params ) if not link_text: link_text = sha1_git return gen_link(release_url, link_text, link_attrs) def format_log_entries(revision_log, per_page, snapshot_context=None): """ Utility functions that process raw revision log data for HTML display. Its purpose is to: * add links to relevant browse views * format date in human readable format * truncate the message log Args: revision_log (list): raw revision log as returned by the swh-web api per_page (int): number of log entries per page snapshot_context (dict): if provided, generate snapshot-dependent browsing link """ revision_log_data = [] for i, rev in enumerate(revision_log): if i == per_page: break author_name = "None" author_fullname = "None" committer_fullname = "None" if rev["author"]: author_name = gen_person_mail_link(rev["author"]) author_fullname = rev["author"]["fullname"] if rev["committer"]: committer_fullname = rev["committer"]["fullname"] author_date = format_utc_iso_date(rev["date"]) committer_date = format_utc_iso_date(rev["committer_date"]) tooltip = "revision %s\n" % rev["id"] tooltip += "author: %s\n" % author_fullname tooltip += "author date: %s\n" % author_date tooltip += "committer: %s\n" % committer_fullname tooltip += "committer date: %s\n\n" % committer_date if rev["message"]: tooltip += textwrap.indent(rev["message"], " " * 4) revision_log_data.append( { "author": author_name, "id": rev["id"][:7], "message": rev["message"], "date": author_date, "commit_date": committer_date, "url": gen_revision_url(rev["id"], snapshot_context), "tooltip": tooltip, } ) return revision_log_data # list of common readme names ordered by preference # (lower indices have higher priority) _common_readme_names = [ "readme.markdown", "readme.md", "readme.rst", "readme.txt", "readme", ] def get_readme_to_display(readmes): """ Process a list of readme files found in a directory in order to find the adequate one to display. 
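# Illustrative sketch, not part of the patch under review: the selection
# policy implemented by get_readme_to_display() below — match file names
# case-insensitively against a preference list, otherwise fall back to the
# first readme-like file found.
from typing import Dict, Optional, Tuple

_PREFERRED = ["readme.markdown", "readme.md", "readme.rst", "readme.txt", "readme"]


def pick_readme(readmes: Dict[str, str]) -> Optional[Tuple[str, str]]:
    """readmes maps file names to content sha1s; returns (name, sha1) or None."""
    lowered = {name.lower(): (name, sha1) for name, sha1 in readmes.items()}
    for candidate in _PREFERRED:
        if candidate in lowered:
            return lowered[candidate]
    if readmes:
        name = next(iter(readmes))
        return name, readmes[name]
    return None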
Args: readmes: a list of dict where keys are readme file names and values are readme sha1s Returns: A tuple (readme_name, readme_sha1) """ readme_name = None readme_url = None readme_sha1 = None readme_html = None lc_readmes = {k.lower(): {"orig_name": k, "sha1": v} for k, v in readmes.items()} # look for readme names according to the preference order # defined by the _common_readme_names list for common_readme_name in _common_readme_names: if common_readme_name in lc_readmes: readme_name = lc_readmes[common_readme_name]["orig_name"] readme_sha1 = lc_readmes[common_readme_name]["sha1"] readme_url = reverse( "browse-content-raw", url_args={"query_string": readme_sha1}, query_params={"re_encode": "true"}, ) break # otherwise pick the first readme like file if any if not readme_name and len(readmes.items()) > 0: readme_name = next(iter(readmes)) readme_sha1 = readmes[readme_name] readme_url = reverse( "browse-content-raw", url_args={"query_string": readme_sha1}, query_params={"re_encode": "true"}, ) # convert rst README to html server side as there is # no viable solution to perform that task client side if readme_name and readme_name.endswith(".rst"): cache_entry_id = "readme_%s" % readme_sha1 cache_entry = cache.get(cache_entry_id) if cache_entry: readme_html = cache_entry else: try: rst_doc = request_content(readme_sha1) readme_html = rst_to_html(rst_doc["raw_data"]) cache.set(cache_entry_id, readme_html) except Exception as exc: sentry_sdk.capture_exception(exc) readme_html = "Readme bytes are not available" return readme_name, readme_url, readme_html def get_swh_persistent_ids(swh_objects, snapshot_context=None): """ Returns a list of dict containing info related to persistent identifiers of swh objects. Args: swh_objects (list): a list of dict with the following keys: * type: swh object type (content/directory/release/revision/snapshot) * id: swh object id snapshot_context (dict): optional parameter describing the snapshot in which the object has been found Returns: list: a list of dict with the following keys: * object_type: the swh object type (content/directory/release/revision/snapshot) * object_icon: the swh object icon to use in HTML views * swh_id: the computed swh object persistent identifier * swh_id_url: the url resolving the persistent identifier * show_options: boolean indicating if the persistent id options must be displayed in persistent ids HTML view """ swh_ids = [] for swh_object in swh_objects: if not swh_object["id"]: continue swh_id = get_swh_persistent_id(swh_object["type"], swh_object["id"]) show_options = swh_object["type"] == "content" or ( snapshot_context and snapshot_context["origin_info"] is not None ) object_icon = swh_object_icons[swh_object["type"]] swh_ids.append( { "object_type": swh_object["type"], "object_id": swh_object["id"], "object_icon": object_icon, "swh_id": swh_id, "swh_id_url": reverse("browse-swh-id", url_args={"swh_id": swh_id}), "show_options": show_options, } ) return swh_ids diff --git a/swh/web/browse/views/origin.py b/swh/web/browse/views/origin.py index 56094b59..095d52b7 100644 --- a/swh/web/browse/views/origin.py +++ b/swh/web/browse/views/origin.py @@ -1,258 +1,295 @@ -# Copyright (C) 2017-2019 The Software Heritage developers +# Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.shortcuts import render, redirect from 
swh.web.browse.browseurls import browse_route from swh.web.browse.snapshot_context import ( browse_snapshot_directory, browse_snapshot_content, browse_snapshot_log, browse_snapshot_branches, browse_snapshot_releases, get_snapshot_context, ) from swh.web.common import service -from swh.web.common.exc import handle_view_exception +from swh.web.common.exc import handle_view_exception, BadInputExc from swh.web.common.origin_visits import get_origin_visits from swh.web.common.utils import reverse, format_utc_iso_date, parse_timestamp @browse_route( - r"origin/(?P.+)/directory/", view_name="browse-origin-directory", + r"origin/directory/", view_name="browse-origin-directory", ) -def origin_directory_browse(request, origin_url): +def origin_directory_browse(request): """Django view for browsing the content of a directory associated to an origin for a given visit. - The URL that points to it is :http:get:`/browse/origin/(origin_url)/directory/` + The URL that points to it is :http:get:`/browse/origin/directory/` """ return browse_snapshot_directory( request, - origin_url=origin_url, + origin_url=request.GET.get("origin_url"), timestamp=request.GET.get("timestamp"), path=request.GET.get("path"), ) @browse_route( r"origin/(?P.+)/visit/(?P.+)/directory/", r"origin/(?P.+)/visit/(?P.+)/directory/(?P.+)/", r"origin/(?P.+)/directory/(?P.+)/", + r"origin/(?P.+)/directory/", view_name="browse-origin-directory-legacy", ) def origin_directory_browse_legacy(request, origin_url, timestamp=None, path=None): """Django view for browsing the content of a directory associated to an origin for a given visit. - The url scheme that points to it is the following: - - * :http:get:`/browse/origin/(origin_url)/directory/(path)/` - * :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/directory/(path)/` + The URLs that point to it are + :http:get:`/browse/origin/(origin_url)/directory/[(path)/]` and + :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/directory/[(path)/]` """ return browse_snapshot_directory( request, origin_url=origin_url, timestamp=timestamp, path=path ) @browse_route( - r"origin/(?P.+)/content/", view_name="browse-origin-content", + r"origin/content/", view_name="browse-origin-content", ) -def origin_content_browse(request, origin_url): +def origin_content_browse(request): """Django view that produces an HTML display of a content associated to an origin for a given visit. - The URL that points to it is :http:get:`/browse/origin/(origin_url)/content/` + The URL that points to it is :http:get:`/browse/origin/content/` """ return browse_snapshot_content( request, - origin_url=origin_url, + origin_url=request.GET.get("origin_url"), timestamp=request.GET.get("timestamp"), path=request.GET.get("path"), selected_language=request.GET.get("language"), ) @browse_route( r"origin/(?P.+)/visit/(?P.+)/content/(?P.+)/", r"origin/(?P.+)/content/(?P.+)/", + r"origin/(?P.+)/content/", view_name="browse-origin-content-legacy", ) def origin_content_browse_legacy(request, origin_url, path=None, timestamp=None): """Django view that produces an HTML display of a content associated to an origin for a given visit. 
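# Illustrative sketch, not part of the patch under review: the shape of the
# new origin browse URLs introduced by this change, where the origin is passed
# as the "origin_url" query parameter instead of being embedded in the URL
# path. The swh.web reverse() helper builds these URLs in the views; plain
# urlencode() stands in for it here (assumption), and the example origin URL
# is made up.
from urllib.parse import urlencode


def origin_directory_url(origin_url: str, path: str = "", timestamp: str = "") -> str:
    params = {"origin_url": origin_url}
    if path:
        params["path"] = path
    if timestamp:
        params["timestamp"] = timestamp
    return "/browse/origin/directory/?" + urlencode(params)


# e.g. origin_directory_url("https://example.org/user/repo", path="src")
#   -> "/browse/origin/directory/?origin_url=https%3A%2F%2Fexample.org%2Fuser%2Frepo&path=src"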
- The url scheme that points to it is the following: - - * :http:get:`/browse/origin/(origin_url)/content/(path)/` - * :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/content/(path)/` + The URLs that point to it are + :http:get:`/browse/origin/(origin_url)/content/(path)/` and + :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/content/(path)/` """ - language = request.GET.get("language", None) return browse_snapshot_content( request, origin_url=origin_url, timestamp=timestamp, path=path, - selected_language=language, + selected_language=request.GET.get("language"), ) @browse_route( - r"origin/(?P.+)/log/", view_name="browse-origin-log", + r"origin/log/", view_name="browse-origin-log", ) -def origin_log_browse(request, origin_url): +def origin_log_browse(request): """Django view that produces an HTML display of revisions history (aka the commit log) associated to a software origin. - The URL that points to it is :http:get:`/browse/origin/(origin_url)/log/` + The URL that points to it is :http:get:`/browse/origin/log/` """ return browse_snapshot_log( - request, origin_url=origin_url, timestamp=request.GET.get("timestamp") + request, + origin_url=request.GET.get("origin_url"), + timestamp=request.GET.get("timestamp"), ) @browse_route( r"origin/(?P.+)/visit/(?P.+)/log/", + r"origin/(?P.+)/log/", view_name="browse-origin-log-legacy", ) def origin_log_browse_legacy(request, origin_url, timestamp=None): """Django view that produces an HTML display of revisions history (aka the commit log) associated to a software origin. - The URL that points to it is + The URLs that point to it are + :http:get:`/browse/origin/(origin_url)/log/` and :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/log/` + """ return browse_snapshot_log(request, origin_url=origin_url, timestamp=timestamp) @browse_route( - r"origin/(?P.+)/branches/", view_name="browse-origin-branches", + r"origin/branches/", view_name="browse-origin-branches", ) -def origin_branches_browse(request, origin_url): +def origin_branches_browse(request): """Django view that produces an HTML display of the list of branches associated to an origin for a given visit. - The URL that points to it is :http:get:`/browse/origin/(origin_url)/branches/` + The URL that points to it is :http:get:`/browse/origin/branches/` """ return browse_snapshot_branches( - request, origin_url=origin_url, timestamp=request.GET.get("timestamp") + request, + origin_url=request.GET.get("origin_url"), + timestamp=request.GET.get("timestamp"), ) @browse_route( r"origin/(?P.+)/visit/(?P.+)/branches/", + r"origin/(?P.+)/branches/", view_name="browse-origin-branches-legacy", ) def origin_branches_browse_legacy(request, origin_url, timestamp=None): """Django view that produces an HTML display of the list of branches associated to an origin for a given visit. - The URL that points to it is + The URLs that point to it are + :http:get:`/browse/origin/(origin_url)/branches/` and :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/branches/` """ return browse_snapshot_branches(request, origin_url=origin_url, timestamp=timestamp) @browse_route( - r"origin/(?P.+)/releases/", view_name="browse-origin-releases", + r"origin/releases/", view_name="browse-origin-releases", ) -def origin_releases_browse(request, origin_url): +def origin_releases_browse(request): """Django view that produces an HTML display of the list of releases associated to an origin for a given visit. 
- The URL that points to it is :http:get:`/browse/origin/(origin_url)/releases/` + The URL that points to it is :http:get:`/browse/origin/releases/` """ return browse_snapshot_releases( - request, origin_url=origin_url, timestamp=request.GET.get("timestamp") + request, + origin_url=request.GET.get("origin_url"), + timestamp=request.GET.get("timestamp"), ) @browse_route( r"origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/releases/", + r"origin/(?P<origin_url>.+)/releases/", view_name="browse-origin-releases-legacy", ) def origin_releases_browse_legacy(request, origin_url, timestamp=None): """Django view that produces an HTML display of the list of releases associated to an origin for a given visit. - The URL that points to it is + The URLs that point to it are + :http:get:`/browse/origin/(origin_url)/releases/` and :http:get:`/browse/origin/(origin_url)/visit/(timestamp)/releases/` """ return browse_snapshot_releases(request, origin_url=origin_url, timestamp=timestamp) -@browse_route(r"origin/(?P<origin_url>.+)/visits/", view_name="browse-origin-visits") -def origin_visits_browse(request, origin_url): - """Django view that produces an HTML display of visits reporting - for a given origin. - - The url that points to it is - :http:get:`/browse/origin/(origin_url)/visits/`. - """ +def _origin_visits_browse(request, origin_url): try: + + if origin_url is None: + raise BadInputExc("An origin URL must be provided as query parameter.") + origin_info = service.lookup_origin({"url": origin_url}) origin_visits = get_origin_visits(origin_info) snapshot_context = get_snapshot_context(origin_url=origin_url) except Exception as exc: return handle_view_exception(request, exc) for i, visit in enumerate(origin_visits): url_date = format_utc_iso_date(visit["date"], "%Y-%m-%dT%H:%M:%SZ") visit["formatted_date"] = format_utc_iso_date(visit["date"]) - query_params = {"timestamp": url_date} + query_params = {"origin_url": origin_url, "timestamp": url_date} if i < len(origin_visits) - 1: if visit["date"] == origin_visits[i + 1]["date"]: query_params = {"visit_id": visit["visit"]} if i > 0: if visit["date"] == origin_visits[i - 1]["date"]: query_params = {"visit_id": visit["visit"]} snapshot = visit["snapshot"] if visit["snapshot"] else "" - visit["url"] = reverse( - "browse-origin-directory", - url_args={"origin_url": origin_url}, - query_params=query_params, - ) + visit["url"] = reverse("browse-origin-directory", query_params=query_params,) if not snapshot: visit["snapshot"] = "" visit["date"] = parse_timestamp(visit["date"]).timestamp() heading = "Origin visits - %s" % origin_url return render( request, "browse/origin-visits.html", { "heading": heading, "swh_object_name": "Visits", "swh_object_metadata": origin_info, "origin_visits": origin_visits, "origin_info": origin_info, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions_menu": False, }, ) -@browse_route(r"origin/(?P<origin_url>.+)/", view_name="browse-origin") -def origin_browse(request, origin_url): +@browse_route(r"origin/visits/", view_name="browse-origin-visits") +def origin_visits_browse(request): + """Django view that produces an HTML display of visits reporting + for a given origin. + + The URL that points to it is + :http:get:`/browse/origin/visits/`. + """ + return _origin_visits_browse(request, request.GET.get("origin_url")) + + +@browse_route( + r"origin/(?P<origin_url>.+)/visits/", view_name="browse-origin-visits-legacy" +) +def origin_visits_browse_legacy(request, origin_url): + """Django view that produces an HTML display of visits reporting + for a given origin.
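# Illustrative sketch (not part of the diff) of the new input check in
# _origin_visits_browse above: without an ?origin_url=... query parameter the
# view raises BadInputExc, which handle_view_exception is assumed to turn into
# a 400 response.
from django.test import Client

client = Client()
assert client.get("/browse/origin/visits/").status_code == 400
# With a (hypothetical, archived) origin the visits page is rendered instead:
resp = client.get(
    "/browse/origin/visits/", {"origin_url": "https://example.org/repo.git"}
)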
+ + The URL that points to it is + :http:get:`/browse/origin/(origin_url)/visits/`. + """ + return _origin_visits_browse(request, origin_url) + + +@browse_route(r"origin/", view_name="browse-origin") +def origin_browse(request): + """Django view that redirects to the display of the latest archived + snapshot for a given software origin. + """ + last_snapshot_url = reverse("browse-origin-directory", query_params=request.GET,) + return redirect(last_snapshot_url) + + +@browse_route(r"origin/(?P<origin_url>.+)/", view_name="browse-origin-legacy") +def origin_browse_legacy(request, origin_url): """Django view that redirects to the display of the latest archived snapshot for a given software origin. """ last_snapshot_url = reverse( "browse-origin-directory", - url_args={"origin_url": origin_url}, - query_params=request.GET, + query_params={"origin_url": origin_url, **request.GET}, ) return redirect(last_snapshot_url) diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py index 5d135d22..cadbd171 100644 --- a/swh/web/browse/views/release.py +++ b/swh/web/browse/views/release.py @@ -1,235 +1,237 @@ -# Copyright (C) 2017-2019 The Software Heritage developers +# Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.shortcuts import render import sentry_sdk from swh.web.browse.browseurls import browse_route from swh.web.browse.snapshot_context import get_snapshot_context from swh.web.browse.utils import ( gen_revision_link, gen_link, gen_snapshot_link, get_swh_persistent_ids, gen_directory_link, gen_content_link, gen_release_link, gen_person_mail_link, ) from swh.web.common import service from swh.web.common.exc import NotFoundExc, handle_view_exception from swh.web.common.utils import reverse, format_utc_iso_date @browse_route( r"release/(?P<sha1_git>[0-9a-f]+)/", view_name="browse-release", checksum_args=["sha1_git"], ) def release_browse(request, sha1_git): """ Django view that produces an HTML display of a release identified by its id. The url that points to it is :http:get:`/browse/release/(sha1_git)/`. """ try: release = service.lookup_release(sha1_git) snapshot_context = None origin_info = None snapshot_id = request.GET.get("snapshot_id", None) origin_url = request.GET.get("origin_url", None) if not origin_url: origin_url = request.GET.get("origin", None) timestamp = request.GET.get("timestamp", None) visit_id = request.GET.get("visit_id", None) if origin_url: try: snapshot_context = get_snapshot_context( snapshot_id, origin_url, timestamp, visit_id ) except NotFoundExc: raw_rel_url = reverse("browse-release", url_args={"sha1_git": sha1_git}) error_message = ( "The Software Heritage archive has a release " "with the hash you provided but the origin " "mentioned in your request appears broken: %s. 
" "Please check the URL and try again.\n\n" "Nevertheless, you can still browse the release " "without origin information: %s" % (gen_link(origin_url), gen_link(raw_rel_url)) ) raise NotFoundExc(error_message) origin_info = snapshot_context["origin_info"] elif snapshot_id: snapshot_context = get_snapshot_context(snapshot_id) except Exception as exc: return handle_view_exception(request, exc) release_data = {} release_data["author"] = "None" if release["author"]: release_data["author"] = gen_person_mail_link(release["author"]) release_data["date"] = format_utc_iso_date(release["date"]) release_data["release"] = sha1_git release_data["name"] = release["name"] release_data["synthetic"] = release["synthetic"] release_data["target"] = release["target"] release_data["target type"] = release["target_type"] if snapshot_context: if release["target_type"] == "revision": release_data["context-independent target"] = gen_revision_link( release["target"] ) elif release["target_type"] == "content": release_data["context-independent target"] = gen_content_link( release["target"] ) elif release["target_type"] == "directory": release_data["context-independent target"] = gen_directory_link( release["target"] ) elif release["target_type"] == "release": release_data["context-independent target"] = gen_release_link( release["target"] ) release_note_lines = [] if release["message"]: release_note_lines = release["message"].split("\n") vault_cooking = None rev_directory = None target_link = None if release["target_type"] == "revision": target_link = gen_revision_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) try: revision = service.lookup_revision(release["target"]) rev_directory = revision["directory"] vault_cooking = { "directory_context": True, "directory_id": rev_directory, "revision_context": True, "revision_id": release["target"], } except Exception as exc: sentry_sdk.capture_exception(exc) elif release["target_type"] == "directory": target_link = gen_directory_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) try: # check directory exists service.lookup_directory(release["target"]) vault_cooking = { "directory_context": True, "directory_id": release["target"], "revision_context": False, "revision_id": None, } except Exception as exc: sentry_sdk.capture_exception(exc) elif release["target_type"] == "content": target_link = gen_content_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) elif release["target_type"] == "release": target_link = gen_release_link( release["target"], snapshot_context=snapshot_context, link_text=None, link_attrs=None, ) rev_directory_url = None if rev_directory is not None: if origin_info: rev_directory_url = reverse( "browse-origin-directory", - url_args={"origin_url": origin_info["url"]}, - query_params={"release": release["name"]}, + query_params={ + "origin_url": origin_info["url"], + "release": release["name"], + }, ) elif snapshot_id: rev_directory_url = reverse( "browse-snapshot-directory", url_args={"snapshot_id": snapshot_id}, query_params={"release": release["name"]}, ) else: rev_directory_url = reverse( "browse-directory", url_args={"sha1_git": rev_directory} ) directory_link = None if rev_directory_url is not None: directory_link = gen_link(rev_directory_url, rev_directory) release["directory_link"] = directory_link release["target_link"] = target_link if snapshot_context: release_data["snapshot"] = snapshot_context["snapshot_id"] if 
origin_info: release_data["context-independent release"] = gen_release_link(release["id"]) release_data["origin url"] = gen_link(origin_info["url"], origin_info["url"]) browse_snapshot_link = gen_snapshot_link(snapshot_context["snapshot_id"]) release_data["context-independent snapshot"] = browse_snapshot_link swh_objects = [{"type": "release", "id": sha1_git}] if snapshot_context: snapshot_id = snapshot_context["snapshot_id"] if snapshot_id: swh_objects.append({"type": "snapshot", "id": snapshot_id}) swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context) note_header = "None" if len(release_note_lines) > 0: note_header = release_note_lines[0] release["note_header"] = note_header release["note_body"] = "\n".join(release_note_lines[1:]) heading = "Release - %s" % release["name"] if snapshot_context: context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading += " - %s" % context_found return render( request, "browse/release.html", { "heading": heading, "swh_object_id": swh_ids[0]["swh_id"], "swh_object_name": "Release", "swh_object_metadata": release_data, "release": release, "snapshot_context": snapshot_context, "show_actions_menu": True, "breadcrumbs": None, "vault_cooking": vault_cooking, "top_right_link": None, "swh_ids": swh_ids, }, ) diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py index d7790968..3bcd401c 100644 --- a/swh/web/browse/views/revision.py +++ b/swh/web/browse/views/revision.py @@ -1,593 +1,590 @@ -# Copyright (C) 2017-2019 The Software Heritage developers +# Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import hashlib import json import textwrap from django.http import HttpResponse from django.shortcuts import render from django.template.defaultfilters import filesizeformat from django.utils.html import escape from django.utils.safestring import mark_safe from swh.model.identifiers import persistent_identifier from swh.web.browse.browseurls import browse_route from swh.web.browse.snapshot_context import get_snapshot_context from swh.web.browse.utils import ( gen_link, gen_revision_link, gen_revision_url, get_revision_log_url, get_directory_entries, gen_directory_link, request_content, prepare_content_for_display, content_display_max_size, gen_snapshot_link, get_readme_to_display, get_swh_persistent_ids, format_log_entries, gen_person_mail_link, ) from swh.web.common import service from swh.web.common.exc import NotFoundExc, handle_view_exception from swh.web.common.utils import ( reverse, format_utc_iso_date, gen_path_info, swh_object_icons, ) def _gen_content_url(revision, query_string, path, snapshot_context): if snapshot_context: - url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] query_params["path"] = path query_params["revision"] = revision["id"] - content_url = reverse( - "browse-origin-content", url_args=url_args, query_params=query_params - ) + content_url = reverse("browse-origin-content", query_params=query_params) else: content_path = "%s/%s" % (revision["directory"], path) content_url = reverse( "browse-content", url_args={"query_string": query_string}, query_params={"path": content_path}, ) return content_url def _gen_diff_link(idx, diff_anchor, link_text): if idx < _max_displayed_file_diffs: return 
gen_link(diff_anchor, link_text) else: return link_text # TODO: put in conf _max_displayed_file_diffs = 1000 def _gen_revision_changes_list(revision, changes, snapshot_context): """ Returns a HTML string describing the file changes introduced in a revision. As this string will be displayed in the browse revision view, links to adequate file diffs are also generated. Args: revision (str): hexadecimal representation of a revision identifier changes (list): list of file changes in the revision snapshot_context (dict): optional origin context used to reverse the content urls Returns: A string to insert in a revision HTML view. """ changes_msg = [] for i, change in enumerate(changes): hasher = hashlib.sha1() from_query_string = "" to_query_string = "" diff_id = "diff-" if change["from"]: from_query_string = "sha1_git:" + change["from"]["target"] diff_id += change["from"]["target"] + "-" + change["from_path"] diff_id += "-" if change["to"]: to_query_string = "sha1_git:" + change["to"]["target"] diff_id += change["to"]["target"] + change["to_path"] change["path"] = change["to_path"] or change["from_path"] url_args = { "from_query_string": from_query_string, "to_query_string": to_query_string, } query_params = {"path": change["path"]} change["diff_url"] = reverse( "diff-contents", url_args=url_args, query_params=query_params ) hasher.update(diff_id.encode("utf-8")) diff_id = hasher.hexdigest() change["id"] = diff_id panel_diff_link = "#panel_" + diff_id if change["type"] == "modify": change["content_url"] = _gen_content_url( revision, to_query_string, change["to_path"], snapshot_context ) changes_msg.append( "modified: %s" % _gen_diff_link(i, panel_diff_link, change["to_path"]) ) elif change["type"] == "insert": change["content_url"] = _gen_content_url( revision, to_query_string, change["to_path"], snapshot_context ) changes_msg.append( "new file: %s" % _gen_diff_link(i, panel_diff_link, change["to_path"]) ) elif change["type"] == "delete": parent = service.lookup_revision(revision["parents"][0]) change["content_url"] = _gen_content_url( parent, from_query_string, change["from_path"], snapshot_context ) changes_msg.append( "deleted: %s" % _gen_diff_link(i, panel_diff_link, change["from_path"]) ) elif change["type"] == "rename": change["content_url"] = _gen_content_url( revision, to_query_string, change["to_path"], snapshot_context ) link_text = change["from_path"] + " → " + change["to_path"] changes_msg.append( "renamed: %s" % _gen_diff_link(i, panel_diff_link, link_text) ) if not changes: changes_msg.append("No changes") return mark_safe("\n".join(changes_msg)) @browse_route( r"revision/(?P[0-9a-f]+)/diff/", view_name="diff-revision", checksum_args=["sha1_git"], ) def _revision_diff(request, sha1_git): """ Browse internal endpoint to compute revision diff """ try: revision = service.lookup_revision(sha1_git) snapshot_context = None origin_url = request.GET.get("origin_url", None) if not origin_url: origin_url = request.GET.get("origin", None) timestamp = request.GET.get("timestamp", None) visit_id = request.GET.get("visit_id", None) if origin_url: snapshot_context = get_snapshot_context( origin_url=origin_url, timestamp=timestamp, visit_id=visit_id ) except Exception as exc: return handle_view_exception(request, exc) changes = service.diff_revision(sha1_git) changes_msg = _gen_revision_changes_list(revision, changes, snapshot_context) diff_data = { "total_nb_changes": len(changes), "changes": changes[:_max_displayed_file_diffs], "changes_msg": changes_msg, } diff_data_json = json.dumps(diff_data, 
separators=(",", ": ")) return HttpResponse(diff_data_json, content_type="application/json") NB_LOG_ENTRIES = 100 @browse_route( r"revision/(?P[0-9a-f]+)/log/", view_name="browse-revision-log", checksum_args=["sha1_git"], ) def revision_log_browse(request, sha1_git): """ Django view that produces an HTML display of the history log for a revision identified by its id. The url that points to it is :http:get:`/browse/revision/(sha1_git)/log/` """ try: per_page = int(request.GET.get("per_page", NB_LOG_ENTRIES)) offset = int(request.GET.get("offset", 0)) revs_ordering = request.GET.get("revs_ordering", "committer_date") session_key = "rev_%s_log_ordering_%s" % (sha1_git, revs_ordering) rev_log_session = request.session.get(session_key, None) rev_log = [] revs_walker_state = None if rev_log_session: rev_log = rev_log_session["rev_log"] revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: revs_walker = service.get_revisions_walker( revs_ordering, sha1_git, max_revs=offset + per_page + 1, state=revs_walker_state, ) rev_log += [rev["id"] for rev in revs_walker] revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] revision_log = service.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, "revs_walker_state": revs_walker_state, } except Exception as exc: return handle_view_exception(request, exc) revs_ordering = request.GET.get("revs_ordering", "") prev_log_url = None if len(rev_log) > offset + per_page: prev_log_url = reverse( "browse-revision-log", url_args={"sha1_git": sha1_git}, query_params={ "per_page": per_page, "offset": offset + per_page, "revs_ordering": revs_ordering, }, ) next_log_url = None if offset != 0: next_log_url = reverse( "browse-revision-log", url_args={"sha1_git": sha1_git}, query_params={ "per_page": per_page, "offset": offset - per_page, "revs_ordering": revs_ordering, }, ) revision_log_data = format_log_entries(revision_log, per_page) swh_rev_id = persistent_identifier("revision", sha1_git) return render( request, "browse/revision-log.html", { "heading": "Revision history", "swh_object_id": swh_rev_id, "swh_object_name": "Revisions history", "swh_object_metadata": None, "revision_log": revision_log_data, "revs_ordering": revs_ordering, "next_log_url": next_log_url, "prev_log_url": prev_log_url, "breadcrumbs": None, "top_right_link": None, "snapshot_context": None, "vault_cooking": None, "show_actions_menu": True, "swh_ids": None, }, ) @browse_route( r"revision/(?P[0-9a-f]+)/", r"revision/(?P[0-9a-f]+)/(?P.+)/", view_name="browse-revision", checksum_args=["sha1_git"], ) def revision_browse(request, sha1_git, extra_path=None): """ Django view that produces an HTML display of a revision identified by its id. The url that points to it is :http:get:`/browse/revision/(sha1_git)/`. 
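# Illustrative sketch (not part of the diff) of the pagination parameters
# handled by revision_log_browse above; the revision id is hypothetical.
from swh.web.common.utils import reverse

log_url = reverse(
    "browse-revision-log",
    url_args={"sha1_git": "0123456789abcdef0123456789abcdef01234567"},
    query_params={"per_page": 50, "offset": 100, "revs_ordering": "committer_date"},
)
# The view then serves revisions 100..149 of the walk, ordered by committer date.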
""" try: revision = service.lookup_revision(sha1_git) origin_info = None snapshot_context = None origin_url = request.GET.get("origin_url", None) if not origin_url: origin_url = request.GET.get("origin", None) timestamp = request.GET.get("timestamp", None) visit_id = request.GET.get("visit_id", None) snapshot_id = request.GET.get("snapshot_id", None) path = request.GET.get("path", None) dir_id = None dirs, files = None, None content_data = None if origin_url: try: snapshot_context = get_snapshot_context( origin_url=origin_url, timestamp=timestamp, visit_id=visit_id ) except NotFoundExc: raw_rev_url = reverse( "browse-revision", url_args={"sha1_git": sha1_git} ) error_message = ( "The Software Heritage archive has a revision " "with the hash you provided but the origin " "mentioned in your request appears broken: %s. " "Please check the URL and try again.\n\n" "Nevertheless, you can still browse the revision " "without origin information: %s" % (gen_link(origin_url), gen_link(raw_rev_url)) ) raise NotFoundExc(error_message) origin_info = snapshot_context["origin_info"] snapshot_id = snapshot_context["snapshot_id"] elif snapshot_id: snapshot_context = get_snapshot_context(snapshot_id) if path: file_info = service.lookup_directory_with_path(revision["directory"], path) if file_info["type"] == "dir": dir_id = file_info["target"] else: query_string = "sha1_git:" + file_info["target"] content_data = request_content(query_string, raise_if_unavailable=False) else: dir_id = revision["directory"] if dir_id: path = "" if path is None else (path + "/") dirs, files = get_directory_entries(dir_id) except Exception as exc: return handle_view_exception(request, exc) revision_data = {} revision_data["author"] = "None" if revision["author"]: author_link = gen_person_mail_link(revision["author"]) revision_data["author"] = author_link revision_data["committer"] = "None" if revision["committer"]: committer_link = gen_person_mail_link(revision["committer"]) revision_data["committer"] = committer_link revision_data["committer date"] = format_utc_iso_date(revision["committer_date"]) revision_data["date"] = format_utc_iso_date(revision["date"]) revision_data["directory"] = revision["directory"] if snapshot_context: revision_data["snapshot"] = snapshot_id browse_snapshot_link = gen_snapshot_link(snapshot_id) revision_data["context-independent snapshot"] = browse_snapshot_link revision_data["context-independent directory"] = gen_directory_link( revision["directory"] ) revision_data["revision"] = sha1_git revision_data["merge"] = revision["merge"] revision_data["metadata"] = escape( json.dumps( revision["metadata"], sort_keys=True, indent=4, separators=(",", ": ") ) ) if origin_info: revision_data["origin url"] = gen_link(origin_info["url"], origin_info["url"]) revision_data["context-independent revision"] = gen_revision_link(sha1_git) parents = "" for p in revision["parents"]: parent_link = gen_revision_link( p, link_text=None, link_attrs=None, snapshot_context=snapshot_context ) parents += parent_link + "
" revision_data["parents"] = mark_safe(parents) revision_data["synthetic"] = revision["synthetic"] revision_data["type"] = revision["type"] message_lines = ["None"] if revision["message"]: message_lines = revision["message"].split("\n") parents = [] for p in revision["parents"]: parent_url = gen_revision_url(p, snapshot_context) parents.append({"id": p, "url": parent_url}) path_info = gen_path_info(path) query_params = { "snapshot_id": snapshot_id, "origin": origin_url, "timestamp": timestamp, "visit_id": visit_id, } breadcrumbs = [] breadcrumbs.append( { "name": revision["directory"][:7], "url": reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ), } ) for pi in path_info: query_params["path"] = pi["path"] breadcrumbs.append( { "name": pi["name"], "url": reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ), } ) vault_cooking = { "directory_context": False, "directory_id": None, "revision_context": True, "revision_id": sha1_git, } swh_objects = [{"type": "revision", "id": sha1_git}] content = None content_size = None mimetype = None language = None readme_name = None readme_url = None readme_html = None readmes = {} error_code = 200 error_message = "" error_description = "" if content_data: breadcrumbs[-1]["url"] = None content_size = content_data["length"] mimetype = content_data["mimetype"] if content_data["raw_data"]: content_display_data = prepare_content_for_display( content_data["raw_data"], content_data["mimetype"], path ) content = content_display_data["content_data"] language = content_display_data["language"] mimetype = content_display_data["mimetype"] query_params = {} if path: filename = path_info[-1]["name"] query_params["filename"] = path_info[-1]["name"] revision_data["filename"] = filename top_right_link = { "url": reverse( "browse-content-raw", url_args={"query_string": query_string}, query_params=query_params, ), "icon": swh_object_icons["content"], "text": "Raw File", } swh_objects.append({"type": "content", "id": file_info["target"]}) error_code = content_data["error_code"] error_message = content_data["error_message"] error_description = content_data["error_description"] else: for d in dirs: if d["type"] == "rev": d["url"] = reverse( "browse-revision", url_args={"sha1_git": d["target"]} ) else: query_params["path"] = path + d["name"] d["url"] = reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) for f in files: query_params["path"] = path + f["name"] f["url"] = reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) if f["length"] is not None: f["length"] = filesizeformat(f["length"]) if f["name"].lower().startswith("readme"): readmes[f["name"]] = f["checksums"]["sha1"] readme_name, readme_url, readme_html = get_readme_to_display(readmes) top_right_link = { "url": get_revision_log_url(sha1_git, snapshot_context), "icon": swh_object_icons["revisions history"], "text": "History", } vault_cooking["directory_context"] = True vault_cooking["directory_id"] = dir_id swh_objects.append({"type": "directory", "id": dir_id}) diff_revision_url = reverse( "diff-revision", url_args={"sha1_git": sha1_git}, query_params={ "origin": origin_url, "timestamp": timestamp, "visit_id": visit_id, }, ) if snapshot_id: swh_objects.append({"type": "snapshot", "id": snapshot_id}) swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context) heading = "Revision - %s - %s" % ( sha1_git[:7], textwrap.shorten(message_lines[0], width=70), ) if 
snapshot_context: context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading += " - %s" % context_found return render( request, "browse/revision.html", { "heading": heading, "swh_object_id": swh_ids[0]["swh_id"], "swh_object_name": "Revision", "swh_object_metadata": revision_data, "message_header": message_lines[0], "message_body": "\n".join(message_lines[1:]), "parents": parents, "snapshot_context": snapshot_context, "dirs": dirs, "files": files, "content": content, "content_size": content_size, "max_content_size": content_display_max_size, "mimetype": mimetype, "language": language, "readme_name": readme_name, "readme_url": readme_url, "readme_html": readme_html, "breadcrumbs": breadcrumbs, "top_right_link": top_right_link, "vault_cooking": vault_cooking, "diff_revision_url": diff_revision_url, "show_actions_menu": True, "swh_ids": swh_ids, "error_code": error_code, "error_message": error_message, "error_description": error_description, }, status=error_code, ) diff --git a/swh/web/misc/badges.py b/swh/web/misc/badges.py index 23947cee..9e7ba859 100644 --- a/swh/web/misc/badges.py +++ b/swh/web/misc/badges.py @@ -1,167 +1,169 @@ -# Copyright (C) 2019 The Software Heritage developers +# Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from base64 import b64encode from typing import cast, Optional from django.conf.urls import url from django.contrib.staticfiles import finders from django.http import HttpResponse, HttpRequest from pybadges import badge from swh.model.exceptions import ValidationError from swh.model.identifiers import ( persistent_identifier, parse_persistent_identifier, CONTENT, DIRECTORY, ORIGIN, RELEASE, REVISION, SNAPSHOT, ) from swh.web.common import service from swh.web.common.exc import BadInputExc, NotFoundExc from swh.web.common.identifiers import resolve_swh_persistent_id from swh.web.common.utils import reverse _orange = "#f36a24" _blue = "#0172b2" _red = "#cd5741" _swh_logo_data = None _badge_config = { CONTENT: {"color": _blue, "title": "Archived source file",}, DIRECTORY: {"color": _blue, "title": "Archived source tree",}, ORIGIN: {"color": _orange, "title": "Archived software repository",}, RELEASE: {"color": _blue, "title": "Archived software release",}, REVISION: {"color": _blue, "title": "Archived commit",}, SNAPSHOT: {"color": _blue, "title": "Archived software repository snapshot",}, "error": {"color": _red, "title": "An error occurred when generating the badge"}, } def _get_logo_data() -> str: """ Get data-URI for Software Heritage SVG logo to embed it in the generated badges. """ global _swh_logo_data if _swh_logo_data is None: swh_logo_path = cast(str, finders.find("img/swh-logo-white.svg")) with open(swh_logo_path, "rb") as swh_logo_file: _swh_logo_data = "data:image/svg+xml;base64,%s" % b64encode( swh_logo_file.read() ).decode("ascii") return _swh_logo_data def _swh_badge( request: HttpRequest, object_type: str, object_id: str, object_pid: Optional[str] = "", ) -> HttpResponse: """ Generate a Software Heritage badge for a given object type and id. 
Args: request: input http request object_type: The type of swh object to generate a badge for, either *content*, *directory*, *revision*, *release*, *origin* or *snapshot* object_id: The id of the swh object, either an url for origin type or a *sha1* for other object types object_pid: If provided, the object persistent identifier will not be recomputed Returns: HTTP response with content type *image/svg+xml* containing the SVG badge data. If the provided parameters are invalid, HTTP 400 status code will be returned. If the object can not be found in the archive, HTTP 404 status code will be returned. """ left_text = "error" whole_link = None try: if object_type == ORIGIN: service.lookup_origin({"url": object_id}) right_text = "repository" - whole_link = reverse("browse-origin", url_args={"origin_url": object_id}) + whole_link = reverse( + "browse-origin", query_params={"origin_url": object_id} + ) else: # when pid is provided, object type and id will be parsed # from it if object_pid: parsed_pid = parse_persistent_identifier(object_pid) object_type = parsed_pid.object_type object_id = parsed_pid.object_id swh_object = service.lookup_object(object_type, object_id) if object_pid: right_text = object_pid else: right_text = persistent_identifier(object_type, object_id) whole_link = resolve_swh_persistent_id(right_text)["browse_url"] # remove pid metadata if any for badge text if object_pid: right_text = right_text.split(";")[0] # use release name for badge text if object_type == RELEASE: right_text = "release %s" % swh_object["name"] left_text = "archived" except (BadInputExc, ValidationError): right_text = f'invalid {object_type if object_type else "object"} id' object_type = "error" except NotFoundExc: right_text = f'{object_type if object_type else "object"} not found' object_type = "error" badge_data = badge( left_text=left_text, right_text=right_text, right_color=_badge_config[object_type]["color"], whole_link=request.build_absolute_uri(whole_link), whole_title=_badge_config[object_type]["title"], logo=_get_logo_data(), embed_logo=True, ) return HttpResponse(badge_data, content_type="image/svg+xml") def _swh_badge_pid(request: HttpRequest, object_pid: str) -> HttpResponse: """ Generate a Software Heritage badge for a given object persistent identifier. Args: request (django.http.HttpRequest): input http request object_pid (str): A swh object persistent identifier Returns: django.http.HttpResponse: An http response with content type *image/svg+xml* containing the SVG badge data. If any error occurs, a status code of 400 will be returned. 
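# Illustrative sketch (not part of the diff): resolving the two badge endpoints
# registered in urlpatterns just below; the origin URL and persistent
# identifier are hypothetical. The origin badge now links to the
# query-parameter based /browse/origin/?origin_url=... URL built above.
from swh.web.common.utils import reverse

origin_badge_url = reverse(
    "swh-badge",
    url_args={"object_type": "origin", "object_id": "https://example.org/repo.git"},
)
pid_badge_url = reverse(
    "swh-badge-pid",
    url_args={"object_pid": "swh:1:rev:0123456789abcdef0123456789abcdef01234567"},
)
# Both endpoints answer with image/svg+xml badge data.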
""" return _swh_badge(request, "", "", object_pid) urlpatterns = [ url( r"^badge/(?P[a-z]+)/(?P.+)/$", _swh_badge, name="swh-badge", ), url( r"^badge/(?Pswh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$", _swh_badge_pid, name="swh-badge-pid", ), ] diff --git a/swh/web/templates/browse/browse.html b/swh/web/templates/browse/browse.html index f0a864b8..4dafe49f 100644 --- a/swh/web/templates/browse/browse.html +++ b/swh/web/templates/browse/browse.html @@ -1,63 +1,63 @@ {% extends "./layout.html" %} {% comment %} -Copyright (C) 2017-2018 The Software Heritage developers +Copyright (C) 2017-2020 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %} {% load swh_templatetags %} {% block title %}{{ heading }} – Software Heritage archive{% endblock %} {% block navbar-content %} {% if snapshot_context %}

Browse archived {{ swh_object_name.lower }} {% if snapshot_context.origin_info %} for origin - - {{ snapshot_context.origin_info.url }} + + {% url 'browse-origin' %}?origin_url={{ snapshot_context.origin_info.url }} {% if snapshot_context.origin_info.url|slice:"0:4" == "http" %} {% endif %} {% else %} for snapshot {{ snapshot_context.snapshot_swhid }} {% endif %}

{% else %}

Browse archived {{ swh_object_name.lower }} {{ swh_object_id }}

{% endif %} {% endblock %} {% block browse-content %} {% block swh-browse-before-content %} {% if snapshot_context %} {% include "includes/snapshot-context.html" %} {% endif %} {% endblock %} {% block swh-browse-content %}{% endblock %} {% block swh-browse-after-content %}{% endblock %} {% endblock %} diff --git a/swh/web/templates/browse/help.html b/swh/web/templates/browse/help.html index 9f4ac175..09b35738 100644 --- a/swh/web/templates/browse/help.html +++ b/swh/web/templates/browse/help.html @@ -1,189 +1,189 @@ {% extends "./layout.html" %} {% comment %} -Copyright (C) 2017-2018 The Software Heritage developers +Copyright (C) 2017-2020 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %} {% block navbar-content %}

How to browse the archive ?

{% endblock %} {% block browse-content %}

Overview

This web application aims to provide HTML views to easily navigate in the Software Heritage archive. This is an ongoing development and new features and improvements will be progressively added over time.

URI scheme

The current URI scheme of that web application is described below and depends on the type of Software Heritage object to browse. Its exhaustive documentation can be consulted from the official Software Heritage development documentation

Context-independent browsing

Context-independent URLs provide information about objects (e.g., revisions, directories, contents, persons, …), independently of the contexts where they have been found (e.g., specific software origins, branches, commits, …).

Below are some examples of endpoints used to just render the corresponding information for user consumption:

Where hyperlinks are created when browsing these kind of endpoints, they always point to other context-independent browsing URLs.

Context-dependent browsing

Context-dependent URLs provide information about objects, limited to specific contexts where the objects have been found.

Currently, browsing the Software Heritage objects in the context of an origin is available. Below are some examples of such endpoints:

Search software origins to browse

In order to facilitate the browsing of the archive and generate relevant entry points to it, a search interface is available. Currently, it enables to search software origins from the URLs they were retrieved from. More search criteria will be added in the future. {% endblock %} diff --git a/swh/web/tests/browse/test_snapshot_context.py b/swh/web/tests/browse/test_snapshot_context.py index 960490ba..4b10974c 100644 --- a/swh/web/tests/browse/test_snapshot_context.py +++ b/swh/web/tests/browse/test_snapshot_context.py @@ -1,341 +1,329 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import random from hypothesis import given from swh.web.browse.snapshot_context import ( get_origin_visit_snapshot, get_snapshot_content, get_snapshot_context, ) from swh.web.common.identifiers import get_swh_persistent_id from swh.web.common.origin_visits import get_origin_visit, get_origin_visits from swh.web.common.typing import ( SnapshotBranchInfo, SnapshotReleaseInfo, SnapshotContext, ) from swh.web.common.utils import format_utc_iso_date, reverse from swh.web.tests.strategies import origin_with_multiple_visits, snapshot @given(origin_with_multiple_visits()) def test_get_origin_visit_snapshot_simple(archive_data, origin): visits = archive_data.origin_visit_get(origin["url"]) for visit in visits: snapshot = archive_data.snapshot_get(visit["snapshot"]) branches = [] releases = [] def _process_branch_data(branch, branch_data): if branch_data["target_type"] == "revision": rev_data = archive_data.revision_get(branch_data["target"]) branches.append( SnapshotBranchInfo( name=branch, revision=branch_data["target"], directory=rev_data["directory"], date=format_utc_iso_date(rev_data["date"]), message=rev_data["message"], url=None, ) ) elif branch_data["target_type"] == "release": rel_data = archive_data.release_get(branch_data["target"]) rev_data = archive_data.revision_get(rel_data["target"]) releases.append( SnapshotReleaseInfo( name=rel_data["name"], branch_name=branch, date=format_utc_iso_date(rel_data["date"]), id=rel_data["id"], message=rel_data["message"], target_type=rel_data["target_type"], target=rel_data["target"], directory=rev_data["directory"], url=None, ) ) for branch in sorted(snapshot["branches"].keys()): branch_data = snapshot["branches"][branch] if branch_data["target_type"] == "alias": target_data = snapshot["branches"][branch_data["target"]] _process_branch_data(branch, target_data) else: _process_branch_data(branch, branch_data) assert branches and releases, "Incomplete test data." 
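# Illustrative sketch (not part of the diff) of the property exercised by the
# tests below: with the new URL scheme, get_snapshot_context() keeps the origin
# URL in query_params and leaves url_args empty. The origin URL is hypothetical
# and assumed to be archived.
from swh.web.browse.snapshot_context import get_snapshot_context  # as imported above

ctx = get_snapshot_context(origin_url="https://example.org/repo.git")
assert ctx["url_args"] == {}
assert ctx["query_params"]["origin_url"] == "https://example.org/repo.git"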
origin_visit_branches = get_origin_visit_snapshot( origin, visit_id=visit["visit"] ) assert origin_visit_branches == (branches, releases) @given(snapshot()) def test_get_snapshot_context_no_origin(archive_data, snapshot): for browse_context, kwargs in ( ("content", {"snapshot_id": snapshot, "path": "/some/path"}), ("directory", {"snapshot_id": snapshot}), ("log", {"snapshot_id": snapshot}), ): url_args = {"snapshot_id": snapshot} query_params = dict(kwargs) query_params.pop("snapshot_id") snapshot_context = get_snapshot_context(**kwargs, browse_context=browse_context) branches, releases = get_snapshot_content(snapshot) releases = list(reversed(releases)) revision_id = None root_directory = None for branch in branches: if branch["name"] == "HEAD": revision_id = branch["revision"] root_directory = branch["directory"] branch["url"] = reverse( f"browse-snapshot-{browse_context}", url_args=url_args, query_params={"branch": branch["name"], **query_params}, ) for release in releases: release["url"] = reverse( f"browse-snapshot-{browse_context}", url_args=url_args, query_params={"release": release["name"], **query_params}, ) branches_url = reverse("browse-snapshot-branches", url_args=url_args) releases_url = reverse("browse-snapshot-releases", url_args=url_args) is_empty = not branches and not releases snapshot_swhid = get_swh_persistent_id("snapshot", snapshot) snapshot_sizes = {"revision": len(branches), "release": len(releases)} expected = SnapshotContext( branch="HEAD", branches=branches, branches_url=branches_url, is_empty=is_empty, origin_info=None, origin_visits_url=None, release=None, release_id=None, query_params=query_params, releases=releases, releases_url=releases_url, revision_id=revision_id, root_directory=root_directory, snapshot_id=snapshot, snapshot_sizes=snapshot_sizes, snapshot_swhid=snapshot_swhid, url_args=url_args, visit_info=None, ) assert snapshot_context == expected _check_branch_release_revision_parameters( archive_data, expected, browse_context, kwargs, branches, releases ) @given(origin_with_multiple_visits()) def test_get_snapshot_context_with_origin(archive_data, origin): origin_visits = get_origin_visits(origin) timestamp = format_utc_iso_date(origin_visits[0]["date"], "%Y-%m-%dT%H:%M:%SZ") visit_id = origin_visits[1]["visit"] for browse_context, kwargs in ( ("content", {"origin_url": origin["url"], "path": "/some/path"}), ("directory", {"origin_url": origin["url"]}), ("log", {"origin_url": origin["url"]}), ("directory", {"origin_url": origin["url"], "timestamp": timestamp,},), ("directory", {"origin_url": origin["url"], "visit_id": visit_id,},), ): visit_id = kwargs["visit_id"] if "visit_id" in kwargs else None visit_ts = kwargs["timestamp"] if "timestamp" in kwargs else None visit_info = get_origin_visit( {"url": kwargs["origin_url"]}, visit_ts=visit_ts, visit_id=visit_id ) snapshot = visit_info["snapshot"] snapshot_context = get_snapshot_context(**kwargs, browse_context=browse_context) - url_args = dict(kwargs) - url_args.pop("path", None) - url_args.pop("timestamp", None) - url_args.pop("visit_id", None) - query_params = dict(kwargs) - query_params.pop("origin_url") branches, releases = get_snapshot_content(snapshot) releases = list(reversed(releases)) revision_id = None root_directory = None for branch in branches: if branch["name"] == "HEAD": revision_id = branch["revision"] root_directory = branch["directory"] branch["url"] = reverse( f"browse-origin-{browse_context}", - url_args=url_args, query_params={"branch": branch["name"], **query_params}, ) for release 
in releases: release["url"] = reverse( f"browse-origin-{browse_context}", - url_args=url_args, query_params={"release": release["name"], **query_params}, ) query_params.pop("path", None) - branches_url = reverse( - "browse-origin-branches", url_args=url_args, query_params=query_params - ) - releases_url = reverse( - "browse-origin-releases", url_args=url_args, query_params=query_params - ) + branches_url = reverse("browse-origin-branches", query_params=query_params) + releases_url = reverse("browse-origin-releases", query_params=query_params) origin_visits_url = reverse( - "browse-origin-visits", url_args={"origin_url": kwargs["origin_url"]} + "browse-origin-visits", query_params={"origin_url": kwargs["origin_url"]} ) is_empty = not branches and not releases snapshot_swhid = get_swh_persistent_id("snapshot", snapshot) snapshot_sizes = {"revision": len(branches), "release": len(releases)} visit_info["url"] = reverse( - "browse-origin-directory", url_args=url_args, query_params=query_params + "browse-origin-directory", query_params=query_params ) visit_info["formatted_date"] = format_utc_iso_date(visit_info["date"]) if "path" in kwargs: query_params["path"] = kwargs["path"] expected = SnapshotContext( branch="HEAD", branches=branches, branches_url=branches_url, is_empty=is_empty, origin_info={"url": origin["url"]}, origin_visits_url=origin_visits_url, release=None, release_id=None, query_params=query_params, releases=releases, releases_url=releases_url, revision_id=revision_id, root_directory=root_directory, snapshot_id=snapshot, snapshot_sizes=snapshot_sizes, snapshot_swhid=snapshot_swhid, - url_args=url_args, + url_args={}, visit_info=visit_info, ) assert snapshot_context == expected _check_branch_release_revision_parameters( archive_data, expected, browse_context, kwargs, branches, releases ) def _check_branch_release_revision_parameters( archive_data, base_expected_context, browse_context, kwargs, branches, releases, ): branch = random.choice(branches) snapshot_context = get_snapshot_context( **kwargs, browse_context=browse_context, branch_name=branch["name"] ) url_args = dict(kwargs) url_args.pop("path", None) url_args.pop("timestamp", None) url_args.pop("visit_id", None) + url_args.pop("origin_url", None) query_params = dict(kwargs) - query_params.pop("origin_url", None) query_params.pop("snapshot_id", None) expected_branch = dict(base_expected_context) expected_branch["branch"] = branch["name"] expected_branch["revision_id"] = branch["revision"] expected_branch["root_directory"] = branch["directory"] expected_branch["query_params"] = {"branch": branch["name"], **query_params} assert snapshot_context == expected_branch if releases: release = random.choice(releases) snapshot_context = get_snapshot_context( **kwargs, browse_context=browse_context, release_name=release["name"] ) expected_release = dict(base_expected_context) expected_release["branch"] = None expected_release["release"] = release["name"] expected_release["release_id"] = release["id"] if release["target_type"] == "revision": expected_release["revision_id"] = release["target"] expected_release["root_directory"] = release["directory"] expected_release["query_params"] = {"release": release["name"], **query_params} assert snapshot_context == expected_release revision_log = archive_data.revision_log(branch["revision"]) revision = revision_log[-1] snapshot_context = get_snapshot_context( **kwargs, browse_context=browse_context, revision_id=revision["id"] ) if "origin_url" in kwargs: view_name = f"browse-origin-{browse_context}" 
else: view_name = f"browse-snapshot-{browse_context}" kwargs.pop("visit_id", None) revision_browse_url = reverse( view_name, url_args=url_args, query_params={"revision": revision["id"], **query_params}, ) branches.append( SnapshotBranchInfo( name=revision["id"], revision=revision["id"], directory=revision["directory"], date=revision["date"], message=revision["message"], url=revision_browse_url, ) ) expected_revision = dict(base_expected_context) expected_revision["branch"] = revision["id"] expected_revision["branches"] = branches expected_revision["revision_id"] = revision["id"] expected_revision["root_directory"] = revision["directory"] expected_revision["query_params"] = {"revision": revision["id"], **query_params} assert snapshot_context == expected_revision diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py index 6c3b2b98..20a6a7a6 100644 --- a/swh/web/tests/browse/views/test_origin.py +++ b/swh/web/tests/browse/views/test_origin.py @@ -1,1122 +1,1110 @@ # Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime import random import re import string from django.utils.html import escape from hypothesis import given from swh.model.hashutil import hash_to_bytes from swh.model.model import ( Snapshot, SnapshotBranch, TargetType, ) from swh.web.browse.snapshot_context import process_snapshot_branches from swh.web.common.exc import NotFoundExc from swh.web.common.identifiers import get_swh_persistent_id from swh.web.common.utils import ( reverse, gen_path_info, format_utc_iso_date, parse_timestamp, ) from swh.web.config import get_config from swh.web.tests.data import get_content, random_sha1 from swh.web.tests.django_asserts import assert_contains, assert_template_used from swh.web.tests.strategies import ( origin, origin_with_multiple_visits, new_origin, new_snapshot, visit_dates, revisions, origin_with_releases, release as existing_release, unknown_revision, ) @given(origin_with_multiple_visits()) def test_origin_visits_browse(client, archive_data, origin): - url = reverse("browse-origin-visits", url_args={"origin_url": origin["url"]}) + url = reverse("browse-origin-visits", query_params={"origin_url": origin["url"]}) resp = client.get(url) assert resp.status_code == 200 assert_template_used(resp, "browse/origin-visits.html") - url = reverse("browse-origin-visits", url_args={"origin_url": origin["url"]}) + url = reverse("browse-origin-visits", query_params={"origin_url": origin["url"]}) resp = client.get(url) assert resp.status_code == 200 assert_template_used(resp, "browse/origin-visits.html") visits = archive_data.origin_visit_get(origin["url"]) for v in visits: vdate = format_utc_iso_date(v["date"], "%Y-%m-%dT%H:%M:%SZ") browse_dir_url = reverse( "browse-origin-directory", - url_args={"origin_url": origin["url"]}, - query_params={"timestamp": vdate}, + query_params={"origin_url": origin["url"], "timestamp": vdate}, ) assert_contains(resp, browse_dir_url) @given(origin_with_multiple_visits()) def test_origin_content_view(client, archive_data, origin): origin_visits = archive_data.origin_visit_get(origin["url"]) def _get_archive_data(visit_idx): snapshot = archive_data.snapshot_get(origin_visits[visit_idx]["snapshot"]) head_rev_id = archive_data.snapshot_get_head(snapshot) head_rev = 
archive_data.revision_get(head_rev_id) dir_content = archive_data.directory_ls(head_rev["directory"]) dir_files = [e for e in dir_content if e["type"] == "file"] dir_file = random.choice(dir_files) branches, releases = process_snapshot_branches(snapshot) return { "branches": branches, "releases": releases, "root_dir_sha1": head_rev["directory"], "content": get_content(dir_file["checksums"]["sha1"]), "visit": origin_visits[visit_idx], } tdata = _get_archive_data(-1) _origin_content_view_test_helper( client, origin, origin_visits, tdata["branches"], tdata["releases"], tdata["root_dir_sha1"], tdata["content"], ) _origin_content_view_test_helper( client, origin, origin_visits, tdata["branches"], tdata["releases"], tdata["root_dir_sha1"], tdata["content"], timestamp=tdata["visit"]["date"], ) visit_unix_ts = parse_timestamp(tdata["visit"]["date"]).timestamp() visit_unix_ts = int(visit_unix_ts) _origin_content_view_test_helper( client, origin, origin_visits, tdata["branches"], tdata["releases"], tdata["root_dir_sha1"], tdata["content"], timestamp=visit_unix_ts, ) tdata = _get_archive_data(0) _origin_content_view_test_helper( client, origin, origin_visits, tdata["branches"], tdata["releases"], tdata["root_dir_sha1"], tdata["content"], visit_id=tdata["visit"]["visit"], ) @given(origin()) def test_origin_root_directory_view(client, archive_data, origin): origin_visits = archive_data.origin_visit_get(origin["url"]) visit = origin_visits[-1] snapshot = archive_data.snapshot_get(visit["snapshot"]) head_rev_id = archive_data.snapshot_get_head(snapshot) head_rev = archive_data.revision_get(head_rev_id) root_dir_sha1 = head_rev["directory"] dir_content = archive_data.directory_ls(root_dir_sha1) branches, releases = process_snapshot_branches(snapshot) visit_unix_ts = parse_timestamp(visit["date"]).timestamp() visit_unix_ts = int(visit_unix_ts) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content, visit_id=visit["visit"], ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content, timestamp=visit_unix_ts, ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content, timestamp=visit["date"], ) origin = dict(origin) del origin["type"] _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content, visit_id=visit["visit"], ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content, timestamp=visit_unix_ts, ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, dir_content, timestamp=visit["date"], ) @given(origin()) def test_origin_sub_directory_view(client, archive_data, origin): origin_visits = archive_data.origin_visit_get(origin["url"]) visit = origin_visits[-1] snapshot = archive_data.snapshot_get(visit["snapshot"]) head_rev_id = archive_data.snapshot_get_head(snapshot) head_rev = archive_data.revision_get(head_rev_id) root_dir_sha1 = head_rev["directory"] subdirs = [ e for e in archive_data.directory_ls(root_dir_sha1) if e["type"] == "dir" ] branches, releases = process_snapshot_branches(snapshot) visit_unix_ts = 
parse_timestamp(visit["date"]).timestamp() visit_unix_ts = int(visit_unix_ts) if len(subdirs) == 0: return subdir = random.choice(subdirs) subdir_content = archive_data.directory_ls(subdir["target"]) subdir_path = subdir["name"] _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, visit_id=visit["visit"], ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, timestamp=visit_unix_ts, ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, timestamp=visit["date"], ) origin = dict(origin) del origin["type"] _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, visit_id=visit["visit"], ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, timestamp=visit_unix_ts, ) _origin_directory_view_test_helper( client, origin, origin_visits, branches, releases, root_dir_sha1, subdir_content, path=subdir_path, timestamp=visit["date"], ) @given(origin()) def test_origin_branches(client, archive_data, origin): origin_visits = archive_data.origin_visit_get(origin["url"]) visit = origin_visits[-1] snapshot = archive_data.snapshot_get(visit["snapshot"]) snapshot_content = process_snapshot_branches(snapshot) _origin_branches_test_helper(client, origin, snapshot_content) origin = dict(origin) origin["type"] = None _origin_branches_test_helper(client, origin, snapshot_content) @given(origin()) def test_origin_releases(client, archive_data, origin): origin_visits = archive_data.origin_visit_get(origin["url"]) visit = origin_visits[-1] snapshot = archive_data.snapshot_get(visit["snapshot"]) snapshot_content = process_snapshot_branches(snapshot) _origin_releases_test_helper(client, origin, snapshot_content) origin = dict(origin) origin["type"] = None _origin_releases_test_helper(client, origin, snapshot_content) @given( new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(), revisions(min_size=3, max_size=3), ) def test_origin_snapshot_null_branch( client, archive_data, new_origin, new_snapshot, visit_dates, revisions ): snp_dict = new_snapshot.to_dict() new_origin = archive_data.origin_add([new_origin])[0] for i, branch in enumerate(snp_dict["branches"].keys()): if i == 0: snp_dict["branches"][branch] = None else: snp_dict["branches"][branch] = { "target_type": "revision", "target": hash_to_bytes(revisions[i - 1]), } archive_data.snapshot_add([Snapshot.from_dict(snp_dict)]) visit = archive_data.origin_visit_add(new_origin["url"], visit_dates[0], type="git") archive_data.origin_visit_update( new_origin["url"], visit.visit, status="partial", snapshot=snp_dict["id"] ) - url = reverse("browse-origin-directory", url_args={"origin_url": new_origin["url"]}) + url = reverse( + "browse-origin-directory", query_params={"origin_url": new_origin["url"]} + ) rv = client.get(url) assert rv.status_code == 200 @given( new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(), revisions(min_size=4, max_size=4), ) def 
test_origin_snapshot_invalid_branch( client, archive_data, new_origin, new_snapshot, visit_dates, revisions ): snp_dict = new_snapshot.to_dict() new_origin = archive_data.origin_add([new_origin])[0] for i, branch in enumerate(snp_dict["branches"].keys()): snp_dict["branches"][branch] = { "target_type": "revision", "target": hash_to_bytes(revisions[i]), } archive_data.snapshot_add([Snapshot.from_dict(snp_dict)]) visit = archive_data.origin_visit_add(new_origin["url"], visit_dates[0], type="git") archive_data.origin_visit_update( new_origin["url"], visit.visit, status="full", snapshot=snp_dict["id"] ) url = reverse( "browse-origin-directory", - url_args={"origin_url": new_origin["url"]}, - query_params={"branch": "invalid_branch"}, + query_params={"origin_url": new_origin["url"], "branch": "invalid_branch"}, ) rv = client.get(url) assert rv.status_code == 404 @given(new_origin()) def test_browse_visits_origin_not_found(client, new_origin): - url = reverse("browse-origin-visits", url_args={"origin_url": new_origin.url}) + url = reverse("browse-origin-visits", query_params={"origin_url": new_origin.url}) resp = client.get(url) assert resp.status_code == 404 assert_template_used(resp, "error.html") assert_contains( resp, f"Origin with url {new_origin.url} not found", status_code=404 ) @given(origin()) def test_browse_origin_directory_no_visit(client, mocker, origin): mock_get_origin_visits = mocker.patch( "swh.web.common.origin_visits.get_origin_visits" ) mock_get_origin_visits.return_value = [] - url = reverse("browse-origin-directory", url_args={"origin_url": origin["url"]}) + url = reverse("browse-origin-directory", query_params={"origin_url": origin["url"]}) resp = client.get(url) assert resp.status_code == 404 assert_template_used(resp, "error.html") assert_contains(resp, "No visit", status_code=404) assert mock_get_origin_visits.called @given(origin()) def test_browse_origin_directory_unknown_visit(client, mocker, origin): mock_get_origin_visits = mocker.patch( "swh.web.common.origin_visits.get_origin_visits" ) mock_get_origin_visits.return_value = [{"visit": 1}] url = reverse( "browse-origin-directory", - url_args={"origin_url": origin["url"]}, - query_params={"visit_id": 2}, + query_params={"origin_url": origin["url"], "visit_id": 2}, ) resp = client.get(url) assert resp.status_code == 404 assert_template_used(resp, "error.html") assert re.search("Visit.*not found", resp.content.decode("utf-8")) assert mock_get_origin_visits.called @given(origin()) def test_browse_origin_directory_not_found(client, origin): url = reverse( "browse-origin-directory", - url_args={"origin_url": origin["url"]}, - query_params={"path": "/invalid/dir/path/"}, + query_params={"origin_url": origin["url"], "path": "/invalid/dir/path/"}, ) resp = client.get(url) assert resp.status_code == 404 assert_template_used(resp, "error.html") assert re.search("Directory.*not found", resp.content.decode("utf-8")) @given(origin()) def test_browse_origin_content_no_visit(client, mocker, origin): mock_get_origin_visits = mocker.patch( "swh.web.common.origin_visits.get_origin_visits" ) mock_get_origin_visits.return_value = [] url = reverse( "browse-origin-content", - url_args={"origin_url": origin["url"]}, - query_params={"path": "foo"}, + query_params={"origin_url": origin["url"], "path": "foo"}, ) resp = client.get(url) assert resp.status_code == 404 assert_template_used(resp, "error.html") assert_contains(resp, "No visit", status_code=404) assert mock_get_origin_visits.called @given(origin()) def 
def test_browse_origin_content_unknown_visit(client, mocker, origin):
    mock_get_origin_visits = mocker.patch(
        "swh.web.common.origin_visits.get_origin_visits"
    )
    mock_get_origin_visits.return_value = [{"visit": 1}]

    url = reverse(
        "browse-origin-content",
-        url_args={"origin_url": origin["url"]},
-        query_params={"path": "foo", "visit_id": 2},
+        query_params={"origin_url": origin["url"], "path": "foo", "visit_id": 2},
    )

    resp = client.get(url)
    assert resp.status_code == 404
    assert_template_used(resp, "error.html")
    assert re.search("Visit.*not found", resp.content.decode("utf-8"))
    assert mock_get_origin_visits.called


@given(origin())
def test_browse_origin_content_directory_empty_snapshot(client, mocker, origin):
    mock_snapshot_service = mocker.patch("swh.web.browse.snapshot_context.service")
    mock_get_origin_visit_snapshot = mocker.patch(
        "swh.web.browse.snapshot_context.get_origin_visit_snapshot"
    )
    mock_get_origin_visit_snapshot.return_value = ([], [])
    mock_snapshot_service.lookup_origin.return_value = origin
    mock_snapshot_service.lookup_snapshot_sizes.return_value = {
        "revision": 0,
        "release": 0,
    }

    for browse_context in ("content", "directory"):
        url = reverse(
            f"browse-origin-{browse_context}",
-            url_args={"origin_url": origin["url"]},
-            query_params={"path": "baz"},
+            query_params={"origin_url": origin["url"], "path": "baz"},
        )

        resp = client.get(url)
        assert resp.status_code == 200
        assert_template_used(resp, f"browse/{browse_context}.html")
        assert re.search("snapshot.*is empty", resp.content.decode("utf-8"))

        assert mock_get_origin_visit_snapshot.called
        assert mock_snapshot_service.lookup_origin.called
        assert mock_snapshot_service.lookup_snapshot_sizes.called


@given(origin())
def test_browse_origin_content_not_found(client, origin):
    url = reverse(
        "browse-origin-content",
-        url_args={"origin_url": origin["url"]},
-        query_params={"path": "/invalid/file/path"},
+        query_params={"origin_url": origin["url"], "path": "/invalid/file/path"},
    )

    resp = client.get(url)
    assert resp.status_code == 404
    assert_template_used(resp, "error.html")
    assert re.search("Directory entry.*not found", resp.content.decode("utf-8"))


@given(origin())
def test_browse_directory_snapshot_not_found(client, mocker, origin):
    mock_get_snapshot_context = mocker.patch(
        "swh.web.browse.snapshot_context.get_snapshot_context"
    )
    mock_get_snapshot_context.side_effect = NotFoundExc("Snapshot not found")
-    url = reverse("browse-origin-directory", url_args={"origin_url": origin["url"]})
+    url = reverse("browse-origin-directory", query_params={"origin_url": origin["url"]})
    resp = client.get(url)
    assert resp.status_code == 404
    assert_template_used(resp, "error.html")
    assert_contains(resp, "Snapshot not found", status_code=404)
    assert mock_get_snapshot_context.called


@given(origin())
def test_origin_empty_snapshot(client, mocker, origin):
    mock_service = mocker.patch("swh.web.browse.snapshot_context.service")
    mock_get_origin_visit_snapshot = mocker.patch(
        "swh.web.browse.snapshot_context.get_origin_visit_snapshot"
    )
    mock_get_origin_visit_snapshot.return_value = ([], [])
    mock_service.lookup_snapshot_sizes.return_value = {
        "revision": 0,
        "release": 0,
    }
    mock_service.lookup_origin.return_value = origin
-    url = reverse("browse-origin-directory", url_args={"origin_url": origin["url"]})
+    url = reverse("browse-origin-directory", query_params={"origin_url": origin["url"]})
    resp = client.get(url)
    assert resp.status_code == 200
    assert_template_used(resp, "browse/directory.html")
    resp_content = resp.content.decode("utf-8")
    assert re.search("snapshot.*is empty", resp_content)
empty", resp_content) assert not re.search("swh-tr-link", resp_content) assert mock_get_origin_visit_snapshot.called assert mock_service.lookup_snapshot_sizes.called @given(origin_with_releases()) def test_origin_release_browse(client, archive_data, origin): # for swh.web.browse.snapshot_context.get_snapshot_content to only return one branch config = get_config() snapshot_max_size = int(config["snapshot_content_max_size"]) config["snapshot_content_max_size"] = 1 try: snapshot = archive_data.snapshot_get_latest(origin["url"]) release = [ b for b in snapshot["branches"].values() if b["target_type"] == "release" ][-1] release_data = archive_data.release_get(release["target"]) url = reverse( "browse-origin-directory", - url_args={"origin_url": origin["url"]}, - query_params={"release": release_data["name"]}, + query_params={"origin_url": origin["url"], "release": release_data["name"]}, ) resp = client.get(url) assert resp.status_code == 200 assert_contains(resp, release_data["name"]) assert_contains(resp, release["target"]) finally: config["snapshot_content_max_size"] = snapshot_max_size @given(origin_with_releases()) def test_origin_release_browse_not_found(client, origin): invalid_release_name = "swh-foo-bar" url = reverse( "browse-origin-directory", - url_args={"origin_url": origin["url"]}, - query_params={"release": invalid_release_name}, + query_params={"origin_url": origin["url"], "release": invalid_release_name}, ) resp = client.get(url) assert resp.status_code == 404 assert re.search( f"Release {invalid_release_name}.*not found", resp.content.decode("utf-8") ) @given(new_origin(), unknown_revision()) def test_origin_browse_directory_branch_with_non_resolvable_revision( client, archive_data, new_origin, unknown_revision ): branch_name = "master" snapshot = Snapshot( branches={ branch_name.encode(): SnapshotBranch( target=hash_to_bytes(unknown_revision), target_type=TargetType.REVISION, ) } ) new_origin = archive_data.origin_add([new_origin])[0] archive_data.snapshot_add([snapshot]) visit = archive_data.origin_visit_add(new_origin["url"], datetime.now(), type="git") archive_data.origin_visit_update( new_origin["url"], visit.visit, status="full", snapshot=snapshot.id ) url = reverse( "browse-origin-directory", - url_args={"origin_url": new_origin["url"]}, - query_params={"branch": branch_name}, + query_params={"origin_url": new_origin["url"], "branch": branch_name}, ) resp = client.get(url) assert resp.status_code == 200 assert_contains( resp, f"Revision {unknown_revision } could not be found in the archive." 


@given(origin())
def test_origin_content_no_path(client, origin):
-    url = reverse("browse-origin-content", url_args={"origin_url": origin["url"]})
+    url = reverse("browse-origin-content", query_params={"origin_url": origin["url"]})
    resp = client.get(url)
    assert resp.status_code == 400
    assert_contains(
        resp, "The path of a content must be given as query parameter.", status_code=400
    )


+def test_origin_views_no_url_query_parameter(client):
+    for browse_context in (
+        "content",
+        "directory",
+        "log",
+        "branches",
+        "releases",
+        "visits",
+    ):
+        url = reverse(f"browse-origin-{browse_context}")
+        resp = client.get(url)
+        assert resp.status_code == 400
+        assert_contains(
+            resp, "An origin URL must be provided as query parameter.", status_code=400
+        )
+
+
def _origin_content_view_test_helper(
    client,
    origin_info,
    origin_visits,
    origin_branches,
    origin_releases,
    root_dir_sha1,
    content,
    visit_id=None,
    timestamp=None,
):
    content_path = "/".join(content["path"].split("/")[1:])

-    url_args = {"origin_url": origin_info["url"]}
-
    if not visit_id:
        visit_id = origin_visits[-1]["visit"]

-    query_params = {"path": content_path}
+    query_params = {"origin_url": origin_info["url"], "path": content_path}

    if timestamp:
        query_params["timestamp"] = timestamp

    if visit_id:
        query_params["visit_id"] = visit_id

-    url = reverse("browse-origin-content", url_args=url_args, query_params=query_params)
+    url = reverse("browse-origin-content", query_params=query_params)

    resp = client.get(url)

    assert resp.status_code == 200
    assert_template_used(resp, "browse/content.html")

    assert type(content["data"]) == str
    assert_contains(resp, '<code class="%s">' % content["hljs_language"])
    assert_contains(resp, escape(content["data"]))

    split_path = content_path.split("/")

    filename = split_path[-1]
    path = content_path.replace(filename, "")[:-1]

    path_info = gen_path_info(path)

    del query_params["path"]

    if timestamp:
        query_params["timestamp"] = format_utc_iso_date(
            parse_timestamp(timestamp).isoformat(), "%Y-%m-%dT%H:%M:%SZ"
        )

-    root_dir_url = reverse(
-        "browse-origin-directory", url_args=url_args, query_params=query_params
-    )
+    root_dir_url = reverse("browse-origin-directory", query_params=query_params)

    assert_contains(resp, '<li class="swh-path">', count=len(path_info) + 1)
    assert_contains(resp, '<a href="%s">%s</a>' % (root_dir_url, root_dir_sha1[:7]))

    for p in path_info:
        query_params["path"] = p["path"]
-        dir_url = reverse(
-            "browse-origin-directory", url_args=url_args, query_params=query_params
-        )
+        dir_url = reverse("browse-origin-directory", query_params=query_params)
        assert_contains(resp, '<a href="%s">%s</a>' % (dir_url, p["name"]))

    assert_contains(resp, "<li>%s</li>" % filename)

    query_string = "sha1_git:" + content["sha1_git"]

    url_raw = reverse(
        "browse-content-raw",
        url_args={"query_string": query_string},
        query_params={"filename": filename},
    )
    assert_contains(resp, url_raw)

    if "path" in query_params:
        del query_params["path"]

-    origin_branches_url = reverse(
-        "browse-origin-branches", url_args=url_args, query_params=query_params
-    )
+    origin_branches_url = reverse("browse-origin-branches", query_params=query_params)

    assert_contains(
        resp,
        '<a href="%s">Branches (%s)</a>'
        % (escape(origin_branches_url), len(origin_branches)),
    )

-    origin_releases_url = reverse(
-        "browse-origin-releases", url_args=url_args, query_params=query_params
-    )
+    origin_releases_url = reverse("browse-origin-releases", query_params=query_params)

    assert_contains(
        resp,
        '<a href="%s">Releases (%s)</a>'
        % (escape(origin_releases_url), len(origin_releases)),
    )

    assert_contains(resp, '<li class="swh-branch">', count=len(origin_branches))
    query_params["path"] = content_path

    for branch in origin_branches:
        query_params["branch"] = branch["name"]
        root_dir_branch_url = reverse(
-            "browse-origin-content", url_args=url_args, query_params=query_params
+            "browse-origin-content", query_params=query_params
        )

        assert_contains(resp, '<a href="%s">' % root_dir_branch_url)

    assert_contains(resp, '<li class="swh-release">', count=len(origin_releases))
    query_params["branch"] = None
    for release in origin_releases:
        query_params["release"] = release["name"]
        root_dir_release_url = reverse(
-            "browse-origin-content", url_args=url_args, query_params=query_params
+            "browse-origin-content", query_params=query_params
        )

        assert_contains(resp, '<a href="%s">' % root_dir_release_url)

-    url = reverse("browse-origin-content", url_args=url_args, query_params=query_params)
+    url = reverse("browse-origin-content", query_params=query_params)

    resp = client.get(url)
    assert resp.status_code == 200
    assert_template_used(resp, "browse/content.html")

    swh_cnt_id = get_swh_persistent_id("content", content["sha1_git"])
    swh_cnt_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_cnt_id})
    assert_contains(resp, swh_cnt_id)
    assert_contains(resp, swh_cnt_id_url)

    assert_contains(resp, "swh-take-new-snapshot")


def _origin_directory_view_test_helper(
    client,
    origin_info,
    origin_visits,
    origin_branches,
    origin_releases,
    root_directory_sha1,
    directory_entries,
    visit_id=None,
    timestamp=None,
    path=None,
):
    dirs = [e for e in directory_entries if e["type"] in ("dir", "rev")]
    files = [e for e in directory_entries if e["type"] == "file"]

    if not visit_id:
        visit_id = origin_visits[-1]["visit"]

-    url_args = {"origin_url": origin_info["url"]}
-
-    query_params = {}
+    query_params = {"origin_url": origin_info["url"]}

    if timestamp:
        query_params["timestamp"] = timestamp
    else:
        query_params["visit_id"] = visit_id

    if path:
        query_params["path"] = path

-    url = reverse(
-        "browse-origin-directory", url_args=url_args, query_params=query_params
-    )
+    url = reverse("browse-origin-directory", query_params=query_params)

    resp = client.get(url)

    assert resp.status_code == 200
    assert_template_used(resp, "browse/directory.html")

    assert_contains(resp, '<td class="swh-directory">', count=len(dirs))
    assert_contains(resp, '<td class="swh-content">', count=len(files))

    if timestamp:
        query_params["timestamp"] = format_utc_iso_date(
            parse_timestamp(timestamp).isoformat(), "%Y-%m-%dT%H:%M:%SZ"
        )

    for d in dirs:
        if d["type"] == "rev":
            dir_url = reverse("browse-revision", url_args={"sha1_git": d["target"]})
        else:
            dir_path = d["name"]
            if path:
                dir_path = "%s/%s" % (path, d["name"])
            query_params["path"] = dir_path
-            dir_url = reverse(
-                "browse-origin-directory", url_args=url_args, query_params=query_params,
-            )
+            dir_url = reverse("browse-origin-directory", query_params=query_params,)
        assert_contains(resp, dir_url)

    for f in files:
        file_path = f["name"]
        if path:
            file_path = "%s/%s" % (path, f["name"])
        query_params["path"] = file_path
-        file_url = reverse(
-            "browse-origin-content", url_args=url_args, query_params=query_params
-        )
+        file_url = reverse("browse-origin-content", query_params=query_params)
        assert_contains(resp, file_url)

    if "path" in query_params:
        del query_params["path"]

-    root_dir_branch_url = reverse(
-        "browse-origin-directory", url_args=url_args, query_params=query_params
-    )
+    root_dir_branch_url = reverse("browse-origin-directory", query_params=query_params)

    nb_bc_paths = 1
    if path:
        nb_bc_paths = len(path.split("/")) + 1

    assert_contains(resp, '<li class="swh-path">', count=nb_bc_paths)
    assert_contains(
        resp, '<a href="%s">%s</a>' % (root_dir_branch_url, root_directory_sha1[:7])
    )

-    origin_branches_url = reverse(
-        "browse-origin-branches", url_args=url_args, query_params=query_params
-    )
+    origin_branches_url = reverse("browse-origin-branches", query_params=query_params)

    assert_contains(
        resp,
-        '<a href="%s">Branches (%s)</a>' % (origin_branches_url, len(origin_branches)),
+        '<a href="%s">Branches (%s)</a>'
+        % (escape(origin_branches_url), len(origin_branches)),
    )

-    origin_releases_url = reverse(
-        "browse-origin-releases", url_args=url_args, query_params=query_params
-    )
+    origin_releases_url = reverse("browse-origin-releases", query_params=query_params)

    nb_releases = len(origin_releases)
    if nb_releases > 0:
        assert_contains(
-            resp, '<a href="%s">Releases (%s)</a>' % (origin_releases_url, nb_releases)
+            resp,
+            '<a href="%s">Releases (%s)</a>'
+            % (escape(origin_releases_url), nb_releases),
        )

    if path:
        query_params["path"] = path

    assert_contains(resp, '<li class="swh-branch">', count=len(origin_branches))
    for branch in origin_branches:
        query_params["branch"] = branch["name"]
        root_dir_branch_url = reverse(
-            "browse-origin-directory", url_args=url_args, query_params=query_params
+            "browse-origin-directory", query_params=query_params
        )

        assert_contains(resp, '<a href="%s">' % root_dir_branch_url)

    assert_contains(resp, '<li class="swh-release">', count=len(origin_releases))
    query_params["branch"] = None
    for release in origin_releases:
        query_params["release"] = release["name"]
        root_dir_release_url = reverse(
-            "browse-origin-directory", url_args=url_args, query_params=query_params
+            "browse-origin-directory", query_params=query_params
        )

        assert_contains(resp, '<a href="%s">' % root_dir_release_url)

    assert_contains(resp, "vault-cook-directory")
    assert_contains(resp, "vault-cook-revision")

    swh_dir_id = get_swh_persistent_id("directory", directory_entries[0]["dir_id"])
    swh_dir_id_url = reverse("browse-swh-id", url_args={"swh_id": swh_dir_id})
    assert_contains(resp, swh_dir_id)
    assert_contains(resp, swh_dir_id_url)

    assert_contains(resp, "swh-take-new-snapshot")


def _origin_branches_test_helper(client, origin_info, origin_snapshot):
-    url_args = {"origin_url": origin_info["url"]}
+    query_params = {"origin_url": origin_info["url"]}

-    url = reverse("browse-origin-branches", url_args=url_args)
+    url = reverse("browse-origin-branches", query_params=query_params)

    resp = client.get(url)

    assert resp.status_code == 200
    assert_template_used(resp, "browse/branches.html")

    origin_branches = origin_snapshot[0]
    origin_releases = origin_snapshot[1]

-    origin_branches_url = reverse("browse-origin-branches", url_args=url_args)
+    origin_branches_url = reverse("browse-origin-branches", query_params=query_params)

    assert_contains(
        resp,
        '<a href="%s">Branches (%s)</a>' % (origin_branches_url, len(origin_branches)),
    )

-    origin_releases_url = reverse("browse-origin-releases", url_args=url_args)
+    origin_releases_url = reverse("browse-origin-releases", query_params=query_params)

    nb_releases = len(origin_releases)
    if nb_releases > 0:
        assert_contains(
            resp, '<a href="%s">Releases (%s)</a>' % (origin_releases_url, nb_releases)
        )

    assert_contains(resp, '<a href="%s">' % escape(browse_branch_url))

    browse_revision_url = reverse(
        "browse-revision",
        url_args={"sha1_git": branch["revision"]},
        query_params={"origin": origin_info["url"]},
    )
    assert_contains(resp, '<a href="%s">' % escape(browse_revision_url))


def _origin_releases_test_helper(client, origin_info, origin_snapshot):
-    url_args = {"origin_url": origin_info["url"]}
+    query_params = {"origin_url": origin_info["url"]}

-    url = reverse("browse-origin-releases", url_args=url_args)
+    url = reverse("browse-origin-releases", query_params=query_params)

    resp = client.get(url)

    assert resp.status_code == 200
    assert_template_used(resp, "browse/releases.html")

    origin_branches = origin_snapshot[0]
    origin_releases = origin_snapshot[1]

-    origin_branches_url = reverse("browse-origin-branches", url_args=url_args)
+    origin_branches_url = reverse("browse-origin-branches", query_params=query_params)

    assert_contains(
        resp,
        '<a href="%s">Branches (%s)</a>' % (origin_branches_url, len(origin_branches)),
    )

-    origin_releases_url = reverse("browse-origin-releases", url_args=url_args)
+    origin_releases_url = reverse("browse-origin-releases", query_params=query_params)

    nb_releases = len(origin_releases)
    if nb_releases > 0:
        assert_contains(
            resp, '<a href="%s">Releases (%s)</a>' % (origin_releases_url, nb_releases)
        )

    assert_contains(resp, '<a href="%s">' % escape(browse_release_url))
    assert_contains(resp, '<a href="%s">' % escape(browse_revision_url))


@given(
    new_origin(), visit_dates(), revisions(min_size=10, max_size=10), existing_release()
)
def test_origin_branches_pagination_with_alias(
    client, archive_data, mocker, new_origin, visit_dates, revisions, existing_release
):
    """
    When a snapshot contains a branch or a release alias, pagination links
    in the branches / releases view should be displayed.
""" mocker.patch("swh.web.browse.snapshot_context.PER_PAGE", len(revisions) / 2) snp_dict = {"branches": {}, "id": hash_to_bytes(random_sha1())} for i in range(len(revisions)): branch = "".join(random.choices(string.ascii_lowercase, k=8)) snp_dict["branches"][branch.encode()] = { "target_type": "revision", "target": hash_to_bytes(revisions[i]), } release = "".join(random.choices(string.ascii_lowercase, k=8)) snp_dict["branches"][b"RELEASE_ALIAS"] = { "target_type": "alias", "target": release.encode(), } snp_dict["branches"][release.encode()] = { "target_type": "release", "target": hash_to_bytes(existing_release), } new_origin = archive_data.origin_add([new_origin])[0] archive_data.snapshot_add([Snapshot.from_dict(snp_dict)]) visit = archive_data.origin_visit_add(new_origin["url"], visit_dates[0], type="git") archive_data.origin_visit_update( new_origin["url"], visit.visit, status="full", snapshot=snp_dict["id"] ) - url = reverse("browse-origin-branches", url_args={"origin_url": new_origin["url"]}) + url = reverse( + "browse-origin-branches", query_params={"origin_url": new_origin["url"]} + ) resp = client.get(url) assert resp.status_code == 200 assert_template_used(resp, "browse/branches.html") assert_contains(resp, '