diff --git a/cypress/integration/deposit-admin.spec.js b/cypress/integration/deposit-admin.spec.js index 13bad497..2fbeac94 100644 --- a/cypress/integration/deposit-admin.spec.js +++ b/cypress/integration/deposit-admin.spec.js @@ -1,155 +1,155 @@ /** * Copyright (C) 2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ // data to use as request query response let responseDeposits; let expectedOrigins; describe('Test admin deposit page', function() { beforeEach(() => { responseDeposits = [ { 'id': 614, 'external_id': 'ch-de-1', 'reception_date': '2020-05-18T13:48:27Z', 'status': 'done', 'status_detail': null, - 'swh_id': 'swh:1:dir:ef04a768', - 'swh_id_context': 'swh:1:dir:ef04a768;origin=https://w.s.o/c-d-1;visit=swh:1:snp:b234be1e;anchor=swh:1:rev:d24a75c9;path=/' + 'swhid': 'swh:1:dir:ef04a768', + 'swhid_context': 'swh:1:dir:ef04a768;origin=https://w.s.o/c-d-1;visit=swh:1:snp:b234be1e;anchor=swh:1:rev:d24a75c9;path=/' }, { 'id': 613, 'external_id': 'ch-de-2', 'reception_date': '2020-05-18T11:20:16Z', 'status': 'done', 'status_detail': null, - 'swh_id': 'swh:1:dir:181417fb', - 'swh_id_context': 'swh:1:dir:181417fb;origin=https://w.s.o/c-d-2;visit=swh:1:snp:8c32a2ef;anchor=swh:1:rev:3d1eba04;path=/' + 'swhid': 'swh:1:dir:181417fb', + 'swhid_context': 'swh:1:dir:181417fb;origin=https://w.s.o/c-d-2;visit=swh:1:snp:8c32a2ef;anchor=swh:1:rev:3d1eba04;path=/' }, { 'id': 612, 'external_id': 'ch-de-3', 'reception_date': '2020-05-18T11:20:16Z', 'status': 'rejected', 'status_detail': 'incomplete deposit!', - 'swh_id': null, - 'swh_id_context': null + 'swhid': null, + 'swhid_context': null } ]; // expected origins are computed from the responseDeposits above expectedOrigins = { 614: 'https://w.s.o/c-d-1', 613: 'https://w.s.o/c-d-2', 612: '' }; }); it('Should properly display entries', function() { cy.adminLogin(); cy.visit(this.Urls.admin_deposit()); let testDeposits = responseDeposits; cy.server(); cy.route({ method: 'GET', url: `${this.Urls.admin_deposit_list()}**`, response: { 'draw': 10, 'recordsTotal': testDeposits.length, 'recordsFiltered': testDeposits.length, 'data': testDeposits } }).as('listDeposits'); cy.location('pathname') .should('be.equal', this.Urls.admin_deposit()); cy.url().should('include', '/admin/deposit'); cy.get('#swh-admin-deposit-list') .should('exist'); cy.wait('@listDeposits').then((xhr) => { cy.log('response:', xhr.response); cy.log(xhr.response.body); let deposits = xhr.response.body.data; cy.log('Deposits: ', deposits); expect(deposits.length).to.equal(testDeposits.length); cy.get('#swh-admin-deposit-list').find('tbody > tr').as('rows'); // check the data rendered in each row cy.get('@rows').each((row, idx, collection) => { let deposit = deposits[idx]; let responseDeposit = testDeposits[idx]; assert.isNotNull(deposit); assert.isNotNull(responseDeposit); expect(deposit.id).to.be.equal(responseDeposit['id']); expect(deposit.external_id).to.be.equal(responseDeposit['external_id']); expect(deposit.status).to.be.equal(responseDeposit['status']); expect(deposit.status_detail).to.be.equal(responseDeposit['status_detail']); - expect(deposit.swh_id).to.be.equal(responseDeposit['swh_id']); - expect(deposit.swh_id_context).to.be.equal(responseDeposit['swh_id_context']); + expect(deposit.swhid).to.be.equal(responseDeposit['swhid']); + expect(deposit.swhid_context).to.be.equal(responseDeposit['swhid_context']); let expectedOrigin =
expectedOrigins[deposit.id]; // ensure it's in the dom cy.contains(deposit.id).should('be.visible'); if (deposit.status !== 'rejected') { expect(row).to.not.contain(deposit.external_id); cy.contains(expectedOrigin).should('be.visible'); } cy.contains(deposit.status).should('be.visible'); // those are hidden by default if (deposit.status_detail !== null) { cy.contains(deposit.status_detail).should('not.be.visible'); } // those are hidden by default - if (deposit.swh_id !== null) { - cy.contains(deposit.swh_id).should('not.be.visible'); - cy.contains(deposit.swh_id_context).should('not.be.visible'); + if (deposit.swhid !== null) { + cy.contains(deposit.swhid).should('not.be.visible'); + cy.contains(deposit.swhid_context).should('not.be.visible'); } }); // toggle all column links and ensure the previous visibility checks are inverted cy.get('a.toggle-col').click({'multiple': true}).then(() => { cy.get('#swh-admin-deposit-list').find('tbody > tr').as('rows'); cy.get('@rows').each((row, idx, collection) => { let deposit = deposits[idx]; let expectedOrigin = expectedOrigins[deposit.id]; // the id column is now hidden cy.contains(deposit.id).should('not.be.visible'); if (deposit.status !== 'rejected') { expect(row).to.not.contain(deposit.external_id); expect(row).to.contain(expectedOrigin); } expect(row).to.not.contain(deposit.status); // those are hidden by default, so now visible if (deposit.status_detail !== null) { cy.contains(deposit.status_detail).should('be.visible'); } // those are hidden by default, so now they should be visible - if (deposit.swh_id !== null) { - cy.contains(deposit.swh_id).should('be.visible'); - cy.contains(deposit.swh_id_context).should('be.visible'); + if (deposit.swhid !== null) { + cy.contains(deposit.swhid).should('be.visible'); + cy.contains(deposit.swhid_context).should('be.visible'); } }); }); cy.get('#swh-admin-deposit-list-error') .should('not.contain', 'An error occurred while retrieving the list of deposits'); }); }); }); diff --git a/cypress/integration/origin-search.spec.js b/cypress/integration/origin-search.spec.js index 78253a11..ce8cd994 100644 --- a/cypress/integration/origin-search.spec.js +++ b/cypress/integration/origin-search.spec.js @@ -1,429 +1,429 @@ /** * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ const nonExistentText = 'NoMatchExists'; let origin; let url; function doSearch(searchText) { cy.get('#origins-url-patterns') .type(searchText) .get('.swh-search-icon') .click(); } function searchShouldRedirect(searchText, redirectUrl) { doSearch(searchText); cy.location('pathname') .should('equal', redirectUrl); } function searchShouldShowNotFound(searchText, msg) { doSearch(searchText); cy.get('#swh-no-result') .should('be.visible') .and('contain', msg); } function stubOriginVisitLatestRequests() { cy.server(); cy.route({ method: 'GET', url: '**/visit/latest/**', response: { type: 'tar' } }).as('originVisitLatest'); } describe('Test origin-search', function() { before(function() { origin = this.origin[0]; url = this.Urls.browse_search(); }); beforeEach(function() { cy.visit(url); }); it('should show the origin in results when its url is searched', function() { cy.get('#origins-url-patterns') .type(origin.url); cy.get('.swh-search-icon') .click(); cy.get('#origin-search-results') .should('be.visible'); cy.contains('tr', origin.url) .should('be.visible')
.find('.swh-visit-status') .find('i') .should('have.class', 'mdi-check-bold') .and('have.attr', 'title', 'Software origin has been archived by Software Heritage'); }); it('should show not found message when no repo matches', function() { searchShouldShowNotFound(nonExistentText, 'No origins matching the search criteria were found.'); }); it('should add appropriate URL parameters', function() { // Check all three checkboxes and check if // correct url params are added cy.get('#swh-search-origins-with-visit') .check({force: true}) .get('#swh-filter-empty-visits') .check({force: true}) .get('#swh-search-origin-metadata') .check({force: true}) .then(() => { const searchText = origin.url; doSearch(searchText); cy.location('search').then(locationSearch => { const urlParams = new URLSearchParams(locationSearch); const query = urlParams.get('q'); const withVisit = urlParams.has('with_visit'); const withContent = urlParams.has('with_content'); const searchMetadata = urlParams.has('search_metadata'); assert.strictEqual(query, searchText); assert.strictEqual(withVisit, true); assert.strictEqual(withContent, true); assert.strictEqual(searchMetadata, true); }); }); }); it('should not send request to the resolve endpoint', function() { cy.server(); cy.route({ method: 'GET', - url: `${this.Urls.api_1_resolve_swh_pid('').slice(0, -1)}**` - }).as('resolvePid'); + url: `${this.Urls.api_1_resolve_swhid('').slice(0, -1)}**` + }).as('resolveSWHID'); cy.route({ method: 'GET', url: `${this.Urls.api_1_origin_search(origin.url)}**` }).as('searchOrigin'); cy.get('#origins-url-patterns') .type(origin.url); cy.get('.swh-search-icon') .click(); cy.wait('@searchOrigin'); - cy.xhrShouldBeCalled('resolvePid', 0); + cy.xhrShouldBeCalled('resolveSWHID', 0); cy.xhrShouldBeCalled('searchOrigin', 1); }); context('Test pagination', function() { it('should not paginate if there are not many results', function() { // Setup search cy.get('#swh-search-origins-with-visit') .uncheck({force: true}) .get('#swh-filter-empty-visits') .uncheck({force: true}) .then(() => { const searchText = 'libtess'; // Get first page of results doSearch(searchText); cy.get('.swh-search-result-entry') .should('have.length', 1); cy.get('.swh-search-result-entry#origin-0 td a') .should('have.text', 'https://github.com/memononen/libtess2'); cy.get('#origins-prev-results-button') .should('have.class', 'disabled'); cy.get('#origins-next-results-button') .should('have.class', 'disabled'); }); }); it('should paginate forward when there are many results', function() { stubOriginVisitLatestRequests(); // Setup search cy.get('#swh-search-origins-with-visit') .uncheck({force: true}) .get('#swh-filter-empty-visits') .uncheck({force: true}) .then(() => { const searchText = 'many.origins'; // Get first page of results doSearch(searchText); cy.wait('@originVisitLatest'); cy.get('.swh-search-result-entry') .should('have.length', 100); cy.get('.swh-search-result-entry#origin-0 td a') .should('have.text', 'https://many.origins/1'); cy.get('.swh-search-result-entry#origin-99 td a') .should('have.text', 'https://many.origins/100'); cy.get('#origins-prev-results-button') .should('have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); // Get second page of results cy.get('#origins-next-results-button a') .click(); cy.wait('@originVisitLatest'); cy.get('.swh-search-result-entry') .should('have.length', 100); cy.get('.swh-search-result-entry#origin-0 td a') .should('have.text', 'https://many.origins/101'); 
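// with a result page size of 100, the second page spans origins 101 to 200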
cy.get('.swh-search-result-entry#origin-99 td a') .should('have.text', 'https://many.origins/200'); cy.get('#origins-prev-results-button') .should('not.have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); // Get third (and last) page of results cy.get('#origins-next-results-button a') .click(); cy.wait('@originVisitLatest'); cy.get('.swh-search-result-entry') .should('have.length', 50); cy.get('.swh-search-result-entry#origin-0 td a') .should('have.text', 'https://many.origins/201'); cy.get('.swh-search-result-entry#origin-49 td a') .should('have.text', 'https://many.origins/250'); cy.get('#origins-prev-results-button') .should('not.have.class', 'disabled'); cy.get('#origins-next-results-button') .should('have.class', 'disabled'); }); }); it('should paginate backward from a middle page', function() { stubOriginVisitLatestRequests(); // Setup search cy.get('#swh-search-origins-with-visit') .uncheck({force: true}) .get('#swh-filter-empty-visits') .uncheck({force: true}) .then(() => { const searchText = 'many.origins'; // Get first page of results doSearch(searchText); cy.wait('@originVisitLatest'); cy.get('#origins-prev-results-button') .should('have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); // Get second page of results cy.get('#origins-next-results-button a') .click(); cy.wait('@originVisitLatest'); cy.get('#origins-prev-results-button') .should('not.have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); // Get first page of results again cy.get('#origins-prev-results-button a') .click(); cy.wait('@originVisitLatest'); cy.get('.swh-search-result-entry') .should('have.length', 100); cy.get('.swh-search-result-entry#origin-0 td a') .should('have.text', 'https://many.origins/1'); cy.get('.swh-search-result-entry#origin-99 td a') .should('have.text', 'https://many.origins/100'); cy.get('#origins-prev-results-button') .should('have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); }); }); it('should paginate backward from the last page', function() { stubOriginVisitLatestRequests(); // Setup search cy.get('#swh-search-origins-with-visit') .uncheck({force: true}) .get('#swh-filter-empty-visits') .uncheck({force: true}) .then(() => { const searchText = 'many.origins'; // Get first page of results doSearch(searchText); cy.wait('@originVisitLatest'); cy.get('#origins-prev-results-button') .should('have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); // Get second page of results cy.get('#origins-next-results-button a') .click(); cy.wait('@originVisitLatest'); cy.get('#origins-prev-results-button') .should('not.have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); // Get third (and last) page of results cy.get('#origins-next-results-button a') .click(); cy.get('#origins-prev-results-button') .should('not.have.class', 'disabled'); cy.get('#origins-next-results-button') .should('have.class', 'disabled'); // Get second page of results again cy.get('#origins-prev-results-button a') .click(); cy.wait('@originVisitLatest'); cy.get('.swh-search-result-entry') .should('have.length', 100); cy.get('.swh-search-result-entry#origin-0 td a') .should('have.text', 'https://many.origins/101'); cy.get('.swh-search-result-entry#origin-99 td a') .should('have.text', 'https://many.origins/200'); 
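// back on a middle page, both pagination buttons must be enabled again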
cy.get('#origins-prev-results-button') .should('not.have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); // Get first page of results again cy.get('#origins-prev-results-button a') .click(); cy.wait('@originVisitLatest'); cy.get('.swh-search-result-entry') .should('have.length', 100); cy.get('.swh-search-result-entry#origin-0 td a') .should('have.text', 'https://many.origins/1'); cy.get('.swh-search-result-entry#origin-99 td a') .should('have.text', 'https://many.origins/100'); cy.get('#origins-prev-results-button') .should('have.class', 'disabled'); cy.get('#origins-next-results-button') .should('not.have.class', 'disabled'); }); }); }); - context('Test valid persistent ids', function() { + context('Test valid SWHIDs', function() { it('should resolve directory', function() { const redirectUrl = this.Urls.browse_directory(origin.content[0].directory); - const persistentId = `swh:1:dir:${origin.content[0].directory}`; + const swhid = `swh:1:dir:${origin.content[0].directory}`; - searchShouldRedirect(persistentId, redirectUrl); + searchShouldRedirect(swhid, redirectUrl); }); it('should resolve revision', function() { const redirectUrl = this.Urls.browse_revision(origin.revisions[0]); - const persistentId = `swh:1:rev:${origin.revisions[0]}`; + const swhid = `swh:1:rev:${origin.revisions[0]}`; - searchShouldRedirect(persistentId, redirectUrl); + searchShouldRedirect(swhid, redirectUrl); }); it('should resolve snapshot', function() { const redirectUrl = this.Urls.browse_snapshot_directory(origin.snapshot); - const persistentId = `swh:1:snp:${origin.snapshot}`; + const swhid = `swh:1:snp:${origin.snapshot}`; - searchShouldRedirect(persistentId, redirectUrl); + searchShouldRedirect(swhid, redirectUrl); }); it('should resolve content', function() { const redirectUrl = this.Urls.browse_content(`sha1_git:${origin.content[0].sha1git}`); - const persistentId = `swh:1:cnt:${origin.content[0].sha1git}`; + const swhid = `swh:1:cnt:${origin.content[0].sha1git}`; - searchShouldRedirect(persistentId, redirectUrl); + searchShouldRedirect(swhid, redirectUrl); }); it('should not send request to the search endpoint', function() { cy.server(); - const persistentId = `swh:1:rev:${origin.revisions[0]}`; + const swhid = `swh:1:rev:${origin.revisions[0]}`; cy.route({ method: 'GET', - url: this.Urls.api_1_resolve_swh_pid(persistentId) - }).as('resolvePid'); + url: this.Urls.api_1_resolve_swhid(swhid) + }).as('resolveSWHID'); cy.route({ method: 'GET', url: `${this.Urls.api_1_origin_search('').slice(0, -1)}**` }).as('searchOrigin'); cy.get('#origins-url-patterns') - .type(persistentId); + .type(swhid); cy.get('.swh-search-icon') .click(); - cy.wait('@resolvePid'); + cy.wait('@resolveSWHID'); - cy.xhrShouldBeCalled('resolvePid', 1); + cy.xhrShouldBeCalled('resolveSWHID', 1); cy.xhrShouldBeCalled('searchOrigin', 0); }); }); - context('Test invalid persistent ids', function() { + context('Test invalid SWHIDs', function() { it('should show not found for directory', function() { - const persistentId = `swh:1:dir:${this.unarchivedRepo.rootDirectory}`; + const swhid = `swh:1:dir:${this.unarchivedRepo.rootDirectory}`; const msg = `Directory with sha1_git ${this.unarchivedRepo.rootDirectory} not found`; - searchShouldShowNotFound(persistentId, msg); + searchShouldShowNotFound(swhid, msg); }); it('should show not found for snapshot', function() { - const persistentId = `swh:1:snp:${this.unarchivedRepo.snapshot}`; + const swhid = `swh:1:snp:${this.unarchivedRepo.snapshot}`; 
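// unarchivedRepo is a fixture deliberately absent from the test archive, so resolution must fail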
const msg = `Snapshot with id ${this.unarchivedRepo.snapshot} not found!`; - searchShouldShowNotFound(persistentId, msg); + searchShouldShowNotFound(swhid, msg); }); it('should show not found for revision', function() { - const persistentId = `swh:1:rev:${this.unarchivedRepo.revision}`; + const swhid = `swh:1:rev:${this.unarchivedRepo.revision}`; const msg = `Revision with sha1_git ${this.unarchivedRepo.revision} not found.`; - searchShouldShowNotFound(persistentId, msg); + searchShouldShowNotFound(swhid, msg); }); it('should show not found for content', function() { - const persistentId = `swh:1:cnt:${this.unarchivedRepo.content[0].sha1git}`; + const swhid = `swh:1:cnt:${this.unarchivedRepo.content[0].sha1git}`; const msg = `Content with sha1_git checksum equals to ${this.unarchivedRepo.content[0].sha1git} not found!`; - searchShouldShowNotFound(persistentId, msg); + searchShouldShowNotFound(swhid, msg); }); }); }); diff --git a/cypress/integration/persistent-identifiers.spec.js b/cypress/integration/persistent-identifiers.spec.js index e6268054..17b05538 100644 --- a/cypress/integration/persistent-identifiers.spec.js +++ b/cypress/integration/persistent-identifiers.spec.js @@ -1,228 +1,228 @@ /** * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ let origin, originBadgeUrl, originBrowseUrl; let url, urlPrefix; let cntSWHID, cntSWHIDWithContext; let dirSWHID, dirSWHIDWithContext; let relSWHID, relSWHIDWithContext; let revSWHID, revSWHIDWithContext; let snpSWHID, snpSWHIDWithContext; let testsData; const firstSelLine = 6; const lastSelLine = 12; describe('Persistent Identifiers Tests', function() { before(function() { origin = this.origin[1]; url = `${this.Urls.browse_origin_content()}?origin_url=${origin.url}&path=${origin.content[0].path}`; url = `${url}&release=${origin.release}#L${firstSelLine}-L${lastSelLine}`; originBadgeUrl = this.Urls.swh_badge('origin', origin.url); originBrowseUrl = `${this.Urls.browse_origin()}?origin_url=${origin.url}`; cy.visit(url).window().then(win => { urlPrefix = `${win.location.protocol}//${win.location.hostname}`; if (win.location.port) { urlPrefix += `:${win.location.port}`; } const swhids = win.swh.webapp.getSwhIdsContext(); cntSWHID = swhids.content.swhid; cntSWHIDWithContext = swhids.content.swhid_with_context; cntSWHIDWithContext += `;lines=${firstSelLine}-${lastSelLine}`; dirSWHID = swhids.directory.swhid; dirSWHIDWithContext = swhids.directory.swhid_with_context; revSWHID = swhids.revision.swhid; revSWHIDWithContext = swhids.revision.swhid_with_context; relSWHID = swhids.release.swhid; relSWHIDWithContext = swhids.release.swhid_with_context; snpSWHID = swhids.snapshot.swhid; snpSWHIDWithContext = swhids.snapshot.swhid_with_context; testsData = [ { 'objectType': 'content', - 'objectPids': [cntSWHIDWithContext, cntSWHID], + 'objectSWHIDs': [cntSWHIDWithContext, cntSWHID], 'badgeUrl': this.Urls.swh_badge('content', swhids.content.object_id), - 'badgePidUrl': this.Urls.swh_badge_pid(cntSWHID), - 'browseUrl': this.Urls.browse_swh_id(cntSWHIDWithContext) + 'badgeSWHIDUrl': this.Urls.swh_badge_swhid(cntSWHID), + 'browseUrl': this.Urls.browse_swhid(cntSWHIDWithContext) }, { 'objectType': 'directory', - 'objectPids': [dirSWHIDWithContext, dirSWHID], + 'objectSWHIDs': [dirSWHIDWithContext, dirSWHID], 'badgeUrl': this.Urls.swh_badge('directory', 
swhids.directory.object_id), - 'badgePidUrl': this.Urls.swh_badge_pid(dirSWHID), - 'browseUrl': this.Urls.browse_swh_id(dirSWHIDWithContext) + 'badgeSWHIDUrl': this.Urls.swh_badge_swhid(dirSWHID), + 'browseUrl': this.Urls.browse_swhid(dirSWHIDWithContext) }, { 'objectType': 'release', - 'objectPids': [relSWHIDWithContext, relSWHID], + 'objectSWHIDs': [relSWHIDWithContext, relSWHID], 'badgeUrl': this.Urls.swh_badge('release', swhids.release.object_id), - 'badgePidUrl': this.Urls.swh_badge_pid(relSWHID), - 'browseUrl': this.Urls.browse_swh_id(relSWHIDWithContext) + 'badgeSWHIDUrl': this.Urls.swh_badge_swhid(relSWHID), + 'browseUrl': this.Urls.browse_swhid(relSWHIDWithContext) }, { 'objectType': 'revision', - 'objectPids': [revSWHIDWithContext, revSWHID], + 'objectSWHIDs': [revSWHIDWithContext, revSWHID], 'badgeUrl': this.Urls.swh_badge('revision', swhids.revision.object_id), - 'badgePidUrl': this.Urls.swh_badge_pid(revSWHID), - 'browseUrl': this.Urls.browse_swh_id(revSWHIDWithContext) + 'badgeSWHIDUrl': this.Urls.swh_badge_swhid(revSWHID), + 'browseUrl': this.Urls.browse_swhid(revSWHIDWithContext) }, { 'objectType': 'snapshot', - 'objectPids': [snpSWHIDWithContext, snpSWHID], + 'objectSWHIDs': [snpSWHIDWithContext, snpSWHID], 'badgeUrl': this.Urls.swh_badge('snapshot', swhids.snapshot.object_id), - 'badgePidUrl': this.Urls.swh_badge_pid(snpSWHID), - 'browseUrl': this.Urls.browse_swh_id(snpSWHIDWithContext) + 'badgeSWHIDUrl': this.Urls.swh_badge_swhid(snpSWHID), + 'browseUrl': this.Urls.browse_swhid(snpSWHIDWithContext) } ]; }); }); beforeEach(function() { cy.visit(url); }); it('should open and close identifiers tab when clicking on handle', function() { cy.get('#swh-identifiers') .should('have.class', 'ui-slideouttab-ready'); cy.get('.ui-slideouttab-handle') .click(); cy.get('#swh-identifiers') .should('have.class', 'ui-slideouttab-open'); cy.get('.ui-slideouttab-handle') .click(); cy.get('#swh-identifiers') .should('not.have.class', 'ui-slideouttab-open'); }); it('should display identifiers with permalinks for browsed objects', function() { cy.get('.ui-slideouttab-handle') .click(); for (let td of testsData) { - cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) + cy.get(`a[href="#swhid-tab-${td.objectType}"]`) .click(); - cy.get(`#swh-id-tab-${td.objectType}`) + cy.get(`#swhid-tab-${td.objectType}`) .should('be.visible'); - cy.get(`#swh-id-tab-${td.objectType} .swh-id`) - .contains(td.objectPids[0]) - .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[0])); + cy.get(`#swhid-tab-${td.objectType} .swhid`) + .contains(td.objectSWHIDs[0]) + .should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[0])); } }); it('should update other object identifiers contextual info when toggling context checkbox', function() { cy.get('.ui-slideouttab-handle') .click(); for (let td of testsData) { - cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) + cy.get(`a[href="#swhid-tab-${td.objectType}"]`) .click(); - cy.get(`#swh-id-tab-${td.objectType} .swh-id`) - .contains(td.objectPids[0]) - .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[0])); + cy.get(`#swhid-tab-${td.objectType} .swhid`) + .contains(td.objectSWHIDs[0]) + .should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[0])); - cy.get(`#swh-id-tab-${td.objectType} .swh-id-option`) + cy.get(`#swhid-tab-${td.objectType} .swhid-option`) .click(); - cy.get(`#swh-id-tab-${td.objectType} .swh-id`) - .contains(td.objectPids[1]) - .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[1])); + 
cy.get(`#swhid-tab-${td.objectType} .swhid`) + .contains(td.objectSWHIDs[1]) + .should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[1])); - cy.get(`#swh-id-tab-${td.objectType} .swh-id-option`) + cy.get(`#swhid-tab-${td.objectType} .swhid-option`) .click(); - cy.get(`#swh-id-tab-${td.objectType} .swh-id`) - .contains(td.objectPids[0]) - .should('have.attr', 'href', this.Urls.browse_swh_id(td.objectPids[0])); + cy.get(`#swhid-tab-${td.objectType} .swhid`) + .contains(td.objectSWHIDs[0]) + .should('have.attr', 'href', this.Urls.browse_swhid(td.objectSWHIDs[0])); } }); it('should display swh badges in identifiers tab for browsed objects', function() { cy.get('.ui-slideouttab-handle') .click(); const originBadgeUrl = this.Urls.swh_badge('origin', origin.url); for (let td of testsData) { - cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) + cy.get(`a[href="#swhid-tab-${td.objectType}"]`) .click(); - cy.get(`#swh-id-tab-${td.objectType} .swh-badge-origin`) + cy.get(`#swhid-tab-${td.objectType} .swh-badge-origin`) .should('have.attr', 'src', originBadgeUrl); - cy.get(`#swh-id-tab-${td.objectType} .swh-badge-${td.objectType}`) + cy.get(`#swhid-tab-${td.objectType} .swh-badge-${td.objectType}`) .should('have.attr', 'src', td.badgeUrl); } }); it('should display badge integration info when clicking on it', function() { cy.get('.ui-slideouttab-handle') .click(); for (let td of testsData) { - cy.get(`a[href="#swh-id-tab-${td.objectType}"]`) + cy.get(`a[href="#swhid-tab-${td.objectType}"]`) .click(); - cy.get(`#swh-id-tab-${td.objectType} .swh-badge-origin`) + cy.get(`#swhid-tab-${td.objectType} .swh-badge-origin`) .click() .wait(500); for (let badgeType of ['html', 'md', 'rst']) { cy.get(`.modal .swh-badge-${badgeType}`) .contains(`${urlPrefix}${originBrowseUrl}`) .contains(`${urlPrefix}${originBadgeUrl}`); } cy.get('.modal.show .close') .click() .wait(500); - cy.get(`#swh-id-tab-${td.objectType} .swh-badge-${td.objectType}`) + cy.get(`#swhid-tab-${td.objectType} .swh-badge-${td.objectType}`) .click() .wait(500); for (let badgeType of ['html', 'md', 'rst']) { cy.get(`.modal .swh-badge-${badgeType}`) .contains(`${urlPrefix}${td.browseUrl}`) - .contains(`${urlPrefix}${td.badgePidUrl}`); + .contains(`${urlPrefix}${td.badgeSWHIDUrl}`); } cy.get('.modal.show .close') .click() .wait(500); } }); it('should be possible to retrieve SWHIDs context from JavaScript', function() { cy.window().then(win => { const swhIdsContext = win.swh.webapp.getSwhIdsContext(); for (let testData of testsData) { assert.isTrue(swhIdsContext.hasOwnProperty(testData.objectType)); assert.equal(swhIdsContext[testData.objectType].swhid, - testData.objectPids.slice(-1)[0]); + testData.objectSWHIDs.slice(-1)[0]); } }); }); }); diff --git a/docs/developers-info.rst b/docs/developers-info.rst index 9d797c3a..f9f35bb2 100644 --- a/docs/developers-info.rst +++ b/docs/developers-info.rst @@ -1,127 +1,127 @@ Developers Information ====================== Sample configuration -------------------- The configuration will be taken from the default configuration file: ``~/.config/swh/web/web.yml``. The following introduces a default configuration file: .. sourcecode:: yaml storage: cls: remote args: url: http://localhost:5002 debug: false throttling: cache_uri: None scopes: swh_api: limiter_rate: default: 120/h exempted_networks: - 127.0.0.0/8 Run server ---------- Either use the django manage script directly (useful in development mode as it offers various commands): .. 
sourcecode:: shell $ python3 -m swh.web.manage runserver or use the following shortcut: .. sourcecode:: shell $ make run Modules description ------------------- Common to all web applications ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Configuration and settings """""""""""""""""""""""""" * :mod:`swh.web.config`: holds the configuration for the web applications. * :mod:`swh.web.doc_config`: utility module used to extend the sphinx configuration when building the documentation. * :mod:`swh.web.manage`: Django management module for developers. * :mod:`swh.web.urls`: module that holds the whole URI scheme of all the web applications. * :mod:`swh.web.settings.common`: Common Django settings * :mod:`swh.web.settings.development`: Django settings for development * :mod:`swh.web.settings.production`: Django settings for production * :mod:`swh.web.settings.tests`: Django settings for tests Common utilities """""""""""""""" * :mod:`swh.web.common.converters`: conversion module used to transform raw data into serializable data. It is used by :mod:`swh.web.common.service` to convert data before transmitting them to Django views. * :mod:`swh.web.common.exc`: module defining exceptions used in the web applications. * :mod:`swh.web.common.highlightjs`: utility module to ease the use of the highlightjs_ library in produced Django views. * :mod:`swh.web.common.query`: Utilities to parse data from HTTP endpoints. It is used by :mod:`swh.web.common.service`. * :mod:`swh.web.common.service`: Orchestration layer used by views module in charge of communication with :mod:`swh.storage` to retrieve information and perform conversion for the upper layer. * :mod:`swh.web.common.swh_templatetags`: Custom Django template tags library for swh. * :mod:`swh.web.common.urlsindex`: Utilities to ease the registration of endpoints for the web applications. * :mod:`swh.web.common.utils`: Utility functions used in the web applications implementation. swh-web API application ^^^^^^^^^^^^^^^^^^^^^^^ * :mod:`swh.web.api.apidoc`: Utilities to document the web api for its html browsable rendering. * :mod:`swh.web.api.apiresponse`: Utility module to ease the generation of web api responses. * :mod:`swh.web.api.apiurls`: Utilities to facilitate the registration of web api endpoints. * :mod:`swh.web.api.throttling`: Custom request rate limiter to use with the `Django REST Framework <https://www.django-rest-framework.org/>`_. * :mod:`swh.web.api.urls`: Module that defines the whole URI scheme for the api endpoints. * :mod:`swh.web.api.utils`: Utility functions used in the web api implementation. * :mod:`swh.web.api.views.content`: Implementation of API endpoints for getting information about contents. * :mod:`swh.web.api.views.directory`: Implementation of API endpoints for getting information about directories. * :mod:`swh.web.api.views.origin`: Implementation of API endpoints for getting information about origins. * :mod:`swh.web.api.views.person`: Implementation of API endpoints for getting information about persons. * :mod:`swh.web.api.views.release`: Implementation of API endpoints for getting information about releases. * :mod:`swh.web.api.views.revision`: Implementation of API endpoints for getting information about revisions. * :mod:`swh.web.api.views.snapshot`: Implementation of API endpoints for getting information about snapshots. * :mod:`swh.web.api.views.stat`: Implementation of API endpoints for getting information about archive statistics. * :mod:`swh.web.api.views.utils`: Utilities used in the web api endpoints implementation.
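As an illustration of how :mod:`swh.web.api.apiurls` and :mod:`swh.web.api.apidoc` fit together, here is a minimal sketch of an endpoint declaration, modeled on :mod:`swh.web.api.views.identifiers`; the route, view name and payload are invented for the example:

.. sourcecode:: python

    from swh.web.api.apidoc import api_doc, format_docstring
    from swh.web.api.apiurls import api_route


    @api_route(r"/ping/(?P<name>.+)/", "api-1-ping")  # hypothetical route and view name
    @api_doc("/ping/")
    @format_docstring()
    def api_ping(request, name):
        """
        .. http:get:: /api/1/ping/(name)/

            Toy endpoint, shown only to illustrate how ``api_route`` registers
            a view under ``/api/1/`` and ``api_doc`` exposes its docstring in
            the browsable API.

            {common_headers}

            :statuscode 200: no error
        """
        return {"pong": name}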
swh-web browse application ^^^^^^^^^^^^^^^^^^^^^^^^^^ * :mod:`swh.web.browse.browseurls`: Utilities to facilitate the registration of browse endpoints. * :mod:`swh.web.browse.urls`: Module that defines the whole URI scheme for the browse endpoints. * :mod:`swh.web.browse.utils`: Utility functions used throughout the browse endpoints implementation. * :mod:`swh.web.browse.views.content`: Implementation of endpoints for browsing contents. * :mod:`swh.web.browse.views.directory`: Implementation of endpoints for browsing directories. * :mod:`swh.web.browse.views.identifiers`: Implementation of endpoints for browsing objects - through persistent identifiers. + through :ref:`persistent-identifiers`. * :mod:`swh.web.browse.views.origin`: Implementation of endpoints for browsing origins. * :mod:`swh.web.browse.views.person`: Implementation of endpoints for browsing persons. * :mod:`swh.web.browse.views.release`: Implementation of endpoints for browsing releases. * :mod:`swh.web.browse.views.revision`: Implementation of endpoints for browsing revisions. * :mod:`swh.web.browse.views.snapshot`: Implementation of endpoints for browsing snapshots. .. _highlightjs: https://highlightjs.org/ diff --git a/docs/uri-scheme-api-identifiers.rst b/docs/uri-scheme-api-identifiers.rst index 8beeb92f..26b7b6a7 100644 --- a/docs/uri-scheme-api-identifiers.rst +++ b/docs/uri-scheme-api-identifiers.rst @@ -1,7 +1,7 @@ Persistent identifiers ---------------------- -.. autosimple:: swh.web.api.views.identifiers.api_resolve_swh_pid +.. autosimple:: swh.web.api.views.identifiers.api_resolve_swhid -.. autosimple:: swh.web.api.views.identifiers.api_swh_pid_known +.. autosimple:: swh.web.api.views.identifiers.api_swhid_known diff --git a/docs/uri-scheme-identifiers.rst b/docs/uri-scheme-identifiers.rst index f46d9e77..b67d772f 100644 --- a/docs/uri-scheme-identifiers.rst +++ b/docs/uri-scheme-identifiers.rst @@ -1,31 +1,30 @@ URI scheme for Software Heritage identifiers ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ A subset of Software Heritage objects (contents, directories, releases and -revisions) can be browsed using :ref:`SWHIDs <persistent-identifiers>`. Those +revisions) can be browsed using :ref:`persistent-identifiers`. Those identifiers are guaranteed to remain stable (persistent) over time. -.. http:get:: /(swh_id)/ +.. http:get:: /(swhid)/ - End point to browse Software Heritage objects using their persistent identifiers. + Endpoint to browse Software Heritage objects using their SWHIDs. A redirection to the appropriate HTML view will be performed when reaching it. - :param string swh_id: a persistent identifier for a Software Heritage - object, or SWHID (see :ref:`persistent identifiers <persistent-identifiers>` to learn more about its syntax) + :param string swhid: a SoftWare Heritage persistent IDentifier, or SWHID + (see :ref:`persistent-identifiers` to learn more about its syntax) :resheader Location: the redirection URL for browsing the Software Heritage object associated with the provided identifier :statuscode 302: no error :statuscode 400: the provided identifier is malformed **Examples:** ..
parsed-literal:: :swh_web:`swh:1:cnt:0ffd12d85cdec70c88e852fc3f5ea9fd342213cd` :swh_web:`swh:1:dir:db990da9af15427455ce7836ce2b8a34b9bf67f5` :swh_web:`swh:1:rel:a9b7e3f1eada90250a6b2ab2ef3e0a846cb16831` :swh_web:`swh:1:rev:f1b94134a4b879bc55c3dacdb496690c8ebdc03f` :swh_web:`swh:1:snp:673156c31a876c5b99b2fe3e89615529de9a3c44` diff --git a/requirements-swh.txt b/requirements-swh.txt index 15ceb1b6..0a4e4b76 100644 --- a/requirements-swh.txt +++ b/requirements-swh.txt @@ -1,7 +1,7 @@ swh.core >= 0.0.95 swh.indexer >= 0.0.171 -swh.model >= 0.3.5 +swh.model >= 0.3.8 swh.scheduler >= 0.1.1 swh.search >= 0.0.4 swh.storage >= 0.8.0 swh.vault >= 0.0.33 diff --git a/swh/web/admin/deposit.py b/swh/web/admin/deposit.py index a7e2eb89..33adbd6f 100644 --- a/swh/web/admin/deposit.py +++ b/swh/web/admin/deposit.py @@ -1,111 +1,111 @@ # Copyright (C) 2018-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import json import requests from django.core.cache import cache from django.conf import settings from django.contrib.admin.views.decorators import staff_member_required from django.core.paginator import Paginator from django.http import HttpResponse from django.shortcuts import render from requests.auth import HTTPBasicAuth import sentry_sdk from swh.web.admin.adminurls import admin_route from swh.web.config import get_config config = get_config()["deposit"] @admin_route(r"deposit/", view_name="admin-deposit") @staff_member_required(view_func=None, login_url=settings.LOGIN_URL) def _admin_origin_save(request): return render(request, "admin/deposit.html") @admin_route(r"deposit/list/", view_name="admin-deposit-list") @staff_member_required(view_func=None, login_url=settings.LOGIN_URL) def _admin_deposit_list(request): table_data = {} table_data["draw"] = int(request.GET["draw"]) deposits_list_url = config["private_api_url"] + "deposits" deposits_list_auth = HTTPBasicAuth( config["private_api_user"], config["private_api_password"] ) try: nb_deposits = requests.get( "%s?page_size=1" % deposits_list_url, auth=deposits_list_auth, timeout=30 ).json()["count"] deposits_data = cache.get("swh-deposit-list") if not deposits_data or deposits_data["count"] != nb_deposits: deposits_data = requests.get( "%s?page_size=%s" % (deposits_list_url, nb_deposits), auth=deposits_list_auth, timeout=30, ).json() cache.set("swh-deposit-list", deposits_data) deposits = deposits_data["results"] search_value = request.GET["search[value]"] if search_value: deposits = [ d for d in deposits if any( search_value.lower() in val for val in [str(v).lower() for v in d.values()] ) ] exclude_pattern = request.GET.get("excludePattern") if exclude_pattern: deposits = [ d for d in deposits if all( exclude_pattern.lower() not in val for val in [str(v).lower() for v in d.values()] ) ] column_order = request.GET["order[0][column]"] field_order = request.GET["columns[%s][name]" % column_order] order_dir = request.GET["order[0][dir]"] deposits = sorted(deposits, key=lambda d: d[field_order] or "") if order_dir == "desc": deposits = list(reversed(deposits)) length = int(request.GET["length"]) page = int(request.GET["start"]) / length + 1 paginator = Paginator(deposits, length) data = paginator.page(page).object_list table_data["recordsTotal"] = deposits_data["count"] table_data["recordsFiltered"] = len(deposits) table_data["data"] = [ { "id": d["id"], "external_id": 
d["external_id"], "reception_date": d["reception_date"], "status": d["status"], "status_detail": d["status_detail"], - "swh_id": d["swh_id"], - "swh_id_context": d["swh_id_context"], + "swhid": d["swh_id"], + "swhid_context": d["swh_id_context"], } for d in data ] except Exception as exc: sentry_sdk.capture_exception(exc) table_data["error"] = ( "An error occurred while retrieving " "the list of deposits !" ) return HttpResponse(json.dumps(table_data), content_type="application/json") diff --git a/swh/web/api/views/identifiers.py b/swh/web/api/views/identifiers.py index 4fda5fe0..4f3ab1a5 100644 --- a/swh/web/api/views/identifiers.py +++ b/swh/web/api/views/identifiers.py @@ -1,120 +1,112 @@ # Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.common import service from swh.web.common.exc import LargePayloadExc from swh.web.common.identifiers import ( - resolve_swh_persistent_id, - get_persistent_identifier, - group_swh_persistent_identifiers, + resolve_swhid, + get_swhid, + group_swhids, ) -@api_route(r"/resolve/(?P.*)/", "api-1-resolve-swh-pid") +@api_route(r"/resolve/(?P.*)/", "api-1-resolve-swhid") @api_doc("/resolve/") @format_docstring() -def api_resolve_swh_pid(request, swh_id): +def api_resolve_swhid(request, swhid): """ - .. http:get:: /api/1/resolve/(swh_id)/ + .. http:get:: /api/1/resolve/(swhid)/ - Resolve a Software Heritage persistent identifier. + Resolve :ref:`persistent-identifiers`. - Try to resolve a provided `persistent identifier - `_ - into an url for browsing the pointed archive object. If the provided - identifier is valid, the existence of the object in the archive - will also be checked. + Try to resolve a provided SWHID into an url for browsing the + pointed archive object. If the provided identifier is valid, + the existence of the object in the archive will also be checked. - :param string swh_id: a Software Heritage persistent identifier + :param string swhid: a SoftWare Heritage persistent IDentifier :>json string browse_url: the url for browsing the pointed object - :>json object metadata: object holding optional parts of the - persistent identifier - :>json string namespace: the persistent identifier namespace + :>json object metadata: object holding optional parts of the SWHID + :>json string namespace: the SWHID namespace :>json string object_id: the hash identifier of the pointed object :>json string object_type: the type of the pointed object - :>json number scheme_version: the scheme version of the persistent - identifier + :>json number scheme_version: the scheme version of the SWHID {common_headers} :statuscode 200: no error - :statuscode 400: an invalid persistent identifier has been provided + :statuscode 400: an invalid SWHID has been provided :statuscode 404: the pointed object does not exist in the archive **Example:** .. 
parsed-literal:: :swh_web_api:`resolve/swh:1:rev:96db9023b881d7cd9f379b0c154650d6c108e9a3;origin=https://github.com/openssl/openssl/` """ - # try to resolve the provided pid - swh_id_resolved = resolve_swh_persistent_id(swh_id) + # try to resolve the provided swhid + swhid_resolved = resolve_swhid(swhid) # id is well-formed, now check that the pointed # object is present in the archive, NotFoundExc # will be raised otherwise - swh_id_parsed = swh_id_resolved["swh_id_parsed"] - object_type = swh_id_parsed.object_type - object_id = swh_id_parsed.object_id + swhid_parsed = swhid_resolved["swhid_parsed"] + object_type = swhid_parsed.object_type + object_id = swhid_parsed.object_id service.lookup_object(object_type, object_id) # id is well-formed and the pointed object exists - swh_id_data = swh_id_parsed._asdict() - swh_id_data["browse_url"] = request.build_absolute_uri( - swh_id_resolved["browse_url"] - ) - return swh_id_data + swhid_data = swhid_parsed._asdict() + swhid_data["browse_url"] = request.build_absolute_uri(swhid_resolved["browse_url"]) + return swhid_data @api_route(r"/known/", "api-1-known", methods=["POST"]) @api_doc("/known/") @format_docstring() -def api_swh_pid_known(request): +def api_swhid_known(request): """ .. http:post:: /api/1/known/ Check if a list of objects are present in the Software Heritage archive. - The objects to check existence must be provided using Software Heritage - `persistent identifiers <https://docs.softwareheritage.org/devel/swh-model/persistent-identifiers.html>`_. + The objects to check for existence must be provided using + :ref:`persistent-identifiers`. - :>json object <swh_pid>: an object whose keys are input persistent - identifiers and values objects with the following keys: + :>json object <swhid>: an object whose keys are input SWHIDs and values + objects with the following keys: * **known (bool)**: whether the object was found {common_headers} :statuscode 200: no error - :statuscode 400: an invalid persistent identifier was provided - :statuscode 413: the input array of persistent identifiers is too large + :statuscode 400: an invalid SWHID was provided + :statuscode 413: the input array of SWHIDs is too large """ limit = 1000 if len(request.data) > limit: raise LargePayloadExc( - "The maximum number of PIDs this endpoint can " "receive is %s" % limit + "The maximum number of SWHIDs this endpoint can receive is %s" % limit ) - persistent_ids = [get_persistent_identifier(pid) for pid in request.data] + swhids = [get_swhid(swhid) for swhid in request.data] - response = {str(pid): {"known": False} for pid in persistent_ids} + response = {str(swhid): {"known": False} for swhid in swhids} - # group pids by their type - pids_by_type = group_swh_persistent_identifiers(persistent_ids) + # group swhids by their type + swhids_by_type = group_swhids(swhids) # search for hashes not present in the storage - missing_hashes = service.lookup_missing_hashes(pids_by_type) + missing_hashes = service.lookup_missing_hashes(swhids_by_type) - for pid in persistent_ids: - if pid.object_id not in missing_hashes: - response[str(pid)]["known"] = True + for swhid in swhids: + if swhid.object_id not in missing_hashes: + response[str(swhid)]["known"] = True return response diff --git a/swh/web/assets/src/bundles/admin/deposit.js b/swh/web/assets/src/bundles/admin/deposit.js index b6e457b3..69479430 100644 --- a/swh/web/assets/src/bundles/admin/deposit.js +++ b/swh/web/assets/src/bundles/admin/deposit.js @@ -1,162 +1,162 @@ /** * Copyright (C) 2018-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License:
GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ function genSwhLink(data, type) { if (type === 'display') { if (data && data.startsWith('swh')) { - let browseUrl = Urls.browse_swh_id(data); + let browseUrl = Urls.browse_swhid(data); return `<a href="${browseUrl}">${data}</a>`; } } return data; } export function initDepositAdmin() { let depositsTable; $(document).ready(() => { $.fn.dataTable.ext.errMode = 'none'; depositsTable = $('#swh-admin-deposit-list') .on('error.dt', (e, settings, techNote, message) => { $('#swh-admin-deposit-list-error').text(message); }) .DataTable({ serverSide: true, processing: true, // let's define the order of table options display // f: (f)ilter // l: (l)ength changing // r: p(r)ocessing // t: (t)able // i: (i)nfo // p: (p)agination // see https://datatables.net/examples/basic_init/dom.html dom: '<<"d-flex justify-content-between align-items-center"f' + '<"#list-exclude">l>rt<"bottom"ip>>', // div#list-exclude is a custom filter added next to dataTable // initialization below through js dom manipulation, see // https://datatables.net/examples/advanced_init/dom_toolbar.html ajax: { url: Urls.admin_deposit_list(), data: d => { d.excludePattern = $('#swh-admin-deposit-list-exclude-filter').val(); } }, columns: [ { data: 'id', name: 'id' }, { - data: 'swh_id_context', - name: 'swh_id_context', + data: 'swhid_context', + name: 'swhid_context', render: (data, type, row) => { if (data && type === 'display') { let originPattern = ';origin='; let originPatternIdx = data.indexOf(originPattern); if (originPatternIdx !== -1) { let originUrl = data.slice(originPatternIdx + originPattern.length); let nextSepPattern = ';'; let nextSepPatternIdx = originUrl.indexOf(nextSepPattern); if (nextSepPatternIdx !== -1) { /* Remove extra context */ originUrl = originUrl.slice(0, nextSepPatternIdx); } return `<a href="${originUrl}">${originUrl}</a>`; } } return data; } }, { data: 'reception_date', name: 'reception_date', render: (data, type, row) => { if (type === 'display') { let date = new Date(data); return date.toLocaleString(); } return data; } }, { data: 'status', name: 'status' }, { data: 'status_detail', name: 'status_detail', render: (data, type, row) => { if (type === 'display' && data) { let text = data; if (typeof data === 'object') { text = JSON.stringify(data, null, 4); } return `
<pre>${text}</pre>
`; } return data; }, orderable: false, visible: false }, { - data: 'swh_id', - name: 'swh_id', + data: 'swhid', + name: 'swhid', render: (data, type, row) => { return genSwhLink(data, type); }, orderable: false, visible: false }, { - data: 'swh_id_context', - name: 'swh_id_context', + data: 'swhid_context', + name: 'swhid_context', render: (data, type, row) => { return genSwhLink(data, type); }, orderable: false, visible: false } ], scrollX: true, scrollY: '50vh', scrollCollapse: true, order: [[0, 'desc']] }); // Some more customization is needed on the table $('div#list-exclude').html(`
Exclude: <input id="swh-admin-deposit-list-exclude-filter" type="search" value="check-deposit" class="form-control form-control-sm" aria-controls="swh-admin-deposit-list">
`); // Adding exclusion pattern update behavior, when typing, update search $('#swh-admin-deposit-list-exclude-filter').keyup(function() { depositsTable.draw(); }); // at last draw the table depositsTable.draw(); }); $('a.toggle-col').on('click', function(e) { e.preventDefault(); var column = depositsTable.column($(this).attr('data-column')); column.visible(!column.visible()); if (column.visible()) { $(this).removeClass('col-hidden'); } else { $(this).addClass('col-hidden'); } }); } diff --git a/swh/web/assets/src/bundles/browse/index.js b/swh/web/assets/src/bundles/browse/index.js index 4ab79b40..bda1363c 100644 --- a/swh/web/assets/src/bundles/browse/index.js +++ b/swh/web/assets/src/bundles/browse/index.js @@ -1,18 +1,18 @@ /** * Copyright (C) 2018 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ // main bundle for the swh-web/browse application import './browse.css'; import './breadcrumbs.css'; import './content.css'; import './snapshot-navigation.css'; export * from './snapshot-navigation'; export * from './origin-search'; export * from './browse-utils'; -export * from './swh-ids-utils'; +export * from './swhid-utils'; diff --git a/swh/web/assets/src/bundles/browse/origin-search.js b/swh/web/assets/src/bundles/browse/origin-search.js index 765a59a0..7ee6c401 100644 --- a/swh/web/assets/src/bundles/browse/origin-search.js +++ b/swh/web/assets/src/bundles/browse/origin-search.js @@ -1,242 +1,242 @@ /** * Copyright (C) 2018-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError} from 'utils/functions'; const limit = 100; let linksPrev = []; let linkNext = null; let linkCurrent = null; let inSearch = false; function parseLinkHeader(s) { let re = /<(.+)>; rel="next"/; return s.match(re)[1]; } function fixTableRowsStyle() { setTimeout(() => { $('#origin-search-results tbody tr').removeAttr('style'); }); } function clearOriginSearchResultsTable() { $('#origin-search-results tbody tr').remove(); } function populateOriginSearchResultsTable(origins) { if (origins.length > 0) { $('#swh-origin-search-results').show(); $('#swh-no-result').hide(); clearOriginSearchResultsTable(); let table = $('#origin-search-results tbody'); for (let [i, origin] of origins.entries()) { let browseUrl = `${Urls.browse_origin()}?origin_url=${origin.url}`; let tableRow = ``; tableRow += `` + '' + 'Checking'; tableRow += '' + `${encodeURI(origin.url)}`; tableRow += `` + '' + 'Checking'; tableRow += ''; table.append(tableRow); // get async latest visit snapshot and update visit status icon let latestSnapshotUrl = Urls.api_1_origin_visit_latest(origin.url); latestSnapshotUrl += '?require_snapshot=true'; fetch(latestSnapshotUrl) .then(response => response.json()) .then(data => { $(`#visit-type-origin-${i}`).html(data.type); $(`#visit-status-origin-${i}`).children().remove(); if (data) { $(`#visit-status-origin-${i}`).html( 'Archived'); } else { $(`#visit-status-origin-${i}`).html( 'Pending archival'); if ($('#swh-filter-empty-visits').prop('checked')) { $(`#origin-${i}`).remove(); } } }); } fixTableRowsStyle(); } else { $('#swh-origin-search-results').hide(); $('#swh-no-result').text('No origins matching the search criteria were 
found.'); $('#swh-no-result').show(); } if (linkNext === null) { $('#origins-next-results-button').addClass('disabled'); } else { $('#origins-next-results-button').removeClass('disabled'); } if (linksPrev.length === 0) { $('#origins-prev-results-button').addClass('disabled'); } else { $('#origins-prev-results-button').removeClass('disabled'); } inSearch = false; setTimeout(() => { window.scrollTo(0, 0); }); } function searchOriginsFirst(searchQueryText, limit) { let baseSearchUrl; let searchMetadata = $('#swh-search-origin-metadata').prop('checked'); if (searchMetadata) { baseSearchUrl = new URL(Urls.api_1_origin_metadata_search(), window.location); baseSearchUrl.searchParams.append('fulltext', searchQueryText); } else { baseSearchUrl = new URL(Urls.api_1_origin_search(searchQueryText), window.location); } let withVisit = $('#swh-search-origins-with-visit').prop('checked'); baseSearchUrl.searchParams.append('limit', limit); baseSearchUrl.searchParams.append('with_visit', withVisit); let searchUrl = baseSearchUrl.toString(); searchOrigins(searchUrl); } function searchOrigins(searchUrl) { clearOriginSearchResultsTable(); $('.swh-loading').addClass('show'); let response = fetch(searchUrl) .then(handleFetchError) .then(resp => { response = resp; return response.json(); }) .then(data => { // Save link to the current results page linkCurrent = searchUrl; // Save link to the next results page. linkNext = null; if (response.headers.has('Link')) { let parsedLink = parseLinkHeader(response.headers.get('Link')); if (parsedLink !== undefined) { linkNext = parsedLink; } } // linksPrev is updated by the caller, which is the one to know if // we're going forward or backward in the pages. $('.swh-loading').removeClass('show'); populateOriginSearchResultsTable(data); }) .catch(response => { $('.swh-loading').removeClass('show'); inSearch = false; $('#swh-origin-search-results').hide(); $('#swh-no-result').text(`Error ${response.status}: ${response.statusText}`); $('#swh-no-result').show(); }); } function doSearch() { $('#swh-no-result').hide(); let searchQueryText = $('#origins-url-patterns').val(); inSearch = true; if (searchQueryText.startsWith('swh:')) { - // searchQueryText may be a PID so sending search queries to PID resolve endpoint - let resolvePidUrl = Urls.api_1_resolve_swh_pid(searchQueryText); - fetch(resolvePidUrl) + // searchQueryText may be a SWHID, so send the query to the SWHID resolve endpoint + let resolveSWHIDUrl = Urls.api_1_resolve_swhid(searchQueryText); + fetch(resolveSWHIDUrl) .then(handleFetchError) .then(response => response.json()) .then(data => { - // pid has been successfully resolved, + // SWHID has been successfully resolved, // so redirect to browse page window.location = data.browse_url; }) .catch(response => { // display a useful error message if the input - // looks like a swh pid + // looks like a SWHID response.json().then(data => { $('#swh-origin-search-results').hide(); $('.swh-search-pagination').hide(); $('#swh-no-result').text(data.reason); $('#swh-no-result').show(); }); }); } else { // otherwise, proceed with origins search $('#swh-origin-search-results').show(); $('.swh-search-pagination').show(); searchOriginsFirst(searchQueryText, limit); } } export function initOriginSearch() { $(document).ready(() => { $('#swh-search-origins').submit(event => { event.preventDefault(); let searchQueryText = $('#origins-url-patterns').val().trim(); let withVisit = $('#swh-search-origins-with-visit').prop('checked'); let withContent = $('#swh-filter-empty-visits').prop('checked'); let searchMetadata = $('#swh-search-origin-metadata').prop('checked'); let
queryParameters = new URLSearchParams(); queryParameters.append('q', searchQueryText); if (withVisit) { queryParameters.append('with_visit', withVisit); } if (withContent) { queryParameters.append('with_content', withContent); } if (searchMetadata) { queryParameters.append('search_metadata', searchMetadata); } // Update the url, triggering page reload and effective search window.location = `${Urls.browse_search()}?${queryParameters.toString()}`; }); $('#origins-next-results-button').click(event => { if ($('#origins-next-results-button').hasClass('disabled') || inSearch) { return; } inSearch = true; linksPrev.push(linkCurrent); searchOrigins(linkNext); event.preventDefault(); }); $('#origins-prev-results-button').click(event => { if ($('#origins-prev-results-button').hasClass('disabled') || inSearch) { return; } inSearch = true; searchOrigins(linksPrev.pop()); event.preventDefault(); }); let urlParams = new URLSearchParams(window.location.search); let query = urlParams.get('q'); let withVisit = urlParams.has('with_visit'); let withContent = urlParams.has('with_content'); let searchMetadata = urlParams.has('search_metadata'); if (query) { $('#origins-url-patterns').val(query); $('#swh-search-origins-with-visit').prop('checked', withVisit); $('#swh-filter-empty-visits').prop('checked', withContent); $('#swh-search-origin-metadata').prop('checked', searchMetadata); doSearch(); } }); } diff --git a/swh/web/assets/src/bundles/browse/swh-ids-utils.js b/swh/web/assets/src/bundles/browse/swhid-utils.js similarity index 85% rename from swh/web/assets/src/bundles/browse/swh-ids-utils.js rename to swh/web/assets/src/bundles/browse/swhid-utils.js index 876cfd67..72ca002d 100644 --- a/swh/web/assets/src/bundles/browse/swh-ids-utils.js +++ b/swh/web/assets/src/bundles/browse/swhid-utils.js @@ -1,120 +1,120 @@ /** * Copyright (C) 2018-2019 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import ClipboardJS from 'clipboard'; import 'thirdparty/jquery.tabSlideOut/jquery.tabSlideOut'; import 'thirdparty/jquery.tabSlideOut/jquery.tabSlideOut.css'; import {BREAKPOINT_SM} from 'utils/constants'; export function swhIdObjectTypeToggled(event) { event.preventDefault(); $(event.target).tab('show'); } export function swhIdContextOptionToggled(event) { event.stopPropagation(); - let swhIdElt = $(event.target).closest('.swh-id-ui').find('.swh-id'); + let swhIdElt = $(event.target).closest('.swhid-ui').find('.swhid'); let swhIdWithContext = $(event.target).data('swhid-with-context'); let currentSwhId = swhIdElt.text(); if ($(event.target).prop('checked')) { currentSwhId = swhIdWithContext; } else { const pos = currentSwhId.indexOf(';'); if (pos !== -1) { currentSwhId = currentSwhId.slice(0, pos); } } swhIdElt.text(currentSwhId); swhIdElt.attr('href', '/' + currentSwhId + '/'); addLinesInfo(); } function addLinesInfo() { - let swhIdElt = $('#swh-id-tab-content').find('.swh-id'); + let swhIdElt = $('#swhid-tab-content').find('.swhid'); let currentSwhId = swhIdElt.text(); let lines = []; let linesPart = ';lines='; let linesRegexp = new RegExp(/L(\d+)/g); let line = linesRegexp.exec(window.location.hash); while (line) { lines.push(parseInt(line[1])); line = linesRegexp.exec(window.location.hash); } if (lines.length > 0) { linesPart += lines[0]; } if (lines.length > 1) { linesPart += '-' + lines[1]; } - if 
($('#swh-id-context-option-content').prop('checked')) { + if ($('#swhid-context-option-content').prop('checked')) { currentSwhId = currentSwhId.replace(/;lines=\d+-*\d*/g, ''); if (lines.length > 0) { currentSwhId += linesPart; } swhIdElt.text(currentSwhId); swhIdElt.attr('href', '/' + currentSwhId + '/'); } } $(document).ready(() => { - new ClipboardJS('.btn-swh-id-copy', { + new ClipboardJS('.btn-swhid-copy', { text: trigger => { - let swhId = $(trigger).closest('.swh-id-ui').find('.swh-id').text(); + let swhId = $(trigger).closest('.swhid-ui').find('.swhid').text(); return swhId; } }); - new ClipboardJS('.btn-swh-id-url-copy', { + new ClipboardJS('.btn-swhid-url-copy', { text: trigger => { - let swhId = $(trigger).closest('.swh-id-ui').find('.swh-id').text(); + let swhId = $(trigger).closest('.swhid-ui').find('.swhid').text(); return window.location.origin + '/' + swhId + '/'; } }); if (window.innerWidth * 0.7 > 1000) { $('#swh-identifiers').css('width', '1000px'); } let tabSlideOptions = { tabLocation: 'right', clickScreenToCloseFilters: ['.ui-slideouttab-panel', '.modal'], offset: function() { const width = $(window).width(); if (width < BREAKPOINT_SM) { return '250px'; } else { return '200px'; } } }; // ensure tab scrolling on small screens if (window.innerHeight < 600 || window.innerWidth < 500) { tabSlideOptions['otherOffset'] = '20px'; } // initiate the sliding identifiers tab $('#swh-identifiers').tabSlideOut(tabSlideOptions); // set the tab visible once the close animation is terminated $('#swh-identifiers').css('display', 'block'); - $('.swh-id-context-option').trigger('click'); + $('.swhid-context-option').trigger('click'); // highlighted code lines changed $(window).on('hashchange', () => { addLinesInfo(); }); // highlighted code lines removed $('body').click(() => { addLinesInfo(); }); }); diff --git a/swh/web/assets/src/bundles/webapp/badges.js b/swh/web/assets/src/bundles/webapp/badges.js index 0a2a148a..3e14a8bd 100644 --- a/swh/web/assets/src/bundles/webapp/badges.js +++ b/swh/web/assets/src/bundles/webapp/badges.js @@ -1,49 +1,49 @@ /** * Copyright (C) 2019-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ -export function showBadgeInfoModal(objectType, objectPid) { +export function showBadgeInfoModal(objectType, objectSWHID) { let badgeImageUrl; let badgeLinkUrl; if (objectType === 'origin') { - badgeImageUrl = Urls.swh_badge(objectType, objectPid); - badgeLinkUrl = `${Urls.browse_origin()}?origin_url=${objectPid}`; + badgeImageUrl = Urls.swh_badge(objectType, objectSWHID); + badgeLinkUrl = `${Urls.browse_origin()}?origin_url=${objectSWHID}`; } else { - const pos = objectPid.indexOf(';'); + const pos = objectSWHID.indexOf(';'); if (pos !== -1) { - badgeImageUrl = Urls.swh_badge_pid(objectPid.slice(0, pos)); + badgeImageUrl = Urls.swh_badge_swhid(objectSWHID.slice(0, pos)); } else { - badgeImageUrl = Urls.swh_badge_pid(objectPid); + badgeImageUrl = Urls.swh_badge_swhid(objectSWHID); } - badgeLinkUrl = Urls.browse_swh_id(objectPid); + badgeLinkUrl = Urls.browse_swhid(objectSWHID); } let urlPrefix = `${window.location.protocol}//${window.location.hostname}`; if (window.location.port) { urlPrefix += `:${window.location.port}`; } const absoluteBadgeImageUrl = `${urlPrefix}${badgeImageUrl}`; const absoluteBadgeLinkUrl = `${urlPrefix}${badgeLinkUrl}`; const html = `
<a href="${absoluteBadgeLinkUrl}">
     <img src="${absoluteBadgeImageUrl}">
 </a>
[![SWH](${absoluteBadgeImageUrl})](${absoluteBadgeLinkUrl})
.. image:: ${absoluteBadgeImageUrl}
     :target: ${absoluteBadgeLinkUrl}
`; swh.webapp.showModalHtml('Software Heritage badge integration', html); } diff --git a/swh/web/assets/src/bundles/webapp/webapp-utils.js b/swh/web/assets/src/bundles/webapp/webapp-utils.js index 76c01ed1..0c97bc6d 100644 --- a/swh/web/assets/src/bundles/webapp/webapp-utils.js +++ b/swh/web/assets/src/bundles/webapp/webapp-utils.js @@ -1,349 +1,349 @@ /** * Copyright (C) 2018-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import objectFitImages from 'object-fit-images'; import {selectText} from 'utils/functions'; import {BREAKPOINT_MD} from 'utils/constants'; let collapseSidebar = false; let previousSidebarState = localStorage.getItem('remember.lte.pushmenu'); if (previousSidebarState !== undefined) { collapseSidebar = previousSidebarState === 'sidebar-collapse'; } $(document).on('DOMContentLoaded', () => { // set state to collapsed on smaller devices if ($(window).width() < BREAKPOINT_MD) { collapseSidebar = true; } // restore previous sidebar state (collapsed/expanded) if (collapseSidebar) { // hack to avoid animated transition for collapsing sidebar // when loading a page let sidebarTransition = $('.main-sidebar, .main-sidebar:before').css('transition'); let sidebarEltsTransition = $('.sidebar .nav-link p, .main-sidebar .brand-text, .sidebar .user-panel .info').css('transition'); $('.main-sidebar, .main-sidebar:before').css('transition', 'none'); $('.sidebar .nav-link p, .main-sidebar .brand-text, .sidebar .user-panel .info').css('transition', 'none'); $('body').addClass('sidebar-collapse'); $('.swh-words-logo-swh').css('visibility', 'visible'); // restore transitions for user navigation setTimeout(() => { $('.main-sidebar, .main-sidebar:before').css('transition', sidebarTransition); $('.sidebar .nav-link p, .main-sidebar .brand-text, .sidebar .user-panel .info').css('transition', sidebarEltsTransition); }); } }); $(document).on('collapsed.lte.pushmenu', event => { if ($('body').width() >= BREAKPOINT_MD) { $('.swh-words-logo-swh').css('visibility', 'visible'); } }); $(document).on('shown.lte.pushmenu', event => { $('.swh-words-logo-swh').css('visibility', 'hidden'); }); function ensureNoFooterOverflow() { $('body').css('padding-bottom', $('footer').outerHeight() + 'px'); } $(document).ready(() => { // redirect to last browse page if any when clicking on the 'Browse' entry // in the sidebar $(`.swh-browse-link`).click(event => { let lastBrowsePage = sessionStorage.getItem('last-browse-page'); if (lastBrowsePage) { event.preventDefault(); window.location = lastBrowsePage; } }); const mainSideBar = $('.main-sidebar'); function updateSidebarState() { const body = $('body'); if (body.hasClass('sidebar-collapse') && !mainSideBar.hasClass('swh-sidebar-collapsed')) { mainSideBar.removeClass('swh-sidebar-expanded'); mainSideBar.addClass('swh-sidebar-collapsed'); $('.swh-words-logo-swh').css('visibility', 'visible'); } else if (!body.hasClass('sidebar-collapse') && !mainSideBar.hasClass('swh-sidebar-expanded')) { mainSideBar.removeClass('swh-sidebar-collapsed'); mainSideBar.addClass('swh-sidebar-expanded'); $('.swh-words-logo-swh').css('visibility', 'hidden'); } // ensure correct sidebar state when loading a page if (body.hasClass('hold-transition')) { setTimeout(() => { updateSidebarState(); }); } } // set sidebar state after collapse / expand animation mainSideBar.on('transitionend', evt => { 
updateSidebarState(); }); updateSidebarState(); // ensure footer do not overflow main content for mobile devices // or after resizing the browser window ensureNoFooterOverflow(); $(window).resize(function() { ensureNoFooterOverflow(); if ($('body').hasClass('sidebar-collapse') && $('body').width() >= BREAKPOINT_MD) { $('.swh-words-logo-swh').css('visibility', 'visible'); } }); // activate css polyfill 'object-fit: contain' in old browsers objectFitImages(); // reparent the modals to the top navigation div in order to be able // to display them $('.swh-browse-top-navigation').append($('.modal')); let selectedCode = null; function getCodeOrPreEltUnderPointer(e) { let elts = document.elementsFromPoint(e.clientX, e.clientY); for (let elt of elts) { if (elt.nodeName === 'CODE' || elt.nodeName === 'PRE') { return elt; } } return null; } // click handler to set focus on code block for copy $(document).click(e => { selectedCode = getCodeOrPreEltUnderPointer(e); }); function selectCode(event, selectedCode) { if (selectedCode) { let hljsLnCodeElts = $(selectedCode).find('.hljs-ln-code'); if (hljsLnCodeElts.length) { selectText(hljsLnCodeElts[0], hljsLnCodeElts[hljsLnCodeElts.length - 1]); } else { selectText(selectedCode.firstChild, selectedCode.lastChild); } event.preventDefault(); } } // select the whole text of focused code block when user // double clicks or hits Ctrl+A $(document).dblclick(e => { if ((e.ctrlKey || e.metaKey)) { selectCode(e, getCodeOrPreEltUnderPointer(e)); } }); $(document).keydown(e => { if ((e.ctrlKey || e.metaKey) && e.key === 'a') { selectCode(e, selectedCode); } }); // show/hide back-to-top button let scrollThreshold = 0; scrollThreshold += $('.swh-top-bar').height() || 0; scrollThreshold += $('.navbar').height() || 0; $(window).scroll(() => { if ($(window).scrollTop() > scrollThreshold) { $('#back-to-top').css('display', 'block'); } else { $('#back-to-top').css('display', 'none'); } }); // navbar search form submission callback $('#swh-origins-search-top').submit(event => { event.preventDefault(); let searchQueryText = $('#swh-origins-search-top-input').val().trim(); let queryParameters = new URLSearchParams(); queryParameters.append('q', searchQueryText); queryParameters.append('with_visit', true); queryParameters.append('with_content', true); window.location = `${Urls.browse_search()}?${queryParameters.toString()}`; }); }); export function initPage(page) { $(document).ready(() => { // set relevant sidebar link to page active $(`.swh-${page}-item`).addClass('active'); $(`.swh-${page}-link`).addClass('active'); // triggered when unloading the current page $(window).on('unload', () => { // backup current browse page if (page === 'browse') { sessionStorage.setItem('last-browse-page', window.location); } }); }); } export function initHomePage() { $(document).ready(() => { $('.swh-coverage-list').iFrameResize({heightCalculationMethod: 'taggedElement'}); fetch(Urls.stat_counters()) .then(response => response.json()) .then(data => { if (data.stat_counters.content) { $('#swh-contents-count').html(data.stat_counters.content.toLocaleString()); $('#swh-revisions-count').html(data.stat_counters.revision.toLocaleString()); $('#swh-origins-count').html(data.stat_counters.origin.toLocaleString()); $('#swh-directories-count').html(data.stat_counters.directory.toLocaleString()); $('#swh-persons-count').html(data.stat_counters.person.toLocaleString()); $('#swh-releases-count').html(data.stat_counters.release.toLocaleString()); } if (data.stat_counters_history.content) { 
swh.webapp.drawHistoryCounterGraph('#swh-contents-count-history', data.stat_counters_history.content); swh.webapp.drawHistoryCounterGraph('#swh-revisions-count-history', data.stat_counters_history.revision); swh.webapp.drawHistoryCounterGraph('#swh-origins-count-history', data.stat_counters_history.origin); } else { $('#swh-contents-count-history').hide(); $('#swh-revisions-count-history').hide(); $('#swh-origins-count-history').hide(); } }); }); initPage('home'); } export function showModalMessage(title, message) { $('#swh-web-modal-message .modal-title').text(title); $('#swh-web-modal-message .modal-content p').text(message); $('#swh-web-modal-message').modal('show'); } export function showModalConfirm(title, message, callback) { $('#swh-web-modal-confirm .modal-title').text(title); $('#swh-web-modal-confirm .modal-content p').text(message); $('#swh-web-modal-confirm #swh-web-modal-confirm-ok-btn').bind('click', () => { callback(); $('#swh-web-modal-confirm').modal('hide'); $('#swh-web-modal-confirm #swh-web-modal-confirm-ok-btn').unbind('click'); }); $('#swh-web-modal-confirm').modal('show'); } export function showModalHtml(title, html) { $('#swh-web-modal-html .modal-title').text(title); $('#swh-web-modal-html .modal-body').html(html); $('#swh-web-modal-html').modal('show'); } export function addJumpToPagePopoverToDataTable(dataTableElt) { dataTableElt.on('draw.dt', function() { $('.paginate_button.disabled').css('cursor', 'pointer'); $('.paginate_button.disabled').on('click', event => { const pageInfo = dataTableElt.page.info(); let content = '<select class="jump-to-page">'; for (let i = 1; i <= pageInfo.pages; ++i) { let selected = ''; if (i === pageInfo.page + 1) { selected = 'selected'; } content += `<option value="${i}" ${selected}>${i}</option>`; } content += `</select> / ${pageInfo.pages}`; $(event.target).popover({ 'title': 'Jump to page', 'content': content, 'html': true, 'placement': 'top', 'sanitizeFn': swh.webapp.filterXSS }); $(event.target).popover('show'); $('.jump-to-page').on('change', function() { $('.paginate_button.disabled').popover('hide'); const pageNumber = parseInt($(this).val()) - 1; dataTableElt.page(pageNumber).draw('page'); }); }); }); dataTableElt.on('preXhr.dt', () => { $('.paginate_button.disabled').popover('hide'); }); } let swhObjectIcons; export function setSwhObjectIcons(icons) { swhObjectIcons = icons; } export function getSwhObjectIcon(swhObjectType) { return swhObjectIcons[swhObjectType]; } let browsedSwhObjectMetadata = {}; export function setBrowsedSwhObjectMetadata(metadata) { browsedSwhObjectMetadata = metadata; } export function getBrowsedSwhObjectMetadata() { return browsedSwhObjectMetadata; } // This will contain a mapping between an archived object type // and its related SWHID metadata for each object reachable from // the current browse view. 
// SWHID metadata contain the following keys: // * object_type: type of archived object // * object_id: sha1 object identifier -// * swhid: SWH persistent identifier without contextual info -// * swhid_url: URL to resolve SWH persistent identifier without contextual info +// * swhid: SWHID without contextual info +// * swhid_url: URL to resolve SWHID without contextual info // * context: object describing SWHID context -// * swhid_with_context: SWH persistent identifier with contextual info -// * swhid_with_context_url: URL to resolve SWH persistent identifier with contextual info +// * swhid_with_context: SWHID with contextual info +// * swhid_with_context_url: URL to resolve SWHID with contextual info let swhidsContext_ = {}; export function setSwhIdsContext(swhidsContext) { swhidsContext_ = {}; for (let swhidContext of swhidsContext) { swhidsContext_[swhidContext.object_type] = swhidContext; } } export function getSwhIdsContext() { return swhidsContext_; } function setFullWidth(fullWidth) { if (fullWidth) { $('#swh-web-content').removeClass('container'); $('#swh-web-content').addClass('container-fluid'); } else { $('#swh-web-content').removeClass('container-fluid'); $('#swh-web-content').addClass('container'); } localStorage.setItem('swh-web-full-width', JSON.stringify(fullWidth)); $('#swh-full-width-switch').prop('checked', fullWidth); } export function fullWidthToggled(event) { setFullWidth($(event.target).prop('checked')); } export function setContainerFullWidth() { let previousFullWidthState = JSON.parse(localStorage.getItem('swh-web-full-width')); if (previousFullWidthState !== null) { setFullWidth(previousFullWidthState); } } diff --git a/swh/web/assets/src/bundles/webapp/webapp.css b/swh/web/assets/src/bundles/webapp/webapp.css index 5c0d36d6..fc4338aa 100644 --- a/swh/web/assets/src/bundles/webapp/webapp.css +++ b/swh/web/assets/src/bundles/webapp/webapp.css @@ -1,687 +1,687 @@ /** * Copyright (C) 2018-2020 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ html { height: 100%; overflow-x: hidden; scroll-behavior: auto !important; } body { min-height: 100%; margin: 0; position: relative; padding-bottom: 120px; } a:active, a.active { outline: none; } code { background-color: #f9f2f4; } pre code { background-color: transparent; } footer { background-color: #262626; color: #fff; font-size: 0.8rem; position: absolute; bottom: 0; width: 100%; padding-top: 20px; padding-bottom: 20px; } footer a, footer a:visited, footer a:hover { color: #fecd1b; } footer a:hover { text-decoration: underline; } .link-color { color: #fecd1b; } pre { background-color: #f5f5f5; border: 1px solid #ccc; border-radius: 4px; padding: 9.5px; font-size: 0.8rem; } .btn.active { background-color: #e7e7e7; } .card { margin-bottom: 5px !important; overflow-x: auto; } .navbar-brand { padding: 5px; margin-right: 0; } .table { margin-bottom: 0; } .swh-table thead { background-color: #f2f4f5; border-top: 1px solid rgba(0, 0, 0, 0.2); font-weight: normal; } .swh-table-striped th { border-top: none; } .swh-table-striped tbody tr:nth-child(even) { background-color: #f2f4f5; } .swh-table-striped tbody tr:nth-child(odd) { background-color: #fff; } .swh-web-app-link a { text-decoration: none; border: none; } .swh-web-app-link:hover { background-color: #efeff2; } .table > thead > tr > th { border-top: none; border-bottom: 1px solid #e20026; } 
.table > tbody > tr > td { border-style: none; } .sitename .first-word, .sitename .second-word { color: rgba(0, 0, 0, 0.75); font-weight: normal; font-size: 1.2rem; } .sitename .first-word { font-family: 'Alegreya Sans', sans-serif; } .sitename .second-word { font-family: 'Alegreya', serif; } .swh-counter { font-size: 150%; } @media (max-width: 600px) { .swh-counter-container { margin-top: 1rem; } } .swh-http-error { margin: 0 auto; text-align: center; } .swh-http-error-head { color: #2d353c; font-size: 30px; } .swh-http-error-code { bottom: 60%; color: #2d353c; font-size: 96px; line-height: 80px; margin-bottom: 10px !important; } .swh-http-error-desc { font-size: 12px; color: #647788; text-align: center; } .swh-http-error-desc pre { display: inline-block; text-align: left; max-width: 800px; white-space: pre-wrap; } .swh-list-unstyled { list-style: none; } .popover { max-width: 97%; z-index: 40000; } .modal { text-align: center; padding: 0 !important; z-index: 50000; } .modal::before { content: ''; display: inline-block; height: 100%; vertical-align: middle; margin-right: -4px; } .modal-dialog { display: inline-block; text-align: left; vertical-align: middle; } .dropdown-submenu { position: relative; } .dropdown-submenu .dropdown-menu { top: 0; left: -100%; margin-top: -5px; margin-left: -2px; } .dropdown-item:hover, .dropdown-item:focus { background-color: rgba(0, 0, 0, 0.1); } a.dropdown-left::before { content: "\f035e"; font-family: 'Material Design Icons'; display: block; width: 20px; height: 20px; float: left; margin-left: 0; } #swh-navbar { border-top-style: none; border-left-style: none; border-right-style: none; border-bottom-style: solid; border-bottom-width: 5px; border-image: linear-gradient(to right, rgb(226, 0, 38) 0%, rgb(254, 205, 27) 100%) 1 1 1 1; width: 100%; padding: 5px; margin-bottom: 10px; margin-top: 30px; justify-content: normal; flex-wrap: nowrap; height: 72px; overflow: hidden; } #back-to-top { display: none; position: fixed; bottom: 30px; right: 30px; z-index: 10; } #back-to-top a img { display: block; width: 32px; height: 32px; background-size: 32px 32px; text-indent: -999px; overflow: hidden; } .swh-top-bar { direction: ltr; height: 30px; position: fixed; top: 0; left: 0; width: 100%; z-index: 99999; background-color: #262626; color: #fff; text-align: center; font-size: 14px; } .swh-top-bar ul { margin-top: 4px; padding-left: 0; white-space: nowrap; } .swh-top-bar li { display: inline-block; margin-left: 10px; margin-right: 10px; } .swh-top-bar a, .swh-top-bar a:visited { color: white; } .swh-top-bar a.swh-current-site, .swh-top-bar a.swh-current-site:visited { color: #fecd1b; } .swh-position-left { position: absolute; left: 0; } .swh-position-right { position: absolute; right: 0; } .swh-background-gray { background: #efeff2; } .swh-donate-link { border: 1px solid #fecd1b; background-color: #e20026; color: white !important; padding: 3px; border-radius: 3px; } .swh-navbar-content h4 { padding-top: 7px; } .swh-navbar-content .bread-crumbs { display: block; margin-left: -40px; } .swh-navbar-content .bread-crumbs li.bc-no-root { padding-top: 7px; } .main-sidebar { margin-top: 30px; } .content-wrapper { background: none; } .brand-image { max-height: 40px; } .brand-link { padding-top: 18.5px; padding-bottom: 18px; padding-left: 4px; border-bottom: 5px solid #e20026 !important; } .navbar-header a, ul.dropdown-menu a, ul.navbar-nav a, ul.nav-sidebar a { border-bottom-style: none; color: #323232; } .swh-sidebar .nav-link.active { color: #323232 !important; 
background-color: #e7e7e7 !important; } .nav-tabs .nav-link.active { border-top: 3px solid #e20026; } .swh-image-error { width: 80px; height: auto; } @media (max-width: 600px) { .card { min-width: 80%; } .swh-image-error { width: 40px; height: auto; } .swh-donate-link { display: none; } } .form-check-label { padding-top: 4px; } -.swh-id { +.swhid { white-space: pre-wrap; } -.swh-id .swh-id-option { +.swhid .swhid-option { display: inline-block; margin-right: 5px; line-height: 1rem; } .nav-pills .nav-link:not(.active):hover { color: rgba(0, 0, 0, 0.55); } .swh-heading-color { color: #e20026 !important; } .sidebar-mini.sidebar-collapse .main-sidebar:hover { width: 4.6rem; } .sidebar-mini.sidebar-collapse .main-sidebar:hover .user-panel > .info, .sidebar-mini.sidebar-collapse .main-sidebar:hover .nav-sidebar .nav-link p, .sidebar-mini.sidebar-collapse .main-sidebar:hover .brand-text { visibility: hidden !important; } .sidebar .nav-link p, .main-sidebar .brand-text, .sidebar .user-panel .info { transition: none; } .sidebar-mini.sidebar-mini.sidebar-collapse .sidebar { padding-right: 0; } .swh-words-logo { position: absolute; top: 0; left: 0; width: 73px; height: 73px; text-align: center; font-size: 10pt; color: rgba(0, 0, 0, 0.75); } .swh-words-logo:hover { text-decoration: none; } .swh-words-logo-swh { line-height: 1; padding-top: 13px; visibility: hidden; } hr.swh-faded-line { border: 0; height: 1px; background-image: linear-gradient(to left, #f0f0f0, #8c8b8b, #f0f0f0); } /* Ensure that section title with link is colored like standard section title */ .swh-readme h1 a, .swh-readme h2 a, .swh-readme h3 a, .swh-readme h4 a, .swh-readme h5 a, .swh-readme h6 a { color: #e20026; } /* Make list compact in reStructuredText rendering */ .swh-rst li p { margin-bottom: 0; } .swh-readme-txt pre { background: none; border: none; } .swh-coverage-col { padding-left: 10px; padding-right: 10px; } .swh-coverage { height: calc(65px + 1em); padding-top: 0.3rem; border: none; } .swh-coverage a { text-decoration: none; } .swh-coverage-logo { display: block; width: 100%; height: 50px; margin-left: auto; margin-right: auto; object-fit: contain; /* polyfill for old browsers, see https://github.com/bfred-it/object-fit-images */ font-family: 'object-fit: contain;'; } .swh-coverage-list { width: 100%; height: 320px; border: none; } tr.swh-tr-hover-highlight:hover td { background: #ededed; } tr.swh-api-doc-route a { text-decoration: none; } .swh-apidoc .col { margin: 10px; } .swh-apidoc .swh-rst blockquote { border: 0; margin: 0; padding: 0; } a.toggle-col { text-decoration: none; } a.toggle-col.col-hidden { text-decoration: line-through; } .admonition.warning { background: #fcf8e3; border: 1px solid #faebcc; padding: 15px; border-radius: 4px; } .admonition.warning p { margin-bottom: 0; } .admonition.warning .first { font-size: 1.5rem; } .swh-popover { max-height: 50vh; overflow-y: auto; overflow-x: auto; padding: 0; padding-right: 1.4em; } @media screen and (min-width: 768px) { .swh-popover { max-width: 50vw; } } .swh-metadata-table-row { border-top: 1px solid #ddd !important; } .swh-metadata-table-key { min-width: 200px; max-width: 200px; width: 200px; } .swh-metadata-table-value pre { white-space: pre-wrap; } .d3-wrapper { position: relative; height: 0; width: 100%; padding: 0; /* padding-bottom will be overwritten by JavaScript later */ padding-bottom: 100%; } .d3-wrapper > svg { position: absolute; height: 100%; width: 100%; left: 0; top: 0; } div.d3-tooltip { position: absolute; text-align: center; width: auto; 
height: auto; padding: 2px; font: 12px sans-serif; background: white; border: 1px solid black; border-radius: 4px; pointer-events: none; } .page-link { cursor: pointer; } .wrapper { overflow: hidden; } .swh-badge { padding-bottom: 1rem; cursor: pointer; } .swh-badge-html, .swh-badge-md, .swh-badge-rst { white-space: pre-wrap; } /* Material Design icons alignment tweaks */ .mdi { display: inline-block; } .mdi-camera { transform: translateY(1px); } .mdi-source-commit { transform: translateY(2px); } /* To set icons at a fixed width. Great to use when different icon widths throw off alignment. Courtesy of Font Awesome. */ .mdi-fw { text-align: center; width: 1.25em; } .main-header .nav-link { height: inherit; } .nav-sidebar .nav-header:not(:first-of-type) { padding-top: 1rem; } .nav-sidebar .nav-link { padding-top: 0; padding-bottom: 0; } .nav-sidebar > .nav-item .nav-icon { vertical-align: sub; } .swh-search-icon { line-height: 1rem; vertical-align: middle; } .swh-search-navbar { position: absolute; top: 0.7rem; right: 15rem; z-index: 50000; width: 500px; } .sidebar-collapse .swh-search-navbar { right: 4rem; } diff --git a/swh/web/browse/identifiers.py b/swh/web/browse/identifiers.py index 4bd6e3f7..18458204 100644 --- a/swh/web/browse/identifiers.py +++ b/swh/web/browse/identifiers.py @@ -1,24 +1,23 @@ # Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.shortcuts import redirect -from swh.web.common.identifiers import resolve_swh_persistent_id +from swh.web.common.identifiers import resolve_swhid from swh.web.common.exc import handle_view_exception -def swh_id_browse(request, swh_id): +def swhid_browse(request, swhid): """ - Django view enabling to browse the archive using - :ref:`SWHIDs <persistent-identifiers>`. + Django view enabling to browse the archive using :ref:`SWHIDs <persistent-identifiers>`. - The url that points to it is :http:get:`/(swh_id)/`. + The url that points to it is :http:get:`/(swhid)/`. """ try: - swh_id_resolved = resolve_swh_persistent_id(swh_id, query_params=request.GET) + swhid_resolved = resolve_swhid(swhid, query_params=request.GET) except Exception as exc: return handle_view_exception(request, exc) - return redirect(swh_id_resolved["browse_url"]) + return redirect(swhid_resolved["browse_url"]) diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py index 950ed202..a6e262e2 100644 --- a/swh/web/browse/snapshot_context.py +++ b/swh/web/browse/snapshot_context.py @@ -1,1465 +1,1465 @@ # Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information # Utility module for browsing the archive in a snapshot context. 
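The `swhid_browse` view above hands the raw identifier to `resolve_swhid`, which accepts both a core SWHID and one carrying contextual qualifiers separated by semicolons. A minimal, dependency-free sketch of that splitting, assuming the `swh:<version>:<type>:<id>[;key=value...]` shape used throughout this change; this is an illustration only, not the actual parser from `swh.model.identifiers`, and the example identifier is made up:

```python
# Illustrative only: split a SWHID string into core fields and qualifiers.
# The real parsing lives in swh.model.identifiers; the example value is made up.
from typing import Dict, Tuple

def split_swhid(swhid: str) -> Tuple[str, str, Dict[str, str]]:
    core, *qualifier_parts = swhid.split(";")
    prefix, version, object_type, object_id = core.split(":", 3)
    if prefix != "swh" or version != "1":
        raise ValueError(f"not a v1 SWHID: {swhid!r}")
    qualifiers = {}
    for part in qualifier_parts:
        key, _, value = part.partition("=")
        qualifiers[key] = value
    return object_type, object_id, qualifiers

# Hypothetical identifier with an origin qualifier:
print(split_swhid("swh:1:dir:0123abcd;origin=https://example.org/repo"))
# ('dir', '0123abcd', {'origin': 'https://example.org/repo'})
```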
from collections import defaultdict from copy import copy from typing import Any, Dict, List, Optional, Union, Tuple from django.core.cache import cache from django.shortcuts import render from django.template.defaultfilters import filesizeformat from django.utils.html import escape import sentry_sdk from swh.model.identifiers import ( - persistent_identifier, + swhid, snapshot_identifier, CONTENT, DIRECTORY, REVISION, RELEASE, SNAPSHOT, ) from swh.web.browse.utils import ( get_directory_entries, gen_directory_link, gen_revision_link, gen_revision_url, request_content, gen_content_link, prepare_content_for_display, content_display_max_size, format_log_entries, gen_revision_log_link, gen_release_link, get_readme_to_display, gen_snapshot_link, ) from swh.web.common import service, highlightjs from swh.web.common.exc import handle_view_exception, NotFoundExc, BadInputExc from swh.web.common.identifiers import get_swhids_info from swh.web.common.origin_visits import get_origin_visit from swh.web.common.typing import ( OriginInfo, SnapshotBranchInfo, SnapshotReleaseInfo, SnapshotContext, ContentMetadata, DirectoryMetadata, SWHObjectInfo, ) from swh.web.common.utils import ( reverse, gen_path_info, format_utc_iso_date, swh_object_icons, ) from swh.web.config import get_config _empty_snapshot_id = snapshot_identifier({"branches": {}}) def _get_branch(branches, branch_name, snapshot_id): """ Utility function to get a specific branch from a branches list. Its purpose is to get the default HEAD branch as some software origin (e.g those with svn type) does not have it. In that latter case, check if there is a master branch instead and returns it. """ filtered_branches = [b for b in branches if b["name"] == branch_name] if filtered_branches: return filtered_branches[0] elif branch_name == "HEAD": filtered_branches = [b for b in branches if b["name"].endswith("master")] if filtered_branches: return filtered_branches[0] elif branches: return branches[0] else: # case where a large branches list has been truncated snp = service.lookup_snapshot( snapshot_id, branches_from=branch_name, branches_count=1, target_types=["revision", "alias"], ) snp_branch, _ = process_snapshot_branches(snp) if snp_branch and snp_branch[0]["name"] == branch_name: branches.append(snp_branch[0]) return snp_branch[0] def _get_release(releases, release_name, snapshot_id): """ Utility function to get a specific release from a releases list. Returns None if the release can not be found in the list. """ filtered_releases = [r for r in releases if r["name"] == release_name] if filtered_releases: return filtered_releases[0] else: # case where a large branches list has been truncated try: # git origins have specific branches for releases snp = service.lookup_snapshot( snapshot_id, branches_from=f"refs/tags/{release_name}", branches_count=1, target_types=["release"], ) except NotFoundExc: snp = service.lookup_snapshot( snapshot_id, branches_from=release_name, branches_count=1, target_types=["release"], ) _, snp_release = process_snapshot_branches(snp) if snp_release and snp_release[0]["name"] == release_name: releases.append(snp_release[0]) return snp_release[0] def _branch_not_found( branch_type, branch, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id ): """ Utility function to raise an exception when a specified branch/release can not be found. 
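`_get_branch` above implements a small fallback chain: look for the requested branch by name, and when `HEAD` is absent (as for some svn-type origins) fall back to a branch ending in `master`, then to the first branch available, before finally querying the snapshot again. A standalone sketch of that chain over a simplified branch list, for illustration:

```python
# Simplified model of the fallback implemented by _get_branch above;
# branches are plain dicts here instead of the service's richer records.
def pick_branch(branches, branch_name="HEAD"):
    named = [b for b in branches if b["name"] == branch_name]
    if named:
        return named[0]
    if branch_name == "HEAD":
        masters = [b for b in branches if b["name"].endswith("master")]
        if masters:
            return masters[0]
        if branches:
            return branches[0]
    return None  # the real code then asks the snapshot for more branches

branches = [{"name": "refs/heads/dev"}, {"name": "refs/heads/master"}]
assert pick_branch(branches)["name"] == "refs/heads/master"
```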
""" if branch_type == "branch": branch_type = "Branch" branch_type_plural = "branches" target_type = "revision" else: branch_type = "Release" branch_type_plural = "releases" target_type = "release" if snapshot_id and snapshot_sizes[target_type] == 0: msg = "Snapshot with id %s has an empty list" " of %s!" % ( snapshot_id, branch_type_plural, ) elif snapshot_id: msg = "%s %s for snapshot with id %s" " not found!" % ( branch_type, branch, snapshot_id, ) elif visit_id and snapshot_sizes[target_type] == 0: msg = ( "Origin with url %s" " for visit with id %s has an empty list" " of %s!" % (origin_info["url"], visit_id, branch_type_plural) ) elif visit_id: msg = ( "%s %s associated to visit with" " id %s for origin with url %s" " not found!" % (branch_type, branch, visit_id, origin_info["url"]) ) elif snapshot_sizes[target_type] == 0: msg = ( "Origin with url %s" " for visit with timestamp %s has an empty list" " of %s!" % (origin_info["url"], timestamp, branch_type_plural) ) else: msg = ( "%s %s associated to visit with" " timestamp %s for origin with " "url %s not found!" % (branch_type, branch, timestamp, origin_info["url"]) ) raise NotFoundExc(escape(msg)) def process_snapshot_branches( snapshot: Dict[str, Any] ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo]]: """ Process a dictionary describing snapshot branches: extract those targeting revisions and releases, put them in two different lists, then sort those lists in lexicographical order of the branches' names. Args: snapshot: A dict describing a snapshot as returned for instance by :func:`swh.web.common.service.lookup_snapshot` Returns: A tuple whose first member is the sorted list of branches targeting revisions and second member the sorted list of branches targeting releases """ snapshot_branches = snapshot["branches"] branches: Dict[str, SnapshotBranchInfo] = {} branch_aliases: Dict[str, str] = {} releases: Dict[str, SnapshotReleaseInfo] = {} revision_to_branch = defaultdict(set) revision_to_release = defaultdict(set) release_to_branch = defaultdict(set) for branch_name, target in snapshot_branches.items(): if not target: # FIXME: display branches with an unknown target anyway continue target_id = target["target"] target_type = target["target_type"] if target_type == "revision": branches[branch_name] = SnapshotBranchInfo( name=branch_name, revision=target_id, date=None, directory=None, message=None, url=None, ) revision_to_branch[target_id].add(branch_name) elif target_type == "release": release_to_branch[target_id].add(branch_name) elif target_type == "alias": branch_aliases[branch_name] = target_id # FIXME: handle pointers to other object types def _add_release_info(branch, release): releases[branch] = SnapshotReleaseInfo( name=release["name"], branch_name=branch, date=format_utc_iso_date(release["date"]), directory=None, id=release["id"], message=release["message"], target_type=release["target_type"], target=release["target"], url=None, ) def _add_branch_info(branch, revision): branches[branch] = SnapshotBranchInfo( name=branch, revision=revision["id"], directory=revision["directory"], date=format_utc_iso_date(revision["date"]), message=revision["message"], url=None, ) releases_info = service.lookup_release_multiple(release_to_branch.keys()) for release in releases_info: branches_to_update = release_to_branch[release["id"]] for branch in branches_to_update: _add_release_info(branch, release) if release["target_type"] == "revision": revision_to_release[release["target"]].update(branches_to_update) revisions = 
service.lookup_revision_multiple( set(revision_to_branch.keys()) | set(revision_to_release.keys()) ) for revision in revisions: if not revision: continue for branch in revision_to_branch[revision["id"]]: _add_branch_info(branch, revision) for release in revision_to_release[revision["id"]]: releases[release]["directory"] = revision["directory"] for branch_alias, branch_target in branch_aliases.items(): if branch_target in branches: branches[branch_alias] = copy(branches[branch_target]) else: snp = service.lookup_snapshot( snapshot["id"], branches_from=branch_target, branches_count=1 ) if snp and branch_target in snp["branches"]: if snp["branches"][branch_target] is None: continue target_type = snp["branches"][branch_target]["target_type"] target = snp["branches"][branch_target]["target"] if target_type == "revision": branches[branch_alias] = snp["branches"][branch_target] revision = service.lookup_revision(target) _add_branch_info(branch_alias, revision) elif target_type == "release": release = service.lookup_release(target) _add_release_info(branch_alias, release) if branch_alias in branches: branches[branch_alias]["name"] = branch_alias ret_branches = list(sorted(branches.values(), key=lambda b: b["name"])) ret_releases = list(sorted(releases.values(), key=lambda b: b["name"])) return ret_branches, ret_releases def get_snapshot_content( snapshot_id: str, ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo]]: """Returns the lists of branches and releases associated to a swh snapshot. That list is put in cache in order to speedup the navigation in the swh-web/browse ui. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. Args: snapshot_id: hexadecimal representation of the snapshot identifier Returns: A tuple with two members. The first one is a list of dict describing the snapshot branches. The second one is a list of dict describing the snapshot releases. Raises: NotFoundExc if the snapshot does not exist """ cache_entry_id = "swh_snapshot_%s" % snapshot_id cache_entry = cache.get(cache_entry_id) if cache_entry: return cache_entry["branches"], cache_entry["releases"] branches: List[SnapshotBranchInfo] = [] releases: List[SnapshotReleaseInfo] = [] snapshot_content_max_size = get_config()["snapshot_content_max_size"] if snapshot_id: snapshot = service.lookup_snapshot( snapshot_id, branches_count=snapshot_content_max_size ) branches, releases = process_snapshot_branches(snapshot) cache.set(cache_entry_id, {"branches": branches, "releases": releases,}) return branches, releases def get_origin_visit_snapshot( origin_info: OriginInfo, visit_ts: Optional[Union[int, str]] = None, visit_id: Optional[int] = None, snapshot_id: Optional[str] = None, ) -> Tuple[List[SnapshotBranchInfo], List[SnapshotReleaseInfo]]: """Returns the lists of branches and releases associated to an origin for a given visit. The visit is expressed by either: * a snapshot identifier * a timestamp, if no visit with that exact timestamp is found, the closest one from the provided timestamp will be used. If no visit parameter is provided, it returns the list of branches found for the latest visit. That list is put in cache in order to speedup the navigation in the swh-web/browse ui. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. 
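`get_snapshot_content` above memoizes the processed branch and release lists in the Django cache under a key derived from the snapshot id, so repeated navigation within the same snapshot skips the storage lookup. The cache-aside pattern it uses, reduced to a standalone sketch where a plain dict stands in for `django.core.cache` and a stub for the storage call:

```python
# Cache-aside sketch of get_snapshot_content's memoization; a dict stands in
# for django.core.cache and fetch_snapshot() for the storage lookup.
_cache = {}

def fetch_snapshot(snapshot_id):
    # placeholder for service.lookup_snapshot + process_snapshot_branches
    return {"branches": [f"branch-of-{snapshot_id}"], "releases": []}

def get_snapshot_content_cached(snapshot_id):
    key = f"swh_snapshot_{snapshot_id}"  # same key scheme as the view code
    entry = _cache.get(key)
    if entry is None:
        entry = fetch_snapshot(snapshot_id)
        _cache[key] = entry
    return entry["branches"], entry["releases"]

get_snapshot_content_cached("abcd")                # miss: computes and stores
branches, _ = get_snapshot_content_cached("abcd")  # hit: served from cache
assert branches == ["branch-of-abcd"]
```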
Args: origin_info: a dict filled with origin information visit_ts: an ISO date string or Unix timestamp to parse visit_id: visit id for disambiguation in case several visits have the same timestamp snapshot_id: if provided, visit associated to the snapshot will be processed Returns: A tuple with two members. The first one is a list of dict describing the origin branches for the given visit. The second one is a list of dict describing the origin releases for the given visit. Raises: NotFoundExc if the origin or its visit are not found """ visit_info = get_origin_visit(origin_info, visit_ts, visit_id, snapshot_id) return get_snapshot_content(visit_info["snapshot"]) def get_snapshot_context( snapshot_id: Optional[str] = None, origin_url: Optional[str] = None, timestamp: Optional[str] = None, visit_id: Optional[int] = None, branch_name: Optional[str] = None, release_name: Optional[str] = None, revision_id: Optional[str] = None, path: Optional[str] = None, browse_context: str = "directory", ) -> SnapshotContext: """ Utility function to compute relevant information when navigating the archive in a snapshot context. The snapshot is either referenced by its id or it will be retrieved from an origin visit. Args: snapshot_id: hexadecimal representation of a snapshot identifier origin_url: an origin_url timestamp: a datetime string for retrieving the closest visit of the origin visit_id: optional visit id for disambiguation in case of several visits with the same timestamp branch_name: optional branch name set when browsing the snapshot in that scope (will default to "HEAD" if not provided) release_name: optional release name set when browsing the snapshot in that scope revision_id: optional revision identifier set when browsing the snapshot in that scope path: optional path of the object currently browsed in the snapshot browse_context: indicates which type of object is currently browsed Returns: A dict filled with snapshot context information. Raises: swh.web.common.exc.NotFoundExc: if no snapshot is found for the visit of an origin. 
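In the function body that follows, the snapshot SWHID is built with `swhid("snapshot", snapshot_id)`, the `swh.model.identifiers` helper this change renames `persistent_identifier` to. A dependency-free sketch of the string it produces; the abbreviation table and example id are illustrative, not taken from the library:

```python
# Sketch of the core-SWHID string produced by swh.model.identifiers.swhid;
# object ids are 40-character hex sha1_git values in the real archive, and
# the "cnt" entry is assumed here by analogy with the other object types.
_ABBREV = {"snapshot": "snp", "revision": "rev", "release": "rel",
           "directory": "dir", "content": "cnt"}

def make_core_swhid(object_type: str, object_id: str) -> str:
    return f"swh:1:{_ABBREV[object_type]}:{object_id}"

assert make_core_swhid("snapshot", "deadbeef" * 5) == \
    "swh:1:snp:" + "deadbeef" * 5
```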
""" assert origin_url is not None or snapshot_id is not None origin_info = None visit_info = None url_args = {} query_params: Dict[str, Any] = {} origin_visits_url = None if origin_url: if visit_id is not None: query_params["visit_id"] = visit_id elif snapshot_id is not None: query_params["snapshot"] = snapshot_id origin_info = service.lookup_origin({"url": origin_url}) visit_info = get_origin_visit(origin_info, timestamp, visit_id, snapshot_id) formatted_date = format_utc_iso_date(visit_info["date"]) visit_info["formatted_date"] = formatted_date snapshot_id = visit_info["snapshot"] if not snapshot_id: raise NotFoundExc( "No snapshot associated to the visit of origin " "%s on %s" % (escape(origin_url), formatted_date) ) # provided timestamp is not necessarily equals to the one # of the retrieved visit, so get the exact one in order # to use it in the urls generated below if timestamp: timestamp = visit_info["date"] branches, releases = get_origin_visit_snapshot( origin_info, timestamp, visit_id, snapshot_id ) query_params["origin_url"] = origin_info["url"] origin_visits_url = reverse( "browse-origin-visits", query_params={"origin_url": origin_info["url"]} ) if timestamp is not None: query_params["timestamp"] = format_utc_iso_date( timestamp, "%Y-%m-%dT%H:%M:%SZ" ) visit_url = reverse("browse-origin-directory", query_params=query_params) visit_info["url"] = visit_url branches_url = reverse("browse-origin-branches", query_params=query_params) releases_url = reverse("browse-origin-releases", query_params=query_params) else: assert snapshot_id is not None branches, releases = get_snapshot_content(snapshot_id) url_args = {"snapshot_id": snapshot_id} branches_url = reverse("browse-snapshot-branches", url_args=url_args) releases_url = reverse("browse-snapshot-releases", url_args=url_args) releases = list(reversed(releases)) snapshot_sizes = service.lookup_snapshot_sizes(snapshot_id) is_empty = sum(snapshot_sizes.values()) == 0 - swh_snp_id = persistent_identifier("snapshot", snapshot_id) + swh_snp_id = swhid("snapshot", snapshot_id) if visit_info: timestamp = format_utc_iso_date(visit_info["date"]) if origin_info: browse_view_name = f"browse-origin-{browse_context}" else: browse_view_name = f"browse-snapshot-{browse_context}" release_id = None root_directory = None snapshot_total_size = sum(snapshot_sizes.values()) if path is not None: query_params["path"] = path if snapshot_total_size and revision_id is not None: revision = service.lookup_revision(revision_id) root_directory = revision["directory"] branches.append( SnapshotBranchInfo( name=revision_id, revision=revision_id, directory=root_directory, date=revision["date"], message=revision["message"], url=None, ) ) branch_name = revision_id query_params["revision"] = revision_id elif snapshot_total_size and release_name: release = _get_release(releases, release_name, snapshot_id) try: root_directory = release["directory"] revision_id = release["target"] release_id = release["id"] query_params["release"] = release_name except Exception as exc: sentry_sdk.capture_exception(exc) _branch_not_found( "release", release_name, snapshot_id, snapshot_sizes, origin_info, timestamp, visit_id, ) elif snapshot_total_size: if branch_name: query_params["branch"] = branch_name branch = _get_branch(branches, branch_name or "HEAD", snapshot_id) try: branch_name = branch["name"] revision_id = branch["revision"] root_directory = branch["directory"] except Exception as exc: sentry_sdk.capture_exception(exc) _branch_not_found( "branch", branch_name, snapshot_id, 
snapshot_sizes, origin_info, timestamp, visit_id, ) for b in branches: branch_query_params = dict(query_params) branch_query_params.pop("release", None) if b["name"] != b["revision"]: branch_query_params.pop("revision", None) branch_query_params["branch"] = b["name"] b["url"] = reverse( browse_view_name, url_args=url_args, query_params=branch_query_params ) for r in releases: release_query_params = dict(query_params) release_query_params.pop("branch", None) release_query_params.pop("revision", None) release_query_params["release"] = r["name"] r["url"] = reverse( browse_view_name, url_args=url_args, query_params=release_query_params, ) revision_info = None if revision_id: try: revision_info = service.lookup_revision(revision_id) except NotFoundExc: pass else: revision_info["date"] = format_utc_iso_date(revision_info["date"]) revision_info["committer_date"] = format_utc_iso_date( revision_info["committer_date"] ) if revision_info["message"]: message_lines = revision_info["message"].split("\n") revision_info["message_header"] = message_lines[0] else: revision_info["message_header"] = "" snapshot_context = SnapshotContext( branch=branch_name, branches=branches, branches_url=branches_url, is_empty=is_empty, origin_info=origin_info, origin_visits_url=origin_visits_url, release=release_name, release_id=release_id, query_params=query_params, releases=releases, releases_url=releases_url, revision_id=revision_id, revision_info=revision_info, root_directory=root_directory, snapshot_id=snapshot_id, snapshot_sizes=snapshot_sizes, snapshot_swhid=swh_snp_id, url_args=url_args, visit_info=visit_info, ) if revision_info: revision_info["revision_url"] = gen_revision_url(revision_id, snapshot_context) return snapshot_context def _build_breadcrumbs(snapshot_context: SnapshotContext, path: str): origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = dict(snapshot_context["query_params"]) root_directory = snapshot_context["root_directory"] path_info = gen_path_info(path) if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" breadcrumbs = [] if root_directory: query_params.pop("path", None) breadcrumbs.append( { "name": root_directory[:7], "url": reverse( browse_view_name, url_args=url_args, query_params=query_params ), } ) for pi in path_info: query_params["path"] = pi["path"] breadcrumbs.append( { "name": pi["name"], "url": reverse( browse_view_name, url_args=url_args, query_params=query_params ), } ) return breadcrumbs def _check_origin_url(snapshot_id, origin_url): if snapshot_id is None and origin_url is None: raise BadInputExc("An origin URL must be provided as query parameter.") def browse_snapshot_directory( request, snapshot_id=None, origin_url=None, timestamp=None, path=None ): """ Django view implementation for browsing a directory in a snapshot context. 
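`_build_breadcrumbs` above turns the browsed path into one crumb per ancestor directory, each linking back to the directory view with a progressively longer `path` query parameter. The path expansion it relies on (`gen_path_info` from `swh.web.common.utils`), approximated as a standalone helper:

```python
# Approximation of the gen_path_info expansion used by _build_breadcrumbs;
# the real helper lives in swh.web.common.utils.
def path_info(path: str):
    parts = [p for p in path.strip("/").split("/") if p]
    return [{"name": part, "path": "/".join(parts[:i + 1])}
            for i, part in enumerate(parts)]

print(path_info("src/main/java"))
# [{'name': 'src', 'path': 'src'},
#  {'name': 'main', 'path': 'src/main'},
#  {'name': 'java', 'path': 'src/main/java'}]
```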
""" try: _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), path=path, browse_context="directory", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) root_directory = snapshot_context["root_directory"] sha1_git = root_directory if root_directory and path: dir_info = service.lookup_directory_with_path(root_directory, path) sha1_git = dir_info["target"] dirs = [] files = [] if sha1_git: dirs, files = get_directory_entries(sha1_git) except Exception as exc: return handle_view_exception(request, exc) origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] url_args = snapshot_context["url_args"] query_params = dict(snapshot_context["query_params"]) revision_id = snapshot_context["revision_id"] snapshot_id = snapshot_context["snapshot_id"] if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" breadcrumbs = _build_breadcrumbs(snapshot_context, path) path = "" if path is None else (path + "/") for d in dirs: if d["type"] == "rev": d["url"] = reverse("browse-revision", url_args={"sha1_git": d["target"]}) else: query_params["path"] = path + d["name"] d["url"] = reverse( browse_view_name, url_args=url_args, query_params=query_params ) sum_file_sizes = 0 readmes = {} if origin_info: browse_view_name = "browse-origin-content" else: browse_view_name = "browse-snapshot-content" for f in files: query_params["path"] = path + f["name"] f["url"] = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if f["length"] is not None: sum_file_sizes += f["length"] f["length"] = filesizeformat(f["length"]) if f["name"].lower().startswith("readme"): readmes[f["name"]] = f["checksums"]["sha1"] readme_name, readme_url, readme_html = get_readme_to_display(readmes) if origin_info: browse_view_name = "browse-origin-log" else: browse_view_name = "browse-snapshot-log" history_url = None if snapshot_id != _empty_snapshot_id: query_params.pop("path", None) history_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) nb_files = None nb_dirs = None dir_path = None if root_directory: nb_files = len(files) nb_dirs = len(dirs) sum_file_sizes = filesizeformat(sum_file_sizes) dir_path = "/" + path browse_dir_link = gen_directory_link(sha1_git) browse_rev_link = gen_revision_link(revision_id) browse_snp_link = gen_snapshot_link(snapshot_id) revision_found = True if sha1_git is None and revision_id is not None: try: service.lookup_revision(revision_id) except NotFoundExc: revision_found = False swh_objects = [ SWHObjectInfo(object_type=DIRECTORY, object_id=sha1_git), SWHObjectInfo(object_type=REVISION, object_id=revision_id), SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id), ] visit_date = None visit_type = None if visit_info: visit_date = format_utc_iso_date(visit_info["date"]) visit_type = visit_info["type"] release_id = snapshot_context["release_id"] browse_rel_link = None if release_id: swh_objects.append(SWHObjectInfo(object_type=RELEASE, object_id=release_id)) browse_rel_link = gen_release_link(release_id) dir_metadata = DirectoryMetadata( object_type=DIRECTORY, object_id=sha1_git, directory=sha1_git, directory_url=browse_dir_link, nb_files=nb_files, nb_dirs=nb_dirs, sum_file_sizes=sum_file_sizes, root_directory=root_directory, path=dir_path, revision=revision_id, 
revision_found=revision_found, revision_url=browse_rev_link, release=release_id, release_url=browse_rel_link, snapshot=snapshot_id, snapshot_url=browse_snp_link, origin_url=origin_url, visit_date=visit_date, visit_type=visit_type, ) vault_cooking = { "directory_context": True, "directory_id": sha1_git, "revision_context": True, "revision_id": revision_id, } swhids_info = get_swhids_info(swh_objects, snapshot_context, dir_metadata) dir_path = "/".join([bc["name"] for bc in breadcrumbs]) + "/" context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Directory - %s - %s - %s" % ( dir_path, snapshot_context["branch"], context_found, ) top_right_link = None if not snapshot_context["is_empty"]: top_right_link = { "url": history_url, "icon": swh_object_icons["revisions history"], "text": "History", } return render( request, "browse/directory.html", { "heading": heading, "swh_object_name": "Directory", "swh_object_metadata": dir_metadata, "dirs": dirs, "files": files, "breadcrumbs": breadcrumbs if root_directory else [], "top_right_link": top_right_link, "readme_name": readme_name, "readme_url": readme_url, "readme_html": readme_html, "snapshot_context": snapshot_context, "vault_cooking": vault_cooking, "show_actions": True, "swhids_info": swhids_info, }, ) def browse_snapshot_content( request, snapshot_id=None, origin_url=None, timestamp=None, path=None, selected_language=None, ): """ Django view implementation for browsing a content in a snapshot context. """ try: _check_origin_url(snapshot_id, origin_url) if path is None: raise BadInputExc("The path of a content must be given as query parameter.") snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), path=path, browse_context="content", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) root_directory = snapshot_context["root_directory"] sha1_git = None query_string = None content_data = {} directory_id = None split_path = path.split("/") filename = split_path[-1] filepath = path[: -len(filename)] if root_directory: content_info = service.lookup_directory_with_path(root_directory, path) sha1_git = content_info["target"] query_string = "sha1_git:" + sha1_git content_data = request_content(query_string, raise_if_unavailable=False) if filepath: dir_info = service.lookup_directory_with_path(root_directory, filepath) directory_id = dir_info["target"] else: directory_id = root_directory except Exception as exc: return handle_view_exception(request, exc) revision_id = snapshot_context["revision_id"] origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] snapshot_id = snapshot_context["snapshot_id"] if content_data.get("raw_data") is not None: content_display_data = prepare_content_for_display( content_data["raw_data"], content_data["mimetype"], path ) content_data.update(content_display_data) # Override language with user-selected language if selected_language is not None: content_data["language"] = selected_language available_languages = None if content_data.get("mimetype") is not None and "text/" in content_data["mimetype"]: available_languages = highlightjs.get_supported_languages() breadcrumbs = _build_breadcrumbs(snapshot_context, filepath) breadcrumbs.append({"name": filename, "url": None}) browse_content_link = gen_content_link(sha1_git) content_raw_url = None if 
query_string: content_raw_url = reverse( "browse-content-raw", url_args={"query_string": query_string}, query_params={"filename": filename}, ) browse_rev_link = gen_revision_link(revision_id) browse_dir_link = gen_directory_link(directory_id) content_checksums = content_data.get("checksums", {}) swh_objects = [ SWHObjectInfo(object_type=CONTENT, object_id=content_checksums.get("sha1_git")), SWHObjectInfo(object_type=DIRECTORY, object_id=directory_id), SWHObjectInfo(object_type=REVISION, object_id=revision_id), SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id), ] visit_date = None visit_type = None if visit_info: visit_date = format_utc_iso_date(visit_info["date"]) visit_type = visit_info["type"] release_id = snapshot_context["release_id"] browse_rel_link = None if release_id: swh_objects.append(SWHObjectInfo(object_type=RELEASE, object_id=release_id)) browse_rel_link = gen_release_link(release_id) content_metadata = ContentMetadata( object_type=CONTENT, object_id=content_checksums.get("sha1_git"), sha1=content_checksums.get("sha1"), sha1_git=content_checksums.get("sha1_git"), sha256=content_checksums.get("sha256"), blake2s256=content_checksums.get("blake2s256"), content_url=browse_content_link, mimetype=content_data.get("mimetype"), encoding=content_data.get("encoding"), size=filesizeformat(content_data.get("length", 0)), language=content_data.get("language"), licenses=content_data.get("licenses"), root_directory=root_directory, path=f"/{filepath}", filename=filename, directory=directory_id, directory_url=browse_dir_link, revision=revision_id, revision_url=browse_rev_link, release=release_id, release_url=browse_rel_link, snapshot=snapshot_id, snapshot_url=gen_snapshot_link(snapshot_id), origin_url=origin_url, visit_date=visit_date, visit_type=visit_type, ) swhids_info = get_swhids_info(swh_objects, snapshot_context, content_metadata) content_path = "/".join([bc["name"] for bc in breadcrumbs]) context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Content - %s - %s - %s" % ( content_path, snapshot_context["branch"], context_found, ) top_right_link = None if not snapshot_context["is_empty"]: top_right_link = { "url": content_raw_url, "icon": swh_object_icons["content"], "text": "Raw File", } return render( request, "browse/content.html", { "heading": heading, "swh_object_name": "Content", "swh_object_metadata": content_metadata, "content": content_data.get("content_data"), "content_size": content_data.get("length"), "max_content_size": content_display_max_size, "filename": filename, "encoding": content_data.get("encoding"), "mimetype": content_data.get("mimetype"), "language": content_data.get("language"), "available_languages": available_languages, "breadcrumbs": breadcrumbs if root_directory else [], "top_right_link": top_right_link, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": True, "swhids_info": swhids_info, "error_code": content_data.get("error_code"), "error_message": content_data.get("error_message"), "error_description": content_data.get("error_description"), }, status=content_data.get("error_code", 200), ) PER_PAGE = 100 def browse_snapshot_log(request, snapshot_id=None, origin_url=None, timestamp=None): """ Django view implementation for browsing a revision history in a snapshot context. 
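The revision log view defined next pages through history with `offset`/`per_page` over a `rev_log` list that grows lazily via a revisions walker whose state is cached in the session: one pagination link is offered while the cached list extends past the current window, the other whenever `offset` is non-zero. That windowing as a toy model:

```python
# Toy model of the offset/per_page windowing used by browse_snapshot_log;
# rev_log stands in for the session-cached list of revision ids.
rev_log = [f"rev{i:02d}" for i in range(25)]
per_page, offset = 10, 10

page = rev_log[offset:offset + per_page]
older_page_exists = len(rev_log) > offset + per_page  # move to offset + per_page
newer_page_exists = offset != 0                       # move to offset - per_page

print(page[0], page[-1], older_page_exists, newer_page_exists)
# rev10 rev19 True True
```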
""" try: _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), browse_context="log", branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=request.GET.get("revision"), ) revision_id = snapshot_context["revision_id"] per_page = int(request.GET.get("per_page", PER_PAGE)) offset = int(request.GET.get("offset", 0)) revs_ordering = request.GET.get("revs_ordering", "committer_date") session_key = "rev_%s_log_ordering_%s" % (revision_id, revs_ordering) rev_log_session = request.session.get(session_key, None) rev_log = [] revs_walker_state = None if rev_log_session: rev_log = rev_log_session["rev_log"] revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: revs_walker = service.get_revisions_walker( revs_ordering, revision_id, max_revs=offset + per_page + 1, state=revs_walker_state, ) rev_log += [rev["id"] for rev in revs_walker] revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] revision_log = service.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, "revs_walker_state": revs_walker_state, } except Exception as exc: return handle_view_exception(request, exc) origin_info = snapshot_context["origin_info"] visit_info = snapshot_context["visit_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] snapshot_id = snapshot_context["snapshot_id"] query_params["per_page"] = per_page revs_ordering = request.GET.get("revs_ordering", "") query_params["revs_ordering"] = revs_ordering if origin_info: browse_view_name = "browse-origin-log" else: browse_view_name = "browse-snapshot-log" prev_log_url = None if len(rev_log) > offset + per_page: query_params["offset"] = offset + per_page prev_log_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) next_log_url = None if offset != 0: query_params["offset"] = offset - per_page next_log_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) revision_log_data = format_log_entries(revision_log, per_page, snapshot_context) browse_rev_link = gen_revision_link(revision_id) browse_log_link = gen_revision_log_link(revision_id) browse_snp_link = gen_snapshot_link(snapshot_id) revision_metadata = { "context-independent revision": browse_rev_link, "context-independent revision history": browse_log_link, "context-independent snapshot": browse_snp_link, "snapshot": snapshot_id, } if origin_info: revision_metadata["origin url"] = origin_info["url"] revision_metadata["origin visit date"] = format_utc_iso_date(visit_info["date"]) revision_metadata["origin visit type"] = visit_info["type"] swh_objects = [ SWHObjectInfo(object_type=REVISION, object_id=revision_id), SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id), ] release_id = snapshot_context["release_id"] if release_id: swh_objects.append(SWHObjectInfo(object_type=RELEASE, object_id=release_id)) browse_rel_link = gen_release_link(release_id) revision_metadata["release"] = release_id revision_metadata["context-independent release"] = browse_rel_link swhids_info = get_swhids_info(swh_objects, snapshot_context) context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading = "Revision history - %s - %s" % (snapshot_context["branch"], context_found) return render( request, 
"browse/revision-log.html", { "heading": heading, "swh_object_name": "Revisions history", "swh_object_metadata": revision_metadata, "revision_log": revision_log_data, "revs_ordering": revs_ordering, "next_log_url": next_log_url, "prev_log_url": prev_log_url, "breadcrumbs": None, "top_right_link": None, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": True, "swhids_info": swhids_info, }, ) def browse_snapshot_branches( request, snapshot_id=None, origin_url=None, timestamp=None ): """ Django view implementation for browsing a list of branches in a snapshot context. """ try: _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), ) branches_bc = request.GET.get("branches_breadcrumbs", "") branches_bc = branches_bc.split(",") if branches_bc else [] branches_from = branches_bc[-1] if branches_bc else "" origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] if origin_info: browse_view_name = "browse-origin-directory" else: browse_view_name = "browse-snapshot-directory" snapshot = service.lookup_snapshot( snapshot_context["snapshot_id"], branches_from, PER_PAGE + 1, target_types=["revision", "alias"], ) displayed_branches, _ = process_snapshot_branches(snapshot) except Exception as exc: return handle_view_exception(request, exc) for branch in displayed_branches: rev_query_params = {} if origin_info: rev_query_params["origin_url"] = origin_info["url"] revision_url = reverse( "browse-revision", url_args={"sha1_git": branch["revision"]}, query_params=query_params, ) query_params["branch"] = branch["name"] directory_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) del query_params["branch"] branch["revision_url"] = revision_url branch["directory_url"] = directory_url if origin_info: browse_view_name = "browse-origin-branches" else: browse_view_name = "browse-snapshot-branches" prev_branches_url = None next_branches_url = None if branches_bc: query_params_prev = dict(query_params) query_params_prev["branches_breadcrumbs"] = ",".join(branches_bc[:-1]) prev_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_prev ) elif branches_from: prev_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if snapshot["next_branch"] is not None: query_params_next = dict(query_params) next_branch = displayed_branches[-1]["name"] del displayed_branches[-1] branches_bc.append(next_branch) query_params_next["branches_breadcrumbs"] = ",".join(branches_bc) next_branches_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_next ) heading = "Branches - " if origin_info: heading += "origin: %s" % origin_info["url"] else: heading += "snapshot: %s" % snapshot_id return render( request, "browse/branches.html", { "heading": heading, "swh_object_name": "Branches", "swh_object_metadata": {}, "top_right_link": None, "displayed_branches": displayed_branches, "prev_branches_url": prev_branches_url, "next_branches_url": next_branches_url, "snapshot_context": snapshot_context, }, ) def browse_snapshot_releases( request, snapshot_id=None, origin_url=None, timestamp=None ): """ Django view implementation for browsing a list of releases in a snapshot context. 
""" try: _check_origin_url(snapshot_id, origin_url) snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=request.GET.get("visit_id"), ) rel_bc = request.GET.get("releases_breadcrumbs", "") rel_bc = rel_bc.split(",") if rel_bc else [] rel_from = rel_bc[-1] if rel_bc else "" origin_info = snapshot_context["origin_info"] url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] snapshot = service.lookup_snapshot( snapshot_context["snapshot_id"], rel_from, PER_PAGE + 1, target_types=["release", "alias"], ) _, displayed_releases = process_snapshot_branches(snapshot) except Exception as exc: return handle_view_exception(request, exc) for release in displayed_releases: query_params_tgt = {"snapshot": snapshot_id} if origin_info: query_params_tgt["origin_url"] = origin_info["url"] release_url = reverse( "browse-release", url_args={"sha1_git": release["id"]}, query_params=query_params_tgt, ) target_url = "" if release["target_type"] == "revision": target_url = reverse( "browse-revision", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "directory": target_url = reverse( "browse-directory", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "content": target_url = reverse( "browse-content", url_args={"query_string": release["target"]}, query_params=query_params_tgt, ) elif release["target_type"] == "release": target_url = reverse( "browse-release", url_args={"sha1_git": release["target"]}, query_params=query_params_tgt, ) release["release_url"] = release_url release["target_url"] = target_url if origin_info: browse_view_name = "browse-origin-releases" else: browse_view_name = "browse-snapshot-releases" prev_releases_url = None next_releases_url = None if rel_bc: query_params_prev = dict(query_params) query_params_prev["releases_breadcrumbs"] = ",".join(rel_bc[:-1]) prev_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_prev ) elif rel_from: prev_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params ) if snapshot["next_branch"] is not None: query_params_next = dict(query_params) next_rel = displayed_releases[-1]["branch_name"] del displayed_releases[-1] rel_bc.append(next_rel) query_params_next["releases_breadcrumbs"] = ",".join(rel_bc) next_releases_url = reverse( browse_view_name, url_args=url_args, query_params=query_params_next ) heading = "Releases - " if origin_info: heading += "origin: %s" % origin_info["url"] else: heading += "snapshot: %s" % snapshot_id return render( request, "browse/releases.html", { "heading": heading, "top_panel_visible": False, "top_panel_collapsible": False, "swh_object_name": "Releases", "swh_object_metadata": {}, "top_right_link": None, "displayed_releases": displayed_releases, "prev_releases_url": prev_releases_url, "next_releases_url": next_releases_url, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": False, }, ) diff --git a/swh/web/browse/urls.py b/swh/web/browse/urls.py index f802ef5d..270bac3e 100644 --- a/swh/web/browse/urls.py +++ b/swh/web/browse/urls.py @@ -1,55 +1,55 @@ # Copyright (C) 2017-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.conf.urls import url 
from django.shortcuts import render, redirect import swh.web.browse.views.directory # noqa import swh.web.browse.views.content # noqa import swh.web.browse.views.origin # noqa import swh.web.browse.views.release # noqa import swh.web.browse.views.revision # noqa import swh.web.browse.views.snapshot # noqa from swh.web.browse.browseurls import BrowseUrls -from swh.web.browse.identifiers import swh_id_browse +from swh.web.browse.identifiers import swhid_browse from swh.web.common.utils import reverse def _browse_help_view(request): return render( request, "browse/help.html", {"heading": "How to browse the archive ?"} ) def _browse_search_view(request): return render( request, "browse/search.html", {"heading": "Search software origins to browse"} ) def _browse_vault_view(request): return render( request, "browse/vault-ui.html", {"heading": "Download archive content from the Vault"}, ) def _browse_origin_save_view(request): return redirect(reverse("origin-save")) urlpatterns = [ url(r"^$", _browse_search_view), url(r"^help/$", _browse_help_view, name="browse-help"), url(r"^search/$", _browse_search_view, name="browse-search"), url(r"^vault/$", _browse_vault_view, name="browse-vault"), # for backward compatibility url(r"^origin/save/$", _browse_origin_save_view, name="browse-origin-save"), - url(r"^(?P<swh_id>swh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$", swh_id_browse), + url(r"^(?P<swhid>swh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$", swhid_browse), ] urlpatterns += BrowseUrls.get_url_patterns() diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py index bebd7f5d..17af6081 100644 --- a/swh/web/browse/views/revision.py +++ b/swh/web/browse/views/revision.py @@ -1,607 +1,607 @@ # Copyright (C) 2017-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import hashlib import json import textwrap from django.http import HttpResponse from django.shortcuts import render from django.template.defaultfilters import filesizeformat from django.utils.safestring import mark_safe from swh.model.identifiers import ( - persistent_identifier, + swhid, CONTENT, DIRECTORY, REVISION, SNAPSHOT, ) from swh.web.browse.browseurls import browse_route from swh.web.browse.snapshot_context import get_snapshot_context from swh.web.browse.utils import ( gen_link, gen_revision_link, gen_revision_url, get_revision_log_url, get_directory_entries, gen_directory_link, request_content, prepare_content_for_display, content_display_max_size, gen_snapshot_link, get_readme_to_display, format_log_entries, gen_person_mail_link, ) from swh.web.common import service from swh.web.common.exc import NotFoundExc, handle_view_exception from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import RevisionMetadata, SWHObjectInfo from swh.web.common.utils import ( reverse, format_utc_iso_date, gen_path_info, swh_object_icons, ) def _gen_content_url(revision, query_string, path, snapshot_context): if snapshot_context: query_params = snapshot_context["query_params"] query_params["path"] = path query_params["revision"] = revision["id"] content_url = reverse("browse-origin-content", query_params=query_params) else: content_path = "%s/%s" % (revision["directory"], path) content_url = reverse( "browse-content", url_args={"query_string": query_string}, query_params={"path": content_path}, ) return content_url def _gen_diff_link(idx, diff_anchor, link_text):
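# Illustrative sketch (not part of this patch): the catch-all browse route above
# matches any SWHID-looking path and hands it to swhid_browse. The same pattern,
# exercised with Python's re module:

import re

SWHID_RE = re.compile(r"^(?P<swhid>swh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$")

m = SWHID_RE.match("swh:1:dir:ef04a768ef04a768ef04a768ef04a768ef04a768/")
assert m and m.group("swhid").startswith("swh:1:dir:")
assert SWHID_RE.match("not-a-swhid/") is None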
if idx < _max_displayed_file_diffs: return gen_link(diff_anchor, link_text) else: return link_text # TODO: put in conf _max_displayed_file_diffs = 1000 def _gen_revision_changes_list(revision, changes, snapshot_context): """ Returns a HTML string describing the file changes introduced in a revision. As this string will be displayed in the browse revision view, links to adequate file diffs are also generated. Args: revision (str): hexadecimal representation of a revision identifier changes (list): list of file changes in the revision snapshot_context (dict): optional origin context used to reverse the content urls Returns: A string to insert in a revision HTML view. """ changes_msg = [] for i, change in enumerate(changes): hasher = hashlib.sha1() from_query_string = "" to_query_string = "" diff_id = "diff-" if change["from"]: from_query_string = "sha1_git:" + change["from"]["target"] diff_id += change["from"]["target"] + "-" + change["from_path"] diff_id += "-" if change["to"]: to_query_string = "sha1_git:" + change["to"]["target"] diff_id += change["to"]["target"] + change["to_path"] change["path"] = change["to_path"] or change["from_path"] url_args = { "from_query_string": from_query_string, "to_query_string": to_query_string, } query_params = {"path": change["path"]} change["diff_url"] = reverse( "diff-contents", url_args=url_args, query_params=query_params ) hasher.update(diff_id.encode("utf-8")) diff_id = hasher.hexdigest() change["id"] = diff_id panel_diff_link = "#panel_" + diff_id if change["type"] == "modify": change["content_url"] = _gen_content_url( revision, to_query_string, change["to_path"], snapshot_context ) changes_msg.append( "modified: %s" % _gen_diff_link(i, panel_diff_link, change["to_path"]) ) elif change["type"] == "insert": change["content_url"] = _gen_content_url( revision, to_query_string, change["to_path"], snapshot_context ) changes_msg.append( "new file: %s" % _gen_diff_link(i, panel_diff_link, change["to_path"]) ) elif change["type"] == "delete": parent = service.lookup_revision(revision["parents"][0]) change["content_url"] = _gen_content_url( parent, from_query_string, change["from_path"], snapshot_context ) changes_msg.append( "deleted: %s" % _gen_diff_link(i, panel_diff_link, change["from_path"]) ) elif change["type"] == "rename": change["content_url"] = _gen_content_url( revision, to_query_string, change["to_path"], snapshot_context ) link_text = change["from_path"] + " → " + change["to_path"] changes_msg.append( "renamed: %s" % _gen_diff_link(i, panel_diff_link, link_text) ) if not changes: changes_msg.append("No changes") return mark_safe("\n".join(changes_msg)) @browse_route( r"revision/(?P<sha1_git>[0-9a-f]+)/diff/", view_name="diff-revision", checksum_args=["sha1_git"], ) def _revision_diff(request, sha1_git): """ Browse internal endpoint to compute revision diff """ try: revision = service.lookup_revision(sha1_git) snapshot_context = None origin_url = request.GET.get("origin_url", None) if not origin_url: origin_url = request.GET.get("origin", None) timestamp = request.GET.get("timestamp", None) visit_id = request.GET.get("visit_id", None) if origin_url: snapshot_context = get_snapshot_context( origin_url=origin_url, timestamp=timestamp, visit_id=visit_id ) except Exception as exc: return handle_view_exception(request, exc) changes = service.diff_revision(sha1_git) changes_msg = _gen_revision_changes_list(revision, changes, snapshot_context) diff_data = { "total_nb_changes": len(changes), "changes": changes[:_max_displayed_file_diffs], "changes_msg":
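# Illustrative sketch (not part of this patch): _gen_revision_changes_list above
# derives a stable DOM id for each file diff by sha1-hashing a string built from
# the from/to targets and paths, so anchors like "#panel_<id>" survive reloads.

import hashlib

def diff_panel_id(from_target, from_path, to_target, to_path):
    raw = "diff-"
    if from_target:
        raw += from_target + "-" + from_path
    raw += "-"
    if to_target:
        raw += to_target + to_path
    return hashlib.sha1(raw.encode("utf-8")).hexdigest()

pid = diff_panel_id("a" * 40, "old.c", "b" * 40, "new.c")
assert len(pid) == 40 and pid == diff_panel_id("a" * 40, "old.c", "b" * 40, "new.c")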
changes_msg, } diff_data_json = json.dumps(diff_data, separators=(",", ": ")) return HttpResponse(diff_data_json, content_type="application/json") NB_LOG_ENTRIES = 100 @browse_route( r"revision/(?P<sha1_git>[0-9a-f]+)/log/", view_name="browse-revision-log", checksum_args=["sha1_git"], ) def revision_log_browse(request, sha1_git): """ Django view that produces an HTML display of the history log for a revision identified by its id. The url that points to it is :http:get:`/browse/revision/(sha1_git)/log/` """ try: origin_url = request.GET.get("origin_url") snapshot_id = request.GET.get("snapshot") snapshot_context = None if origin_url or snapshot_id: snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=request.GET.get("timestamp"), visit_id=request.GET.get("visit_id"), branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=sha1_git, ) per_page = int(request.GET.get("per_page", NB_LOG_ENTRIES)) offset = int(request.GET.get("offset", 0)) revs_ordering = request.GET.get("revs_ordering", "committer_date") session_key = "rev_%s_log_ordering_%s" % (sha1_git, revs_ordering) rev_log_session = request.session.get(session_key, None) rev_log = [] revs_walker_state = None if rev_log_session: rev_log = rev_log_session["rev_log"] revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: revs_walker = service.get_revisions_walker( revs_ordering, sha1_git, max_revs=offset + per_page + 1, state=revs_walker_state, ) rev_log += [rev["id"] for rev in revs_walker] revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] revision_log = service.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, "revs_walker_state": revs_walker_state, } except Exception as exc: return handle_view_exception(request, exc) revs_ordering = request.GET.get("revs_ordering", "") prev_log_url = None if len(rev_log) > offset + per_page: prev_log_url = reverse( "browse-revision-log", url_args={"sha1_git": sha1_git}, query_params={ "per_page": per_page, "offset": offset + per_page, "revs_ordering": revs_ordering, }, ) next_log_url = None if offset != 0: next_log_url = reverse( "browse-revision-log", url_args={"sha1_git": sha1_git}, query_params={ "per_page": per_page, "offset": offset - per_page, "revs_ordering": revs_ordering, }, ) revision_log_data = format_log_entries(revision_log, per_page) - swh_rev_id = persistent_identifier("revision", sha1_git) + swh_rev_id = swhid("revision", sha1_git) return render( request, "browse/revision-log.html", { "heading": "Revision history", "swh_object_id": swh_rev_id, "swh_object_name": "Revisions history", "swh_object_metadata": None, "revision_log": revision_log_data, "revs_ordering": revs_ordering, "next_log_url": next_log_url, "prev_log_url": prev_log_url, "breadcrumbs": None, "top_right_link": None, "snapshot_context": snapshot_context, "vault_cooking": None, "show_actions": True, "swhids_info": None, }, ) @browse_route( r"revision/(?P<sha1_git>[0-9a-f]+)/", view_name="browse-revision", checksum_args=["sha1_git"], ) def revision_browse(request, sha1_git): """ Django view that produces an HTML display of a revision identified by its id. The url that points to it is :http:get:`/browse/revision/(sha1_git)/`.
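# Illustrative sketch (not part of this patch): the swhid() helper used above
# (from swh.model.identifiers) serializes a core identifier as
# "swh:<scheme_version>:<object_type_code>:<hex_id>". A dependency-free model:

TYPE_CODES = {"content": "cnt", "directory": "dir", "revision": "rev",
              "release": "rel", "snapshot": "snp"}

def core_swhid(object_type, object_id, scheme_version=1):
    return f"swh:{scheme_version}:{TYPE_CODES[object_type]}:{object_id}"

assert core_swhid("revision", "d24a75c9" + "0" * 32) == \
    "swh:1:rev:d24a75c9" + "0" * 32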
""" try: revision = service.lookup_revision(sha1_git) origin_info = None snapshot_context = None origin_url = request.GET.get("origin_url") if not origin_url: origin_url = request.GET.get("origin") timestamp = request.GET.get("timestamp") visit_id = request.GET.get("visit_id") snapshot_id = request.GET.get("snapshot_id") if not snapshot_id: snapshot_id = request.GET.get("snapshot") path = request.GET.get("path") dir_id = None dirs, files = None, None content_data = {} if origin_url: try: snapshot_context = get_snapshot_context( snapshot_id=snapshot_id, origin_url=origin_url, timestamp=timestamp, visit_id=visit_id, branch_name=request.GET.get("branch"), release_name=request.GET.get("release"), revision_id=sha1_git, ) except NotFoundExc as e: raw_rev_url = reverse( "browse-revision", url_args={"sha1_git": sha1_git} ) error_message = ( "The Software Heritage archive has a revision " "with the hash you provided but the origin " "mentioned in your request appears broken: %s. " "Please check the URL and try again.\n\n" "Nevertheless, you can still browse the revision " "without origin information: %s" % (gen_link(origin_url), gen_link(raw_rev_url)) ) if str(e).startswith("Origin"): raise NotFoundExc(error_message) else: raise e origin_info = snapshot_context["origin_info"] snapshot_id = snapshot_context["snapshot_id"] elif snapshot_id: snapshot_context = get_snapshot_context(snapshot_id) if path: file_info = service.lookup_directory_with_path(revision["directory"], path) if file_info["type"] == "dir": dir_id = file_info["target"] else: query_string = "sha1_git:" + file_info["target"] content_data = request_content(query_string, raise_if_unavailable=False) else: dir_id = revision["directory"] if dir_id: path = "" if path is None else (path + "/") dirs, files = get_directory_entries(dir_id) except Exception as exc: return handle_view_exception(request, exc) revision_metadata = RevisionMetadata( object_type=REVISION, object_id=sha1_git, revision=sha1_git, revision_url=gen_revision_link(sha1_git), author=revision["author"]["fullname"] if revision["author"] else "None", author_url=gen_person_mail_link(revision["author"]) if revision["author"] else "None", committer=revision["committer"]["fullname"] if revision["committer"] else "None", committer_url=gen_person_mail_link(revision["committer"]) if revision["committer"] else "None", committer_date=format_utc_iso_date(revision["committer_date"]), date=format_utc_iso_date(revision["date"]), directory=revision["directory"], directory_url=gen_directory_link(revision["directory"]), merge=revision["merge"], metadata=json.dumps( revision["metadata"], sort_keys=True, indent=4, separators=(",", ": ") ), parents=revision["parents"], synthetic=revision["synthetic"], type=revision["type"], snapshot=snapshot_id, snapshot_url=gen_snapshot_link(snapshot_id) if snapshot_id else None, origin_url=origin_url, ) message_lines = ["None"] if revision["message"]: message_lines = revision["message"].split("\n") parents = [] for p in revision["parents"]: parent_url = gen_revision_url(p, snapshot_context) parents.append({"id": p, "url": parent_url}) path_info = gen_path_info(path) query_params = snapshot_context["query_params"] if snapshot_context else {} breadcrumbs = [] breadcrumbs.append( { "name": revision["directory"][:7], "url": reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ), } ) for pi in path_info: query_params["path"] = pi["path"] breadcrumbs.append( { "name": pi["name"], "url": reverse( "browse-revision", url_args={"sha1_git": 
sha1_git}, query_params=query_params, ), } ) vault_cooking = { "directory_context": False, "directory_id": None, "revision_context": True, "revision_id": sha1_git, } swh_objects = [SWHObjectInfo(object_type=REVISION, object_id=sha1_git)] content = None content_size = None filename = None mimetype = None language = None readme_name = None readme_url = None readme_html = None readmes = {} error_code = 200 error_message = "" error_description = "" extra_context = dict(revision_metadata) extra_context["path"] = f"/{path}" if path else "/" if content_data: breadcrumbs[-1]["url"] = None content_size = content_data["length"] mimetype = content_data["mimetype"] if content_data["raw_data"]: content_display_data = prepare_content_for_display( content_data["raw_data"], content_data["mimetype"], path ) content = content_display_data["content_data"] language = content_display_data["language"] mimetype = content_display_data["mimetype"] if path: filename = path_info[-1]["name"] query_params["filename"] = filename filepath = "/".join(pi["name"] for pi in path_info[:-1]) extra_context["path"] = f"/{filepath}/" if filepath else "/" extra_context["filename"] = filename top_right_link = { "url": reverse( "browse-content-raw", url_args={"query_string": query_string}, query_params={"filename": filename}, ), "icon": swh_object_icons["content"], "text": "Raw File", } swh_objects.append( SWHObjectInfo(object_type=CONTENT, object_id=file_info["target"]) ) error_code = content_data["error_code"] error_message = content_data["error_message"] error_description = content_data["error_description"] else: for d in dirs: if d["type"] == "rev": d["url"] = reverse( "browse-revision", url_args={"sha1_git": d["target"]} ) else: query_params["path"] = path + d["name"] d["url"] = reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) for f in files: query_params["path"] = path + f["name"] f["url"] = reverse( "browse-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) if f["length"] is not None: f["length"] = filesizeformat(f["length"]) if f["name"].lower().startswith("readme"): readmes[f["name"]] = f["checksums"]["sha1"] readme_name, readme_url, readme_html = get_readme_to_display(readmes) top_right_link = { "url": get_revision_log_url(sha1_git, snapshot_context), "icon": swh_object_icons["revisions history"], "text": "History", } vault_cooking["directory_context"] = True vault_cooking["directory_id"] = dir_id swh_objects.append(SWHObjectInfo(object_type=DIRECTORY, object_id=dir_id)) query_params.pop("path", None) diff_revision_url = reverse( "diff-revision", url_args={"sha1_git": sha1_git}, query_params=query_params, ) if snapshot_id: swh_objects.append(SWHObjectInfo(object_type=SNAPSHOT, object_id=snapshot_id)) swhids_info = get_swhids_info(swh_objects, snapshot_context, extra_context) heading = "Revision - %s - %s" % ( sha1_git[:7], textwrap.shorten(message_lines[0], width=70), ) if snapshot_context: context_found = "snapshot: %s" % snapshot_context["snapshot_id"] if origin_info: context_found = "origin: %s" % origin_info["url"] heading += " - %s" % context_found return render( request, "browse/revision.html", { "heading": heading, "swh_object_id": swhids_info[0]["swhid"], "swh_object_name": "Revision", "swh_object_metadata": revision_metadata, "message_header": message_lines[0], "message_body": "\n".join(message_lines[1:]), "parents": parents, "snapshot_context": snapshot_context, "dirs": dirs, "files": files, "content": content, "content_size": content_size, 
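# Illustrative sketch (not part of this patch): while listing directory entries,
# the view above collects every file whose name starts with "readme" (case
# insensitively) and lets get_readme_to_display() pick the best candidate.

def collect_readmes(files):
    """Map candidate readme names to their sha1 checksums."""
    return {f["name"]: f["checksums"]["sha1"]
            for f in files if f["name"].lower().startswith("readme")}

files = [
    {"name": "README.md", "checksums": {"sha1": "a" * 40}},
    {"name": "readme.rst", "checksums": {"sha1": "b" * 40}},
    {"name": "main.c", "checksums": {"sha1": "c" * 40}},
]
assert sorted(collect_readmes(files)) == ["README.md", "readme.rst"]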
"max_content_size": content_display_max_size, "filename": filename, "encoding": content_data.get("encoding"), "mimetype": mimetype, "language": language, "readme_name": readme_name, "readme_url": readme_url, "readme_html": readme_html, "breadcrumbs": breadcrumbs, "top_right_link": top_right_link, "vault_cooking": vault_cooking, "diff_revision_url": diff_revision_url, "show_actions": True, "swhids_info": swhids_info, "error_code": error_code, "error_message": error_message, "error_description": error_description, }, status=error_code, ) diff --git a/swh/web/common/exc.py b/swh/web/common/exc.py index 178dd346..4dcce962 100644 --- a/swh/web/common/exc.py +++ b/swh/web/common/exc.py @@ -1,150 +1,149 @@ # Copyright (C) 2015-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import traceback from django.http import HttpResponse from django.shortcuts import render from django.utils.safestring import mark_safe from django.utils.html import escape import sentry_sdk from swh.web.config import get_config class BadInputExc(ValueError): """Wrong request to the api. Example: Asking a content with the wrong identifier format. """ pass class NotFoundExc(Exception): """Good request to the api but no result were found. Example: Asking a content with the right identifier format but that content does not exist. """ pass class ForbiddenExc(Exception): """Good request to the api, forbidden result to return due to enforce policy. Example: Asking for a raw content which exists but whose mimetype is not text. """ pass class LargePayloadExc(Exception): """The input size is too large. - Example: Asking to resolve 10000 persistent identifier when the limit - is 1000. + Example: Asking to resolve 10000 SWHIDs when the limit is 1000. """ pass http_status_code_message = { 400: "Bad Request", 401: "Unauthorized", 403: "Access Denied", 404: "Resource not found", 413: "Payload Too Large", 500: "Internal Server Error", 501: "Not Implemented", 502: "Bad Gateway", 503: "Service unavailable", } def _generate_error_page(request, error_code, error_description): return render( request, "error.html", { "error_code": error_code, "error_message": http_status_code_message[error_code], "error_description": mark_safe(error_description), }, status=error_code, ) def swh_handle400(request, exception=None): """ Custom Django HTTP error 400 handler for swh-web. """ error_description = ( "The server cannot process the request to %s due to " "something that is perceived to be a client error." % escape(request.META["PATH_INFO"]) ) return _generate_error_page(request, 400, error_description) def swh_handle403(request, exception=None): """ Custom Django HTTP error 403 handler for swh-web. """ error_description = "The resource %s requires an authentication." % escape( request.META["PATH_INFO"] ) return _generate_error_page(request, 403, error_description) def swh_handle404(request, exception=None): """ Custom Django HTTP error 404 handler for swh-web. """ error_description = "The resource %s could not be found on the server." % escape( request.META["PATH_INFO"] ) return _generate_error_page(request, 404, error_description) def swh_handle500(request): """ Custom Django HTTP error 500 handler for swh-web. """ error_description = ( "An unexpected condition was encountered when " "requesting resource %s." 
% escape(request.META["PATH_INFO"]) ) return _generate_error_page(request, 500, error_description) def handle_view_exception(request, exc, html_response=True): """ Function used to generate an error page when an exception was raised inside a swh-web browse view. """ sentry_sdk.capture_exception(exc) error_code = 500 error_description = "%s: %s" % (type(exc).__name__, str(exc)) if get_config()["debug"]: error_description = traceback.format_exc() if isinstance(exc, BadInputExc): error_code = 400 if isinstance(exc, ForbiddenExc): error_code = 403 if isinstance(exc, NotFoundExc): error_code = 404 if html_response: return _generate_error_page(request, error_code, error_description) else: return HttpResponse( error_description, content_type="text/plain", status=error_code ) diff --git a/swh/web/common/identifiers.py b/swh/web/common/identifiers.py index ebdcb7ce..ead96716 100644 --- a/swh/web/common/identifiers.py +++ b/swh/web/common/identifiers.py @@ -1,394 +1,383 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from urllib.parse import quote -from typing import Any, Dict, Iterable, List, Optional +from typing import cast, Any, Dict, Iterable, List, Optional from typing_extensions import TypedDict from django.http import QueryDict from swh.model.exceptions import ValidationError from swh.model.hashutil import hash_to_bytes from swh.model.identifiers import ( - persistent_identifier, - parse_persistent_identifier, + swhid, + parse_swhid, CONTENT, DIRECTORY, ORIGIN, RELEASE, REVISION, SNAPSHOT, - PersistentId, + SWHID, ) from swh.web.common import service from swh.web.common.exc import BadInputExc from swh.web.common.typing import ( QueryParameters, SnapshotContext, SWHObjectInfo, SWHIDInfo, SWHIDContext, ) from swh.web.common.utils import reverse -def get_swh_persistent_id( +def gen_swhid( object_type: str, object_id: str, scheme_version: int = 1, metadata: SWHIDContext = {}, ) -> str: """ - Returns the persistent identifier for a swh object based on: + Returns the SoftWare Heritage persistent IDentifier for a swh object based on: * the object type * the object id - * the swh identifiers scheme version + * the SWHID scheme version Args: object_type: the swh object type (content/directory/release/revision/snapshot) object_id: the swh object id (hexadecimal representation of its hash value) - scheme_version: the scheme version of the swh - persistent identifiers + scheme_version: the scheme version of the SWHIDs Returns: - the swh object persistent identifier + the SWHID of the object Raises: BadInputExc: if the provided parameters do not enable to generate a valid identifier """ try: - swh_id = persistent_identifier(object_type, object_id, scheme_version, metadata) - except ValidationError as e: - raise BadInputExc( - "Invalid object (%s) for swh persistent id. %s" % (object_id, e) + obj_swhid = swhid( + object_type, object_id, scheme_version, cast(Dict[str, Any], metadata) ) + except ValidationError as e: + raise BadInputExc("Invalid object (%s) for SWHID. 
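# Illustrative sketch (not part of this patch): handle_view_exception above maps
# the swh-web exception types onto HTTP status codes before rendering the error
# page. Local stand-in classes are used here so the snippet runs on its own.

class BadInputExc(ValueError): pass       # stand-in for swh.web.common.exc
class ForbiddenExc(Exception): pass
class NotFoundExc(Exception): pass

def status_for(exc):
    if isinstance(exc, BadInputExc):
        return 400
    if isinstance(exc, ForbiddenExc):
        return 403
    if isinstance(exc, NotFoundExc):
        return 404
    return 500

assert status_for(BadInputExc("bad hash")) == 400
assert status_for(NotFoundExc("no such origin")) == 404
assert status_for(RuntimeError("boom")) == 500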
%s" % (object_id, e)) else: - return swh_id + return obj_swhid -class ResolvedPersistentId(TypedDict): +class ResolvedSWHID(TypedDict): """parsed SWHID with context""" - swh_id_parsed: PersistentId + swhid_parsed: SWHID """URL to browse object according to SWHID context""" browse_url: Optional[str] -def resolve_swh_persistent_id( - swh_id: str, query_params: Optional[QueryParameters] = None -) -> ResolvedPersistentId: +def resolve_swhid( + swhid: str, query_params: Optional[QueryParameters] = None +) -> ResolvedSWHID: """ - Try to resolve a Software Heritage persistent id into an url for + Try to resolve a SoftWare Heritage persistent IDentifier into an url for browsing the targeted object. Args: - swh_id: a Software Heritage persistent identifier + swhid: a SoftWare Heritage persistent IDentifier query_params: optional dict filled with query parameters to append to the browse url Returns: a dict with the following keys: - * **swh_id_parsed**: the parsed identifier + * **swhid_parsed**: the parsed identifier * **browse_url**: the url for browsing the targeted object """ - swh_id_parsed = get_persistent_identifier(swh_id) - object_type = swh_id_parsed.object_type - object_id = swh_id_parsed.object_id + swhid_parsed = get_swhid(swhid) + object_type = swhid_parsed.object_type + object_id = swhid_parsed.object_id browse_url = None url_args = {} query_dict = QueryDict("", mutable=True) fragment = "" anchor_swhid_parsed = None process_lines = object_type is CONTENT if query_params and len(query_params) > 0: for k in sorted(query_params.keys()): query_dict[k] = query_params[k] - if "origin" in swh_id_parsed.metadata: - query_dict["origin_url"] = swh_id_parsed.metadata["origin"] + if "origin" in swhid_parsed.metadata: + query_dict["origin_url"] = swhid_parsed.metadata["origin"] - if "anchor" in swh_id_parsed.metadata: - anchor_swhid_parsed = get_persistent_identifier( - swh_id_parsed.metadata["anchor"] - ) + if "anchor" in swhid_parsed.metadata: + anchor_swhid_parsed = get_swhid(swhid_parsed.metadata["anchor"]) - if "path" in swh_id_parsed.metadata and swh_id_parsed.metadata["path"] != "/": - query_dict["path"] = swh_id_parsed.metadata["path"] + if "path" in swhid_parsed.metadata and swhid_parsed.metadata["path"] != "/": + query_dict["path"] = swhid_parsed.metadata["path"] if anchor_swhid_parsed: directory = "" if anchor_swhid_parsed.object_type == DIRECTORY: directory = anchor_swhid_parsed.object_id elif anchor_swhid_parsed.object_type == REVISION: revision = service.lookup_revision(anchor_swhid_parsed.object_id) directory = revision["directory"] elif anchor_swhid_parsed.object_type == RELEASE: release = service.lookup_release(anchor_swhid_parsed.object_id) if release["target_type"] == REVISION: revision = service.lookup_revision(release["target"]) directory = revision["directory"] if object_type == CONTENT: - if "origin" not in swh_id_parsed.metadata: + if "origin" not in swhid_parsed.metadata: # when no origin context, content objects need to have their # path prefixed by root directory id for proper breadcrumbs display query_dict["path"] = directory + query_dict["path"] else: # remove leading slash from SWHID content path query_dict["path"] = query_dict["path"][1:] elif object_type == DIRECTORY: object_id = directory # remove leading and trailing slashes from SWHID directory path query_dict["path"] = query_dict["path"][1:-1] # snapshot context - if "visit" in swh_id_parsed.metadata: + if "visit" in swhid_parsed.metadata: - snp_swhid_parsed = 
get_persistent_identifier(swh_id_parsed.metadata["visit"]) + snp_swhid_parsed = get_swhid(swhid_parsed.metadata["visit"]) if snp_swhid_parsed.object_type != SNAPSHOT: raise BadInputExc("Visit must be a snapshot SWHID.") query_dict["snapshot"] = snp_swhid_parsed.object_id if anchor_swhid_parsed: if anchor_swhid_parsed.object_type == REVISION: # check if the anchor revision is the tip of a branch branch_name = service.lookup_snapshot_branch_name_from_tip_revision( snp_swhid_parsed.object_id, anchor_swhid_parsed.object_id ) if branch_name: query_dict["branch"] = branch_name elif object_type != REVISION: query_dict["revision"] = anchor_swhid_parsed.object_id elif anchor_swhid_parsed.object_type == RELEASE: release = service.lookup_release(anchor_swhid_parsed.object_id) if release: query_dict["release"] = release["name"] if object_type == REVISION and "release" not in query_dict: branch_name = service.lookup_snapshot_branch_name_from_tip_revision( snp_swhid_parsed.object_id, object_id ) if branch_name: query_dict["branch"] = branch_name # browsing content or directory without snapshot context elif object_type in (CONTENT, DIRECTORY) and anchor_swhid_parsed: if anchor_swhid_parsed.object_type == REVISION: # anchor revision, objects are browsed from its view object_type = REVISION object_id = anchor_swhid_parsed.object_id elif object_type == DIRECTORY and anchor_swhid_parsed.object_type == DIRECTORY: # a directory is browsed from its root object_id = anchor_swhid_parsed.object_id if object_type == CONTENT: url_args["query_string"] = f"sha1_git:{object_id}" elif object_type == DIRECTORY: url_args["sha1_git"] = object_id elif object_type == RELEASE: url_args["sha1_git"] = object_id elif object_type == REVISION: url_args["sha1_git"] = object_id elif object_type == SNAPSHOT: url_args["snapshot_id"] = object_id elif object_type == ORIGIN: raise BadInputExc( ( - "Origin PIDs (Persistent Identifiers) are not " - "publicly resolvable because they are for " + "Origin SWHIDs are not publicly resolvable because they are for " "internal usage only" ) ) - if "lines" in swh_id_parsed.metadata and process_lines: - lines = swh_id_parsed.metadata["lines"].split("-") + if "lines" in swhid_parsed.metadata and process_lines: + lines = swhid_parsed.metadata["lines"].split("-") fragment += "#L" + lines[0] if len(lines) > 1: fragment += "-L" + lines[1] if url_args: browse_url = ( reverse( f"browse-{object_type}", url_args=url_args, query_params=query_dict, ) + fragment ) - return ResolvedPersistentId(swh_id_parsed=swh_id_parsed, browse_url=browse_url) + return ResolvedSWHID(swhid_parsed=swhid_parsed, browse_url=browse_url) -def get_persistent_identifier(persistent_id: str) -> PersistentId: - """Check if a persistent identifier is valid. +def get_swhid(swhid: str) -> SWHID: + """Check if a SWHID is valid and return it parsed. Args: - persistent_id: A string representing a Software Heritage - persistent identifier. + swhid: a SoftWare Heritage persistent IDentifier. Raises: - BadInputExc: if the provided persistent identifier can - not be parsed. + BadInputExc: if the provided SWHID can not be parsed. Return: - A persistent identifier object. + A parsed SWHID. 
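# Illustrative sketch (not part of this patch): get_swhid(), whose body follows,
# simply wraps swh.model.identifiers.parse_swhid and turns ValidationError into
# BadInputExc. A toy parser showing the shape of the validation it relies on:

import re

CORE_RE = re.compile(r"^swh:(?P<version>\d+):(?P<type>cnt|dir|rev|rel|snp)"
                     r":(?P<id>[0-9a-f]{40})(;.*)?$")

def parse_core(swhid_str):
    m = CORE_RE.match(swhid_str)
    if m is None:
        raise ValueError(f"Error when parsing identifier: {swhid_str}")
    return m.groupdict()

parsed = parse_core("swh:1:rev:" + "d" * 40)
assert parsed["type"] == "rev" and parsed["version"] == "1"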
""" try: - pid_object = parse_persistent_identifier(persistent_id) + swhid_parsed = parse_swhid(swhid) except ValidationError as ve: raise BadInputExc("Error when parsing identifier: %s" % " ".join(ve.messages)) else: - return pid_object + return swhid_parsed -def group_swh_persistent_identifiers( - persistent_ids: Iterable[PersistentId], -) -> Dict[str, List[bytes]]: +def group_swhids(swhids: Iterable[SWHID],) -> Dict[str, List[bytes]]: """ - Groups many Software Heritage persistent identifiers into a + Groups many SoftWare Heritage persistent IDentifiers into a dictionary depending on their type. Args: - persistent_ids: an iterable of Software Heritage persistent - identifier objects + swhids: an iterable of SoftWare Heritage persistent + IDentifier objects Returns: A dictionary with: - keys: persistent identifier types - values: persistent identifiers id + keys: object types + values: object hashes """ - pids_by_type: Dict[str, List[bytes]] = { + swhids_by_type: Dict[str, List[bytes]] = { CONTENT: [], DIRECTORY: [], REVISION: [], RELEASE: [], SNAPSHOT: [], } - for pid in persistent_ids: - obj_id = pid.object_id - obj_type = pid.object_type - pids_by_type[obj_type].append(hash_to_bytes(obj_id)) + for obj_swhid in swhids: + obj_id = obj_swhid.object_id + obj_type = obj_swhid.object_type + swhids_by_type[obj_type].append(hash_to_bytes(obj_id)) - return pids_by_type + return swhids_by_type def get_swhids_info( swh_objects: Iterable[SWHObjectInfo], snapshot_context: Optional[SnapshotContext] = None, extra_context: Optional[Dict[str, Any]] = None, ) -> List[SWHIDInfo]: """ - Returns a list of dict containing info related to persistent - identifiers of swh objects. + Returns a list of dict containing info related to SWHIDs of objects. Args: swh_objects: an iterable of dict describing archived objects snapshot_context: optional dict parameter describing the snapshot in which the objects have been found extra_context: optional dict filled with extra contextual info about the objects Returns: - a list of dict containing persistent identifiers info + a list of dict containing SWHIDs info """ swhids_info = [] for swh_object in swh_objects: if not swh_object["object_id"]: swhids_info.append( SWHIDInfo( object_type=swh_object["object_type"], object_id="", swhid="", swhid_url="", context={}, swhid_with_context=None, swhid_with_context_url=None, ) ) continue object_type = swh_object["object_type"] object_id = swh_object["object_id"] swhid_context: SWHIDContext = {} if snapshot_context: if snapshot_context["origin_info"] is not None: swhid_context["origin"] = quote( snapshot_context["origin_info"]["url"], safe="/?:@&" ) if object_type != SNAPSHOT: - swhid_context["visit"] = get_swh_persistent_id( + swhid_context["visit"] = gen_swhid( SNAPSHOT, snapshot_context["snapshot_id"] ) if object_type in (CONTENT, DIRECTORY): if snapshot_context["release_id"] is not None: - swhid_context["anchor"] = get_swh_persistent_id( + swhid_context["anchor"] = gen_swhid( RELEASE, snapshot_context["release_id"] ) elif snapshot_context["revision_id"] is not None: - swhid_context["anchor"] = get_swh_persistent_id( + swhid_context["anchor"] = gen_swhid( REVISION, snapshot_context["revision_id"] ) if object_type in (CONTENT, DIRECTORY): if ( extra_context and "revision" in extra_context and extra_context["revision"] and "anchor" not in swhid_context ): - swhid_context["anchor"] = get_swh_persistent_id( - REVISION, extra_context["revision"] - ) + swhid_context["anchor"] = gen_swhid(REVISION, extra_context["revision"]) elif ( 
extra_context and "root_directory" in extra_context and extra_context["root_directory"] and "anchor" not in swhid_context and ( object_type != DIRECTORY or extra_context["root_directory"] != object_id ) ): - swhid_context["anchor"] = get_swh_persistent_id( + swhid_context["anchor"] = gen_swhid( DIRECTORY, extra_context["root_directory"] ) path = None if extra_context and "path" in extra_context: path = extra_context["path"] or "/" if "filename" in extra_context and object_type == CONTENT: path += extra_context["filename"] if path: swhid_context["path"] = quote(path, safe="/?:@&") - swhid = get_swh_persistent_id(object_type, object_id) - swhid_url = reverse("browse-swh-id", url_args={"swh_id": swhid}) + swhid = gen_swhid(object_type, object_id) + swhid_url = reverse("browse-swhid", url_args={"swhid": swhid}) swhid_with_context = None swhid_with_context_url = None if swhid_context: - swhid_with_context = get_swh_persistent_id( + swhid_with_context = gen_swhid( object_type, object_id, metadata=swhid_context ) swhid_with_context_url = reverse( - "browse-swh-id", url_args={"swh_id": swhid_with_context} + "browse-swhid", url_args={"swhid": swhid_with_context} ) swhids_info.append( SWHIDInfo( object_type=object_type, object_id=object_id, swhid=swhid, swhid_url=swhid_url, context=swhid_context, swhid_with_context=swhid_with_context, swhid_with_context_url=swhid_with_context_url, ) ) return swhids_info diff --git a/swh/web/common/service.py b/swh/web/common/service.py index 004c0216..9ee6fb74 100644 --- a/swh/web/common/service.py +++ b/swh/web/common/service.py @@ -1,1318 +1,1318 @@ # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import itertools import os import re from collections import defaultdict from typing import Any, Dict, List, Set, Iterable, Iterator, Optional, Tuple from swh.model import hashutil from swh.model.identifiers import CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT from swh.storage.algos import diff, revisions_walker from swh.storage.algos.origin import origin_get_latest_visit_status from swh.storage.algos.snapshot import snapshot_get_latest from swh.vault.exc import NotFoundExc as VaultNotFoundExc from swh.web import config from swh.web.common import converters from swh.web.common import query from swh.web.common.exc import BadInputExc, NotFoundExc from swh.web.common.origin_visits import get_origin_visit from swh.web.common.typing import OriginInfo, OriginVisitInfo search = config.search() storage = config.storage() vault = config.vault() idx_storage = config.indexer_storage() MAX_LIMIT = 50 # Top limit the users can ask for def _first_element(lst): """Returns the first element in the provided list or None if it is empty or None""" return next(iter(lst or []), None) def lookup_multiple_hashes(hashes): """Lookup the passed hashes in a single DB connection, using batch processing. Args: An array of {filename: X, sha1: Y}, string X, hex sha1 string Y. Returns: The same array with elements updated with elem['found'] = true if the hash is present in storage, elem['found'] = false if not. 
""" hashlist = [hashutil.hash_to_bytes(elem["sha1"]) for elem in hashes] content_missing = storage.content_missing_per_sha1(hashlist) missing = [hashutil.hash_to_hex(x) for x in content_missing] for x in hashes: x.update({"found": True}) for h in hashes: if h["sha1"] in missing: h["found"] = False return hashes def lookup_expression(expression, last_sha1, per_page): """Lookup expression in raw content. Args: expression (str): An expression to lookup through raw indexed content last_sha1 (str): Last sha1 seen per_page (int): Number of results per page Yields: ctags whose content match the expression """ limit = min(per_page, MAX_LIMIT) ctags = idx_storage.content_ctags_search( expression, last_sha1=last_sha1, limit=limit ) for ctag in ctags: ctag = converters.from_swh(ctag, hashess={"id"}) ctag["sha1"] = ctag["id"] ctag.pop("id") yield ctag def lookup_hash(q): """Checks if the storage contains a given content checksum Args: query string of the form Returns: Dict with key found containing the hash info if the hash is present, None if not. """ algo, hash = query.parse_hash(q) found = _first_element(storage.content_find({algo: hash})) return {"found": converters.from_content(found), "algo": algo} def search_hash(q): """Checks if the storage contains a given content checksum Args: query string of the form Returns: Dict with key found to True or False, according to whether the checksum is present or not """ algo, hash = query.parse_hash(q) found = _first_element(storage.content_find({algo: hash})) return {"found": found is not None} def _lookup_content_sha1(q): """Given a possible input, query for the content's sha1. Args: q: query string of the form Returns: binary sha1 if found or None """ algo, hash = query.parse_hash(q) if algo != "sha1": hashes = _first_element(storage.content_find({algo: hash})) if not hashes: return None return hashes["sha1"] return hash def lookup_content_ctags(q): """Return ctags information from a specified content. Args: q: query string of the form Yields: ctags information (dict) list if the content is found. """ sha1 = _lookup_content_sha1(q) if not sha1: return None ctags = list(idx_storage.content_ctags_get([sha1])) if not ctags: return None for ctag in ctags: yield converters.from_swh(ctag, hashess={"id"}) def lookup_content_filetype(q): """Return filetype information from a specified content. Args: q: query string of the form Yields: filetype information (dict) list if the content is found. """ sha1 = _lookup_content_sha1(q) if not sha1: return None filetype = _first_element(list(idx_storage.content_mimetype_get([sha1]))) if not filetype: return None return converters.from_filetype(filetype) def lookup_content_language(q): """Return language information from a specified content. Args: q: query string of the form Yields: language information (dict) list if the content is found. """ sha1 = _lookup_content_sha1(q) if not sha1: return None lang = _first_element(list(idx_storage.content_language_get([sha1]))) if not lang: return None return converters.from_swh(lang, hashess={"id"}) def lookup_content_license(q): """Return license information from a specified content. Args: q: query string of the form Yields: license information (dict) list if the content is found. 
""" sha1 = _lookup_content_sha1(q) if not sha1: return None lic = _first_element(idx_storage.content_fossology_license_get([sha1])) if not lic: return None return converters.from_swh({"id": sha1, "facts": lic[sha1]}, hashess={"id"}) def lookup_origin(origin: OriginInfo) -> OriginInfo: """Return information about the origin matching dict origin. Args: origin: origin's dict with 'url' key Returns: origin information as dict. """ origins = [origin] if origin["url"]: # handle case when user provided an origin url with a trailing # slash while the url in storage does not have it (e.g. GitHub) if origin["url"].endswith("/"): origins.append({"url": origin["url"][:-1]}) # handle case when user provided an origin url without a trailing # slash while the url in storage have it (e.g. Debian source package) else: origins.append({"url": f"{origin['url']}/"}) # Check all possible origin urls for orig in origins: origin_info = storage.origin_get(orig) if origin_info: break if not origin_info: msg = "Origin with url %s not found!" % origin["url"] raise NotFoundExc(msg) return converters.from_origin(origin_info) def lookup_origins( origin_from: int = 1, origin_count: int = 100 ) -> Iterator[OriginInfo]: """Get list of archived software origins in a paginated way. Origins are sorted by id before returning them Args: origin_from (int): The minimum id of the origins to return origin_count (int): The maximum number of origins to return Yields: origins information as dicts """ origins = storage.origin_get_range(origin_from, origin_count) return map(converters.from_origin, origins) def search_origin( url_pattern: str, limit: int = 50, with_visit: bool = False, page_token: Any = None ) -> Tuple[List[OriginInfo], Any]: """Search for origins whose urls contain a provided string pattern or match a provided regular expression. Args: url_pattern: the string pattern to search for in origin urls limit: the maximum number of found origins to return page_token: opaque string used to get the next results of a search Returns: list of origin information as dict. """ if search: results = search.origin_search( url_pattern=url_pattern, count=limit, page_token=page_token, with_visit=with_visit, ) origins = list(map(converters.from_origin, results["results"])) return (origins, results["next_page_token"]) else: # Fallback to swh-storage if swh-search is not configured offset = int(page_token) if page_token else 0 regexp = True search_words = [re.escape(word) for word in url_pattern.split()] if len(search_words) >= 7: url_pattern = ".*".join(search_words) else: pattern_parts = [] for permut in itertools.permutations(search_words): pattern_parts.append(".*".join(permut)) url_pattern = "|".join(pattern_parts) origins_raw = storage.origin_search( url_pattern, offset, limit, regexp, with_visit ) origins = list(map(converters.from_origin, origins_raw)) if len(origins) >= limit: page_token = str(offset + len(origins)) else: page_token = None return (origins, page_token) def search_origin_metadata(fulltext, limit=50): """Search for origins whose metadata match a provided string pattern. Args: fulltext: the string pattern to search for in origin metadata offset: number of found origins to skip before returning results limit: the maximum number of found origins to return Returns: list of origin metadata as dict. 
""" matches = idx_storage.origin_intrinsic_metadata_search_fulltext( conjunction=[fulltext], limit=limit ) results = [] for match in matches: match["from_revision"] = hashutil.hash_to_hex(match["from_revision"]) origin = storage.origin_get({"url": match["id"]}) del match["id"] result = converters.from_origin(origin) if result: result["metadata"] = match results.append(result) return results def lookup_origin_intrinsic_metadata(origin_dict): """Return intrinsic metadata for origin whose origin matches given origin. Args: origin_dict: origin's dict with keys ('type' AND 'url') Returns: origin metadata. """ origin_info = storage.origin_get(origin_dict) if not origin_info: msg = "Origin with url %s not found!" % origin_dict["url"] raise NotFoundExc(msg) origins = [origin_info["url"]] match = _first_element(idx_storage.origin_intrinsic_metadata_get(origins)) result = {} if match: result = match["metadata"] return result def _to_sha1_bin(sha1_hex): _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_hex, ["sha1"], "Only sha1_git is supported." # HACK: sha1_git really ) return sha1_git_bin def _check_directory_exists(sha1_git, sha1_git_bin): if len(list(storage.directory_missing([sha1_git_bin]))): raise NotFoundExc("Directory with sha1_git %s not found" % sha1_git) def lookup_directory(sha1_git): """Return information about the directory with id sha1_git. Args: sha1_git as string Returns: directory information as dict. """ empty_dir_sha1 = "4b825dc642cb6eb9a060e54bf8d69288fbee4904" if sha1_git == empty_dir_sha1: return [] sha1_git_bin = _to_sha1_bin(sha1_git) _check_directory_exists(sha1_git, sha1_git_bin) directory_entries = storage.directory_ls(sha1_git_bin) return map(converters.from_directory_entry, directory_entries) def lookup_directory_with_path(sha1_git, path_string): """Return directory information for entry with path path_string w.r.t. root directory pointed by directory_sha1_git Args: - directory_sha1_git: sha1_git corresponding to the directory to which we append paths to (hopefully) find the entry - the relative path to the entry starting from the directory pointed by directory_sha1_git Raises: NotFoundExc if the directory entry is not found """ sha1_git_bin = _to_sha1_bin(sha1_git) _check_directory_exists(sha1_git, sha1_git_bin) paths = path_string.strip(os.path.sep).split(os.path.sep) queried_dir = storage.directory_entry_get_by_path( sha1_git_bin, list(map(lambda p: p.encode("utf-8"), paths)) ) if not queried_dir: raise NotFoundExc( ("Directory entry with path %s from %s not found") % (path_string, sha1_git) ) return converters.from_directory_entry(queried_dir) def lookup_release(release_sha1_git): """Return information about the release with sha1 release_sha1_git. Args: release_sha1_git: The release's sha1 as hexadecimal Returns: Release information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ sha1_git_bin = _to_sha1_bin(release_sha1_git) release = _first_element(storage.release_get([sha1_git_bin])) if not release: raise NotFoundExc("Release with sha1_git %s not found." % release_sha1_git) return converters.from_release(release) def lookup_release_multiple(sha1_git_list): """Return information about the revisions identified with their sha1_git identifiers. Args: sha1_git_list: A list of revision sha1_git identifiers Returns: Release information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. 
""" sha1_bin_list = (_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list) releases = storage.release_get(sha1_bin_list) or [] return (converters.from_release(r) for r in releases) def lookup_revision(rev_sha1_git): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. NotFoundExc if there is no revision with the provided sha1_git. """ sha1_git_bin = _to_sha1_bin(rev_sha1_git) revision = _first_element(storage.revision_get([sha1_git_bin])) if not revision: raise NotFoundExc("Revision with sha1_git %s not found." % rev_sha1_git) return converters.from_revision(revision) def lookup_revision_multiple(sha1_git_list): """Return information about the revisions identified with their sha1_git identifiers. Args: sha1_git_list: A list of revision sha1_git identifiers Returns: Iterator of revisions information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ sha1_bin_list = (_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list) revisions = storage.revision_get(sha1_bin_list) or [] return (converters.from_revision(r) for r in revisions) def lookup_revision_message(rev_sha1_git): """Return the raw message of the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Decoded revision message as dict {'message': } Raises: ValueError if the identifier provided is not of sha1 nature. NotFoundExc if the revision is not found, or if it has no message """ sha1_git_bin = _to_sha1_bin(rev_sha1_git) revision = _first_element(storage.revision_get([sha1_git_bin])) if not revision: raise NotFoundExc("Revision with sha1_git %s not found." % rev_sha1_git) if "message" not in revision: raise NotFoundExc("No message for revision with sha1_git %s." % rev_sha1_git) res = {"message": revision["message"]} return res def _lookup_revision_id_by(origin, branch_name, timestamp): def _get_snapshot_branch(snapshot, branch_name): snapshot = lookup_snapshot( visit["snapshot"], branches_from=branch_name, branches_count=10 ) branch = None if branch_name in snapshot["branches"]: branch = snapshot["branches"][branch_name] return branch if isinstance(origin, int): origin = {"id": origin} elif isinstance(origin, str): origin = {"url": origin} else: raise TypeError('"origin" must be an int or a string.') visit = get_origin_visit(origin, visit_ts=timestamp) branch = _get_snapshot_branch(visit["snapshot"], branch_name) rev_id = None if branch and branch["target_type"] == "revision": rev_id = branch["target"] elif branch and branch["target_type"] == "alias": branch = _get_snapshot_branch(visit["snapshot"], branch["target"]) if branch and branch["target_type"] == "revision": rev_id = branch["target"] if not rev_id: raise NotFoundExc( "Revision for origin %s and branch %s not found." % (origin.get("url"), branch_name) ) return rev_id def lookup_revision_by(origin, branch_name="HEAD", timestamp=None): """Lookup revision by origin, snapshot branch name and visit timestamp. If branch_name is not provided, lookup using 'HEAD' as default. If timestamp is not provided, use the most recent. 

    Args:
        origin (Union[int,str]): origin of the revision
        branch_name (str): snapshot branch name
        timestamp (str/int): origin visit time frame

    Returns:
        dict: The revision matching the criteria

    Raises:
        NotFoundExc if no revision corresponds to the criteria

    """
    rev_id = _lookup_revision_id_by(origin, branch_name, timestamp)
    return lookup_revision(rev_id)


def lookup_revision_log(rev_sha1_git, limit):
    """Lookup revision log by revision id.

    Args:
        rev_sha1_git (str): The revision's sha1 as hexadecimal
        limit (int): the maximum number of revisions returned

    Returns:
        list: Revision log as list of revision dicts

    Raises:
        ValueError: if the identifier provided is not of sha1 nature.
        swh.web.common.exc.NotFoundExc: if there is no revision with the
            provided sha1_git.

    """
    # raises NotFoundExc if the revision does not exist
    lookup_revision(rev_sha1_git)
    sha1_git_bin = _to_sha1_bin(rev_sha1_git)
    revision_entries = storage.revision_log([sha1_git_bin], limit)
    return map(converters.from_revision, revision_entries)


def lookup_revision_log_by(origin, branch_name, timestamp, limit):
    """Lookup revision log by origin, snapshot branch name and visit
    timestamp.

    Args:
        origin (Union[int,str]): origin of the revision
        branch_name (str): snapshot branch
        timestamp (str/int): origin visit time frame
        limit (int): the maximum number of revisions returned

    Returns:
        list: Revision log as list of revision dicts

    Raises:
        swh.web.common.exc.NotFoundExc: if no revision corresponds to the
            criteria

    """
    rev_id = _lookup_revision_id_by(origin, branch_name, timestamp)
    return lookup_revision_log(rev_id, limit)


def lookup_revision_with_context_by(
    origin, branch_name, timestamp, sha1_git, limit=100
):
    """Return information about revision sha1_git, limited to the
    sub-graph of all transitive parents of sha1_git_root.
    sha1_git_root being resolved through the lookup of a revision by origin,
    branch_name and ts.

    In other words, sha1_git is an ancestor of sha1_git_root.

    Args:
        origin: origin of the revision.
        branch_name: revision's branch.
        timestamp: revision's time frame.
        sha1_git: one of sha1_git_root's ancestors.
        limit: limit the lookup to 100 revisions back.

    Returns:
        Pair of (root_revision, revision).
        Information on sha1_git if it is an ancestor of sha1_git_root
        including children leading to sha1_git_root

    Raises:
        BadInputExc in case of unknown algo_hash or bad hash.
        NotFoundExc if either revision is not found or if sha1_git is not an
            ancestor of sha1_git_root.

    """
    rev_root_id = _lookup_revision_id_by(origin, branch_name, timestamp)

    rev_root_id_bin = hashutil.hash_to_bytes(rev_root_id)

    rev_root = _first_element(storage.revision_get([rev_root_id_bin]))

    return (
        converters.from_revision(rev_root),
        lookup_revision_with_context(rev_root, sha1_git, limit),
    )


def lookup_revision_with_context(sha1_git_root, sha1_git, limit=100):
    """Return information about revision sha1_git, limited to the
    sub-graph of all transitive parents of sha1_git_root.

    In other words, sha1_git is an ancestor of sha1_git_root.

    Args:
        sha1_git_root: latest revision. The type is either a sha1 (as an hex
            string) or a non converted dict.
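            A non converted dict is assumed to carry at least an ``id``
            key, as returned by ``storage.revision_get``.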
        sha1_git: one of sha1_git_root's ancestors
        limit: limit the lookup to 100 revisions back

    Returns:
        Information on sha1_git if it is an ancestor of sha1_git_root
        including children leading to sha1_git_root

    Raises:
        BadInputExc in case of unknown algo_hash or bad hash
        NotFoundExc if either revision is not found or if sha1_git is not an
            ancestor of sha1_git_root

    """
    sha1_git_bin = _to_sha1_bin(sha1_git)

    revision = _first_element(storage.revision_get([sha1_git_bin]))
    if not revision:
        raise NotFoundExc("Revision %s not found" % sha1_git)

    if isinstance(sha1_git_root, str):
        sha1_git_root_bin = _to_sha1_bin(sha1_git_root)

        revision_root = _first_element(storage.revision_get([sha1_git_root_bin]))
        if not revision_root:
            raise NotFoundExc("Revision root %s not found" % sha1_git_root)
    else:
        sha1_git_root_bin = sha1_git_root["id"]

    revision_log = storage.revision_log([sha1_git_root_bin], limit)

    parents = {}
    children = defaultdict(list)

    for rev in revision_log:
        rev_id = rev["id"]
        parents[rev_id] = []
        for parent_id in rev["parents"]:
            parents[rev_id].append(parent_id)
            children[parent_id].append(rev_id)

    if revision["id"] not in parents:
        raise NotFoundExc(
            "Revision %s is not an ancestor of %s" % (sha1_git, sha1_git_root)
        )

    revision["children"] = children[revision["id"]]
    return converters.from_revision(revision)


def lookup_directory_with_revision(sha1_git, dir_path=None, with_data=False):
    """Return information on directory pointed by revision with sha1_git.

    If dir_path is not provided, display top level directory.
    Otherwise, display the directory pointed by dir_path (if it exists).

    Args:
        sha1_git: revision's hash.
        dir_path: optional directory pointed to by that revision.
        with_data: whether to retrieve the raw data if the path resolves
            to a content. Defaults to False (for the api).

    Returns:
        Information on the directory pointed to by that revision.

    Raises:
        BadInputExc in case of unknown algo_hash or bad hash.
        NotFoundExc either if the revision is not found or the path referenced
            does not exist.
        NotImplementedError in case dir_path exists but does not reference a
            type 'dir' or 'file'.

    """
    sha1_git_bin = _to_sha1_bin(sha1_git)

    revision = _first_element(storage.revision_get([sha1_git_bin]))
    if not revision:
        raise NotFoundExc("Revision %s not found" % sha1_git)

    dir_sha1_git_bin = revision["directory"]

    if dir_path:
        paths = dir_path.strip(os.path.sep).split(os.path.sep)
        entity = storage.directory_entry_get_by_path(
            dir_sha1_git_bin, list(map(lambda p: p.encode("utf-8"), paths))
        )
        if not entity:
            raise NotFoundExc(
                "Directory or File '%s' pointed to by revision %s not found"
                % (dir_path, sha1_git)
            )
    else:
        entity = {"type": "dir", "target": dir_sha1_git_bin}

    if entity["type"] == "dir":
        directory_entries = storage.directory_ls(entity["target"]) or []
        return {
            "type": "dir",
            "path": "." if not dir_path else dir_path,
            "revision": sha1_git,
            "content": list(map(converters.from_directory_entry, directory_entries)),
        }
    elif entity["type"] == "file":  # content
        content = _first_element(storage.content_find({"sha1_git": entity["target"]}))
        if not content:
            raise NotFoundExc("Content not found for revision %s" % sha1_git)
        if with_data:
            c = _first_element(storage.content_get([content["sha1"]]))
            content["data"] = c["data"]
        return {
            "type": "file",
            "path": "." if not dir_path else dir_path,
            "revision": sha1_git,
            "content": converters.from_content(content),
        }
    elif entity["type"] == "rev":  # revision
        revision = next(storage.revision_get([entity["target"]]))
        return {
            "type": "rev",
            "path": "."
            if not dir_path
            else dir_path,
            "revision": sha1_git,
            "content": converters.from_revision(revision),
        }
    else:
        raise NotImplementedError(
            "Entity of type %s not implemented." % entity["type"]
        )


def lookup_content(q):
    """Lookup the content designated by q.

    Args:
        q: query string of the form <algo_hash:hash>

    Raises:
        NotFoundExc if the requested content is not found

    """
    algo, hash = query.parse_hash(q)
    c = _first_element(storage.content_find({algo: hash}))
    if not c:
        raise NotFoundExc(
            "Content with %s checksum equals to %s not found!"
            % (algo, hashutil.hash_to_hex(hash))
        )
    return converters.from_content(c)


def lookup_content_raw(q):
    """Lookup the content defined by q.

    Args:
        q: query string of the form <algo_hash:hash>

    Returns:
        dict with 'sha1' and 'data' keys.
        data representing its raw data decoded.

    Raises:
        NotFoundExc if the requested content is not found or
        if the content bytes are not available in the storage

    """
    c = lookup_content(q)
    content_sha1_bytes = hashutil.hash_to_bytes(c["checksums"]["sha1"])
    content = _first_element(storage.content_get([content_sha1_bytes]))
    if not content:
        algo, hash = query.parse_hash(q)
        raise NotFoundExc(
            "Bytes of content with %s checksum equals to %s "
            "are not available!" % (algo, hashutil.hash_to_hex(hash))
        )
    return converters.from_content(content)


def stat_counters():
    """Return the stat counters for Software Heritage

    Returns:
        A dict mapping textual labels to integer values.

    """
    return storage.stat_counters()


def _lookup_origin_visits(
    origin_url: str, last_visit: Optional[int] = None, limit: int = 10
) -> Iterator[Dict[str, Any]]:
    """Yields the origin's visits.

    Args:
        origin_url (str): origin to list visits for
        last_visit (int): last visit to lookup from
        limit (int): Number of elements max to display

    Yields:
        Dictionaries of origin_visit for that origin

    """
    limit = min(limit, MAX_LIMIT)
    for visit in storage.origin_visit_get(
        origin_url, last_visit=last_visit, limit=limit
    ):
        visit["origin"] = origin_url
        yield visit


def lookup_origin_visits(
    origin: str, last_visit: Optional[int] = None, per_page: int = 10
) -> Iterator[OriginVisitInfo]:
    """Yields the origin's visits.

    Args:
        origin: origin to list visits for
        last_visit: last visit to lookup from
        per_page: maximum number of visits to yield

    Yields:
        Dictionaries of origin_visit for that origin

    """
    visits = _lookup_origin_visits(origin, last_visit=last_visit, limit=per_page)
    for visit in visits:
        visit_status = storage.origin_visit_status_get_latest(origin, visit["visit"])
        yield converters.from_origin_visit({**visit, **visit_status.to_dict()})


def lookup_origin_visit_latest(
    origin_url: str,
    require_snapshot: bool = False,
    type: Optional[str] = None,
    allowed_statuses: Optional[Iterable[str]] = None,
) -> Optional[OriginVisitInfo]:
    """Return the origin's latest visit

    Args:
        origin_url: origin to list visits for
        type: Optional visit type to filter on (e.g git, tar, dsc, svn,
            hg, npm, pypi, ...)
        allowed_statuses: list of visit statuses considered
            to find the latest visit. For instance,
            ``allowed_statuses=['full']`` will only consider visits that
            have successfully run to completion.
        require_snapshot: filter out origins without a snapshot

    Returns:
        The origin visit info as dict if found

    """
    visit_and_status = origin_get_latest_visit_status(
        storage,
        origin_url,
        type=type,
        allowed_statuses=allowed_statuses,
        require_snapshot=require_snapshot,
    )
    return (
        converters.from_origin_visit(
            {**visit_and_status[0].to_dict(), **visit_and_status[1].to_dict()}
        )
        if visit_and_status
        else None
    )


def lookup_origin_visit(origin_url: str, visit_id: int) -> OriginVisitInfo:
    """Return information about visit visit_id with origin origin_url.
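
    The visit itself and its latest status are fetched separately, then
    merged into a single dict before conversion.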

    Args:
        origin_url (str): origin concerned by the visit
        visit_id: the visit identifier to lookup

    Returns:
        The dict origin_visit concerned

    """
    visit = storage.origin_visit_get_by(origin_url, visit_id)
    if not visit:
        raise NotFoundExc(
            "Origin %s or its visit with id %s not found!" % (origin_url, visit_id)
        )
    visit_status = storage.origin_visit_status_get_latest(origin_url, visit_id)
    visit["origin"] = origin_url
    return converters.from_origin_visit({**visit, **visit_status.to_dict()})


def lookup_snapshot_sizes(snapshot_id):
    """Count the number of branches in the snapshot with the given id

    Args:
        snapshot_id (str): sha1 identifier of the snapshot

    Returns:
        dict: A dict whose keys are the target types of branches and
        values their corresponding amount

    """
    snapshot_id_bin = _to_sha1_bin(snapshot_id)
    snapshot_sizes = storage.snapshot_count_branches(snapshot_id_bin)
    if "revision" not in snapshot_sizes:
        snapshot_sizes["revision"] = 0
    if "release" not in snapshot_sizes:
        snapshot_sizes["release"] = 0
    # adjust revision / release count for display if aliases are defined
    if "alias" in snapshot_sizes:
        aliases = lookup_snapshot(
            snapshot_id, branches_count=snapshot_sizes["alias"], target_types=["alias"]
        )
        for alias in aliases["branches"].values():
            try:
                for target_type in ("revision", "release"):
                    snapshot = lookup_snapshot(
                        snapshot_id,
                        branches_from=alias["target"],
                        branches_count=1,
                        target_types=[target_type],
                    )
                    if snapshot and alias["target"] in snapshot["branches"]:
                        snapshot_sizes[target_type] += 1
            except NotFoundExc:
                # aliased revision or release is missing in the snapshot
                pass
        del snapshot_sizes["alias"]
    # remove possible None key returned by snapshot_count_branches
    # when null branches are present in the snapshot
    snapshot_sizes.pop(None, None)
    return snapshot_sizes


def lookup_snapshot(
    snapshot_id, branches_from="", branches_count=1000, target_types=None
):
    """Return information about a snapshot, aka the list of named
    branches found during a specific visit of an origin.

    Args:
        snapshot_id (str): sha1 identifier of the snapshot
        branches_from (str): optional parameter used to skip branches
            whose name is lexicographically lower than it before returning
            them
        branches_count (int): optional parameter used to restrain
            the amount of returned branches
        target_types (list): optional parameter used to filter the
            target types of branch to return (possible values that can be
            contained in that list are `'content', 'directory',
            'revision', 'release', 'snapshot', 'alias'`)

    Returns:
        A dict filled with the snapshot content.

    Raises:
        NotFoundExc if the snapshot is not found.

    """
    snapshot_id_bin = _to_sha1_bin(snapshot_id)
    snapshot = storage.snapshot_get_branches(
        snapshot_id_bin, branches_from.encode(), branches_count, target_types
    )
    if not snapshot:
        raise NotFoundExc("Snapshot with id %s not found!" % snapshot_id)
    return converters.from_snapshot(snapshot)


def lookup_latest_origin_snapshot(
    origin: str, allowed_statuses: Optional[Iterable[str]] = None
) -> Optional[Dict[str, Any]]:
    """Return information about the latest snapshot of an origin.

    .. warning:: At most 1000 branches contained in the snapshot
        will be returned for performance reasons.

    Args:
        origin: URL or integer identifier of the origin
        allowed_statuses: list of visit statuses considered
            to find the latest snapshot for the visit. For instance,
            ``allowed_statuses=['full']`` will only consider visits that
            have successfully run to completion.

    Returns:
        A dict filled with the snapshot content.
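
    Example:
        A usage sketch with an illustrative origin URL::

            snapshot = lookup_latest_origin_snapshot(
                "https://example.org/repo.git", allowed_statuses=["full"]
            )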
""" snp = snapshot_get_latest( storage, origin, allowed_statuses=allowed_statuses, branches_count=1000 ) return converters.from_snapshot(snp.to_dict()) if snp is not None else None def lookup_snapshot_branch_name_from_tip_revision( snapshot_id: str, revision_id: str ) -> Optional[str]: """Check if a revision corresponds to the tip of a snapshot branch Args: snapshot_id: hexadecimal representation of a snapshot id revision_id: hexadecimal representation of a revision id Returns: The name of the first found branch or None otherwise """ per_page = 10000 branches_from = "" snapshot: Dict[str, Any] = {"branches": {}} branches = [] while not branches_from or len(snapshot["branches"]) == per_page + 1: snapshot = lookup_snapshot( snapshot_id, target_types=[REVISION], branches_from=branches_from, branches_count=per_page + 1, ) branches += [ {"name": k, "target": v["target"]} for k, v in snapshot["branches"].items() ] branches_from = branches[-1]["name"] for branch in branches: if branch["target"] == revision_id: return branch["name"] return None def lookup_revision_through(revision, limit=100): """Retrieve a revision from the criterion stored in revision dictionary. Args: revision: Dictionary of criterion to lookup the revision with. Here are the supported combination of possible values: - origin_url, branch_name, ts, sha1_git - origin_url, branch_name, ts - sha1_git_root, sha1_git - sha1_git Returns: None if the revision is not found or the actual revision. """ if ( "origin_url" in revision and "branch_name" in revision and "ts" in revision and "sha1_git" in revision ): return lookup_revision_with_context_by( revision["origin_url"], revision["branch_name"], revision["ts"], revision["sha1_git"], limit, ) if "origin_url" in revision and "branch_name" in revision and "ts" in revision: return lookup_revision_by( revision["origin_url"], revision["branch_name"], revision["ts"] ) if "sha1_git_root" in revision and "sha1_git" in revision: return lookup_revision_with_context( revision["sha1_git_root"], revision["sha1_git"], limit ) if "sha1_git" in revision: return lookup_revision(revision["sha1_git"]) # this should not happen raise NotImplementedError("Should not happen!") def lookup_directory_through_revision(revision, path=None, limit=100, with_data=False): """Retrieve the directory information from the revision. Args: revision: dictionary of criterion representing a revision to lookup path: directory's path to lookup. limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of. with_data: indicate to retrieve the content's raw data if path resolves to a content. Returns: The directory pointing to by the revision criterions at path. """ rev = lookup_revision_through(revision, limit) if not rev: raise NotFoundExc("Revision with criterion %s not found!" % revision) return (rev["id"], lookup_directory_with_revision(rev["id"], path, with_data)) def _vault_request(vault_fn, *args, **kwargs): try: return vault_fn(*args, **kwargs) except VaultNotFoundExc: return None def vault_cook(obj_type, obj_id, email=None): """Cook a vault bundle. """ return _vault_request(vault.cook, obj_type, obj_id, email=email) def vault_fetch(obj_type, obj_id): """Fetch a vault bundle. """ return _vault_request(vault.fetch, obj_type, obj_id) def vault_progress(obj_type, obj_id): """Get the current progress of a vault bundle. 
""" return _vault_request(vault.progress, obj_type, obj_id) def diff_revision(rev_id): """Get the list of file changes (insertion / deletion / modification / renaming) for a particular revision. """ rev_sha1_git_bin = _to_sha1_bin(rev_id) changes = diff.diff_revision(storage, rev_sha1_git_bin, track_renaming=True) for change in changes: change["from"] = converters.from_directory_entry(change["from"]) change["to"] = converters.from_directory_entry(change["to"]) if change["from_path"]: change["from_path"] = change["from_path"].decode("utf-8") if change["to_path"]: change["to_path"] = change["to_path"].decode("utf-8") return changes class _RevisionsWalkerProxy(object): """ Proxy class wrapping a revisions walker iterator from swh-storage and performing needed conversions. """ def __init__(self, rev_walker_type, rev_start, *args, **kwargs): rev_start_bin = hashutil.hash_to_bytes(rev_start) self.revisions_walker = revisions_walker.get_revisions_walker( rev_walker_type, storage, rev_start_bin, *args, **kwargs ) def export_state(self): return self.revisions_walker.export_state() def __next__(self): return converters.from_revision(next(self.revisions_walker)) def __iter__(self): return self def get_revisions_walker(rev_walker_type, rev_start, *args, **kwargs): """ Utility function to instantiate a revisions walker of a given type, see :mod:`swh.storage.algos.revisions_walker`. Args: rev_walker_type (str): the type of revisions walker to return, possible values are: ``committer_date``, ``dfs``, ``dfs_post``, ``bfs`` and ``path`` rev_start (str): hexadecimal representation of a revision identifier args (list): position arguments to pass to the revisions walker constructor kwargs (dict): keyword arguments to pass to the revisions walker constructor """ # first check if the provided revision is valid lookup_revision(rev_start) return _RevisionsWalkerProxy(rev_walker_type, rev_start, *args, **kwargs) def lookup_object(object_type: str, object_id: str) -> Dict[str, Any]: """ Utility function for looking up an object in the archive by its type and id. Args: object_type (str): the type of object to lookup, either *content*, *directory*, *release*, *revision* or *snapshot* object_id (str): the *sha1_git* checksum identifier in hexadecimal form of the object to lookup Returns: Dict[str, Any]: A dictionary describing the object or a list of dictionary for the directory object type. Raises: swh.web.common.exc.NotFoundExc: if the object could not be found in the archive BadInputExc: if the object identifier is invalid """ if object_type == CONTENT: return lookup_content(f"sha1_git:{object_id}") elif object_type == DIRECTORY: return {"id": object_id, "content": list(lookup_directory(object_id))} elif object_type == RELEASE: return lookup_release(object_id) elif object_type == REVISION: return lookup_revision(object_id) elif object_type == SNAPSHOT: return lookup_snapshot(object_id) raise BadInputExc( ( "Invalid swh object type! Valid types are " f"{CONTENT}, {DIRECTORY}, {RELEASE} " f"{REVISION} or {SNAPSHOT}." ) ) -def lookup_missing_hashes(grouped_pids: Dict[str, List[bytes]]) -> Set[str]: +def lookup_missing_hashes(grouped_swhids: Dict[str, List[bytes]]) -> Set[str]: """Lookup missing Software Heritage persistent identifier hash, using batch processing. 

    Args:
        A dictionary with:
-           keys: persistent identifier type
-           values: list(bytes) persistent identifier hash
+           keys: object types
+           values: object hashes

    Returns:
        A set(hexadecimal) of the hashes not found in the storage

    """
    missing_hashes = []
-    for obj_type, obj_ids in grouped_pids.items():
+    for obj_type, obj_ids in grouped_swhids.items():
        if obj_type == CONTENT:
            missing_hashes.append(storage.content_missing_per_sha1_git(obj_ids))
        elif obj_type == DIRECTORY:
            missing_hashes.append(storage.directory_missing(obj_ids))
        elif obj_type == REVISION:
            missing_hashes.append(storage.revision_missing(obj_ids))
        elif obj_type == RELEASE:
            missing_hashes.append(storage.release_missing(obj_ids))
        elif obj_type == SNAPSHOT:
            missing_hashes.append(storage.snapshot_missing(obj_ids))

    missing = set(
        map(lambda x: hashutil.hash_to_hex(x), itertools.chain(*missing_hashes))
    )

    return missing
diff --git a/swh/web/misc/badges.py b/swh/web/misc/badges.py
index 9e7ba859..17809b5d 100644
--- a/swh/web/misc/badges.py
+++ b/swh/web/misc/badges.py
@@ -1,169 +1,167 @@
# Copyright (C) 2019-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information

from base64 import b64encode
from typing import cast, Optional

from django.conf.urls import url
from django.contrib.staticfiles import finders
from django.http import HttpResponse, HttpRequest

from pybadges import badge

from swh.model.exceptions import ValidationError
from swh.model.identifiers import (
-    persistent_identifier,
-    parse_persistent_identifier,
+    swhid,
+    parse_swhid,
    CONTENT,
    DIRECTORY,
    ORIGIN,
    RELEASE,
    REVISION,
    SNAPSHOT,
)
from swh.web.common import service
from swh.web.common.exc import BadInputExc, NotFoundExc
-from swh.web.common.identifiers import resolve_swh_persistent_id
+from swh.web.common.identifiers import resolve_swhid
from swh.web.common.utils import reverse

_orange = "#f36a24"
_blue = "#0172b2"
_red = "#cd5741"

_swh_logo_data = None

_badge_config = {
    CONTENT: {"color": _blue, "title": "Archived source file",},
    DIRECTORY: {"color": _blue, "title": "Archived source tree",},
    ORIGIN: {"color": _orange, "title": "Archived software repository",},
    RELEASE: {"color": _blue, "title": "Archived software release",},
    REVISION: {"color": _blue, "title": "Archived commit",},
    SNAPSHOT: {"color": _blue, "title": "Archived software repository snapshot",},
    "error": {"color": _red, "title": "An error occurred when generating the badge"},
}


def _get_logo_data() -> str:
    """
    Get data-URI for Software Heritage SVG logo to embed it in
    the generated badges.
    """
    global _swh_logo_data
    if _swh_logo_data is None:
        swh_logo_path = cast(str, finders.find("img/swh-logo-white.svg"))
        with open(swh_logo_path, "rb") as swh_logo_file:
            _swh_logo_data = "data:image/svg+xml;base64,%s" % b64encode(
                swh_logo_file.read()
            ).decode("ascii")
    return _swh_logo_data


def _swh_badge(
    request: HttpRequest,
    object_type: str,
    object_id: str,
-    object_pid: Optional[str] = "",
+    object_swhid: Optional[str] = "",
) -> HttpResponse:
    """
    Generate a Software Heritage badge for a given object type and id.
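
    The badge is rendered with pybadges and, when the object is found, links
    back to the object's browse page in the archive.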

    Args:
        request: input http request
        object_type: The type of swh object to generate a badge for,
            either *content*, *directory*, *revision*, *release*, *origin*
            or *snapshot*
        object_id: The id of the swh object, either an url for origin
            type or a *sha1* for other object types
-        object_pid: If provided, the object persistent
-            identifier will not be recomputed
+        object_swhid: If provided, the object SWHID will not be recomputed

    Returns:
        HTTP response with content type *image/svg+xml* containing the SVG
        badge data. If the provided parameters are invalid, HTTP 400 status
        code will be returned. If the object can not be found in the archive,
        HTTP 404 status code will be returned.

    """
    left_text = "error"
    whole_link = None

    try:
        if object_type == ORIGIN:
            service.lookup_origin({"url": object_id})
            right_text = "repository"
            whole_link = reverse(
                "browse-origin", query_params={"origin_url": object_id}
            )
        else:
-            # when pid is provided, object type and id will be parsed
+            # when SWHID is provided, object type and id will be parsed
            # from it
-            if object_pid:
-                parsed_pid = parse_persistent_identifier(object_pid)
-                object_type = parsed_pid.object_type
-                object_id = parsed_pid.object_id
+            if object_swhid:
+                parsed_swhid = parse_swhid(object_swhid)
+                object_type = parsed_swhid.object_type
+                object_id = parsed_swhid.object_id
            swh_object = service.lookup_object(object_type, object_id)
-            if object_pid:
-                right_text = object_pid
+            if object_swhid:
+                right_text = object_swhid
            else:
-                right_text = persistent_identifier(object_type, object_id)
+                right_text = swhid(object_type, object_id)

-            whole_link = resolve_swh_persistent_id(right_text)["browse_url"]
-            # remove pid metadata if any for badge text
-            if object_pid:
+            whole_link = resolve_swhid(right_text)["browse_url"]
+            # remove SWHID metadata if any for badge text
+            if object_swhid:
                right_text = right_text.split(";")[0]
            # use release name for badge text
            if object_type == RELEASE:
                right_text = "release %s" % swh_object["name"]
        left_text = "archived"
    except (BadInputExc, ValidationError):
        right_text = f'invalid {object_type if object_type else "object"} id'
        object_type = "error"
    except NotFoundExc:
        right_text = f'{object_type if object_type else "object"} not found'
        object_type = "error"

    badge_data = badge(
        left_text=left_text,
        right_text=right_text,
        right_color=_badge_config[object_type]["color"],
        whole_link=request.build_absolute_uri(whole_link),
        whole_title=_badge_config[object_type]["title"],
        logo=_get_logo_data(),
        embed_logo=True,
    )

    return HttpResponse(badge_data, content_type="image/svg+xml")


-def _swh_badge_pid(request: HttpRequest, object_pid: str) -> HttpResponse:
+def _swh_badge_swhid(request: HttpRequest, object_swhid: str) -> HttpResponse:
    """
-    Generate a Software Heritage badge for a given object persistent
-    identifier.
+    Generate a Software Heritage badge for a given object SWHID.

    Args:
        request (django.http.HttpRequest): input http request
-        object_pid (str): A swh object persistent identifier
+        object_swhid (str): a SWHID of an archived object

    Returns:
        django.http.HttpResponse: An http response with content type
        *image/svg+xml* containing the SVG badge data. If any error
        occurs, a status code of 400 will be returned.
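
    Example:
        A hypothetical badge request path (the SWHID below is a placeholder,
        not a real object)::

            /badge/swh:1:rev:0000000000000000000000000000000000000000/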
""" - return _swh_badge(request, "", "", object_pid) + return _swh_badge(request, "", "", object_swhid) urlpatterns = [ url( r"^badge/(?P[a-z]+)/(?P.+)/$", _swh_badge, name="swh-badge", ), url( - r"^badge/(?Pswh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$", - _swh_badge_pid, - name="swh-badge-pid", + r"^badge/(?Pswh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$", + _swh_badge_swhid, + name="swh-badge-swhid", ), ] diff --git a/swh/web/templates/includes/origin-search-form.html b/swh/web/templates/includes/origin-search-form.html index 12a9dcb1..7aa9b6d9 100644 --- a/swh/web/templates/includes/origin-search-form.html +++ b/swh/web/templates/includes/origin-search-form.html @@ -1,40 +1,40 @@ {% comment %} Copyright (C) 2020 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %}
diff --git a/swh/web/templates/includes/show-swh-ids.html b/swh/web/templates/includes/show-swhids.html
similarity index 71%
rename from swh/web/templates/includes/show-swh-ids.html
rename to swh/web/templates/includes/show-swhids.html
index 888d1b41..45188505 100644
--- a/swh/web/templates/includes/show-swh-ids.html
+++ b/swh/web/templates/includes/show-swhids.html
@@ -1,98 +1,101 @@
{% comment %}
Copyright (C) 2017-2020 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}

{% load swh_templatetags %}

{% if swhids_info %}