diff --git a/cypress/e2e/origin-search.cy.js b/cypress/e2e/origin-search.cy.js
index d9934c96..7214487e 100644
--- a/cypress/e2e/origin-search.cy.js
+++ b/cypress/e2e/origin-search.cy.js
@@ -1,653 +1,677 @@
/**
- * Copyright (C) 2019-2021 The Software Heritage developers
+ * Copyright (C) 2019-2022 The Software Heritage developers
* See the AUTHORS file at the top-level directory of this distribution
* License: GNU Affero General Public License version 3, or any later version
* See top-level LICENSE file for more information
*/
const nonExistentText = 'NoMatchExists';
let origin;
let url;
/**
 * Fill the given search input with searchText and submit the search form.
 * When the text looks like a SWHID, the resolve endpoint is intercepted
 * and awaited so later assertions run after resolution completes.
 */
function doSearch(searchText, searchInputElt = '#swh-origins-url-patterns') {
  const isSWHID = searchText.startsWith('swh:');
  if (isSWHID) {
    cy.intercept('**/api/1/resolve/**').as('swhidResolve');
  }
  // set almost all of the text in one shot, then type only the final
  // character: typing char by char would fire one SWHID validation
  // request per keystroke
  const head = searchText.slice(0, -1);
  const tail = searchText.slice(-1);
  cy.get(searchInputElt)
    .invoke('val', head)
    .type(tail)
    .get('.swh-search-icon')
    .click({force: true});
  if (isSWHID) {
    cy.wait('@swhidResolve');
  }
}
// Run a search and assert the browser ended up on the expected path.
function searchShouldRedirect(searchText, redirectUrl) {
  doSearch(searchText);
  cy.location('pathname').should('equal', redirectUrl);
}
/**
 * Run a search and assert that the "not found" feedback shown to the user
 * contains the expected message.
 *
 * Previously the msg argument was silently ignored for non-SWHID searches,
 * so callers passing a plain text (e.g. the "No origins matching..." check)
 * asserted nothing; that message is what origin-search.js writes into
 * #swh-no-result when the result list is empty.
 */
function searchShouldShowNotFound(searchText, msg) {
  doSearch(searchText);
  if (searchText.startsWith('swh:')) {
    // SWHID lookups report errors through the input's validation feedback
    cy.get('.invalid-feedback')
      .should('be.visible')
      .and('contain', msg);
  } else {
    // plain origin searches report errors in the dedicated no-result element
    cy.get('#swh-no-result')
      .should('be.visible')
      .and('contain', msg);
  }
}
/**
 * Stub every latest-visit API call with a canned response.
 * The optional alias suffix allows registering several distinct stubs
 * within a single test.
 */
function stubOriginVisitLatestRequests(status = 200, response = {type: 'tar'}, aliasSuffix = '') {
  const stubbedReply = {
    statusCode: status,
    body: response
  };
  cy.intercept({url: '**/visit/latest/**'}, stubbedReply)
    .as(`originVisitLatest${aliasSuffix}`);
}
describe('Test origin-search', function() {
before(function() {
origin = this.origin[0];
url = this.Urls.browse_search();
});
beforeEach(function() {
cy.visit(url);
});
it('should have focus on search form after page load', function() {
cy.get('#swh-origins-url-patterns')
.should('have.attr', 'autofocus');
// for some reason, autofocus is not honored when running cypress tests
// while it is in non controlled browsers
// .should('have.focus');
});
it('should redirect to browse when archived URL is searched', function() {
cy.get('#swh-origins-url-patterns')
.type(origin.url);
cy.get('.swh-search-icon')
.click();
cy.location('pathname')
.should('eq', this.Urls.browse_origin_directory());
cy.location('search')
.should('eq', `?origin_url=${origin.url}`);
});
it('should not redirect for non valid URL', function() {
cy.get('#swh-origins-url-patterns')
.type('www.example'); // Invalid URL
cy.get('.swh-search-icon')
.click();
cy.location('pathname')
.should('eq', this.Urls.browse_search()); // Stay in the current page
});
it('should not redirect for valid non archived URL', function() {
cy.get('#swh-origins-url-patterns')
.type('http://eaxmple.com/test/'); // Valid URL, but not archived
cy.get('.swh-search-icon')
.click();
cy.location('pathname')
.should('eq', this.Urls.browse_search()); // Stay in the current page
});
it('should remove origin URL with no archived content', function() {
stubOriginVisitLatestRequests(404);
// Using a non full origin URL here
// This is because T3354 redirects to the origin in case of a valid, archived URL
cy.get('#swh-origins-url-patterns')
.type(origin.url.slice(0, -1));
cy.get('.swh-search-icon')
.click();
cy.wait('@originVisitLatest');
cy.get('#origin-search-results')
.should('be.visible')
.find('tbody tr').should('have.length', 0);
stubOriginVisitLatestRequests(200, {}, '2');
cy.get('.swh-search-icon')
.click();
cy.wait('@originVisitLatest2');
cy.get('#origin-search-results')
.should('be.visible')
.find('tbody tr').should('have.length', 0);
});
it('should filter origins by visit type', function() {
cy.intercept('**/visit/latest/**').as('checkOriginVisits');
cy.get('#swh-origins-url-patterns')
.type('http');
for (const visitType of ['git', 'tar']) {
cy.get('#swh-search-visit-type')
.select(visitType);
cy.get('.swh-search-icon')
.click();
cy.wait('@checkOriginVisits');
cy.get('#origin-search-results')
.should('be.visible');
cy.get('tbody tr td.swh-origin-visit-type').then(elts => {
for (const elt of elts) {
cy.get(elt).should('have.text', visitType);
}
});
}
});
it('should show not found message when no repo matches', function() {
searchShouldShowNotFound(nonExistentText,
'No origins matching the search criteria were found.');
});
it('should add appropriate URL parameters', function() {
// Check all three checkboxes and check if
// correct url params are added
cy.get('#swh-search-origins-with-visit')
.check({force: true})
.get('#swh-filter-empty-visits')
.check({force: true})
.get('#swh-search-origin-metadata')
.check({force: true})
.then(() => {
const searchText = origin.url.slice(0, -1);
doSearch(searchText);
cy.location('search').then(locationSearch => {
const urlParams = new URLSearchParams(locationSearch);
const query = urlParams.get('q');
const withVisit = urlParams.has('with_visit');
const withContent = urlParams.has('with_content');
const searchMetadata = urlParams.has('search_metadata');
assert.strictEqual(query, searchText);
assert.strictEqual(withVisit, true);
assert.strictEqual(withContent, true);
assert.strictEqual(searchMetadata, true);
});
});
});
it('should search in origin intrinsic metadata', function() {
cy.intercept('GET', '**/origin/metadata-search/**').as(
'originMetadataSearch'
);
cy.get('#swh-search-origins-with-visit')
.check({force: true})
.get('#swh-filter-empty-visits')
.check({force: true})
.get('#swh-search-origin-metadata')
.check({force: true})
.then(() => {
const searchText = 'plugin';
doSearch(searchText);
cy.wait('@originMetadataSearch').then((req) => {
expect(req.response.body[0].metadata.metadata.description).to.equal(
'Line numbering plugin for Highlight.js'
// metadata is defined in _TEST_ORIGINS variable in swh/web/tests/data.py
);
});
});
});
it('should not send request to the resolve endpoint', function() {
cy.intercept(`${this.Urls.api_1_resolve_swhid('').slice(0, -1)}**`)
.as('resolveSWHID');
cy.intercept(`${this.Urls.api_1_origin_search(origin.url.slice(0, -1))}**`)
.as('searchOrigin');
cy.get('#swh-origins-url-patterns')
.type(origin.url.slice(0, -1));
cy.get('.swh-search-icon')
.click();
cy.wait('@searchOrigin');
cy.xhrShouldBeCalled('resolveSWHID', 0);
cy.xhrShouldBeCalled('searchOrigin', 1);
});
it('should add query language support for staff users', function() {
cy.get('#swh-search-use-ql')
.should('not.exist');
cy.adminLogin();
cy.visit(url);
cy.get('#swh-search-use-ql')
.should('exist');
});
it('should show error messages when using the query language', function() {
cy.adminLogin();
cy.visit(url);
cy.intercept('GET', `${this.Urls.api_1_origin_search('**')}**`,
{
body: {
'exception': 'BadInputExc',
'reason': 'Syntax error in search query: Invalid query'
},
statusCode: 400
})
.as('searchOrigin');
cy.get('#swh-search-use-ql')
.should('exist')
.click({force: true}); // Covered by label
cy.get('#swh-origins-url-patterns')
.type('this is not a valid query')
.type('{enter}');
cy.wait('@searchOrigin').then((xhr) => {
cy.get('#swh-no-result')
.should('contain', 'Syntax error in search query');
});
});
// Trigger a search with the current form state and assert that at least one
// result row (identified by its visit-type cell) is rendered.
// Requires a '@checkOriginVisits' intercept to have been registered by the
// calling test beforehand.
function checkSearchHasResults() {
  cy.get('.swh-search-icon')
    .click();
  cy.wait('@checkOriginVisits');
  cy.get('#origin-search-results')
    .should('be.visible');
  cy.get('tbody tr td.swh-origin-visit-type')
    .should('exist');
}
it('should search all origins when no pattern is provided', function() {
cy.intercept('**/visit/latest/**').as('checkOriginVisits');
// with default filters
checkSearchHasResults();
// remove filters
cy.get('#swh-search-origins-with-visit')
.uncheck({force: true})
.get('#swh-filter-empty-visits')
.uncheck({force: true});
checkSearchHasResults();
});
it('should search all origins for a visit type', function() {
cy.intercept('**/visit/latest/**').as('checkOriginVisits');
for (const visitType of ['git', 'tar']) {
cy.get('#swh-search-visit-type')
.select(visitType);
checkSearchHasResults();
cy.get('tbody tr td.swh-origin-visit-type').then(elts => {
for (const elt of elts) {
cy.get(elt).should('have.text', visitType);
}
});
}
});
+ it('should encode origin argument in latest visit URL queried by XHR', function() {
+ // origin added in tests data by Python
+ const originUrl = 'https://example.org/project/download.php?version=2.0';
+ cy.intercept(`**/api/1/origin/${encodeURIComponent(originUrl)}/visit/latest/**`)
+ .as('checkOriginVisit');
+
+ doSearch(originUrl);
+
+ cy.wait('@checkOriginVisit');
+
+ cy.get('.swh-search-result-entry')
+ .should('have.length', 1);
+
+ cy.get('.swh-search-result-entry#origin-0 .swh-origin-visit-type')
+ .should('have.text', 'tar');
+
+ cy.get('.swh-search-result-entry#origin-0 td a')
+ .should('have.text', originUrl);
+
+ cy.get('.swh-search-result-entry#origin-0 .swh-visit-status')
+ .should('have.text', 'Archived');
+
+ });
+
context('Test pagination', function() {
it('should not paginate if there are not many results', function() {
// Setup search
cy.get('#swh-search-origins-with-visit')
.uncheck({force: true})
.get('#swh-filter-empty-visits')
.uncheck({force: true})
.then(() => {
const searchText = 'libtess';
// Get first page of results
doSearch(searchText);
cy.get('.swh-search-result-entry')
.should('have.length', 1);
cy.get('.swh-search-result-entry#origin-0 td a')
.should('have.text', 'https://github.com/memononen/libtess2');
cy.get('#origins-prev-results-button')
.should('have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('have.class', 'disabled');
});
});
it('should paginate forward when there are many results', function() {
stubOriginVisitLatestRequests();
// Setup search
cy.get('#swh-search-origins-with-visit')
.uncheck({force: true})
.get('#swh-filter-empty-visits')
.uncheck({force: true})
.then(() => {
const searchText = 'many.origins';
// Get first page of results
doSearch(searchText);
cy.wait('@originVisitLatest');
cy.get('.swh-search-result-entry')
.should('have.length', 100);
cy.get('.swh-search-result-entry#origin-0 td a')
.should('have.text', 'https://many.origins/1');
cy.get('.swh-search-result-entry#origin-99 td a')
.should('have.text', 'https://many.origins/100');
cy.get('#origins-prev-results-button')
.should('have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
// Get second page of results
cy.get('#origins-next-results-button a')
.click();
cy.wait('@originVisitLatest');
cy.get('.swh-search-result-entry')
.should('have.length', 100);
cy.get('.swh-search-result-entry#origin-0 td a')
.should('have.text', 'https://many.origins/101');
cy.get('.swh-search-result-entry#origin-99 td a')
.should('have.text', 'https://many.origins/200');
cy.get('#origins-prev-results-button')
.should('not.have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
// Get third (and last) page of results
cy.get('#origins-next-results-button a')
.click();
cy.wait('@originVisitLatest');
cy.get('.swh-search-result-entry')
.should('have.length', 50);
cy.get('.swh-search-result-entry#origin-0 td a')
.should('have.text', 'https://many.origins/201');
cy.get('.swh-search-result-entry#origin-49 td a')
.should('have.text', 'https://many.origins/250');
cy.get('#origins-prev-results-button')
.should('not.have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('have.class', 'disabled');
});
});
it('should paginate backward from a middle page', function() {
stubOriginVisitLatestRequests();
// Setup search
cy.get('#swh-search-origins-with-visit')
.uncheck({force: true})
.get('#swh-filter-empty-visits')
.uncheck({force: true})
.then(() => {
const searchText = 'many.origins';
// Get first page of results
doSearch(searchText);
cy.wait('@originVisitLatest');
cy.get('#origins-prev-results-button')
.should('have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
// Get second page of results
cy.get('#origins-next-results-button a')
.click();
cy.wait('@originVisitLatest');
cy.get('#origins-prev-results-button')
.should('not.have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
// Get first page of results again
cy.get('#origins-prev-results-button a')
.click();
cy.wait('@originVisitLatest');
cy.get('.swh-search-result-entry')
.should('have.length', 100);
cy.get('.swh-search-result-entry#origin-0 td a')
.should('have.text', 'https://many.origins/1');
cy.get('.swh-search-result-entry#origin-99 td a')
.should('have.text', 'https://many.origins/100');
cy.get('#origins-prev-results-button')
.should('have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
});
});
it('should paginate backward from the last page', function() {
stubOriginVisitLatestRequests();
// Setup search
cy.get('#swh-search-origins-with-visit')
.uncheck({force: true})
.get('#swh-filter-empty-visits')
.uncheck({force: true})
.then(() => {
const searchText = 'many.origins';
// Get first page of results
doSearch(searchText);
cy.wait('@originVisitLatest');
cy.get('#origins-prev-results-button')
.should('have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
// Get second page of results
cy.get('#origins-next-results-button a')
.click();
cy.wait('@originVisitLatest');
cy.get('#origins-prev-results-button')
.should('not.have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
// Get third (and last) page of results
cy.get('#origins-next-results-button a')
.click();
cy.get('#origins-prev-results-button')
.should('not.have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('have.class', 'disabled');
// Get second page of results again
cy.get('#origins-prev-results-button a')
.click();
cy.wait('@originVisitLatest');
cy.get('.swh-search-result-entry')
.should('have.length', 100);
cy.get('.swh-search-result-entry#origin-0 td a')
.should('have.text', 'https://many.origins/101');
cy.get('.swh-search-result-entry#origin-99 td a')
.should('have.text', 'https://many.origins/200');
cy.get('#origins-prev-results-button')
.should('not.have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
// Get first page of results again
cy.get('#origins-prev-results-button a')
.click();
cy.wait('@originVisitLatest');
cy.get('.swh-search-result-entry')
.should('have.length', 100);
cy.get('.swh-search-result-entry#origin-0 td a')
.should('have.text', 'https://many.origins/1');
cy.get('.swh-search-result-entry#origin-99 td a')
.should('have.text', 'https://many.origins/100');
cy.get('#origins-prev-results-button')
.should('have.class', 'disabled');
cy.get('#origins-next-results-button')
.should('not.have.class', 'disabled');
});
});
});
context('Test valid SWHIDs', function() {
it('should resolve directory', function() {
const redirectUrl = this.Urls.browse_directory(origin.content[0].directory);
const swhid = `swh:1:dir:${origin.content[0].directory}`;
searchShouldRedirect(swhid, redirectUrl);
});
it('should resolve revision', function() {
const redirectUrl = this.Urls.browse_revision(origin.revisions[0]);
const swhid = `swh:1:rev:${origin.revisions[0]}`;
searchShouldRedirect(swhid, redirectUrl);
});
it('should resolve snapshot', function() {
const redirectUrl = this.Urls.browse_snapshot_directory(origin.snapshot);
const swhid = `swh:1:snp:${origin.snapshot}`;
searchShouldRedirect(swhid, redirectUrl);
});
it('should resolve content', function() {
const redirectUrl = this.Urls.browse_content(`sha1_git:${origin.content[0].sha1git}`);
const swhid = `swh:1:cnt:${origin.content[0].sha1git}`;
searchShouldRedirect(swhid, redirectUrl);
});
it('should not send request to the search endpoint', function() {
const swhid = `swh:1:rev:${origin.revisions[0]}`;
cy.intercept(this.Urls.api_1_resolve_swhid(swhid))
.as('resolveSWHID');
cy.intercept(`${this.Urls.api_1_origin_search('').slice(0, -1)}**`)
.as('searchOrigin');
cy.get('#swh-origins-url-patterns')
.type(swhid);
cy.get('.swh-search-icon')
.click();
cy.wait('@resolveSWHID');
cy.xhrShouldBeCalled('resolveSWHID', 1);
cy.xhrShouldBeCalled('searchOrigin', 0);
});
});
context('Test invalid SWHIDs', function() {
it('should show not found for directory', function() {
const swhid = `swh:1:dir:${this.unarchivedRepo.rootDirectory}`;
const msg = `Directory with sha1_git ${this.unarchivedRepo.rootDirectory} not found`;
searchShouldShowNotFound(swhid, msg);
});
it('should show not found for snapshot', function() {
const swhid = `swh:1:snp:${this.unarchivedRepo.snapshot}`;
const msg = `Snapshot with id ${this.unarchivedRepo.snapshot} not found!`;
searchShouldShowNotFound(swhid, msg);
});
it('should show not found for revision', function() {
const swhid = `swh:1:rev:${this.unarchivedRepo.revision}`;
const msg = `Revision with sha1_git ${this.unarchivedRepo.revision} not found.`;
searchShouldShowNotFound(swhid, msg);
});
it('should show not found for content', function() {
const swhid = `swh:1:cnt:${this.unarchivedRepo.content[0].sha1git}`;
const msg = `Content with sha1_git checksum equals to ${this.unarchivedRepo.content[0].sha1git} not found!`;
searchShouldShowNotFound(swhid, msg);
});
// Visit `url`, search for `swhidInput` through the input `searchInputElt`
// and assert that HTML5 form validation rejects the value, optionally
// checking that the browser's validation message contains
// `validationMessagePattern`.
function checkInvalidSWHIDReport(url, searchInputElt, swhidInput, validationMessagePattern = '') {
  cy.visit(url);
  doSearch(swhidInput, searchInputElt);
  // checkValidity() reflects the input element's HTML5 validation state
  cy.get(searchInputElt)
    .then($el => $el[0].checkValidity()).should('be.false');
  // a rejected value must come with a non-empty validation message
  cy.get(searchInputElt)
    .invoke('prop', 'validationMessage')
    .should('not.equal', '')
    .should('contain', validationMessagePattern);
}
it('should report invalid SWHID in search page input', function() {
const swhidInput =
`swh:1:cnt:${this.unarchivedRepo.content[0].sha1git};lines=45-60/`;
checkInvalidSWHIDReport(this.Urls.browse_search(), '#swh-origins-url-patterns', swhidInput);
cy.get('.invalid-feedback')
.should('be.visible');
});
it('should report invalid SWHID in top right search input', function() {
const swhidInput =
`swh:1:cnt:${this.unarchivedRepo.content[0].sha1git};lines=45-60/`;
checkInvalidSWHIDReport(this.Urls.browse_help(), '#swh-origins-search-top-input', swhidInput);
});
it('should report SWHID with uppercase chars in search page input', function() {
const swhidInput =
`swh:1:cnt:${this.unarchivedRepo.content[0].sha1git}`.toUpperCase();
checkInvalidSWHIDReport(this.Urls.browse_search(), '#swh-origins-url-patterns', swhidInput, swhidInput.toLowerCase());
cy.get('.invalid-feedback')
.should('be.visible');
});
it('should report SWHID with uppercase chars in top right search input', function() {
let swhidInput =
`swh:1:cnt:${this.unarchivedRepo.content[0].sha1git}`.toUpperCase();
swhidInput += ';lines=45-60/';
checkInvalidSWHIDReport(this.Urls.browse_help(), '#swh-origins-search-top-input', swhidInput.toLowerCase());
});
});
});
diff --git a/swh/web/browse/assets/browse/origin-search.js b/swh/web/browse/assets/browse/origin-search.js
index 7b547457..03669682 100644
--- a/swh/web/browse/assets/browse/origin-search.js
+++ b/swh/web/browse/assets/browse/origin-search.js
@@ -1,271 +1,271 @@
/**
- * Copyright (C) 2018-2021 The Software Heritage developers
+ * Copyright (C) 2018-2022 The Software Heritage developers
* See the AUTHORS file at the top-level directory of this distribution
* License: GNU Affero General Public License version 3, or any later version
* See top-level LICENSE file for more information
*/
-import {handleFetchError, errorMessageFromResponse, isArchivedOrigin} from 'utils/functions';
+import {errorMessageFromResponse, handleFetchError, isArchivedOrigin} from 'utils/functions';
// Number of results requested per search page.
const limit = 100;
// Stack of links to previously visited result pages (consumed by the
// "prev" pagination button).
const linksPrev = [];
// Link to the next results page; null when on the last page.
let linkNext = null;
// Link to the results page currently displayed.
let linkCurrent = null;
// Guards against launching a new pagination request while one is in flight.
let inSearch = false;
/**
 * Extract the URL of the next results page from an HTTP "Link" header,
 * e.g. '<https://.../origin/search/...>; rel="next"' -> 'https://...'.
 *
 * Returns undefined when no rel="next" link is present, instead of throwing
 * a TypeError on the null returned by String.prototype.match (the caller in
 * searchOrigins explicitly checks for undefined).
 */
function parseLinkHeader(s) {
  const re = /<(.+)>; rel="next"/;
  const match = s.match(re);
  return match === null ? undefined : match[1];
}
// Strip any inline "style" attribute from the result rows, deferred until
// after the current call stack unwinds.
function fixTableRowsStyle() {
  setTimeout(function() {
    const resultRows = $('#origin-search-results tbody tr');
    resultRows.removeAttr('style');
  });
}
// Drop every result row currently displayed in the search results table.
function clearOriginSearchResultsTable() {
  const rows = $('#origin-search-results tbody tr');
  rows.remove();
}
// NOTE(review): this block comes from a diff whose HTML markup inside the
// template literals below appears to have been stripped by text extraction
// (presumably <tr>/<td> cells with ids like origin-${i}, visit-type-origin-${i}
// and visit-status-origin-${i}, judging by the selectors used further down) —
// restore the literals from version control before using this code.
// Renders one table row per origin, then asynchronously queries each
// origin's latest visit to fill in the visit type and archival status.
async function populateOriginSearchResultsTable(origins) {
if (origins.length > 0) {
$('#swh-origin-search-results').show();
$('#swh-no-result').hide();
clearOriginSearchResultsTable();
const table = $('#origin-search-results tbody');
const promises = [];
// build one row per origin and queue a latest-visit lookup for each
for (const [i, origin] of origins.entries()) {
const browseUrl = `${Urls.browse_origin()}?origin_url=${encodeURIComponent(origin.url)}`;
let tableRow =
`
`;
tableRow +=
`` +
'' +
'Checking | ';
tableRow +=
'' +
`${origin.url} | `;
tableRow +=
`` +
'' +
'Checking | ';
tableRow += '
';
table.append(tableRow);
// get async latest visit snapshot and update visit status icon
// (the two lines below are raw diff -/+ markers kept verbatim: the change
// URL-encodes the origin URL before embedding it in the API route)
- let latestSnapshotUrl = Urls.api_1_origin_visit_latest(origin.url);
+ let latestSnapshotUrl = Urls.api_1_origin_visit_latest(encodeURIComponent(origin.url));
latestSnapshotUrl += '?require_snapshot=true';
promises.push(fetch(latestSnapshotUrl));
}
// resolve all latest-visit lookups, then patch each row in place
const responses = await Promise.all(promises);
const responsesData = await Promise.all(responses.map(r => r.json()));
for (let i = 0; i < responses.length; ++i) {
const response = responses[i];
const data = responsesData[i];
if (response.status !== 404 && data.type) {
$(`#visit-type-origin-${i}`).html(data.type);
$(`#visit-status-origin-${i}`).html(
'Archived');
} else {
// no archived visit found: mark as pending, and optionally hide the
// row entirely when the "filter empty visits" checkbox is set
$(`#visit-type-origin-${i}`).html('unknown');
$(`#visit-status-origin-${i}`).html(
'Pending archival');
if ($('#swh-filter-empty-visits').prop('checked')) {
$(`#origin-${i}`).remove();
}
}
}
fixTableRowsStyle();
} else {
$('#swh-origin-search-results').hide();
$('#swh-no-result').text('No origins matching the search criteria were found.');
$('#swh-no-result').show();
}
// toggle the pagination buttons according to the module-level link state
if (linkNext === null) {
$('#origins-next-results-button').addClass('disabled');
} else {
$('#origins-next-results-button').removeClass('disabled');
}
if (linksPrev.length === 0) {
$('#origins-prev-results-button').addClass('disabled');
} else {
$('#origins-prev-results-button').removeClass('disabled');
}
// release the pagination lock and scroll back to the top of the results
inSearch = false;
setTimeout(() => {
window.scrollTo(0, 0);
});
}
/**
 * Build the URL of the first results page from the current form state
 * (metadata search vs plain search, query-language flag, visit filters)
 * and launch the search.
 */
function searchOriginsFirst(searchQueryText, limit) {
  const searchMetadata = $('#swh-search-origin-metadata').prop('checked');
  let baseSearchUrl;
  if (searchMetadata) {
    // full-text search over origin intrinsic metadata
    baseSearchUrl = new URL(Urls.api_1_origin_metadata_search(), window.location);
    baseSearchUrl.searchParams.append('fulltext', searchQueryText);
  } else {
    // plain origin URL search, optionally using the query language
    const useSearchQL = $('#swh-search-use-ql').prop('checked');
    baseSearchUrl = new URL(Urls.api_1_origin_search(searchQueryText), window.location);
    baseSearchUrl.searchParams.append('use_ql', useSearchQL ?? false);
  }
  const withVisit = $('#swh-search-origins-with-visit').prop('checked');
  baseSearchUrl.searchParams.append('limit', limit);
  baseSearchUrl.searchParams.append('with_visit', withVisit);
  const visitType = $('#swh-search-visit-type').val();
  if (visitType !== 'any') {
    baseSearchUrl.searchParams.append('visit_type', visitType);
  }
  searchOrigins(baseSearchUrl.toString());
}
/**
 * Fetch one page of origin search results and render it.
 *
 * On success, updates the module-level linkCurrent/linkNext pagination
 * state from the response's "Link" header. linksPrev is updated by the
 * caller, which is the one to know if we're going forward or backward
 * in the pages.
 */
async function searchOrigins(searchUrl) {
  clearOriginSearchResultsTable();
  $('.swh-loading').addClass('show');
  try {
    const response = await fetch(searchUrl);
    handleFetchError(response);
    const data = await response.json();
    // Save link to the current results page
    linkCurrent = searchUrl;
    // Save link to the next results page.
    linkNext = null;
    if (response.headers.has('Link')) {
      const parsedLink = parseLinkHeader(response.headers.get('Link'));
      if (parsedLink !== undefined) {
        linkNext = parsedLink;
      }
    }
    $('.swh-loading').removeClass('show');
    populateOriginSearchResultsTable(data);
  } catch (errorResponse) {
    // errorResponse is the Response thrown by handleFetchError for HTTP
    // errors, but can also be a plain Error when fetch itself rejects
    // (network failure); only the former has a .json() method, so guard
    // the call — previously the catch block itself threw in that case.
    // Assumes errorMessageFromResponse falls back to the default message
    // for an empty payload — TODO confirm against utils/functions.
    let errorData = {};
    if (typeof errorResponse.json === 'function') {
      try {
        errorData = await errorResponse.json();
      } catch (parseError) {
        // error body not parseable as JSON: keep the generic fallback
      }
    }
    $('.swh-loading').removeClass('show');
    inSearch = false;
    $('#swh-origin-search-results').hide();
    $('#swh-no-result').text(errorMessageFromResponse(
      errorData, 'An unknown error occurred while searching origins'));
    $('#swh-no-result').show();
  }
}
/**
 * Dispatch the search form's query: resolve it as a SWHID when it looks
 * like one, redirect directly when the query is a known archived origin
 * URL, and otherwise run a paginated origins search.
 */
async function doSearch() {
  $('#swh-no-result').hide();
  const searchQueryText = $('#swh-origins-url-patterns').val();
  inSearch = true;

  if (!searchQueryText.startsWith('swh:')) {
    if (await isArchivedOrigin(searchQueryText)) {
      // known archived origin: redirect straight to its browse page
      window.location.href =
        `${Urls.browse_origin()}?origin_url=${encodeURIComponent(searchQueryText)}`;
    } else {
      // otherwise, proceed with origins search irrespective of the error
      $('#swh-origin-search-results').show();
      $('.swh-search-pagination').show();
      searchOriginsFirst(searchQueryText, limit);
    }
    return;
  }

  // the query may be a SWHID, so send it to the SWHID resolve endpoint
  try {
    const resolveSWHIDUrl = Urls.api_1_resolve_swhid(searchQueryText);
    const response = await fetch(resolveSWHIDUrl);
    handleFetchError(response);
    const data = await response.json();
    // SWHID successfully resolved: redirect to the browse page
    window.location = data.browse_url;
  } catch (response) {
    // display a useful error message since the input looks like a SWHID
    const data = await response.json();
    $('#swh-origin-search-results').hide();
    $('.swh-search-pagination').hide();
    $('#swh-no-result').text(data.reason);
    $('#swh-no-result').show();
  }
}
// Wire up the origin search page: form submission, pagination buttons, and
// restoration of a search encoded in the URL's query parameters.
export function initOriginSearch() {
  $(document).ready(() => {
    // On submit: validate, encode the form state into query parameters and
    // reload the page — the reload triggers the actual search (see the
    // window.location.search branch below).
    $('#swh-search-origins').submit(event => {
      event.preventDefault();
      if (event.target.checkValidity()) {
        $(event.target).removeClass('was-validated');
        const searchQueryText = $('#swh-origins-url-patterns').val().trim();
        const withVisit = $('#swh-search-origins-with-visit').prop('checked');
        const withContent = $('#swh-filter-empty-visits').prop('checked');
        const useSearchQL = $('#swh-search-use-ql').prop('checked');
        const searchMetadata = $('#swh-search-origin-metadata').prop('checked');
        const visitType = $('#swh-search-visit-type').val();
        const queryParameters = new URLSearchParams();
        queryParameters.append('q', searchQueryText);
        // boolean flags are only added when set, keeping the URL short
        if (withVisit) {
          queryParameters.append('with_visit', withVisit);
        }
        if (withContent) {
          queryParameters.append('with_content', withContent);
        }
        if (useSearchQL) {
          // NOTE(review): useSearchQL is already a boolean here, so the
          // `?? false` fallback can never apply — harmless but redundant
          queryParameters.append('use_ql', useSearchQL ?? false);
        }
        if (searchMetadata) {
          queryParameters.append('search_metadata', searchMetadata);
        }
        if (visitType !== 'any') {
          queryParameters.append('visit_type', visitType);
        }
        // Update the url, triggering page reload and effective search
        window.location = `${Urls.browse_search()}?${queryParameters.toString()}`;
      } else {
        $(event.target).addClass('was-validated');
      }
    });
    // "next" pushes the current page link onto the prev stack before
    // following linkNext; ignored while a search is already in flight.
    $('#origins-next-results-button').click(event => {
      if ($('#origins-next-results-button').hasClass('disabled') || inSearch) {
        return;
      }
      inSearch = true;
      linksPrev.push(linkCurrent);
      searchOrigins(linkNext);
      event.preventDefault();
    });
    // "prev" pops the most recently visited page link and reloads it.
    $('#origins-prev-results-button').click(event => {
      if ($('#origins-prev-results-button').hasClass('disabled') || inSearch) {
        return;
      }
      inSearch = true;
      searchOrigins(linksPrev.pop());
      event.preventDefault();
    });
    // Page loaded with query parameters: restore the form state from the
    // URL and run the search immediately.
    if (window.location.search) {
      const urlParams = new URLSearchParams(window.location.search);
      const query = urlParams.get('q');
      const withVisit = urlParams.has('with_visit');
      const useSearchQL = urlParams.has('use_ql');
      const withContent = urlParams.has('with_content');
      const searchMetadata = urlParams.has('search_metadata');
      const visitType = urlParams.get('visit_type');
      $('#swh-origins-url-patterns').val(query);
      $('#swh-search-origins-with-visit').prop('checked', withVisit);
      $('#swh-search-use-ql').prop('checked', useSearchQL ?? false);
      $('#swh-filter-empty-visits').prop('checked', withContent);
      $('#swh-search-origin-metadata').prop('checked', searchMetadata);
      if (visitType) {
        $('#swh-search-visit-type').val(visitType);
      }
      doSearch();
    }
  });
}
diff --git a/swh/web/tests/data.py b/swh/web/tests/data.py
index 53e0ee8c..78069701 100644
--- a/swh/web/tests/data.py
+++ b/swh/web/tests/data.py
@@ -1,542 +1,551 @@
# Copyright (C) 2018-2022 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from copy import deepcopy
from datetime import timedelta
import os
from pathlib import Path
import random
import time
from typing import Dict, List, Optional, Set
from swh.core.config import merge_configs
from swh.counters import get_counters
from swh.indexer.fossology_license import FossologyLicenseIndexer
from swh.indexer.mimetype import MimetypeIndexer
from swh.indexer.storage import get_indexer_storage
from swh.indexer.storage.model import OriginIntrinsicMetadataRow
from swh.loader.git.from_disk import GitLoaderFromArchive
from swh.model.hashutil import DEFAULT_ALGORITHMS, hash_to_hex
from swh.model.model import (
Content,
Directory,
Origin,
OriginVisit,
OriginVisitStatus,
Snapshot,
)
from swh.model.swhids import CoreSWHID, ObjectType, QualifiedSWHID
from swh.search import get_search
from swh.storage import get_storage
from swh.storage.algos.dir_iterators import dir_iterator
from swh.storage.algos.snapshot import snapshot_get_latest
from swh.storage.interface import Sha1
from swh.storage.utils import now
from swh.web import config
from swh.web.browse.utils import (
get_mimetype_and_encoding_for_content,
prepare_content_for_display,
re_encode_content,
)
from swh.web.utils import archive
# Module used to initialize data that will be provided as tests input
# Base content indexer configuration
_TEST_INDEXER_BASE_CONFIG = {
"storage": {"cls": "memory"},
"objstorage": {
"cls": "memory",
"args": {},
},
"indexer_storage": {
"cls": "memory",
"args": {},
},
}
def random_sha1_bytes() -> Sha1:
    """Return 20 random bytes, suitable as a raw sha1 digest."""
    random_values = [random.randint(0, 255) for _ in range(20)]
    return bytes(random_values)
def random_sha1() -> str:
    """Return a random sha1 checksum rendered as a 40-character hex string."""
    digest = random_sha1_bytes()
    return hash_to_hex(digest)
def random_sha256() -> str:
    """Return a random sha256 checksum rendered as a 64-character hex string."""
    raw = bytes(random.randint(0, 255) for _ in range(32))
    return hash_to_hex(raw)
def random_blake2s256() -> str:
    """Return a random blake2s256 checksum rendered as a hex string."""
    raw = bytes(random.randint(0, 255) for _ in range(32))
    return hash_to_hex(raw)
def random_content():
    """Build a content dict with one random checksum per hashing algorithm."""
    checksums = {}
    checksums["sha1"] = random_sha1()
    checksums["sha1_git"] = random_sha1()
    checksums["sha256"] = random_sha256()
    checksums["blake2s256"] = random_blake2s256()
    return checksums
_TEST_MIMETYPE_INDEXER_CONFIG = merge_configs(
_TEST_INDEXER_BASE_CONFIG,
{
"tools": {
"name": "file",
"version": "1:5.30-1+deb9u1",
"configuration": {"type": "library", "debian-package": "python3-magic"},
}
},
)
_TEST_LICENSE_INDEXER_CONFIG = merge_configs(
_TEST_INDEXER_BASE_CONFIG,
{
"workdir": "/tmp/swh/indexer.fossology.license",
"tools": {
"name": "nomos",
"version": "3.1.0rc2-31-ga2cbb8c",
"configuration": {
"command_line": "nomossa ",
},
},
},
)
# Lightweight git repositories that will be loaded to generate
# input data for tests
_TEST_ORIGINS = [
{
"type": "git",
"url": "https://github.com/memononen/libtess2",
"archives": ["libtess2.zip"],
"metadata": {
"@context": "https://doi.org/10.5063/schema/codemeta-2.0",
"description": (
"Game and tools oriented refactored version of GLU tessellator."
),
},
},
{
"type": "git",
"url": "https://github.com/wcoder/highlightjs-line-numbers.js",
"archives": [
"highlightjs-line-numbers.js.zip",
"highlightjs-line-numbers.js_visit2.zip",
],
"metadata": {
"@context": "https://doi.org/10.5063/schema/codemeta-2.0",
"description": "Line numbering plugin for Highlight.js",
},
},
{
"type": "git",
"url": "repo_with_submodules",
"archives": ["repo_with_submodules.tgz"],
"metadata": {
"@context": "https://doi.org/10.5063/schema/codemeta-2.0",
"description": "This is just a sample repository with submodules",
},
},
]
_contents = {}
def _add_extra_contents(storage, contents):
    """Add hand-crafted contents covering specific mimetypes to the archive.

    Each added content's sha1 is also recorded in the ``contents`` set so
    tests can find it later.
    """
    # inner lines of the bytes literal deliberately stay unindented: they
    # are part of the PBM image payload
    pbm_image_data = b"""P1
# PBM example
24 7
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 1 1 1 1 0 0 1 1 1 1 0 0 1 1 1 1 0 0 1 1 1 1 0
0 1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 1 0
0 1 1 1 0 0 0 1 1 1 0 0 0 1 1 1 0 0 0 1 1 1 1 0
0 1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0
0 1 0 0 0 0 0 1 1 1 1 0 0 1 1 1 1 0 0 1 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0"""
    # add file with mimetype image/x-portable-bitmap in the archive content
    pbm_content = Content.from_data(pbm_image_data)
    storage.content_add([pbm_content])
    contents.add(pbm_content.sha1)
    # add file with mimetype application/pgp-keys in the archive content
    gpg_path = os.path.join(
        os.path.dirname(__file__), "resources/contents/other/extensions/public.gpg"
    )
    gpg_content = Content.from_data(Path(gpg_path).read_bytes())
    storage.content_add([gpg_content])
    contents.add(gpg_content.sha1)
# Tool registered with the indexer storage for metadata tests; its "id" key
# is filled in at test-data initialization time (see _init_tests_data).
INDEXER_TOOL = {
    "tool_name": "swh-web tests",
    "tool_version": "1.0",
    "tool_configuration": {},
}
# Key/value pair used by origin intrinsic metadata tests.
ORIGIN_METADATA_KEY = "keywords"
ORIGIN_METADATA_VALUE = "git"
# Per-origin master revision/directory mappings — presumably populated while
# loading the test repositories (not visible in this chunk).
ORIGIN_MASTER_REVISION = {}
ORIGIN_MASTER_DIRECTORY = {}
def _add_origin(
    storage, search, counters, origin_url, visit_type="git", snapshot_branches=None
):
    """Add a synthetic origin with one full visit to the test archive.

    Registers the origin in the storage, search and counters instances,
    then creates a visit of type ``visit_type``, a snapshot built from
    ``snapshot_branches`` and a "full" visit status referencing it.

    Args:
        storage: instance of the archive storage service
        search: instance of the search service
        counters: instance of the counters service
        origin_url: URL of the origin to add
        visit_type: type of the simulated visit (default: "git")
        snapshot_branches: branches dict used to build the visit snapshot
            (default: no branches, i.e. an empty snapshot)
    """
    # use a None sentinel instead of a mutable default argument ({}),
    # which would be a single dict object shared across all calls
    if snapshot_branches is None:
        snapshot_branches = {}
    storage.origin_add([Origin(url=origin_url)])
    search.origin_update(
        [{"url": origin_url, "has_visits": True, "visit_types": [visit_type]}]
    )
    counters.add("origin", [origin_url])
    date = now()
    visit = OriginVisit(origin=origin_url, date=date, type=visit_type)
    # origin_visit_add returns the visit with its "visit" number assigned
    visit = storage.origin_visit_add([visit])[0]
    counters.add("origin_visit", [f"{visit.unique_key()}"])
    snapshot = Snapshot.from_dict({"branches": snapshot_branches})
    storage.snapshot_add([snapshot])
    counters.add("snapshot", [snapshot.id])
    visit_status = OriginVisitStatus(
        origin=origin_url,
        visit=visit.visit,
        # status date strictly after the visit date
        date=date + timedelta(minutes=1),
        type=visit.type,
        status="full",
        snapshot=snapshot.id,
    )
    storage.origin_visit_status_add([visit_status])
    counters.add("origin_visit_status", [f"{visit_status.unique_key()}"])
# Tests data initialization
def _init_tests_data():
    """Initialize the archive test data.

    Creates in-memory instances of the archive services (storage, search,
    counters, indexer storage), loads the git repositories listed in
    ``_TEST_ORIGINS`` plus a set of synthetic origins into them, then
    walks the loaded snapshots to collect every archived object.

    Returns:
        dict: the service instances plus the collected test objects
        (origins, contents, directories, releases, revisions, snapshots
        and SWHIDs).
    """
    # To hold reference to the memory storage
    storage = get_storage("memory")

    # Create search instance
    search = get_search("memory")
    search.initialize()
    search.origin_update({"url": origin["url"]} for origin in _TEST_ORIGINS)

    # create the counters instance
    counters = get_counters("memory")

    # Create indexer storage instance that will be shared by indexers
    idx_storage = get_indexer_storage("memory")

    # Declare a test tool for origin intrinsic metadata tests
    idx_tool = idx_storage.indexer_configuration_add([INDEXER_TOOL])[0]
    INDEXER_TOOL["id"] = idx_tool["id"]

    # Load git repositories from archives
    for origin in _TEST_ORIGINS:
        for i, archive_ in enumerate(origin["archives"]):
            if i > 0:
                # ensure visit dates will be different when simulating
                # multiple visits of an origin
                time.sleep(1)
            origin_repo_archive = os.path.join(
                os.path.dirname(__file__), "resources/repos/%s" % archive_
            )
            loader = GitLoaderFromArchive(
                storage,
                origin["url"],
                archive_path=origin_repo_archive,
            )
            result = loader.load()
            assert result["status"] == "eventful"

        ori = storage.origin_get([origin["url"]])[0]
        origin.update(ori.to_dict())  # add an 'id' key if enabled
        search.origin_update(
            [{"url": origin["url"], "has_visits": True, "visit_types": ["git"]}]
        )

    # add numerous synthetic tar origins, e.g. to exercise origins
    # search pagination
    for i in range(250):
        _add_origin(
            storage,
            search,
            counters,
            origin_url=f"https://many.origins/{i+1}",
            visit_type="tar",
        )

    # origin used in cypress test for origins search
    _add_origin(
        storage,
        search,
        counters,
        origin_url="https://example.org/project/download.php?version=2.0",
        visit_type="tar",
    )

    sha1s: Set[Sha1] = set()
    directories = set()
    revisions = set()
    releases = set()
    snapshots = set()
    swhids = []

    content_path = {}

    # Get all objects loaded into the test archive
    common_metadata = {ORIGIN_METADATA_KEY: ORIGIN_METADATA_VALUE}
    for origin in _TEST_ORIGINS:
        origin_revisions = set()
        snp = snapshot_get_latest(storage, origin["url"])
        swhids.append(
            QualifiedSWHID(
                object_type=ObjectType.SNAPSHOT, object_id=snp.id, origin=origin["url"]
            )
        )
        snapshots.add(hash_to_hex(snp.id))
        for branch_name, branch_data in snp.branches.items():
            target_type = branch_data.target_type.value
            if target_type == "revision":
                origin_revisions.add(branch_data.target)
                swhids.append(
                    QualifiedSWHID(
                        object_type=ObjectType.REVISION,
                        object_id=branch_data.target,
                        origin=origin["url"],
                        visit=CoreSWHID(
                            object_type=ObjectType.SNAPSHOT, object_id=snp.id
                        ),
                    )
                )
                if b"master" in branch_name:
                    # Add some origin intrinsic metadata for tests;
                    # copy common_metadata instead of aliasing it, otherwise
                    # update() below mutates the shared dict and metadata of
                    # previously processed origins leaks into later ones
                    metadata = dict(common_metadata)
                    metadata.update(origin.get("metadata", {}))
                    revision = storage.revision_get([branch_data.target])[0]
                    origin_metadata = OriginIntrinsicMetadataRow(
                        id=origin["url"],
                        from_directory=revision.directory,
                        indexer_configuration_id=idx_tool["id"],
                        metadata=metadata,
                        mappings=[],
                    )
                    idx_storage.origin_intrinsic_metadata_add([origin_metadata])
                    search.origin_update([{"url": origin["url"], "jsonld": metadata}])
                    ORIGIN_MASTER_REVISION[origin["url"]] = hash_to_hex(
                        branch_data.target
                    )
                    ORIGIN_MASTER_DIRECTORY[origin["url"]] = hash_to_hex(
                        revision.directory
                    )
            elif target_type == "release":
                release = storage.release_get([branch_data.target])[0]
                origin_revisions.add(release.target)
                releases.add(hash_to_hex(branch_data.target))
                swhids.append(
                    QualifiedSWHID(
                        object_type=ObjectType.RELEASE,
                        object_id=branch_data.target,
                        origin=origin["url"],
                        visit=CoreSWHID(
                            object_type=ObjectType.SNAPSHOT, object_id=snp.id
                        ),
                    )
                )

        # collect every revision reachable from the snapshot branch heads
        for rev_log in storage.revision_shortlog(origin_revisions):
            rev_id = rev_log[0]
            revisions.add(rev_id)

        # collect contents and (sub)directories of each branch head revision
        for rev in storage.revision_get(sorted(origin_revisions)):
            if rev is None:
                continue
            dir_id = rev.directory
            directories.add(hash_to_hex(dir_id))
            for entry in dir_iterator(storage, dir_id):
                if entry["type"] == "file":
                    sha1s.add(entry["sha1"])
                    content_path[entry["sha1"]] = "/".join(
                        [hash_to_hex(dir_id), entry["path"].decode("utf-8")]
                    )
                    swhids.append(
                        QualifiedSWHID(
                            object_type=ObjectType.CONTENT,
                            object_id=entry["sha1_git"],
                            origin=origin["url"],
                            visit=CoreSWHID(
                                object_type=ObjectType.SNAPSHOT, object_id=snp.id
                            ),
                            anchor=CoreSWHID(
                                object_type=ObjectType.REVISION, object_id=rev.id
                            ),
                            path=b"/" + entry["path"],
                        )
                    )
                elif entry["type"] == "dir":
                    directories.add(hash_to_hex(entry["target"]))
                    swhids.append(
                        QualifiedSWHID(
                            object_type=ObjectType.DIRECTORY,
                            object_id=entry["target"],
                            origin=origin["url"],
                            visit=CoreSWHID(
                                object_type=ObjectType.SNAPSHOT, object_id=snp.id
                            ),
                            anchor=CoreSWHID(
                                object_type=ObjectType.REVISION, object_id=rev.id
                            ),
                            path=b"/" + entry["path"] + b"/",
                        )
                    )

    _add_extra_contents(storage, sha1s)

    # Get all checksums for each content
    result: List[Optional[Content]] = storage.content_get(list(sha1s))
    contents: List[Dict] = []
    for content in result:
        assert content is not None
        sha1 = hash_to_hex(content.sha1)
        content_metadata = {
            algo: hash_to_hex(getattr(content, algo)) for algo in DEFAULT_ALGORITHMS
        }

        path = ""
        if content.sha1 in content_path:
            path = content_path[content.sha1]

        cnt_data = storage.content_get_data(content.sha1)
        assert cnt_data is not None
        mimetype, encoding = get_mimetype_and_encoding_for_content(cnt_data)
        _, _, cnt_data = re_encode_content(mimetype, encoding, cnt_data)
        content_display_data = prepare_content_for_display(cnt_data, mimetype, path)

        content_metadata.update(
            {
                "path": path,
                "mimetype": mimetype,
                "encoding": encoding,
                "hljs_language": content_display_data["language"],
                "raw_data": cnt_data,
                "data": content_display_data["content_data"],
            }
        )
        _contents[sha1] = content_metadata
        contents.append(content_metadata)

    # Add the empty directory to the test archive
    storage.directory_add([Directory(entries=())])

    # Add empty content to the test archive
    storage.content_add([Content.from_data(data=b"")])

    # Add fake git origin with pull request branches
    _add_origin(
        storage,
        search,
        counters,
        origin_url="https://git.example.org/project",
        snapshot_branches={
            b"refs/heads/master": {
                "target_type": "revision",
                "target": next(iter(revisions)),
            },
            **{
                f"refs/pull/{i}".encode(): {
                    "target_type": "revision",
                    "target": next(iter(revisions)),
                }
                for i in range(300)
            },
        },
    )

    counters.add("revision", revisions)
    counters.add("release", releases)
    counters.add("directory", directories)
    counters.add("content", [content["sha1"] for content in contents])

    # Return tests data
    return {
        "search": search,
        "storage": storage,
        "idx_storage": idx_storage,
        "counters": counters,
        "origins": _TEST_ORIGINS,
        "contents": list(sorted(contents, key=lambda c: c["sha1"])),
        "directories": list(sorted(directories)),
        "releases": list(sorted(releases)),
        "revisions": list(sorted(map(hash_to_hex, revisions))),
        "snapshots": list(sorted(snapshots)),
        "swhids": swhids,
    }
def _init_indexers(tests_data):
    """Instantiate the content indexers used in tests, forcing them to
    rely on the in-memory storages from ``tests_data``.

    Args:
        tests_data: dict returned by _init_tests_data, providing the
            "storage" and "idx_storage" service instances

    Returns:
        dict: mapping an indexer name to its configured instance
    """
    indexer_specs = (
        ("mimetype_indexer", MimetypeIndexer, _TEST_MIMETYPE_INDEXER_CONFIG),
        ("license_indexer", FossologyLicenseIndexer, _TEST_LICENSE_INDEXER_CONFIG),
    )
    indexers = {}
    for indexer_name, indexer_cls, indexer_config in indexer_specs:
        indexer = indexer_cls(config=indexer_config)
        # rewire the indexer to the shared memory storages
        indexer.storage = tests_data["storage"]
        indexer.objstorage = tests_data["storage"].objstorage
        indexer.idx_storage = tests_data["idx_storage"]
        indexer.register_tools(indexer.config["tools"])
        indexers[indexer_name] = indexer
    return indexers
def get_content(content_sha1):
    """Return the cached metadata/display data dict of a test content,
    looked up by its hexadecimal sha1, or None when unknown."""
    return _contents.get(content_sha1, None)
# pristine tests data, lazily initialized once by get_tests_data
_tests_data = None
# deep copy of _tests_data currently handed out to tests
_current_tests_data = None
# indexer loggers detached before deep copying (deepcopy requires
# serializability) and reattached afterwards by get_tests_data
_indexer_loggers = {}
def get_tests_data(reset=False):
    """
    Initialize tests data and return them in a dict.

    The expensive initialization runs only once per process; every call
    then returns a cached deep copy of the pristine data so tests may
    freely mutate the returned services.

    Args:
        reset: when True, discard the current working copy and hand out
            a fresh deep copy of the pristine data

    Returns:
        dict: the test services and objects (see _init_tests_data),
        augmented with the configured indexer instances
    """
    global _tests_data, _current_tests_data
    if _tests_data is None:
        _tests_data = _init_tests_data()
        indexers = _init_indexers(_tests_data)
        for (name, idx) in indexers.items():
            # pytest makes the loggers use a temporary file; and deepcopy
            # requires serializability. So we remove them, and add them
            # back after the copy.
            _indexer_loggers[name] = idx.log
            del idx.log
        _tests_data.update(indexers)
    if reset or _current_tests_data is None:
        _current_tests_data = deepcopy(_tests_data)
        # reattach the loggers that were detached before the deep copy
        for (name, logger) in _indexer_loggers.items():
            _current_tests_data[name].log = logger
    return _current_tests_data
def override_storages(storage, idx_storage, search, counters):
    """
    Helper function to replace the storages from which archive data
    are fetched.
    """
    # update the global swh-web configuration with the new service instances
    swh_config = config.get_config()
    swh_config.update(
        {
            "storage": storage,
            "indexer_storage": idx_storage,
            "search": search,
            "counters": counters,
        }
    )
    # also rebind the service handles cached on the archive module
    for attribute, service in (
        ("storage", storage),
        ("idx_storage", idx_storage),
        ("search", search),
        ("counters", counters),
    ):
        setattr(archive, attribute, service)