diff --git a/swh/web/assets/src/bundles/vault/vault-ui.js b/swh/web/assets/src/bundles/vault/vault-ui.js
index 59377a084..529ff50c7 100644
--- a/swh/web/assets/src/bundles/vault/vault-ui.js
+++ b/swh/web/assets/src/bundles/vault/vault-ui.js
@@ -1,256 +1,252 @@
/**
* Copyright (C) 2018 The Software Heritage developers
* See the AUTHORS file at the top-level directory of this distribution
* License: GNU Affero General Public License version 3, or any later version
* See top-level LICENSE file for more information
*/
import {handleFetchError, handleFetchErrors, csrfPost} from 'utils/functions';
// minimal progress bar HTML template; updateProgressBar only requires
// a child element carrying the 'progress-bar' class
let progress = `<div class="progress">
<div class="progress-bar progress-bar-striped" role="progressbar"
aria-valuemin="0" aria-valuemax="100" style="width: 100%; height: 100%;">
</div>
</div>;`;
let pollingInterval = 5000;
let checkVaultId;
function updateProgressBar(progressBar, cookingTask) {
if (cookingTask.status === 'new') {
progressBar.css('background-color', 'rgba(128, 128, 128, 0.5)');
} else if (cookingTask.status === 'pending') {
progressBar.css('background-color', 'rgba(0, 0, 255, 0.5)');
} else if (cookingTask.status === 'done') {
progressBar.css('background-color', '#5cb85c');
} else if (cookingTask.status === 'failed') {
progressBar.css('background-color', 'rgba(255, 0, 0, 0.5)');
progressBar.css('background-image', 'none');
}
progressBar.text(cookingTask.progress_message || cookingTask.status);
if (cookingTask.status === 'new' || cookingTask.status === 'pending') {
progressBar.addClass('progress-bar-animated');
} else {
progressBar.removeClass('progress-bar-striped');
}
}
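// Illustrative sketch (hypothetical values): updateProgressBar expects a
// jQuery-wrapped '.progress-bar' element and a cooking task object shaped
// like the entries stored in localStorage, e.g.:
//
//   let bar = $('#vault-task-1234abcd').find('.progress-bar');
//   updateProgressBar(bar, {status: 'pending', progress_message: 'Processing...'});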
let recookTask;
// called when the user wants to download a cooked archive
export function fetchCookedObject(fetchUrl) {
recookTask = null;
// first, check if the link is still available from the vault
fetch(fetchUrl)
.then(response => {
// link is still alive, proceed to download
if (response.ok) {
$('#vault-fetch-iframe').attr('src', fetchUrl);
// link is dead
} else {
// get the associated cooking task
let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks'));
for (let i = 0; i < vaultCookingTasks.length; ++i) {
if (vaultCookingTasks[i].fetch_url === fetchUrl) {
recookTask = vaultCookingTasks[i];
break;
}
}
// display a modal asking the user whether to recook the archive
$('#vault-recook-object-modal').modal('show');
}
});
}
// called when the user wants to recook an archive
// for which the download link is not available anymore
export function recookObject() {
if (recookTask) {
// stop cooking tasks status polling
clearTimeout(checkVaultId);
// build cook request url
let cookingUrl;
if (recookTask.object_type === 'directory') {
cookingUrl = Urls.api_vault_cook_directory(recookTask.object_id);
} else {
cookingUrl = Urls.api_vault_cook_revision_gitfast(recookTask.object_id);
}
if (recookTask.email) {
cookingUrl += '?email=' + recookTask.email;
}
// request archive cooking
csrfPost(cookingUrl)
.then(handleFetchError)
.then(() => {
// update task status
recookTask.status = 'new';
let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks'));
for (let i = 0; i < vaultCookingTasks.length; ++i) {
if (vaultCookingTasks[i].object_id === recookTask.object_id) {
vaultCookingTasks[i] = recookTask;
break;
}
}
// save updated tasks to local storage
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
// restart cooking tasks status polling
checkVaultCookingTasks();
// hide recook archive modal
$('#vault-recook-object-modal').modal('hide');
})
// something went wrong
.catch(() => {
checkVaultCookingTasks();
$('#vault-recook-object-modal').modal('hide');
});
}
}
function checkVaultCookingTasks() {
let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks'));
if (!vaultCookingTasks || vaultCookingTasks.length === 0) {
$('.swh-vault-table tbody tr').remove();
checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval);
return;
}
let cookingTaskRequests = [];
let tasks = {};
let currentObjectIds = [];
for (let i = 0; i < vaultCookingTasks.length; ++i) {
let cookingTask = vaultCookingTasks[i];
currentObjectIds.push(cookingTask.object_id);
tasks[cookingTask.object_id] = cookingTask;
let cookingUrl;
if (cookingTask.object_type === 'directory') {
cookingUrl = Urls.api_vault_cook_directory(cookingTask.object_id);
} else {
cookingUrl = Urls.api_vault_cook_revision_gitfast(cookingTask.object_id);
}
if (cookingTask.status !== 'done' && cookingTask.status !== 'failed') {
cookingTaskRequests.push(fetch(cookingUrl));
}
}
$('.swh-vault-table tbody tr').each((i, row) => {
let objectId = $(row).find('.vault-object-id').data('object-id');
if ($.inArray(objectId, currentObjectIds) === -1) {
$(row).remove();
}
});
Promise.all(cookingTaskRequests)
.then(handleFetchErrors)
.then(responses => Promise.all(responses.map(r => r.json())))
.then(cookingTasks => {
let table = $('#vault-cooking-tasks tbody');
for (let i = 0; i < cookingTasks.length; ++i) {
let cookingTask = tasks[cookingTasks[i].obj_id];
cookingTask.status = cookingTasks[i].status;
cookingTask.fetch_url = cookingTasks[i].fetch_url;
cookingTask.progress_message = cookingTasks[i].progress_message;
}
for (let i = 0; i < vaultCookingTasks.length; ++i) {
let cookingTask = vaultCookingTasks[i];
let rowTask = $('#vault-task-' + cookingTask.object_id);
let downloadLinkWait = 'Waiting for download link to be available';
if (!rowTask.length) {
let browseUrl;
if (cookingTask.object_type === 'directory') {
browseUrl = Urls.browse_directory(cookingTask.object_id);
} else {
browseUrl = Urls.browse_revision(cookingTask.object_id);
}
let progressBar = $.parseHTML(progress)[0];
let progressBarContent = $(progressBar).find('.progress-bar');
updateProgressBar(progressBarContent, cookingTask);
let tableRow;
if (cookingTask.object_type === 'directory') {
tableRow = `
`;
let downloadLink = downloadLinkWait;
if (cookingTask.status === 'done') {
downloadLink = `';
} else if (cookingTask.status === 'failed') {
downloadLink = '';
}
tableRow += `
${downloadLink}
`;
tableRow += '
';
table.prepend(tableRow);
} else {
let progressBar = rowTask.find('.progress-bar');
updateProgressBar(progressBar, cookingTask);
let downloadLink = rowTask.find('.vault-dl-link');
if (cookingTask.status === 'done') {
downloadLink[0].innerHTML = `';
} else if (cookingTask.status === 'failed') {
downloadLink[0].innerHTML = '';
} else if (cookingTask.status === 'new') {
downloadLink[0].innerHTML = downloadLinkWait;
}
}
}
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval);
})
.catch(() => {});
}
export function initUi() {
$('#vault-tasks-toggle-selection').change(event => {
$('.vault-task-toggle-selection').prop('checked', event.currentTarget.checked);
});
$('#vault-remove-tasks').click(() => {
clearTimeout(checkVaultId);
let tasksToRemove = [];
$('.swh-vault-table tbody tr').each((i, row) => {
let taskSelected = $(row).find('.vault-task-toggle-selection').prop('checked');
if (taskSelected) {
let objectId = $(row).find('.vault-object-id').data('object-id');
tasksToRemove.push(objectId);
$(row).remove();
}
});
let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks'));
vaultCookingTasks = $.grep(vaultCookingTasks, task => {
return $.inArray(task.object_id, tasksToRemove) === -1;
});
localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks));
$('#vault-tasks-toggle-selection').prop('checked', false);
checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval);
});
checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval);
$(document).on('shown.bs.tab', 'a[data-toggle="tab"]', e => {
if (e.currentTarget.text.trim() === 'Vault') {
clearTimeout(checkVaultId);
checkVaultCookingTasks();
}
});
window.onfocus = () => {
clearTimeout(checkVaultId);
checkVaultCookingTasks();
};
}
diff --git a/swh/web/assets/src/bundles/webapp/webapp-utils.js b/swh/web/assets/src/bundles/webapp/webapp-utils.js
index 4fc91a9d8..40bcb44df 100644
--- a/swh/web/assets/src/bundles/webapp/webapp-utils.js
+++ b/swh/web/assets/src/bundles/webapp/webapp-utils.js
@@ -1,117 +1,127 @@
import objectFitImages from 'object-fit-images';
import {Layout} from 'admin-lte';
let collapseSidebar = false;
let previousSidebarState = localStorage.getItem('swh-sidebar-collapsed');
if (previousSidebarState !== undefined) {
collapseSidebar = JSON.parse(previousSidebarState);
}
// adapt implementation of fixLayoutHeight from admin-lte
Layout.prototype.fixLayoutHeight = () => {
let heights = {
window: $(window).height(),
header: $('.main-header').outerHeight(),
footer: $('.footer').outerHeight(),
sidebar: $('.main-sidebar').height(),
topbar: $('.swh-top-bar').height()
};
let offset = 10;
$('.content-wrapper').css('min-height', heights.window - heights.topbar - heights.header - heights.footer - offset);
$('.main-sidebar').css('min-height', heights.window - heights.topbar - heights.header - heights.footer - offset);
};
$(document).on('DOMContentLoaded', () => {
// restore previous sidebar state (collapsed/expanded)
if (collapseSidebar) {
// hack to avoid animated transition for collapsing sidebar
// when loading a page
let sidebarTransition = $('.main-sidebar, .main-sidebar:before').css('transition');
let sidebarEltsTransition = $('.sidebar .nav-link p, .main-sidebar .brand-text, .sidebar .user-panel .info').css('transition');
$('.main-sidebar, .main-sidebar:before').css('transition', 'none');
$('.sidebar .nav-link p, .main-sidebar .brand-text, .sidebar .user-panel .info').css('transition', 'none');
$('body').addClass('sidebar-collapse');
$('.swh-words-logo-swh').css('visibility', 'visible');
// restore transitions for user navigation
setTimeout(() => {
$('.main-sidebar, .main-sidebar:before').css('transition', sidebarTransition);
$('.sidebar .nav-link p, .main-sidebar .brand-text, .sidebar .user-panel .info').css('transition', sidebarEltsTransition);
});
}
});
$(document).on('collapsed.lte.pushmenu', event => {
if ($('body').width() > 980) {
$('.swh-words-logo-swh').css('visibility', 'visible');
}
});
$(document).on('shown.lte.pushmenu', event => {
$('.swh-words-logo-swh').css('visibility', 'hidden');
});
function ensureNoFooterOverflow() {
$('body').css('padding-bottom', $('footer').outerHeight() + 'px');
}
$(document).ready(() => {
// redirect to last browse page if any when clicking on the 'Browse' entry
// in the sidebar
$(`.swh-browse-link`).click(event => {
let lastBrowsePage = sessionStorage.getItem('last-browse-page');
if (lastBrowsePage) {
event.preventDefault();
window.location = lastBrowsePage;
}
});
// ensure the footer does not overflow the main content on mobile devices
// or after resizing the browser window
ensureNoFooterOverflow();
$(window).resize(function() {
ensureNoFooterOverflow();
if ($('body').hasClass('sidebar-collapse') && $('body').width() > 980) {
$('.swh-words-logo-swh').css('visibility', 'visible');
}
});
// activate css polyfill 'object-fit: contain' in old browsers
objectFitImages();
});
export function initPage(page) {
$(document).ready(() => {
// set relevant sidebar link to page active
$(`.swh-${page}-item`).addClass('active');
$(`.swh-${page}-link`).addClass('active');
// triggered when unloading the current page
$(window).on('unload', () => {
// backup sidebar state (collapsed/expanded)
let sidebarCollapsed = $('body').hasClass('sidebar-collapse');
localStorage.setItem('swh-sidebar-collapsed', JSON.stringify(sidebarCollapsed));
// backup current browse page
if (page === 'browse') {
sessionStorage.setItem('last-browse-page', window.location);
}
});
});
}
export function showModalMessage(title, message) {
$('#swh-web-modal-message .modal-title').text(title);
$('#swh-web-modal-message .modal-content p').text(message);
$('#swh-web-modal-message').modal('show');
}
export function showModalConfirm(title, message, callback) {
$('#swh-web-modal-confirm .modal-title').text(title);
$('#swh-web-modal-confirm .modal-content p').text(message);
$('#swh-web-modal-confirm #swh-web-modal-confirm-ok-btn').bind('click', () => {
callback();
$('#swh-web-modal-confirm').modal('hide');
$('#swh-web-modal-confirm #swh-web-modal-confirm-ok-btn').unbind('click');
});
$('#swh-web-modal-confirm').modal('show');
}
+
+let swhObjectIcons;
+
+export function setSwhObjectIcons(icons) {
+ swhObjectIcons = icons;
+}
+
+export function getSwhObjectIcon(swhObjectType) {
+ return swhObjectIcons[swhObjectType];
+}
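+
+// Illustrative usage sketch (hypothetical icon classes): the mapping is
+// expected to be injected once at page load, then looked up by object type.
+//
+//   setSwhObjectIcons({content: 'fa fa-file-text', directory: 'fa fa-folder'});
+//   let directoryIcon = getSwhObjectIcon('directory');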
diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py
index be6ebaa65..0844c0bd1 100644
--- a/swh/web/browse/utils.py
+++ b/swh/web/browse/utils.py
@@ -1,1186 +1,1197 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import base64
from collections import defaultdict
import magic
import math
import pypandoc
import stat
from django.core.cache import cache
from django.utils.safestring import mark_safe
from importlib import reload
from swh.model.identifiers import persistent_identifier
from swh.web.common import highlightjs, service
from swh.web.common.exc import NotFoundExc, http_status_code_message
from swh.web.common.utils import (
reverse, format_utc_iso_date, parse_timestamp,
- get_origin_visits, get_swh_persistent_id
+ get_origin_visits, get_swh_persistent_id,
+ swh_object_icons
)
from swh.web.config import get_config
def get_directory_entries(sha1_git):
"""Function that retrieves the content of a SWH directory
from the SWH archive.
The directory entries are first sorted in lexicographical order.
Sub-directories and regular files are then extracted.
Args:
sha1_git: sha1_git identifier of the directory
Returns:
A tuple whose first member corresponds to the sub-directories list
and second member the regular files list
Raises:
NotFoundExc if the directory is not found
"""
cache_entry_id = 'directory_entries_%s' % sha1_git
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry
entries = list(service.lookup_directory(sha1_git))
for e in entries:
e['perms'] = stat.filemode(e['perms'])
if e['type'] == 'rev':
# modify dir entry name to explicitly show it points
# to a revision
e['name'] = '%s @ %s' % (e['name'], e['target'][:7])
dirs = [e for e in entries if e['type'] in ('dir', 'rev')]
files = [e for e in entries if e['type'] == 'file']
dirs = sorted(dirs, key=lambda d: d['name'])
files = sorted(files, key=lambda f: f['name'])
cache.set(cache_entry_id, (dirs, files))
return dirs, files
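# Illustrative usage sketch (hypothetical sha1_git value):
#
#   dirs, files = get_directory_entries('1f0c8e3a...')
#   for entry in dirs + files:
#       print(entry['perms'], entry['name'])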
def get_mimetype_and_encoding_for_content(content):
"""Function that returns the mime type and the encoding associated to
a content buffer using the magic module under the hood.
Args:
content (bytes): a content buffer
Returns:
A tuple (mimetype, encoding), for instance ('text/plain', 'us-ascii'),
associated to the provided content.
"""
while True:
try:
magic_result = magic.detect_from_content(content)
mime_type = magic_result.mime_type
encoding = magic_result.encoding
break
except Exception:
# workaround an issue with the magic module which can fail
# if detect_from_content is called multiple times in
# a short amount of time
reload(magic)
return mime_type, encoding
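# Illustrative sketch: detection on a small in-memory buffer.
#
#   mime_type, encoding = get_mimetype_and_encoding_for_content(b'hello\n')
#   # expected to yield something like ('text/plain', 'us-ascii')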
# maximum authorized content size in bytes for HTML display
# with code highlighting
content_display_max_size = get_config()['content_display_max_size']
snapshot_content_max_size = get_config()['snapshot_content_max_size']
def request_content(query_string, max_size=content_display_max_size,
raise_if_unavailable=True, reencode=True):
"""Function that retrieves a SWH content from the SWH archive.
Raw bytes content is first retrieved, then the content mime type.
If the mime type is not stored in the archive, it will be computed
using the Python magic module.
Args:
query_string: a string of the form "[ALGO_HASH:]HASH" where
optional ALGO_HASH can be either *sha1*, *sha1_git*, *sha256*,
or *blake2s256* (default to *sha1*) and HASH the hexadecimal
representation of the hash value
max_size: the maximum size for a content to retrieve (default to 1MB,
no size limit if None)
Returns:
A tuple whose first member corresponds to the content raw bytes
and second member the content mime type
Raises:
NotFoundExc if the content is not found
"""
content_data = service.lookup_content(query_string)
filetype = None
language = None
license = None
# requests to the indexer db may fail so properly handle
# those cases in order to avoid content display errors
try:
filetype = service.lookup_content_filetype(query_string)
language = service.lookup_content_language(query_string)
license = service.lookup_content_license(query_string)
except Exception:
pass
mimetype = 'unknown'
encoding = 'unknown'
if filetype:
mimetype = filetype['mimetype']
encoding = filetype['encoding']
content_data['error_code'] = 200
content_data['error_message'] = ''
content_data['error_description'] = ''
if not max_size or content_data['length'] < max_size:
try:
content_raw = service.lookup_content_raw(query_string)
except Exception as e:
if raise_if_unavailable:
raise e
else:
content_data['raw_data'] = None
content_data['error_code'] = 404
content_data['error_description'] = \
'The bytes of the content are currently not available in the archive.' # noqa
content_data['error_message'] = \
http_status_code_message[content_data['error_code']]
else:
content_data['raw_data'] = content_raw['data']
if not filetype:
mimetype, encoding = \
get_mimetype_and_encoding_for_content(content_data['raw_data']) # noqa
# encode textual content to utf-8 if needed
if reencode and mimetype.startswith('text/'):
# probably a malformed UTF-8 content, re-encode it
# by replacing invalid chars with a substitution one
if encoding == 'unknown-8bit':
content_data['raw_data'] = \
content_data['raw_data'].decode('utf-8', 'replace')\
.encode('utf-8')
elif 'ascii' not in encoding and encoding not in ['utf-8', 'binary']: # noqa
content_data['raw_data'] = \
content_data['raw_data'].decode(encoding, 'replace')\
.encode('utf-8')
elif reencode and mimetype.startswith('application/octet-stream'):
# the file type detection may report textual content as binary
# so try to decode it for display
encodings = ['us-ascii']
encodings += ['iso-8859-%s' % i for i in range(1, 17)]
for encoding in encodings:
try:
content_data['raw_data'] = \
content_data['raw_data'].decode(encoding)\
.encode('utf-8')
except Exception:
pass
else:
# ensure display in content view
mimetype = 'text/plain'
break
else:
content_data['raw_data'] = None
content_data['mimetype'] = mimetype
content_data['encoding'] = encoding
if language:
content_data['language'] = language['lang']
else:
content_data['language'] = 'not detected'
if license:
content_data['licenses'] = ', '.join(license['facts'][0]['licenses'])
else:
content_data['licenses'] = 'not detected'
return content_data
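# Illustrative usage sketch (hypothetical hash value and helper):
#
#   content = request_content('sha1_git:8624bc...', raise_if_unavailable=False)
#   if content['error_code'] == 200:
#       render_content(content['raw_data'], content['mimetype'])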
_browsers_supported_image_mimes = set(['image/gif', 'image/png',
'image/jpeg', 'image/bmp',
'image/webp', 'image/svg',
'image/svg+xml'])
def prepare_content_for_display(content_data, mime_type, path):
"""Function that prepares a content for HTML display.
The function tries to associate a programming language to a
content in order to perform syntax highlighting client-side
using highlightjs. The language is determined using either
the content filename or its mime type.
If the mime type corresponds to an image format supported
by web browsers, the content will be encoded in base64
for displaying the image.
Args:
content_data (bytes): raw bytes of the content
mime_type (string): mime type of the content
path (string): path of the content including filename
Returns:
A dict containing the content bytes (possibly different from the one
provided as parameter if it is an image) under the key 'content_data'
and the corresponding highlightjs language class under the
key 'language'.
"""
language = highlightjs.get_hljs_language_from_filename(path)
if not language:
language = highlightjs.get_hljs_language_from_mime_type(mime_type)
if not language:
language = 'nohighlight'
elif mime_type.startswith('application/'):
mime_type = mime_type.replace('application/', 'text/')
if mime_type.startswith('image/'):
if mime_type in _browsers_supported_image_mimes:
content_data = base64.b64encode(content_data)
else:
content_data = None
if mime_type.startswith('image/svg'):
mime_type = 'image/svg+xml'
return {'content_data': content_data,
'language': language,
'mimetype': mime_type}
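# Illustrative sketch (hypothetical content): textual content keeps its raw
# bytes and gets a highlightjs language class, while supported image formats
# are base64 encoded instead.
#
#   display = prepare_content_for_display(b'print("hi")\n',
#                                         'text/x-python', 'setup.py')
#   # expected: display['language'] == 'python'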
def get_origin_visit(origin_info, visit_ts=None, visit_id=None,
snapshot_id=None):
"""Function that returns information about a SWH visit for
a given origin.
The visit is retrieved from a provided timestamp.
The closest visit from that timestamp is selected.
Args:
origin_info (dict): a dict filled with origin information
(id, url, type)
visit_ts (int or str): an ISO date string or Unix timestamp to parse
Returns:
A dict containing the visit info as described below::
{'origin': 2,
'date': '2017-10-08T11:54:25.582463+00:00',
'metadata': {},
'visit': 25,
'status': 'full'}
"""
visits = get_origin_visits(origin_info)
if not visits:
raise NotFoundExc('No SWH visit associated to origin with'
' type %s and url %s!' % (origin_info['type'],
origin_info['url']))
if snapshot_id:
visit = [v for v in visits if v['snapshot'] == snapshot_id]
if len(visit) == 0:
raise NotFoundExc(
'Visit for snapshot with id %s for origin with type %s'
' and url %s not found!' % (snapshot_id, origin_info['type'],
origin_info['url']))
return visit[0]
if visit_id:
visit = [v for v in visits if v['visit'] == int(visit_id)]
if len(visit) == 0:
raise NotFoundExc(
'Visit with id %s for origin with type %s'
' and url %s not found!' % (visit_id, origin_info['type'],
origin_info['url']))
return visit[0]
if not visit_ts:
# returns the latest full visit when no timestamp is provided
for v in reversed(visits):
if v['status'] == 'full':
return v
return visits[-1]
parsed_visit_ts = math.floor(parse_timestamp(visit_ts).timestamp())
visit_idx = None
for i, visit in enumerate(visits):
ts = math.floor(parse_timestamp(visit['date']).timestamp())
if i == 0 and parsed_visit_ts <= ts:
return visit
elif i == len(visits) - 1:
if parsed_visit_ts >= ts:
return visit
else:
next_ts = math.floor(
parse_timestamp(visits[i+1]['date']).timestamp())
if parsed_visit_ts >= ts and parsed_visit_ts < next_ts:
if (parsed_visit_ts - ts) < (next_ts - parsed_visit_ts):
visit_idx = i
break
else:
visit_idx = i+1
break
if visit_idx is not None:
visit = visits[visit_idx]
while visit_idx < len(visits) - 1 and \
visit['date'] == visits[visit_idx+1]['date']:
visit_idx = visit_idx + 1
visit = visits[visit_idx]
return visit
else:
raise NotFoundExc(
'Visit with timestamp %s for origin with type %s and url %s not found!' % # noqa
(visit_ts, origin_info['type'], origin_info['url']))
def process_snapshot_branches(snapshot_branches):
"""
Process a dictionary describing snapshot branches: extract those
targeting revisions and releases, put them in two different lists,
then sort those lists in lexicographical order of the branches' names.
Args:
snapshot_branches (dict): A dict describing the branches of a snapshot
as returned for instance by :func:`swh.web.common.service.lookup_snapshot`
Returns:
tuple: A tuple whose first member is the sorted list of branches
targeting revisions and second member the sorted list of branches
targeting releases
""" # noqa
branches = {}
releases = {}
revision_to_branch = defaultdict(set)
revision_to_release = defaultdict(set)
release_to_branch = defaultdict(set)
for branch_name, target in snapshot_branches.items():
if not target:
# FIXME: display branches with an unknown target anyway
continue
target_id = target['target']
target_type = target['target_type']
if target_type == 'revision':
branches[branch_name] = {
'name': branch_name,
'revision': target_id,
}
revision_to_branch[target_id].add(branch_name)
elif target_type == 'release':
release_to_branch[target_id].add(branch_name)
# FIXME: handle pointers to other object types
# FIXME: handle branch aliases
releases_info = service.lookup_release_multiple(
release_to_branch.keys()
)
for release in releases_info:
branches_to_update = release_to_branch[release['id']]
for branch in branches_to_update:
releases[branch] = {
'name': release['name'],
'branch_name': branch,
'date': format_utc_iso_date(release['date']),
'id': release['id'],
'message': release['message'],
'target_type': release['target_type'],
'target': release['target'],
}
if release['target_type'] == 'revision':
revision_to_release[release['target']].update(
branches_to_update
)
revisions = service.lookup_revision_multiple(
set(revision_to_branch.keys()) | set(revision_to_release.keys())
)
for revision in revisions:
if not revision:
continue
revision_data = {
'directory': revision['directory'],
'date': format_utc_iso_date(revision['date']),
'message': revision['message'],
}
for branch in revision_to_branch[revision['id']]:
branches[branch].update(revision_data)
for release in revision_to_release[revision['id']]:
releases[release]['directory'] = revision['directory']
ret_branches = list(sorted(branches.values(), key=lambda b: b['name']))
ret_releases = list(sorted(releases.values(), key=lambda b: b['name']))
return ret_branches, ret_releases
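# Illustrative sketch (hypothetical branch targets): branches targeting
# revisions end up in the first returned list, those targeting releases
# in the second one.
#
#   branches, releases = process_snapshot_branches({
#       'refs/heads/master': {'target_type': 'revision', 'target': 'aa' * 20},
#       'refs/tags/v1.0': {'target_type': 'release', 'target': 'bb' * 20},
#   })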
def get_snapshot_content(snapshot_id):
"""Returns the lists of branches and releases
associated to a swh snapshot.
That list is put in cache in order to speed up the navigation
in the swh-web/browse ui.
.. warning:: At most 1000 branches contained in the snapshot
will be returned for performance reasons.
Args:
snapshot_id (str): hexadecimal representation of the snapshot
identifier
Returns:
A tuple with two members. The first one is a list of dict describing
the snapshot branches. The second one is a list of dict describing the
snapshot releases.
Raises:
NotFoundExc if the snapshot does not exist
"""
cache_entry_id = 'swh_snapshot_%s' % snapshot_id
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry['branches'], cache_entry['releases']
branches = []
releases = []
if snapshot_id:
snapshot = service.lookup_snapshot(
snapshot_id, branches_count=snapshot_content_max_size)
branches, releases = process_snapshot_branches(snapshot['branches'])
cache.set(cache_entry_id, {
'branches': branches,
'releases': releases,
})
return branches, releases
def get_origin_visit_snapshot(origin_info, visit_ts=None, visit_id=None,
snapshot_id=None):
"""Returns the lists of branches and releases
associated to a swh origin for a given visit.
The visit is expressed by a timestamp; the closest visit
to that timestamp will be used.
If no visit parameter is provided, it returns the list of branches
found for the latest visit.
That list is put in cache in order to speed up the navigation
in the swh-web/browse ui.
.. warning:: At most 1000 branches contained in the snapshot
will be returned for performance reasons.
Args:
origin_info (dict): a dict filled with origin information
(id, url, type)
visit_ts (int or str): an ISO date string or Unix timestamp to parse
visit_id (int): optional visit id for disambiguation in case
several visits have the same timestamp
Returns:
A tuple with two members. The first one is a list of dict describing
the origin branches for the given visit.
The second one is a list of dict describing the origin releases
for the given visit.
Raises:
NotFoundExc if the origin or its visit are not found
"""
visit_info = get_origin_visit(origin_info, visit_ts, visit_id, snapshot_id)
return get_snapshot_content(visit_info['snapshot'])
def gen_link(url, link_text=None, link_attrs={}):
"""
Utility function for generating an HTML link to insert
in Django templates.
Args:
url (str): an url
link_text (str): optional text for the produced link,
if not provided the url will be used
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="url">link_text</a>'
"""
attrs = ' '
for k, v in link_attrs.items():
attrs += '%s="%s" ' % (k, v)
if not link_text:
link_text = url
link = '<a%shref="%s">%s</a>' % (attrs, url, link_text)
return mark_safe(link)
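# Illustrative sketch (hypothetical values):
#
#   gen_link('https://example.org', 'example', {'class': 'external'})
#   # expected to produce something like:
#   # <a class="external" href="https://example.org">example</a>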
def gen_person_link(person_id, person_name, snapshot_context=None,
link_attrs={}):
"""
Utility function for generating a link to a SWH person HTML view
to insert in Django templates.
Args:
person_id (int): a SWH person id
person_name (str): the associated person name
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="...">person_name</a>'
"""
query_params = None
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
query_params = {'origin_type': origin_info['type'],
'origin': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
query_params['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
elif snapshot_context:
query_params = {'snapshot_id': snapshot_context['snapshot_id']}
person_url = reverse('browse-person', url_args={'person_id': person_id},
query_params=query_params)
return gen_link(person_url, person_name or 'None', link_attrs)
-def gen_revision_link(revision_id, shorten_id=False, snapshot_context=None,
- link_text=None, link_attrs={}):
+def gen_revision_url(revision_id, snapshot_context=None):
"""
- Utility function for generating a link to a SWH revision HTML view
- to insert in Django templates.
+ Utility function for generating a url to a SWH revision.
Args:
revision_id (str): a SWH revision id
- shorten_id (boolean): whether to shorten the revision id to 7
- characters for the link text
snapshot_context (dict): if provided, generate snapshot-dependent
- browsing link
- link_attrs (dict): optional attributes (e.g. class)
- to add to the link
+ browsing url
Returns:
- An HTML link in the form 'revision_id'
+ str: The url to browse the revision
"""
- if not revision_id:
- return None
query_params = None
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
origin_type = snapshot_context['origin_type']
query_params = {'origin_type': origin_type,
'origin': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
query_params['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
elif snapshot_context:
query_params = {'snapshot_id': snapshot_context['snapshot_id']}
- revision_url = reverse('browse-revision',
- url_args={'sha1_git': revision_id},
- query_params=query_params)
+ return reverse('browse-revision',
+ url_args={'sha1_git': revision_id},
+ query_params=query_params)
+
+
+def gen_revision_link(revision_id, shorten_id=False, snapshot_context=None,
+ link_text=None, link_attrs={}):
+ """
+ Utility function for generating a link to a SWH revision HTML view
+ to insert in Django templates.
+
+ Args:
+ revision_id (str): a SWH revision id
+ shorten_id (boolean): whether to shorten the revision id to 7
+ characters for the link text
+ snapshot_context (dict): if provided, generate snapshot-dependent
+ browsing link
+ link_attrs (dict): optional attributes (e.g. class)
+ to add to the link
+
+ Returns:
+ str: An HTML link in the form '<a href="...">revision_id</a>'
+
+ """
+ if not revision_id:
+ return None
+
+ revision_url = gen_revision_url(revision_id, snapshot_context)
+
if shorten_id:
return gen_link(revision_url, revision_id[:7], link_attrs)
else:
if not link_text:
link_text = revision_id
return gen_link(revision_url, link_text, link_attrs)
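# Illustrative sketch (hypothetical revision id): gen_revision_url returns the
# bare browsing url while gen_revision_link wraps it in an HTML anchor.
#
#   rev = '7026b7c1a2af56521e958765901234567890abcd'
#   url = gen_revision_url(rev)
#   link = gen_revision_link(rev, shorten_id=True)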
def gen_origin_link(origin_info, link_attrs={}):
"""
Utility function for generating a link to a SWH origin HTML view
to insert in Django templates.
Args:
origin_info (dict): a dict filled with origin information
(id, type, url)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="...">Origin: origin_url</a>'
""" # noqa
origin_browse_url = reverse('browse-origin',
url_args={'origin_type': origin_info['type'],
'origin_url': origin_info['url']})
return gen_link(origin_browse_url,
'Origin: ' + origin_info['url'], link_attrs)
def gen_directory_link(sha1_git, link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH directory HTML view
to insert in Django templates.
Args:
sha1_git (str): directory identifier
link_text (str): optional text for the generated link
(the generated url will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="...">link_text</a>'
"""
if not sha1_git:
return None
directory_url = reverse('browse-directory',
url_args={'sha1_git': sha1_git})
if not link_text:
link_text = directory_url
return gen_link(directory_url, link_text, link_attrs)
def gen_snapshot_link(snapshot_id, link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH snapshot HTML view
to insert in Django templates.
Args:
snapshot_id (str): snapshot identifier
link_text (str): optional text for the generated link
(the generated url will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="...">link_text</a>'
"""
snapshot_url = reverse('browse-snapshot',
url_args={'snapshot_id': snapshot_id})
if not link_text:
link_text = snapshot_url
return gen_link(snapshot_url, link_text, link_attrs)
def gen_snapshot_directory_link(snapshot_context, revision_id=None,
link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH directory HTML view
in the context of a snapshot to insert in Django templates.
Args:
snapshot_context (dict): the snapshot information
revision_id (str): optional revision identifier in order
to use the associated directory
link_text (str): optional text to use for the generated link
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form
'<a href="...">origin_directory_view_url</a>'
"""
query_params = {'revision': revision_id}
if snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
url_args['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
directory_url = reverse('browse-origin-directory',
url_args=url_args,
query_params=query_params)
else:
url_args = {'snapshot_id': snapshot_context['snapshot_id']}
directory_url = reverse('browse-snapshot-directory',
url_args=url_args,
query_params=query_params)
if not link_text:
link_text = directory_url
return gen_link(directory_url, link_text, link_attrs)
def gen_content_link(sha1_git, link_text=None, link_attrs={}):
"""
Utility function for generating a link to a SWH content HTML view
to insert in Django templates.
Args:
sha1_git (str): content identifier
link_text (str): optional text for the generated link
(the generated url will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="...">link_text</a>'
"""
if not sha1_git:
return None
content_url = reverse('browse-content',
url_args={'query_string': 'sha1_git:' + sha1_git})
if not link_text:
link_text = content_url
return gen_link(content_url, link_text, link_attrs)
def get_revision_log_url(revision_id, snapshot_context=None):
"""
Utility function for getting the URL for a SWH revision log HTML view
(possibly in the context of an origin).
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
Returns:
The SWH revision log view URL
"""
query_params = {'revision': revision_id}
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
url_args['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
revision_log_url = reverse('browse-origin-log',
url_args=url_args,
query_params=query_params)
elif snapshot_context:
url_args = {'snapshot_id': snapshot_context['snapshot_id']}
revision_log_url = reverse('browse-snapshot-log',
url_args=url_args,
query_params=query_params)
else:
revision_log_url = reverse('browse-revision-log',
url_args={'sha1_git': revision_id})
return revision_log_url
def gen_revision_log_link(revision_id, snapshot_context=None, link_text=None,
link_attrs={}):
"""
Utility function for generating a link to a SWH revision log HTML view
(possibly in the context of an origin) to insert in Django templates.
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
link_text (str): optional text to use for the generated link
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form
'<a href="...">link_text</a>'
"""
if not revision_id:
return None
revision_log_url = get_revision_log_url(revision_id, snapshot_context)
if not link_text:
link_text = revision_log_url
return gen_link(revision_log_url, link_text, link_attrs)
def _format_log_entries(revision_log, per_page, snapshot_context=None):
revision_log_data = []
for i, log in enumerate(revision_log):
if i == per_page:
break
author_name = 'None'
author_link = 'None'
if log['author']:
author_name = log['author']['name'] or log['author']['fullname']
author_link = gen_person_link(log['author']['id'], author_name,
snapshot_context)
revision_log_data.append(
{'author': author_link,
'revision': gen_revision_link(log['id'], True, snapshot_context),
'message': log['message'],
'date': format_utc_iso_date(log['date']),
'directory': log['directory']})
return revision_log_data
def prepare_revision_log_for_display(revision_log, per_page, revs_breadcrumb,
snapshot_context=None):
"""
Utility functions that process raw revision log data for HTML display.
Its purpose is to:
* add links to relevant SWH browse views
* format date in human readable format
* truncate the message log
It also computes the data needed to generate the links for navigating back
and forth in the history log.
Args:
revision_log (list): raw revision log as returned by the SWH web api
per_page (int): number of log entries per page
revs_breadcrumb (str): breadcrumbs of revisions navigated so far,
in the form 'rev1[/rev2/../revN]'. Each revision corresponds to
the first one displayed in the HTML view for history log.
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
"""
current_rev = revision_log[0]['id']
next_rev = None
prev_rev = None
next_revs_breadcrumb = None
prev_revs_breadcrumb = None
if len(revision_log) == per_page + 1:
prev_rev = revision_log[-1]['id']
prev_rev_bc = current_rev
if snapshot_context:
prev_rev_bc = prev_rev
if revs_breadcrumb:
revs = revs_breadcrumb.split('/')
next_rev = revs[-1]
if len(revs) > 1:
next_revs_breadcrumb = '/'.join(revs[:-1])
if len(revision_log) == per_page + 1:
prev_revs_breadcrumb = revs_breadcrumb + '/' + prev_rev_bc
else:
prev_revs_breadcrumb = prev_rev_bc
return {'revision_log_data': _format_log_entries(revision_log, per_page,
snapshot_context),
'prev_rev': prev_rev,
'prev_revs_breadcrumb': prev_revs_breadcrumb,
'next_rev': next_rev,
'next_revs_breadcrumb': next_revs_breadcrumb}
# list of origin types that can be found in the swh archive
# TODO: retrieve it dynamically in an efficient way instead
# of hardcoding it
_swh_origin_types = ['git', 'svn', 'deb', 'hg', 'ftp', 'deposit', 'pypi']
def get_origin_info(origin_url, origin_type=None):
"""
Get info about a SWH origin.
Its main purpose is to automatically find an origin type
when it is not provided as a parameter.
Args:
origin_url (str): complete url of a SWH origin
origin_type (str): optional origin type
Returns:
A dict with the following entries:
* type: the origin type
* url: the origin url
* id: the SWH internal id of the origin
"""
if origin_type:
return service.lookup_origin({'type': origin_type,
'url': origin_url})
else:
for origin_type in _swh_origin_types:
try:
origin_info = service.lookup_origin({'type': origin_type,
'url': origin_url})
return origin_info
except Exception:
pass
raise NotFoundExc('Origin with url %s not found!' % origin_url)
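# Illustrative usage sketch (hypothetical origin url): when no type is given,
# each known origin type is tried in turn until a lookup succeeds.
#
#   origin_info = get_origin_info('https://github.com/python/cpython')
#   # origin_info contains the 'type', 'url' and 'id' entries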
def get_snapshot_context(snapshot_id=None, origin_type=None, origin_url=None,
timestamp=None, visit_id=None):
"""
Utility function to compute relevant information when navigating
the SWH archive in a snapshot context. The snapshot is either
referenced by its id or it will be retrieved from an origin visit.
Args:
snapshot_id (str): hexadecimal representation of a snapshot identifier,
all other parameters will be ignored if it is provided
origin_type (str): the origin type (git, svn, deposit, ...)
origin_url (str): the origin_url (e.g. https://github.com/(user)/(repo)/)
timestamp (str): a datetime string for retrieving the closest
SWH visit of the origin
visit_id (int): optional visit id for disambiguation in case
of several visits with the same timestamp
Returns:
A dict with the following entries:
* origin_info: dict containing origin information
* visit_info: dict containing SWH visit information
* branches: the list of branches for the origin found
during the visit
* releases: the list of releases for the origin found
during the visit
* origin_browse_url: the url to browse the origin
* origin_branches_url: the url to browse the origin branches
* origin_releases_url': the url to browse the origin releases
* origin_visit_url: the url to browse the snapshot of the origin
found during the visit
* url_args: dict containing url arguments to use when browsing in
the context of the origin and its visit
Raises:
NotFoundExc: if no snapshot is found for the visit of an origin.
""" # noqa
origin_info = None
visit_info = None
url_args = None
query_params = {}
branches = []
releases = []
browse_url = None
visit_url = None
branches_url = None
releases_url = None
swh_type = 'snapshot'
if origin_url:
swh_type = 'origin'
origin_info = get_origin_info(origin_url, origin_type)
visit_info = get_origin_visit(origin_info, timestamp, visit_id,
snapshot_id)
fmt_date = format_utc_iso_date(visit_info['date'])
visit_info['fmt_date'] = fmt_date
snapshot_id = visit_info['snapshot']
if not snapshot_id:
raise NotFoundExc('No snapshot associated to the visit of origin '
'%s on %s' % (origin_url, fmt_date))
# provided timestamp is not necessarily equal to the one
# of the retrieved visit, so get the exact one in order
# to use it in the urls generated below
if timestamp:
timestamp = visit_info['date']
branches, releases = \
get_origin_visit_snapshot(origin_info, timestamp, visit_id,
snapshot_id)
url_args = {'origin_type': origin_type,
'origin_url': origin_info['url']}
query_params = {'visit_id': visit_id}
browse_url = reverse('browse-origin-visits',
url_args=url_args)
if timestamp:
url_args['timestamp'] = format_utc_iso_date(timestamp,
'%Y-%m-%dT%H:%M:%S')
visit_url = reverse('browse-origin-directory',
url_args=url_args,
query_params=query_params)
visit_info['url'] = visit_url
branches_url = reverse('browse-origin-branches',
url_args=url_args,
query_params=query_params)
releases_url = reverse('browse-origin-releases',
url_args=url_args,
query_params=query_params)
elif snapshot_id:
branches, releases = get_snapshot_content(snapshot_id)
url_args = {'snapshot_id': snapshot_id}
browse_url = reverse('browse-snapshot',
url_args=url_args)
branches_url = reverse('browse-snapshot-branches',
url_args=url_args)
releases_url = reverse('browse-snapshot-releases',
url_args=url_args)
releases = list(reversed(releases))
snapshot_size = service.lookup_snapshot_size(snapshot_id)
is_empty = sum(snapshot_size.values()) == 0
swh_snp_id = persistent_identifier('snapshot', snapshot_id)
return {
'swh_type': swh_type,
'swh_object_id': swh_snp_id,
'snapshot_id': snapshot_id,
'snapshot_size': snapshot_size,
'is_empty': is_empty,
'origin_info': origin_info,
# keep track if the origin type was provided as url argument
'origin_type': origin_type,
'visit_info': visit_info,
'branches': branches,
'releases': releases,
'branch': None,
'release': None,
'browse_url': browse_url,
'branches_url': branches_url,
'releases_url': releases_url,
'url_args': url_args,
'query_params': query_params
}
# list of common readme names ordered by preference
# (lower indices have higher priority)
_common_readme_names = [
"readme.markdown",
"readme.md",
"readme.rst",
"readme.txt",
"readme"
]
def get_readme_to_display(readmes):
"""
Process a list of readme files found in a directory
in order to find the adequate one to display.
Args:
readmes: a list of dict where keys are readme file names and values
are readme sha1s
Returns:
A tuple (readme_name, readme_sha1)
"""
readme_name = None
readme_url = None
readme_sha1 = None
readme_html = None
lc_readmes = {k.lower(): {'orig_name': k, 'sha1': v}
for k, v in readmes.items()}
# look for readme names according to the preference order
# defined by the _common_readme_names list
for common_readme_name in _common_readme_names:
if common_readme_name in lc_readmes:
readme_name = lc_readmes[common_readme_name]['orig_name']
readme_sha1 = lc_readmes[common_readme_name]['sha1']
readme_url = reverse('browse-content-raw',
url_args={'query_string': readme_sha1})
break
# otherwise pick the first readme-like file if any
if not readme_name and len(readmes.items()) > 0:
readme_name = next(iter(readmes))
readme_sha1 = readmes[readme_name]
readme_url = reverse('browse-content-raw',
url_args={'query_string': readme_sha1})
# convert rst README to html server side as there is
# no viable solution to perform that task client side
if readme_name and readme_name.endswith('.rst'):
cache_entry_id = 'readme_%s' % readme_sha1
cache_entry = cache.get(cache_entry_id)
if cache_entry:
readme_html = cache_entry
else:
try:
rst_doc = request_content(readme_sha1)
readme_html = pypandoc.convert_text(rst_doc['raw_data'],
'html', format='rst')
cache.set(cache_entry_id, readme_html)
except Exception:
readme_html = 'Readme bytes are not available'
return readme_name, readme_url, readme_html
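# Illustrative sketch (hypothetical sha1 values): lower-cased names are
# matched against _common_readme_names in preference order.
#
#   name, url, html = get_readme_to_display({'README.rst': '38f1ab...',
#                                            'README.txt': '51abcd...'})
#   # expected: name == 'README.rst'; html is rendered server side by pypandoc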
def get_swh_persistent_ids(swh_objects, snapshot_context=None):
"""
Returns a list of dict containing info related to persistent
identifiers of swh objects.
Args:
swh_objects (list): a list of dict with the following keys:
* type: swh object type (content/directory/release/revision/snapshot)
* id: swh object id
snapshot_context (dict): optional parameter describing the snapshot in which
the object has been found
Returns:
list: a list of dict with the following keys:
* object_type: the swh object type (content/directory/release/revision/snapshot)
* object_icon: the swh object icon to use in HTML views
* swh_id: the computed swh object persistent identifier
* swh_id_url: the url resolving the persistent identifier
* show_options: boolean indicating if the persistent id options must
be displayed in persistent ids HTML view
""" # noqa
swh_ids = []
for swh_object in swh_objects:
if not swh_object['id']:
continue
swh_id = get_swh_persistent_id(swh_object['type'], swh_object['id'])
show_options = swh_object['type'] == 'content' or \
(snapshot_context and snapshot_context['origin_info'] is not None)
- object_icon = mark_safe('')
- if swh_object['type'] == 'directory':
- object_icon = mark_safe('')
- elif swh_object['type'] == 'release':
- object_icon = mark_safe('')
- elif swh_object['type'] == 'revision':
- object_icon = mark_safe('') # noqa
- elif swh_object['type'] == 'snapshot':
- object_icon = mark_safe('')
+ object_icon = swh_object_icons[swh_object['type']]
swh_ids.append({
'object_type': swh_object['type'],
'object_icon': object_icon,
'swh_id': swh_id,
'swh_id_url': reverse('browse-swh-id',
url_args={'swh_id': swh_id}),
'show_options': show_options
})
return swh_ids
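# Illustrative usage sketch (hypothetical object id):
#
#   swh_ids = get_swh_persistent_ids([{'type': 'directory',
#                                      'id': '1fee702a...'}])
#   # swh_ids[0]['swh_id'] would look like 'swh:1:dir:1fee702a...'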
diff --git a/swh/web/browse/views/content.py b/swh/web/browse/views/content.py
index 6c7facfee..657fb72eb 100644
--- a/swh/web/browse/views/content.py
+++ b/swh/web/browse/views/content.py
@@ -1,297 +1,296 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import difflib
import json
from distutils.util import strtobool
from django.http import HttpResponse
from django.shortcuts import render
from django.template.defaultfilters import filesizeformat
from swh.model.hashutil import hash_to_hex
from swh.web.common import query
from swh.web.common.utils import (
- reverse, gen_path_info
+ reverse, gen_path_info, swh_object_icons
)
from swh.web.common.exc import NotFoundExc, handle_view_exception
from swh.web.browse.utils import (
request_content, prepare_content_for_display,
content_display_max_size, get_snapshot_context,
get_swh_persistent_ids, gen_link
)
from swh.web.browse.browseurls import browse_route
@browse_route(r'content/(?P<query_string>.+)/raw/',
view_name='browse-content-raw')
def content_raw(request, query_string):
"""Django view that produces a raw display of a SWH content identified
by its hash value.
The url that points to it is :http:get:`/browse/content/[(algo_hash):](hash)/raw/`
""" # noqa
try:
algo, checksum = query.parse_hash(query_string)
checksum = hash_to_hex(checksum)
content_data = request_content(query_string, max_size=None,
reencode=False)
except Exception as exc:
return handle_view_exception(request, exc)
filename = request.GET.get('filename', None)
if not filename:
filename = '%s_%s' % (algo, checksum)
if content_data['mimetype'].startswith('text/') or \
content_data['mimetype'] == 'inode/x-empty':
response = HttpResponse(content_data['raw_data'],
content_type="text/plain")
response['Content-disposition'] = 'filename=%s' % filename
else:
response = HttpResponse(content_data['raw_data'],
content_type='application/octet-stream')
response['Content-disposition'] = 'attachment; filename=%s' % filename
return response
_auto_diff_size_limit = 20000
@browse_route(r'content/(?P<from_query_string>.*)/diff/(?P<to_query_string>.*)', # noqa
view_name='diff-contents')
def _contents_diff(request, from_query_string, to_query_string):
"""
Browse endpoint used to compute unified diffs between two contents.
Diffs are generated only if the two contents are textual.
By default, diffs whose size are greater than 20 kB will
not be generated. To force the generation of large diffs,
the 'force' boolean query parameter must be used.
Args:
request: input django http request
from_query_string: a string of the form "[ALGO_HASH:]HASH" where
optional ALGO_HASH can be either *sha1*, *sha1_git*, *sha256*,
or *blake2s256* (default to *sha1*) and HASH the hexadecimal
representation of the hash value identifying the first content
to_query_string: same as above for identifying the second content
Returns:
A JSON object containing the unified diff.
"""
diff_data = {}
content_from = None
content_to = None
content_from_size = 0
content_to_size = 0
content_from_lines = []
content_to_lines = []
force = request.GET.get('force', 'false')
path = request.GET.get('path', None)
language = 'nohighlight'
force = bool(strtobool(force))
if from_query_string == to_query_string:
diff_str = 'File renamed without changes'
else:
try:
text_diff = True
if from_query_string:
content_from = \
request_content(from_query_string, max_size=None)
content_from_display_data = \
prepare_content_for_display(content_from['raw_data'],
content_from['mimetype'], path)
language = content_from_display_data['language']
content_from_size = content_from['length']
if not (content_from['mimetype'].startswith('text/') or
content_from['mimetype'] == 'inode/x-empty'):
text_diff = False
if text_diff and to_query_string:
content_to = request_content(to_query_string, max_size=None)
content_to_display_data = prepare_content_for_display(
content_to['raw_data'], content_to['mimetype'], path)
language = content_to_display_data['language']
content_to_size = content_to['length']
if not (content_to['mimetype'].startswith('text/') or
content_to['mimetype'] == 'inode/x-empty'):
text_diff = False
diff_size = abs(content_to_size - content_from_size)
if not text_diff:
diff_str = 'Diffs are not generated for non textual content'
language = 'nohighlight'
elif not force and diff_size > _auto_diff_size_limit:
diff_str = 'Large diffs are not automatically computed'
language = 'nohighlight'
else:
if content_from:
content_from_lines = \
content_from['raw_data'].decode('utf-8')\
.splitlines(True)
if content_from_lines and \
content_from_lines[-1][-1] != '\n':
content_from_lines[-1] += '[swh-no-nl-marker]\n'
if content_to:
content_to_lines = content_to['raw_data'].decode('utf-8')\
.splitlines(True)
if content_to_lines and content_to_lines[-1][-1] != '\n':
content_to_lines[-1] += '[swh-no-nl-marker]\n'
diff_lines = difflib.unified_diff(content_from_lines,
content_to_lines)
diff_str = ''.join(list(diff_lines)[2:])
except Exception as e:
diff_str = str(e)
diff_data['diff_str'] = diff_str
diff_data['language'] = language
diff_data_json = json.dumps(diff_data, separators=(',', ': '))
return HttpResponse(diff_data_json, content_type='application/json')
@browse_route(r'content/(?P<query_string>.+)/',
view_name='browse-content')
def content_display(request, query_string):
"""Django view that produces an HTML display of a SWH content identified
by its hash value.
The url that points to it is :http:get:`/browse/content/[(algo_hash):](hash)/`
""" # noqa
try:
algo, checksum = query.parse_hash(query_string)
checksum = hash_to_hex(checksum)
content_data = request_content(query_string,
raise_if_unavailable=False)
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
snapshot_context = None
if origin_url:
try:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url)
except Exception:
raw_cnt_url = reverse('browse-content',
url_args={'query_string': query_string})
error_message = \
('The Software Heritage archive has a content '
'with the hash you provided but the origin '
'mentioned in your request appears broken: %s. '
'Please check the URL and try again.\n\n'
'Nevertheless, you can still browse the content '
'without origin information: %s'
% (gen_link(origin_url), gen_link(raw_cnt_url)))
raise NotFoundExc(error_message)
if snapshot_context:
snapshot_context['visit_info'] = None
except Exception as exc:
return handle_view_exception(request, exc)
path = request.GET.get('path', None)
content = None
language = None
mimetype = None
if content_data['raw_data'] is not None:
content_display_data = prepare_content_for_display(
content_data['raw_data'], content_data['mimetype'], path)
content = content_display_data['content_data']
language = content_display_data['language']
mimetype = content_display_data['mimetype']
root_dir = None
filename = None
path_info = None
breadcrumbs = []
if path:
split_path = path.split('/')
root_dir = split_path[0]
filename = split_path[-1]
path = path.replace(root_dir + '/', '')
path = path[:-len(filename)]
path_info = gen_path_info(path)
breadcrumbs.append({'name': root_dir[:7],
'url': reverse('browse-directory',
url_args={'sha1_git': root_dir})})
for pi in path_info:
breadcrumbs.append({'name': pi['name'],
'url': reverse('browse-directory',
url_args={'sha1_git': root_dir,
'path': pi['path']})})
breadcrumbs.append({'name': filename,
'url': None})
query_params = None
if filename:
query_params = {'filename': filename}
content_raw_url = reverse('browse-content-raw',
url_args={'query_string': query_string},
query_params=query_params)
content_metadata = {
'sha1 checksum': content_data['checksums']['sha1'],
'sha1_git checksum': content_data['checksums']['sha1_git'],
'sha256 checksum': content_data['checksums']['sha256'],
'blake2s256 checksum': content_data['checksums']['blake2s256'],
'mime type': content_data['mimetype'],
'encoding': content_data['encoding'],
'size': filesizeformat(content_data['length']),
'language': content_data['language'],
'licenses': content_data['licenses'],
'filename': filename
}
if filename:
content_metadata['filename'] = filename
sha1_git = content_data['checksums']['sha1_git']
swh_ids = get_swh_persistent_ids([{'type': 'content',
'id': sha1_git}])
heading = 'Content - %s' % sha1_git
if breadcrumbs:
content_path = '/'.join([bc['name'] for bc in breadcrumbs])
heading += ' - %s' % content_path
return render(request, 'browse/content.html',
{'heading': heading,
'swh_object_id': swh_ids[0]['swh_id'],
'swh_object_name': 'Content',
- 'swh_object_icon': 'fa fa-file-text',
'swh_object_metadata': content_metadata,
'content': content,
'content_size': content_data['length'],
'max_content_size': content_display_max_size,
'mimetype': mimetype,
'language': language,
'breadcrumbs': breadcrumbs,
'top_right_link': {
'url': content_raw_url,
- 'icon': 'fa fa-file-text',
+ 'icon': swh_object_icons['content'],
'text': 'Raw File'
},
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': swh_ids,
'error_code': content_data['error_code'],
'error_message': content_data['error_message'],
'error_description': content_data['error_description']},
status=content_data['error_code'])
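The hard-coded 'swh_object_icon' entries removed throughout this diff are replaced by lookups into swh_object_icons, a mapping imported from swh.web.common.utils. Its definition is not part of this patch; judging from the icon classes dropped here, it presumably contains at least the following (a sketch, not the authoritative mapping):

# sketch of the swh_object_icons mapping assumed by this diff; the real mapping in
# swh.web.common.utils may define additional entries
swh_object_icons = {
    'content': 'fa fa-file-text',
    'directory': 'fa fa-folder',
    'person': 'fa fa-user',
    'release': 'fa fa-tag',
    'revision': 'octicon octicon-git-commit',
    'revisions history': 'fa fa-history',
    'visits': 'fa fa-calendar',
}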
diff --git a/swh/web/browse/views/directory.py b/swh/web/browse/views/directory.py
index 946282b21..fecc7b45c 100644
--- a/swh/web/browse/views/directory.py
+++ b/swh/web/browse/views/directory.py
@@ -1,154 +1,153 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from django.shortcuts import render, redirect
from django.template.defaultfilters import filesizeformat
from swh.web.common import service
from swh.web.common.utils import (
reverse, gen_path_info
)
from swh.web.common.exc import handle_view_exception, NotFoundExc
from swh.web.browse.utils import (
get_directory_entries, get_snapshot_context,
get_readme_to_display, get_swh_persistent_ids,
gen_link
)
from swh.web.browse.browseurls import browse_route
@browse_route(r'directory/(?P<sha1_git>[0-9a-f]+)/',
r'directory/(?P<sha1_git>[0-9a-f]+)/(?P<path>.+)/',
view_name='browse-directory')
def directory_browse(request, sha1_git, path=None):
"""Django view for browsing the content of a SWH directory identified
by its sha1_git value.
The url that points to it is :http:get:`/browse/directory/(sha1_git)/[(path)/]`
""" # noqa
root_sha1_git = sha1_git
try:
if path:
dir_info = service.lookup_directory_with_path(sha1_git, path)
# some readme files can reference assets reachable from the
# browsed directory, handle that special case in order to
# correctly display them
if dir_info and dir_info['type'] == 'file':
file_raw_url = reverse(
'browse-content-raw',
url_args={'query_string': dir_info['checksums']['sha1']})
return redirect(file_raw_url)
sha1_git = dir_info['target']
dirs, files = get_directory_entries(sha1_git)
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
snapshot_context = None
if origin_url:
try:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url)
except Exception:
raw_dir_url = reverse('browse-directory',
url_args={'sha1_git': sha1_git})
error_message = \
('The Software Heritage archive has a directory '
'with the hash you provided but the origin '
'mentioned in your request appears broken: %s. '
'Please check the URL and try again.\n\n'
'Nevertheless, you can still browse the directory '
'without origin information: %s'
% (gen_link(origin_url), gen_link(raw_dir_url)))
raise NotFoundExc(error_message)
if snapshot_context:
snapshot_context['visit_info'] = None
except Exception as exc:
return handle_view_exception(request, exc)
path_info = gen_path_info(path)
breadcrumbs = []
breadcrumbs.append({'name': root_sha1_git[:7],
'url': reverse('browse-directory',
url_args={'sha1_git': root_sha1_git})})
for pi in path_info:
breadcrumbs.append({'name': pi['name'],
'url': reverse('browse-directory',
url_args={'sha1_git': root_sha1_git,
'path': pi['path']})})
path = '' if path is None else (path + '/')
for d in dirs:
if d['type'] == 'rev':
d['url'] = reverse('browse-revision',
url_args={'sha1_git': d['target']})
else:
d['url'] = reverse('browse-directory',
url_args={'sha1_git': root_sha1_git,
'path': path + d['name']})
sum_file_sizes = 0
readmes = {}
for f in files:
query_string = 'sha1_git:' + f['target']
f['url'] = reverse('browse-content',
url_args={'query_string': query_string},
query_params={'path': root_sha1_git + '/' +
path + f['name']})
if f['length'] is not None:
sum_file_sizes += f['length']
f['length'] = filesizeformat(f['length'])
if f['name'].lower().startswith('readme'):
readmes[f['name']] = f['checksums']['sha1']
readme_name, readme_url, readme_html = get_readme_to_display(readmes)
sum_file_sizes = filesizeformat(sum_file_sizes)
dir_metadata = {'id': sha1_git,
'number of regular files': len(files),
'number of subdirectories': len(dirs),
'sum of regular file sizes': sum_file_sizes}
vault_cooking = {
'directory_context': True,
'directory_id': sha1_git,
'revision_context': False,
'revision_id': None
}
swh_ids = get_swh_persistent_ids([{'type': 'directory',
'id': sha1_git}])
heading = 'Directory - %s' % sha1_git
if breadcrumbs:
dir_path = '/'.join([bc['name'] for bc in breadcrumbs]) + '/'
heading += ' - %s' % dir_path
return render(request, 'browse/directory.html',
{'heading': heading,
'swh_object_id': swh_ids[0]['swh_id'],
'swh_object_name': 'Directory',
- 'swh_object_icon': 'fa fa-folder',
'swh_object_metadata': dir_metadata,
'dirs': dirs,
'files': files,
'breadcrumbs': breadcrumbs,
'top_right_link': None,
'readme_name': readme_name,
'readme_url': readme_url,
'readme_html': readme_html,
'snapshot_context': snapshot_context,
'vault_cooking': vault_cooking,
'show_actions_menu': True,
'swh_ids': swh_ids})
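The breadcrumb loops in directory_browse rely on gen_path_info to split a directory path into cumulative components. The shape shown below is an assumption inferred from how pi['name'] and pi['path'] are consumed above, not something stated in this patch:

# hypothetical illustration of gen_path_info as consumed by the breadcrumb code above
from swh.web.common.utils import gen_path_info

path_info = gen_path_info('lib/parser/ast.py')
# presumably yields cumulative entries such as:
#   [{'name': 'lib', 'path': 'lib'},
#    {'name': 'parser', 'path': 'lib/parser'},
#    {'name': 'ast.py', 'path': 'lib/parser/ast.py'}]
# each entry then becomes one 'browse-directory' breadcrumb link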
diff --git a/swh/web/browse/views/origin.py b/swh/web/browse/views/origin.py
index 9502626e5..f475b27c6 100644
--- a/swh/web/browse/views/origin.py
+++ b/swh/web/browse/views/origin.py
@@ -1,241 +1,240 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
from distutils.util import strtobool
from django.http import HttpResponse
from django.shortcuts import render, redirect
from swh.web.common import service
from swh.web.common.utils import (
reverse, format_utc_iso_date, parse_timestamp,
get_origin_visits
)
from swh.web.common.exc import handle_view_exception
from swh.web.browse.utils import (
get_origin_info, get_snapshot_context
)
from swh.web.browse.browseurls import browse_route
from .utils.snapshot_context import (
browse_snapshot_directory, browse_snapshot_content,
browse_snapshot_log, browse_snapshot_branches,
browse_snapshot_releases
)
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/', # noqa
r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/(?P<path>.+)/', # noqa
r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/directory/', # noqa
r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/directory/(?P<path>.+)/', # noqa
r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/', # noqa
r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/(?P<path>.+)/', # noqa
r'origin/(?P<origin_url>.+)/directory/', # noqa
r'origin/(?P<origin_url>.+)/directory/(?P<path>.+)/', # noqa
view_name='browse-origin-directory')
def origin_directory_browse(request, origin_url, origin_type=None,
timestamp=None, path=None):
"""Django view for browsing the content of a SWH directory associated
with an origin for a given visit.
The url scheme that points to it is the following:
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/directory/[(path)/]`
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/visit/(timestamp)/directory/[(path)/]`
""" # noqa
return browse_snapshot_directory(
request, origin_type=origin_type, origin_url=origin_url,
timestamp=timestamp, path=path)
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/content/(?P<path>.+)/', # noqa
r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/content/(?P<path>.+)/', # noqa
r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/content/(?P<path>.+)/', # noqa
r'origin/(?P<origin_url>.+)/content/(?P<path>.+)/', # noqa
view_name='browse-origin-content')
def origin_content_browse(request, origin_url, origin_type=None, path=None,
timestamp=None):
"""Django view that produces an HTML display of a SWH content
associated with an origin for a given visit.
The url scheme that points to it is the following:
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/content/(path)/`
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/visit/(timestamp)/content/(path)/`
""" # noqa
return browse_snapshot_content(request, origin_type=origin_type,
origin_url=origin_url, timestamp=timestamp,
path=path)
PER_PAGE = 20
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/log/', # noqa
r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/log/',
r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/log/', # noqa
r'origin/(?P<origin_url>.+)/log/',
view_name='browse-origin-log')
def origin_log_browse(request, origin_url, origin_type=None, timestamp=None):
"""Django view that produces an HTML display of revisions history (aka
the commit log) associated with a SWH origin.
The url scheme that points to it is the following:
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/log/`
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/visit/(timestamp)/log/`
""" # noqa
return browse_snapshot_log(request, origin_type=origin_type,
origin_url=origin_url, timestamp=timestamp)
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/branches/', # noqa
r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/branches/', # noqa
r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/branches/', # noqa
r'origin/(?P<origin_url>.+)/branches/', # noqa
view_name='browse-origin-branches')
def origin_branches_browse(request, origin_url, origin_type=None,
timestamp=None):
"""Django view that produces an HTML display of the list of branches
associated with an origin for a given visit.
The url scheme that points to it is the following:
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/branches/`
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/visit/(timestamp)/branches/`
""" # noqa
return browse_snapshot_branches(request, origin_type=origin_type,
origin_url=origin_url, timestamp=timestamp)
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/releases/', # noqa
r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/releases/', # noqa
r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/releases/', # noqa
r'origin/(?P<origin_url>.+)/releases/', # noqa
view_name='browse-origin-releases')
def origin_releases_browse(request, origin_url, origin_type=None,
timestamp=None):
"""Django view that produces an HTML display of the list of releases
associated with an origin for a given visit.
The url scheme that points to it is the following:
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/releases/`
* :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/visit/(timestamp)/releases/`
""" # noqa
return browse_snapshot_releases(request, origin_type=origin_type,
origin_url=origin_url, timestamp=timestamp)
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visits/',
r'origin/(?P<origin_url>.+)/visits/',
view_name='browse-origin-visits')
def origin_visits_browse(request, origin_url, origin_type=None):
"""Django view that produces an HTML display of visits reporting
for a swh origin identified by its id or its url.
The url that points to it is :http:get:`/browse/origin/[(origin_type)/url/](origin_url)/visits/`.
""" # noqa
try:
origin_info = get_origin_info(origin_url, origin_type)
origin_visits = get_origin_visits(origin_info)
snapshot_context = get_snapshot_context(origin_type=origin_type,
origin_url=origin_url)
except Exception as exc:
return handle_view_exception(request, exc)
for i, visit in enumerate(origin_visits):
url_date = format_utc_iso_date(visit['date'], '%Y-%m-%dT%H:%M:%SZ')
visit['fmt_date'] = format_utc_iso_date(visit['date'])
query_params = {}
if i < len(origin_visits) - 1:
if visit['date'] == origin_visits[i+1]['date']:
query_params = {'visit_id': visit['visit']}
if i > 0:
if visit['date'] == origin_visits[i-1]['date']:
query_params = {'visit_id': visit['visit']}
snapshot = visit['snapshot'] if visit['snapshot'] else ''
visit['browse_url'] = reverse('browse-origin-directory',
url_args={'origin_type': origin_type,
'origin_url': origin_url,
'timestamp': url_date},
query_params=query_params)
if not snapshot:
visit['snapshot'] = ''
visit['date'] = parse_timestamp(visit['date']).timestamp()
heading = 'Origin visits - %s' % origin_url
return render(request, 'browse/origin-visits.html',
{'heading': heading,
'swh_object_name': 'Visits',
- 'swh_object_icon': 'fa fa-calendar',
'swh_object_metadata': origin_info,
'origin_visits': origin_visits,
'origin_info': origin_info,
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': False})
@browse_route(r'origin/search/(?P<url_pattern>.+)/',
view_name='browse-origin-search')
def _origin_search(request, url_pattern):
"""Internal browse endpoint to search for origins whose urls contain
a provided string pattern or match a provided regular expression.
The search is performed in a case insensitive way.
"""
offset = int(request.GET.get('offset', '0'))
limit = int(request.GET.get('limit', '50'))
regexp = request.GET.get('regexp', 'false')
with_visit = request.GET.get('with_visit', 'false')
url_pattern = url_pattern.replace('///', '\\')
try:
results = service.search_origin(url_pattern, offset, limit,
bool(strtobool(regexp)),
bool(strtobool(with_visit)))
results = json.dumps(list(results), sort_keys=True, indent=4,
separators=(',', ': '))
except Exception as exc:
return handle_view_exception(request, exc, html_response=False)
return HttpResponse(results, content_type='application/json')
@browse_route(r'origin/(?P<origin_id>[0-9]+)/latest_snapshot/',
view_name='browse-origin-latest-snapshot')
def _origin_latest_snapshot(request, origin_id):
"""
Internal browse endpoint used to check if an origin has already
been visited by Software Heritage and has at least one full visit.
"""
result = service.lookup_latest_origin_snapshot(origin_id,
allowed_statuses=['full'])
result = json.dumps(result, sort_keys=True, indent=4,
separators=(',', ': '))
return HttpResponse(result, content_type='application/json')
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/',
r'origin/(?P<origin_url>.+)/',
view_name='browse-origin')
def origin_browse(request, origin_url, origin_type=None):
"""Django view that redirects to the display of the latest archived
snapshot for a given software origin.
""" # noqa
last_snapshot_url = reverse('browse-origin-directory',
url_args={'origin_type': origin_type,
'origin_url': origin_url})
return redirect(last_snapshot_url)
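Because the route patterns above overlap, origin_directory_browse and the sibling views accept URLs with or without an explicit origin type and visit timestamp. The URLs below are illustrative examples consistent with those patterns, not taken from this patch:

# illustrative URLs matched by the 'browse-origin-*' routes declared above
#   /browse/origin/https://github.com/python/cpython/directory/
#   /browse/origin/git/url/https://github.com/python/cpython/directory/Lib/
#   /browse/origin/https://github.com/python/cpython/visit/1527301278/log/
#   /browse/origin/https://github.com/python/cpython/branches/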
diff --git a/swh/web/browse/views/person.py b/swh/web/browse/views/person.py
index b1cecd6e1..1b8a7da92 100644
--- a/swh/web/browse/views/person.py
+++ b/swh/web/browse/views/person.py
@@ -1,53 +1,52 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from django.shortcuts import render
from swh.web.common import service
from swh.web.common.exc import handle_view_exception
from swh.web.browse.browseurls import browse_route
from swh.web.browse.utils import get_snapshot_context
@browse_route(r'person/(?P<person_id>[0-9]+)/',
view_name='browse-person')
def person_browse(request, person_id):
"""
Django view that produces an HTML display of a swh person
identified by its id.
The url that points to it is :http:get:`/browse/person/(person_id)/`.
"""
try:
snapshot_context = None
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
snapshot_id = request.GET.get('snapshot_id', None)
if origin_url:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url)
elif snapshot_id:
snapshot_context = get_snapshot_context(snapshot_id)
person = service.lookup_person(person_id)
except Exception as exc:
return handle_view_exception(request, exc)
heading = 'Person - %s' % person['fullname']
if snapshot_context:
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_url:
context_found = 'origin: %s' % origin_url
heading += ' - %s' % context_found
return render(request, 'browse/person.html',
{'heading': heading,
'swh_object_name': 'Person',
- 'swh_object_icon': 'fa fa-user',
'swh_object_metadata': person,
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': False})
diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py
index f98dd33f4..d2a9176b2 100644
--- a/swh/web/browse/views/release.py
+++ b/swh/web/browse/views/release.py
@@ -1,219 +1,212 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from django.shortcuts import render
from swh.web.common import service
from swh.web.common.utils import (
reverse, format_utc_iso_date
)
from swh.web.common.exc import NotFoundExc, handle_view_exception
from swh.web.browse.browseurls import browse_route
from swh.web.browse.utils import (
gen_person_link, gen_revision_link,
get_snapshot_context, gen_link,
gen_snapshot_link, get_swh_persistent_ids
)
@browse_route(r'release/(?P<sha1_git>[0-9a-f]+)/',
view_name='browse-release')
def release_browse(request, sha1_git):
"""
Django view that produces an HTML display of a SWH release
identified by its id.
The url that points to it is :http:get:`/browse/release/(sha1_git)/`.
"""
try:
release = service.lookup_release(sha1_git)
snapshot_context = None
origin_info = None
snapshot_id = request.GET.get('snapshot_id', None)
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
if origin_url:
try:
snapshot_context = \
get_snapshot_context(snapshot_id, origin_type,
origin_url, timestamp,
visit_id)
except Exception:
raw_rel_url = reverse('browse-release',
url_args={'sha1_git': sha1_git})
error_message = \
('The Software Heritage archive has a release '
'with the hash you provided but the origin '
'mentioned in your request appears broken: %s. '
'Please check the URL and try again.\n\n'
'Nevertheless, you can still browse the release '
'without origin information: %s'
% (gen_link(origin_url), gen_link(raw_rel_url)))
raise NotFoundExc(error_message)
origin_info = snapshot_context['origin_info']
elif snapshot_id:
snapshot_context = get_snapshot_context(snapshot_id)
except Exception as exc:
return handle_view_exception(request, exc)
release_data = {}
author_name = 'None'
release_data['author'] = 'None'
if release['author']:
author_name = release['author']['name'] or \
release['author']['fullname']
release_data['author'] = \
gen_person_link(release['author']['id'], author_name,
snapshot_context)
release_data['date'] = format_utc_iso_date(release['date'])
release_data['id'] = sha1_git
release_data['name'] = release['name']
release_data['synthetic'] = release['synthetic']
release_data['target type'] = release['target_type']
if release['target_type'] == 'revision':
release_data['target'] = \
gen_revision_link(release['target'],
snapshot_context=snapshot_context)
elif release['target_type'] == 'content':
content_url = \
reverse('browse-content',
url_args={'query_string': 'sha1_git:' + release['target']})
release_data['target'] = gen_link(content_url, release['target'])
elif release['target_type'] == 'directory':
directory_url = \
reverse('browse-directory',
url_args={'sha1_git': release['target']})
release_data['target'] = gen_link(directory_url, release['target'])
elif release['target_type'] == 'release':
release_url = \
reverse('browse-release',
url_args={'sha1_git': release['target']})
release_data['target'] = gen_link(release_url, release['target'])
release_note_lines = []
if release['message']:
release_note_lines = release['message'].split('\n')
vault_cooking = None
query_params = {}
if snapshot_id:
query_params = {'snapshot_id': snapshot_id}
elif origin_info:
query_params = {'origin': origin_info['url']}
- target_icon = ''
target_url = ''
if release['target_type'] == 'revision':
- target_icon = 'octicon octicon-git-commit'
target_url = reverse('browse-revision',
url_args={'sha1_git': release['target']},
query_params=query_params)
try:
revision = service.lookup_revision(release['target'])
vault_cooking = {
'directory_context': True,
'directory_id': revision['directory'],
'revision_context': True,
'revision_id': release['target']
}
except Exception:
pass
elif release['target_type'] == 'directory':
- target_icon = 'fa fa-folder'
target_url = reverse('browse-directory',
url_args={'sha1_git': release['target']},
query_params=query_params)
try:
revision = service.lookup_directory(release['target'])
vault_cooking = {
'directory_context': True,
'directory_id': revision['directory'],
'revision_context': False,
'revision_id': None
}
except Exception:
pass
elif release['target_type'] == 'content':
- target_icon = 'fa fa-file-text'
target_url = reverse('browse-content',
url_args={'query_string': 'sha1_git:' + release['target']},
query_params=query_params)
elif release['target_type'] == 'release':
- target_icon = 'fa fa-tag'
target_url = reverse('browse-release',
url_args={'sha1_git': release['target']},
query_params=query_params)
- release['target_icon'] = target_icon
release['target_url'] = target_url
if snapshot_context:
release_data['snapshot id'] = snapshot_context['snapshot_id']
if origin_info:
release_url = reverse('browse-release',
url_args={'sha1_git': release['id']})
release_data['context-independent release'] = \
gen_link(release_url, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
release_data['origin id'] = origin_info['id']
release_data['origin type'] = origin_info['type']
release_data['origin url'] = gen_link(origin_info['url'],
origin_info['url'])
browse_snapshot_link = \
gen_snapshot_link(snapshot_context['snapshot_id'],
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
release_data['snapshot'] = browse_snapshot_link
swh_objects = [{'type': 'release',
'id': sha1_git}]
if snapshot_context:
snapshot_id = snapshot_context['snapshot_id']
if snapshot_id:
swh_objects.append({'type': 'snapshot',
'id': snapshot_id})
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
note_header = 'None'
if len(release_note_lines) > 0:
note_header = release_note_lines[0]
release['note_header'] = note_header
release['note_body'] = '\n'.join(release_note_lines[1:])
heading = 'Release - %s' % release['name']
if snapshot_context:
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_info:
context_found = 'origin: %s' % origin_info['url']
heading += ' - %s' % context_found
return render(request, 'browse/release.html',
{'heading': heading,
'swh_object_id': swh_ids[0]['swh_id'],
'swh_object_name': 'Release',
- 'swh_object_icon': 'fa fa-tag',
'swh_object_metadata': release_data,
'release': release,
'snapshot_context': snapshot_context,
'show_actions_menu': True,
'breadcrumbs': None,
'vault_cooking': vault_cooking,
'top_right_link': None,
'swh_ids': swh_ids})
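With target_icon no longer computed in release_browse, the icon for the release target is expected to be derived from the target type itself, presumably through the same swh_object_icons mapping. A minimal sketch under that assumption (the corresponding template change is not part of this hunk):

# sketch: deriving the target icon from the target type instead of a per-view 'target_icon'
from swh.web.common.utils import swh_object_icons

def target_icon_class(target_type):
    # the fallback class is an arbitrary placeholder, not taken from the codebase
    return swh_object_icons.get(target_type, 'fa fa-question-circle')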
diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py
index 495b0f770..92d0aebe5 100644
--- a/swh/web/browse/views/revision.py
+++ b/swh/web/browse/views/revision.py
@@ -1,552 +1,544 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import hashlib
import json
import textwrap
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.template.defaultfilters import filesizeformat
from django.utils.safestring import mark_safe
from swh.model.identifiers import persistent_identifier
from swh.web.common import service
from swh.web.common.utils import (
- reverse, format_utc_iso_date, gen_path_info
+ reverse, format_utc_iso_date, gen_path_info, swh_object_icons
)
from swh.web.common.exc import NotFoundExc, handle_view_exception
from swh.web.browse.browseurls import browse_route
from swh.web.browse.utils import (
- gen_link, gen_person_link, gen_revision_link,
+ gen_link, gen_person_link, gen_revision_link, gen_revision_url,
prepare_revision_log_for_display,
get_snapshot_context, gen_snapshot_directory_link,
get_revision_log_url, get_directory_entries,
gen_directory_link, request_content, prepare_content_for_display,
content_display_max_size, gen_snapshot_link, get_readme_to_display,
get_swh_persistent_ids
)
def _gen_content_url(revision, query_string, path, snapshot_context):
if snapshot_context:
url_args = snapshot_context['url_args']
url_args['path'] = path
query_params = snapshot_context['query_params']
query_params['revision'] = revision['id']
content_url = reverse('browse-origin-content',
url_args=url_args,
query_params=query_params)
else:
content_path = '%s/%s' % (revision['directory'], path)
content_url = reverse('browse-content',
url_args={'query_string': query_string},
query_params={'path': content_path})
return content_url
def _gen_diff_link(idx, diff_anchor, link_text):
if idx < _max_displayed_file_diffs:
return gen_link(diff_anchor, link_text)
else:
return link_text
# TODO: put in conf
_max_displayed_file_diffs = 1000
def _gen_revision_changes_list(revision, changes, snapshot_context):
"""
Returns an HTML string describing the file changes
introduced in a revision.
As this string will be displayed in the browse revision view,
links to adequate file diffs are also generated.
Args:
revision (str): hexadecimal representation of a revision identifier
changes (list): list of file changes in the revision
snapshot_context (dict): optional origin context used to reverse
the content urls
Returns:
A string to insert in a revision HTML view.
"""
changes_msg = []
for i, change in enumerate(changes):
hasher = hashlib.sha1()
from_query_string = ''
to_query_string = ''
diff_id = 'diff-'
if change['from']:
from_query_string = 'sha1_git:' + change['from']['target']
diff_id += change['from']['target'] + '-' + change['from_path']
diff_id += '-'
if change['to']:
to_query_string = 'sha1_git:' + change['to']['target']
diff_id += change['to']['target'] + change['to_path']
change['path'] = change['to_path'] or change['from_path']
url_args = {'from_query_string': from_query_string,
'to_query_string': to_query_string}
query_params = {'path': change['path']}
change['diff_url'] = reverse('diff-contents',
url_args=url_args,
query_params=query_params)
hasher.update(diff_id.encode('utf-8'))
diff_id = hasher.hexdigest()
change['id'] = diff_id
panel_diff_link = '#panel_' + diff_id
if change['type'] == 'modify':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
changes_msg.append('modified: %s' %
_gen_diff_link(i, panel_diff_link,
change['to_path']))
elif change['type'] == 'insert':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
changes_msg.append('new file: %s' %
_gen_diff_link(i, panel_diff_link,
change['to_path']))
elif change['type'] == 'delete':
parent = service.lookup_revision(revision['parents'][0])
change['content_url'] = \
_gen_content_url(parent,
from_query_string,
change['from_path'], snapshot_context)
changes_msg.append('deleted: %s' %
_gen_diff_link(i, panel_diff_link,
change['from_path']))
elif change['type'] == 'rename':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
link_text = change['from_path'] + ' → ' + change['to_path']
changes_msg.append('renamed: %s' %
_gen_diff_link(i, panel_diff_link, link_text))
if not changes:
changes_msg.append('No changes')
return mark_safe('\n'.join(changes_msg))
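The string assembled by _gen_revision_changes_list is a newline-joined list of per-file change messages; the sample below is hypothetical and only illustrates the four message forms produced above:

# hypothetical output of _gen_revision_changes_list for a small revision:
#   modified: docs/index.rst
#   new file: swh/web/browse/views/content.py
#   renamed: old_name.py → new_name.py
#   deleted: setup.cfg
# each path is wrapped by _gen_diff_link in an anchor to its diff panel while the
# change index stays below _max_displayed_file_diffs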
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/diff/',
view_name='diff-revision')
def _revision_diff(request, sha1_git):
"""
Browse internal endpoint to compute revision diff
"""
try:
revision = service.lookup_revision(sha1_git)
snapshot_context = None
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
if origin_url:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url,
timestamp, visit_id)
except Exception as exc:
return handle_view_exception(request, exc)
changes = service.diff_revision(sha1_git)
changes_msg = _gen_revision_changes_list(revision, changes,
snapshot_context)
diff_data = {
'total_nb_changes': len(changes),
'changes': changes[:_max_displayed_file_diffs],
'changes_msg': changes_msg
}
diff_data_json = json.dumps(diff_data, separators=(',', ': '))
return HttpResponse(diff_data_json, content_type='application/json')
NB_LOG_ENTRIES = 20
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/log/',
view_name='browse-revision-log')
def revision_log_browse(request, sha1_git):
"""
Django view that produces an HTML display of the history
log for a SWH revision identified by its id.
The url that points to it is :http:get:`/browse/revision/(sha1_git)/log/`.
""" # noqa
try:
per_page = int(request.GET.get('per_page', NB_LOG_ENTRIES))
revision_log = service.lookup_revision_log(sha1_git,
limit=per_page+1)
revision_log = list(revision_log)
except Exception as exc:
return handle_view_exception(request, exc)
revs_breadcrumb = request.GET.get('revs_breadcrumb', None)
revision_log_display_data = prepare_revision_log_for_display(
revision_log, per_page, revs_breadcrumb)
prev_rev = revision_log_display_data['prev_rev']
prev_revs_breadcrumb = revision_log_display_data['prev_revs_breadcrumb']
prev_log_url = None
if prev_rev:
prev_log_url = \
reverse('browse-revision-log',
url_args={'sha1_git': prev_rev},
query_params={'revs_breadcrumb': prev_revs_breadcrumb,
'per_page': per_page})
next_rev = revision_log_display_data['next_rev']
next_revs_breadcrumb = revision_log_display_data['next_revs_breadcrumb']
next_log_url = None
if next_rev:
next_log_url = \
reverse('browse-revision-log',
url_args={'sha1_git': next_rev},
query_params={'revs_breadcrumb': next_revs_breadcrumb,
'per_page': per_page})
revision_log_data = revision_log_display_data['revision_log_data']
for log in revision_log_data:
log['directory'] = gen_directory_link(
log['directory'],
link_text=''
'Browse files',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
swh_rev_id = persistent_identifier('revision', sha1_git)
return render(request, 'browse/revision-log.html',
{'heading': 'Revision history',
'swh_object_id': swh_rev_id,
'swh_object_name': 'Revisions history',
- 'swh_object_icon': 'fa fa-history',
'swh_object_metadata': None,
'revision_log': revision_log_data,
'next_log_url': next_log_url,
'prev_log_url': prev_log_url,
'breadcrumbs': None,
'top_right_link': None,
'snapshot_context': None,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': None})
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/',
r'revision/(?P<sha1_git>[0-9a-f]+)/(?P<extra_path>.+)/',
view_name='browse-revision')
def revision_browse(request, sha1_git, extra_path=None):
"""
Django view that produces an HTML display of a SWH revision
identified by its id.
The url that points to it is :http:get:`/browse/revision/(sha1_git)/`.
"""
try:
revision = service.lookup_revision(sha1_git)
# some readme files can reference assets reachable from the
# browsed directory, handle that special case in order to
# correctly display them
if extra_path:
dir_info = \
service.lookup_directory_with_path(revision['directory'],
extra_path)
if dir_info and dir_info['type'] == 'file':
file_raw_url = reverse(
'browse-content-raw',
url_args={'query_string': dir_info['checksums']['sha1']})
return redirect(file_raw_url)
origin_info = None
snapshot_context = None
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
snapshot_id = request.GET.get('snapshot_id', None)
path = request.GET.get('path', None)
dir_id = None
dirs, files = None, None
content_data = None
if origin_url:
try:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url,
timestamp, visit_id)
except Exception:
raw_rev_url = reverse('browse-revision',
url_args={'sha1_git': sha1_git})
error_message = \
('The Software Heritage archive has a revision '
'with the hash you provided but the origin '
'mentioned in your request appears broken: %s. '
'Please check the URL and try again.\n\n'
'Nevertheless, you can still browse the revision '
'without origin information: %s'
% (gen_link(origin_url), gen_link(raw_rev_url)))
raise NotFoundExc(error_message)
origin_info = snapshot_context['origin_info']
snapshot_id = snapshot_context['snapshot_id']
elif snapshot_id:
snapshot_context = get_snapshot_context(snapshot_id)
if path:
file_info = \
service.lookup_directory_with_path(revision['directory'], path)
if file_info['type'] == 'dir':
dir_id = file_info['target']
else:
query_string = 'sha1_git:' + file_info['target']
content_data = request_content(query_string,
raise_if_unavailable=False)
else:
dir_id = revision['directory']
if dir_id:
path = '' if path is None else (path + '/')
dirs, files = get_directory_entries(dir_id)
except Exception as exc:
return handle_view_exception(request, exc)
revision_data = {}
author_name = 'None'
revision_data['author'] = 'None'
if revision['author']:
author_name = revision['author']['name'] or \
revision['author']['fullname']
revision_data['author'] = \
gen_person_link(revision['author']['id'], author_name,
snapshot_context)
revision_data['committer'] = 'None'
if revision['committer']:
revision_data['committer'] = \
gen_person_link(revision['committer']['id'],
revision['committer']['name'], snapshot_context)
revision_data['committer date'] = format_utc_iso_date(
revision['committer_date'])
revision_data['date'] = format_utc_iso_date(revision['date'])
if snapshot_context:
revision_data['snapshot id'] = snapshot_id
revision_data['directory'] = \
gen_snapshot_directory_link(snapshot_context, sha1_git,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm', # noqa
'role': 'button'})
else:
revision_data['directory'] = \
gen_directory_link(revision['directory'], link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_data['id'] = sha1_git
revision_data['merge'] = revision['merge']
revision_data['metadata'] = json.dumps(revision['metadata'],
sort_keys=True,
indent=4, separators=(',', ': '))
if origin_info:
revision_data['context-independent revision'] = \
gen_revision_link(sha1_git, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_data['origin id'] = origin_info['id']
revision_data['origin type'] = origin_info['type']
revision_data['origin url'] = gen_link(origin_info['url'],
origin_info['url'])
browse_snapshot_link = \
gen_snapshot_link(snapshot_id, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_data['snapshot'] = browse_snapshot_link
parents = ''
for p in revision['parents']:
parent_link = gen_revision_link(p, snapshot_context=snapshot_context)
parents += parent_link + ' '
revision_data['parents'] = mark_safe(parents)
revision_data['synthetic'] = revision['synthetic']
revision_data['type'] = revision['type']
message_lines = ['None']
if revision['message']:
message_lines = revision['message'].split('\n')
- parents_links = '%s parent%s ' % \
- (len(revision['parents']),
- '' if len(revision['parents']) == 1 else 's')
- parents_links += ' '
+ parents = []
for p in revision['parents']:
- parent_link = gen_revision_link(p, shorten_id=True,
- snapshot_context=snapshot_context)
- parents_links += parent_link
- if p != revision['parents'][-1]:
- parents_links += ' + '
+ parent_url = gen_revision_url(p, snapshot_context)
+ parents.append({'id': p, 'url': parent_url})
path_info = gen_path_info(path)
query_params = {'snapshot_id': snapshot_id,
'origin_type': origin_type,
'origin': origin_url,
'timestamp': timestamp,
'visit_id': visit_id}
breadcrumbs = []
breadcrumbs.append({'name': revision['directory'][:7],
'url': reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)})
for pi in path_info:
query_params['path'] = pi['path']
breadcrumbs.append({'name': pi['name'],
'url': reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)})
vault_cooking = {
'directory_context': False,
'directory_id': None,
'revision_context': True,
'revision_id': sha1_git
}
swh_objects = [{'type': 'revision',
'id': sha1_git}]
content = None
content_size = None
mimetype = None
language = None
readme_name = None
readme_url = None
readme_html = None
readmes = {}
error_code = 200
error_message = ''
error_description = ''
if content_data:
breadcrumbs[-1]['url'] = None
content_size = content_data['length']
mimetype = content_data['mimetype']
if content_data['raw_data']:
content_display_data = prepare_content_for_display(
content_data['raw_data'], content_data['mimetype'], path)
content = content_display_data['content_data']
language = content_display_data['language']
query_params = {}
if path:
query_params['filename'] = path_info[-1]['name']
top_right_link = {
'url': reverse('browse-content-raw',
url_args={'query_string': query_string},
query_params=query_params),
- 'icon': 'fa fa-file-text',
+ 'icon': swh_object_icons['content'],
'text': 'Raw File'
}
swh_objects.append({'type': 'content',
'id': file_info['target']})
error_code = content_data['error_code']
error_message = content_data['error_message']
error_description = content_data['error_description']
else:
for d in dirs:
if d['type'] == 'rev':
d['url'] = reverse('browse-revision',
url_args={'sha1_git': d['target']})
else:
query_params['path'] = path + d['name']
d['url'] = reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)
for f in files:
query_params['path'] = path + f['name']
f['url'] = reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)
if f['length'] is not None:
f['length'] = filesizeformat(f['length'])
if f['name'].lower().startswith('readme'):
readmes[f['name']] = f['checksums']['sha1']
readme_name, readme_url, readme_html = get_readme_to_display(readmes)
top_right_link = {
'url': get_revision_log_url(sha1_git, snapshot_context),
- 'icon': 'fa fa-history',
+ 'icon': swh_object_icons['revisions history'],
'text': 'History'
}
vault_cooking['directory_context'] = True
vault_cooking['directory_id'] = dir_id
swh_objects.append({'type': 'directory',
'id': dir_id})
diff_revision_url = reverse('diff-revision',
url_args={'sha1_git': sha1_git},
query_params={'origin_type': origin_type,
'origin': origin_url,
'timestamp': timestamp,
'visit_id': visit_id})
if snapshot_id:
swh_objects.append({'type': 'snapshot',
'id': snapshot_id})
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
heading = 'Revision - %s - %s' %\
(sha1_git[:7], textwrap.shorten(message_lines[0], width=70))
if snapshot_context:
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_info:
context_found = 'origin: %s' % origin_info['url']
heading += ' - %s' % context_found
return render(request, 'browse/revision.html',
{'heading': heading,
'swh_object_id': swh_ids[0]['swh_id'],
'swh_object_name': 'Revision',
- 'swh_object_icon': 'octicon octicon-git-commit',
'swh_object_metadata': revision_data,
'message_header': message_lines[0],
'message_body': '\n'.join(message_lines[1:]),
- 'parents_links': mark_safe(parents_links),
+ 'parents': parents,
'snapshot_context': snapshot_context,
'dirs': dirs,
'files': files,
'content': content,
'content_size': content_size,
'max_content_size': content_display_max_size,
'mimetype': mimetype,
'language': language,
'readme_name': readme_name,
'readme_url': readme_url,
'readme_html': readme_html,
'breadcrumbs': breadcrumbs,
'top_right_link': top_right_link,
'vault_cooking': vault_cooking,
'diff_revision_url': diff_revision_url,
'show_actions_menu': True,
'swh_ids': swh_ids,
'error_code': error_code,
'error_message': error_message,
'error_description': error_description},
status=error_code)
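The pre-rendered parents_links markup is replaced above by a structured parents list so the template controls the rendering. The entry below uses a hypothetical id and URL purely to illustrate the shape passed to browse/revision.html:

# illustrative shape of the new 'parents' template variable (values are hypothetical)
# parents = [{'id': 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3',
#             'url': '/browse/revision/a94a8fe5ccb19ba61c4c0873d391e987982fbbd3/'}]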
diff --git a/swh/web/browse/views/utils/snapshot_context.py b/swh/web/browse/views/utils/snapshot_context.py
index bae4e1225..365d8f86b 100644
--- a/swh/web/browse/views/utils/snapshot_context.py
+++ b/swh/web/browse/views/utils/snapshot_context.py
@@ -1,962 +1,951 @@
# Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# Utility module implementing Django views for browsing the SWH archive
# in a snapshot context.
# Its purpose is to factorize code for the views reachable from the
# /origin/.* and /snapshot/.* endpoints.
from django.shortcuts import render, redirect
from django.template.defaultfilters import filesizeformat
from swh.model.identifiers import snapshot_identifier
from swh.web.browse.utils import (
get_snapshot_context, get_directory_entries, gen_directory_link,
gen_revision_link, request_content, gen_content_link,
prepare_content_for_display, content_display_max_size,
prepare_revision_log_for_display, gen_snapshot_directory_link,
gen_revision_log_link, gen_link, get_readme_to_display,
get_swh_persistent_ids, process_snapshot_branches
)
from swh.web.common import service
from swh.web.common.exc import (
handle_view_exception, NotFoundExc
)
from swh.web.common.utils import (
- reverse, gen_path_info, format_utc_iso_date
+ reverse, gen_path_info, format_utc_iso_date, swh_object_icons
)
_empty_snapshot_id = snapshot_identifier({'branches': {}})
def _get_branch(branches, branch_name, snapshot_id):
"""
Utility function to get a specific branch from a branches list.
Its purpose is to get the default HEAD branch as some SWH origins
(e.g. those with svn type) do not have it. In that case, check
if there is a master branch instead and return it.
"""
filtered_branches = \
[b for b in branches if b['name'].endswith(branch_name)]
if len(filtered_branches) > 0:
return filtered_branches[0]
elif branch_name == 'HEAD':
filtered_branches = \
[b for b in branches if b['name'].endswith('master')]
if len(filtered_branches) > 0:
return filtered_branches[0]
elif len(branches) > 0:
return branches[0]
else:
# case where a large branches list has been truncated
snp_branch = service.lookup_snapshot(snapshot_id,
branches_from=branch_name,
branches_count=1,
target_types=['revision'])
snp_branch, _ = process_snapshot_branches(snp_branch['branches'])
if snp_branch:
branches.append(snp_branch[0])
return snp_branch[0]
return None
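A brief usage illustration of the fallback behaviour documented in _get_branch, with hypothetical data (real branch entries also carry 'revision' and 'directory' keys, elided here):

# no entry name ends with 'HEAD', so _get_branch falls back to the 'master' match
# branches = [{'name': 'refs/heads/develop', ...},
#             {'name': 'refs/heads/master', ...}]
# _get_branch(branches, 'HEAD', snapshot_id)  ->  the 'refs/heads/master' entry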
def _get_release(releases, release_name):
"""
Utility function to get a specific release from a releases list.
Returns None if the release can not be found in the list.
"""
filtered_releases = \
[r for r in releases if r['name'] == release_name]
if len(filtered_releases) > 0:
return filtered_releases[0]
else:
return None
def _branch_not_found(branch_type, branch, branches, snapshot_id=None,
origin_info=None, timestamp=None, visit_id=None):
"""
Utility function to raise an exception when a specified branch/release
can not be found.
"""
if branch_type == 'branch':
branch_type = 'Branch'
branch_type_plural = 'branches'
else:
branch_type = 'Release'
branch_type_plural = 'releases'
if snapshot_id and len(branches) == 0:
msg = 'Snapshot with id %s has an empty list' \
' of %s!' % (snapshot_id, branch_type_plural)
elif snapshot_id:
msg = '%s %s for snapshot with id %s' \
' not found!' % (branch_type, branch, snapshot_id)
elif visit_id and len(branches) == 0:
msg = 'Origin with type %s and url %s' \
' for visit with id %s has an empty list' \
' of %s!' % (origin_info['type'], origin_info['url'], visit_id,
branch_type_plural)
elif visit_id:
msg = '%s %s associated to visit with' \
' id %s for origin with type %s and url %s' \
' not found!' % (branch_type, branch, visit_id,
origin_info['type'], origin_info['url'])
elif len(branches) == 0:
msg = 'Origin with type %s and url %s' \
' for visit with timestamp %s has an empty list' \
' of %s!' % (origin_info['type'], origin_info['url'],
timestamp, branch_type_plural)
else:
msg = '%s %s associated to visit with' \
' timestamp %s for origin with type %s' \
' and url %s not found!' % (branch_type, branch, timestamp,
origin_info['type'],
origin_info['url'])
raise NotFoundExc(msg)
def _process_snapshot_request(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None, path=None,
browse_context='directory'):
"""
Utility function to perform common input request processing
for snapshot context views.
"""
visit_id = request.GET.get('visit_id', None)
snapshot_context = get_snapshot_context(snapshot_id, origin_type,
origin_url, timestamp, visit_id)
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
branches = snapshot_context['branches']
releases = snapshot_context['releases']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
if snapshot_context['visit_info']:
timestamp = format_utc_iso_date(snapshot_context['visit_info']['date'],
'%Y-%m-%dT%H:%M:%SZ')
snapshot_context['timestamp'] = \
format_utc_iso_date(snapshot_context['visit_info']['date'])
browse_view_name = 'browse-' + swh_type + '-' + browse_context
root_sha1_git = None
revision_id = request.GET.get('revision', None)
release_name = request.GET.get('release', None)
release_id = None
branch_name = None
snapshot_total_size = sum(snapshot_context['snapshot_size'].values())
if snapshot_total_size and revision_id:
revision = service.lookup_revision(revision_id)
root_sha1_git = revision['directory']
branches.append({'name': revision_id,
'revision': revision_id,
'directory': root_sha1_git,
'url': None})
branch_name = revision_id
query_params['revision'] = revision_id
elif snapshot_total_size and release_name:
release = _get_release(releases, release_name)
try:
root_sha1_git = release['directory']
revision_id = release['target']
release_id = release['id']
query_params['release'] = release_name
except Exception:
_branch_not_found("release", release_name, releases, snapshot_id,
origin_info, timestamp, visit_id)
elif snapshot_total_size:
branch_name = request.GET.get('branch', None)
if branch_name:
query_params['branch'] = branch_name
branch = _get_branch(branches, branch_name or 'HEAD',
snapshot_context['snapshot_id'])
try:
branch_name = branch['name']
revision_id = branch['revision']
root_sha1_git = branch['directory']
except Exception:
_branch_not_found("branch", branch_name, branches, snapshot_id,
origin_info, timestamp, visit_id)
for b in branches:
branch_url_args = dict(url_args)
branch_query_params = dict(query_params)
if 'release' in branch_query_params:
del branch_query_params['release']
branch_query_params['branch'] = b['name']
if path:
b['path'] = path
branch_url_args['path'] = path
b['url'] = reverse(browse_view_name,
url_args=branch_url_args,
query_params=branch_query_params)
for r in releases:
release_url_args = dict(url_args)
release_query_params = dict(query_params)
if 'branch' in release_query_params:
del release_query_params['branch']
release_query_params['release'] = r['name']
if path:
r['path'] = path
release_url_args['path'] = path
r['url'] = reverse(browse_view_name,
url_args=release_url_args,
query_params=release_query_params)
snapshot_context['query_params'] = query_params
snapshot_context['root_sha1_git'] = root_sha1_git
snapshot_context['revision_id'] = revision_id
snapshot_context['branch'] = branch_name
snapshot_context['release'] = release_name
snapshot_context['release_id'] = release_id
return snapshot_context
def browse_snapshot_directory(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None, path=None):
"""
Django view implementation for browsing a directory in a snapshot context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp, path,
browse_context='directory') # noqa
root_sha1_git = snapshot_context['root_sha1_git']
sha1_git = root_sha1_git
if root_sha1_git and path:
dir_info = service.lookup_directory_with_path(root_sha1_git, path)
# some readme files can reference assets reachable from the
# browsed directory, handle that special case in order to
# correctly display them
if dir_info and dir_info['type'] == 'file':
file_raw_url = reverse(
'browse-content-raw',
url_args={'query_string': dir_info['checksums']['sha1']})
return redirect(file_raw_url)
sha1_git = dir_info['target']
dirs = []
files = []
if sha1_git:
dirs, files = get_directory_entries(sha1_git)
except Exception as exc:
return handle_view_exception(request, exc)
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
visit_info = snapshot_context['visit_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
revision_id = snapshot_context['revision_id']
snapshot_id = snapshot_context['snapshot_id']
path_info = gen_path_info(path)
browse_view_name = 'browse-' + swh_type + '-directory'
breadcrumbs = []
if root_sha1_git:
breadcrumbs.append({'name': root_sha1_git[:7],
'url': reverse(browse_view_name,
url_args=url_args,
query_params=query_params)})
for pi in path_info:
bc_url_args = dict(url_args)
bc_url_args['path'] = pi['path']
breadcrumbs.append({'name': pi['name'],
'url': reverse(browse_view_name,
url_args=bc_url_args,
query_params=query_params)})
path = '' if path is None else (path + '/')
for d in dirs:
if d['type'] == 'rev':
d['url'] = reverse('browse-revision',
url_args={'sha1_git': d['target']})
else:
bc_url_args = dict(url_args)
bc_url_args['path'] = path + d['name']
d['url'] = reverse(browse_view_name,
url_args=bc_url_args,
query_params=query_params)
sum_file_sizes = 0
readmes = {}
browse_view_name = 'browse-' + swh_type + '-content'
for f in files:
bc_url_args = dict(url_args)
bc_url_args['path'] = path + f['name']
f['url'] = reverse(browse_view_name,
url_args=bc_url_args,
query_params=query_params)
if f['length'] is not None:
sum_file_sizes += f['length']
f['length'] = filesizeformat(f['length'])
if f['name'].lower().startswith('readme'):
readmes[f['name']] = f['checksums']['sha1']
readme_name, readme_url, readme_html = get_readme_to_display(readmes)
browse_view_name = 'browse-' + swh_type + '-log'
history_url = None
if snapshot_id != _empty_snapshot_id:
history_url = reverse(browse_view_name,
url_args=url_args,
query_params=query_params)
nb_files = None
nb_dirs = None
sum_file_sizes = None
dir_path = None
if root_sha1_git:
nb_files = len(files)
nb_dirs = len(dirs)
sum_file_sizes = filesizeformat(sum_file_sizes)
dir_path = '/' + path
browse_dir_link = \
gen_directory_link(sha1_git, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
browse_rev_link = \
gen_revision_link(revision_id,
snapshot_context=snapshot_context,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
dir_metadata = {'id': sha1_git,
'context-independent directory': browse_dir_link,
'number of regular files': nb_files,
'number of subdirectories': nb_dirs,
'sum of regular file sizes': sum_file_sizes,
'path': dir_path,
'revision id': revision_id,
'revision': browse_rev_link,
'snapshot id': snapshot_id}
if origin_info:
dir_metadata['origin id'] = origin_info['id']
dir_metadata['origin type'] = origin_info['type']
dir_metadata['origin url'] = origin_info['url']
dir_metadata['origin visit date'] = format_utc_iso_date(visit_info['date']) # noqa
dir_metadata['origin visit id'] = visit_info['visit']
snapshot_context_url = reverse('browse-snapshot-directory',
url_args={'snapshot_id': snapshot_id},
query_params=request.GET)
browse_snapshot_link = \
gen_link(snapshot_context_url, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
dir_metadata['snapshot context'] = browse_snapshot_link
vault_cooking = {
'directory_context': True,
'directory_id': sha1_git,
'revision_context': True,
'revision_id': revision_id
}
swh_objects = [{'type': 'directory',
'id': sha1_git},
{'type': 'revision',
'id': revision_id},
{'type': 'snapshot',
'id': snapshot_id}]
release_id = snapshot_context['release_id']
if release_id:
swh_objects.append({'type': 'release',
'id': release_id})
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
dir_path = '/'.join([bc['name'] for bc in breadcrumbs]) + '/'
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_info:
context_found = 'origin: %s' % origin_info['url']
heading = 'Directory - %s - %s - %s' %\
(dir_path, snapshot_context['branch'], context_found)
return render(request, 'browse/directory.html',
{'heading': heading,
'swh_object_name': 'Directory',
- 'swh_object_icon': 'fa fa-folder',
'swh_object_metadata': dir_metadata,
'dirs': dirs,
'files': files,
'breadcrumbs': breadcrumbs if root_sha1_git else [],
'top_right_link': {
'url': history_url,
- 'icon': 'fa fa-history',
+ 'icon': swh_object_icons['revisions history'],
'text': 'History'
},
'readme_name': readme_name,
'readme_url': readme_url,
'readme_html': readme_html,
'snapshot_context': snapshot_context,
'vault_cooking': vault_cooking,
'show_actions_menu': True,
'swh_ids': swh_ids})
def browse_snapshot_content(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None, path=None):
"""
Django view implementation for browsing a content in a snapshot context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp, path,
browse_context='content')
root_sha1_git = snapshot_context['root_sha1_git']
sha1_git = None
query_string = None
content_data = None
if root_sha1_git:
content_info = service.lookup_directory_with_path(root_sha1_git,
path)
sha1_git = content_info['target']
query_string = 'sha1_git:' + sha1_git
content_data = request_content(query_string,
raise_if_unavailable=False)
except Exception as exc:
return handle_view_exception(request, exc)
swh_type = snapshot_context['swh_type']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
revision_id = snapshot_context['revision_id']
origin_info = snapshot_context['origin_info']
visit_info = snapshot_context['visit_info']
snapshot_id = snapshot_context['snapshot_id']
content = None
language = None
mimetype = None
if content_data and content_data['raw_data'] is not None:
content_display_data = prepare_content_for_display(
content_data['raw_data'], content_data['mimetype'], path)
content = content_display_data['content_data']
language = content_display_data['language']
mimetype = content_display_data['mimetype']
filename = None
path_info = None
browse_view_name = 'browse-' + swh_type + '-directory'
breadcrumbs = []
split_path = path.split('/')
filename = split_path[-1]
path_info = gen_path_info(path[:-len(filename)])
if root_sha1_git:
breadcrumbs.append({'name': root_sha1_git[:7],
'url': reverse(browse_view_name,
url_args=url_args,
query_params=query_params)})
for pi in path_info:
bc_url_args = dict(url_args)
bc_url_args['path'] = pi['path']
breadcrumbs.append({'name': pi['name'],
'url': reverse(browse_view_name,
url_args=bc_url_args,
query_params=query_params)})
breadcrumbs.append({'name': filename,
'url': None})
browse_content_link = \
gen_content_link(sha1_git, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
content_raw_url = None
if query_string:
content_raw_url = reverse('browse-content-raw',
url_args={'query_string': query_string},
query_params={'filename': filename})
browse_rev_link = \
gen_revision_link(revision_id,
snapshot_context=snapshot_context,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
content_metadata = {
'context-independent content': browse_content_link,
'path': None,
'filename': None,
'revision id': revision_id,
'revision': browse_rev_link,
'snapshot id': snapshot_id
}
cnt_sha1_git = None
content_size = None
error_code = 200
error_description = ''
error_message = ''
if content_data:
content_metadata['sha1 checksum'] = \
content_data['checksums']['sha1']
content_metadata['sha1_git checksum'] = \
content_data['checksums']['sha1_git']
content_metadata['sha256 checksum'] = \
content_data['checksums']['sha256']
content_metadata['blake2s256 checksum'] = \
content_data['checksums']['blake2s256']
content_metadata['mime type'] = content_data['mimetype']
content_metadata['encoding'] = content_data['encoding']
content_metadata['size'] = filesizeformat(content_data['length'])
content_metadata['language'] = content_data['language']
content_metadata['licenses'] = content_data['licenses']
content_metadata['path'] = '/' + path[:-len(filename)]
content_metadata['filename'] = filename
cnt_sha1_git = content_data['checksums']['sha1_git']
content_size = content_data['length']
error_code = content_data['error_code']
error_message = content_data['error_message']
error_description = content_data['error_description']
if origin_info:
content_metadata['origin id'] = origin_info['id']
content_metadata['origin type'] = origin_info['type']
content_metadata['origin url'] = origin_info['url']
content_metadata['origin visit date'] = format_utc_iso_date(visit_info['date']) # noqa
content_metadata['origin visit id'] = visit_info['visit']
browse_snapshot_url = reverse('browse-snapshot-content',
url_args={'snapshot_id': snapshot_id,
'path': path},
query_params=request.GET)
browse_snapshot_link = \
gen_link(browse_snapshot_url, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
content_metadata['snapshot context'] = browse_snapshot_link
swh_objects = [{'type': 'content',
'id': cnt_sha1_git},
{'type': 'revision',
'id': revision_id},
{'type': 'snapshot',
'id': snapshot_id}]
release_id = snapshot_context['release_id']
if release_id:
swh_objects.append({'type': 'release',
'id': release_id})
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
content_path = '/'.join([bc['name'] for bc in breadcrumbs])
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_info:
context_found = 'origin: %s' % origin_info['url']
heading = 'Content - %s - %s - %s' %\
(content_path, snapshot_context['branch'], context_found)
return render(request, 'browse/content.html',
{'heading': heading,
'swh_object_name': 'Content',
- 'swh_object_icon': 'fa fa-file-text',
'swh_object_metadata': content_metadata,
'content': content,
'content_size': content_size,
'max_content_size': content_display_max_size,
'mimetype': mimetype,
'language': language,
'breadcrumbs': breadcrumbs if root_sha1_git else [],
'top_right_link': {
'url': content_raw_url,
- 'icon': 'fa fa-file-text',
+ 'icon': swh_object_icons['content'],
'text': 'Raw File'
},
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': swh_ids,
'error_code': error_code,
'error_message': error_message,
'error_description': error_description},
status=error_code)
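# Editor's note, a hedged sketch rather than part of the patch: the icon lookup
# swapped in above is meant to be equivalent to the literal it replaces, given
# the swh_object_icons mapping added to swh/web/common/utils.py further down in
# this diff:
#
#   from swh.web.common.utils import swh_object_icons
#   assert swh_object_icons['content'] == 'fa fa-file-text'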
PER_PAGE = 100
def browse_snapshot_log(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None):
"""
Django view implementation for browsing a revision history in a
snapshot context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp, browse_context='log') # noqa
revision_id = snapshot_context['revision_id']
current_rev = revision_id
per_page = int(request.GET.get('per_page', PER_PAGE))
revs_breadcrumb = request.GET.get('revs_breadcrumb', None)
if revs_breadcrumb:
current_rev = revs_breadcrumb.split('/')[-1]
revision_log = []
if current_rev:
revision_log = list(service.lookup_revision_log(current_rev,
limit=per_page+1))
except Exception as exc:
return handle_view_exception(request, exc)
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
visit_info = snapshot_context['visit_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
snapshot_id = snapshot_context['snapshot_id']
query_params['per_page'] = per_page
revision_log_data = []
next_log_url = ''
prev_log_url = ''
if revision_log:
revision_log_display_data = prepare_revision_log_for_display(
revision_log, per_page, revs_breadcrumb, snapshot_context)
browse_view_name = 'browse-' + swh_type + '-log'
prev_rev = revision_log_display_data['prev_rev']
prev_revs_breadcrumb = revision_log_display_data['prev_revs_breadcrumb'] # noqa
prev_log_url = None
query_params['revs_breadcrumb'] = prev_revs_breadcrumb
if prev_rev:
prev_log_url = \
reverse(browse_view_name,
url_args=url_args,
query_params=query_params)
next_rev = revision_log_display_data['next_rev']
next_revs_breadcrumb = revision_log_display_data['next_revs_breadcrumb'] # noqa
next_log_url = None
query_params['revs_breadcrumb'] = next_revs_breadcrumb
if next_rev:
next_log_url = \
reverse(browse_view_name,
url_args=url_args,
query_params=query_params)
revision_log_data = revision_log_display_data['revision_log_data']
for i, log in enumerate(revision_log_data):
params = {
'revision': revision_log[i]['id'],
}
if 'visit_id' in query_params:
params['visit_id'] = query_params['visit_id']
log['directory'] = gen_snapshot_directory_link(
snapshot_context, revision_log[i]['id'],
            link_text='Browse files',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
browse_log_link = \
gen_revision_log_link(revision_id, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_metadata = {
'context-independent revision history': browse_log_link,
'snapshot id': snapshot_id
}
if origin_info:
revision_metadata['origin id'] = origin_info['id']
revision_metadata['origin type'] = origin_info['type']
revision_metadata['origin url'] = origin_info['url']
revision_metadata['origin visit date'] = format_utc_iso_date(visit_info['date']) # noqa
revision_metadata['origin visit id'] = visit_info['visit']
browse_snapshot_url = reverse('browse-snapshot-log',
url_args={'snapshot_id': snapshot_id},
query_params=request.GET)
browse_snapshot_link = \
gen_link(browse_snapshot_url, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'})
revision_metadata['snapshot context'] = browse_snapshot_link
swh_objects = [{'type': 'revision',
'id': revision_id},
{'type': 'snapshot',
'id': snapshot_id}]
release_id = snapshot_context['release_id']
if release_id:
swh_objects.append({'type': 'release',
'id': release_id})
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_info:
context_found = 'origin: %s' % origin_info['url']
heading = 'Revision history - %s - %s' %\
(snapshot_context['branch'], context_found)
return render(request, 'browse/revision-log.html',
{'heading': heading,
'swh_object_name': 'Revisions history',
- 'swh_object_icon': 'fa fa-history',
'swh_object_metadata': revision_metadata,
'revision_log': revision_log_data,
'next_log_url': next_log_url,
'prev_log_url': prev_log_url,
'breadcrumbs': None,
'top_right_link': None,
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': swh_ids})
def browse_snapshot_branches(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None):
"""
Django view implementation for browsing a list of branches in a snapshot
context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp)
branches_bc = request.GET.get('branches_breadcrumbs', '')
branches_bc = \
branches_bc.split(',') if branches_bc else []
branches_from = branches_bc[-1] if branches_bc else ''
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
browse_view_name = 'browse-' + swh_type + '-directory'
displayed_branches = \
service.lookup_snapshot(snapshot_context['snapshot_id'],
branches_from, PER_PAGE+1,
target_types=['revision'])['branches']
except Exception as exc:
return handle_view_exception(request, exc)
displayed_branches, _ = process_snapshot_branches(displayed_branches)
for branch in displayed_branches:
if snapshot_id:
revision_url = reverse('browse-revision',
url_args={'sha1_git': branch['revision']},
query_params={'snapshot_id': snapshot_id})
else:
revision_url = reverse('browse-revision',
url_args={'sha1_git': branch['revision']},
query_params={'origin_type': origin_type,
'origin': origin_info['url']})
query_params['branch'] = branch['name']
directory_url = reverse(browse_view_name,
url_args=url_args,
query_params=query_params)
del query_params['branch']
branch['revision_url'] = revision_url
branch['directory_url'] = directory_url
browse_view_name = 'browse-' + swh_type + '-branches'
prev_branches_url = None
next_branches_url = None
if branches_bc:
query_params_prev = dict(query_params)
query_params_prev['branches_breadcrumbs'] = \
','.join(branches_bc[:-1])
prev_branches_url = reverse(browse_view_name, url_args=url_args,
query_params=query_params_prev)
elif branches_from:
prev_branches_url = reverse(browse_view_name, url_args=url_args,
query_params=query_params)
if len(displayed_branches) > PER_PAGE:
query_params_next = dict(query_params)
next_branch = displayed_branches[-1]['name']
del displayed_branches[-1]
branches_bc.append(next_branch)
query_params_next['branches_breadcrumbs'] = \
','.join(branches_bc)
next_branches_url = reverse(browse_view_name, url_args=url_args,
query_params=query_params_next)
heading = 'Branches - '
if origin_info:
heading += 'origin: %s' % origin_info['url']
else:
heading += 'snapshot: %s' % snapshot_id
return render(request, 'browse/branches.html',
{'heading': heading,
'swh_object_name': 'Branches',
- 'swh_object_icon': 'fa fa-code-fork',
'swh_object_metadata': {},
'top_right_link': None,
'displayed_branches': displayed_branches,
'prev_branches_url': prev_branches_url,
'next_branches_url': next_branches_url,
'snapshot_context': snapshot_context})
def browse_snapshot_releases(request, snapshot_id=None, origin_type=None,
origin_url=None, timestamp=None):
"""
Django view implementation for browsing a list of releases in a snapshot
context.
"""
try:
snapshot_context = _process_snapshot_request(request, snapshot_id,
origin_type, origin_url,
timestamp)
rel_bc = request.GET.get('releases_breadcrumbs', '')
rel_bc = \
rel_bc.split(',') if rel_bc else []
rel_from = rel_bc[-1] if rel_bc else ''
swh_type = snapshot_context['swh_type']
origin_info = snapshot_context['origin_info']
url_args = snapshot_context['url_args']
query_params = snapshot_context['query_params']
displayed_releases = \
service.lookup_snapshot(snapshot_context['snapshot_id'],
rel_from, PER_PAGE+1,
target_types=['release'])['branches']
except Exception as exc:
return handle_view_exception(request, exc)
_, displayed_releases = process_snapshot_branches(displayed_releases)
for release in displayed_releases:
if snapshot_id:
query_params_tgt = {'snapshot_id': snapshot_id}
else:
query_params_tgt = {'origin': origin_info['url']}
release_url = reverse('browse-release',
url_args={'sha1_git': release['id']},
query_params=query_params_tgt)
- target_icon = ''
target_url = ''
if release['target_type'] == 'revision':
- target_icon = "octicon octicon-git-commit"
target_url = reverse('browse-revision',
url_args={'sha1_git': release['target']},
query_params=query_params_tgt)
elif release['target_type'] == 'directory':
- target_icon = "fa fa-folder"
target_url = reverse('browse-directory',
url_args={'sha1_git': release['target']},
query_params=query_params_tgt)
elif release['target_type'] == 'content':
- target_icon = "fa fa-file-text"
target_url = reverse('browse-content',
url_args={'sha1_git': release['target']},
query_params=query_params_tgt)
elif release['target_type'] == 'release':
- target_icon = "fa fa-tag"
target_url = reverse('browse-release',
url_args={'sha1_git': release['target']},
query_params=query_params_tgt)
release['release_url'] = release_url
- release['target_icon'] = target_icon
release['target_url'] = target_url
browse_view_name = 'browse-' + swh_type + '-releases'
prev_releases_url = None
next_releases_url = None
if rel_bc:
query_params_prev = dict(query_params)
query_params_prev['releases_breadcrumbs'] = \
','.join(rel_bc[:-1])
prev_releases_url = reverse(browse_view_name, url_args=url_args,
query_params=query_params_prev)
elif rel_from:
prev_releases_url = reverse(browse_view_name, url_args=url_args,
query_params=query_params)
if len(displayed_releases) > PER_PAGE:
query_params_next = dict(query_params)
next_rel = displayed_releases[-1]['branch_name']
del displayed_releases[-1]
rel_bc.append(next_rel)
query_params_next['releases_breadcrumbs'] = \
','.join(rel_bc)
next_releases_url = reverse(browse_view_name, url_args=url_args,
query_params=query_params_next)
heading = 'Releases - '
if origin_info:
heading += 'origin: %s' % origin_info['url']
else:
heading += 'snapshot: %s' % snapshot_id
return render(request, 'browse/releases.html',
{'heading': heading,
'top_panel_visible': False,
'top_panel_collapsible': False,
'swh_object_name': 'Releases',
- 'swh_object_icon': 'fa fa-tag',
'swh_object_metadata': {},
'top_right_link': None,
'displayed_releases': displayed_releases,
'prev_releases_url': prev_releases_url,
'next_releases_url': next_releases_url,
'snapshot_context': snapshot_context,
'vault_cooking': None,
'show_actions_menu': False})
diff --git a/swh/web/common/swh_templatetags.py b/swh/web/common/swh_templatetags.py
index 1e0000699..3e652d0cd 100644
--- a/swh/web/common/swh_templatetags.py
+++ b/swh/web/common/swh_templatetags.py
@@ -1,138 +1,152 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import re
from django import template
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.safestring import mark_safe
from docutils.core import publish_parts
from docutils.writers.html4css1 import Writer, HTMLTranslator
from inspect import cleandoc
register = template.Library()
class NoHeaderHTMLTranslator(HTMLTranslator):
"""
Docutils translator subclass to customize the generation of HTML
from reST-formatted docstrings
"""
def __init__(self, document):
super().__init__(document)
self.body_prefix = []
self.body_suffix = []
def visit_bullet_list(self, node):
self.context.append((self.compact_simple, self.compact_p))
self.compact_p = None
self.compact_simple = self.is_compactable(node)
self.body.append(self.starttag(node, 'ul', CLASS='docstring'))
DOCSTRING_WRITER = Writer()
DOCSTRING_WRITER.translator_class = NoHeaderHTMLTranslator
@register.filter
def safe_docstring_display(docstring):
"""
Utility function to htmlize reST-formatted documentation in browsable
api.
"""
docstring = cleandoc(docstring)
return publish_parts(docstring, writer=DOCSTRING_WRITER)['html_body']
@register.filter
def urlize_links_and_mails(text):
"""Utility function for decorating api links in browsable api.
Args:
        text: text whose content matching links should be transformed into
            contextual API or Browse html links.
    Returns:
The text transformed if any link is found.
The text as is otherwise.
"""
if 'href="' not in text:
        text = re.sub(r'(/api/[^"<]*|/browse/[^"<]*|http.*$)',
                      r'<a href="\1">\1</a>',
                      text)
        return re.sub(r'([^ <>"]+@[^ <>"]+)',
                      r'<a href="mailto:\1">\1</a>',
                      text)
else:
return text
@register.filter
def urlize_header_links(text):
"""Utility function for decorating headers links in browsable api.
Args
text: Text whose content contains Link header value
Returns:
The text transformed with html link if any link is found.
The text as is otherwise.
"""
links = text.split(',')
ret = ''
for i, link in enumerate(links):
        ret += re.sub(r'<(/api/.*|/browse/.*)>', r'<<a href="\1">\1</a>>',
link)
# add one link per line and align them
if i != len(links) - 1:
ret += '\n '
return ret
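# Hedged usage sketch (the header value below is made up): a pagination Link
# header coming from the api, e.g.
#
#   urlize_header_links('</api/1/origin/visits/?last_visit=10>; rel="next"')
#
# comes back with the </api/...> target turned into an html anchor, still
# followed by its rel attribute, one entry per line.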
@register.filter
def jsonify(obj):
"""Utility function for converting a django template variable
to JSON in order to use it in script tags.
    Args:
obj: Any django template context variable
Returns:
JSON representation of the variable.
"""
return mark_safe(json.dumps(obj, cls=DjangoJSONEncoder))
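# Hedged usage sketch (the template variable name is an assumption): the filter
# is meant for passing context data to javascript, e.g.
#
#   <script>
#     const visits = {{ origin_visits|jsonify }};
#   </script>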
@register.filter
def sub(value, arg):
"""Django template filter for subtracting two numbers
Args:
value (int/float): the value to subtract from
        arg (int/float): the value to subtract
Returns:
int/float: The subtraction result
"""
return value - arg
@register.filter
def mul(value, arg):
"""Django template filter for multiplying two numbers
Args:
        value (int/float): the value to multiply
        arg (int/float): the value to multiply by
Returns:
int/float: The multiplication result
"""
return value * arg
+
+
+@register.filter
+def key_value(dict, key):
+ """Django template filter to get a value in a dictionary.
+
+ Args:
+ dict (dict): a dictionary
+        key (str): the key whose value should be looked up
+
+ Returns:
+ The requested value in the dictionary
+ """
+ return dict[key]
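# Hedged usage sketch (not part of the patch): the filter performs a plain
# dictionary lookup from a template, which is what the icon-related template
# changes below rely on, e.g.
#
#   {% load swh_templatetags %}
#   <i class="{{ swh_object_icons|key_value:'revision' }}"></i>
#
# which resolves to the 'octicon octicon-git-commit' classes defined in
# swh/web/common/utils.py.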
diff --git a/swh/web/common/utils.py b/swh/web/common/utils.py
index 5f77b617e..38343bab0 100644
--- a/swh/web/common/utils.py
+++ b/swh/web/common/utils.py
@@ -1,390 +1,412 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import docutils.parsers.rst
import docutils.utils
import re
import requests
from datetime import datetime, timezone
from dateutil import parser as date_parser
from dateutil import tz
from django.core.cache import cache
from django.urls import reverse as django_reverse
from django.http import QueryDict
from swh.model.exceptions import ValidationError
from swh.model.identifiers import (
persistent_identifier, parse_persistent_identifier,
CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT
)
from swh.web.common import service
from swh.web.common.exc import BadInputExc
from swh.web.config import get_config
+swh_object_icons = {
+ 'branch': 'fa fa-code-fork',
+ 'branches': 'fa fa-code-fork',
+ 'content': 'fa fa-file-text',
+ 'directory': 'fa fa-folder',
+ 'person': 'fa fa-user',
+ 'revisions history': 'fa fa-history',
+ 'release': 'fa fa-tag',
+ 'releases': 'fa fa-tag',
+ 'revision': 'octicon octicon-git-commit',
+ 'snapshot': 'fa fa-camera',
+ 'visits': 'fa fa-calendar',
+}
+
def reverse(viewname, url_args=None, query_params=None,
current_app=None, urlconf=None):
"""An override of django reverse function supporting query parameters.
Args:
viewname (str): the name of the django view from which to compute a url
url_args (dict): dictionary of url arguments indexed by their names
query_params (dict): dictionary of query parameters to append to the
reversed url
        current_app (str): the name of the django app tied to the view
urlconf (str): url configuration module
Returns:
str: the url of the requested view with processed arguments and
query parameters
"""
if url_args:
url_args = {k: v for k, v in url_args.items() if v is not None}
url = django_reverse(viewname, urlconf=urlconf, kwargs=url_args,
current_app=current_app)
if query_params:
query_params = {k: v for k, v in query_params.items() if v}
if query_params and len(query_params) > 0:
query_dict = QueryDict('', mutable=True)
for k in sorted(query_params.keys()):
query_dict[k] = query_params[k]
url += ('?' + query_dict.urlencode(safe='/;:'))
return url
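# A minimal illustration of the override above, assuming the standard swh-web
# url configuration (the resulting path is therefore an assumption, and the
# hash value is made up):
#
#   reverse('browse-directory',
#           url_args={'sha1_git': 'b80a9e02f42e0b1cbc0e89f91f4e617b9c7b65b8'},
#           query_params={'path': 'src', 'origin': None})
#
# None/empty query parameters are dropped, the remaining keys are sorted and
# urlencoded (keeping '/', ';' and ':' unescaped), so the call above would
# yield something like
# '/browse/directory/b80a9e02f42e0b1cbc0e89f91f4e617b9c7b65b8/?path=src'.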
def datetime_to_utc(date):
"""Returns datetime in UTC without timezone info
Args:
date (datetime.datetime): input datetime with timezone info
Returns:
datetime.datetime: datetime in UTC without timezone info
"""
if date.tzinfo:
return date.astimezone(tz.gettz('UTC')).replace(tzinfo=timezone.utc)
else:
return date
def parse_timestamp(timestamp):
"""Given a time or timestamp (as string), parse the result as UTC datetime.
Returns:
datetime.datetime: a timezone-aware datetime representing the
parsed value or None if the parsing fails.
Samples:
- 2016-01-12
- 2016-01-12T09:19:12+0100
- Today is January 1, 2047 at 8:21:00AM
- 1452591542
"""
if not timestamp:
return None
try:
date = date_parser.parse(timestamp, ignoretz=False, fuzzy=True)
return datetime_to_utc(date)
except Exception:
try:
return datetime.utcfromtimestamp(float(timestamp)).replace(
tzinfo=timezone.utc)
except (ValueError, OverflowError) as e:
raise BadInputExc(e)
def shorten_path(path):
"""Shorten the given path: for each hash present, only return the first
8 characters followed by an ellipsis"""
sha256_re = r'([0-9a-f]{8})[0-9a-z]{56}'
sha1_re = r'([0-9a-f]{8})[0-9a-f]{32}'
ret = re.sub(sha256_re, r'\1...', path)
return re.sub(sha1_re, r'\1...', ret)
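# Doctest-style illustration (the hash below is made up):
#
#   >>> shorten_path('/dir/b80a9e02f42e0b1cbc0e89f91f4e617b9c7b65b8/file')
#   '/dir/b80a9e02.../file'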
def format_utc_iso_date(iso_date, fmt='%d %B %Y, %H:%M UTC'):
"""Turns a string reprensation of an ISO 8601 date string
to UTC and format it into a more human readable one.
For instance, from the following input
string: '2017-05-04T13:27:13+02:00' the following one
is returned: '04 May 2017, 11:27 UTC'.
Custom format string may also be provided
as parameter
Args:
iso_date (str): a string representation of an ISO 8601 date
fmt (str): optional date formatting string
Returns:
str: a formatted string representation of the input iso date
"""
if not iso_date:
return iso_date
date = parse_timestamp(iso_date)
return date.strftime(fmt)
def gen_path_info(path):
"""Function to generate path data navigation for use
with a breadcrumb in the swh web ui.
For instance, from a path /folder1/folder2/folder3,
it returns the following list::
[{'name': 'folder1', 'path': 'folder1'},
{'name': 'folder2', 'path': 'folder1/folder2'},
{'name': 'folder3', 'path': 'folder1/folder2/folder3'}]
Args:
path: a filesystem path
Returns:
list: a list of path data for navigation as illustrated above.
"""
path_info = []
if path:
sub_paths = path.strip('/').split('/')
path_from_root = ''
for p in sub_paths:
path_from_root += '/' + p
path_info.append({'name': p,
'path': path_from_root.strip('/')})
return path_info
def get_origin_visits(origin_info):
"""Function that returns the list of visits for a swh origin.
    That list is put in cache in order to speed up navigation
in the swh web browse ui.
Args:
        origin_info (dict): a dict describing the origin to fetch visits from
Returns:
list: A list of dict describing the origin visits with the
following keys:
* **date**: UTC visit date in ISO format,
* **origin**: the origin id
* **status**: the visit status, either *full* or *partial*
* **visit**: the visit id
Raises:
NotFoundExc: if the origin is not found
"""
cache_entry_id = 'origin_%s_visits' % origin_info['id']
cache_entry = cache.get(cache_entry_id)
last_snapshot = service.lookup_latest_origin_snapshot(origin_info['id'])
if cache_entry and \
(not last_snapshot or
last_snapshot['id'] == cache_entry[-1]['snapshot']):
return cache_entry
origin_visits = []
per_page = service.MAX_LIMIT
last_visit = None
while 1:
visits = list(service.lookup_origin_visits(origin_info['id'],
last_visit=last_visit,
per_page=per_page))
origin_visits += visits
if len(visits) < per_page:
break
else:
if not last_visit:
last_visit = per_page
else:
last_visit += per_page
def _visit_sort_key(visit):
ts = parse_timestamp(visit['date']).timestamp()
return ts + (float(visit['visit']) / 10e3)
for v in origin_visits:
if 'metadata' in v:
del v['metadata']
origin_visits = [dict(t) for t in set([tuple(d.items())
for d in origin_visits])]
origin_visits = sorted(origin_visits, key=lambda v: _visit_sort_key(v))
cache.set(cache_entry_id, origin_visits)
return origin_visits
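# Note on the sort key above (values are illustrative): the visit id only
# contributes a sub-second offset, so visits sharing the same parsed date stay
# ordered by their id:
#
#   1500000000.0 + 3 / 10e3   # -> 1500000000.0003
#   1500000000.0 + 12 / 10e3  # -> 1500000000.0012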
def get_swh_persistent_id(object_type, object_id, scheme_version=1):
"""
Returns the persistent identifier for a swh object based on:
* the object type
* the object id
* the swh identifiers scheme version
Args:
object_type (str): the swh object type
(content/directory/release/revision/snapshot)
object_id (str): the swh object id (hexadecimal representation
of its hash value)
scheme_version (int): the scheme version of the swh
persistent identifiers
Returns:
str: the swh object persistent identifier
Raises:
        BadInputExc: if the provided parameters do not allow generating
            a valid identifier
"""
try:
swh_id = persistent_identifier(object_type, object_id, scheme_version)
except ValidationError as e:
raise BadInputExc('Invalid object (%s) for swh persistent id. %s' %
(object_id, e))
else:
return swh_id
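# Illustration of the expected identifier format (the hash is an example value):
#
#   get_swh_persistent_id('content', '94a9ed024d3859793618152ea559a168bbcbb5e2')
#   # -> 'swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2'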
def resolve_swh_persistent_id(swh_id, query_params=None):
"""
    Try to resolve a SWH persistent id into a URL for
browsing the pointed object.
Args:
swh_id (str): a SWH persistent identifier
query_params (django.http.QueryDict): optional dict filled with
query parameters to append to the browse url
Returns:
dict: a dict with the following keys:
* **swh_id_parsed (swh.model.identifiers.PersistentId)**: the parsed identifier
* **browse_url (str)**: the url for browsing the pointed object
Raises:
BadInputExc: if the provided identifier can not be parsed
""" # noqa
try:
swh_id_parsed = parse_persistent_identifier(swh_id)
object_type = swh_id_parsed.object_type
object_id = swh_id_parsed.object_id
browse_url = None
query_dict = QueryDict('', mutable=True)
if query_params and len(query_params) > 0:
for k in sorted(query_params.keys()):
query_dict[k] = query_params[k]
if 'origin' in swh_id_parsed.metadata:
query_dict['origin'] = swh_id_parsed.metadata['origin']
if object_type == CONTENT:
query_string = 'sha1_git:' + object_id
fragment = ''
if 'lines' in swh_id_parsed.metadata:
lines = swh_id_parsed.metadata['lines'].split('-')
fragment += '#L' + lines[0]
if len(lines) > 1:
fragment += '-L' + lines[1]
browse_url = reverse('browse-content',
url_args={'query_string': query_string},
query_params=query_dict) + fragment
elif object_type == DIRECTORY:
browse_url = reverse('browse-directory',
url_args={'sha1_git': object_id},
query_params=query_dict)
elif object_type == RELEASE:
browse_url = reverse('browse-release',
url_args={'sha1_git': object_id},
query_params=query_dict)
elif object_type == REVISION:
browse_url = reverse('browse-revision',
url_args={'sha1_git': object_id},
query_params=query_dict)
elif object_type == SNAPSHOT:
browse_url = reverse('browse-snapshot',
url_args={'snapshot_id': object_id},
query_params=query_dict)
except ValidationError as ve:
raise BadInputExc('Error when parsing identifier. %s' %
' '.join(ve.messages))
else:
return {'swh_id_parsed': swh_id_parsed,
'browse_url': browse_url}
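# Hedged round-trip sketch (identifier and resulting path are illustrative and
# depend on the url configuration):
#
#   resolved = resolve_swh_persistent_id(
#       'swh:1:rev:94a9ed024d3859793618152ea559a168bbcbb5e2')
#   resolved['browse_url']
#   # -> something like '/browse/revision/94a9ed024d3859793618152ea559a168bbcbb5e2/'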
def parse_rst(text, report_level=2):
"""
Parse a reStructuredText string with docutils.
Args:
text (str): string with reStructuredText markups in it
report_level (int): level of docutils report messages to print
(1 info 2 warning 3 error 4 severe 5 none)
Returns:
docutils.nodes.document: a parsed docutils document
"""
parser = docutils.parsers.rst.Parser()
components = (docutils.parsers.rst.Parser,)
settings = docutils.frontend.OptionParser(
components=components).get_default_values()
settings.report_level = report_level
document = docutils.utils.new_document('rst-doc', settings=settings)
parser.parse(text, document)
return document
def get_client_ip(request):
"""
Return the client IP address from an incoming HTTP request.
Args:
request (django.http.HttpRequest): the incoming HTTP request
Returns:
str: The client IP address
"""
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
def is_recaptcha_valid(request, recaptcha_response):
"""
Verify if the response for Google reCAPTCHA is valid.
Args:
request (django.http.HttpRequest): the incoming HTTP request
recaptcha_response (str): the reCAPTCHA response
Returns:
        bool: Whether the reCAPTCHA response is valid or not
"""
config = get_config()
return requests.post(
config['grecaptcha']['validation_url'],
data={
'secret': config['grecaptcha']['private_key'],
'response': recaptcha_response,
'remoteip': get_client_ip(request)
},
verify=True
).json().get("success", False)
+
+
+def context_processor(request):
+ """
+ Django context processor used to inject variables
+ in all swh-web templates.
+ """
+ return {'swh_object_icons': swh_object_icons}
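# Editor's sketch (not part of the patch): once the processor is registered in
# the TEMPLATES setting below, any template can look icons up through the
# key_value filter added in swh_templatetags.py, without the views passing the
# mapping explicitly, e.g.
#
#   <i class="{{ swh_object_icons|key_value:'snapshot' }}" aria-hidden="true"></i>
#
# which resolves to 'fa fa-camera' from the swh_object_icons mapping above.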
diff --git a/swh/web/settings/common.py b/swh/web/settings/common.py
index 4aff299fb..b941fbe88 100644
--- a/swh/web/settings/common.py
+++ b/swh/web/settings/common.py
@@ -1,233 +1,234 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
"""
Django common settings for swh-web.
"""
import os
from swh.web.config import get_config
swh_web_config = get_config()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = swh_web_config['secret_key']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = swh_web_config['debug']
DEBUG_PROPAGATE_EXCEPTIONS = swh_web_config['debug']
ALLOWED_HOSTS = ['127.0.0.1', 'localhost'] + swh_web_config['allowed_hosts']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'swh.web.common',
'swh.web.api',
'swh.web.browse',
'webpack_loader',
'django_js_reverse'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'swh.web.common.middlewares.ThrottlingHeadersMiddleware'
]
# Compress all assets (static ones and dynamically generated html)
# served by django in a local development environment context.
# In a production environment, assets compression will be directly
# handled by web servers like apache or nginx.
if swh_web_config['serve_assets']:
MIDDLEWARE.insert(0, 'django.middleware.gzip.GZipMiddleware')
ROOT_URLCONF = 'swh.web.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_DIR, "../templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
+ 'swh.web.common.utils.context_processor'
],
'libraries': {
'swh_templatetags': 'swh.web.common.swh_templatetags',
},
},
},
]
WSGI_APPLICATION = 'swh.web.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(PROJECT_DIR, "../static")
]
INTERNAL_IPS = ['127.0.0.1']
throttle_rates = {}
http_requests = ['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'PATCH']
throttling = swh_web_config['throttling']
for limiter_scope, limiter_conf in throttling['scopes'].items():
if 'default' in limiter_conf['limiter_rate']:
throttle_rates[limiter_scope] = limiter_conf['limiter_rate']['default']
# for backward compatibility
else:
throttle_rates[limiter_scope] = limiter_conf['limiter_rate']
# register sub scopes specific for HTTP request types
for http_request in http_requests:
if http_request in limiter_conf['limiter_rate']:
throttle_rates[limiter_scope + '_' + http_request.lower()] = \
limiter_conf['limiter_rate'][http_request]
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'swh.web.api.renderers.YAMLRenderer',
'rest_framework.renderers.TemplateHTMLRenderer'
),
'DEFAULT_THROTTLE_CLASSES': (
'swh.web.common.throttling.SwhWebRateThrottle',
),
'DEFAULT_THROTTLE_RATES': throttle_rates
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'verbose': {
'format': '[%(asctime)s] [%(levelname)s] %(request)s %(status_code)s', # noqa
'datefmt': "%d/%b/%Y %H:%M:%S"
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
},
'file': {
'level': 'INFO',
'filters': ['require_debug_false'],
'class': 'logging.FileHandler',
'filename': os.path.join(swh_web_config['log_dir'], 'swh-web.log'),
'formatter': 'verbose'
},
'null': {
'class': 'logging.NullHandler',
},
},
'loggers': {
'django': {
'handlers': ['console', 'file'],
'level': 'DEBUG' if DEBUG else 'INFO',
'propagate': True,
},
'django.request': {
'handlers': ['file'],
'level': 'DEBUG' if DEBUG else 'INFO',
'propagate': False,
},
'django.db.backends': {
'handlers': ['null'],
'propagate': False
}
},
}
WEBPACK_LOADER = { # noqa
'DEFAULT': {
'CACHE': False,
'BUNDLE_DIR_NAME': './',
'STATS_FILE': os.path.join(PROJECT_DIR, '../static/webpack-stats.json'), # noqa
'POLL_INTERVAL': 0.1,
'TIMEOUT': None,
'IGNORE': ['.+\.hot-update.js', '.+\.map']
}
}
LOGIN_URL = '/admin/login/'
LOGIN_REDIRECT_URL = 'admin'
diff --git a/swh/web/templates/browse/branches.html b/swh/web/templates/browse/branches.html
index f00102341..6aee7a0a4 100644
--- a/swh/web/templates/browse/branches.html
+++ b/swh/web/templates/browse/branches.html
@@ -1,59 +1,58 @@
{% extends "./browse.html" %}
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
-{% comment %} {% block after-navbar-content %}
- Branches
-{% endblock %} {% endcomment %}
+
+{% load swh_templatetags %}
{% block swh-browse-content %}
{% if displayed_branches|length > 0 %}
{% else %}
The list of branches is empty!
{% endif %}
{% endblock %}
{% block swh-browse-after-content %}
{% if prev_branches_url or next_branches_url %}
{% endif %}
{% endblock %}
diff --git a/swh/web/templates/browse/browse.html b/swh/web/templates/browse/browse.html
index fd87ea92e..a478ba337 100644
--- a/swh/web/templates/browse/browse.html
+++ b/swh/web/templates/browse/browse.html
@@ -1,63 +1,63 @@
{% extends "./layout.html" %}
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% load swh_templatetags %}
{% block title %}{{ heading }} – Software Heritage archive{% endblock %}
{% block navbar-content %}
{% if snapshot_context %}
{% endif %}
{% endblock %}
{% block browse-content %}
{% block swh-browse-before-content %}
{% if snapshot_context %}
{% include "includes/snapshot-context.html" %}
{% endif %}
{% endblock %}
{% block swh-browse-content %}{% endblock %}
{% block swh-browse-after-content %}{% endblock %}
{% endblock %}
diff --git a/swh/web/templates/browse/release.html b/swh/web/templates/browse/release.html
index 7c4e6d5ed..45c04da95 100644
--- a/swh/web/templates/browse/release.html
+++ b/swh/web/templates/browse/release.html
@@ -1,22 +1,25 @@
{% extends "./browse.html" %}
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
+{% load swh_templatetags %}
+
{% block swh-browse-content %}
{% include "includes/top-navigation.html" %}
- Release {{ swh_object_metadata.name }}
+ Release {{ swh_object_metadata.name }}
created by {{ swh_object_metadata.author }} on {{ swh_object_metadata.date }}
{% endblock %}
diff --git a/swh/web/templates/browse/releases.html b/swh/web/templates/browse/releases.html
index 54e0c7597..3ea848e50 100644
--- a/swh/web/templates/browse/releases.html
+++ b/swh/web/templates/browse/releases.html
@@ -1,55 +1,57 @@
{% extends "./browse.html" %}
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
+{% load swh_templatetags %}
+
{% block swh-browse-content %}
{% if displayed_releases|length > 0 %}
{% else %}
The list of releases is empty!
{% endif %}
{% endblock %}
{% block swh-browse-after-content %}
{% if prev_releases_url or next_releases_url %}
{% endif %}
{% endblock %}
diff --git a/swh/web/templates/browse/revision-log.html b/swh/web/templates/browse/revision-log.html
index 9b9ca6929..9d89811af 100644
--- a/swh/web/templates/browse/revision-log.html
+++ b/swh/web/templates/browse/revision-log.html
@@ -1,67 +1,68 @@
{% extends "./browse.html" %}
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% load render_bundle from webpack_loader %}
+{% load swh_templatetags %}
{% block header %}
{{ block.super }}
{% render_bundle 'revision' %}
{% endblock %}
{% block swh-browse-content %}
{% if snapshot_context %}
{% include "includes/top-navigation.html" %}
{% endif %}
{% if snapshot_context and snapshot_context.is_empty %}
{% include "includes/empty-snapshot.html" %}
{% else %}
-
Revision
+
Revision
Author
Message
Date
{% for log in revision_log %}
-
{{ log.revision }}
+
{{ log.revision }}
{{ log.author }}
{{ log.message }}
{{ log.date }}
{{ log.directory }}
{% endfor %}
{% endif %}
{% endblock %}
{% block swh-browse-after-content %}
{% if not snapshot_context or not snapshot_context.is_empty %}
{% endif %}
{% endblock %}
diff --git a/swh/web/templates/browse/revision.html b/swh/web/templates/browse/revision.html
index 1cdbe88c7..d4153e7ee 100644
--- a/swh/web/templates/browse/revision.html
+++ b/swh/web/templates/browse/revision.html
@@ -1,103 +1,110 @@
{% extends "./browse.html" %}
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% load static %}
{% load swh_templatetags %}
{% load render_bundle from webpack_loader %}
{% block header %}
{{ block.super }}
{% render_bundle 'revision' %}
{% endblock %}
{% block swh-browse-content %}
- Revision {{ swh_object_metadata.id }}
+ Revision {{ swh_object_metadata.id }}
authored by {{ swh_object_metadata.author }} on {{ swh_object_metadata.date }}
{% endblock %}
{% block swh-browse-after-content %}
{% include "includes/readme-display.html" %}
{% endblock %}
diff --git a/swh/web/templates/includes/directory-display.html b/swh/web/templates/includes/directory-display.html
index 661c87974..6447b5f4c 100644
--- a/swh/web/templates/includes/directory-display.html
+++ b/swh/web/templates/includes/directory-display.html
@@ -1,47 +1,47 @@
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% if snapshot_context and snapshot_context.is_empty %}
{% include "includes/empty-snapshot.html" %}
{% elif dirs|length > 0 or files|length > 0 %}
{% elif dirs|length == 0 and files|length == 0 %}
Directory is empty
{% endif %}
\ No newline at end of file
diff --git a/swh/web/templates/includes/show-swh-ids.html b/swh/web/templates/includes/show-swh-ids.html
index 05ef45079..bcf46de76 100644
--- a/swh/web/templates/includes/show-swh-ids.html
+++ b/swh/web/templates/includes/show-swh-ids.html
@@ -1,95 +1,103 @@
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% load swh_templatetags %}
{% if swh_ids %}
To reference or cite the objects present in the Software Heritage archive, permalinks based on persistent identifiers
must be used instead of copying and pasting the url from the address bar of the browser (as there is no guarantee the current URI
scheme will remain the same over time).
Select below a type of object currently browsed in order to display its associated persistent identifier and permalink.
{% for swh_id in swh_ids %}
{% if forloop.first %}
-
{% endif %}
diff --git a/swh/web/templates/includes/snapshot-context.html b/swh/web/templates/includes/snapshot-context.html
index d76e9f47e..3370e61bb 100644
--- a/swh/web/templates/includes/snapshot-context.html
+++ b/swh/web/templates/includes/snapshot-context.html
@@ -1,29 +1,31 @@
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
+{% load swh_templatetags %}
+
\ No newline at end of file
diff --git a/swh/web/templates/includes/top-navigation.html b/swh/web/templates/includes/top-navigation.html
index f22d8614b..ea928f865 100644
--- a/swh/web/templates/includes/top-navigation.html
+++ b/swh/web/templates/includes/top-navigation.html
@@ -1,125 +1,125 @@
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% load swh_templatetags %}
{% if snapshot_context %}
{% if snapshot_context.branch or snapshot_context.release %}
{% endfor %}
{% if snapshot_context.branches|length < snapshot_context.snapshot_size.revision %}
Branches list truncated to {{ snapshot_context.branches|length }} entries,
{{ snapshot_context.branches|length|mul:-1|add:snapshot_context.snapshot_size.revision }}
were omitted.
{% endif %}
{% if snapshot_context.releases %}
{% for r in snapshot_context.releases %}
{% if r.target_type == 'revision' %}
Releases list truncated to {{ snapshot_context.releases|length }} entries,
{{ snapshot_context.releases|length|mul:-1|add:snapshot_context.snapshot_size.release }}
were omitted.
{% endif %}
{% else %}
No releases to show
{% endif %}
{% if not snapshot_context or not snapshot_context.is_empty %}
{% include "includes/vault-create-tasks.html" %}
{% endif %}
{% include "includes/show-metadata.html" %}
{% endif %}
{% include "includes/breadcrumbs.html" %}
{% include "includes/show-swh-ids.html" %}
diff --git a/swh/web/templates/includes/vault-create-tasks.html b/swh/web/templates/includes/vault-create-tasks.html
index cf3e800df..24e6672c3 100644
--- a/swh/web/templates/includes/vault-create-tasks.html
+++ b/swh/web/templates/includes/vault-create-tasks.html
@@ -1,110 +1,118 @@
{% comment %}
Copyright (C) 2017-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
+{% load swh_templatetags %}
+
{% if vault_cooking %}
{% if vault_cooking.directory_context %}
-
+
{% endif %}
{% if vault_cooking.revision_context %}
-
+
{% endif %}
Cook and download a directory from the Software Heritage Vault
You have requested the cooking of the directory with identifier {{ vault_cooking.directory_id }}
into a standard tar.gz archive.
Are you sure you want to continue?
Cook and download a revision from the Software Heritage Vault
You have requested the cooking of the revision with identifier {{ vault_cooking.revision_id }}
into a git fast-import archive.
Are you sure you want to continue?
Invalid Email!
The provided email is not well-formed.
{% endif %}
\ No newline at end of file
diff --git a/swh/web/templates/layout.html b/swh/web/templates/layout.html
index c06a211dd..172f3f03d 100644
--- a/swh/web/templates/layout.html
+++ b/swh/web/templates/layout.html
@@ -1,180 +1,186 @@
{% comment %}
Copyright (C) 2015-2018 The Software Heritage developers
See the AUTHORS file at the top-level directory of this distribution
License: GNU Affero General Public License version 3, or any later version
See top-level LICENSE file for more information
{% endcomment %}
{% load static %}
{% load render_bundle from webpack_loader %}
+{% load swh_templatetags %}
{% block title %}{% endblock %}
{% render_bundle 'vendors' %}
{% render_bundle 'webapp' %}
{% block header %}{% endblock %}