# Copyright (C) 2017  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import base64
import magic
import math
import stat
import textwrap

from django.core.cache import cache
from django.utils.safestring import mark_safe

from swh.web.common import highlightjs, service
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
    reverse, format_utc_iso_date, parse_timestamp
)


def get_directory_entries(sha1_git):
    """Retrieve the content of a SWH directory from the SWH archive.

    The directory entries are first sorted in lexicographical order,
    then partitioned into sub-directories and regular files.  The
    result is cached to speed up navigation in the swh web browse ui.

    Args:
        sha1_git: sha1_git identifier of the directory

    Returns:
        A tuple whose first member is the sub-directories list and
        second member the regular files list.

    Raises:
        NotFoundExc: if the directory is not found
    """
    cache_entry_id = 'directory_entries_%s' % sha1_git
    cached_entries = cache.get(cache_entry_id)
    if cached_entries:
        return cached_entries

    entries = sorted(service.lookup_directory(sha1_git),
                     key=lambda e: e['name'])
    for entry in entries:
        # render raw mode bits as an ls-style string (e.g. '-rw-r--r--')
        entry['perms'] = stat.filemode(entry['perms'])

    dirs = [e for e in entries if e['type'] == 'dir']
    files = [e for e in entries if e['type'] == 'file']

    cache.set(cache_entry_id, (dirs, files))
    return dirs, files


def gen_path_info(path):
    """Generate path data for breadcrumb navigation in the swh web ui.

    For instance, from a path '/folder1/folder2/folder3', it returns
    the following list::

        [{'name': 'folder1', 'path': 'folder1'},
         {'name': 'folder2', 'path': 'folder1/folder2'},
         {'name': 'folder3', 'path': 'folder1/folder2/folder3'}]

    Args:
        path: a filesystem path

    Returns:
        A list of path data for navigation as illustrated above.
    """
    path_info = []
    if path:
        path_from_root = ''
        for part in path.strip('/').split('/'):
            path_from_root += '/' + part
            path_info.append({'name': part,
                              'path': path_from_root.strip('/')})
    return path_info
def get_mimetype_and_encoding_for_content(content):
    """Return the mime type and the encoding associated to a content
    buffer, using the magic module under the hood.

    Supports both the new python-magic API (``detect_from_content``)
    and the legacy one (``magic.open``, e.g. as shipped on debian
    jessie).

    Args:
        content (bytes): a content buffer

    Returns:
        A tuple (mimetype, encoding), for instance
        ('text/plain', 'us-ascii'), associated to the provided content.
    """
    if hasattr(magic, 'detect_from_content'):
        magic_result = magic.detect_from_content(content)
        return magic_result.mime_type, magic_result.encoding
    else:
        # for old api version of magic module (debian jessie)
        m = magic.open(magic.MAGIC_MIME)
        m.load()
        # output looks like 'text/plain; charset=us-ascii'; be defensive
        # as the '; charset=...' part is not always present, which would
        # previously raise IndexError
        magic_result = m.buffer(content).split(';')
        mimetype = magic_result[0].strip()
        encoding = 'binary'
        if len(magic_result) > 1:
            charset = magic_result[1].split('=')
            if len(charset) > 1:
                encoding = charset[1].strip()
        return mimetype, encoding


def request_content(query_string):
    """Retrieve a SWH content from the SWH archive.

    Raw bytes content is first retrieved, then the content mime type
    and encoding.  If that filetype info is not stored in the archive,
    it is computed using the Python magic module.

    Args:
        query_string: a string of the form "[ALGO_HASH:]HASH" where
            optional ALGO_HASH can be either *sha1*, *sha1_git*,
            *sha256*, or *blake2s256* (default to *sha1*) and HASH the
            hexadecimal representation of the hash value

    Returns:
        A dict describing the content, with keys 'raw_data',
        'mimetype', 'encoding', 'language' and 'licenses'.

    Raises:
        NotFoundExc: if the content is not found
    """
    content_data = service.lookup_content(query_string)
    content_raw = service.lookup_content_raw(query_string)
    content_data['raw_data'] = content_raw['data']
    filetype = service.lookup_content_filetype(query_string)
    language = service.lookup_content_language(query_string)
    license = service.lookup_content_license(query_string)
    if filetype:
        mimetype = filetype['mimetype']
        encoding = filetype['encoding']
    else:
        mimetype, encoding = \
            get_mimetype_and_encoding_for_content(content_data['raw_data'])

    content_data['mimetype'] = mimetype
    content_data['encoding'] = encoding

    # encode textual content to utf-8 if needed
    if mimetype.startswith('text/') and 'ascii' not in encoding \
            and 'utf-8' not in encoding:
        try:
            content_data['raw_data'] = \
                content_data['raw_data'].decode(encoding).encode('utf-8')
        except (LookupError, UnicodeDecodeError):
            # magic may report encodings Python does not know about
            # (e.g. 'binary', 'unknown-8bit') or misdetect the charset;
            # keep the original raw bytes rather than failing the whole
            # content request
            pass

    if language:
        content_data['language'] = language['lang']
    else:
        content_data['language'] = 'not detected'

    if license:
        content_data['licenses'] = ', '.join(license['licenses'])
    else:
        content_data['licenses'] = 'not detected'

    return content_data


_browsers_supported_image_mimes = set(['image/gif', 'image/png',
                                       'image/jpeg', 'image/bmp',
                                       'image/webp'])


def prepare_content_for_display(content_data, mime_type, path):
    """Prepare a content for HTML display.

    The function tries to associate a programming language to the
    content in order to perform syntax highlighting client-side using
    highlightjs.  The language is determined using either the content
    filename or its mime type.  If the mime type corresponds to an
    image format supported by web browsers, the content is encoded in
    base64 for displaying the image.

    Args:
        content_data (bytes): raw bytes of the content
        mime_type (string): mime type of the content
        path (string): path of the content including filename

    Returns:
        A dict containing the content bytes (possibly different from
        the one provided as parameter if it is an image) under the key
        'content_data' and the corresponding highlightjs language class
        under the key 'language'.
    """
    language = highlightjs.get_hljs_language_from_filename(path)

    if not language:
        language = highlightjs.get_hljs_language_from_mime_type(mime_type)

    if not language:
        language = 'nohighlight-swh'
    elif mime_type.startswith('application/'):
        mime_type = mime_type.replace('application/', 'text/')

    if mime_type.startswith('image/'):
        if mime_type in _browsers_supported_image_mimes:
            content_data = base64.b64encode(content_data)
        else:
            # image format not displayable in browsers
            content_data = None

    return {'content_data': content_data,
            'language': language}
def get_origin_visits(origin_id):
    """Return the list of visits for a swh origin.

    That list is put in cache in order to speedup the navigation in
    the swh web browse ui.

    Args:
        origin_id (int): the id of the swh origin to fetch visits from

    Returns:
        A list of dicts describing the origin visits (keys 'date',
        'origin', 'status', 'visit').

    Raises:
        NotFoundExc: if the origin is not found
    """
    cache_entry_id = 'origin_%s_visits' % origin_id
    cached_visits = cache.get(cache_entry_id)
    if cached_visits:
        return cached_visits

    origin_visits = []
    per_page = service.MAX_LIMIT
    last_visit = None
    while True:
        visits = list(service.lookup_origin_visits(origin_id,
                                                   last_visit=last_visit,
                                                   per_page=per_page))
        origin_visits += visits
        if len(visits) < per_page:
            break
        # NOTE(review): the pagination cursor is advanced by page size;
        # presumably visit identifiers are sequential per origin --
        # confirm against service.lookup_origin_visits
        if not last_visit:
            last_visit = per_page
        else:
            last_visit += per_page

    cache.set(cache_entry_id, origin_visits)
    return origin_visits


def get_origin_visit_branches(origin_id, visit_id=None, visit_ts=None):
    """Return the list of branches associated to a swh origin for a
    given visit.

    The visit can be expressed by its id or a timestamp.  In the latter
    case, the closest visit from the provided timestamp will be used.
    If no visit parameter is provided, it returns the list of branches
    found for the latest visit.  That list is put in cache in order to
    speedup the navigation in the swh web browse ui.

    Args:
        origin_id (int): the id of the swh origin to fetch branches from
        visit_id (int): the id of the origin visit
        visit_ts (int or str): an ISO date string or Unix timestamp to parse

    Returns:
        A list of dicts describing the origin branches for the given
        visit (keys 'name', 'revision', 'directory').

    Raises:
        NotFoundExc: if the origin or its visit are not found
    """
    if not visit_id and visit_ts:
        parsed_visit_ts = math.floor(parse_timestamp(visit_ts).timestamp())
        visits = get_origin_visits(origin_id)
        for i, visit in enumerate(visits):
            ts = math.floor(parse_timestamp(visit['date']).timestamp())
            if i == 0:
                if parsed_visit_ts <= ts:
                    return get_origin_visit_branches(origin_id,
                                                     visit['visit'])
            elif i == len(visits) - 1:
                if parsed_visit_ts >= ts:
                    return get_origin_visit_branches(origin_id,
                                                     visit['visit'])
            else:
                next_ts = math.floor(
                    parse_timestamp(visits[i+1]['date']).timestamp())
                if parsed_visit_ts >= ts and parsed_visit_ts < next_ts:
                    if (parsed_visit_ts - ts) < (next_ts - parsed_visit_ts):
                        return get_origin_visit_branches(origin_id,
                                                         visit['visit'])
                    else:
                        return get_origin_visit_branches(
                            origin_id, visits[i+1]['visit'])
        # NOTE(review): timestamps falling strictly between the first
        # and second visit do not match any of the branches above and
        # end up here despite a closest visit existing -- confirm
        # whether that is intended
        raise NotFoundExc(
            'Visit with timestamp %s for origin with id %s not found!' %
            (visit_ts, origin_id))

    cache_entry_id = 'origin_%s_visit_%s_branches' % (origin_id, visit_id)
    cached_branches = cache.get(cache_entry_id)
    if cached_branches:
        return cached_branches

    origin_visit_data = service.lookup_origin_visit(origin_id, visit_id)
    branches = []
    revision_ids = []
    occurrences = origin_visit_data['occurrences']
    for key in sorted(occurrences.keys()):
        if occurrences[key]['target_type'] == 'revision':
            branches.append({'name': key,
                             'revision': occurrences[key]['target']})
            revision_ids.append(occurrences[key]['target'])

    revisions = service.lookup_revision_multiple(revision_ids)

    # drop branches whose target revision could not be resolved
    branches_to_remove = []
    for idx, revision in enumerate(revisions):
        if revision:
            branches[idx]['directory'] = revision['directory']
        else:
            branches_to_remove.append(branches[idx])
    for b in branches_to_remove:
        branches.remove(b)

    cache.set(cache_entry_id, branches)
    return branches


def gen_link(url, link_text):
    """Utility function for generating an HTML link to insert in
    Django templates.

    Args:
        url (str): an url
        link_text (str): the text for the produced link

    Returns:
        An HTML link in the form '<a href="url">link_text</a>',
        marked safe for template rendering.
    """
    link = '<a href="%s">%s</a>' % (url, link_text)
    return mark_safe(link)
def gen_person_link(person_id, person_name):
    """Utility function for generating a link to a SWH person HTML view
    to insert in Django templates.

    Args:
        person_id (int): a SWH person id
        person_name (str): the associated person name

    Returns:
        An HTML link in the form '<a href="person_view_url">person_name</a>'
    """
    person_url = reverse('browse-person', kwargs={'person_id': person_id})
    return gen_link(person_url, person_name)


def gen_revision_link(revision_id, shorten_id=False):
    """Utility function for generating a link to a SWH revision HTML
    view to insert in Django templates.

    Args:
        revision_id (int): a SWH revision id
        shorten_id (boolean): whether to shorten the revision id to 7
            characters for the link text

    Returns:
        An HTML link in the form '<a href="revision_view_url">revision_id</a>'
    """
    revision_url = reverse('browse-revision',
                           kwargs={'sha1_git': revision_id})
    if shorten_id:
        return gen_link(revision_url, revision_id[:7])
    else:
        return gen_link(revision_url, revision_id)


def _format_log_entries(revision_log, per_page):
    # Turn at most per_page raw log entries into template-ready dicts
    # (linked author/revision, truncated message, formatted date).
    revision_log_data = []
    for i, log in enumerate(revision_log):
        if i == per_page:
            break
        revision_log_data.append(
            {'author': gen_person_link(log['author']['id'],
                                       log['author']['name']),
             'revision': gen_revision_link(log['id'], True),
             'message': log['message'],
             'message_shorten': textwrap.shorten(log['message'],
                                                 width=80,
                                                 placeholder='...'),
             'date': format_utc_iso_date(log['date']),
             'directory': log['directory']})
    return revision_log_data


def prepare_revision_log_for_display(revision_log, per_page, revs_breadcrumb,
                                     origin_context=False):
    """Utility function that processes raw revision log data for HTML
    display.

    Its purpose is to:

        * add links to relevant SWH browse views
        * format date in human readable format
        * truncate the message log

    It also computes the data needed to generate the links for
    navigating back and forth in the history log.

    Args:
        revision_log (list): raw revision log as returned by the SWH
            web api
        per_page (int): number of log entries per page
        revs_breadcrumb (str): breadcrumbs of revisions navigated so
            far, in the form 'rev1[/rev2/../revN]'.  Each revision
            corresponds to the first one displayed in the HTML view for
            history log.
        origin_context (boolean): whether or not the revision log is
            browsed from an origin view.
    """
    current_rev = revision_log[0]['id']
    next_rev = None
    prev_rev = None
    next_revs_breadcrumb = None
    prev_revs_breadcrumb = None
    # one extra entry was fetched: a previous (older) page exists
    if len(revision_log) == per_page + 1:
        prev_rev = revision_log[-1]['id']

    prev_rev_bc = current_rev
    if origin_context:
        prev_rev_bc = prev_rev

    # NOTE(review): nesting below reconstructed from a mangled diff --
    # confirm against the original swh-web sources
    if revs_breadcrumb:
        revs = revs_breadcrumb.split('/')
        next_rev = revs[-1]
        if len(revs) > 1:
            next_revs_breadcrumb = '/'.join(revs[:-1])
        if len(revision_log) == per_page + 1:
            prev_revs_breadcrumb = revs_breadcrumb + '/' + prev_rev_bc
    else:
        prev_revs_breadcrumb = prev_rev_bc

    return {'revision_log_data': _format_log_entries(revision_log,
                                                     per_page),
            'prev_rev': prev_rev,
            'prev_revs_breadcrumb': prev_revs_breadcrumb,
            'next_rev': next_rev,
            'next_revs_breadcrumb': next_revs_breadcrumb}


# --- swh/web/browse/views/content.py (post-patch preamble) ---

# Copyright (C) 2017  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

from django.http import HttpResponse
from django.shortcuts import render
from django.template.defaultfilters import filesizeformat

from swh.model.hashutil import hash_to_hex

from swh.web.common import query
from swh.web.common.utils import reverse
from swh.web.common.exc import handle_view_exception
from swh.web.browse.utils import (
    gen_path_info, request_content, prepare_content_for_display
)
from swh.web.browse.browseurls import browse_route


@browse_route(r'content/(?P<query_string>.+)/raw/',
              view_name='browse-content-raw')
def content_raw(request, query_string):
    """Django view that produces a raw display of a SWH content
    identified by its hash value.

    The url that points to it is
    :http:get:`/browse/content/[(algo_hash):](hash)/raw/`

    Args:
        request: input django http request
        query_string: a string of the form "[ALGO_HASH:]HASH" where
            optional ALGO_HASH can be either *sha1*, *sha1_git*,
            *sha256*, or *blake2s256* (default to *sha1*) and HASH the
            hexadecimal representation of the hash value

    Returns:
        The raw bytes of the content.
    """ # noqa
    try:
        algo, checksum = query.parse_hash(query_string)
        checksum = hash_to_hex(checksum)
        content_data = request_content(query_string)
    except Exception as exc:
        return handle_view_exception(exc)

    filename = request.GET.get('filename', None)
    if not filename:
        filename = '%s_%s' % (algo, checksum)

    if content_data['mimetype'].startswith('text/'):
        # textual content: display inline in the browser
        response = HttpResponse(content_data['raw_data'],
                                content_type="text/plain")
        response['Content-disposition'] = 'filename=%s' % filename
    else:
        # binary content: force download
        response = HttpResponse(content_data['raw_data'],
                                content_type='application/octet-stream')
        response['Content-disposition'] = \
            'attachment; filename=%s' % filename
    return response


@browse_route(r'content/(?P<query_string>.+)/',
              view_name='browse-content')
def content_display(request, query_string):
    """Django view that produces an HTML display of a SWH content
    identified by its hash value.

    The url that points to it is
    :http:get:`/browse/content/[(algo_hash):](hash)/`

    Args:
        request: input django http request
        query_string: a string of the form "[ALGO_HASH:]HASH" where
            optional ALGO_HASH can be either *sha1*, *sha1_git*,
            *sha256*, or *blake2s256* (default to *sha1*) and HASH the
            hexadecimal representation of the hash value

    Returns:
        The HTML rendering of the requested content.
    """ # noqa
    try:
        algo, checksum = query.parse_hash(query_string)
        checksum = hash_to_hex(checksum)
        content_data = request_content(query_string)
    except Exception as exc:
        return handle_view_exception(exc)

    path = request.GET.get('path', None)

    content_display_data = prepare_content_for_display(
        content_data['raw_data'], content_data['mimetype'], path)

    root_dir = None
    filename = None
    path_info = None
    breadcrumbs = []

    if path:
        # build breadcrumbs from the root directory down to the file
        split_path = path.split('/')
        root_dir = split_path[0]
        filename = split_path[-1]
        path = path.replace(root_dir + '/', '')
        path = path.replace(filename, '')
        path_info = gen_path_info(path)
        breadcrumbs.append({'name': root_dir[:7],
                            'url': reverse('browse-directory',
                                           kwargs={'sha1_git': root_dir})})
        for pi in path_info:
            breadcrumbs.append({'name': pi['name'],
                                'url': reverse('browse-directory',
                                               kwargs={'sha1_git': root_dir,
                                                       'path': pi['path']})})
        breadcrumbs.append({'name': filename,
                            'url': None})

    query_params = None
    if filename:
        query_params = {'filename': filename}

    content_raw_url = reverse('browse-content-raw',
                              kwargs={'query_string': query_string},
                              query_params=query_params)

    content_metadata = {
        'sha1 checksum': content_data['checksums']['sha1'],
        'sha1_git checksum': content_data['checksums']['sha1_git'],
        'sha256 checksum': content_data['checksums']['sha256'],
        'blake2s256 checksum': content_data['checksums']['blake2s256'],
        'mime type': content_data['mimetype'],
        'encoding': content_data['encoding'],
        'size': filesizeformat(content_data['length']),
        'language': content_data['language'],
        'licenses': content_data['licenses']
    }

    return render(request, 'content.html',
                  {'top_panel_visible': True,
                   'top_panel_collapsible': True,
                   'top_panel_text_left': 'SWH object: Content',
                   'top_panel_text_right': '%s: %s' % (algo, checksum),
                   'swh_object_metadata': content_metadata,
                   'main_panel_visible': True,
                   'content': content_display_data['content_data'],
                   'mimetype': content_data['mimetype'],
                   'language': content_display_data['language'],
                   'breadcrumbs': breadcrumbs,
                   'branches': None,
                   'branch': None,
                   'top_right_link': content_raw_url,
                   'top_right_link_text': 'Raw File'})
# --- swh/web/browse/views/origin.py (post-patch preamble) ---

# Copyright (C) 2017  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

# NOTE(review): the original file imports the top-level package only
# and relies on 'dateutil.parser' being importable -- confirm
import dateutil

from django.shortcuts import render
from django.template.defaultfilters import filesizeformat

from swh.web.common import service
from swh.web.common.utils import reverse, format_utc_iso_date
from swh.web.common.exc import NotFoundExc, handle_view_exception
from swh.web.browse.utils import (
    get_origin_visits, get_origin_visit_branches,
    gen_path_info, get_directory_entries, request_content,
    prepare_content_for_display, gen_link,
    prepare_revision_log_for_display
)
from swh.web.browse.browseurls import browse_route


@browse_route(r'origin/(?P<origin_id>[0-9]+)/',
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/',
              view_name='browse-origin')
def origin_browse(request, origin_id=None, origin_type=None,
                  origin_url=None):
    """Django view that produces an HTML display of a swh origin
    identified by its id or its url.

    The url scheme that points to it is
    :http:get:`/browse/origin/(origin_id)/`.

    Args:
        request: input django http request
        origin_id: a swh origin id
        origin_type: type of origin (git, svn, ...)
        origin_url: url of the origin (e.g. https://github.com/...)

    Returns:
        The HTML rendering for the metadata of the provided origin.
    """ # noqa
    try:
        if origin_id:
            origin_request_params = {
                'id': origin_id,
            }
        else:
            origin_request_params = {
                'type': origin_type,
                'url': origin_url
            }
        origin_info = service.lookup_origin(origin_request_params)
        origin_id = origin_info['id']
        origin_visits = get_origin_visits(origin_id)
    except Exception as exc:
        return handle_view_exception(exc)

    origin_info['last swh visit browse url'] = \
        reverse('browse-origin-directory',
                kwargs={'origin_id': origin_id})

    origin_visits_data = []
    for visit in origin_visits:
        visit_date = dateutil.parser.parse(visit['date'])
        visit['date'] = format_utc_iso_date(visit['date'])
        visit['browse_url'] = reverse('browse-origin-directory',
                                      kwargs={'origin_id': origin_id,
                                              'visit_id': visit['visit']})
        origin_visits_data.append(
            {'date': visit_date.timestamp()})

    return render(request, 'origin.html',
                  {'top_panel_visible': True,
                   'top_panel_collapsible': True,
                   'top_panel_text_left': 'SWH object: Origin',
                   'top_panel_text_right': 'Url: ' + origin_info['url'],
                   'swh_object_metadata': origin_info,
                   'main_panel_visible': True,
                   'origin_visits_data': origin_visits_data,
                   'visits': list(reversed(origin_visits)),
                   'browse_url_base': '/browse/origin/%s/' % origin_id})


def _get_origin_branches_and_url_args(origin_id, visit_id, ts):
    # Resolve the branches for a visit expressed either by id or by
    # timestamp, and the matching url kwargs for reverse().
    if not visit_id and ts:
        branches = get_origin_visit_branches(origin_id, visit_ts=ts)
        url_args = {'origin_id': origin_id,
                    'timestamp': ts}
    else:
        branches = get_origin_visit_branches(origin_id, visit_id)
        url_args = {'origin_id': origin_id,
                    'visit_id': visit_id}
    return branches, url_args


def _raise_exception_if_branch_not_found(origin_id, visit_id, ts, branch):
    # Raise a NotFoundExc whose message matches the way the visit was
    # addressed (by id or by timestamp).
    if visit_id:
        raise NotFoundExc('Branch %s associated to visit with'
                          ' id %s for origin with id %s'
                          ' not found!' %
                          (branch, visit_id, origin_id))
    else:
        raise NotFoundExc('Branch %s associated to visit with'
                          ' timestamp %s for origin with id %s'
                          ' not found!' %
                          (branch, ts, origin_id))
def _get_branch(branches, branch_name):
    """Utility function to get a specific branch from an origin
    branches list.

    Its purpose is to get the default HEAD branch as some SWH origins
    (e.g. those with svn type) do not have it.  In the latter case,
    check if there is a master branch instead and return it.
    """
    filtered_branches = \
        [b for b in branches if branch_name in b['name']]
    if len(filtered_branches) > 0:
        return filtered_branches[0]
    elif branch_name == 'HEAD':
        filtered_branches = \
            [b for b in branches if 'master' in b['name']]
        if len(filtered_branches) > 0:
            return filtered_branches[0]
    return None


@browse_route(r'origin/(?P<origin_id>[0-9]+)/directory/',
              r'origin/(?P<origin_id>[0-9]+)/directory/(?P<path>.+)/',
              r'origin/(?P<origin_id>[0-9]+)/visit/(?P<visit_id>[0-9]+)/directory/',  # noqa
              r'origin/(?P<origin_id>[0-9]+)/visit/(?P<visit_id>[0-9]+)/directory/(?P<path>.+)/',  # noqa
              r'origin/(?P<origin_id>[0-9]+)/ts/(?P<timestamp>.+)/directory/',  # noqa
              r'origin/(?P<origin_id>[0-9]+)/ts/(?P<timestamp>.+)/directory/(?P<path>.+)/',  # noqa
              view_name='browse-origin-directory')
def origin_directory_browse(request, origin_id, visit_id=None,
                            timestamp=None, path=None):
    """Django view for browsing the content of a swh directory
    associated to an origin for a given visit.

    The url scheme that points to it is the following:

        * :http:get:`/browse/origin/(origin_id)/directory/[(path)/]`
        * :http:get:`/browse/origin/(origin_id)/visit/(visit_id)/directory/[(path)/]`
        * :http:get:`/browse/origin/(origin_id)/ts/(timestamp)/directory/[(path)/]`

    Args:
        request: input django http request
        origin_id: a swh origin id
        visit_id: optional visit id parameter (the last one will be
            used by default)
        timestamp: optional visit timestamp parameter (the last one
            will be used by default)
        path: optional path parameter used to navigate in directories
            reachable from the origin root one
        branch: optional query parameter that specifies the origin
            branch from which to retrieve the directory
        revision: optional query parameter to specify the origin
            revision from which to retrieve the directory

    Returns:
        The HTML rendering for the content of the directory associated
        to the provided origin and visit.
    """ # noqa
    try:
        if not visit_id and not timestamp:
            # default to the latest visit
            origin_visits = get_origin_visits(origin_id)
            return origin_directory_browse(request, origin_id,
                                           origin_visits[-1]['visit'],
                                           path=path)
        origin_info = service.lookup_origin({'id': origin_id})
        branches, url_args = _get_origin_branches_and_url_args(origin_id,
                                                               visit_id,
                                                               timestamp)
        for b in branches:
            branch_url_args = dict(url_args)
            if path:
                b['path'] = path
                branch_url_args['path'] = path
            b['url'] = reverse('browse-origin-directory',
                               kwargs=branch_url_args,
                               query_params={'branch': b['name']})

        revision_id = request.GET.get('revision', None)

        if revision_id:
            revision = service.lookup_revision(revision_id)
            root_sha1_git = revision['directory']
            branches.append({'name': revision_id,
                             'revision': revision_id,
                             'directory': root_sha1_git,
                             'url': None})
            branch_name = revision_id
        else:
            branch_name = request.GET.get('branch', 'HEAD')
            branch = _get_branch(branches, branch_name)
            if branch:
                branch_name = branch['name']
                root_sha1_git = branch['directory']
            else:
                _raise_exception_if_branch_not_found(origin_id, visit_id,
                                                     timestamp, branch_name)

        sha1_git = root_sha1_git
        if path:
            dir_info = service.lookup_directory_with_path(root_sha1_git,
                                                          path)
            sha1_git = dir_info['target']

        dirs, files = get_directory_entries(sha1_git)
    except Exception as exc:
        return handle_view_exception(exc)

    if revision_id:
        query_params = {'revision': revision_id}
    else:
        query_params = {'branch': branch_name}

    path_info = gen_path_info(path)

    breadcrumbs = []
    breadcrumbs.append({'name': root_sha1_git[:7],
                        'url': reverse('browse-origin-directory',
                                       kwargs=url_args,
                                       query_params=query_params)})
    for pi in path_info:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = pi['path']
        breadcrumbs.append({'name': pi['name'],
                            'url': reverse('browse-origin-directory',
                                           kwargs=bc_url_args,
                                           query_params=query_params)})

    path = '' if path is None else (path + '/')

    for d in dirs:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = path + d['name']
        d['url'] = reverse('browse-origin-directory',
                           kwargs=bc_url_args,
                           query_params=query_params)

    sum_file_sizes = 0

    for f in files:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = path + f['name']
        f['url'] = reverse('browse-origin-content',
                           kwargs=bc_url_args,
                           query_params=query_params)
        sum_file_sizes += f['length']
        f['length'] = filesizeformat(f['length'])

    history_url = reverse('browse-origin-log',
                          kwargs=url_args,
                          query_params=query_params)

    sum_file_sizes = filesizeformat(sum_file_sizes)

    dir_metadata = {'id': sha1_git,
                    'number of regular files': len(files),
                    'number of subdirectories': len(dirs),
                    'sum of regular file sizes': sum_file_sizes,
                    'origin id': origin_info['id'],
                    'origin type': origin_info['type'],
                    'origin url': origin_info['url'],
                    'path': '/' + path}

    return render(request, 'directory.html',
                  {'top_panel_visible': True,
                   'top_panel_collapsible': True,
                   'top_panel_text_left': 'SWH object: Directory',
                   'top_panel_text_right': 'Origin: ' + origin_info['url'],
                   'swh_object_metadata': dir_metadata,
                   'main_panel_visible': True,
                   'dirs': dirs,
                   'files': files,
                   'breadcrumbs': breadcrumbs,
                   'branches': branches,
                   'branch': branch_name,
                   'top_right_link': history_url,
                   'top_right_link_text': 'History'})
@browse_route(r'origin/(?P<origin_id>[0-9]+)/content/(?P<path>.+)/',
              r'origin/(?P<origin_id>[0-9]+)/visit/(?P<visit_id>[0-9]+)/content/(?P<path>.+)/',  # noqa
              r'origin/(?P<origin_id>[0-9]+)/ts/(?P<timestamp>.+)/content/(?P<path>.+)/',  # noqa
              view_name='browse-origin-content')
def origin_content_display(request, origin_id, path, visit_id=None,
                           timestamp=None):
    """Django view that produces an HTML display of a swh content
    associated to an origin for a given visit.

    The url scheme that points to it is the following:

        * :http:get:`/browse/origin/(origin_id)/content/(path)/`
        * :http:get:`/browse/origin/(origin_id)/visit/(visit_id)/content/(path)/`
        * :http:get:`/browse/origin/(origin_id)/ts/(timestamp)/content/(path)/`

    Args:
        request: input django http request
        origin_id: id of a swh origin
        path: path of the content relative to the origin root directory
        visit_id: optional visit id parameter (the last one will be
            used by default)
        timestamp: optional visit timestamp parameter (the last one
            will be used by default)
        branch: optional query parameter that specifies the origin
            branch from which to retrieve the content
        revision: optional query parameter to specify the origin
            revision from which to retrieve the content

    Returns:
        The HTML rendering of the requested content associated to the
        provided origin and visit.
    """ # noqa
    try:
        if not visit_id and not timestamp:
            # default to the latest visit
            origin_visits = get_origin_visits(origin_id)
            return origin_content_display(request, origin_id, path,
                                          origin_visits[-1]['visit'])
        origin_info = service.lookup_origin({'id': origin_id})
        branches, url_args = _get_origin_branches_and_url_args(origin_id,
                                                               visit_id,
                                                               timestamp)
        for b in branches:
            bc_url_args = dict(url_args)
            bc_url_args['path'] = path
            b['url'] = reverse('browse-origin-content',
                               kwargs=bc_url_args,
                               query_params={'branch': b['name']})

        revision_id = request.GET.get('revision', None)

        if revision_id:
            revision = service.lookup_revision(revision_id)
            root_sha1_git = revision['directory']
            branches.append({'name': revision_id,
                             'revision': revision_id,
                             'directory': root_sha1_git,
                             'url': None})
            branch_name = revision_id
        else:
            branch_name = request.GET.get('branch', 'HEAD')
            branch = _get_branch(branches, branch_name)
            if branch:
                branch_name = branch['name']
                root_sha1_git = branch['directory']
            else:
                _raise_exception_if_branch_not_found(origin_id, visit_id,
                                                     timestamp, branch_name)

        content_info = service.lookup_directory_with_path(root_sha1_git,
                                                          path)
        sha1_git = content_info['target']
        query_string = 'sha1_git:' + sha1_git
        content_data = request_content(query_string)
    except Exception as exc:
        return handle_view_exception(exc)

    if revision_id:
        query_params = {'revision': revision_id}
    else:
        query_params = {'branch': branch_name}

    content_display_data = prepare_content_for_display(
        content_data['raw_data'], content_data['mimetype'], path)

    filename = None
    path_info = None

    breadcrumbs = []

    split_path = path.split('/')
    filename = split_path[-1]
    path = path.replace(filename, '')
    path_info = gen_path_info(path)
    breadcrumbs.append({'name': root_sha1_git[:7],
                        'url': reverse('browse-origin-directory',
                                       kwargs=url_args,
                                       query_params=query_params)})
    for pi in path_info:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = pi['path']
        breadcrumbs.append({'name': pi['name'],
                            'url': reverse('browse-origin-directory',
                                           kwargs=bc_url_args,
                                           query_params=query_params)})

    breadcrumbs.append({'name': filename,
                        'url': None})

    content_raw_url = reverse('browse-content-raw',
                              kwargs={'query_string': query_string},
                              query_params={'filename': filename})

    content_metadata = {
        'sha1 checksum': content_data['checksums']['sha1'],
        'sha1_git checksum': content_data['checksums']['sha1_git'],
        'sha256 checksum': content_data['checksums']['sha256'],
        'blake2s256 checksum': content_data['checksums']['blake2s256'],
        'mime type': content_data['mimetype'],
        'encoding': content_data['encoding'],
        'size': filesizeformat(content_data['length']),
        'language': content_data['language'],
        'licenses': content_data['licenses'],
        'origin id': origin_info['id'],
        'origin type': origin_info['type'],
        'origin url': origin_info['url'],
        'path': '/' + path,
        'filename': filename
    }

    return render(request, 'content.html',
                  {'top_panel_visible': True,
                   'top_panel_collapsible': True,
                   'top_panel_text_left': 'SWH object: Content',
                   'top_panel_text_right': 'Origin: %s' %
                   origin_info['url'],
                   'swh_object_metadata': content_metadata,
                   'main_panel_visible': True,
                   'content': content_display_data['content_data'],
                   'mimetype': content_data['mimetype'],
                   'language': content_display_data['language'],
                   'breadcrumbs': breadcrumbs,
                   'branches': branches,
                   'branch': branch_name,
                   'top_right_link': content_raw_url,
                   'top_right_link_text': 'Raw File'})


def _gen_directory_link(url_args, revision, link_text):
    # Link to the directory view pinned on a specific revision.
    directory_url = reverse('browse-origin-directory',
                            kwargs=url_args,
                            query_params={'revision': revision})
    return gen_link(directory_url, link_text)


# default number of log entries displayed per page
NB_LOG_ENTRIES = 20
The url scheme that points to it is the following: * :http:get:`/browse/origin/(origin_id)/log/` * :http:get:`/browse/origin/(origin_id)/visit/(visit_id)/log/` * :http:get:`/browse/origin/(origin_id)/ts/(timestamp)/log/` Args: request: input django http request origin_id: id of a swh origin visit_id: optionnal visit id parameter (the last one will be used by default) timestamp: optionnal visit timestamp parameter (the last one will be used by default) revs_breadcrumb: query parameter used internally to store the navigation breadcrumbs (i.e. the list of descendant revisions visited so far). per_page: optionnal query parameter used to specify the number of log entries per page branch: optionnal query parameter that specifies the origin branch from which to retrieve the content revision: optional query parameter to specify the origin revision from which to retrieve the directory Returns: The HTML rendering of revisions history for a given SWH visit. """ # noqa try: if not visit_id and not timestamp: origin_visits = get_origin_visits(origin_id) return origin_log_browse(request, origin_id, origin_visits[-1]['visit']) branches, url_args = _get_origin_branches_and_url_args(origin_id, visit_id, timestamp) for b in branches: b['url'] = reverse('browse-origin-log', kwargs=url_args, query_params={'branch': b['name']}) revision_id = request.GET.get('revision', None) revs_breadcrumb = request.GET.get('revs_breadcrumb', None) branch_name = request.GET.get('branch', 'HEAD') if revision_id: revision = service.lookup_revision(revision_id) branches.append({'name': revision_id, 'revision': revision_id, 'directory': revision['directory']}) revision = revision_id branch_name = revision_id elif revs_breadcrumb: revs = revs_breadcrumb.split('/') revision = revs[-1] else: branch = _get_branch(branches, branch_name) if branch: branch_name = branch['name'] revision = branch['revision'] else: _raise_exception_if_branch_not_found(origin_id, visit_id, timestamp, branch_name) per_page = 
int(request.GET.get('per_page', NB_LOG_ENTRIES)) revision_log = service.lookup_revision_log(revision, limit=per_page+1) revision_log = list(revision_log) except Exception as exc: return handle_view_exception(exc) revision_log_display_data = prepare_revision_log_for_display( revision_log, per_page, revs_breadcrumb, origin_context=True) prev_rev = revision_log_display_data['prev_rev'] prev_revs_breadcrumb = revision_log_display_data['prev_revs_breadcrumb'] prev_log_url = None if prev_rev: prev_log_url = \ reverse('browse-origin-log', kwargs=url_args, query_params={'revs_breadcrumb': prev_revs_breadcrumb, 'per_page': per_page, 'branch': branch_name}) next_rev = revision_log_display_data['next_rev'] next_revs_breadcrumb = revision_log_display_data['next_revs_breadcrumb'] next_log_url = None if next_rev: next_log_url = \ reverse('browse-origin-log', kwargs=url_args, query_params={'revs_breadcrumb': next_revs_breadcrumb, 'per_page': per_page, 'branch': branch_name}) revision_log_data = revision_log_display_data['revision_log_data'] for i, log in enumerate(revision_log_data): log['directory'] = _gen_directory_link(url_args, revision_log[i]['id'], 'Tree') return render(request, 'revision-log.html', {'top_panel_visible': False, 'top_panel_collapsible': False, 'top_panel_text_left': 'SWH object: Revision history', 'top_panel_text_right': 'Sha1 git: ' + revision, 'swh_object_metadata': None, 'main_panel_visible': True, 'revision_log': revision_log_data, 'next_log_url': next_log_url, 'prev_log_url': prev_log_url, 'breadcrumbs': None, 'branches': branches, 'branch': branch_name, 'top_right_link': None, 'top_right_link_text': None, 'include_top_navigation': True}) diff --git a/swh/web/tests/browse/test_utils.py b/swh/web/tests/browse/test_utils.py index b7ca75cd..0e242fb9 100644 --- a/swh/web/tests/browse/test_utils.py +++ b/swh/web/tests/browse/test_utils.py @@ -1,297 +1,298 @@ # Copyright (C) 2017 The Software Heritage developers # See the AUTHORS file at the top-level 
directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import unittest from unittest.mock import patch from nose.tools import istest from swh.web.browse import utils from swh.web.common.utils import reverse from .views.data.revision_test_data import revision_history_log_test class SwhBrowseUtilsTestCase(unittest.TestCase): @istest def gen_path_info(self): input_path = '/home/user/swh-environment/swh-web/' expected_result = [ {'name': 'home', 'path': 'home'}, {'name': 'user', 'path': 'home/user'}, {'name': 'swh-environment', 'path': 'home/user/swh-environment'}, {'name': 'swh-web', 'path': 'home/user/swh-environment/swh-web'} ] path_info = utils.gen_path_info(input_path) self.assertEquals(path_info, expected_result) input_path = 'home/user/swh-environment/swh-web' path_info = utils.gen_path_info(input_path) self.assertEquals(path_info, expected_result) @istest - def get_mimetype_for_content(self): + def get_mimetype_and_encoding_for_content(self): text = b'Hello world!' 
- self.assertEqual(utils.get_mimetype_for_content(text), 'text/plain') + self.assertEqual(utils.get_mimetype_and_encoding_for_content(text), + ('text/plain', 'us-ascii')) @patch('swh.web.browse.utils.service') @istest def get_origin_visits(self, mock_service): mock_service.MAX_LIMIT = 2 def _lookup_origin_visits(*args, **kwargs): if kwargs['last_visit'] is None: return [{'visit': 1}, {'visit': 2}] else: return [{'visit': 3}] mock_service.lookup_origin_visits.side_effect = _lookup_origin_visits origin_visits = utils.get_origin_visits(1) self.assertEqual(len(origin_visits), 3) @patch('swh.web.browse.utils.service') @istest def test_get_origin_visit_branches(self, mock_service): mock_service.lookup_origin_visit.return_value = \ {'date': '2015-08-04T22:26:14.804009+00:00', 'metadata': {}, 'occurrences': { 'refs/heads/master': { 'target': '9fbd21adbac36be869514e82e2e98505dc47219c', 'target_type': 'revision', 'target_url': '/api/1/revision/9fbd21adbac36be869514e82e2e98505dc47219c/' # noqa }, 'refs/tags/0.10.0': { 'target': '6072557b6c10cd9a21145781e26ad1f978ed14b9', 'target_type': 'revision', 'target_url': '/api/1/revision/6072557b6c10cd9a21145781e26ad1f978ed14b9/' # noqa }, 'refs/tags/0.10.1': { 'target': 'ecc003b43433e5b46511157598e4857a761007bf', 'target_type': 'revision', 'target_url': '/api/1/revision/ecc003b43433e5b46511157598e4857a761007bf/' # noqa } }, 'origin': 1, 'origin_url': '/api/1/origin/1/', 'status': 'full', 'visit': 1} mock_service.lookup_revision_multiple.return_value = \ [{'directory': '828da2b80e41aa958b2c98526f4a1d2cc7d298b7'}, {'directory': '2df4cd84ecc65b50b1d5318d3727e02a39b8a4cf'}, {'directory': '28ba64f97ef709e54838ae482c2da2619a74a0bd'}] expected_result = [ {'name': 'refs/heads/master', 'revision': '9fbd21adbac36be869514e82e2e98505dc47219c', 'directory': '828da2b80e41aa958b2c98526f4a1d2cc7d298b7'}, {'name': 'refs/tags/0.10.0', 'revision': '6072557b6c10cd9a21145781e26ad1f978ed14b9', 'directory': '2df4cd84ecc65b50b1d5318d3727e02a39b8a4cf'}, 
{'name': 'refs/tags/0.10.1', 'revision': 'ecc003b43433e5b46511157598e4857a761007bf', 'directory': '28ba64f97ef709e54838ae482c2da2619a74a0bd'} ] origin_visit_branches = utils.get_origin_visit_branches(1, 1) self.assertEqual(origin_visit_branches, expected_result) @istest def gen_link(self): self.assertEqual(utils.gen_link('https://www.softwareheritage.org/', 'SWH'), # noqa 'SWH') @istest def gen_person_link(self): person_id = 8221896 person_name = 'Antoine Lambert' person_url = reverse('browse-person', kwargs={'person_id': person_id}) self.assertEqual(utils.gen_person_link(person_id, person_name), '%s' % (person_url, person_name)) @istest def gen_revision_link(self): revision_id = '28a0bc4120d38a394499382ba21d6965a67a3703' revision_url = reverse('browse-revision', kwargs={'sha1_git': revision_id}) self.assertEqual(utils.gen_revision_link(revision_id), '%s' % (revision_url, revision_id)) self.assertEqual(utils.gen_revision_link(revision_id, shorten_id=True), '%s' % (revision_url, revision_id[:7])) # noqa @istest def test_prepare_revision_log_for_display_no_contex(self): per_page = 10 first_page_logs_data = revision_history_log_test[:per_page+1] second_page_logs_data = revision_history_log_test[per_page:2*per_page+1] # noqa third_page_logs_data = revision_history_log_test[2*per_page:3*per_page+1] # noqa last_page_logs_data = revision_history_log_test[3*per_page:3*per_page+5] # noqa revision_log_display_data = utils.prepare_revision_log_for_display( first_page_logs_data, per_page, None) self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(first_page_logs_data, per_page)) self.assertEqual(revision_log_display_data['prev_rev'], first_page_logs_data[-1]['id']) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], first_page_logs_data[0]['id']) self.assertEqual(revision_log_display_data['next_rev'], None) self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], None) old_prev_revs_bc = 
str(revision_log_display_data['prev_revs_breadcrumb']) # noqa revision_log_display_data = utils.prepare_revision_log_for_display( second_page_logs_data, per_page, old_prev_revs_bc) self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(second_page_logs_data, per_page)) self.assertEqual(revision_log_display_data['prev_rev'], second_page_logs_data[-1]['id']) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], old_prev_revs_bc + '/' + second_page_logs_data[0]['id']) # noqa self.assertEqual(revision_log_display_data['next_rev'], old_prev_revs_bc) self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], None) old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb']) # noqa revision_log_display_data = utils.prepare_revision_log_for_display( third_page_logs_data, per_page, old_prev_revs_bc) self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(third_page_logs_data, per_page)) # noqa self.assertEqual(revision_log_display_data['prev_rev'], third_page_logs_data[-1]['id']) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], old_prev_revs_bc + '/' + third_page_logs_data[0]['id']) # noqa self.assertEqual(revision_log_display_data['next_rev'], old_prev_revs_bc.split('/')[-1]) self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], '/'.join(old_prev_revs_bc.split('/')[:-1])) old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb']) # noqa revision_log_display_data = utils.prepare_revision_log_for_display( last_page_logs_data, per_page, old_prev_revs_bc) self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(last_page_logs_data, per_page)) # noqa self.assertEqual(revision_log_display_data['prev_rev'], None) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], None) # noqa self.assertEqual(revision_log_display_data['next_rev'], old_prev_revs_bc.split('/')[-1]) # noqa 
self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], '/'.join(old_prev_revs_bc.split('/')[:-1])) @istest def test_prepare_revision_log_for_display_origin_contex(self): per_page = 10 first_page_logs_data = revision_history_log_test[:per_page+1] second_page_logs_data = revision_history_log_test[per_page:2*per_page+1] # noqa third_page_logs_data = revision_history_log_test[2*per_page:3*per_page+1] # noqa last_page_logs_data = revision_history_log_test[3*per_page:3*per_page+5] # noqa revision_log_display_data = utils.prepare_revision_log_for_display( first_page_logs_data, per_page, None, origin_context=True) self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(first_page_logs_data, per_page)) self.assertEqual(revision_log_display_data['prev_rev'], first_page_logs_data[-1]['id']) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], first_page_logs_data[-1]['id']) self.assertEqual(revision_log_display_data['next_rev'], None) self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], None) old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb']) # noqa revision_log_display_data = utils.prepare_revision_log_for_display( second_page_logs_data, per_page, old_prev_revs_bc, origin_context=True) # noqa self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(second_page_logs_data, per_page)) self.assertEqual(revision_log_display_data['prev_rev'], second_page_logs_data[-1]['id']) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], old_prev_revs_bc + '/' + second_page_logs_data[-1]['id']) # noqa self.assertEqual(revision_log_display_data['next_rev'], old_prev_revs_bc) self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], None) old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb']) # noqa revision_log_display_data = utils.prepare_revision_log_for_display( third_page_logs_data, per_page, old_prev_revs_bc, 
origin_context=True) # noqa self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(third_page_logs_data, per_page)) # noqa self.assertEqual(revision_log_display_data['prev_rev'], third_page_logs_data[-1]['id']) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], old_prev_revs_bc + '/' + third_page_logs_data[-1]['id']) # noqa self.assertEqual(revision_log_display_data['next_rev'], old_prev_revs_bc.split('/')[-1]) self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], '/'.join(old_prev_revs_bc.split('/')[:-1])) old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb']) # noqa revision_log_display_data = utils.prepare_revision_log_for_display( last_page_logs_data, per_page, old_prev_revs_bc, origin_context=True) # noqa self.assertEqual(revision_log_display_data['revision_log_data'], utils._format_log_entries(last_page_logs_data, per_page)) # noqa self.assertEqual(revision_log_display_data['prev_rev'], None) self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'], None) # noqa self.assertEqual(revision_log_display_data['next_rev'], old_prev_revs_bc.split('/')[-1]) # noqa self.assertEqual(revision_log_display_data['next_revs_breadcrumb'], '/'.join(old_prev_revs_bc.split('/')[:-1])) diff --git a/swh/web/tests/browse/views/data/content_test_data.py b/swh/web/tests/browse/views/data/content_test_data.py index 16d5fb76..df442115 100644 --- a/swh/web/tests/browse/views/data/content_test_data.py +++ b/swh/web/tests/browse/views/data/content_test_data.py @@ -1,182 +1,203 @@ # Copyright (C) 2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information # flake8: noqa import os stub_content_root_dir = '08e8329257dad3a3ef7adea48aa6e576cd82de5b' stub_content_text_file = \ """ /* This file is part of the KDE project * * This 
library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public License * along with this library; see the file COPYING.LIB. If not, write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301, USA. */ #ifndef KATE_SESSION_TEST_H #define KATE_SESSION_TEST_H #include class KateSessionTest : public QObject { Q_OBJECT private Q_SLOTS: void init(); void cleanup(); void initTestCase(); void cleanupTestCase(); void create(); void createAnonymous(); void createAnonymousFrom(); void createFrom(); void documents(); void setFile(); void setName(); void timestamp(); private: class QTemporaryFile *m_tmpfile; }; #endif """ stub_content_text_file_no_highlight = \ """ GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. 
We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. 
The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. """ stub_content_text_data = { 'checksums': { 'sha1': '5ecd9f37b7a2d2e9980d201acd6286116f2ba1f1', 'sha1_git': '537b47f68469c1c916c1bfbc072599133bfcbb21', 'sha256': 'b3057544f04e5821ab0e2a007e2ceabd7de2dfb1d42a764f1de8d0d2eff80006', 'blake2s256': '25117fa9f124d5b771a0a7dfca9c7a57247d81f8343334b4b41c782c7f7ed64d' }, 'length': 1317, 'raw_data': str.encode(stub_content_text_file), 'mimetype': 'text/x-c++', + 'encoding': 'us-ascii', 'language': 'c++', 'licenses': 'GPL' } stub_content_text_no_highlight_data = { 'checksums': { 'sha1': '8624bcdae55baeef00cd11d5dfcfa60f68710a02', 'sha1_git': '94a9ed024d3859793618152ea559a168bbcbb5e2', 'sha256': '8ceb4b9ee5adedde47b31e975c1d90c73ad27b6b165a1dcd80c7c545eb65b903', 'blake2s256': '38702b7168c7785bfe748b51b45d9856070ba90f9dc6d90f2ea75d4356411ffe' }, 'length': 35147, 'raw_data': str.encode(stub_content_text_file_no_highlight), 'mimetype': 'text/plain', + 'encoding': 'us-ascii', 'language': 'not detected', 'licenses': 'GPL' } stub_content_text_path = 'kate/autotests/session_test.h' stub_content_text_path_with_root_dir = stub_content_root_dir + '/' + stub_content_text_path stub_content_bin_filename = 'swh-logo.png' 
png_file_path = os.path.dirname(__file__) + '/' + stub_content_bin_filename with open(png_file_path, 'rb') as png_file: stub_content_bin_data = { 'checksums': { 'sha1': 'd0cec0fc2d795f0077c18d51578cdb228eaf6a99', 'sha1_git': '02328b91cfad800e1d2808cfb379511b79679ebc', 'sha256': 'e290592e2cfa9767497011bda4b7e273b4cf29e7695d72ecacbd723008a29144', 'blake2s256': '7177cad95407952e362ee326a800a9d215ccd619fdbdb735bb51039be81ab9ce' }, 'length': 18063, 'raw_data': png_file.read(), 'mimetype': 'image/png', + 'encoding': 'binary', 'language': 'not detected', 'licenses': 'not detected' - } \ No newline at end of file + } + +_non_utf8_encoding_file_path = os.path.dirname(__file__) + '/iso-8859-1_encoded_content' + +non_utf8_encoded_content_data = { + 'checksums': { + 'sha1': '62cb71aa3534a03c12572157d20fa893753b03b6', + 'sha1_git': '2f7470d0b26108130e71087e42a53c032473499c', + 'sha256': 'aaf364ccd3acb546829ccc0e8e5e293e924c8a2e55a67cb739d249016e0034ed', + 'blake2s256': 'b7564932460a7c2697c53bd55bd855272490da511d64b20c5a04f636dc9ac467' + }, + 'length': 111000 +} + +non_utf8_encoding = 'iso-8859-1' + +with open(_non_utf8_encoding_file_path, 'rb') as iso88591_file: + non_utf8_encoded_content = iso88591_file.read() + \ No newline at end of file diff --git a/swh/web/tests/browse/views/data/iso-8859-1_encoded_content b/swh/web/tests/browse/views/data/iso-8859-1_encoded_content new file mode 100644 index 00000000..2f7470d0 --- /dev/null +++ b/swh/web/tests/browse/views/data/iso-8859-1_encoded_content @@ -0,0 +1,279 @@ + 4) + {header("Location: login.php?mess=".$mess_inc_non_autorise); exit();} + +*/ + +if (isset($_POST['f_deconnect'])) { + //log + require "../utiles/config.inc"; + require "../utiles/fonctions.inc"; + if ($conf_log) {ajout_to_log("LOGOUT");} + session_destroy(); header("Location: login.php?mess=".urlencode("Vous êtes déconnecté")); exit(); + } + + +// Load TemTab class et messages.inc +require "../classes/class.TemTab.php"; +require "../classes/class.ReqSql.php"; 
+require "../utiles/config.inc"; +require "../utiles/c.inc"; +require "../utiles/fonctions.inc"; + +require "../utiles/secure-session.inc"; + +// Accès ok pour niveaux 1 à 2 +niveau_ok(1,2); + +// compute starting time with microseconds precision +if ($conf_aff_req_sql) { + $mtime = explode(" ",microtime()); + $starttime = $mtime[1] + $mtime[0]; + } + +// Soyons prudent et portable, désactivons magic_quote pour ce script. merci nexen ;) if (get_magic_quotes_gpc()) { + function stripslashes_deep($value) + { + $value = is_array($value) ? + array_map('stripslashes_deep', $value) : + stripslashes($value); + + return $value; + } + + $_POST = array_map('stripslashes_deep', $_POST); + $_GET = array_map('stripslashes_deep', $_GET); + $_COOKIE = array_map('stripslashes_deep', $_COOKIE); +} + +/**************************/ +/* Crée les objets TemTab */ +/**************************/ + + // top + $template_file = "../html/meta.html"; + $template_meta = new TemTab($template_file); + + if ($_SESSION["S_niveau"] == 1) {$template_file = "../html/tete-admin-statistiques.html";} + if ($_SESSION["S_niveau"] == 2) {$template_file = "../html/tete-statistiques.html";} + $template_tete = new TemTab($template_file); + + // corps + $template_file = "../html/statistiques.html"; + $template = new TemTab($template_file); + + // pied $template_file = "../html/pied.html"; + $template_pied = new TemTab($template_file); + +// Some initializations +$sel="";$from="";$whe="";$col_val="";$affmess=""; + + +/************************/ +/* Traitement du script */ +/************************/ + + // Préparation de l'info alias / destination générique + $info_alias_dest_gen = ""; $client_auth_gen = false; + $client_auth_gen = ($conf_alias_dest_gen or $_SESSION["S_niveau"] == 1); + if (!$client_auth_gen) { + $info_alias_dest_gen = "Vous n\'avez pas le droit de modifier des alias ou destinations génériques, du type "@domain.tld " ! 
Vous devez demander cette modification à l\'administrateur du système."; + } + else { + $info_alias_dest_gen = "Vous avez le droit de modifier des alias ou destinations génériques, du type "@domain.tld "."; + } + + $domaines = str_replace(" ", "", $_SESSION["S_domaines"]); + + // construction de la clause where en fonction des domaines autorisés par le login + $whe_dom = ""; + $whe_dom = $conf_champ_alias." like \"%".$domaines; + $whe_dom = str_replace(",", "\" or ".$conf_champ_alias." like \"%", $whe_dom); + $whe_dom = $whe_dom."\""; + + // recherche des alias et destinations génériques + $alias_dest_gen = new ReqSql($hm_, $um_, $pm_, $nm_); + $sel = $conf_champ_alias.", ".$conf_champ_destination; + $from = $conf_table_alias; + $order = $conf_champ_alias." ASC"; + $alias_dest_gen->RS_select($sel, $from, $whe_dom, $order); + mysql_free_result($alias_dest_gen->RS_res); + + $alias_dest_gen_array = array(); + foreach($alias_dest_gen->RS_ligne as $val1) { + foreach($val1 as $key2 => $val2) { + if (($key2 == "0" or $key2 == "1") and (substr($val2, 0, 1) == "@")) { + array_push($alias_dest_gen_array, "".$val1["0"]." => ".$val1["1"]); + } // if (($key2 == "0" or $key2 == "1") and .. 
+ } // foreach($val1 as $key2 => $val2) + } // foreach($alias_dest_gen->RS_ligne as $val1) + + $alias_dest_gen_array = array_unique($alias_dest_gen_array); + + + // Recherche des domaines ayant un alias + $dom_login = new ReqSql($hm_, $um_, $pm_, $nm_); +// Pas mal, le select et le group by, là ;-) + $sel = "SUBSTRING(".$conf_champ_alias.", LOCATE('@', ".$conf_champ_alias.") + 1, LENGTH(".$conf_champ_alias.")) as nom_domaine, count( SUBSTRING(".$conf_champ_alias.", LOCATE('@', ".$conf_champ_alias.") + 1, LENGTH(".$conf_champ_alias."))) as nb_domaine"; + $from = $conf_table_alias; + $group = "SUBSTRING(".$conf_champ_alias.", LOCATE('@', ".$conf_champ_alias.") + 1, LENGTH(".$conf_champ_alias."))"; + $dom_login->RS_select_group($sel, $from, $whe_dom, $group); + mysql_free_result($dom_login->RS_res); + + // Ajout des domaines du fichier de config n'ayant pas d'alias, puis tri + $domaine_array = array();$dom_array_compare = array();$dom_array_all = array(); + // dédoublonner le tableau de la requete + preparer un tableau simple de comparaison + foreach($dom_login->RS_ligne as $val) { + array_push($domaine_array, array_unique($val)); + array_push($dom_array_compare, $val["0"]); + } + + // Tableau simple de tous les domaines autorisé + $dom_array_all = explode(",", $domaines); + + // ajout des domaines du fichier de config n'ayant pas d'alias a ceux qui en ont + foreach($dom_array_all as $val) { + if (!in_array($val, $dom_array_compare)) {array_push($domaine_array, array($val, 0));} + } + + // Tri (voir bas de page : http://www.nexen.net/docs/php/annotee/function.array-multisort.php ) + foreach ($domaine_array as $key => $row) { + $ligne0[$key] = $row['0']; + $ligne1[$key] = $row['1']; + } + array_multisort($ligne0, SORT_ASC, $ligne1, SORT_DESC, $domaine_array); + + // Correspondance login / domaines et liste des domaines de la table transport + $f_base_login = "";$f_base_domaines = ""; + if ($_SESSION["S_niveau"] == 1) { + $liste_login = new ReqSql($hl_, $ul_, $pl_, 
$nl_); + $sel = $conf_champ_login.",".$conf_champ_domaines; + $from = $conf_table_login; + $liste_login->RS_select($sel, $from, "", $conf_champ_login); + mysql_free_result($liste_login->RS_res); + $f_base_login = "Login => domaines gérés
(".$liste_login->RS_nb_ligne." login trouvé(s) dans dans la table ".$conf_table_login." de la base de données ".$conf_base_login.". All = tous les domaines [super admin] )"; + + $liste_dom = new ReqSql($hm_, $um_, $pm_, $nm_); + $sel = $conf_champ_domaine.",".$conf_champ_transport; + $from = $conf_table_transport; + $liste_dom->RS_select($sel, $from, "", $conf_champ_domaine); + mysql_free_result($liste_dom->RS_res); + $f_base_domaines = "Domaines [transport]
// NOTE(review): this hunk starts mid-statement — the assignment that this string
// tail belongs to (a status message reporting how many domains were found) begins
// before this excerpt.
(".$liste_dom->RS_nb_ligne." domaine(s) trouvé(s) dans la table ".$conf_table_transport." de la base de données ".$conf_base_mail.".";

    } // if ($_SESSION["S_niveau"] == 1)


/***********************************/
/* TemTab variable processing      */
/***********************************/

    // Render the meta tags
    $template_meta->replace_var("t_meta_charset", $conf_meta_charset);
    $template_meta->replace_var("t_meta_general_keywords", $conf_meta_general_keywords);
    $template_meta->replace_var("t_meta_particulier_keywords", $meta_particulier_keywords);
    $template_meta->replace_var("t_meta_copyright", $conf_meta_copyright);
    $template_meta->replace_var("t_meta_author", $conf_meta_author);
    $template_meta->replace_var("t_meta_generator", $conf_meta_generator);
    $template_meta->replace_var("t_meta_description", $conf_meta_description);


    // Render the titles
    $template_meta->replace_var("t_nom_site", $conf_nom_site);

    // Render the site identifier
    $template_tete->replace_var("t_conf_name_log", $conf_name_log);

    // Logout form
    $template_tete->replace_var("t_script_name", $conf_script_name);

    // Render the generic aliases and destinations
    $template->replace_loop_vars_one("loop_gen", $alias_dest_gen_array);

    // Render the alias statistics
    $template->replace_loop_vars("loop_stats", $domaine_array);

    // Render the account statistics
    $template->replace_loop_vars("loop_stats_comptes", 0);

    // Render the login / domain mappings
    // (level 1 = admin sees the full list; level 2 gets an empty array,
    // errors suppressed with @)
    $template->replace_var("t_base_login_domaines", $f_base_login);
    if ($_SESSION["S_niveau"] == 1) {$template->replace_loop_vars("loop_liste_login_domaines", $liste_login->RS_ligne);}
    else {@$template->replace_loop_vars("loop_liste_login_domaines", $login_domaines_array);} // empty array if level 2

    // Render the domains of the transport table (same level-based gating)
    $template->replace_var("t_base_domaines", $f_base_domaines);
    if ($_SESSION["S_niveau"] == 1) {$template->replace_loop_vars("loop_liste_domaines", $liste_dom->RS_ligne);}
    else {@$template->replace_loop_vars("loop_liste_domaines", $login_domaines_array);} // empty array if level 2

    // Render the generic alias / destination info
    $template->replace_var("t_info_alias_dest_gen", $info_alias_dest_gen);

    // Render the login
    $f_login = $_SESSION["S_login"];
    $template->replace_var("t_login", $f_login);


// Prepare a message optionally passed via GET (logout message, typically).
// NOTE(review): $_GET["mess"] is echoed into the page below — presumably the
// template escapes it; verify, otherwise this is an XSS vector.
if (isset($_GET["mess"])) {$affmess = stripslashes($_GET["mess"]);}
if (!isset($affmess)) {$affmess = "";}
if (isset($_GET["mess"]) or $affmess != "") {
    // NOTE(review): the markup originally wrapped around $affmess appears to have
    // been stripped by text extraction; only whitespace remains in the literal.
    $template->replace_var("t_mess_accueil", "
".$affmess."
 
");
    }


/*****************************************/
/* Finish building the templates         */
/*****************************************/

    // get rid of unused variables
    $template_meta->remove_all_vars();
    $template_tete->remove_all_vars();
    $template->remove_all_vars();
    $template_pied->remove_all_vars();

    // and finally get the outputs
    print ($template_meta->get_output());
    print ($template_tete->get_output());
    print ($template->get_output());
    print ($template_pied->get_output());


// Compute elapsed time with microseconds precision ($starttime is set before
// this excerpt).
if ($conf_aff_req_sql) {
    $mtime = explode(" ",microtime());
    $endtime = $mtime[1] + $mtime[0] - $starttime;
    print "\n";
    // NOTE(review): the closing quote, semicolon and brace of this print lie
    // beyond this hunk; the string literal spans multiple lines (its original
    // markup was stripped by extraction).
    print "
Durée du traitement: ".$endtime." microsecondes
 \n
"; + } + + + +?> + diff --git a/swh/web/tests/browse/views/test_content.py b/swh/web/tests/browse/views/test_content.py index aa31bf9b..d5ccd3b1 100644 --- a/swh/web/tests/browse/views/test_content.py +++ b/swh/web/tests/browse/views/test_content.py @@ -1,200 +1,228 @@ # Copyright (C) 2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import base64 from unittest.mock import patch from nose.tools import istest from django.test import TestCase from django.utils.html import escape +from django.utils.encoding import DjangoUnicodeDecodeError from swh.web.common.utils import reverse from swh.web.browse.utils import ( gen_path_info ) from .data.content_test_data import ( stub_content_text_data, stub_content_text_path_with_root_dir, stub_content_bin_data, stub_content_bin_filename, - stub_content_text_no_highlight_data + stub_content_text_no_highlight_data, + non_utf8_encoded_content_data, + non_utf8_encoded_content, + non_utf8_encoding ) class SwhBrowseContentTest(TestCase): @patch('swh.web.browse.views.content.request_content') @istest def content_view_text(self, mock_request_content): mock_request_content.return_value = stub_content_text_data url = reverse('browse-content', kwargs={'query_string': stub_content_text_data['checksums']['sha1']}) # noqa url_raw = reverse('browse-content-raw', kwargs={'query_string': stub_content_text_data['checksums']['sha1']}) # noqa resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertTemplateUsed('content.html') self.assertContains(resp, '') self.assertContains(resp, escape(stub_content_text_data['raw_data'])) self.assertContains(resp, url_raw) @patch('swh.web.browse.views.content.request_content') @istest def content_view_text_no_highlight(self, mock_request_content): mock_request_content.return_value = 
stub_content_text_no_highlight_data url = reverse('browse-content', kwargs={'query_string': stub_content_text_no_highlight_data['checksums']['sha1']}) # noqa url_raw = reverse('browse-content-raw', kwargs={'query_string': stub_content_text_no_highlight_data['checksums']['sha1']}) # noqa resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertTemplateUsed('content.html') self.assertContains(resp, '') self.assertContains(resp, escape(stub_content_text_no_highlight_data['raw_data'])) # noqa self.assertContains(resp, url_raw) + @patch('swh.web.browse.utils.service') + @istest + def content_view_no_utf8_text(self, mock_service): + mock_service.lookup_content.return_value = \ + non_utf8_encoded_content_data + + mock_service.lookup_content_raw.return_value = \ + {'data': non_utf8_encoded_content} + + mock_service.lookup_content_filetype.return_value = None + mock_service.lookup_content_language.return_value = None + mock_service.lookup_content_license.return_value = None + + url = reverse('browse-content', + kwargs={'query_string': non_utf8_encoded_content_data['checksums']['sha1']}) # noqa + + try: + resp = self.client.get(url) + self.assertEquals(resp.status_code, 200) + self.assertTemplateUsed('content.html') + self.assertContains(resp, escape(non_utf8_encoded_content.decode(non_utf8_encoding).encode('utf-8'))) # noqa + except DjangoUnicodeDecodeError: + self.fail('Textual content is not encoded in utf-8') + @patch('swh.web.browse.views.content.request_content') @istest def content_view_image(self, mock_request_content): mime_type = 'image/png' mock_request_content.return_value = stub_content_bin_data url = reverse('browse-content', kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}) # noqa url_raw = reverse('browse-content-raw', kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}) # noqa resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertTemplateUsed('content.html') pngEncoded = 
base64.b64encode(stub_content_bin_data['raw_data']) \ .decode('utf-8') self.assertContains(resp, '' % (mime_type, pngEncoded)) self.assertContains(resp, url_raw) @patch('swh.web.browse.views.content.request_content') @istest def content_view_with_path(self, mock_request_content): mock_request_content.return_value = stub_content_text_data url = reverse('browse-content', kwargs={'query_string': stub_content_text_data['checksums']['sha1']}, # noqa query_params={'path': stub_content_text_path_with_root_dir}) # noqa resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertTemplateUsed('content.html') self.assertContains(resp, '') self.assertContains(resp, escape(stub_content_text_data['raw_data'])) split_path = stub_content_text_path_with_root_dir.split('/') root_dir_sha1 = split_path[0] filename = split_path[-1] path = stub_content_text_path_with_root_dir \ .replace(root_dir_sha1 + '/', '') \ .replace(filename, '') path_info = gen_path_info(path) root_dir_url = reverse('browse-directory', kwargs={'sha1_git': root_dir_sha1}) self.assertContains(resp, '
  • ', count=len(path_info)+1) self.assertContains(resp, '' + root_dir_sha1[:7] + '') for p in path_info: dir_url = reverse('browse-directory', kwargs={'sha1_git': root_dir_sha1, 'path': p['path']}) self.assertContains(resp, '' + p['name'] + '') self.assertContains(resp, '
  • ' + filename + '
  • ') url_raw = reverse('browse-content-raw', kwargs={'query_string': stub_content_text_data['checksums']['sha1']}, # noqa query_params={'filename': filename}) self.assertContains(resp, url_raw) @patch('swh.web.browse.views.content.request_content') @istest def test_content_raw_text(self, mock_request_content): mock_request_content.return_value = stub_content_text_data url = reverse('browse-content-raw', kwargs={'query_string': stub_content_text_data['checksums']['sha1']}) # noqa resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertEqual(resp['Content-Type'], 'text/plain') self.assertEqual(resp['Content-disposition'], 'filename=%s_%s' % ('sha1', stub_content_text_data['checksums']['sha1'])) # noqa self.assertEqual(resp.content, stub_content_text_data['raw_data']) filename = stub_content_text_path_with_root_dir.split('/')[-1] url = reverse('browse-content-raw', kwargs={'query_string': stub_content_text_data['checksums']['sha1']}, # noqa query_params={'filename': filename}) resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertEqual(resp['Content-Type'], 'text/plain') self.assertEqual(resp['Content-disposition'], 'filename=%s' % filename) self.assertEqual(resp.content, stub_content_text_data['raw_data']) @patch('swh.web.browse.views.content.request_content') @istest def content_raw_bin(self, mock_request_content): mock_request_content.return_value = stub_content_bin_data url = reverse('browse-content-raw', kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}) # noqa resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertEqual(resp['Content-Type'], 'application/octet-stream') self.assertEqual(resp['Content-disposition'], 'attachment; filename=%s_%s' % ('sha1', stub_content_bin_data['checksums']['sha1'])) self.assertEqual(resp.content, stub_content_bin_data['raw_data']) url = reverse('browse-content-raw', kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}, # 
noqa query_params={'filename': stub_content_bin_filename}) resp = self.client.get(url) self.assertEquals(resp.status_code, 200) self.assertEqual(resp['Content-Type'], 'application/octet-stream') self.assertEqual(resp['Content-disposition'], 'attachment; filename=%s' % stub_content_bin_filename) self.assertEqual(resp.content, stub_content_bin_data['raw_data'])