Page Menu
Home
Software Heritage
Search
Configure Global Search
Log In
Files
F9341862
No One
Temporary
Actions
View File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Flag For Later
Size
154 KB
Subscribers
None
View Options
diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py
index a9ece627..15c8d458 100644
--- a/swh/web/browse/utils.py
+++ b/swh/web/browse/utils.py
@@ -1,839 +1,839 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import base64
import dateutil
import magic
import math
import stat
from django.core.cache import cache
from django.utils.safestring import mark_safe
from importlib import reload
from swh.web.common import highlightjs, service
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, format_utc_iso_date, parse_timestamp
)
from swh.web.config import get_config
def get_directory_entries(sha1_git):
    """Retrieve the content of a SWH directory from the SWH archive.

    Entries are sorted in lexicographical order and then split between
    sub-directories and regular files. Results are cached to speed up
    repeated lookups.

    Args:
        sha1_git: sha1_git identifier of the directory

    Returns:
        A tuple whose first member is the list of sub-directory entries
        and second member the list of regular file entries

    Raises:
        NotFoundExc if the directory is not found
    """
    cache_key = 'directory_entries_%s' % sha1_git
    cached = cache.get(cache_key)
    if cached:
        return cached
    entries = sorted(service.lookup_directory(sha1_git),
                     key=lambda e: e['name'])
    # turn raw permission integers into 'ls -l' style strings
    for entry in entries:
        entry['perms'] = stat.filemode(entry['perms'])
    dirs = [e for e in entries if e['type'] == 'dir']
    files = [e for e in entries if e['type'] == 'file']
    cache.set(cache_key, (dirs, files))
    return dirs, files
def get_mimetype_and_encoding_for_content(content):
    """Function that returns the mime type and the encoding associated to
    a content buffer using the magic module under the hood.

    Args:
        content (bytes): a content buffer

    Returns:
        A tuple (mimetype, encoding), for instance ('text/plain', 'us-ascii'),
        associated to the provided content. Falls back to
        ('unknown', 'unknown') if detection fails with an unexpected error.
    """
    # initialize defaults so the function cannot hit an UnboundLocalError
    # when the very first detect_from_content call raises an unexpected
    # exception (the original code only assigned these inside the try)
    mime_type = 'unknown'
    encoding = 'unknown'
    while True:
        try:
            magic_result = magic.detect_from_content(content)
            mime_type = magic_result.mime_type
            encoding = magic_result.encoding
            break
        except Exception as exc:
            # workaround an issue with the magic module who can fail
            # if detect_from_content is called multiple times in
            # a short amount of time
            if 'too many values to unpack' in str(exc):
                reload(magic)
            else:
                break
    return mime_type, encoding
# maximum authorized content size in bytes for HTML display
# with code highlighting; read once at module import time from
# the swh web configuration
content_display_max_size = get_config()['content_display_max_size']
def request_content(query_string, max_size=content_display_max_size):
    """Function that retrieves a SWH content from the SWH archive.

    Raw bytes content is first retrieved, then the content mime type.
    If the mime type is not stored in the archive, it will be computed
    using Python magic module.

    Args:
        query_string: a string of the form "[ALGO_HASH:]HASH" where
            optional ALGO_HASH can be either *sha1*, *sha1_git*, *sha256*,
            or *blake2s256* (default to *sha1*) and HASH the hexadecimal
            representation of the hash value
        max_size: the maximum size for a content to retrieve (default to 1MB,
            no size limit if None)

    Returns:
        A dict with the content metadata: 'raw_data' (bytes, or None
        when the content is larger than max_size), 'mimetype',
        'encoding', 'language' and 'licenses'.

    Raises:
        NotFoundExc if the content is not found
    """
    content_data = service.lookup_content(query_string)
    # indexed metadata; each lookup may return None for contents
    # that have not been indexed yet
    filetype = service.lookup_content_filetype(query_string)
    language = service.lookup_content_language(query_string)
    license = service.lookup_content_license(query_string)
    mimetype = 'unknown'
    encoding = 'unknown'
    if filetype:
        mimetype = filetype['mimetype']
        encoding = filetype['encoding']
    # only fetch raw bytes for contents small enough to be displayed
    if not max_size or content_data['length'] < max_size:
        content_raw = service.lookup_content_raw(query_string)
        content_data['raw_data'] = content_raw['data']
        if not filetype:
            # mime type not indexed: detect it from the raw bytes
            mimetype, encoding = \
                get_mimetype_and_encoding_for_content(content_data['raw_data'])
        # encode textual content to utf-8 if needed
        if mimetype.startswith('text/'):
            # probably a malformed UTF-8 content, reencode it
            # by replacing invalid chars with a substitution one
            if encoding == 'unknown-8bit':
                content_data['raw_data'] = \
                    content_data['raw_data'].decode('utf-8', 'replace')\
                                            .encode('utf-8')
            elif 'ascii' not in encoding and encoding not in ['utf-8', 'binary']: # noqa
                content_data['raw_data'] = \
                    content_data['raw_data'].decode(encoding, 'replace')\
                                            .encode('utf-8')
    else:
        # too large for display: callers get metadata only
        content_data['raw_data'] = None
    content_data['mimetype'] = mimetype
    content_data['encoding'] = encoding
    if language:
        content_data['language'] = language['lang']
    else:
        content_data['language'] = 'not detected'
    if license:
        content_data['licenses'] = ', '.join(license['licenses'])
    else:
        content_data['licenses'] = 'not detected'
    return content_data
# image mime types that web browsers can render natively; contents with
# one of these types are base64-encoded for inline HTML display
_browsers_supported_image_mimes = {'image/gif', 'image/png',
                                   'image/jpeg', 'image/bmp',
                                   'image/webp'}
def prepare_content_for_display(content_data, mime_type, path):
    """Prepare a content for HTML display.

    The function tries to associate a programming language to a
    content in order to perform syntax highlighting client-side
    using highlightjs. The language is determined using either
    the content filename or its mime type.
    If the mime type corresponds to an image format supported
    by web browsers, the content will be encoded in base64
    for displaying the image.

    Args:
        content_data (bytes): raw bytes of the content
        mime_type (string): mime type of the content
        path (string): path of the content including filename

    Returns:
        A dict containing the content bytes (possibly different from the one
        provided as parameter if it is an image) under the key 'content_data'
        and the corresponding highlightjs language class under the
        key 'language'.
    """
    # filename takes precedence over mime type for language detection
    language = highlightjs.get_hljs_language_from_filename(path)
    if not language:
        language = highlightjs.get_hljs_language_from_mime_type(mime_type)
        if not language:
            language = 'nohighlight-swh'
        elif mime_type.startswith('application/'):
            # highlightjs matched an application/* type: treat it as text
            mime_type = mime_type.replace('application/', 'text/')
    if mime_type.startswith('image/'):
        if mime_type in _browsers_supported_image_mimes:
            # base64-encode so the image can be inlined in the HTML view
            content_data = base64.b64encode(content_data)
        else:
            # image format browsers cannot render: nothing to display
            content_data = None
    return {'content_data': content_data,
            'language': language}
def get_origin_visits(origin_info):
    """Function that returns the list of visits for a swh origin.
    That list is put in cache in order to speedup the navigation
    in the swh web browse ui.

    Args:
        origin_info (dict): a dict describing the origin, with at
            least an 'id' key

    Returns:
        A list of dict describing the origin visits, sorted by
        ascending visit date::

            [{'date': <UTC visit date in ISO format>,
              'origin': <origin id>,
              'status': <'full' | 'partial'>,
              'visit': <visit id>
             },
             ...
            ]

    Raises:
        NotFoundExc if the origin is not found
    """
    cache_entry_id = 'origin_%s_visits' % origin_info['id']
    cache_entry = cache.get(cache_entry_id)
    if cache_entry:
        return cache_entry
    origin_visits = []
    per_page = service.MAX_LIMIT
    last_visit = None
    # paginate through the visits; a page shorter than per_page
    # signals the last one
    while 1:
        visits = list(service.lookup_origin_visits(origin_info['id'],
                                                   last_visit=last_visit,
                                                   per_page=per_page))
        origin_visits += visits
        if len(visits) < per_page:
            break
        else:
            if not last_visit:
                last_visit = per_page
            else:
                last_visit += per_page

    def _visit_sort_key(visit):
        # sort primarily by visit date, with the visit id as a
        # tie-breaker for visits sharing the same timestamp
        ts = dateutil.parser.parse(visit['date']).timestamp()
        return ts + (float(visit['visit']) / 10e3)

    # drop the metadata field so each visit dict becomes hashable
    # (as a tuple of items) for the deduplication below
    for v in origin_visits:
        if 'metadata' in v:
            del v['metadata']
    # remove duplicated visits returned across pages
    origin_visits = [dict(t) for t in set([tuple(d.items())
                                           for d in origin_visits])]
    origin_visits = sorted(origin_visits, key=lambda v: _visit_sort_key(v))
    cache.set(cache_entry_id, origin_visits)
    return origin_visits
def get_origin_visit(origin_info, visit_ts=None, visit_id=None):
    """Function that returns information about a SWH visit for
    a given origin.
    The visit is retrieved from a provided timestamp.
    The closest visit from that timestamp is selected.

    Args:
        origin_info (dict): a dict filled with origin information
            (id, url, type)
        visit_ts (int or str): an ISO date string or Unix timestamp to parse
        visit_id (int): optional visit id for disambiguation in case
            several visits share the same timestamp

    Returns:
        A dict containing the visit info as described below::

            {'origin': 2,
             'date': '2017-10-08T11:54:25.582463+00:00',
             'metadata': {},
             'visit': 25,
             'status': 'full'}

    Raises:
        NotFoundExc if no visit can be found for the origin or the
        requested visit id / timestamp
    """
    visits = get_origin_visits(origin_info)
    if not visits:
        raise NotFoundExc('No SWH visit associated to origin with'
                          ' type %s and url %s!' % (origin_info['type'],
                                                    origin_info['url']))
    if visit_id:
        visit = [v for v in visits if v['visit'] == int(visit_id)]
        if len(visit) == 0:
            raise NotFoundExc(
                'Visit with id %s for origin with type %s'
                ' and url %s not found!' % (visit_id, origin_info['type'],
                                            origin_info['url']))
        return visit[0]
    if not visit_ts:
        # no timestamp provided: return the most recent visit
        return visits[-1]
    parsed_visit_ts = math.floor(parse_timestamp(visit_ts).timestamp())
    visit_idx = None
    for i, visit in enumerate(visits):
        ts = math.floor(parse_timestamp(visit['date']).timestamp())
        if i == 0 and parsed_visit_ts <= ts:
            return visit
        elif i == len(visits) - 1:
            if parsed_visit_ts >= ts:
                return visit
        else:
            next_ts = math.floor(
                parse_timestamp(visits[i+1]['date']).timestamp())
            if parsed_visit_ts >= ts and parsed_visit_ts < next_ts:
                # pick whichever neighbouring visit is closest in time
                if (parsed_visit_ts - ts) < (next_ts - parsed_visit_ts):
                    visit_idx = i
                    break
                else:
                    visit_idx = i+1
                    break
    # BUGFIX: visit_idx can legitimately be 0 (the closest visit is the
    # first one); the previous truthiness test 'if visit_idx:' wrongly
    # raised NotFoundExc in that case, so an explicit None check is needed
    if visit_idx is not None:
        visit = visits[visit_idx]
        # several visits can share the same date: return the last of them
        while visit_idx < len(visits) - 1 and \
                visit['date'] == visits[visit_idx+1]['date']:
            visit_idx = visit_idx + 1
            visit = visits[visit_idx]
        return visit
    else:
        raise NotFoundExc(
            'Visit with timestamp %s for origin with type %s and url %s not found!' % # noqa
            (visit_ts, origin_info['type'], origin_info['url']))
-def get_origin_visit_occurrences(origin_info, visit_ts=None, visit_id=None):
+def get_origin_visit_snapshot(origin_info, visit_ts=None, visit_id=None):
"""Function that returns the lists of branches and releases
associated to a swh origin for a given visit.
The visit is expressed by a timestamp. In the latter case,
the closest visit from the provided timestamp will be used.
If no visit parameter is provided, it returns the list of branches
found for the latest visit.
That list is put in cache in order to speedup the navigation
in the swh web browse ui.
Args:
origin_info (dict): a dict filled with origin information
(id, url, type)
visit_ts (int or str): an ISO date string or Unix timestamp to parse
visit_id (int): optional visit id for desambiguation in case
several visits have the same timestamp
Returns:
A tuple with two members. The first one is a list of dict describing
the origin branches for the given visit::
[{'name': <branch name>,
'revision': <sha1_git of the associated revision>,
'directory': <sha1_git of the associated root directory>
},
...
]
The second one is a list of dict describing the origin branches
for the given visit.
Raises:
NotFoundExc if the origin or its visit are not found
"""
visit_info = get_origin_visit(origin_info, visit_ts, visit_id)
visit = visit_info['visit']
- cache_entry_id = 'origin_%s_visit_%s_occurrences' % (origin_info['id'],
- visit)
+ cache_entry_id = 'origin_%s_visit_%s_snapshot' % (origin_info['id'],
+ visit)
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry['branches'], cache_entry['releases']
- origin_visit_data = service.lookup_origin_visit(origin_info['id'],
- visit)
+ origin_visit_snapshot = service.lookup_snapshot(visit_info['snapshot'])
+
branches = []
releases = []
revision_ids = []
releases_ids = []
- occurrences = origin_visit_data['occurrences']
- for key in sorted(occurrences.keys()):
- if occurrences[key]['target_type'] == 'revision':
+ snapshot_branches = origin_visit_snapshot['branches']
+ for key in sorted(snapshot_branches.keys()):
+ if snapshot_branches[key]['target_type'] == 'revision':
branches.append({'name': key,
- 'revision': occurrences[key]['target']})
- revision_ids.append(occurrences[key]['target'])
- elif occurrences[key]['target_type'] == 'release':
- releases_ids.append(occurrences[key]['target'])
+ 'revision': snapshot_branches[key]['target']})
+ revision_ids.append(snapshot_branches[key]['target'])
+ elif snapshot_branches[key]['target_type'] == 'release':
+ releases_ids.append(snapshot_branches[key]['target'])
releases_info = service.lookup_release_multiple(releases_ids)
for release in releases_info:
releases.append({'name': release['name'],
'date': format_utc_iso_date(release['date']),
'id': release['id'],
'message': release['message'],
'target_type': release['target_type'],
'target': release['target']})
revision_ids.append(release['target'])
revisions = service.lookup_revision_multiple(revision_ids)
branches_to_remove = []
for idx, revision in enumerate(revisions):
if idx < len(branches):
if revision:
branches[idx]['directory'] = revision['directory']
branches[idx]['date'] = format_utc_iso_date(revision['date'])
branches[idx]['message'] = revision['message']
else:
branches_to_remove.append(branches[idx])
else:
rel_idx = idx - len(branches)
if revision:
releases[rel_idx]['directory'] = revision['directory']
for b in branches_to_remove:
branches.remove(b)
cache.set(cache_entry_id, {'branches': branches, 'releases': releases})
return branches, releases
def gen_link(url, link_text, link_attrs=None):
    """
    Utility function for generating an HTML link to insert
    in Django templates.

    Args:
        url (str): an url
        link_text (str): the text for the produced link
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form '<a href="url">link_text</a>'
    """
    # None sentinel instead of a mutable default argument ({} is
    # evaluated once at definition time and shared across calls)
    attrs = ' '
    if link_attrs:
        for k, v in link_attrs.items():
            attrs += '%s="%s" ' % (k, v)
    link = '<a%shref="%s">%s</a>' % (attrs, url, link_text)
    return mark_safe(link)
def gen_person_link(person_id, person_name, link_attrs={}):
    """
    Utility function for generating a link to a SWH person HTML view
    to insert in Django templates.

    Args:
        person_id (int): a SWH person id
        person_name (str): the associated person name
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form '<a href="person_view_url">person_name</a>'
    """
    url = reverse('browse-person', kwargs={'person_id': person_id})
    return gen_link(url, person_name, link_attrs)
def gen_revision_link(revision_id, shorten_id=False, origin_context=None,
                      link_text=None, link_attrs={}):
    """
    Utility function for generating a link to a SWH revision HTML view
    to insert in Django templates.

    Args:
        revision_id (str): a SWH revision id
        shorten_id (boolean): whether to shorten the revision id to 7
            characters for the link text
        origin_context (dict): if provided, generate origin-dependent browsing
            link (see :func:`swh.web.browse.utils.get_origin_context`)
        link_text (str): optional text for the generated link
            (ignored when shorten_id is True)
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form '<a href="revision_view_url">revision_id</a>'
    """
    query_params = None
    if origin_context:
        origin_info = origin_context['origin_info']
        query_params = {'origin_type': origin_info['type'],
                        'origin_url': origin_info['url']}
        ctx_url_args = origin_context['url_args']
        if 'timestamp' in ctx_url_args:
            query_params['timestamp'] = ctx_url_args['timestamp']
        ctx_query_params = origin_context['query_params']
        if 'visit_id' in ctx_query_params:
            query_params['visit_id'] = ctx_query_params['visit_id']
    revision_url = reverse('browse-revision',
                           kwargs={'sha1_git': revision_id},
                           query_params=query_params)
    if shorten_id:
        link_text = revision_id[:7]
    elif not link_text:
        link_text = revision_id
    return gen_link(revision_url, link_text, link_attrs)
def gen_origin_link(origin_info, link_attrs={}):
    """
    Utility function for generating a link to a SWH origin HTML view
    to insert in Django templates.

    Args:
        origin_info (dict): a dict filled with origin information
            (id, type, url)
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form '<a href="origin_view_url">Origin: origin_url</a>'
    """ # noqa
    browse_url = reverse('browse-origin',
                         kwargs={'origin_type': origin_info['type'],
                                 'origin_url': origin_info['url']})
    link_text = 'Origin: ' + origin_info['url']
    return gen_link(browse_url, link_text, link_attrs)
def gen_directory_link(sha1_git, link_text=None, link_attrs={}):
    """
    Utility function for generating a link to a SWH directory HTML view
    to insert in Django templates.

    Args:
        sha1_git (str): directory identifier
        link_text (str): optional text for the generated link
            (the generated url will be used by default)
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form '<a href="directory_view_url">link_text</a>'
    """
    directory_url = reverse('browse-directory',
                            kwargs={'sha1_git': sha1_git})
    return gen_link(directory_url, link_text or directory_url, link_attrs)
def gen_origin_directory_link(origin_context, revision_id=None,
                              link_text=None, link_attrs={}):
    """
    Utility function for generating a link to a SWH directory HTML view
    in the context of an origin to insert in Django templates.

    Args:
        origin_context (dict): the origin browsing context, as returned
            by :func:`get_origin_context`
        revision_id (str): optional revision identifier in order
            to use the associated directory
        link_text (str): optional text to use for the generated link
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form
        '<a href="origin_directory_view_url">origin_directory_view_url</a>'
    """
    origin_info = origin_context['origin_info']
    url_args = {'origin_type': origin_info['type'],
                'origin_url': origin_info['url']}
    if 'timestamp' in origin_context['url_args']:
        url_args['timestamp'] = origin_context['url_args']['timestamp']
    query_params = {'revision': revision_id}
    if 'visit_id' in origin_context['query_params']:
        query_params['visit_id'] = origin_context['query_params']['visit_id']
    directory_url = reverse('browse-origin-directory',
                            kwargs=url_args,
                            query_params=query_params)
    return gen_link(directory_url, link_text or directory_url, link_attrs)
def gen_content_link(sha1_git, link_text=None, link_attrs={}):
    """
    Utility function for generating a link to a SWH content HTML view
    to insert in Django templates.

    Args:
        sha1_git (str): content identifier
        link_text (str): optional text for the generated link
            (the generated url will be used by default)
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form '<a href="content_view_url">link_text</a>'
    """
    content_url = reverse('browse-content',
                          kwargs={'query_string': 'sha1_git:' + sha1_git})
    return gen_link(content_url, link_text or content_url, link_attrs)
def get_revision_log_url(revision_id, origin_context=None):
    """
    Utility function for getting the URL for a SWH revision log HTML view
    (possibly in the context of an origin).

    Args:
        revision_id (str): revision identifier the history heads to
        origin_context (dict): if provided, generate origin-dependent browsing
            link (see :func:`swh.web.browse.utils.get_origin_context`)

    Returns:
        The SWH revision log view URL
    """
    # without an origin context, link to the plain revision log view
    if not origin_context:
        return reverse('browse-revision-log',
                       kwargs={'sha1_git': revision_id})
    origin_info = origin_context['origin_info']
    url_args = {'origin_type': origin_info['type'],
                'origin_url': origin_info['url']}
    query_params = {'revision': revision_id}
    if 'timestamp' in origin_context['url_args']:
        url_args['timestamp'] = origin_context['url_args']['timestamp']
    if 'visit_id' in origin_context['query_params']:
        query_params['visit_id'] = origin_context['query_params']['visit_id']
    return reverse('browse-origin-log',
                   kwargs=url_args,
                   query_params=query_params)
def gen_revision_log_link(revision_id, origin_context=None, link_text=None,
                          link_attrs={}):
    """
    Utility function for generating a link to a SWH revision log HTML view
    (possibly in the context of an origin) to insert in Django templates.

    Args:
        revision_id (str): revision identifier the history heads to
        origin_context (dict): if provided, generate origin-dependent browsing
            link (see :func:`swh.web.browse.utils.get_origin_context`)
        link_text (str): optional text to use for the generated link
        link_attrs (dict): optional attributes (e.g. class)
            to add to the link

    Returns:
        An HTML link in the form
        '<a href="revision_log_view_url">link_text</a>'
    """
    log_url = get_revision_log_url(revision_id, origin_context)
    return gen_link(log_url, link_text or log_url, link_attrs)
def _format_log_entries(revision_log, per_page, origin_context=None):
    # Format at most per_page raw revision log entries for the HTML
    # log view: linked author/revision, human-readable date.
    return [
        {'author': gen_person_link(entry['author']['id'],
                                   entry['author']['name']),
         'revision': gen_revision_link(entry['id'], True, origin_context),
         'message': entry['message'],
         'date': format_utc_iso_date(entry['date']),
         'directory': entry['directory']}
        for entry in revision_log[:per_page]
    ]
def prepare_revision_log_for_display(revision_log, per_page, revs_breadcrumb,
                                     origin_context=None):
    """
    Utility functions that process raw revision log data for HTML display.
    Its purpose is to:

        * add links to relevant SWH browse views
        * format date in human readable format
        * truncate the message log

    It also computes the data needed to generate the links for navigating back
    and forth in the history log.

    Args:
        revision_log (list): raw revision log as returned by the SWH web api
        per_page (int): number of log entries per page
        revs_breadcrumb (str): breadcrumbs of revisions navigated so far,
            in the form 'rev1[/rev2/../revN]'. Each revision corresponds to
            the first one displayed in the HTML view for history log.
        origin_context (boolean): wheter or not the revision log is browsed
            from an origin view.
    """
    current_rev = revision_log[0]['id']
    next_rev = None
    prev_rev = None
    next_revs_breadcrumb = None
    prev_revs_breadcrumb = None
    # per_page + 1 entries were fetched: the extra one is the first
    # revision of the previous (older) page, if any
    if len(revision_log) == per_page + 1:
        prev_rev = revision_log[-1]['id']
    prev_rev_bc = current_rev
    if origin_context:
        prev_rev_bc = prev_rev
    if revs_breadcrumb:
        revs = revs_breadcrumb.split('/')
        # the last breadcrumb component is the first revision of the
        # next (newer) page
        next_rev = revs[-1]
        if len(revs) > 1:
            next_revs_breadcrumb = '/'.join(revs[:-1])
        if len(revision_log) == per_page + 1:
            prev_revs_breadcrumb = revs_breadcrumb + '/' + prev_rev_bc
    else:
        prev_revs_breadcrumb = prev_rev_bc
    return {'revision_log_data': _format_log_entries(revision_log, per_page,
                                                     origin_context),
            'prev_rev': prev_rev,
            'prev_revs_breadcrumb': prev_revs_breadcrumb,
            'next_rev': next_rev,
            'next_revs_breadcrumb': next_revs_breadcrumb}
def get_origin_context(origin_type, origin_url, timestamp, visit_id=None):
"""
Utility function to compute relevant information when navigating
the SWH archive in an origin context.
Args:
origin_type (str): the origin type (git, svn, deposit, ...)
origin_url (str): the origin_url (e.g. https://github.com/<user>/<repo>)
timestamp (str): a datetime string for retrieving the closest
SWH visit of the origin
visit_id (int): optional visit id for disambiguation in case
of several visits with the same timestamp
Returns:
A dict with the following entries:
* origin_info: dict containing origin information
* visit_info: dict containing SWH visit information
* branches: the list of branches for the origin found
during the visit
* releases: the list of releases for the origin found
during the visit
* origin_browse_url: the url to browse the origin
* origin_branches_url: the url to browse the origin branches
* origin_releases_url': the url to browse the origin releases
* origin_visit_url: the url to browse the snapshot of the origin
found during the visit
* url_args: dict containg url arguments to use when browsing in
the context of the origin and its visit
""" # noqa
origin_info = service.lookup_origin({'type': origin_type,
'url': origin_url})
visit_info = get_origin_visit(origin_info, timestamp, visit_id)
visit_info['fmt_date'] = format_utc_iso_date(visit_info['date'])
# provided timestamp is not necessarily equals to the one
# of the retrieved visit, so get the exact one in order
# use it in the urls generated below
if timestamp:
timestamp = visit_info['date']
branches, releases = \
- get_origin_visit_occurrences(origin_info, timestamp, visit_id)
+ get_origin_visit_snapshot(origin_info, timestamp, visit_id)
releases = list(reversed(releases))
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
if timestamp:
url_args['timestamp'] = format_utc_iso_date(timestamp,
'%Y-%m-%dT%H:%M:%S')
origin_browse_url = reverse('browse-origin',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']})
origin_visit_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params={'visit_id': visit_id})
origin_branches_url = reverse('browse-origin-branches',
kwargs=url_args,
query_params={'visit_id': visit_id})
origin_releases_url = reverse('browse-origin-releases',
kwargs=url_args,
query_params={'visit_id': visit_id})
return {
'origin_info': origin_info,
'visit_info': visit_info,
'branches': branches,
'releases': releases,
'branch': None,
'release': None,
'origin_browse_url': origin_browse_url,
'origin_branches_url': origin_branches_url,
'origin_releases_url': origin_releases_url,
'origin_visit_url': origin_visit_url,
'url_args': url_args,
'query_params': {'visit_id': visit_id}
}
diff --git a/swh/web/browse/views/origin.py b/swh/web/browse/views/origin.py
index e90ffe41..9cfd5a64 100644
--- a/swh/web/browse/views/origin.py
+++ b/swh/web/browse/views/origin.py
@@ -1,876 +1,876 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import dateutil
import json
from distutils.util import strtobool
from django.http import HttpResponse
from django.shortcuts import render
from django.utils.safestring import mark_safe
from django.template.defaultfilters import filesizeformat
from swh.web.common import service
from swh.web.common.utils import (
gen_path_info, reverse, format_utc_iso_date
)
from swh.web.common.exc import NotFoundExc, handle_view_exception
from swh.web.browse.utils import (
get_origin_visits,
get_directory_entries, request_content,
prepare_content_for_display,
prepare_revision_log_for_display,
get_origin_context, gen_directory_link,
gen_revision_link, gen_revision_log_link,
gen_content_link, gen_origin_directory_link,
content_display_max_size
)
from swh.web.browse.browseurls import browse_route
-def _occurrence_not_found(origin_info, timestamp,
- branch_type, occurrence, occurrences,
- visit_id=None):
+def _branch_not_found(origin_info, timestamp,
+ branch_type, branch, branches,
+ visit_id=None):
"""
Utility function to raise an exception when a specified branch/release
can not be found.
"""
if branch_type:
occ_type = 'Branch'
occ_type_plural = 'branches'
else:
occ_type = 'Release'
occ_type_plural = 'releases'
if visit_id:
- if len(occurrences) == 0:
+ if len(branches) == 0:
raise NotFoundExc('Origin with type %s and url %s'
' for visit with id %s has an empty list'
' of %s!' % (origin_info['type'],
origin_info['url'], visit_id,
occ_type_plural))
else:
raise NotFoundExc('%s %s associated to visit with'
' id %s for origin with type %s and url %s'
- ' not found!' % (occ_type, occurrence, visit_id,
+ ' not found!' % (occ_type, branch, visit_id,
origin_info['type'],
origin_info['url']))
else:
- if len(occurrences) == 0:
+ if len(branches) == 0:
raise NotFoundExc('Origin with type %s and url %s'
' for visit with timestamp %s has an empty list'
' of %s!' % (origin_info['type'],
origin_info['url'],
timestamp, occ_type_plural))
else:
raise NotFoundExc('%s %s associated to visit with'
' timestamp %s for origin with type %s'
- ' and url %s not found!' % (occ_type, occurrence,
+ ' and url %s not found!' % (occ_type, branch,
timestamp,
origin_info['type'],
origin_info['url']))
def _get_branch(branches, branch_name):
    """
    Utility function to get a specific branch from an origin branches list.
    Its purpose is to get the default HEAD branch as some SWH origin
    (e.g those with svn type) does not have it. In that latter case, check
    if there is a master branch instead and returns it.
    """
    matches = [b for b in branches if b['name'].endswith(branch_name)]
    if matches:
        return matches[0]
    if branch_name == 'HEAD':
        # no HEAD branch: fall back to a master branch, then to the
        # first branch available
        master_matches = [b for b in branches if b['name'].endswith('master')]
        if master_matches:
            return master_matches[0]
        if branches:
            return branches[0]
    return None
def _get_release(releases, release_name):
    # Return the first release whose name matches exactly, or None.
    for release in releases:
        if release['name'] == release_name:
            return release
    return None
def _process_origin_request(request, origin_type, origin_url,
timestamp, path, browse_view_name):
"""
Utility function to perform common input request processing
for origin context views.
"""
visit_id = request.GET.get('visit_id', None)
origin_context = get_origin_context(origin_type, origin_url,
timestamp, visit_id)
for b in origin_context['branches']:
branch_url_args = dict(origin_context['url_args'])
if path:
b['path'] = path
branch_url_args['path'] = path
b['url'] = reverse(browse_view_name,
kwargs=branch_url_args,
query_params={'branch': b['name'],
'visit_id': visit_id})
for r in origin_context['releases']:
release_url_args = dict(origin_context['url_args'])
if path:
r['path'] = path
release_url_args['path'] = path
r['url'] = reverse(browse_view_name,
kwargs=release_url_args,
query_params={'release': r['name'],
'visit_id': visit_id})
root_sha1_git = None
query_params = origin_context['query_params']
revision_id = request.GET.get('revision', None)
release_name = request.GET.get('release', None)
branch_name = None
if revision_id:
revision = service.lookup_revision(revision_id)
root_sha1_git = revision['directory']
origin_context['branches'].append({'name': revision_id,
'revision': revision_id,
'directory': root_sha1_git,
'url': None})
branch_name = revision_id
query_params['revision'] = revision_id
elif release_name:
release = _get_release(origin_context['releases'], release_name)
if release:
root_sha1_git = release['directory']
query_params['release'] = release_name
revision_id = release['target']
else:
- _occurrence_not_found(origin_context['origin_info'], timestamp,
- False, release_name,
- origin_context['releases'], visit_id)
+ _branch_not_found(origin_context['origin_info'], timestamp,
+ False, release_name,
+ origin_context['releases'], visit_id)
else:
branch_name = request.GET.get('branch', None)
if branch_name:
query_params['branch'] = branch_name
branch = _get_branch(origin_context['branches'], branch_name or 'HEAD')
if branch:
branch_name = branch['name']
root_sha1_git = branch['directory']
revision_id = branch['revision']
else:
- _occurrence_not_found(origin_context['origin_info'], timestamp,
- True, branch_name,
- origin_context['branches'], visit_id)
+ _branch_not_found(origin_context['origin_info'], timestamp,
+ True, branch_name,
+ origin_context['branches'], visit_id)
origin_context['root_sha1_git'] = root_sha1_git
origin_context['revision_id'] = revision_id
origin_context['branch'] = branch_name
origin_context['release'] = release_name
return origin_context
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/', # noqa
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/(?P<path>.+)/', # noqa
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/directory/', # noqa
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/directory/(?P<path>.+)/', # noqa
              view_name='browse-origin-directory')
def origin_directory_browse(request, origin_type, origin_url,
                            timestamp=None, path=None):
    """Django view for browsing the content of a SWH directory associated
    to an origin for a given visit.

    The url scheme that points to it is the following:

        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/directory/[(path)/]`
        * :http:get:`/browse/origin/(origin_type)/url/(origin_type)/visit/(timestamp)/directory/[(path)/]`

    Args:
        request: input django http request
        origin_type: the type of swh origin (git, svn, hg, ...)
        origin_url: the url of the swh origin
        timestamp: optional swh visit timestamp parameter
            (the last one will be used by default)
        path: optional path parameter used to navigate in directories
            reachable from the origin root one
        branch: optional query parameter that specifies the origin branch
            from which to retrieve the directory
        release: optional query parameter that specifies the origin release
            from which to retrieve the directory
        revision: optional query parameter to specify the origin revision
            from which to retrieve the directory

    Returns:
        The HTML rendering for the content of the directory associated
        to the provided origin and visit.
    """ # noqa
    try:
        # resolve origin, visit, branch/release/revision selection and the
        # root directory for that selection (shared helper for origin views)
        origin_context = _process_origin_request(
            request, origin_type, origin_url, timestamp, path,
            'browse-origin-directory')

        root_sha1_git = origin_context['root_sha1_git']
        sha1_git = root_sha1_git
        if path:
            # browsing a sub-directory: resolve its sha1_git from the root
            dir_info = service.lookup_directory_with_path(root_sha1_git, path)
            sha1_git = dir_info['target']

        dirs, files = get_directory_entries(sha1_git)

    except Exception as exc:
        return handle_view_exception(request, exc)

    origin_info = origin_context['origin_info']
    visit_info = origin_context['visit_info']
    url_args = origin_context['url_args']
    query_params = origin_context['query_params']
    revision_id = origin_context['revision_id']

    path_info = gen_path_info(path)

    # breadcrumbs: root directory (abbreviated sha1) then each path component
    breadcrumbs = []
    breadcrumbs.append({'name': root_sha1_git[:7],
                        'url': reverse('browse-origin-directory',
                                       kwargs=url_args,
                                       query_params=query_params)})
    for pi in path_info:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = pi['path']
        breadcrumbs.append({'name': pi['name'],
                            'url': reverse('browse-origin-directory',
                                           kwargs=bc_url_args,
                                           query_params=query_params)})

    # prefix used to build the path of each listed entry
    path = '' if path is None else (path + '/')

    for d in dirs:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = path + d['name']
        d['url'] = reverse('browse-origin-directory',
                           kwargs=bc_url_args,
                           query_params=query_params)

    sum_file_sizes = 0

    readme_name = None
    readme_url = None

    for f in files:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = path + f['name']
        f['url'] = reverse('browse-origin-content',
                           kwargs=bc_url_args,
                           query_params=query_params)
        sum_file_sizes += f['length']
        # human-readable size for display; raw length already accumulated
        f['length'] = filesizeformat(f['length'])
        # remember the last file whose name starts with "readme" so it can
        # be rendered below the listing
        if f['name'].lower().startswith('readme'):
            readme_name = f['name']
            readme_sha1 = f['checksums']['sha1']
            readme_url = reverse('browse-content-raw',
                                 kwargs={'query_string': readme_sha1})

    history_url = reverse('browse-origin-log',
                          kwargs=url_args,
                          query_params=query_params)

    sum_file_sizes = filesizeformat(sum_file_sizes)

    browse_dir_link = \
        gen_directory_link(sha1_git, link_text='Browse',
                           link_attrs={'class': 'btn btn-md btn-swh',
                                       'role': 'button'})

    browse_rev_link = \
        gen_revision_link(revision_id,
                          origin_context=origin_context,
                          link_text='Browse',
                          link_attrs={'class': 'btn btn-md btn-swh',
                                      'role': 'button'})

    # metadata shown in the collapsible top panel of the directory view
    dir_metadata = {'id': sha1_git,
                    'context-independent directory': browse_dir_link,
                    'number of regular files': len(files),
                    'number of subdirectories': len(dirs),
                    'sum of regular file sizes': sum_file_sizes,
                    'origin id': origin_info['id'],
                    'origin type': origin_info['type'],
                    'origin url': origin_info['url'],
                    'origin visit date': format_utc_iso_date(visit_info['date']), # noqa
                    'origin visit id': visit_info['visit'],
                    'path': '/' + path,
                    'revision id': revision_id,
                    'revision': browse_rev_link}

    vault_cooking = {
        'directory_context': True,
        'directory_id': sha1_git,
        'revision_context': True,
        'revision_id': revision_id
    }

    return render(request, 'directory.html',
                  {'empty_browse': False,
                   'heading': 'Directory information',
                   'top_panel_visible': True,
                   'top_panel_collapsible': True,
                   'top_panel_text': 'SWH object: Directory',
                   'swh_object_metadata': dir_metadata,
                   'main_panel_visible': True,
                   'dirs': dirs,
                   'files': files,
                   'breadcrumbs': breadcrumbs,
                   'top_right_link': history_url,
                   'top_right_link_text': mark_safe(
                       '<i class="fa fa-history fa-fw" aria-hidden="true"></i>'
                       'History'
                   ),
                   'readme_name': readme_name,
                   'readme_url': readme_url,
                   'origin_context': origin_context,
                   'vault_cooking': vault_cooking})
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/content/(?P<path>.+)/', # noqa
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/content/(?P<path>.+)/', # noqa
              view_name='browse-origin-content')
def origin_content_display(request, origin_type, origin_url, path,
                           timestamp=None):
    """Django view that produces an HTML display of a SWH content
    associated to an origin for a given visit.

    The url scheme that points to it is the following:

        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/content/(path)/`
        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/visit/(timestamp)/content/(path)/`

    Args:
        request: input django http request
        origin_type: the type of swh origin (git, svn, hg, ...)
        origin_url: the url of the swh origin
        path: path of the content relative to the origin root directory
        timestamp: optional swh visit timestamp parameter
            (the last one will be used by default)
        branch: optional query parameter that specifies the origin branch
            from which to retrieve the content
        release: optional query parameter that specifies the origin release
            from which to retrieve the content
        revision: optional query parameter to specify the origin revision
            from which to retrieve the content

    Returns:
        The HTML rendering of the requested content associated to
        the provided origin and visit.
    """ # noqa
    try:
        # resolve origin, visit and branch/release/revision selection
        origin_context = _process_origin_request(
            request, origin_type, origin_url, timestamp, path,
            'browse-origin-content')

        root_sha1_git = origin_context['root_sha1_git']
        # resolve the content sha1_git from its path under the root directory
        content_info = service.lookup_directory_with_path(root_sha1_git, path)
        sha1_git = content_info['target']
        query_string = 'sha1_git:' + sha1_git
        content_data = request_content(query_string)

    except Exception as exc:
        return handle_view_exception(request, exc)

    url_args = origin_context['url_args']
    query_params = origin_context['query_params']
    revision_id = origin_context['revision_id']
    origin_info = origin_context['origin_info']
    visit_info = origin_context['visit_info']

    content = None
    language = None
    # raw_data may be None (e.g. content too large to display); in that case
    # no highlighted content is prepared
    if content_data['raw_data'] is not None:
        content_display_data = prepare_content_for_display(
            content_data['raw_data'], content_data['mimetype'], path)
        content = content_display_data['content_data']
        language = content_display_data['language']

    filename = None
    path_info = None

    # split the path into the containing directory part and the filename
    breadcrumbs = []
    split_path = path.split('/')
    filename = split_path[-1]
    path = path[:-len(filename)]
    path_info = gen_path_info(path)
    breadcrumbs.append({'name': root_sha1_git[:7],
                        'url': reverse('browse-origin-directory',
                                       kwargs=url_args,
                                       query_params=query_params)})
    for pi in path_info:
        bc_url_args = dict(url_args)
        bc_url_args['path'] = pi['path']
        breadcrumbs.append({'name': pi['name'],
                            'url': reverse('browse-origin-directory',
                                           kwargs=bc_url_args,
                                           query_params=query_params)})

    # last breadcrumb is the file itself: no link
    breadcrumbs.append({'name': filename,
                        'url': None})

    browse_content_link = \
        gen_content_link(sha1_git, link_text='Browse',
                         link_attrs={'class': 'btn btn-md btn-swh',
                                     'role': 'button'})

    content_raw_url = reverse('browse-content-raw',
                              kwargs={'query_string': query_string},
                              query_params={'filename': filename})

    browse_rev_link = \
        gen_revision_link(revision_id,
                          origin_context=origin_context,
                          link_text='Browse',
                          link_attrs={'class': 'btn btn-md btn-swh',
                                      'role': 'button'})

    # metadata shown in the collapsible top panel of the content view
    content_metadata = {
        'context-independent content': browse_content_link,
        'sha1 checksum': content_data['checksums']['sha1'],
        'sha1_git checksum': content_data['checksums']['sha1_git'],
        'sha256 checksum': content_data['checksums']['sha256'],
        'blake2s256 checksum': content_data['checksums']['blake2s256'],
        'mime type': content_data['mimetype'],
        'encoding': content_data['encoding'],
        'size': filesizeformat(content_data['length']),
        'language': content_data['language'],
        'licenses': content_data['licenses'],
        'origin id': origin_info['id'],
        'origin type': origin_info['type'],
        'origin url': origin_info['url'],
        'origin visit date': format_utc_iso_date(visit_info['date']),
        'origin visit id': visit_info['visit'],
        'path': '/' + path,
        'filename': filename,
        'revision id': revision_id,
        'revision': browse_rev_link
    }

    return render(request, 'content.html',
                  {'empty_browse': False,
                   'heading': 'Content information',
                   'top_panel_visible': True,
                   'top_panel_collapsible': True,
                   'top_panel_text': 'SWH object: Content',
                   'swh_object_metadata': content_metadata,
                   'main_panel_visible': True,
                   'content': content,
                   'content_size': content_data['length'],
                   'max_content_size': content_display_max_size,
                   'mimetype': content_data['mimetype'],
                   'language': language,
                   'breadcrumbs': breadcrumbs,
                   'top_right_link': content_raw_url,
                   'top_right_link_text': mark_safe(
                       '<i class="fa fa-file-text fa-fw" aria-hidden="true">'
                       '</i>Raw File'),
                   'origin_context': origin_context,
                   'vault_cooking': None
                   })
PER_PAGE = 20
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/log/', # noqa
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/log/',
              view_name='browse-origin-log')
def origin_log_browse(request, origin_type, origin_url, timestamp=None):
    """Django view that produces an HTML display of revisions history (aka
    the commit log) associated to a SWH origin.

    The url scheme that points to it is the following:

        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/log/`
        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/visit/(timestamp)/log/`

    Args:
        request: input django http request
        origin_type: the type of swh origin (git, svn, hg, ...)
        origin_url: the url of the swh origin
        timestamp: optional visit timestamp parameter
            (the last one will be used by default)
        revs_breadcrumb: query parameter used internally to store
            the navigation breadcrumbs (i.e. the list of descendant revisions
            visited so far).
        per_page: optional query parameter used to specify the number of
            log entries per page
        branch: optional query parameter that specifies the origin branch
            from which to retrieve the commit log
        release: optional query parameter that specifies the origin release
            from which to retrieve the commit log
        revision: optional query parameter to specify the origin revision
            from which to retrieve the commit log

    Returns:
        The HTML rendering of revisions history for a given SWH visit.
    """ # noqa
    try:
        origin_context = _process_origin_request(
            request, origin_type, origin_url, timestamp, None,
            'browse-origin-log')

        revision_id = origin_context['revision_id']
        per_page = int(request.GET.get('per_page', PER_PAGE))
        # fetch one extra entry: it is used to know whether a next page exists
        revision_log = service.lookup_revision_log(revision_id,
                                                   limit=per_page+1)
        revision_log = list(revision_log)

    except Exception as exc:
        return handle_view_exception(request, exc)

    origin_info = origin_context['origin_info']
    visit_info = origin_context['visit_info']
    url_args = origin_context['url_args']
    query_params = origin_context['query_params']

    query_params['per_page'] = per_page

    # the breadcrumb encodes the chain of revisions navigated so far;
    # its last element is the revision whose log page is displayed
    revs_breadcrumb = request.GET.get('revs_breadcrumb', None)
    if revs_breadcrumb:
        revision_id = revs_breadcrumb.split('/')[-1]

    revision_log_display_data = prepare_revision_log_for_display(
        revision_log, per_page, revs_breadcrumb, origin_context)

    prev_rev = revision_log_display_data['prev_rev']
    prev_revs_breadcrumb = revision_log_display_data['prev_revs_breadcrumb']
    prev_log_url = None
    query_params['revs_breadcrumb'] = prev_revs_breadcrumb
    if prev_rev:
        prev_log_url = \
            reverse('browse-origin-log',
                    kwargs=url_args,
                    query_params=query_params)

    next_rev = revision_log_display_data['next_rev']
    next_revs_breadcrumb = revision_log_display_data['next_revs_breadcrumb']
    next_log_url = None
    query_params['revs_breadcrumb'] = next_revs_breadcrumb
    if next_rev:
        next_log_url = \
            reverse('browse-origin-log',
                    kwargs=url_args,
                    query_params=query_params)

    revision_log_data = revision_log_display_data['revision_log_data']

    for i, log in enumerate(revision_log_data):
        # NOTE(review): `params` is built here but never passed to anything;
        # it looks like it was meant to feed the directory link below —
        # confirm whether it is dead code or a missing argument.
        params = {
            'revision': revision_log[i]['id'],
        }
        if 'visit_id' in query_params:
            params['visit_id'] = query_params['visit_id']
        log['directory'] = gen_origin_directory_link(
            origin_context, revision_log[i]['id'],
            link_text='<i class="fa fa-folder-open fa-fw" aria-hidden="true">'
                      '</i>Browse files',
            link_attrs={'class': 'btn btn-md btn-swh',
                        'role': 'button'})

    browse_log_link = \
        gen_revision_log_link(revision_id, link_text='Browse',
                              link_attrs={'class': 'btn btn-md btn-swh',
                                          'role': 'button'})

    # metadata shown in the collapsible top panel of the log view
    revision_metadata = {
        'context-independent revision history': browse_log_link,
        'origin id': origin_info['id'],
        'origin type': origin_info['type'],
        'origin url': origin_info['url'],
        'origin visit date': format_utc_iso_date(visit_info['date']),
        'origin visit id': visit_info['visit']
    }

    return render(request, 'revision-log.html',
                  {'empty_browse': False,
                   'heading': 'Revision history information',
                   'top_panel_visible': True,
                   'top_panel_collapsible': True,
                   'top_panel_text': 'SWH object: Revision history',
                   'swh_object_metadata': revision_metadata,
                   'main_panel_visible': True,
                   'revision_log': revision_log_data,
                   'next_log_url': next_log_url,
                   'prev_log_url': prev_log_url,
                   'breadcrumbs': None,
                   'top_right_link': None,
                   'top_right_link_text': None,
                   'origin_context': origin_context,
                   'vault_cooking': None})
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/branches/', # noqa
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/branches/', # noqa
              view_name='browse-origin-branches')
def origin_branches_browse(request, origin_type, origin_url, timestamp=None):
    """Django view rendering the paginated list of branches of an origin
    for a given visit.

    The url scheme that points to it is the following:

        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/branches/`
        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/visit/(timestamp)/branches/`
    """ # noqa
    try:
        origin_context = _process_origin_request(
            request, origin_type, origin_url, timestamp, None,
            'browse-origin-directory')
    except Exception as exc:
        return handle_view_exception(request, exc)

    origin_info = origin_context['origin_info']
    url_args = origin_context['url_args']
    query_params = origin_context['query_params']
    branches = origin_context['branches']

    # slice out the page of branches requested through 'branches_offset'
    offset = int(request.GET.get('branches_offset', 0))
    displayed_branches = branches[offset:offset + PER_PAGE]

    for branch in displayed_branches:
        # context-independent revision view, annotated with the origin
        branch['revision_url'] = reverse(
            'browse-revision', kwargs={'sha1_git': branch['revision']},
            query_params={'origin_type': origin_info['type'],
                          'origin_url': origin_info['url']})
        # directory view of the branch head, within the origin context;
        # the 'branch' key is only set for the duration of this url build
        query_params['branch'] = branch['name']
        branch['directory_url'] = reverse('browse-origin-directory',
                                          kwargs=url_args,
                                          query_params=query_params)
        del query_params['branch']

    prev_branches_url = None
    next_branches_url = None

    if offset + PER_PAGE < len(branches):
        query_params['branches_offset'] = offset + PER_PAGE
        next_branches_url = reverse('browse-origin-branches',
                                    kwargs=url_args, query_params=query_params)
    # reset the offset before computing the previous page url
    query_params['branches_offset'] = None
    if offset - PER_PAGE >= 0:
        if offset - PER_PAGE != 0:
            query_params['branches_offset'] = offset - PER_PAGE
        prev_branches_url = reverse('browse-origin-branches',
                                    kwargs=url_args, query_params=query_params)

    return render(request, 'branches.html',
                  {'empty_browse': False,
                   'heading': 'Origin branches list',
                   'top_panel_visible': False,
                   'top_panel_collapsible': False,
                   'top_panel_text': 'SWH object: Origin branches list',
                   'swh_object_metadata': {},
                   'main_panel_visible': True,
                   'top_right_link': None,
                   'top_right_link_text': None,
                   'displayed_branches': displayed_branches,
                   'prev_branches_url': prev_branches_url,
                   'next_branches_url': next_branches_url,
                   'origin_context': origin_context})
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/releases/', # noqa
              r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/releases/', # noqa
              view_name='browse-origin-releases')
def origin_releases_browse(request, origin_type, origin_url, timestamp=None):
    """Django view that produces an HTML display of the list of releases
    associated to an origin for a given visit.

    The url scheme that points to it is the following:

        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/releases/`
        * :http:get:`/browse/origin/(origin_type)/url/(origin_url)/visit/(timestamp)/releases/`

    Args:
        request: input django http request
        origin_type: the type of swh origin (git, svn, hg, ...)
        origin_url: the url of the swh origin
        timestamp: optional swh visit timestamp parameter
            (the last one will be used by default)
        releases_offset: optional query parameter giving the index of the
            first release of the displayed page

    Returns:
        The HTML rendering of the paginated releases list.
    """ # noqa
    try:
        origin_context = _process_origin_request(
            request, origin_type, origin_url, timestamp, None,
            'browse-origin-directory')
    except Exception as exc:
        return handle_view_exception(request, exc)

    releases_offset = int(request.GET.get('releases_offset', 0))

    origin_info = origin_context['origin_info']
    url_args = origin_context['url_args']
    query_params = origin_context['query_params']

    releases = origin_context['releases']
    displayed_releases = \
        releases[releases_offset:releases_offset+PER_PAGE]

    for release in displayed_releases:
        # context-independent release view, annotated with the origin
        # (a dead set/delete of query_params['release'], copied over from
        # the branches view where the key is actually used, was removed)
        release_url = reverse('browse-release',
                              kwargs={'sha1_git': release['id']},
                              query_params={'origin_type': origin_info['type'],
                                            'origin_url': origin_info['url']})
        release['release_url'] = release_url

    prev_releases_url = None
    next_releases_url = None

    next_offset = releases_offset + PER_PAGE
    prev_offset = releases_offset - PER_PAGE
    if next_offset < len(releases):
        query_params['releases_offset'] = next_offset
        next_releases_url = reverse('browse-origin-releases',
                                    kwargs=url_args, query_params=query_params)
    # reset the offset before computing the previous page url
    query_params['releases_offset'] = None
    if prev_offset >= 0:
        if prev_offset != 0:
            query_params['releases_offset'] = prev_offset
        prev_releases_url = reverse('browse-origin-releases',
                                    kwargs=url_args, query_params=query_params)

    return render(request, 'releases.html',
                  {'empty_browse': False,
                   'heading': 'Origin releases list',
                   'top_panel_visible': False,
                   'top_panel_collapsible': False,
                   'top_panel_text': 'SWH object: Origin releases list',
                   'swh_object_metadata': {},
                   'main_panel_visible': True,
                   'top_right_link': None,
                   'top_right_link_text': None,
                   'displayed_releases': displayed_releases,
                   'prev_releases_url': prev_releases_url,
                   'next_releases_url': next_releases_url,
                   'origin_context': origin_context,
                   'vault_cooking': None})
@browse_route(r'origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)/',
              view_name='browse-origin')
def origin_browse(request, origin_type=None, origin_url=None):
    """Django view that produces an HTML display of a swh origin identified
    by its id or its url.

    The url scheme that points to it is
    :http:get:`/browse/origin/(origin_type)/url/(origin_url)/`.

    Args:
        request: input django http request
        origin_type: type of origin (git, svn, ...)
        origin_url: url of the origin (e.g. https://github.com/<user>/<repo>)

    Returns:
        The HMTL rendering for the metadata of the provided origin.
    """ # noqa
    try:
        origin_info = service.lookup_origin({
            'type': origin_type,
            'url': origin_url
        })
        origin_visits = get_origin_visits(origin_info)
        # most recent visit first
        origin_visits.reverse()
    except Exception as exc:
        return handle_view_exception(request, exc)

    origin_info['last swh visit browse url'] = \
        reverse('browse-origin-directory',
                kwargs={'origin_type': origin_type,
                        'origin_url': origin_url})

    origin_visits_data = []
    visits_splitted = []
    visits_by_year = {}
    for i, visit in enumerate(origin_visits):
        visit_date = dateutil.parser.parse(visit['date'])
        visit_year = str(visit_date.year)
        url_date = format_utc_iso_date(visit['date'], '%Y-%m-%dT%H:%M:%S')
        visit['fmt_date'] = format_utc_iso_date(visit['date'])
        query_params = {}
        # when two consecutive visits share the same date, the timestamp in
        # the url is ambiguous: disambiguate with an explicit visit_id
        if i < len(origin_visits) - 1:
            if visit['date'] == origin_visits[i+1]['date']:
                query_params = {'visit_id': visit['visit']}
        if i > 0:
            if visit['date'] == origin_visits[i-1]['date']:
                query_params = {'visit_id': visit['visit']}

        visit['browse_url'] = reverse('browse-origin-directory',
                                      kwargs={'origin_type': origin_type,
                                              'origin_url': origin_url,
                                              'timestamp': url_date},
                                      query_params=query_params)

        # chronological series of visit timestamps (oldest first)
        origin_visits_data.insert(0, {'date': visit_date.timestamp()})

        # group visits by year; a new row is started every 3 years
        if visit_year not in visits_by_year:
            # display 3 years by row in visits list view
            if len(visits_by_year) == 3:
                visits_splitted.insert(0, visits_by_year)
                visits_by_year = {}
            visits_by_year[visit_year] = []
        visits_by_year[visit_year].append(visit)

    if len(visits_by_year) > 0:
        visits_splitted.insert(0, visits_by_year)

    return render(request, 'origin.html',
                  {'empty_browse': False,
                   'heading': 'Origin information',
                   'top_panel_visible': False,
                   'top_panel_collapsible': False,
                   'top_panel_text': 'SWH object: Visits history',
                   'swh_object_metadata': origin_info,
                   'main_panel_visible': True,
                   'origin_visits_data': origin_visits_data,
                   'visits_splitted': visits_splitted,
                   'origin_info': origin_info,
                   'browse_url_base': '/browse/origin/%s/url/%s/' %
                   (origin_type, origin_url),
                   'vault_cooking': None})
@browse_route(r'origin/search/(?P<url_pattern>.+)/',
              view_name='browse-origin-search')
def origin_search(request, url_pattern):
    """Search for origins whose urls contain a provided string pattern
    or match a provided regular expression.

    The search is performed in a case insensitive way and the matching
    origins are returned as a JSON response.
    """
    search_offset = int(request.GET.get('offset', '0'))
    search_limit = int(request.GET.get('limit', '50'))
    # 'regexp' arrives as a textual boolean ('true'/'false', ...)
    use_regexp = bool(strtobool(request.GET.get('regexp', 'false')))

    matches = list(service.search_origin(url_pattern, search_offset,
                                         search_limit, use_regexp))
    payload = json.dumps(matches, sort_keys=True, indent=4,
                         separators=(',', ': '))
    return HttpResponse(payload, content_type='application/json')
diff --git a/swh/web/tests/browse/test_utils.py b/swh/web/tests/browse/test_utils.py
index dfcce275..c77c8a52 100644
--- a/swh/web/tests/browse/test_utils.py
+++ b/swh/web/tests/browse/test_utils.py
@@ -1,433 +1,429 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
import unittest
from unittest.mock import patch
from nose.tools import istest
from swh.web.browse import utils
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import reverse
from swh.web.tests.testbase import SWHWebTestBase
from .views.data.revision_test_data import revision_history_log_test
class SwhBrowseUtilsTestCase(SWHWebTestBase, unittest.TestCase):
@istest
def get_mimetype_and_encoding_for_content(self):
    """A plain ascii blob must be detected as text/plain, us-ascii."""
    data = b'Hello world!'
    self.assertEqual(
        utils.get_mimetype_and_encoding_for_content(data),
        ('text/plain', 'us-ascii'))
@patch('swh.web.browse.utils.service')
@istest
def get_origin_visits(self, mock_service):
    """All visits must be collected across paginated service calls."""
    # force pagination: at most 2 visits per backend call
    mock_service.MAX_LIMIT = 2

    def _lookup_origin_visits(*args, **kwargs):
        # first page when no last_visit cursor, second page otherwise
        if kwargs['last_visit'] is None:
            return [
                {'visit': 1,
                 'date': '2017-05-06T00:59:10+00:00',
                 'metadata': {}},
                {'visit': 2,
                 'date': '2017-08-06T00:59:10+00:00',
                 'metadata': {}},
            ]
        return [
            {'visit': 3,
             'date': '2017-09-06T00:59:10+00:00',
             'metadata': {}},
        ]

    mock_service.lookup_origin_visits.side_effect = _lookup_origin_visits

    origin_info = {
        'id': 1,
        'type': 'git',
        'url': 'https://github.com/foo/bar',
    }
    origin_visits = utils.get_origin_visits(origin_info)

    self.assertEqual(len(origin_visits), 3)
@patch('swh.web.browse.utils.get_origin_visits')
@istest
def get_origin_visit(self, mock_origin_visits):
    """Check visit lookup by id and by exact, partial or epoch timestamp."""
    origin_info = {
        'id': 2,
        'type': 'git',
        'url': 'https://github.com/foo/bar',
    }
    visits = \
        [{'status': 'full',
          'date': '2015-07-09T21:09:24+00:00',
          'visit': 1,
          'origin': origin_info['id']
          },
         {'status': 'full',
          'date': '2016-02-23T18:05:23.312045+00:00',
          'visit': 2,
          'origin': origin_info['id']
          },
         {'status': 'full',
          'date': '2016-03-28T01:35:06.554111+00:00',
          'visit': 3,
          'origin': origin_info['id']
          },
         {'status': 'full',
          'date': '2016-06-18T01:22:24.808485+00:00',
          'visit': 4,
          'origin': origin_info['id']
          },
         {'status': 'full',
          'date': '2016-08-14T12:10:00.536702+00:00',
          'visit': 5,
          'origin': origin_info['id']
          }]
    mock_origin_visits.return_value = visits

    # an unknown visit id must raise NotFoundExc; the message interpolates
    # the visit id first, then the origin id (the arguments were previously
    # swapped, making the assertion check a wrong message)
    with self.assertRaises(NotFoundExc) as cm:
        visit_id = 12
        visit = utils.get_origin_visit(origin_info,
                                       visit_id=visit_id)
        self.assertIn('Visit with id %s for origin with id %s not found' %
                      (visit_id, origin_info['id']),
                      cm.exception.args[0])

    # lookup by visit id
    visit = utils.get_origin_visit(origin_info, visit_id=2)
    self.assertEqual(visit, visits[1])

    # lookup by exact timestamp
    visit = utils.get_origin_visit(
        origin_info, visit_ts='2016-02-23T18:05:23.312045+00:00')
    self.assertEqual(visit, visits[1])

    # lookup by date only: closest visit is returned
    visit = utils.get_origin_visit(
        origin_info, visit_ts='2016-02-20')
    self.assertEqual(visit, visits[1])

    # lookup by partial timestamps, with 'T' or space separator
    visit = utils.get_origin_visit(
        origin_info, visit_ts='2016-06-18T01:22')
    self.assertEqual(visit, visits[3])

    visit = utils.get_origin_visit(
        origin_info, visit_ts='2016-06-18 01:22')
    self.assertEqual(visit, visits[3])

    # lookup by unix epoch timestamp
    visit = utils.get_origin_visit(
        origin_info, visit_ts=1466208000)
    self.assertEqual(visit, visits[3])

    # timestamps before the first / after the last visit clamp to the
    # first / last visit respectively
    visit = utils.get_origin_visit(
        origin_info, visit_ts='2014-01-01')
    self.assertEqual(visit, visits[0])

    visit = utils.get_origin_visit(
        origin_info, visit_ts='2018-01-01')
    self.assertEqual(visit, visits[-1])
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.utils.get_origin_visit')
@istest
- def get_origin_visit_occurrences(self, mock_get_origin_visit,
- mock_service):
+ def get_origin_visit_snapshot(self, mock_get_origin_visit,
+ mock_service):
mock_get_origin_visit.return_value = \
{'status': 'full',
'date': '2015-08-04T22:26:14.804009+00:00',
'visit': 1,
- 'origin': 1}
+ 'origin': 1,
+ 'snapshot': '584b2fe3ce6218a96892e73bd76c2966bbc2a797'}
- mock_service.lookup_origin_visit.return_value = \
- {'date': '2015-08-04T22:26:14.804009+00:00',
- 'metadata': {},
- 'occurrences': {
+ mock_service.lookup_snapshot.return_value = \
+ {'branches': {
'refs/heads/master': {
'target': '9fbd21adbac36be869514e82e2e98505dc47219c',
'target_type': 'revision',
'target_url': '/api/1/revision/9fbd21adbac36be869514e82e2e98505dc47219c/'
},
'refs/tags/0.10.0': {
'target': '6072557b6c10cd9a21145781e26ad1f978ed14b9',
'target_type': 'release',
'target_url': '/api/1/release/6072557b6c10cd9a21145781e26ad1f978ed14b9/'
},
'refs/tags/0.10.1': {
'target': 'ecc003b43433e5b46511157598e4857a761007bf',
'target_type': 'release',
'target_url': '/api/1/release/ecc003b43433e5b46511157598e4857a761007bf/'
}
},
- 'origin': 1,
- 'origin_url': '/api/1/origin/1/',
- 'status': 'full',
- 'visit': 1}
+ 'id': '584b2fe3ce6218a96892e73bd76c2966bbc2a797'}
mock_service.lookup_release_multiple.return_value = \
[{'name': '0.10.0',
'message': 'release 0.10.0',
'id': '6072557b6c10cd9a21145781e26ad1f978ed14b9',
'date': '2015-08-04T13:16:54+03:00',
'target_type': 'revision',
'target': 'e9c6243371087d04848b7686888f6dd29dfaef0e'},
{'name': '0.10.1',
'message': 'release 0.10.1',
'id': 'ecc003b43433e5b46511157598e4857a761007bf',
'date': '2017-08-04T13:16:54+03:00',
'target_type': 'revision',
'target': '6072557b6c10cd9a21145781e26ad1f978ed14b9'}]
mock_service.lookup_revision_multiple.return_value = \
[{'date': '2015-08-04T13:16:54+03:00',
'directory': '828da2b80e41aa958b2c98526f4a1d2cc7d298b7',
'id': '9fbd21adbac36be869514e82e2e98505dc47219c',
'message': 'Merge pull request #678 from algernon'},
{'date': '2014-04-10T23:01:11-04:00',
'directory': '2df4cd84ecc65b50b1d5318d3727e02a39b8a4cf',
'id': '6072557b6c10cd9a21145781e26ad1f978ed14b9',
'message': '0.10: The "Oh fuck it\'s PyCon" release\n'},
{'date': '2014-10-10T09:45:23-04:00',
'directory': '28ba64f97ef709e54838ae482c2da2619a74a0bd',
'id': 'ecc003b43433e5b46511157598e4857a761007bf',
'message': '0.10.1\n'}]
expected_result = (
[{'name': 'refs/heads/master',
'message': 'Merge pull request #678 from algernon',
'date': '04 August 2015, 13:16 UTC',
'revision': '9fbd21adbac36be869514e82e2e98505dc47219c',
'directory': '828da2b80e41aa958b2c98526f4a1d2cc7d298b7'}],
[{'name': '0.10.0',
'id': '6072557b6c10cd9a21145781e26ad1f978ed14b9',
'message': 'release 0.10.0',
'date': '04 August 2015, 13:16 UTC',
'target_type': 'revision',
'target': 'e9c6243371087d04848b7686888f6dd29dfaef0e',
'directory': '2df4cd84ecc65b50b1d5318d3727e02a39b8a4cf'},
{'name': '0.10.1',
'id': 'ecc003b43433e5b46511157598e4857a761007bf',
'message': 'release 0.10.1',
'date': '04 August 2017, 13:16 UTC',
'target_type': 'revision',
'target': '6072557b6c10cd9a21145781e26ad1f978ed14b9',
'directory': '28ba64f97ef709e54838ae482c2da2619a74a0bd'}]
)
origin_info = {
'id': 1,
'type': 'git',
'url': 'https://github.com/hylang/hy'
}
origin_visit_branches = \
- utils.get_origin_visit_occurrences(origin_info, visit_id=1)
+ utils.get_origin_visit_snapshot(origin_info, visit_id=1)
self.assertEqual(origin_visit_branches, expected_result)
@istest
def gen_link(self):
    """gen_link must produce a plain anchor tag."""
    expected = '<a href="https://www.softwareheritage.org/">SWH</a>'
    self.assertEqual(
        utils.gen_link('https://www.softwareheritage.org/', 'SWH'),
        expected)
@istest
def gen_person_link(self):
    """The generated anchor must target the browse-person view."""
    person_id = 8221896
    person_name = 'Antoine Lambert'
    expected_url = reverse('browse-person',
                           kwargs={'person_id': person_id})
    self.assertEqual(
        utils.gen_person_link(person_id, person_name),
        '<a href="%s">%s</a>' % (expected_url, person_name))
@istest
def gen_revision_link(self):
    """The anchor must target the browse-revision view; shorten_id
    truncates the displayed id to 7 hex digits."""
    revision_id = '28a0bc4120d38a394499382ba21d6965a67a3703'
    expected_url = reverse('browse-revision',
                           kwargs={'sha1_git': revision_id})

    full_link = utils.gen_revision_link(revision_id)
    self.assertEqual(full_link,
                     '<a href="%s">%s</a>' % (expected_url, revision_id))

    short_link = utils.gen_revision_link(revision_id, shorten_id=True)
    self.assertEqual(short_link,
                     '<a href="%s">%s</a>' % (expected_url,
                                              revision_id[:7]))
@istest
def prepare_revision_log_for_display_no_contex(self):
    # NOTE(review): "no_contex" looks like a typo for "no_context"; kept
    # as-is because the method name is what nose collects via @istest.
    per_page = 10

    # four successive pages of the test log; each page carries one extra
    # entry (per_page+1) so the pagination logic can detect a next page
    first_page_logs_data = revision_history_log_test[:per_page+1]
    second_page_logs_data = revision_history_log_test[per_page:2*per_page+1]
    third_page_logs_data = revision_history_log_test[2*per_page:3*per_page+1]
    last_page_logs_data = revision_history_log_test[3*per_page:3*per_page+5]

    # first page: no breadcrumb yet, hence no "next" navigation data
    revision_log_display_data = utils.prepare_revision_log_for_display(
        first_page_logs_data, per_page, None)

    self.assertEqual(revision_log_display_data['revision_log_data'],
                     utils._format_log_entries(first_page_logs_data,
                                               per_page))

    self.assertEqual(revision_log_display_data['prev_rev'],
                     first_page_logs_data[-1]['id'])

    self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'],
                     first_page_logs_data[0]['id'])

    self.assertEqual(revision_log_display_data['next_rev'], None)
    self.assertEqual(revision_log_display_data['next_revs_breadcrumb'],
                     None)

    # second page: breadcrumb grows by one revision, "next" points back
    # to the first page
    old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb'])

    revision_log_display_data = utils.prepare_revision_log_for_display(
        second_page_logs_data, per_page, old_prev_revs_bc)

    self.assertEqual(revision_log_display_data['revision_log_data'],
                     utils._format_log_entries(second_page_logs_data,
                                               per_page))

    self.assertEqual(revision_log_display_data['prev_rev'],
                     second_page_logs_data[-1]['id'])

    self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'],
                     old_prev_revs_bc + '/' + second_page_logs_data[0]['id'])

    self.assertEqual(revision_log_display_data['next_rev'],
                     old_prev_revs_bc)
    self.assertEqual(revision_log_display_data['next_revs_breadcrumb'],
                     None)

    # third page: "next" navigation now pops the last breadcrumb element
    old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb'])

    revision_log_display_data = utils.prepare_revision_log_for_display(
        third_page_logs_data, per_page, old_prev_revs_bc)

    self.assertEqual(revision_log_display_data['revision_log_data'],
                     utils._format_log_entries(third_page_logs_data, per_page))

    self.assertEqual(revision_log_display_data['prev_rev'],
                     third_page_logs_data[-1]['id'])

    self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'],
                     old_prev_revs_bc + '/' + third_page_logs_data[0]['id'])

    self.assertEqual(revision_log_display_data['next_rev'],
                     old_prev_revs_bc.split('/')[-1])
    self.assertEqual(revision_log_display_data['next_revs_breadcrumb'],
                     '/'.join(old_prev_revs_bc.split('/')[:-1]))

    # last (short) page: no "prev" navigation any more
    old_prev_revs_bc = str(revision_log_display_data['prev_revs_breadcrumb'])

    revision_log_display_data = utils.prepare_revision_log_for_display(
        last_page_logs_data, per_page, old_prev_revs_bc)

    self.assertEqual(revision_log_display_data['revision_log_data'],
                     utils._format_log_entries(last_page_logs_data, per_page))

    self.assertEqual(revision_log_display_data['prev_rev'],
                     None)

    self.assertEqual(revision_log_display_data['prev_revs_breadcrumb'],
                     None)

    self.assertEqual(revision_log_display_data['next_rev'], old_prev_revs_bc.split('/')[-1])
    self.assertEqual(revision_log_display_data['next_revs_breadcrumb'],
                     '/'.join(old_prev_revs_bc.split('/')[:-1]))
@istest
def prepare_revision_log_for_display_origin_context(self):
    """Page four times through a revision log within an origin context
    and verify the navigation metadata produced for every page:
    formatted log entries, previous/next revision ids and the
    breadcrumb trail of previously visited page heads.
    """
    per_page = 10
    # Slice the shared revision-history fixture into successive pages;
    # each page except the last carries one extra entry for pagination.
    page1 = revision_history_log_test[:per_page+1]
    page2 = revision_history_log_test[per_page:2*per_page+1]
    page3 = revision_history_log_test[2*per_page:3*per_page+1]
    page4 = revision_history_log_test[3*per_page:3*per_page+5]
    origin_context = {
        'origin_info': {'type': 'git',
                        'url': 'https://github.com/git/git'},
        'url_args': {},
        'query_params': {}
    }

    # First page: no breadcrumb yet, so no "next" navigation.
    dsp = utils.prepare_revision_log_for_display(
        page1, per_page, None, origin_context=origin_context)
    self.assertEqual(
        dsp['revision_log_data'],
        utils._format_log_entries(page1, per_page,
                                  origin_context=origin_context))
    self.assertEqual(dsp['prev_rev'], page1[-1]['id'])
    self.assertEqual(dsp['prev_revs_breadcrumb'], page1[-1]['id'])
    self.assertEqual(dsp['next_rev'], None)
    self.assertEqual(dsp['next_revs_breadcrumb'], None)

    # Second page: breadcrumb grows by this page's last revision id.
    breadcrumb = str(dsp['prev_revs_breadcrumb'])
    dsp = utils.prepare_revision_log_for_display(
        page2, per_page, breadcrumb, origin_context=origin_context)
    self.assertEqual(
        dsp['revision_log_data'],
        utils._format_log_entries(page2, per_page,
                                  origin_context=origin_context))
    self.assertEqual(dsp['prev_rev'], page2[-1]['id'])
    self.assertEqual(dsp['prev_revs_breadcrumb'],
                     breadcrumb + '/' + page2[-1]['id'])
    self.assertEqual(dsp['next_rev'], breadcrumb)
    self.assertEqual(dsp['next_revs_breadcrumb'], None)

    # Third page: "next" navigation now pops the breadcrumb tail.
    breadcrumb = str(dsp['prev_revs_breadcrumb'])
    dsp = utils.prepare_revision_log_for_display(
        page3, per_page, breadcrumb, origin_context=origin_context)
    self.assertEqual(
        dsp['revision_log_data'],
        utils._format_log_entries(page3, per_page,
                                  origin_context=origin_context))
    self.assertEqual(dsp['prev_rev'], page3[-1]['id'])
    self.assertEqual(dsp['prev_revs_breadcrumb'],
                     breadcrumb + '/' + page3[-1]['id'])
    self.assertEqual(dsp['next_rev'], breadcrumb.split('/')[-1])
    self.assertEqual(dsp['next_revs_breadcrumb'],
                     '/'.join(breadcrumb.split('/')[:-1]))

    # Last (short) page: no further "previous" navigation.
    breadcrumb = str(dsp['prev_revs_breadcrumb'])
    dsp = utils.prepare_revision_log_for_display(
        page4, per_page, breadcrumb, origin_context=origin_context)
    self.assertEqual(
        dsp['revision_log_data'],
        utils._format_log_entries(page4, per_page,
                                  origin_context=origin_context))
    self.assertEqual(dsp['prev_rev'], None)
    self.assertEqual(dsp['prev_revs_breadcrumb'], None)
    self.assertEqual(dsp['next_rev'], breadcrumb.split('/')[-1])
    self.assertEqual(dsp['next_revs_breadcrumb'],
                     '/'.join(breadcrumb.split('/')[:-1]))
diff --git a/swh/web/tests/browse/views/data/origin_test_data.py b/swh/web/tests/browse/views/data/origin_test_data.py
index 96858b0d..5869f552 100644
--- a/swh/web/tests/browse/views/data/origin_test_data.py
+++ b/swh/web/tests/browse/views/data/origin_test_data.py
@@ -1,726 +1,740 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
origin_info_test_data = {
'id': 2,
'type': 'git',
'url': 'https://github.com/torvalds/linux'
}
origin_visits_test_data = [
{'date': '2015-07-09T21:09:24+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 1},
{'date': '2016-02-23T18:05:23.312045+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 2},
{'date': '2016-03-28T01:35:06.554111+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 3},
{'date': '2016-06-18T01:22:24.808485+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 4},
{'date': '2016-08-14T12:10:00.536702+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 5},
{'date': '2016-08-17T09:16:22.052065+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 6},
{'date': '2016-08-29T18:55:54.153721+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 7},
{'date': '2016-09-07T08:44:47.861875+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 8},
{'date': '2016-09-14T10:36:21.505296+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 9},
{'date': '2016-09-23T10:14:02.169862+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 10},
{'date': '2017-02-16T07:53:39.467657+00:00',
'metadata': {},
'origin': 2,
'status': 'partial',
'visit': 11},
{'date': '2017-05-04T19:40:09.336451+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 12},
{'date': '2017-09-07T18:43:13.021746+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 13},
{'date': '2017-09-09T05:14:33.466107+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 14},
{'date': '2017-09-09T17:18:54.307789+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 15},
{'date': '2017-09-10T05:29:01.462971+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 16},
{'date': '2017-09-10T17:35:20.158515+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 17},
{'date': '2017-09-11T05:49:58.300518+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 18},
{'date': '2017-09-11T18:00:15.037345+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 19},
{'date': '2017-09-12T06:06:34.703343+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 20},
{'date': '2017-09-12T18:12:35.344511+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 21},
{'date': '2017-09-13T06:26:36.580675+00:00',
'metadata': {},
'origin': 2,
'status': 'full',
'visit': 22}
]
stub_origin_info = {
'id': 7416001,
'type': 'git',
'url': 'https://github.com/webpack/webpack'
}
stub_visit_id = 10
stub_visit_unix_ts = 1493909263
stub_visit_iso_date = '2017-05-04T14:47:43+00:00'
stub_origin_visits = [
{'date': '2015-08-05T18:55:20.899865+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '23fac03bbf6f4d1037bc1477a85bc1c71e586f98',
'visit': 1},
{'date': '2016-03-06T12:16:26.240919+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': 'c71048f1d29a4889ef79f4a64e3c144efe83ea66',
'visit': 2},
{'date': '2016-03-21T11:40:10.329221+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '0d83f0dae76581e55b31ca96d3574261754f1f8f',
'visit': 3},
{'date': '2016-03-29T08:05:17.602649+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': 'eeb186a965a6df47327f34997ee164be66340046',
'visit': 4},
{'date': '2016-07-26T20:11:03.827577+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '1bf4bddbcf9be09ffeeaa68a85b53f039b2d32c2',
'visit': 5},
{'date': '2016-08-13T04:10:22.142897+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '57cfa801c5cba9b034f994c119e122fb153da3ec',
'visit': 6},
{'date': '2016-08-16T22:57:46.201737+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': 'd0c85af82c4c3abb2024c5c628f3e4b584c8b0ef',
'visit': 7},
{'date': '2016-08-17T17:58:43.346437+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '6ba2ff728eed2777156fd5c89424a2a46609f334',
'visit': 8},
{'date': '2016-08-29T23:29:09.445945+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': 'adb6d6adf04454f2b8acd6bf3c89d82dd84c3eed',
'visit': 9},
{'date': '2016-09-07T13:49:15.096109+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '8e29ad8af5f8a9bac86d26f48f956cc0ec69bcd9',
'visit': 10},
{'date': '2016-09-14T15:01:09.017257+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '78fbd0992f12cf1694257b2495e12bd2a3971643',
'visit': 11},
{'date': '2016-09-23T12:29:15.921727+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': '4fa28005f67b46f285bebe7228fe0a96a287ad94',
'visit': 12},
{'date': '2017-02-16T07:44:23.302439+00:00',
'metadata': {},
'origin': 7416001,
'status': 'partial',
+ 'snapshot': None,
'visit': 13},
{'date': '2017-05-04T14:47:43.228455+00:00',
'metadata': {},
'origin': 7416001,
'status': 'full',
+ 'snapshot': 'ea21a9304f34a5b646f81994bd53d580de917427',
'visit': 14}
]
-stub_origin_occurrences = (
+stub_origin_snapshot = (
[
{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'name': 'HEAD',
'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
'date': '04 May 2017, 13:27 UTC',
'message': 'Merge pull request #4816 from webpack/bugfix/hoist-immutable-export'},
{'directory': 'c47a824f95109ca7cafdd1c3206332a0d10df55d',
'name': 'refs/heads/0.10',
'revision': 'f944553c77254732c4ce22c0add32aa1f641959d',
'date': '19 June 2013, 12:46 UTC',
'message': 'webpack 0.10'},
{'directory': '45e31184ebb7699cd74175145c7eb11cce3f085e',
'name': 'refs/heads/0.11',
'revision': '0a29109a6e4579926ebc9b03a6301c61861cce62',
'date': '31 December 2013, 12:43 UTC',
'message': '0.11.18'},
{'directory': '42346b33e2d16019490c273ff586ee88817327b3',
'name': 'refs/heads/0.8',
'revision': 'e42701dc6f9b035bfbb5d0fffded905d8b456db4',
'date': 'e42701dc6f9b035bfbb5d0fffded905d8b456db4',
'message': 'fixes #54'},
{'directory': '828c7e9385523f852f8d4dac3cb241e319a9ce61',
'name': 'refs/heads/0.9',
'revision': '6c3f51e6d9491a2463ad099a2ca49255ec83ff00',
'date': '19 March 2013, 07:56 UTC',
'message': 'updated some small things on the cli'},
{'directory': '2c50e78d63bdc4441c8d2691f5729b04f0ab3ecd',
'name': 'refs/heads/1.0',
'revision': 'fb7958d172e1ef6fb77f23bf56818ad24e896e5c',
'date': '03 March 2014, 14:37 UTC',
'message': 'Merge pull request #188 from polotek/patch-1'},
{'directory': '31a3355c4d0a464aa311c5fa11c7f8b20aede6b4',
'name': 'refs/heads/IgnorePluginHotfix',
'revision': 'fdc922a2fa007e71b7ec07252012ffab9a178d4a',
'date': '08 April 2017, 15:50 UTC',
'message': 'add tests for ignored context modules'},
{'directory': 'e566db1fc65cb61b3799c6e0f0ad06b2406f095f',
'name': 'refs/heads/beta',
'revision': '40428853da5d9ce6a8751e13b5e54145337b6a7e',
'date': '04 May 2017, 13:35 UTC',
'message': 'Merge remote-tracking branch \'origin/perf/chunks-set\' into beta'}
],
[{'name': 'v2.1.0-beta.6',
'message': '2.1.0-beta.6',
'date': '22 April 2016, 01:03 UTC',
'id': 'ae2e1a30e4f2ac701e8a6e2fe85a5f200d7e597a',
'target_type': 'revision',
'target': 'ca8b693c2c17bd06778476381fae23b3b21c0475',
'directory': '4e1f9b3c2f5c4bd205051a14af4ade62349ee57a'},
{'name': 'v2.1.0-beta.7',
'message': '2.1.0-beta.7',
'date': '07 May 2016, 00:00 UTC',
'id': '46e94bbdc9e54cf6273a985732446b4c963bf1aa',
'target_type': 'revision',
'target': '9162f9e6eea62137139f95b8aaedee335c870edd',
'directory': '713763f90f17371fec714c1660f229ba41b9f5e2'},
{'name': 'v2.1.0-beta.8',
'message': '2.1.0-beta.8',
'date': '29 May 2016, 20:53 UTC',
'id': '910ada6bf809f8f1c318e098f67f2c0b3c80c888',
'target_type': 'revision',
'target': 'abf0cefd592700a19856c3ef9b6d65f905ec73c1',
'directory': 'd6a069fda992759670851dc38500b2e8dccdc595'},
{'name': 'v2.1.0-beta.9',
'message': '2.1.0-beta.9',
'date': '04 June 2016, 20:19 UTC',
'id': '63063663c86b0c7e5886adbd3c22aacba9b957b0',
'target_type': 'revision',
'target': 'dc3bd055027d8d1ebbb0ebdd07fb73387a0ab6d1',
'directory': '467251807aea6ba83719194e9a1d65e8053f14e0'}
])
stub_origin_master_branch = 'HEAD'
stub_origin_root_directory_sha1 = 'ae59ceecf46367e8e4ad800e231fc76adc3afffb'
stub_origin_root_directory_entries = [
{'checksums': {'sha1': '1a17dd2c8245559b43a90aa7c084572e917effff',
'sha1_git': '012966bd94e648f23b53e71a3f9918e28abc5d81',
'sha256': 'd65ab1f8cdb323e2b568a8e99814b1b986a38beed85a380981b383c0feb93525'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 394,
'name': '.editorconfig',
'perms': 33188,
'status': 'visible',
'target': '012966bd94e648f23b53e71a3f9918e28abc5d81',
'type': 'file'},
{'checksums': {'sha1': '2e727ec452dc592ae6038d3e09cd35d83d7ea265',
'sha1_git': '291a4e25598633cd7c286ad8d6cbe9eee5a6291a',
'sha256': 'd5951c8b796288e0dae1da50575d1b8619462a8df2272cd250146872a1fe804a'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 1839,
'name': '.eslintrc.js',
'perms': 33188,
'status': 'visible',
'target': '291a4e25598633cd7c286ad8d6cbe9eee5a6291a',
'type': 'file'},
{'checksums': {'sha1': '5c59880c0576b2789ec126b61b09fad7a982763b',
'sha1_git': 'ac579eb7bc04ba44fe84f3c8d1082573e9f4f514',
'sha256': '8a59a61ff6c0f568a8f76bab434baf3318c80a75ef6fb1b6eb861a0c97518de0'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 67,
'name': '.gitattributes',
'perms': 33188,
'status': 'visible',
'target': 'ac579eb7bc04ba44fe84f3c8d1082573e9f4f514',
'type': 'file'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': '.github',
'perms': 16384,
'target': '93bdcf98e9c05307b39a9d9e00e48cda6dbd036c',
'type': 'dir'},
{'checksums': {'sha1': '7e1008eee2a373f0db7746d0416856aec6b95c22',
'sha1_git': '84bc35a3abab38bdf87a8f32cc82ce9c136d331e',
'sha256': '7de369f1d26bc34c7b6329de78973db07e341320eace6a8704a65d4c5bf5993f'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 167,
'name': '.gitignore',
'perms': 33188,
'status': 'visible',
'target': '84bc35a3abab38bdf87a8f32cc82ce9c136d331e',
'type': 'file'},
{'checksums': {'sha1': '06d96508b7d343ff42868f9b6406864517935da7',
'sha1_git': '79b049846744a2da3eb1c4ac3b01543f2bdca44a',
'sha256': '697733061d96dd2e061df04dcd86392bb792e2dbe5725a6cb14a436d3c8b76f1'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 706,
'name': '.jsbeautifyrc',
'perms': 33188,
'status': 'visible',
'target': '79b049846744a2da3eb1c4ac3b01543f2bdca44a',
'type': 'file'},
{'checksums': {'sha1': '8041a4a66f46e615c99a850700850a8bd1079dce',
'sha1_git': '90e4f1ef5beb167891b2e029da6eb9b14ab17add',
'sha256': '3d6a76a57351b9e3acc5843ff2127dc2cf70c023133312143f86ee74ba9ef6d3'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 1059,
'name': '.travis.yml',
'perms': 33188,
'status': 'visible',
'target': '90e4f1ef5beb167891b2e029da6eb9b14ab17add',
'type': 'file'},
{'checksums': {'sha1': 'cd52973e43c6f4294e8cdfd3106df602b9993f20',
'sha1_git': 'e5279ebcecd87445648d003c36e6abfebed0ed73',
'sha256': '130672b16dff61b1541b6d26c2e568ac11830a31d04faace1583d3ad4a38720e'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 2058,
'name': 'CONTRIBUTING.md',
'perms': 33188,
'status': 'visible',
'target': 'e5279ebcecd87445648d003c36e6abfebed0ed73',
'type': 'file'},
{'checksums': {'sha1': '3bebb9ba92e45dd02a0512e144f6a46b14a9b8ab',
'sha1_git': '8c11fc7289b75463fe07534fcc8224e333feb7ff',
'sha256': '9068a8782d2fb4c6e432cfa25334efa56f722822180570802bf86e71b6003b1e'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 1071,
'name': 'LICENSE',
'perms': 33188,
'status': 'visible',
'target': '8c11fc7289b75463fe07534fcc8224e333feb7ff',
'type': 'file'},
{'checksums': {'sha1': '6892825420196e84c7104a7ff71ec75db20a1fca',
'sha1_git': '8f96a0a6d3bfe7183765938483585f3981151553',
'sha256': 'b0170cfc28f56ca718b43ab086ca5428f853268687c8c033b4fbf028c66d663e'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 46700,
'name': 'README.md',
'perms': 33188,
'status': 'visible',
'target': '8f96a0a6d3bfe7183765938483585f3981151553',
'type': 'file'},
{'checksums': {'sha1': '9bc4902b282f9f1c9f8f885a6947f3bf0f6e6e5f',
'sha1_git': 'dd6912c8fc97eff255d64da84cfd9837ebf0a05a',
'sha256': 'e06dbc101195ec7ea0b9aa236be4bdc03784a01f64d6e11846ce3a3f6e1080c6'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 590,
'name': 'appveyor.yml',
'perms': 33188,
'status': 'visible',
'target': 'dd6912c8fc97eff255d64da84cfd9837ebf0a05a',
'type': 'file'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'benchmark',
'perms': 16384,
'target': '6bd2996b76e051982aa86499a2b485594e607fe3',
'type': 'dir'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'bin',
'perms': 16384,
'target': '681da97ea1ce9a2bd29e3e72781d80e8b961cd51',
'type': 'dir'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'buildin',
'perms': 16384,
'target': '35cfb25d1b3a4063bf04a43f9cbb7e1e87703708',
'type': 'dir'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'ci',
'perms': 16384,
'target': 'efccd3ce0a0304c8cbcffcfdfcafcf1e598819b8',
'type': 'dir'},
{'checksums': {'sha1': '9eb3d0e3711f68f82d29785e64ebff2c0d7cec7a',
'sha1_git': '1ecf877e445bcf865ef53cfcecadda7e9691aace',
'sha256': '2007e0883c2784bb82584a10d53a0f0c36286dd913741bfd5e4d22b812db529c'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 529,
'name': 'circle.yml',
'perms': 33188,
'status': 'visible',
'target': '1ecf877e445bcf865ef53cfcecadda7e9691aace',
'type': 'file'},
{'checksums': {'sha1': '63209428718e101492c3bb91509f1b4e319b0d7d',
'sha1_git': 'b3fa4e6abe22977e6267e9969a593e790bf2cd36',
'sha256': '5d14c8d70215f46a9722d29c7ebff8cc9bd24509650d7ee601fd461e52a52f7f'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 254,
'name': 'codecov.yml',
'perms': 33188,
'status': 'visible',
'target': 'b3fa4e6abe22977e6267e9969a593e790bf2cd36',
'type': 'file'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'examples',
'perms': 16384,
'target': '7e3ac01795317fbc36a031a9117e7963d6c7da90',
'type': 'dir'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'hot',
'perms': 16384,
'target': 'a5eea6ca952fba9f7ae4177627ed5e22754df9f5',
'type': 'dir'},
{'checksums': {'sha1': '92d9367db4ba049f698f5bf78b6946b8e2d91345',
'sha1_git': 'eaa9cc4a247b01d6a9c0adc91997fefe6a62be1f',
'sha256': 'd4b42fa0651cf3d99dea0ca5bd6ba64cc21e80be7d9ea05b2b4423ef8f16ec36'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 19,
'name': 'input.js',
'perms': 33188,
'status': 'visible',
'target': 'eaa9cc4a247b01d6a9c0adc91997fefe6a62be1f',
'type': 'file'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'lib',
'perms': 16384,
'target': '187d40104aa21475d8af88ccd77fc582cf6ac7a6',
'type': 'dir'},
{'checksums': {'sha1': 'f17ffa2dc14262292e2275efa3730a96fe060c44',
'sha1_git': 'd55b7110929cbba3d94da01494a272b39878ac0f',
'sha256': '012d4446ef8ab6656251b1b7f8e0217a5666ec04ad952e8a617b70946de17166'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 9132,
'name': 'open-bot.yaml',
'perms': 33188,
'status': 'visible',
'target': 'd55b7110929cbba3d94da01494a272b39878ac0f',
'type': 'file'},
{'checksums': {'sha1': '3a6638e72fcc2499f1a4c9b46d4d00d239bbe1c8',
'sha1_git': '6d1aa82c90ecd184d136151eb81d240e1fea723e',
'sha256': '00faf7dde1eb0742f3ca567af4dbcd8c01a38cf30d8faa7f0208f46dbc6b5201'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 4034,
'name': 'package.json',
'perms': 33188,
'status': 'visible',
'target': '6d1aa82c90ecd184d136151eb81d240e1fea723e',
'type': 'file'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'schemas',
'perms': 16384,
'target': 'f1f89c389f73c29e7a5d1a0ce5f9e0f166857815',
'type': 'dir'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'test',
'perms': 16384,
'target': '318c279189d186a1e06653fc5c78c539878c4d7d',
'type': 'dir'},
{'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': None,
'name': 'web_modules',
'perms': 16384,
'target': '93a5cc8e492d0b0323386814a72536381019ef7b',
'type': 'dir'},
{'checksums': {'sha1': '8047389fcc8e286ceed5536c677c2e803032cf84',
'sha1_git': 'eb8509f70158c231a3fd864aecf2649590bbedf3',
'sha256': '8cbe1ce94349ac3bc6cbcc952efd45d838c6b4524af8a773b18e1ebe8b4f936b'},
'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
'length': 141192,
'name': 'yarn.lock',
'perms': 33188,
'status': 'visible',
'target': 'eb8509f70158c231a3fd864aecf2649590bbedf3',
'type': 'file'}
]
stub_origin_sub_directory_path = 'lib/webworker'
stub_origin_sub_directory_entries = [
{'checksums': {'sha1': '7bf366cd9f4a9835c73aafb70e44f640bab7ad16',
'sha1_git': '870252b7a175ee5ec2edfe2c22b2d56aa04bece4',
'sha256': 'e0af438932627dd9d53b36bfe69c3dbad6dc4d4569f6cdb29d606c9df2b128fa'},
'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
'length': 921,
'name': 'WebWorkerChunkTemplatePlugin.js',
'perms': 33188,
'status': 'visible',
'target': '870252b7a175ee5ec2edfe2c22b2d56aa04bece4',
'type': 'file'},
{'checksums': {'sha1': 'e2862b2787702bd3eb856f73627d5d8df5a8b550',
'sha1_git': 'b3e90d26a68ad9da0a7cc97a262db585fa4c73ba',
'sha256': '1c254e76248ff5ec7e2185cdb1cfd2e0338087244d2d617a868c346317b7646b'},
'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
'length': 1039,
'name': 'WebWorkerHotUpdateChunkTemplatePlugin.js',
'perms': 33188,
'status': 'visible',
'target': 'b3e90d26a68ad9da0a7cc97a262db585fa4c73ba',
'type': 'file'},
{'checksums': {'sha1': 'a1e04061d3e50bb8c024b07e9464da7392f37bf1',
'sha1_git': '1e503e028fdd5322c9f7d8ec50f54006cacf334e',
'sha256': '72dea06510d1a4435346f8dca20d8898a394c52c7382a97bd73d1840e31f90b3'},
'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
'length': 1888,
'name': 'WebWorkerMainTemplate.runtime.js',
'perms': 33188,
'status': 'visible',
'target': '1e503e028fdd5322c9f7d8ec50f54006cacf334e',
'type': 'file'},
{'checksums': {'sha1': 'b95c16e90784cf7025352839133b482149526da0',
'sha1_git': '46c9fe382d606ce19e556deeae6a23af47a8027d',
'sha256': 'c78c7ca9ee0aa341f843a431ef27c75c386607be3037d44ff530bfe3218edb3c'},
'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
'length': 4051,
'name': 'WebWorkerMainTemplatePlugin.js',
'perms': 33188,
'status': 'visible',
'target': '46c9fe382d606ce19e556deeae6a23af47a8027d',
'type': 'file'},
{'checksums': {'sha1': 'ec9df36b1e8dd689d84dbeeeb9f45fe9f9d96605',
'sha1_git': 'd850018bb0d2ad41dd0ae9e5c887dff8a23601e9',
'sha256': 'f995f6a13511955244850c2344c6cef09c10ab24c49f8448544e2b34aa69d03c'},
'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
'length': 763,
'name': 'WebWorkerTemplatePlugin.js',
'perms': 33188,
'status': 'visible',
'target': 'd850018bb0d2ad41dd0ae9e5c887dff8a23601e9',
'type': 'file'}
]
stub_content_origin_info = {
'id': 10357753,
'type': 'git',
'url': 'https://github.com/KDE/kate'
}
stub_content_origin_visit_id = 10
stub_content_origin_visit_unix_ts = 1494032350
stub_content_origin_visit_iso_date = '2017-05-06T00:59:10+00:00'
stub_content_origin_branch = 'HEAD'
stub_content_origin_visits = [
{'date': '2015-09-26T09:30:52.373449+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 1},
{'date': '2016-03-10T05:36:11.118989+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 2},
{'date': '2016-03-24T07:39:29.727793+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 3},
{'date': '2016-03-31T22:55:31.402863+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 4},
{'date': '2016-05-26T06:25:54.879676+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 5},
{'date': '2016-06-07T17:16:33.964164+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 6},
{'date': '2016-07-27T01:38:20.345358+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 7},
{'date': '2016-08-13T04:46:45.987508+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 8},
{'date': '2016-08-16T23:24:13.214496+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 9},
{'date': '2016-08-17T18:10:39.841005+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 10},
{'date': '2016-08-30T17:28:02.476486+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 11},
{'date': '2016-09-08T09:32:37.152054+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 12},
{'date': '2016-09-15T09:47:37.758093+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 13},
{'date': '2016-12-04T06:14:02.688518+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 14},
{'date': '2017-02-16T08:45:57.719974+00:00',
'metadata': {},
'origin': 10357753,
'status': 'partial',
'visit': 15},
{'date': '2017-05-06T00:59:10.495727+00:00',
'metadata': {},
'origin': 10357753,
'status': 'full',
'visit': 16}
]
-stub_content_origin_occurrences = (
+stub_content_origin_snapshot = (
[
{'directory': '08e8329257dad3a3ef7adea48aa6e576cd82de5b',
'name': 'HEAD',
'revision': '11f15b0789344427ddf17b8d75f38577c4395ce0',
'date': '02 May 2017, 05:33 UTC',
'message': 'GIT_SILENT made messages (after extraction)'},
{'directory': '2371baf0411e3adf12d65daf86c3b135633dd5e4',
'name': 'refs/heads/Applications/14.12',
'revision': '5b27ad32f8c8da9b6fc898186d59079488fb74c9',
'date': '23 February 2015, 12:10 UTC',
'message': 'SVN_SILENT made messages (.desktop file)'},
{'directory': '5d024d33a218eeb164936301a2f89231d1f0854a',
'name': 'refs/heads/Applications/15.04',
'revision': '4f1e29120795ac643044991e91f24d02c9980202',
'date': '04 July 2015, 12:34 UTC',
'message': 'SVN_SILENT made messages (.desktop file)'},
{'directory': 'f33984df50ec29dbbc86295adb81ebb831e3b86d',
'name': 'refs/heads/Applications/15.08',
'revision': '52722e588f46a32b480b5f304ba21480fc8234b1',
'date': '12 June 2016, 20:28 UTC',
'message': 'Correctly restore view config of all split views'},
{'directory': 'e706b836cf32929a48b6f92c07766f237f9d068f',
'name': 'refs/heads/Applications/15.12',
'revision': '38c4e42c4a653453fc668c704bb8995ae31b5baf',
'date': '06 September 2016, 12:01 UTC',
'message': 'Fix crash in tab switcher plugin when using split views'},
{'directory': 'ebf8ae783b44df5c827bfa46227e5dbe98f25eb4',
'name': 'refs/heads/Applications/16.04',
'revision': 'd0fce3b880ab37a551d75ec940137e0f46bf2143',
'date': '06 September 2016, 12:01 UTC',
'message': 'Fix crash in tab switcher plugin when using split views'}
],
[{'name': 'v4.9.90',
'message': 'KDE 4.9.90',
'date': '09 December 2012, 23:15 UTC',
'id': 'f6a3a31474a86023377ce6fa1cbec3d9ab809d06',
'target_type': 'revision',
'target': '4dd3d7de2f684fcdf27028bafdc022183e33610d',
'directory': 'a5b9c74c35732189b8aa7567f979f9ac36fdb8bc'},
{'name': 'v4.9.95',
'message': 'KDE 4.9.95',
'date': '02 January 2013, 19:00 UTC',
'id': '74bab04b34b243269354f6e5530d6d0edf92f84d',
'target_type': 'revision',
'target': '6bd42579908cf62f094ebca0e100832208967428',
'directory': 'aaeba0a71293465b9026249381c0a1f13a13a43f'},
{'name': 'v4.9.97',
'message': 'KDE 4.9.97',
'date': '05 January 2013, 20:34 UTC',
'id': 'd8bf93d6915c4ab17de882c443423f281c961a1c',
'target_type': 'revision',
'target': '5fbd023fc46ecc57a6772be2aa04f532e8426f43',
'directory': '0ce36caec34ad7c930f35eca907148208b2a3f2b'},
{'name': 'v4.9.98',
'message': 'KDE 4.9.98',
'date': '21 January 2013, 19:36 UTC',
'id': '9bf0265d4fce650926bfd93b117584eb3fd0bd73',
'target_type': 'revision',
'target': '670aff3a940fecf6a085fe71a5bead2edcad8a55',
'directory': '0747fbcc783dfab9e857040287ed400df145079d'}
])
diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py
index 09678da8..263827bc 100644
--- a/swh/web/tests/browse/views/test_origin.py
+++ b/swh/web/tests/browse/views/test_origin.py
@@ -1,720 +1,720 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from nose.tools import istest, nottest
from django.test import TestCase
from django.utils.html import escape
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, gen_path_info, format_utc_iso_date,
parse_timestamp
)
from swh.web.tests.testbase import SWHWebTestBase
from .data.origin_test_data import (
origin_info_test_data,
origin_visits_test_data,
stub_content_origin_info, stub_content_origin_visit_id,
stub_content_origin_visit_unix_ts, stub_content_origin_visit_iso_date,
stub_content_origin_branch,
- stub_content_origin_visits, stub_content_origin_occurrences,
+ stub_content_origin_visits, stub_content_origin_snapshot,
stub_origin_info, stub_visit_id,
- stub_origin_visits, stub_origin_occurrences,
+ stub_origin_visits, stub_origin_snapshot,
stub_origin_root_directory_entries, stub_origin_master_branch,
stub_origin_root_directory_sha1, stub_origin_sub_directory_path,
stub_origin_sub_directory_entries, stub_visit_unix_ts, stub_visit_iso_date
)
from .data.content_test_data import (
stub_content_root_dir,
stub_content_text_data,
stub_content_text_path
)
class SwhBrowseOriginTest(SWHWebTestBase, TestCase):
@patch('swh.web.browse.views.origin.get_origin_visits')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_browse(self, mock_service, mock_get_origin_visits):
mock_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_visits.return_value = origin_visits_test_data
url = reverse('browse-origin',
kwargs={'origin_type': origin_info_test_data['type'],
'origin_url': origin_info_test_data['url']})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('origin.html')
self.assertContains(resp, '<pre>%s</pre>' % origin_info_test_data['type'])
self.assertContains(resp, '<pre><a href="%s">%s</a></pre>' %
(origin_info_test_data['url'],
origin_info_test_data['url']))
self.assertContains(resp, '<td class="swh-origin-visit">',
count=len(origin_visits_test_data))
for visit in origin_visits_test_data:
visit_date_iso = format_utc_iso_date(visit['date'], '%Y-%m-%dT%H:%M:%S')
visit_date = format_utc_iso_date(visit['date'])
browse_url = reverse('browse-origin-directory',
kwargs={'origin_type': origin_info_test_data['type'],
'origin_url': origin_info_test_data['url'],
'timestamp': visit_date_iso})
self.assertContains(resp, 'href="%s">%s</a>' %
(browse_url, visit_date))
@nottest
def origin_content_view_test(self, origin_info, origin_visits,
origin_branches, origin_releases,
origin_branch,
root_dir_sha1, content_sha1,
content_path, content_data,
content_language,
visit_id=None, timestamp=None):
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url'],
'path': content_path}
if not visit_id:
visit_id = origin_visits[-1]['visit']
query_params = {}
if timestamp:
url_args['timestamp'] = timestamp
if visit_id:
query_params['visit_id'] = visit_id
url = reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('content.html')
self.assertContains(resp, '<code class="%s">' % content_language)
self.assertContains(resp, escape(content_data))
split_path = content_path.split('/')
filename = split_path[-1]
path = content_path.replace(filename, '')[:-1]
path_info = gen_path_info(path)
del url_args['path']
if timestamp:
url_args['timestamp'] = \
format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
'%Y-%m-%dT%H:%M:%S')
root_dir_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<li class="swh-path">',
count=len(path_info)+1)
self.assertContains(resp, '<a href="%s">%s</a>' %
(root_dir_url, root_dir_sha1[:7]))
for p in path_info:
url_args['path'] = p['path']
dir_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">%s</a>' %
(dir_url, p['name']))
self.assertContains(resp, '<li>%s</li>' % filename)
query_string = 'sha1_git:' + content_sha1
url_raw = reverse('browse-content-raw',
kwargs={'query_string': query_string},
query_params={'filename': filename})
self.assertContains(resp, url_raw)
del url_args['path']
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, '<li class="swh-branch">',
count=len(origin_branches))
url_args['path'] = content_path
for branch in origin_branches:
query_params['branch'] = branch['name']
root_dir_branch_url = \
reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
self.assertContains(resp, '<li class="swh-release">',
count=len(origin_releases))
query_params['branch'] = None
for release in origin_releases:
query_params['release'] = release['name']
root_dir_release_url = \
reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
@patch('swh.web.browse.utils.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_occurrences')
+ @patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.views.origin.service')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.request_content')
@istest
def origin_content_view(self, mock_request_content, mock_utils_service,
- mock_service, mock_get_origin_visit_occurrences,
+ mock_service, mock_get_origin_visit_snapshot,
mock_get_origin_visits):
stub_content_text_sha1 = stub_content_text_data['checksums']['sha1']
mock_get_origin_visits.return_value = stub_content_origin_visits
- mock_get_origin_visit_occurrences.return_value = stub_content_origin_occurrences
+ mock_get_origin_visit_snapshot.return_value = stub_content_origin_snapshot
mock_service.lookup_directory_with_path.return_value = \
{'target': stub_content_text_sha1}
mock_request_content.return_value = stub_content_text_data
mock_utils_service.lookup_origin.return_value = stub_content_origin_info
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
- stub_content_origin_occurrences[0],
- stub_content_origin_occurrences[1],
+ stub_content_origin_snapshot[0],
+ stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp')
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
- stub_content_origin_occurrences[0],
- stub_content_origin_occurrences[1],
+ stub_content_origin_snapshot[0],
+ stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
visit_id=stub_content_origin_visit_id)
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
- stub_content_origin_occurrences[0],
- stub_content_origin_occurrences[1],
+ stub_content_origin_snapshot[0],
+ stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
timestamp=stub_content_origin_visit_unix_ts)
self.origin_content_view_test(stub_content_origin_info,
stub_content_origin_visits,
- stub_content_origin_occurrences[0],
- stub_content_origin_occurrences[1],
+ stub_content_origin_snapshot[0],
+ stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
timestamp=stub_content_origin_visit_iso_date)
@nottest
def origin_directory_view(self, origin_info, origin_visits,
origin_branches, origin_releases, origin_branch,
root_directory_sha1, directory_entries,
visit_id=None, timestamp=None, path=None):
dirs = [e for e in directory_entries
if e['type'] == 'dir']
files = [e for e in directory_entries
if e['type'] == 'file']
if not visit_id:
visit_id = origin_visits[-1]['visit']
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
query_params = {}
if timestamp:
url_args['timestamp'] = timestamp
else:
query_params['visit_id'] = visit_id
if path:
url_args['path'] = path
url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('directory.html')
self.assertContains(resp, '<td class="swh-directory">',
count=len(dirs))
self.assertContains(resp, '<td class="swh-content">',
count=len(files))
if timestamp:
url_args['timestamp'] = \
format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
'%Y-%m-%dT%H:%M:%S')
for d in dirs:
dir_path = d['name']
if path:
dir_path = "%s/%s" % (path, d['name'])
dir_url_args = dict(url_args)
dir_url_args['path'] = dir_path
dir_url = reverse('browse-origin-directory',
kwargs=dir_url_args,
query_params=query_params)
self.assertContains(resp, dir_url)
for f in files:
file_path = f['name']
if path:
file_path = "%s/%s" % (path, f['name'])
file_url_args = dict(url_args)
file_url_args['path'] = file_path
file_url = reverse('browse-origin-content',
kwargs=file_url_args,
query_params=query_params)
self.assertContains(resp, file_url)
if 'path' in url_args:
del url_args['path']
root_dir_branch_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
nb_bc_paths = 1
if path:
nb_bc_paths = len(path.split('/')) + 1
self.assertContains(resp, '<li class="swh-path">', count=nb_bc_paths)
self.assertContains(resp, '<a href="%s">%s</a>' %
(root_dir_branch_url,
root_directory_sha1[:7]))
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
if path:
url_args['path'] = path
self.assertContains(resp, '<li class="swh-branch">',
count=len(origin_branches))
for branch in origin_branches:
query_params['branch'] = branch['name']
root_dir_branch_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
self.assertContains(resp, '<li class="swh-release">',
count=len(origin_releases))
query_params['branch'] = None
for release in origin_releases:
query_params['release'] = release['name']
root_dir_release_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
self.assertContains(resp, '<button id="vault-cook-directory"')
self.assertContains(resp, '<button id="vault-cook-revision"')
@patch('swh.web.browse.utils.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_occurrences')
+ @patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_root_directory_view(self, mock_origin_service,
mock_utils_service,
- mock_get_origin_visit_occurrences,
+ mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.return_value = stub_origin_occurrences
+ mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.return_value = \
stub_origin_root_directory_entries
mock_utils_service.lookup_origin.return_value = stub_origin_info
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
visit_id=stub_visit_id)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_unix_ts)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_iso_date)
@patch('swh.web.browse.utils.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_occurrences')
+ @patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_sub_directory_view(self, mock_origin_service,
mock_utils_service,
- mock_get_origin_visit_occurrences,
+ mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.return_value = stub_origin_occurrences
+ mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.return_value = \
stub_origin_sub_directory_entries
mock_origin_service.lookup_directory_with_path.return_value = \
{'target': '120c39eeb566c66a77ce0e904d29dfde42228adb'}
mock_utils_service.lookup_origin.return_value = stub_origin_info
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
visit_id=stub_visit_id,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_unix_ts,
path=stub_origin_sub_directory_path)
self.origin_directory_view(stub_origin_info, stub_origin_visits,
- stub_origin_occurrences[0],
- stub_origin_occurrences[1],
+ stub_origin_snapshot[0],
+ stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_iso_date,
path=stub_origin_sub_directory_path)
@patch('swh.web.browse.views.origin.request_content')
@patch('swh.web.browse.utils.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_occurrences')
+ @patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_request_errors(self, mock_origin_service,
mock_utils_service,
- mock_get_origin_visit_occurrences,
+ mock_get_origin_visit_snapshot,
mock_get_origin_visits,
mock_request_content):
mock_origin_service.lookup_origin.side_effect = \
NotFoundExc('origin not found')
url = reverse('browse-origin',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, "origin not found", status_code=404)
mock_utils_service.lookup_origin.side_effect = None
mock_utils_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_visits.return_value = []
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, "No SWH visit", status_code=404)
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.side_effect = \
+ mock_get_origin_visit_snapshot.side_effect = \
NotFoundExc('visit not found')
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'},
query_params={'visit_id': len(stub_origin_visits)+1})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.side_effect = None
- mock_get_origin_visit_occurrences.return_value = ([], [])
+ mock_get_origin_visit_snapshot.side_effect = None
+ mock_get_origin_visit_snapshot.return_value = ([], [])
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'),
'Origin.*has an empty list of branches')
- mock_get_origin_visit_occurrences.return_value = stub_origin_occurrences
+ mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.side_effect = \
NotFoundExc('Directory not found')
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Directory not found', status_code=404)
mock_origin_service.lookup_origin.side_effect = None
mock_origin_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_visits.return_value = []
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'foo'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, "No SWH visit", status_code=404)
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.side_effect = \
+ mock_get_origin_visit_snapshot.side_effect = \
NotFoundExc('visit not found')
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'foo'},
query_params={'visit_id': len(stub_origin_visits)+1})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.side_effect = None
- mock_get_origin_visit_occurrences.return_value = ([], [])
+ mock_get_origin_visit_snapshot.side_effect = None
+ mock_get_origin_visit_snapshot.return_value = ([], [])
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'baz'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'),
'Origin.*has an empty list of branches')
- mock_get_origin_visit_occurrences.return_value = stub_origin_occurrences
+ mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_origin_service.lookup_directory_with_path.return_value = \
{'target': stub_content_text_data['checksums']['sha1']}
mock_request_content.side_effect = \
NotFoundExc('Content not found')
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'baz'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Content not found', status_code=404)
@patch('swh.web.browse.utils.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_occurrences')
+ @patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_branches(self, mock_origin_service,
mock_utils_service,
- mock_get_origin_visit_occurrences,
+ mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.return_value = stub_origin_occurrences
+ mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_origin.return_value = stub_origin_info
url_args = {'origin_type': stub_origin_info['type'],
'origin_url': stub_origin_info['url']}
url = reverse('browse-origin-branches',
kwargs=url_args)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('branches.html')
- origin_branches = stub_origin_occurrences[0]
- origin_releases = stub_origin_occurrences[1]
+ origin_branches = stub_origin_snapshot[0]
+ origin_releases = stub_origin_snapshot[1]
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, '<tr class="swh-origin-branch">',
count=len(origin_branches))
for branch in origin_branches:
browse_branch_url = reverse('browse-origin-directory',
kwargs={'origin_type': stub_origin_info['type'],
'origin_url': stub_origin_info['url']},
query_params={'branch': branch['name']})
self.assertContains(resp, '<a href="%s">%s</a>' % (escape(browse_branch_url), branch['name']))
browse_revision_url = reverse('browse-revision',
kwargs={'sha1_git': branch['revision']},
query_params={'origin_type': stub_origin_info['type'],
'origin_url': stub_origin_info['url']})
self.assertContains(resp, '<a href="%s">%s</a>' % (escape(browse_revision_url), branch['revision'][:7]))
@patch('swh.web.browse.utils.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_occurrences')
+ @patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@istest
def origin_releases(self, mock_origin_service,
mock_utils_service,
- mock_get_origin_visit_occurrences,
+ mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
- mock_get_origin_visit_occurrences.return_value = stub_origin_occurrences
+ mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_origin.return_value = stub_origin_info
url_args = {'origin_type': stub_origin_info['type'],
'origin_url': stub_origin_info['url']}
url = reverse('browse-origin-releases',
kwargs=url_args)
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('releases.html')
- origin_branches = stub_origin_occurrences[0]
- origin_releases = stub_origin_occurrences[1]
+ origin_branches = stub_origin_snapshot[0]
+ origin_releases = stub_origin_snapshot[1]
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args)
self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, '<tr class="swh-origin-release">',
count=len(origin_releases))
for release in origin_releases:
browse_release_url = reverse('browse-release',
kwargs={'sha1_git': release['id']},
query_params={'origin_type': stub_origin_info['type'],
'origin_url': stub_origin_info['url']})
self.assertContains(resp, '<a href="%s">%s</a>' % (escape(browse_release_url), release['name']))
diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py
index 7f363695..f5cdf7dd 100644
--- a/swh/web/tests/browse/views/test_release.py
+++ b/swh/web/tests/browse/views/test_release.py
@@ -1,105 +1,106 @@
# Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from nose.tools import istest
from django.test import TestCase
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import reverse, format_utc_iso_date
from swh.web.tests.testbase import SWHWebTestBase
from .data.release_test_data import (
stub_release
)
from .data.origin_test_data import stub_origin_visits
class SwhBrowseReleaseTest(SWHWebTestBase, TestCase):
@patch('swh.web.browse.views.release.service')
@patch('swh.web.browse.utils.service')
@istest
def release_browse(self, mock_service_utils, mock_service):
mock_service.lookup_release.return_value = stub_release
url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']})
release_id = stub_release['id']
release_name = stub_release['name']
author_id = stub_release['author']['id']
author_name = stub_release['author']['name']
author_url = reverse('browse-person',
kwargs={'person_id': author_id})
release_date = stub_release['date']
message = stub_release['message']
target_type = stub_release['target_type']
target = stub_release['target']
target_url = reverse('browse-revision', kwargs={'sha1_git': target})
message_lines = stub_release['message'].split('\n')
resp = self.client.get(url)
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('release.html')
self.assertContains(resp, '<a href="%s">%s</a>' %
(author_url, author_name))
self.assertContains(resp, format_utc_iso_date(release_date))
self.assertContains(resp, '<h2>%s</h2>%s' % (message_lines[0],
'\n'.join(message_lines[1:])))
self.assertContains(resp, release_id)
self.assertContains(resp, release_name)
self.assertContains(resp, target_type)
self.assertContains(resp, '<a href="%s">%s</a>' %
(target_url, target))
origin_info = {
'id': 13706355,
'type': 'git',
'url': 'https://github.com/python/cpython'
}
mock_service_utils.lookup_origin.return_value = origin_info
mock_service_utils.lookup_origin_visits.return_value = stub_origin_visits
mock_service_utils.MAX_LIMIT = 20
url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']},
query_params={'origin_type': origin_info['type'],
'origin_url': origin_info['url']})
resp = self.client.get(url)
+
self.assertEquals(resp.status_code, 200)
self.assertTemplateUsed('release.html')
self.assertContains(resp, '<a href="%s">%s</a>' %
(author_url, author_name))
self.assertContains(resp, format_utc_iso_date(release_date))
self.assertContains(resp, '<h2>%s</h2>%s' % (message_lines[0],
'\n'.join(message_lines[1:])))
self.assertContains(resp, release_id)
self.assertContains(resp, release_name)
self.assertContains(resp, target_type)
target_url = reverse('browse-revision', kwargs={'sha1_git': target},
query_params={'origin_type': origin_info['type'],
'origin_url': origin_info['url']})
self.assertContains(resp, '<a href="%s">%s</a>' % (target_url, target))
mock_service.lookup_release.side_effect = \
NotFoundExc('Release not found')
url = reverse('browse-release',
kwargs={'sha1_git': 'ffff'})
resp = self.client.get(url)
self.assertEquals(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Release not found', status_code=404)
File Metadata
Details
Attached
Mime Type
text/x-diff
Expires
Fri, Jul 4, 12:20 PM (2 w, 3 d ago)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
3287721
Attached To
rDWAPPS Web applications
Event Timeline
Log In to Comment