diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py
index 604d4ea9f..7e753153d 100644
--- a/swh/web/browse/utils.py
+++ b/swh/web/browse/utils.py
@@ -1,1173 +1,1178 @@
 # Copyright (C) 2017-2018  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import base64
 from collections import defaultdict
 import magic
 import math
 import pypandoc
 import stat
 import textwrap
 
 from django.core.cache import cache
 from django.utils.safestring import mark_safe
 
 from importlib import reload
 
 from swh.model.identifiers import persistent_identifier
 from swh.web.common import highlightjs, service
 from swh.web.common.exc import NotFoundExc, http_status_code_message
 from swh.web.common.utils import (
     reverse, format_utc_iso_date, parse_timestamp,
     get_origin_visits, get_swh_persistent_id,
     swh_object_icons
 )
 from swh.web.config import get_config
 
 
 def get_directory_entries(sha1_git):
     """Function that retrieves the content of a directory
     from the archive.
 
     The directory entries are first sorted in lexicographical order.
     Sub-directories and regular files are then extracted.
 
     Args:
         sha1_git: sha1_git identifier of the directory
 
     Returns:
         A tuple whose first member is the list of sub-directories and
         whose second member is the list of regular files
 
     Raises:
         NotFoundExc if the directory is not found
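
     Example:
         A minimal usage sketch (the identifier below is hypothetical)::

             dirs, files = get_directory_entries(
                 '828da2b80e41aa958b2c98526f4a1d2cc7d298b7')
             # 'dirs' and 'files' are lists of dicts sorted by 'name';
             # 'perms' is rendered as a symbolic mode string via stat.filemode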
     """
     cache_entry_id = 'directory_entries_%s' % sha1_git
     cache_entry = cache.get(cache_entry_id)
 
     if cache_entry:
         return cache_entry
 
     entries = list(service.lookup_directory(sha1_git))
     for e in entries:
         e['perms'] = stat.filemode(e['perms'])
         if e['type'] == 'rev':
             # modify dir entry name to explicitly show it points
             # to a revision
             e['name'] = '%s @ %s' % (e['name'], e['target'][:7])
 
     dirs = [e for e in entries if e['type'] in ('dir', 'rev')]
     files = [e for e in entries if e['type'] == 'file']
 
     dirs = sorted(dirs, key=lambda d: d['name'])
     files = sorted(files, key=lambda f: f['name'])
 
     cache.set(cache_entry_id, (dirs, files))
 
     return dirs, files
 
 
 def get_mimetype_and_encoding_for_content(content):
     """Function that returns the mime type and the encoding associated to
     a content buffer using the magic module under the hood.
 
     Args:
         content (bytes): a content buffer
 
     Returns:
         A tuple (mimetype, encoding), for instance ('text/plain',
         'us-ascii'), associated with the provided content.
 
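     Example:
         A quick sanity check (the detected values depend on the installed
         libmagic database, so the output is indicative)::

             mime_type, encoding = get_mimetype_and_encoding_for_content(
                 b'hello world')
             # typically ('text/plain', 'us-ascii')
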
     """
     while True:
         try:
             magic_result = magic.detect_from_content(content)
             mime_type = magic_result.mime_type
             encoding = magic_result.encoding
             break
         except Exception:
             # workaround an issue with the magic module, which can fail
             # if detect_from_content is called multiple times in
             # a short amount of time
             reload(magic)
 
     return mime_type, encoding
 
 
 # maximum authorized content size in bytes for HTML display
 # with code highlighting
 content_display_max_size = get_config()['content_display_max_size']
 
 snapshot_content_max_size = get_config()['snapshot_content_max_size']
 
 
 def request_content(query_string, max_size=content_display_max_size,
                     raise_if_unavailable=True, reencode=True):
     """Function that retrieves a content from the archive.
 
     Raw bytes content is first retrieved, then the content mime type.
     If the mime type is not stored in the archive, it will be computed
     using the Python magic module.
 
     Args:
         query_string: a string of the form "[ALGO_HASH:]HASH" where
             optional ALGO_HASH can be either ``sha1``, ``sha1_git``,
             ``sha256``, or ``blake2s256`` (defaults to ``sha1``) and HASH
             the hexadecimal representation of the hash value
         max_size: the maximum size for a content to retrieve (defaults to
            1MB; no size limit if None)
         raise_if_unavailable: whether to raise an exception if the content
             bytes cannot be retrieved from the archive
         reencode: whether to re-encode textual content to utf-8 for display
 
     Returns:
         A dict filled with the raw content bytes under the key 'raw_data',
         along with content metadata (mimetype, encoding, language, licenses)
         and error information when the raw bytes are unavailable
 
     Raises:
         NotFoundExc if the content is not found
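
     Example:
         A minimal sketch (the hash below is hypothetical and the lookup
         requires a reachable archive backend)::

             content_data = request_content(
                 'sha1_git:aafb16d69fd30ff58afdd69036a26047f3aebdc6')
             # content_data notably holds 'raw_data', 'mimetype',
             # 'encoding', 'language' and 'licenses' entries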
     """
     content_data = service.lookup_content(query_string)
     filetype = None
     language = None
     license = None
     # requests to the indexer db may fail so properly handle
     # those cases in order to avoid content display errors
     try:
         filetype = service.lookup_content_filetype(query_string)
         language = service.lookup_content_language(query_string)
         license = service.lookup_content_license(query_string)
     except Exception:
         pass
     mimetype = 'unknown'
     encoding = 'unknown'
     if filetype:
         mimetype = filetype['mimetype']
         encoding = filetype['encoding']
+        # workaround when encountering corrupted data due to implicit
+        # conversion from bytea to text in the indexer db (see T818)
+        # TODO: Remove that code when all data have been correctly converted
+        if mimetype.startswith('\\'):
+            filetype = None
 
     content_data['error_code'] = 200
     content_data['error_message'] = ''
     content_data['error_description'] = ''
 
     if not max_size or content_data['length'] < max_size:
         try:
             content_raw = service.lookup_content_raw(query_string)
         except Exception as e:
             if raise_if_unavailable:
                 raise e
             else:
                 content_data['raw_data'] = None
                 content_data['error_code'] = 404
                 content_data['error_description'] = \
                     'The bytes of the content are currently not available in the archive.' # noqa
                 content_data['error_message'] = \
                     http_status_code_message[content_data['error_code']]
         else:
             content_data['raw_data'] = content_raw['data']
 
             if not filetype:
                 mimetype, encoding = \
                     get_mimetype_and_encoding_for_content(content_data['raw_data']) # noqa
 
             # encode textual content to utf-8 if needed
             if reencode and mimetype.startswith('text/'):
                 # probably a malformed UTF-8 content; re-encode it,
                 # replacing invalid characters with a substitution one
                 if encoding == 'unknown-8bit':
                     content_data['raw_data'] = \
                         content_data['raw_data'].decode('utf-8', 'replace')\
                                                 .encode('utf-8')
                 elif 'ascii' not in encoding and encoding not in ['utf-8', 'binary']: # noqa
                     content_data['raw_data'] = \
                         content_data['raw_data'].decode(encoding, 'replace')\
                                                 .encode('utf-8')
             elif reencode and mimetype.startswith('application/octet-stream'):
                 # libmagic may detect a text content as binary,
                 # so try to decode it for display
                 encodings = ['us-ascii']
                 encodings += ['iso-8859-%s' % i for i in range(1, 17)]
                 for encoding in encodings:
                     try:
                         content_data['raw_data'] = \
                                 content_data['raw_data'].decode(encoding)\
                                                         .encode('utf-8')
                     except Exception:
                         pass
                     else:
                         # ensure display in content view
                         mimetype = 'text/plain'
                         break
     else:
         content_data['raw_data'] = None
 
     content_data['mimetype'] = mimetype
     content_data['encoding'] = encoding
 
     if language:
         content_data['language'] = language['lang']
     else:
         content_data['language'] = 'not detected'
     if license:
         content_data['licenses'] = ', '.join(license['facts'][0]['licenses'])
     else:
         content_data['licenses'] = 'not detected'
 
     return content_data
 
 
 _browsers_supported_image_mimes = set(['image/gif', 'image/png',
                                        'image/jpeg', 'image/bmp',
                                        'image/webp', 'image/svg',
                                        'image/svg+xml'])
 
 
 def prepare_content_for_display(content_data, mime_type, path):
     """Function that prepares a content for HTML display.
 
     The function tries to associate a programming language to a
     content in order to perform syntax highlighting client-side
     using highlightjs. The language is determined using either
     the content filename or its mime type.
     If the mime type corresponds to an image format supported
     by web browsers, the content will be encoded in base64
     for displaying the image.
 
     Args:
         content_data (bytes): raw bytes of the content
         mime_type (string): mime type of the content
         path (string): path of the content including filename
 
     Returns:
         A dict containing the content bytes (base64 encoded if the content
         is a displayable image) under the key 'content_data', the
         corresponding highlightjs language class under the key 'language',
         and the possibly updated mime type under the key 'mimetype'.
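
     Example:
         An illustrative call on a small Python snippet::

             display_data = prepare_content_for_display(
                 b'print("hello")', 'text/x-python', 'hello.py')
             # display_data['language'] should be a highlightjs class
             # such as 'python', as resolved from the filename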
     """
 
     language = highlightjs.get_hljs_language_from_filename(path)
 
     if not language:
         language = highlightjs.get_hljs_language_from_mime_type(mime_type)
 
     if not language:
         language = 'nohighlight'
     elif mime_type.startswith('application/'):
         mime_type = mime_type.replace('application/', 'text/')
 
     if mime_type.startswith('image/'):
         if mime_type in _browsers_supported_image_mimes:
             content_data = base64.b64encode(content_data)
         else:
             content_data = None
 
     if mime_type.startswith('image/svg'):
         mime_type = 'image/svg+xml'
 
     return {'content_data': content_data,
             'language': language,
             'mimetype': mime_type}
 
 
 def get_origin_visit(origin_info, visit_ts=None, visit_id=None,
                      snapshot_id=None):
     """Function that returns information about a visit for
     a given origin.
     The visit is retrieved from a provided timestamp.
     The closest visit from that timestamp is selected.
 
     Args:
         origin_info (dict): a dict filled with origin information
             (id, url, type)
         visit_ts (int or str): an ISO date string or Unix timestamp to parse
 
     Returns:
         A dict containing the visit info as described below::
 
             {'origin': 2,
              'date': '2017-10-08T11:54:25.582463+00:00',
              'metadata': {},
              'visit': 25,
              'status': 'full'}
 
     """
     visits = get_origin_visits(origin_info)
 
     if not visits:
         raise NotFoundExc('No visit associated to origin with'
                           ' type %s and url %s!' % (origin_info['type'],
                                                     origin_info['url']))
 
     if snapshot_id:
         visit = [v for v in visits if v['snapshot'] == snapshot_id]
         if len(visit) == 0:
             raise NotFoundExc(
                 'Visit for snapshot with id %s for origin with type %s'
                 ' and url %s not found!' % (snapshot_id, origin_info['type'],
                                             origin_info['url']))
         return visit[0]
 
     if visit_id:
         visit = [v for v in visits if v['visit'] == int(visit_id)]
         if len(visit) == 0:
             raise NotFoundExc(
                 'Visit with id %s for origin with type %s'
                 ' and url %s not found!' % (visit_id, origin_info['type'],
                                             origin_info['url']))
         return visit[0]
 
     if not visit_ts:
         # when no timestamp is provided, return the latest full visit,
         # or the latest visit if none is full
         for v in reversed(visits):
             if v['status'] == 'full':
                 return v
         return visits[-1]
 
     parsed_visit_ts = math.floor(parse_timestamp(visit_ts).timestamp())
 
     visit_idx = None
     for i, visit in enumerate(visits):
         ts = math.floor(parse_timestamp(visit['date']).timestamp())
         if i == 0 and parsed_visit_ts <= ts:
             return visit
         elif i == len(visits) - 1:
             if parsed_visit_ts >= ts:
                 return visit
         else:
             next_ts = math.floor(
                 parse_timestamp(visits[i+1]['date']).timestamp())
             if parsed_visit_ts >= ts and parsed_visit_ts < next_ts:
                 if (parsed_visit_ts - ts) < (next_ts - parsed_visit_ts):
                     visit_idx = i
                     break
                 else:
                     visit_idx = i+1
                     break
 
     if visit_idx is not None:
         visit = visits[visit_idx]
         while visit_idx < len(visits) - 1 and \
                 visit['date'] == visits[visit_idx+1]['date']:
             visit_idx = visit_idx + 1
             visit = visits[visit_idx]
         return visit
     else:
         raise NotFoundExc(
             'Visit with timestamp %s for origin with type %s and url %s not found!' % # noqa
             (visit_ts, origin_info['type'], origin_info['url']))
 
 
 def process_snapshot_branches(snapshot_branches):
     """
     Process a dictionary describing snapshot branches: extract those
     targeting revisions and releases, put them in two different lists,
     then sort those lists in lexicographical order of the branches' names.
 
     Args:
         snapshot_branches (dict): A dict describing the branches of a snapshot
             as returned for instance by :func:`swh.web.common.service.lookup_snapshot`
 
     Returns:
         tuple: A tuple whose first member is the sorted list of branches
             targeting revisions, and whose second member is the sorted
             list of branches targeting releases
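
     Example:
         A shape-only sketch (the target identifier is hypothetical and
         resolving it requires a reachable archive backend)::

             branches, releases = process_snapshot_branches({
                 'refs/heads/master': {
                     'target_type': 'revision',
                     'target': '7bc08e1aa0b08cb23e18115779bd3c2d9906085b',
                 },
             })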
     """ # noqa
     branches = {}
     releases = {}
     revision_to_branch = defaultdict(set)
     revision_to_release = defaultdict(set)
     release_to_branch = defaultdict(set)
     for branch_name, target in snapshot_branches.items():
         if not target:
             # FIXME: display branches with an unknown target anyway
             continue
         target_id = target['target']
         target_type = target['target_type']
         if target_type == 'revision':
             branches[branch_name] = {
                 'name': branch_name,
                 'revision': target_id,
             }
             revision_to_branch[target_id].add(branch_name)
         elif target_type == 'release':
             release_to_branch[target_id].add(branch_name)
         # FIXME: handle pointers to other object types
         # FIXME: handle branch aliases
 
     releases_info = service.lookup_release_multiple(
         release_to_branch.keys()
     )
     for release in releases_info:
         branches_to_update = release_to_branch[release['id']]
         for branch in branches_to_update:
             releases[branch] = {
                 'name': release['name'],
                 'branch_name': branch,
                 'date': format_utc_iso_date(release['date']),
                 'id': release['id'],
                 'message': release['message'],
                 'target_type': release['target_type'],
                 'target': release['target'],
             }
         if release['target_type'] == 'revision':
             revision_to_release[release['target']].update(
                 branches_to_update
             )
 
     revisions = service.lookup_revision_multiple(
         set(revision_to_branch.keys()) | set(revision_to_release.keys())
     )
 
     for revision in revisions:
         if not revision:
             continue
         revision_data = {
             'directory': revision['directory'],
             'date': format_utc_iso_date(revision['date']),
             'message': revision['message'],
         }
         for branch in revision_to_branch[revision['id']]:
             branches[branch].update(revision_data)
         for release in revision_to_release[revision['id']]:
             releases[release]['directory'] = revision['directory']
 
     ret_branches = list(sorted(branches.values(), key=lambda b: b['name']))
     ret_releases = list(sorted(releases.values(), key=lambda b: b['name']))
 
     return ret_branches, ret_releases
 
 
 def get_snapshot_content(snapshot_id):
     """Returns the lists of branches and releases
     associated to a swh snapshot.
     That list is put in  cache in order to speedup the navigation
     in the swh-web/browse ui.
 
     .. warning:: At most 1000 branches contained in the snapshot
         will be returned for performance reasons.
 
     Args:
         snapshot_id (str): hexadecimal representation of the snapshot
             identifier
 
     Returns:
         A tuple with two members. The first one is a list of dict describing
         the snapshot branches. The second one is a list of dict describing the
         snapshot releases.
 
     Raises:
         NotFoundExc if the snapshot does not exist
     """
     cache_entry_id = 'swh_snapshot_%s' % snapshot_id
     cache_entry = cache.get(cache_entry_id)
 
     if cache_entry:
         return cache_entry['branches'], cache_entry['releases']
 
     branches = []
     releases = []
 
     if snapshot_id:
         snapshot = service.lookup_snapshot(
             snapshot_id, branches_count=snapshot_content_max_size)
         branches, releases = process_snapshot_branches(snapshot['branches'])
 
     cache.set(cache_entry_id, {
         'branches': branches,
         'releases': releases,
     })
 
     return branches, releases
 
 
 def get_origin_visit_snapshot(origin_info, visit_ts=None, visit_id=None,
                               snapshot_id=None):
     """Returns the lists of branches and releases
     associated to a swh origin for a given visit.
     The visit is expressed by a timestamp. In the latter case,
     the closest visit from the provided timestamp will be used.
     If no visit parameter is provided, it returns the list of branches
     found for the latest visit.
     That list is put in  cache in order to speedup the navigation
     in the swh-web/browse ui.
 
     .. warning:: At most 1000 branches contained in the snapshot
         will be returned for performance reasons.
 
     Args:
         origin_info (dict): a dict filled with origin information
             (id, url, type)
         visit_ts (int or str): an ISO date string or Unix timestamp to parse
         visit_id (int): optional visit id for disambiguation in case
             several visits have the same timestamp
 
     Returns:
         A tuple with two members. The first one is a list of dict describing
         the origin branches for the given visit.
         The second one is a list of dict describing the origin releases
         for the given visit.
 
     Raises:
         NotFoundExc if the origin or its visit are not found
     """
 
     visit_info = get_origin_visit(origin_info, visit_ts, visit_id, snapshot_id)
 
     return get_snapshot_content(visit_info['snapshot'])
 
 
 def gen_link(url, link_text=None, link_attrs={}):
     """
     Utility function for generating an HTML link to insert
     in Django templates.
 
     Args:
         url (str): a url
         link_text (str): optional text for the produced link,
             if not provided the url will be used
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form '<a href="url">link_text</a>'
 
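     Example:
         The output below can be checked directly::

             gen_link('https://example.org', 'example', {'class': 'ext'})
             # '<a class="ext" href="https://example.org">example</a>'
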
     """
     attrs = ' '
     for k, v in link_attrs.items():
         attrs += '%s="%s" ' % (k, v)
     if not link_text:
         link_text = url
     link = '<a%shref="%s">%s</a>' % (attrs, url, link_text)
     return mark_safe(link)
 
 
 def gen_person_link(person_id, person_name, snapshot_context=None,
                     link_attrs={}):
     """
     Utility function for generating a link to a person HTML view
     to insert in Django templates.
 
     Args:
         person_id (int): a person id
         person_name (str): the associated person name
         snapshot_context (dict): if provided, generate snapshot-dependent
             browsing link
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form '<a href="person_view_url">person_name</a>'
 
     """
     query_params = None
     if snapshot_context and snapshot_context['origin_info']:
         origin_info = snapshot_context['origin_info']
         query_params = {'origin_type': origin_info['type'],
                         'origin': origin_info['url']}
         if 'timestamp' in snapshot_context['url_args']:
             query_params['timestamp'] = \
                  snapshot_context['url_args']['timestamp']
         if 'visit_id' in snapshot_context['query_params']:
             query_params['visit_id'] = \
                 snapshot_context['query_params']['visit_id']
     elif snapshot_context:
         query_params = {'snapshot_id': snapshot_context['snapshot_id']}
     person_url = reverse('browse-person', url_args={'person_id': person_id},
                          query_params=query_params)
     return gen_link(person_url, person_name or 'None', link_attrs)
 
 
 def gen_revision_url(revision_id, snapshot_context=None):
     """
     Utility function for generating a url to a revision.
 
     Args:
         revision_id (str): a revision id
         snapshot_context (dict): if provided, generate snapshot-dependent
             browsing url
 
     Returns:
         str: The url to browse the revision
 
     """
     query_params = None
     if snapshot_context and snapshot_context['origin_info']:
         origin_info = snapshot_context['origin_info']
         origin_type = snapshot_context['origin_type']
         query_params = {'origin_type': origin_type,
                         'origin': origin_info['url']}
         if 'timestamp' in snapshot_context['url_args']:
             query_params['timestamp'] = \
                  snapshot_context['url_args']['timestamp']
         if 'visit_id' in snapshot_context['query_params']:
             query_params['visit_id'] = \
                 snapshot_context['query_params']['visit_id']
     elif snapshot_context:
         query_params = {'snapshot_id': snapshot_context['snapshot_id']}
 
     return reverse('browse-revision',
                    url_args={'sha1_git': revision_id},
                    query_params=query_params)
 
 
 def gen_revision_link(revision_id, shorten_id=False, snapshot_context=None,
                       link_text=None, link_attrs={}):
     """
     Utility function for generating a link to a revision HTML view
     to insert in Django templates.
 
     Args:
         revision_id (str): a revision id
         shorten_id (boolean): whether to shorten the revision id to 7
             characters for the link text
         snapshot_context (dict): if provided, generate snapshot-dependent
             browsing link
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         str: An HTML link in the form '<a href="revision_url">revision_id</a>'
 
     """
     if not revision_id:
         return None
 
     revision_url = gen_revision_url(revision_id, snapshot_context)
 
     if shorten_id:
         return gen_link(revision_url, revision_id[:7], link_attrs)
     else:
         if not link_text:
             link_text = revision_id
         return gen_link(revision_url, link_text, link_attrs)
 
 
 def gen_origin_link(origin_info, link_attrs={}):
     """
     Utility function for generating a link to a software origin HTML view
     to insert in Django templates.
 
     Args:
         origin_info (dict): a dict filled with origin information
             (id, type, url)
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form '<a href="origin_view_url">Origin: origin_url</a>'
 
     """ # noqa
     origin_browse_url = reverse('browse-origin',
                                 url_args={'origin_type': origin_info['type'],
                                           'origin_url': origin_info['url']})
     return gen_link(origin_browse_url,
                     'Origin: ' + origin_info['url'], link_attrs)
 
 
 def gen_directory_link(sha1_git, link_text=None, link_attrs={}):
     """
     Utility function for generating a link to a directory HTML view
     to insert in Django templates.
 
     Args:
         sha1_git (str): directory identifier
         link_text (str): optional text for the generated link
             (the generated url will be used by default)
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form '<a href="directory_view_url">link_text</a>'
 
     """
     if not sha1_git:
         return None
 
     directory_url = reverse('browse-directory',
                             url_args={'sha1_git': sha1_git})
 
     if not link_text:
         link_text = directory_url
     return gen_link(directory_url, link_text, link_attrs)
 
 
 def gen_snapshot_link(snapshot_id, link_text=None, link_attrs={}):
     """
     Utility function for generating a link to a snapshot HTML view
     to insert in Django templates.
 
     Args:
         snapshot_id (str): snapshot identifier
         link_text (str): optional text for the generated link
             (the generated url will be used by default)
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form '<a href="snapshot_view_url">link_text</a>'
 
     """
     snapshot_url = reverse('browse-snapshot',
                            url_args={'snapshot_id': snapshot_id})
     if not link_text:
         link_text = snapshot_url
     return gen_link(snapshot_url, link_text, link_attrs)
 
 
 def gen_snapshot_directory_link(snapshot_context, revision_id=None,
                                 link_text=None, link_attrs={}):
     """
     Utility function for generating a link to a directory HTML view
     in the context of a snapshot to insert in Django templates.
 
     Args:
         snapshot_context (dict): the snapshot information
         revision_id (str): optional revision identifier in order
             to use the associated directory
         link_text (str): optional text to use for the generated link
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form
         '<a href="origin_directory_view_url">origin_directory_view_url</a>'
     """
     query_params = {'revision': revision_id}
     if snapshot_context['origin_info']:
         origin_info = snapshot_context['origin_info']
         url_args = {'origin_url': origin_info['url']}
         if 'timestamp' in snapshot_context['url_args']:
             url_args['timestamp'] = \
                 snapshot_context['url_args']['timestamp']
         if 'visit_id' in snapshot_context['query_params']:
             query_params['visit_id'] = \
                 snapshot_context['query_params']['visit_id']
         directory_url = reverse('browse-origin-directory',
                                 url_args=url_args,
                                 query_params=query_params)
     else:
         url_args = {'snapshot_id': snapshot_context['snapshot_id']}
         directory_url = reverse('browse-snapshot-directory',
                                 url_args=url_args,
                                 query_params=query_params)
 
     if not link_text:
         link_text = directory_url
     return gen_link(directory_url, link_text, link_attrs)
 
 
 def gen_content_link(sha1_git, link_text=None, link_attrs={}):
     """
     Utility function for generating a link to a content HTML view
     to insert in Django templates.
 
     Args:
         sha1_git (str): content identifier
         link_text (str): optional text for the generated link
             (the generated url will be used by default)
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form '<a href="content_view_url">link_text</a>'
 
     """
     if not sha1_git:
         return None
     content_url = reverse('browse-content',
                           url_args={'query_string': 'sha1_git:' + sha1_git})
     if not link_text:
         link_text = content_url
     return gen_link(content_url, link_text, link_attrs)
 
 
 def get_revision_log_url(revision_id, snapshot_context=None):
     """
     Utility function for getting the URL for a revision log HTML view
     (possibly in the context of an origin).
 
     Args:
         revision_id (str): revision identifier the history heads to
         snapshot_context (dict): if provided, generate snapshot-dependent
             browsing link
     Returns:
         The revision log view URL
     """
     query_params = {'revision': revision_id}
     if snapshot_context and snapshot_context['origin_info']:
         origin_info = snapshot_context['origin_info']
         url_args = {'origin_url': origin_info['url']}
         if 'timestamp' in snapshot_context['url_args']:
             url_args['timestamp'] = \
                 snapshot_context['url_args']['timestamp']
         if 'visit_id' in snapshot_context['query_params']:
             query_params['visit_id'] = \
                 snapshot_context['query_params']['visit_id']
         revision_log_url = reverse('browse-origin-log',
                                    url_args=url_args,
                                    query_params=query_params)
     elif snapshot_context:
         url_args = {'snapshot_id': snapshot_context['snapshot_id']}
         revision_log_url = reverse('browse-snapshot-log',
                                    url_args=url_args,
                                    query_params=query_params)
     else:
         revision_log_url = reverse('browse-revision-log',
                                    url_args={'sha1_git': revision_id})
     return revision_log_url
 
 
 def gen_revision_log_link(revision_id, snapshot_context=None, link_text=None,
                           link_attrs={}):
     """
     Utility function for generating a link to a revision log HTML view
     (possibly in the context of an origin) to insert in Django templates.
 
     Args:
         revision_id (str): revision identifier the history heads to
         snapshot_context (dict): if provided, generate snapshot-dependent
             browsing link
         link_text (str): optional text to use for the generated link
         link_attrs (dict): optional attributes (e.g. class)
             to add to the link
 
     Returns:
         An HTML link in the form
         '<a href="revision_log_view_url">link_text</a>'
     """
     if not revision_id:
         return None
 
     revision_log_url = get_revision_log_url(revision_id, snapshot_context)
 
     if not link_text:
         link_text = revision_log_url
     return gen_link(revision_log_url, link_text, link_attrs)
 
 
 def format_log_entries(revision_log, per_page, snapshot_context=None):
     """
     Utility function that processes raw revision log data for HTML display.
     Its purpose is to:
 
         * add links to relevant browse views
         * format date in human readable format
         * truncate the log to the requested number of entries
 
     Args:
         revision_log (list): raw revision log as returned by the swh-web api
         per_page (int): number of log entries per page
         snapshot_context (dict): if provided, generate snapshot-dependent
             browsing link
 
     Returns:
         The list of formatted log entries, ready for HTML display
     """
     revision_log_data = []
     for i, rev in enumerate(revision_log):
         if i == per_page:
             break
         author_name = 'None'
         author_fullname = 'None'
         committer_fullname = 'None'
         if rev['author']:
             author_name = rev['author']['name'] or rev['author']['fullname']
             author_fullname = rev['author']['fullname']
         if rev['committer']:
             committer_fullname = rev['committer']['fullname']
         author_date = format_utc_iso_date(rev['date'])
         committer_date = format_utc_iso_date(rev['committer_date'])
 
         tooltip = 'revision %s\n' % rev['id']
         tooltip += 'author: %s\n' % author_fullname
         tooltip += 'author date: %s\n' % author_date
         tooltip += 'committer: %s\n' % committer_fullname
         tooltip += 'committer date: %s\n\n' % committer_date
         tooltip += textwrap.indent(rev['message'], ' '*4)
 
         revision_log_data.append({
             'author': author_name,
             'id': rev['id'][:7],
             'message': rev['message'],
             'date': author_date,
             'commit_date': committer_date,
             'url': gen_revision_url(rev['id'], snapshot_context),
             'tooltip': tooltip
         })
     return revision_log_data
 
 
 # list of origin types that can be found in the swh archive
 # TODO: retrieve it dynamically in an efficient way instead
 #       of hardcoding it
 _swh_origin_types = ['git', 'svn', 'deb', 'hg', 'ftp', 'deposit', 'pypi']
 
 
 def get_origin_info(origin_url, origin_type=None):
     """
     Get info about a software origin.
     Its main purpose is to automatically find an origin type
     when it is not provided as a parameter.
 
     Args:
         origin_url (str): complete url of a software origin
         origin_type (str): optional origin type
 
     Returns:
         A dict with the following entries:
             * type: the origin type
             * url: the origin url
             * id: the internal id of the origin
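
     Example:
         A minimal sketch (requires a reachable archive backend and an
         actually archived origin)::

             origin_info = get_origin_info('https://github.com/python/cpython')
             # e.g. {'type': 'git',
             #       'url': 'https://github.com/python/cpython',
             #       'id': ...}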
     """
     if origin_type:
         return service.lookup_origin({'type': origin_type,
                                       'url': origin_url})
     else:
         for origin_type in _swh_origin_types:
             try:
                 origin_info = service.lookup_origin({'type': origin_type,
                                                      'url': origin_url})
                 return origin_info
             except Exception:
                 pass
     raise NotFoundExc('Origin with url %s not found!' % origin_url)
 
 
 def get_snapshot_context(snapshot_id=None, origin_type=None, origin_url=None,
                          timestamp=None, visit_id=None):
     """
     Utility function to compute relevant information when navigating
     the archive in a snapshot context. The snapshot is either
     referenced by its id or retrieved from an origin visit.
 
     Args:
         snapshot_id (str): hexadecimal representation of a snapshot identifier,
             all other parameters will be ignored if it is provided
         origin_type (str): the origin type (git, svn, deposit, ...)
         origin_url (str): the origin_url (e.g. https://github.com/(user)/(repo)/)
         timestamp (str): a datetime string for retrieving the closest
             visit of the origin
         visit_id (int): optional visit id for disambiguation in case
             of several visits with the same timestamp
 
     Returns:
         A dict with the following entries:
             * origin_info: dict containing origin information
             * visit_info: dict containing visit information
             * branches: the list of branches for the origin found
               during the visit
             * releases: the list of releases for the origin found
               during the visit
             * origin_browse_url: the url to browse the origin
             * origin_branches_url: the url to browse the origin branches
             * origin_releases_url: the url to browse the origin releases
             * origin_visit_url: the url to browse the snapshot of the origin
               found during the visit
             * url_args: dict containing url arguments to use when browsing in
               the context of the origin and its visit
 
     Raises:
         NotFoundExc: if no snapshot is found for the visit of an origin.
     """ # noqa
     origin_info = None
     visit_info = None
     url_args = None
     query_params = {}
     branches = []
     releases = []
     browse_url = None
     visit_url = None
     branches_url = None
     releases_url = None
     swh_type = 'snapshot'
     if origin_url:
         swh_type = 'origin'
         origin_info = get_origin_info(origin_url, origin_type)
 
         visit_info = get_origin_visit(origin_info, timestamp, visit_id,
                                       snapshot_id)
         fmt_date = format_utc_iso_date(visit_info['date'])
         visit_info['fmt_date'] = fmt_date
         snapshot_id = visit_info['snapshot']
 
         if not snapshot_id:
             raise NotFoundExc('No snapshot associated to the visit of origin '
                               '%s on %s' % (origin_url, fmt_date))
 
         # the provided timestamp is not necessarily equal to the one
         # of the retrieved visit, so get the exact one in order
         # to use it in the urls generated below
         if timestamp:
             timestamp = visit_info['date']
 
         branches, releases = \
             get_origin_visit_snapshot(origin_info, timestamp, visit_id,
                                       snapshot_id)
 
         url_args = {'origin_type': origin_type,
                     'origin_url': origin_info['url']}
 
         query_params = {'visit_id': visit_id}
 
         browse_url = reverse('browse-origin-visits',
                              url_args=url_args)
 
         if timestamp:
             url_args['timestamp'] = format_utc_iso_date(timestamp,
                                                         '%Y-%m-%dT%H:%M:%S')
         visit_url = reverse('browse-origin-directory',
                             url_args=url_args,
                             query_params=query_params)
         visit_info['url'] = visit_url
 
         branches_url = reverse('browse-origin-branches',
                                url_args=url_args,
                                query_params=query_params)
 
         releases_url = reverse('browse-origin-releases',
                                url_args=url_args,
                                query_params=query_params)
     elif snapshot_id:
         branches, releases = get_snapshot_content(snapshot_id)
         url_args = {'snapshot_id': snapshot_id}
         browse_url = reverse('browse-snapshot',
                              url_args=url_args)
         branches_url = reverse('browse-snapshot-branches',
                                url_args=url_args)
 
         releases_url = reverse('browse-snapshot-releases',
                                url_args=url_args)
 
     releases = list(reversed(releases))
 
     snapshot_size = service.lookup_snapshot_size(snapshot_id)
 
     is_empty = sum(snapshot_size.values()) == 0
 
     swh_snp_id = persistent_identifier('snapshot', snapshot_id)
 
     return {
         'swh_type': swh_type,
         'swh_object_id': swh_snp_id,
         'snapshot_id': snapshot_id,
         'snapshot_size': snapshot_size,
         'is_empty': is_empty,
         'origin_info': origin_info,
         # keep track if the origin type was provided as url argument
         'origin_type': origin_type,
         'visit_info': visit_info,
         'branches': branches,
         'releases': releases,
         'branch': None,
         'release': None,
         'browse_url': browse_url,
         'branches_url': branches_url,
         'releases_url': releases_url,
         'url_args': url_args,
         'query_params': query_params
     }
 
 
 # list of common readme names ordered by preference
 # (lower indices have higher priority)
 _common_readme_names = [
     "readme.markdown",
     "readme.md",
     "readme.rst",
     "readme.txt",
     "readme"
 ]
 
 
 def get_readme_to_display(readmes):
     """
     Process a list of readme files found in a directory
     in order to find the most suitable one to display.
 
     Args:
         readmes (dict): a dict mapping readme file names to their
             sha1 checksums
 
     Returns:
         A tuple (readme_name, readme_url, readme_html)
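
     Example:
         The lookup is case-insensitive and follows the preference order
         defined by _common_readme_names (the sha1 values below are
         hypothetical, and the url requires the Django URL configuration
         to be loaded)::

             name, url, html = get_readme_to_display(
                 {'README.md': '8fac...', 'readme.txt': '0d5b...'})
             # name == 'README.md' (markdown is preferred over txt);
             # html is None since no rst conversion is involved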
     """
     readme_name = None
     readme_url = None
     readme_sha1 = None
     readme_html = None
 
     lc_readmes = {k.lower(): {'orig_name': k, 'sha1': v}
                   for k, v in readmes.items()}
 
     # look for readme names according to the preference order
     # defined by the _common_readme_names list
     for common_readme_name in _common_readme_names:
         if common_readme_name in lc_readmes:
             readme_name = lc_readmes[common_readme_name]['orig_name']
             readme_sha1 = lc_readmes[common_readme_name]['sha1']
             readme_url = reverse('browse-content-raw',
                                  url_args={'query_string': readme_sha1})
             break
 
     # otherwise pick the first readme-like file, if any
     if not readme_name and len(readmes.items()) > 0:
         readme_name = next(iter(readmes))
         readme_sha1 = readmes[readme_name]
         readme_url = reverse('browse-content-raw',
                              url_args={'query_string': readme_sha1})
 
     # convert rst README to html server-side as there is
     # no viable solution to perform that task client-side
     if readme_name and readme_name.endswith('.rst'):
         cache_entry_id = 'readme_%s' % readme_sha1
         cache_entry = cache.get(cache_entry_id)
 
         if cache_entry:
             readme_html = cache_entry
         else:
             try:
                 rst_doc = request_content(readme_sha1)
                 readme_html = pypandoc.convert_text(rst_doc['raw_data'],
                                                     'html', format='rst')
                 cache.set(cache_entry_id, readme_html)
             except Exception:
                 readme_html = 'Readme bytes are not available'
 
     return readme_name, readme_url, readme_html
 
 
 def get_swh_persistent_ids(swh_objects, snapshot_context=None):
     """
     Returns a list of dict containing info related to persistent
     identifiers of swh objects.
 
     Args:
         swh_objects (list): a list of dict with the following keys:
             * type: swh object type (content/directory/release/revision/snapshot)
             * id: swh object id
         snapshot_context (dict): optional parameter describing the snapshot in which
             the object has been found
 
     Returns:
         list: a list of dict with the following keys:
             * object_type: the swh object type (content/directory/release/revision/snapshot)
             * object_icon: the swh object icon to use in HTML views
             * swh_id: the computed swh object persistent identifier
             * swh_id_url: the url resolving the persistent identifier
             * show_options: boolean indicating if the persistent id options must
               be displayed in persistent ids HTML view
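
     Example:
         A shape-only sketch (the revision identifier is hypothetical)::

             swh_ids = get_swh_persistent_ids(
                 [{'type': 'revision',
                   'id': '7bc08e1aa0b08cb23e18115779bd3c2d9906085b'}])
             # swh_ids[0]['swh_id'] ==
             #     'swh:1:rev:7bc08e1aa0b08cb23e18115779bd3c2d9906085b'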
     """ # noqa
     swh_ids = []
     for swh_object in swh_objects:
         if not swh_object['id']:
             continue
         swh_id = get_swh_persistent_id(swh_object['type'], swh_object['id'])
         show_options = swh_object['type'] == 'content' or \
             (snapshot_context and snapshot_context['origin_info'] is not None)
 
         object_icon = swh_object_icons[swh_object['type']]
 
         swh_ids.append({
             'object_type': swh_object['type'],
             'object_icon': object_icon,
             'swh_id': swh_id,
             'swh_id_url': reverse('browse-swh-id',
                                   url_args={'swh_id': swh_id}),
             'show_options': show_options
         })
     return swh_ids
diff --git a/swh/web/common/converters.py b/swh/web/common/converters.py
index 08d53e24f..e4aded536 100644
--- a/swh/web/common/converters.py
+++ b/swh/web/common/converters.py
@@ -1,382 +1,381 @@
 # Copyright (C) 2015-2018  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 import json
 
 from swh.model import hashutil
 from swh.core.utils import decode_with_escape
 
 
 def _group_checksums(data):
     """Groups checksums values computed from hash functions used in swh
     and stored in data dict under a single entry 'checksums'
     """
     if data:
         checksums = {}
         for hash in hashutil.ALGORITHMS:
             if hash in data and data[hash]:
                 checksums[hash] = data[hash]
                 del data[hash]
         if len(checksums) > 0:
             data['checksums'] = checksums
 
 
 def fmap(f, data):
     """Map f to data at each level.
 
     This must keep the original data structure type:
     - map -> map
     - dict -> dict
     - list -> list
     - None -> None
 
     Args:
         f: function that expects one argument.
         data: data to traverse to apply the f function.
               list, map, dict or bare value.
 
     Returns:
         The same data-structure with modified values by the f function.
 
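     Example:
         The structure is preserved while leaf values are transformed::

             fmap(lambda x: x + 1, {'a': [1, 2], 'b': None})
             # {'a': [2, 3], 'b': None}
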
     """
     if data is None:
         return data
     if isinstance(data, map):
         return map(lambda y: fmap(f, y), (x for x in data))
     if isinstance(data, list):
         return [fmap(f, x) for x in data]
     if isinstance(data, dict):
         return {k: fmap(f, v) for (k, v) in data.items()}
     return f(data)
 
 
 def from_swh(dict_swh, hashess={}, bytess={}, dates={}, blacklist={},
              removables_if_empty={}, empty_dict={}, empty_list={},
              convert={}, convert_fn=lambda x: x):
     """Convert from a swh dictionary to something reasonably json
     serializable.
 
     Args:
         dict_swh: the original dictionary to transform
         hashess: list/set of keys representing hash values (sha1, sha256,
             sha1_git, etc...) as bytes, which need to be transformed into
             hexadecimal strings
         bytess: list/set of keys representing bytes values which need to
             be decoded
         blacklist: set of keys to filter out from the conversion
         convert: set of keys whose associated values need to be converted using
             convert_fn
         convert_fn: the conversion function to apply on the value of key in
             'convert'
 
     The remaining keys are copied as is in the output.
 
     Returns:
         a dictionary equivalent to dict_swh, with the values of the
         listed keys converted.
 
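     Example:
         Hash values become hex strings while bytes are decoded::

             from_swh({'id': bytes.fromhex('b04c'), 'name': b'v1.0'},
                      hashess={'id'}, bytess={'name'})
             # {'id': 'b04c', 'name': 'v1.0'}
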
     """
     def convert_hashes_bytes(v):
         """v is supposedly a hash as bytes, returns it converted in hex.
 
         """
         if isinstance(v, bytes):
             return hashutil.hash_to_hex(v)
         return v
 
     def convert_bytes(v):
         """v is supposedly a bytes string, decode as utf-8.
 
         FIXME: Improve decoding policy.
         If not utf-8, break!
 
         """
         if isinstance(v, bytes):
             return v.decode('utf-8')
         return v
 
     def convert_date(v):
         """
         Args:
             v (dict or datetime): either:
 
                 - a dict with three keys:
 
                   - timestamp (dict or integer timestamp)
                   - offset
                   - negative_utc
 
                 - or, a datetime
 
             We convert it to a human-readable string
 
         """
         if not v:
             return v
         if isinstance(v, datetime.datetime):
             return v.isoformat()
 
         tz = datetime.timezone(datetime.timedelta(minutes=v['offset']))
         swh_timestamp = v['timestamp']
         if isinstance(swh_timestamp, dict):
             date = datetime.datetime.fromtimestamp(
                 swh_timestamp['seconds'], tz=tz)
         else:
             date = datetime.datetime.fromtimestamp(
                 swh_timestamp, tz=tz)
 
         datestr = date.isoformat()
 
         if v['offset'] == 0 and v['negative_utc']:
             # remove the rightmost + and replace it with a -
             return '-'.join(datestr.rsplit('+', 1))
 
         return datestr
 
     if not dict_swh:
         return dict_swh
 
     new_dict = {}
     for key, value in dict_swh.items():
         if key in blacklist or (key in removables_if_empty and not value):
             continue
 
         if key in dates:
             new_dict[key] = convert_date(value)
         elif key in convert:
             new_dict[key] = convert_fn(value)
         elif isinstance(value, dict):
             new_dict[key] = from_swh(value,
                                      hashess=hashess, bytess=bytess,
                                      dates=dates, blacklist=blacklist,
                                      removables_if_empty=removables_if_empty,
                                      empty_dict=empty_dict,
                                      empty_list=empty_list,
                                      convert=convert,
                                      convert_fn=convert_fn)
         elif key in hashess:
             new_dict[key] = fmap(convert_hashes_bytes, value)
         elif key in bytess:
             try:
                 new_dict[key] = fmap(convert_bytes, value)
             except UnicodeDecodeError:
                 if 'decoding_failures' not in new_dict:
                     new_dict['decoding_failures'] = [key]
                 else:
                     new_dict['decoding_failures'].append(key)
                 new_dict[key] = fmap(decode_with_escape, value)
         elif key in empty_dict and not value:
             new_dict[key] = {}
         elif key in empty_list and not value:
             new_dict[key] = []
         else:
             new_dict[key] = value
 
     _group_checksums(new_dict)
 
     return new_dict
 
 
 def from_provenance(provenance):
     """Convert from a swh provenance information to a provenance dictionary.
 
     Args:
         provenance (dict): Dictionary with the following keys:
             - content (sha1_git): the content's identifier
             - revision (sha1_git): the revision the content was seen
             - origin (int): the origin the content was seen
             - visit (int): the visit it occurred
             - path (bytes): the path the content was seen at
 
     """
     return from_swh(provenance,
                     hashess={'content', 'revision'},
                     bytess={'path'})
 
 
 def from_origin(origin):
     """Convert from a swh origin to an origin dictionary.
 
     """
     return from_swh(origin)
 
 
 def from_release(release):
     """Convert from a swh release to a json serializable release dictionary.
 
     Args:
         release (dict): dictionary with keys:
 
             - id: identifier of the release (sha1 in bytes)
             - target: identifier of the object the release points to
               (sha1 in bytes)
             - message: release's comment message (bytes)
             - name: release's name (string)
             - author: release's author identifier (swh's id)
             - synthetic: the synthetic property (boolean)
 
     Returns:
         dict: Release dictionary with the following keys:
 
         - id: hexadecimal sha1 (string)
         - target: hexadecimal sha1 (string)
         - message: release's comment message (string)
         - name: release's name (string)
         - author: release's author identifier (swh's id)
         - synthetic: the synthetic property (boolean)
 
     """
     return from_swh(
         release,
         hashess={'id', 'target'},
         bytess={'message', 'name', 'fullname', 'email'},
         dates={'date'},
     )
 
 
 class SWHMetadataEncoder(json.JSONEncoder):
     """Special json encoder for metadata field which can contain bytes
     encoded value.
 
     """
     def default(self, obj):
         if isinstance(obj, bytes):
             return obj.decode('utf-8')
         # Let the base class default method raise the TypeError
         return json.JSONEncoder.default(self, obj)
 
 
 def convert_revision_metadata(metadata):
     """Convert json specific dict to a json serializable one.
 
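     Example:
         Bytes values are decoded on the way through::

             convert_revision_metadata({'package': b'sharp-0.3.4.tgz'})
             # {'package': 'sharp-0.3.4.tgz'}
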
     """
     if not metadata:
         return {}
 
     return json.loads(json.dumps(metadata, cls=SWHMetadataEncoder))
 
 
 def from_revision(revision):
     """Convert from a swh revision to a json serializable revision dictionary.
 
     Args:
         revision (dict): dict with keys:
 
             - id: identifier of the revision (sha1 in bytes)
             - directory: identifier of the directory the revision points to
               (sha1 in bytes)
             - author_name, author_email: author's revision name and email
             - committer_name, committer_email: committer's revision name and
               email
             - message: revision's message
             - date, date_offset: revision's author date
             - committer_date, committer_date_offset: revision's commit date
             - parents: list of parents for such revision
             - synthetic: whether the revision is synthetic (boolean)
             - type: revision's type (git, tar or dsc at the moment)
             - metadata: if the revision is synthetic, this can reference
               dynamic properties.
 
     Returns:
         dict: Revision dictionary with the same keys as inputs, except:
 
         - sha1s are in hexadecimal strings (id, directory)
         - bytes are decoded in string (author_name, committer_name,
           author_email, committer_email)
 
         Remaining keys are left as is
 
     """
     revision = from_swh(revision,
                         hashess={'id', 'directory', 'parents', 'children'},
                         bytess={'name', 'fullname', 'email'},
                         convert={'metadata'},
                         convert_fn=convert_revision_metadata,
                         dates={'date', 'committer_date'})
 
     if revision:
         if 'parents' in revision:
             revision['merge'] = len(revision['parents']) > 1
         if 'message' in revision:
             try:
                 revision['message'] = revision['message'].decode('utf-8')
             except UnicodeDecodeError:
                 revision['message_decoding_failed'] = True
                 revision['message'] = None
 
     return revision
 
 
 def from_content(content):
     """Convert swh content to serializable content dictionary.
 
     """
     return from_swh(content,
                     hashess={'sha1', 'sha1_git', 'sha256', 'blake2s256'},
                     blacklist={'ctime'},
                     convert={'status'},
                     convert_fn=lambda v: 'absent' if v == 'hidden' else v)
 
 
 def from_person(person):
     """Convert swh person to serializable person dictionary.
 
     """
     return from_swh(person,
                     bytess={'name', 'fullname', 'email'})
 
 
 def from_origin_visit(visit):
     """Convert swh origin_visit to serializable origin_visit dictionary.
 
     """
     ov = from_swh(visit,
                   hashess={'target', 'snapshot'},
                   bytess={'branch'},
                   dates={'date'},
                   empty_dict={'metadata'})
 
     return ov
 
 
 def from_snapshot(snapshot):
     """Convert swh snapshot to serializable snapshot dictionary.
 
     """
     sv = from_swh(snapshot,
                   hashess={'id', 'target'},
                   bytess={'next_branch'})
 
     if sv and 'branches' in sv:
         sv['branches'] = {
             decode_with_escape(k): v
             for k, v in sv['branches'].items()
         }
 
     return sv
 
 
 def from_directory_entry(dir_entry):
     """Convert swh person to serializable person dictionary.
 
     """
     return from_swh(dir_entry,
                     hashess={'dir_id', 'sha1_git', 'sha1', 'sha256',
                              'blake2s256', 'target'},
                     bytess={'name'},
                     removables_if_empty={
                         'sha1', 'sha1_git', 'sha256', 'blake2s256', 'status'},
                     convert={'status'},
                     convert_fn=lambda v: 'absent' if v == 'hidden' else v)
 
 
 def from_filetype(content_entry):
     """Convert swh person to serializable person dictionary.
 
     """
     return from_swh(content_entry,
-                    hashess={'id'},
-                    bytess={'mimetype', 'encoding'})
+                    hashess={'id'})
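
With 'mimetype' and 'encoding' now reaching the converter as plain strings
(see the updated tests below), from_filetype only has to hexlify 'id'. A
minimal sketch of the resulting behaviour, mirroring the updated
test_from_filetype:

    from swh.model import hashutil
    from swh.web.common import converters

    filetype = converters.from_filetype({
        'id': hashutil.hash_to_bytes(
            '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
        'encoding': 'utf-8',
        'mimetype': 'text/plain',
    })
    assert filetype == {
        'id': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
        'encoding': 'utf-8',
        'mimetype': 'text/plain',
    }
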
diff --git a/swh/web/tests/common/test_converters.py b/swh/web/tests/common/test_converters.py
index 25ad6840f..47e061cfe 100644
--- a/swh/web/tests/common/test_converters.py
+++ b/swh/web/tests/common/test_converters.py
@@ -1,785 +1,785 @@
 # Copyright (C) 2015-2018  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 
 
 from swh.model import hashutil
 
 from swh.web.common import converters
 from swh.web.tests.testcase import SWHWebTestCase
 
 
 class ConvertersTestCase(SWHWebTestCase):
 
     def test_fmap(self):
         self.assertEqual([2, 3, None, 4],
                          converters.fmap(lambda x: x+1, [1, 2, None, 3]))
         self.assertEqual([11, 12, 13],
                          list(converters.fmap(lambda x: x+10,
                                               map(lambda x: x, [1, 2, 3]))))
         self.assertEqual({'a': 2, 'b': 4},
                          converters.fmap(lambda x: x*2, {'a': 1, 'b': 2}))
         self.assertEqual(100,
                          converters.fmap(lambda x: x*10, 10))
         self.assertEqual({'a': [2, 6], 'b': 4},
                          converters.fmap(lambda x: x*2,
                                          {'a': [1, 3], 'b': 2}))
 
         self.assertIsNone(converters.fmap(lambda x: x, None))
 
     def test_from_swh(self):
         some_input = {
             'a': 'something',
             'b': 'someone',
             'c': b'sharp-0.3.4.tgz',
             'd': hashutil.hash_to_bytes(
                 'b04caf10e9535160d90e874b45aa426de762f19f'),
             'e': b'sharp.html/doc_002dS_005fISREG.html',
             'g': [b'utf-8-to-decode', b'another-one'],
             'h': 'something filtered',
             'i': {'e': b'something'},
             'j': {
                 'k': {
                     'l': [b'bytes thing', b'another thingy', b''],
                     'n': 'dont care either'
                 },
                 'm': 'dont care'
             },
             'o': 'something',
             'p': b'foo',
             'q': {'extra-headers': [['a', b'intact']]},
             'w': None,
             'r': {'p': 'also intact',
                   'q': 'bar'},
             's': {
                 'timestamp': 42,
                 'offset': -420,
                 'negative_utc': None,
             },
             's1': {
                 'timestamp': {'seconds': 42, 'microseconds': 0},
                 'offset': -420,
                 'negative_utc': None,
             },
             's2': datetime.datetime(
                 2013, 7, 1, 20, 0, 0,
                 tzinfo=datetime.timezone.utc),
             't': None,
             'u': None,
             'v': None,
             'x': None,
         }
 
         expected_output = {
             'a': 'something',
             'b': 'someone',
             'c': 'sharp-0.3.4.tgz',
             'd': 'b04caf10e9535160d90e874b45aa426de762f19f',
             'e': 'sharp.html/doc_002dS_005fISREG.html',
             'g': ['utf-8-to-decode', 'another-one'],
             'i': {'e': 'something'},
             'j': {
                 'k': {
                     'l': ['bytes thing', 'another thingy', '']
                 }
             },
             'p': 'foo',
             'q': {'extra-headers': [['a', 'intact']]},
             'w': {},
             'r': {'p': 'also intact',
                   'q': 'bar'},
             's': '1969-12-31T17:00:42-07:00',
             's1': '1969-12-31T17:00:42-07:00',
             's2': '2013-07-01T20:00:00+00:00',
             'u': {},
             'v': [],
             'x': None,
         }
 
         actual_output = converters.from_swh(
             some_input,
             hashess={'d', 'o', 'x'},
             bytess={'c', 'e', 'g', 'l'},
             dates={'s', 's1', 's2'},
             blacklist={'h', 'm', 'n', 'o'},
             removables_if_empty={'t'},
             empty_dict={'u'},
             empty_list={'v'},
             convert={'p', 'q', 'w'},
             convert_fn=converters.convert_revision_metadata)
 
         self.assertEqual(expected_output, actual_output)
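 
     def test_from_swh_minimal_sketch(self):
         # Editorial sketch (hypothetical extra test, illustration only):
         # the most common from_swh parameters on a tiny input, restating
         # the expectations exercised above.
         actual = converters.from_swh(
             {'k': b'v', 'drop': 'me', 'empty': None},
             bytess={'k'},
             blacklist={'drop'},
             empty_dict={'empty'})
         self.assertEqual({'k': 'v', 'empty': {}}, actual)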
 
     def test_from_swh_edge_cases_do_no_conversion_if_none_or_not_bytes(self):
         some_input = {
             'a': 'something',
             'b': None,
             'c': 'someone',
             'd': None,
             'e': None
         }
 
         expected_output = {
             'a': 'something',
             'b': None,
             'c': 'someone',
             'd': None,
             'e': None
         }
 
         actual_output = converters.from_swh(some_input,
                                             hashess={'a', 'b'},
                                             bytess={'c', 'd'},
                                             dates={'e'})
 
         self.assertEqual(expected_output, actual_output)
 
     def test_from_swh_edge_cases_convert_invalid_utf8_bytes(self):
         some_input = {
             'a': 'something',
             'b': 'someone',
             'c': b'a name \xff',
             'd': b'an email \xff',
         }
 
         expected_output = {
             'a': 'something',
             'b': 'someone',
             'c': 'a name \\xff',
             'd': 'an email \\xff',
             'decoding_failures': ['c', 'd']
         }
 
         actual_output = converters.from_swh(some_input,
                                             hashess={'a', 'b'},
                                             bytess={'c', 'd'})
         for v in ['a', 'b', 'c', 'd']:
             self.assertEqual(expected_output[v], actual_output[v])
         self.assertEqual(len(expected_output['decoding_failures']),
                          len(actual_output['decoding_failures']))
         for v in expected_output['decoding_failures']:
             self.assertTrue(v in actual_output['decoding_failures'])
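 
     def test_decode_or_escape_sketch(self):
         # Editorial sketch (hypothetical test, illustration only): one
         # plausible way to obtain the escaping exercised above, via the
         # stdlib 'backslashreplace' error handler. The code base's own
         # helper may differ in detail.
         def decode_or_escape(value):
             try:
                 return value.decode('utf-8'), False
             except UnicodeDecodeError:
                 return value.decode('utf-8', 'backslashreplace'), True

         self.assertEqual(('a name \\xff', True),
                          decode_or_escape(b'a name \xff'))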
 
     def test_from_swh_empty(self):
         # when
         self.assertEqual({}, converters.from_swh({}))
 
     def test_from_swh_none(self):
         # when
         self.assertIsNone(converters.from_swh(None))
 
     def test_from_provenance(self):
         # given
         input_provenance = {
             'origin': 10,
             'visit': 1,
             'content': hashutil.hash_to_bytes(
                 '321caf10e9535160d90e874b45aa426de762f19f'),
             'revision': hashutil.hash_to_bytes(
                 '123caf10e9535160d90e874b45aa426de762f19f'),
             'path': b'octave-3.4.0/doc/interpreter/octave/doc_002dS_005fISREG'
         }
 
         expected_provenance = {
             'origin': 10,
             'visit': 1,
             'content': '321caf10e9535160d90e874b45aa426de762f19f',
             'revision': '123caf10e9535160d90e874b45aa426de762f19f',
             'path': 'octave-3.4.0/doc/interpreter/octave/doc_002dS_005fISREG'
         }
 
         # when
         actual_provenance = converters.from_provenance(input_provenance)
 
         # then
         self.assertEqual(actual_provenance, expected_provenance)
 
     def test_from_origin(self):
         # given
         origin_input = {
             'id': 9,
             'type': 'ftp',
             'url': 'rsync://ftp.gnu.org/gnu/octave',
         }
 
         expected_origin = {
             'id': 9,
             'type': 'ftp',
             'url': 'rsync://ftp.gnu.org/gnu/octave',
         }
 
         # when
         actual_origin = converters.from_origin(origin_input)
 
         # then
         self.assertEqual(actual_origin, expected_origin)
 
     def test_from_origin_visit(self):
         snap_hash = 'b5f0b7f716735ebffe38505c60145c4fd9da6ca3'
 
         for snap in [snap_hash, None]:
             # given
             visit = {
                 'date': {
                     'timestamp': datetime.datetime(
                         2015, 1, 1, 22, 0, 0,
                         tzinfo=datetime.timezone.utc).timestamp(),
                     'offset': 0,
                     'negative_utc': False,
                 },
                 'origin': 10,
                 'visit': 100,
                 'metadata': None,
                 'status': 'full',
                 'snapshot': hashutil.hash_to_bytes(snap) if snap else snap,
             }
 
             expected_visit = {
                 'date': '2015-01-01T22:00:00+00:00',
                 'origin': 10,
                 'visit': 100,
                 'metadata': {},
                 'status': 'full',
                 'snapshot': snap_hash if snap else snap
             }
 
             # when
             actual_visit = converters.from_origin_visit(visit)
 
             # then
             self.assertEqual(actual_visit, expected_visit)
 
     def test_from_release(self):
         release_input = {
             'id': hashutil.hash_to_bytes(
                 'aad23fa492a0c5fed0708a6703be875448c86884'),
             'target': hashutil.hash_to_bytes(
                 '5e46d564378afc44b31bb89f99d5675195fbdf67'),
             'target_type': 'revision',
             'date': {
                 'timestamp': datetime.datetime(
                     2015, 1, 1, 22, 0, 0,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': False,
             },
             'author': {
                 'name': b'author name',
                 'fullname': b'Author Name author@email',
                 'email': b'author@email',
             },
             'name': b'v0.0.1',
             'message': b'some comment on release',
             'synthetic': True,
         }
 
         expected_release = {
             'id': 'aad23fa492a0c5fed0708a6703be875448c86884',
             'target': '5e46d564378afc44b31bb89f99d5675195fbdf67',
             'target_type': 'revision',
             'date': '2015-01-01T22:00:00+00:00',
             'author': {
                 'name': 'author name',
                 'fullname': 'Author Name author@email',
                 'email': 'author@email',
             },
             'name': 'v0.0.1',
             'message': 'some comment on release',
             'synthetic': True,
         }
 
         # when
         actual_release = converters.from_release(release_input)
 
         # then
         self.assertEqual(actual_release, expected_release)
 
     def test_from_release_no_revision(self):
         release_input = {
             'id': hashutil.hash_to_bytes(
                 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e'),
             'target': None,
             'date': {
                 'timestamp': datetime.datetime(
                     2016, 3, 2, 10, 0, 0,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': True,
             },
             'name': b'v0.1.1',
             'message': b'comment on release',
             'synthetic': False,
             'author': {
                 'name': b'bob',
                 'fullname': b'Bob bob@alice.net',
                 'email': b'bob@alice.net',
             },
         }
 
         expected_release = {
             'id': 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e',
             'target': None,
             'date': '2016-03-02T10:00:00-00:00',
             'name': 'v0.1.1',
             'message': 'comment on release',
             'synthetic': False,
             'author': {
                 'name': 'bob',
                 'fullname': 'Bob bob@alice.net',
                 'email': 'bob@alice.net',
             },
         }
 
         # when
         actual_release = converters.from_release(release_input)
 
         # then
         self.assertEqual(actual_release, expected_release)
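 
     # Editorial note (illustration only): negative_utc=True with a zero
     # offset is what renders the '-00:00' suffix in the expected date
     # above, as opposed to the '+00:00' in test_from_release.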
 
     def test_from_revision(self):
         revision_input = {
             'id': hashutil.hash_to_bytes(
                 '18d8be353ed3480476f032475e7c233eff7371d5'),
             'directory': hashutil.hash_to_bytes(
                 '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
             'author': {
                 'name': b'Software Heritage',
                 'fullname': b'robot robot@softwareheritage.org',
                 'email': b'robot@softwareheritage.org',
             },
             'committer': {
                 'name': b'Software Heritage',
                 'fullname': b'robot robot@softwareheritage.org',
                 'email': b'robot@softwareheritage.org',
             },
             'message': b'synthetic revision message',
             'date': {
                 'timestamp': datetime.datetime(
                     2000, 1, 17, 11, 23, 54,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': False,
             },
             'committer_date': {
                 'timestamp': datetime.datetime(
                     2000, 1, 17, 11, 23, 54,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': False,
             },
             'synthetic': True,
             'type': 'tar',
             'parents': [
                 hashutil.hash_to_bytes(
                     '29d8be353ed3480476f032475e7c244eff7371d5'),
                 hashutil.hash_to_bytes(
                     '30d8be353ed3480476f032475e7c244eff7371d5')
             ],
             'children': [
                 hashutil.hash_to_bytes(
                     '123546353ed3480476f032475e7c244eff7371d5'),
             ],
             'metadata': {
                 'extra_headers': [['gpgsig', b'some-signature']],
                 'original_artifact': [{
                     'archive_type': 'tar',
                     'name': 'webbase-5.7.0.tar.gz',
                     'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
                     'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
                     'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
                     '309d36484e7edf7bb912',
                 }]
             },
         }
 
         expected_revision = {
             'id': '18d8be353ed3480476f032475e7c233eff7371d5',
             'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
             'author': {
                 'name': 'Software Heritage',
                 'fullname': 'robot robot@softwareheritage.org',
                 'email': 'robot@softwareheritage.org',
             },
             'committer': {
                 'name': 'Software Heritage',
                 'fullname': 'robot robot@softwareheritage.org',
                 'email': 'robot@softwareheritage.org',
             },
             'message': 'synthetic revision message',
             'date': "2000-01-17T11:23:54+00:00",
             'committer_date': "2000-01-17T11:23:54+00:00",
             'children': [
                 '123546353ed3480476f032475e7c244eff7371d5'
             ],
             'parents': [
                 '29d8be353ed3480476f032475e7c244eff7371d5',
                 '30d8be353ed3480476f032475e7c244eff7371d5'
             ],
             'type': 'tar',
             'synthetic': True,
             'metadata': {
                 'extra_headers': [['gpgsig', 'some-signature']],
                 'original_artifact': [{
                     'archive_type': 'tar',
                     'name': 'webbase-5.7.0.tar.gz',
                     'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
                     'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
                     'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
                     '309d36484e7edf7bb912'
                 }]
             },
             'merge': True
         }
 
         # when
         actual_revision = converters.from_revision(revision_input)
 
         # then
         self.assertEqual(actual_revision, expected_revision)
 
     def test_from_revision_nomerge(self):
         revision_input = {
             'id': hashutil.hash_to_bytes(
                 '18d8be353ed3480476f032475e7c233eff7371d5'),
             'parents': [
                 hashutil.hash_to_bytes(
                     '29d8be353ed3480476f032475e7c244eff7371d5')
             ]
         }
 
         expected_revision = {
             'id': '18d8be353ed3480476f032475e7c233eff7371d5',
             'parents': [
                 '29d8be353ed3480476f032475e7c244eff7371d5'
             ],
             'merge': False
         }
 
         # when
         actual_revision = converters.from_revision(revision_input)
 
         # then
         self.assertEqual(actual_revision, expected_revision)
 
     def test_from_revision_noparents(self):
         revision_input = {
             'id': hashutil.hash_to_bytes(
                 '18d8be353ed3480476f032475e7c233eff7371d5'),
             'directory': hashutil.hash_to_bytes(
                 '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
             'author': {
                 'name': b'Software Heritage',
                 'fullname': b'robot robot@softwareheritage.org',
                 'email': b'robot@softwareheritage.org',
             },
             'committer': {
                 'name': b'Software Heritage',
                 'fullname': b'robot robot@softwareheritage.org',
                 'email': b'robot@softwareheritage.org',
             },
             'message': b'synthetic revision message',
             'date': {
                 'timestamp': datetime.datetime(
                     2000, 1, 17, 11, 23, 54,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': False,
             },
             'committer_date': {
                 'timestamp': datetime.datetime(
                     2000, 1, 17, 11, 23, 54,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': False,
             },
             'synthetic': True,
             'type': 'tar',
             'children': [
                 hashutil.hash_to_bytes(
                     '123546353ed3480476f032475e7c244eff7371d5'),
             ],
             'metadata': {
                 'original_artifact': [{
                     'archive_type': 'tar',
                     'name': 'webbase-5.7.0.tar.gz',
                     'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
                     'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
                     'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
                     '309d36484e7edf7bb912',
                 }]
             },
         }
 
         expected_revision = {
             'id': '18d8be353ed3480476f032475e7c233eff7371d5',
             'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
             'author': {
                 'name': 'Software Heritage',
                 'fullname': 'robot robot@softwareheritage.org',
                 'email': 'robot@softwareheritage.org',
             },
             'committer': {
                 'name': 'Software Heritage',
                 'fullname': 'robot robot@softwareheritage.org',
                 'email': 'robot@softwareheritage.org',
             },
             'message': 'synthetic revision message',
             'date': "2000-01-17T11:23:54+00:00",
             'committer_date': "2000-01-17T11:23:54+00:00",
             'children': [
                 '123546353ed3480476f032475e7c244eff7371d5'
             ],
             'type': 'tar',
             'synthetic': True,
             'metadata': {
                 'original_artifact': [{
                     'archive_type': 'tar',
                     'name': 'webbase-5.7.0.tar.gz',
                     'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
                     'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
                     'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
                     '309d36484e7edf7bb912'
                 }]
             }
         }
 
         # when
         actual_revision = converters.from_revision(revision_input)
 
         # then
         self.assertEqual(actual_revision, expected_revision)
 
     def test_from_revision_invalid(self):
         revision_input = {
             'id': hashutil.hash_to_bytes(
                 '18d8be353ed3480476f032475e7c233eff7371d5'),
             'directory': hashutil.hash_to_bytes(
                 '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
             'author': {
                 'name': b'Software Heritage',
                 'fullname': b'robot robot@softwareheritage.org',
                 'email': b'robot@softwareheritage.org',
             },
             'committer': {
                 'name': b'Software Heritage',
                 'fullname': b'robot robot@softwareheritage.org',
                 'email': b'robot@softwareheritage.org',
             },
             'message': b'invalid message \xff',
             'date': {
                 'timestamp': datetime.datetime(
                     2000, 1, 17, 11, 23, 54,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': False,
             },
             'committer_date': {
                 'timestamp': datetime.datetime(
                     2000, 1, 17, 11, 23, 54,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': False,
             },
             'synthetic': True,
             'type': 'tar',
             'parents': [
                 hashutil.hash_to_bytes(
                     '29d8be353ed3480476f032475e7c244eff7371d5'),
                 hashutil.hash_to_bytes(
                     '30d8be353ed3480476f032475e7c244eff7371d5')
             ],
             'children': [
                 hashutil.hash_to_bytes(
                     '123546353ed3480476f032475e7c244eff7371d5'),
             ],
             'metadata': {
                 'original_artifact': [{
                     'archive_type': 'tar',
                     'name': 'webbase-5.7.0.tar.gz',
                     'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
                     'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
                     'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
                     '309d36484e7edf7bb912',
                 }]
             },
         }
 
         expected_revision = {
             'id': '18d8be353ed3480476f032475e7c233eff7371d5',
             'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
             'author': {
                 'name': 'Software Heritage',
                 'fullname': 'robot robot@softwareheritage.org',
                 'email': 'robot@softwareheritage.org',
             },
             'committer': {
                 'name': 'Software Heritage',
                 'fullname': 'robot robot@softwareheritage.org',
                 'email': 'robot@softwareheritage.org',
             },
             'message': None,
             'message_decoding_failed': True,
             'date': "2000-01-17T11:23:54+00:00",
             'committer_date': "2000-01-17T11:23:54+00:00",
             'children': [
                 '123546353ed3480476f032475e7c244eff7371d5'
             ],
             'parents': [
                 '29d8be353ed3480476f032475e7c244eff7371d5',
                 '30d8be353ed3480476f032475e7c244eff7371d5'
             ],
             'type': 'tar',
             'synthetic': True,
             'metadata': {
                 'original_artifact': [{
                     'archive_type': 'tar',
                     'name': 'webbase-5.7.0.tar.gz',
                     'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
                     'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
                     'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
                     '309d36484e7edf7bb912'
                 }]
             },
             'merge': True
         }
 
         # when
         actual_revision = converters.from_revision(revision_input)
 
         # then
         self.assertEqual(actual_revision, expected_revision)
 
     def test_from_content_none(self):
         self.assertIsNone(converters.from_content(None))
 
     def test_from_content(self):
         content_input = {
             'sha1': hashutil.hash_to_bytes(
                 '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
             'sha256': hashutil.hash_to_bytes(
                 '39007420ca5de7cb3cfc15196335507e'
                 'e76c98930e7e0afa4d2747d3bf96c926'),
             'blake2s256': hashutil.hash_to_bytes(
                 '49007420ca5de7cb3cfc15196335507e'
                 'e76c98930e7e0afa4d2747d3bf96c926'),
             'sha1_git': hashutil.hash_to_bytes(
                 '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
             'ctime': 'something-which-is-filtered-out',
             'data': b'data in bytes',
             'length': 10,
             'status': 'hidden',
         }
 
         # 'ctime' is filtered out; 'status' is converted from
         # 'hidden' to 'absent'
         expected_content = {
             'checksums': {
                 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
                 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
                           '930e7e0afa4d2747d3bf96c926',
                 'blake2s256': '49007420ca5de7cb3cfc15196335507ee7'
                               '6c98930e7e0afa4d2747d3bf96c926',
                 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
             },
             'data': b'data in bytes',
             'length': 10,
             'status': 'absent',
         }
 
         # when
         actual_content = converters.from_content(content_input)
 
         # then
         self.assertEqual(actual_content, expected_content)
 
     def test_from_person(self):
         person_input = {
             'id': 10,
             'anything': 'else',
             'name': b'bob',
             'fullname': b'bob bob@alice.net',
             'email': b'bob@foo.alice',
         }
 
         expected_person = {
             'id': 10,
             'anything': 'else',
             'name': 'bob',
             'fullname': 'bob bob@alice.net',
             'email': 'bob@foo.alice',
         }
 
         # when
         actual_person = converters.from_person(person_input)
 
         # then
         self.assertEqual(actual_person, expected_person)
 
     def test_from_directory_entries(self):
         dir_entries_input = {
             'sha1': hashutil.hash_to_bytes(
                 '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
             'sha256': hashutil.hash_to_bytes(
                 '39007420ca5de7cb3cfc15196335507e'
                 'e76c98930e7e0afa4d2747d3bf96c926'),
             'sha1_git': hashutil.hash_to_bytes(
                 '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
             'blake2s256': hashutil.hash_to_bytes(
                 '685395c5dc57cada459364f0946d3dd45bad5fcbab'
                 'c1048edb44380f1d31d0aa'),
             'target': hashutil.hash_to_bytes(
                 '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
             'dir_id': hashutil.hash_to_bytes(
                 '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
             'name': b'bob',
             'type': 10,
             'status': 'hidden',
         }
 
         expected_dir_entries = {
             'checksums': {
                 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
                 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
                           '930e7e0afa4d2747d3bf96c926',
                 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
                 'blake2s256': '685395c5dc57cada459364f0946d3dd45bad5f'
                               'cbabc1048edb44380f1d31d0aa',
             },
             'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
             'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
             'name': 'bob',
             'type': 10,
             'status': 'absent',
         }
 
         # when
         actual_dir_entries = converters.from_directory_entry(dir_entries_input)
 
         # then
         self.assertEqual(actual_dir_entries, expected_dir_entries)
 
     def test_from_filetype(self):
         content_filetype = {
             'id': hashutil.hash_to_bytes(
                 '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
-            'encoding': b'utf-8',
-            'mimetype': b'text/plain',
+            'encoding': 'utf-8',
+            'mimetype': 'text/plain',
         }
 
         expected_content_filetype = {
             'id': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
             'encoding': 'utf-8',
             'mimetype': 'text/plain',
         }
 
         # when
         actual_content_filetype = converters.from_filetype(content_filetype)
 
         # then
         self.assertEqual(actual_content_filetype, expected_content_filetype)
diff --git a/swh/web/tests/common/test_service.py b/swh/web/tests/common/test_service.py
index b7cca942f..a9309d070 100644
--- a/swh/web/tests/common/test_service.py
+++ b/swh/web/tests/common/test_service.py
@@ -1,2011 +1,2011 @@
 # Copyright (C) 2015-2018  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 
 from unittest.mock import MagicMock, patch, call
 
 from swh.model.hashutil import hash_to_bytes, hash_to_hex
 
 from swh.web.common import service
 from swh.web.common.exc import BadInputExc, NotFoundExc
 from swh.web.tests.testcase import SWHWebTestCase
 
 
 class ServiceTestCase(SWHWebTestCase):
 
     def setUp(self):
         self.BLAKE2S256_SAMPLE = ('685395c5dc57cada459364f0946d3dd45b'
                                   'ad5fcbabc1048edb44380f1d31d0aa')
         self.BLAKE2S256_SAMPLE_BIN = hash_to_bytes(self.BLAKE2S256_SAMPLE)
         self.SHA1_SAMPLE = '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'
         self.SHA1_SAMPLE_BIN = hash_to_bytes(self.SHA1_SAMPLE)
         self.SHA256_SAMPLE = ('8abb0aa566452620ecce816eecdef4792d77a'
                               '293ad8ea82a4d5ecb4d36f7e560')
         self.SHA256_SAMPLE_BIN = hash_to_bytes(self.SHA256_SAMPLE)
         self.SHA1GIT_SAMPLE = '25d1a2e8f32937b0f498a5ca87f823d8df013c01'
         self.SHA1GIT_SAMPLE_BIN = hash_to_bytes(self.SHA1GIT_SAMPLE)
         self.DIRECTORY_ID = '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'
         self.DIRECTORY_ID_BIN = hash_to_bytes(self.DIRECTORY_ID)
         self.AUTHOR_ID_BIN = {
             'name': b'author',
             'email': b'author@company.org',
         }
         self.AUTHOR_ID = {
             'name': 'author',
             'email': 'author@company.org',
         }
         self.COMMITTER_ID_BIN = {
             'name': b'committer',
             'email': b'committer@corp.org',
         }
         self.COMMITTER_ID = {
             'name': 'committer',
             'email': 'committer@corp.org',
         }
         self.SAMPLE_DATE_RAW = {
             'timestamp': datetime.datetime(
                 2000, 1, 17, 11, 23, 54,
                 tzinfo=datetime.timezone.utc,
             ).timestamp(),
             'offset': 0,
             'negative_utc': False,
         }
         self.SAMPLE_DATE = '2000-01-17T11:23:54+00:00'
         self.SAMPLE_MESSAGE_BIN = b'elegant fix for bug 31415957'
         self.SAMPLE_MESSAGE = 'elegant fix for bug 31415957'
 
         self.SAMPLE_REVISION = {
             'id': self.SHA1_SAMPLE,
             'directory': self.DIRECTORY_ID,
             'author': self.AUTHOR_ID,
             'committer': self.COMMITTER_ID,
             'message': self.SAMPLE_MESSAGE,
             'date': self.SAMPLE_DATE,
             'committer_date': self.SAMPLE_DATE,
             'synthetic': False,
             'type': 'git',
             'parents': [],
             'metadata': {},
             'merge': False
         }
         self.SAMPLE_REVISION_RAW = {
             'id': self.SHA1_SAMPLE_BIN,
             'directory': self.DIRECTORY_ID_BIN,
             'author': self.AUTHOR_ID_BIN,
             'committer': self.COMMITTER_ID_BIN,
             'message': self.SAMPLE_MESSAGE_BIN,
             'date': self.SAMPLE_DATE_RAW,
             'committer_date': self.SAMPLE_DATE_RAW,
             'synthetic': False,
             'type': 'git',
             'parents': [],
             'metadata': [],
         }
 
         self.SAMPLE_CONTENT = {
             'checksums': {
                 'blake2s256': self.BLAKE2S256_SAMPLE,
                 'sha1': self.SHA1_SAMPLE,
                 'sha256': self.SHA256_SAMPLE,
                 'sha1_git': self.SHA1GIT_SAMPLE,
             },
             'length': 190,
             'status': 'absent'
         }
         self.SAMPLE_CONTENT_RAW = {
             'blake2s256': self.BLAKE2S256_SAMPLE_BIN,
             'sha1': self.SHA1_SAMPLE_BIN,
             'sha256': self.SHA256_SAMPLE_BIN,
             'sha1_git': self.SHA1GIT_SAMPLE_BIN,
             'length': 190,
             'status': 'hidden'
         }
 
         self.date_origin_visit1 = datetime.datetime(
             2015, 1, 1, 22, 0, 0,
             tzinfo=datetime.timezone.utc)
 
         self.origin_visit1 = {
             'date': self.date_origin_visit1,
             'origin': 1,
             'visit': 1
         }
 
     @patch('swh.web.common.service.storage')
     def test_lookup_multiple_hashes_none_missing(self, mock_storage):
         # given
         mock_storage.content_missing_per_sha1 = MagicMock(return_value=[])
 
         # when
         actual_lookup = service.lookup_multiple_hashes(
             [{'filename': 'a',
               'sha1': '456caf10e9535160d90e874b45aa426de762f19f'},
              {'filename': 'b',
               'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}])
 
         # then
         self.assertEqual(actual_lookup, [
             {'filename': 'a',
              'sha1': '456caf10e9535160d90e874b45aa426de762f19f',
              'found': True},
             {'filename': 'b',
              'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865',
              'found': True}
         ])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_multiple_hashes_some_missing(self, mock_storage):
         # given
         mock_storage.content_missing_per_sha1 = MagicMock(return_value=[
             hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')
         ])
 
         # when
         actual_lookup = service.lookup_multiple_hashes(
             [{'filename': 'a',
               'sha1': '456caf10e9535160d90e874b45aa426de762f19f'},
              {'filename': 'b',
               'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}])
 
         # then
         self.assertEqual(actual_lookup, [
             {'filename': 'a',
              'sha1': '456caf10e9535160d90e874b45aa426de762f19f',
              'found': False},
             {'filename': 'b',
              'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865',
              'found': True}
         ])
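 
     def test_lookup_multiple_hashes_core_sketch(self):
         # Editorial sketch (hypothetical test, illustration only): the
         # contract exercised by the two tests above, restated with a
         # minimal stand-in for the service logic.
         missing = {hash_to_bytes(
             '456caf10e9535160d90e874b45aa426de762f19f')}
         files = [{'sha1': '456caf10e9535160d90e874b45aa426de762f19f'},
                  {'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}]
         for f in files:
             f['found'] = hash_to_bytes(f['sha1']) not in missing
         self.assertEqual([False, True], [f['found'] for f in files])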
 
     @patch('swh.web.common.service.storage')
     def test_lookup_hash_does_not_exist(self, mock_storage):
         # given
         mock_storage.content_find = MagicMock(return_value=None)
 
         # when
         actual_lookup = service.lookup_hash(
             'sha1_git:123caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual({'found': None,
                           'algo': 'sha1_git'}, actual_lookup)
 
         # check the function was called with the expected parameters
         mock_storage.content_find.assert_called_with(
             {'sha1_git':
              hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')})
 
     @patch('swh.web.common.service.storage')
     def test_lookup_hash_exist(self, mock_storage):
         # given
         stub_content = {
                 'sha1': hash_to_bytes(
                     '456caf10e9535160d90e874b45aa426de762f19f')
             }
         mock_storage.content_find = MagicMock(return_value=stub_content)
 
         # when
         actual_lookup = service.lookup_hash(
             'sha1:456caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual({'found': stub_content,
                           'algo': 'sha1'}, actual_lookup)
 
         mock_storage.content_find.assert_called_with(
             {'sha1':
              hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')}
         )
 
     @patch('swh.web.common.service.storage')
     def test_search_hash_does_not_exist(self, mock_storage):
         # given
         mock_storage.content_find = MagicMock(return_value=None)
 
         # when
         actual_lookup = service.search_hash(
             'sha1_git:123caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual({'found': False}, actual_lookup)
 
         # check the function was called with the expected parameters
         mock_storage.content_find.assert_called_with(
             {'sha1_git':
              hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')})
 
     @patch('swh.web.common.service.storage')
     def test_search_hash_exist(self, mock_storage):
         # given
         stub_content = {
                 'sha1': hash_to_bytes(
                     '456caf10e9535160d90e874b45aa426de762f19f')
             }
         mock_storage.content_find = MagicMock(return_value=stub_content)
 
         # when
         actual_lookup = service.search_hash(
             'sha1:456caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual({'found': True}, actual_lookup)
 
         mock_storage.content_find.assert_called_with(
             {'sha1':
              hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')},
         )
 
     @patch('swh.web.common.service.idx_storage')
     def test_lookup_content_ctags(self, mock_idx_storage):
         # given
         mock_idx_storage.content_ctags_get = MagicMock(
             return_value=[{
                 'id': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f'),
                 'line': 100,
                 'name': 'hello',
                 'kind': 'function',
                 'tool_name': 'ctags',
                 'tool_version': 'some-version',
             }])
         expected_ctags = [{
             'id': '123caf10e9535160d90e874b45aa426de762f19f',
             'line': 100,
             'name': 'hello',
             'kind': 'function',
             'tool_name': 'ctags',
             'tool_version': 'some-version',
         }]
 
         # when
         actual_ctags = list(service.lookup_content_ctags(
             'sha1:123caf10e9535160d90e874b45aa426de762f19f'))
 
         # then
         self.assertEqual(actual_ctags, expected_ctags)
 
         mock_idx_storage.content_ctags_get.assert_called_with(
             [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
 
     @patch('swh.web.common.service.idx_storage')
     def test_lookup_content_ctags_no_hash(self, mock_idx_storage):
         # given
         mock_idx_storage.content_ctags_get = MagicMock(return_value=[])
 
         # when
         actual_ctags = list(service.lookup_content_ctags(
             'sha1:123caf10e9535160d90e874b45aa426de762f19f'))
 
         # then
         self.assertEqual(actual_ctags, [])
 
     @patch('swh.web.common.service.idx_storage')
     def test_lookup_content_filetype(self, mock_idx_storage):
         # given
         mock_idx_storage.content_mimetype_get = MagicMock(
             return_value=[{
                 'id': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f'),
-                'mimetype': b'text/x-c++',
-                'encoding': b'us-ascii',
+                'mimetype': 'text/x-c++',
+                'encoding': 'us-ascii',
             }])
         expected_filetype = {
                 'id': '123caf10e9535160d90e874b45aa426de762f19f',
                 'mimetype': 'text/x-c++',
                 'encoding': 'us-ascii',
         }
 
         # when
         actual_filetype = service.lookup_content_filetype(
             'sha1:123caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual(actual_filetype, expected_filetype)
 
         mock_idx_storage.content_mimetype_get.assert_called_with(
             [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
 
     @patch('swh.web.common.service.idx_storage')
     @patch('swh.web.common.service.storage')
     def test_lookup_content_filetype_2(self, mock_storage, mock_idx_storage):
         # given
         mock_storage.content_find = MagicMock(
             return_value={
                 'sha1': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f')
             }
         )
         mock_idx_storage.content_mimetype_get = MagicMock(
             return_value=[{
                 'id': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f'),
-                'mimetype': b'text/x-python',
-                'encoding': b'us-ascii',
+                'mimetype': 'text/x-python',
+                'encoding': 'us-ascii',
             }]
         )
         expected_filetype = {
                 'id': '123caf10e9535160d90e874b45aa426de762f19f',
                 'mimetype': 'text/x-python',
                 'encoding': 'us-ascii',
         }
 
         # when
         actual_filetype = service.lookup_content_filetype(
             'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual(actual_filetype, expected_filetype)
 
         mock_storage.content_find.assert_called_with(
             {'sha1_git':
              hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')})
         mock_idx_storage.content_mimetype_get.assert_called_with(
             [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
 
     @patch('swh.web.common.service.idx_storage')
     def test_lookup_content_language(self, mock_idx_storage):
         # given
         mock_idx_storage.content_language_get = MagicMock(
             return_value=[{
                 'id': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f'),
                 'lang': 'python',
             }])
         expected_language = {
                 'id': '123caf10e9535160d90e874b45aa426de762f19f',
                 'lang': 'python',
         }
 
         # when
         actual_language = service.lookup_content_language(
             'sha1:123caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual(actual_language, expected_language)
 
         mock_idx_storage.content_language_get.assert_called_with(
             [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
 
     @patch('swh.web.common.service.idx_storage')
     @patch('swh.web.common.service.storage')
     def test_lookup_content_language_2(self, mock_storage, mock_idx_storage):
         # given
         mock_storage.content_find = MagicMock(
             return_value={
                 'sha1': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f')
             }
         )
         mock_idx_storage.content_language_get = MagicMock(
             return_value=[{
                 'id': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f'),
                 'lang': 'haskell',
             }]
         )
         expected_language = {
                 'id': '123caf10e9535160d90e874b45aa426de762f19f',
                 'lang': 'haskell',
         }
 
         # when
         actual_language = service.lookup_content_language(
             'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual(actual_language, expected_language)
 
         mock_storage.content_find.assert_called_with(
             {'sha1_git':
              hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')})
         mock_idx_storage.content_language_get.assert_called_with(
             [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
 
     @patch('swh.web.common.service.idx_storage')
     def test_lookup_expression(self, mock_idx_storage):
         # given
         mock_idx_storage.content_ctags_search = MagicMock(
             return_value=[{
                 'id': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f'),
                 'name': 'foobar',
                 'kind': 'variable',
                 'lang': 'C',
                 'line': 10
             }])
         expected_ctags = [{
             'sha1': '123caf10e9535160d90e874b45aa426de762f19f',
             'name': 'foobar',
             'kind': 'variable',
             'lang': 'C',
             'line': 10
         }]
 
         # when
         actual_ctags = list(service.lookup_expression(
             'foobar', last_sha1='hash', per_page=10))
 
         # then
         self.assertEqual(actual_ctags, expected_ctags)
 
         mock_idx_storage.content_ctags_search.assert_called_with(
             'foobar', last_sha1='hash', limit=10)
 
     @patch('swh.web.common.service.idx_storage')
     def test_lookup_expression_no_result(self, mock_idx_storage):
         # given
         mock_idx_storage.content_ctags_search = MagicMock(
             return_value=[])
         expected_ctags = []
 
         # when
         actual_ctags = list(service.lookup_expression(
             'barfoo', last_sha1='hash', per_page=10))
 
         # then
         self.assertEqual(actual_ctags, expected_ctags)
 
         mock_idx_storage.content_ctags_search.assert_called_with(
             'barfoo', last_sha1='hash', limit=10)
 
     @patch('swh.web.common.service.idx_storage')
     def test_lookup_content_license(self, mock_idx_storage):
         # given
         mock_idx_storage.content_fossology_license_get = MagicMock(
             return_value=[{
                 hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f'): [{
                     'licenses': ['GPL-3.0+'],
                     'tool': {}
                 }]
             }])
         expected_license = {
                 'id': '123caf10e9535160d90e874b45aa426de762f19f',
                 'facts': [{
                     'licenses': ['GPL-3.0+'],
                     'tool': {}
                 }]
         }
 
         # when
         actual_license = service.lookup_content_license(
             'sha1:123caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual(actual_license, expected_license)
 
         mock_idx_storage.content_fossology_license_get.assert_called_with(
             [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
 
     @patch('swh.web.common.service.idx_storage')
     @patch('swh.web.common.service.storage')
     def test_lookup_content_license_2(self, mock_storage, mock_idx_storage):
         # given
         mock_storage.content_find = MagicMock(
             return_value={
                 'sha1': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f')
             }
         )
         mock_idx_storage.content_fossology_license_get = MagicMock(
             return_value=[{
                 hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f'): [{
                     'licenses': ['BSD-2-Clause'],
                     'tool': {}
                 }]
             }]
         )
         expected_license = {
                 'id': '123caf10e9535160d90e874b45aa426de762f19f',
                 'facts': [{
                     'licenses': ['BSD-2-Clause'],
                     'tool': {}
                 }]
         }
 
         # when
         actual_license = service.lookup_content_license(
             'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual(actual_license, expected_license)
 
         mock_storage.content_find.assert_called_with(
             {'sha1_git':
              hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')})
         mock_idx_storage.content_fossology_license_get.assert_called_with(
             [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_content_provenance(self, mock_storage):
         # given
         mock_storage.content_find_provenance = MagicMock(
             return_value=(p for p in [{
                 'content': hash_to_bytes(
                     '123caf10e9535160d90e874b45aa426de762f19f'),
                 'revision': hash_to_bytes(
                     '456caf10e9535160d90e874b45aa426de762f19f'),
                 'origin': 100,
                 'visit': 1,
                 'path': b'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html'
             }]))
         expected_provenances = [{
             'content': '123caf10e9535160d90e874b45aa426de762f19f',
             'revision': '456caf10e9535160d90e874b45aa426de762f19f',
             'origin': 100,
             'visit': 1,
             'path': 'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html'
         }]
 
         # when
         actual_provenances = service.lookup_content_provenance(
             'sha1_git:123caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertEqual(list(actual_provenances), expected_provenances)
 
         mock_storage.content_find_provenance.assert_called_with(
             {'sha1_git':
              hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')})
 
     @patch('swh.web.common.service.storage')
     def test_lookup_content_provenance_not_found(self, mock_storage):
         # given
         mock_storage.content_find_provenance = MagicMock(return_value=None)
 
         # when
         actual_provenances = service.lookup_content_provenance(
             'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
 
         # then
         self.assertIsNone(actual_provenances)
 
         mock_storage.content_find_provenance.assert_called_with(
             {'sha1_git':
              hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')})
 
     @patch('swh.web.common.service.storage')
     def test_stat_counters(self, mock_storage):
         # given
         input_stats = {
             "content": 1770830,
             "directory": 211683,
             "directory_entry_dir": 209167,
             "directory_entry_file": 1807094,
             "directory_entry_rev": 0,
             "entity": 0,
             "entity_history": 0,
             "origin": 1096,
             "person": 0,
             "release": 8584,
             "revision": 7792,
             "revision_history": 0,
             "skipped_content": 0
         }
         mock_storage.stat_counters = MagicMock(return_value=input_stats)
 
         # when
         actual_stats = service.stat_counters()
 
         # then
         expected_stats = input_stats
         self.assertEqual(actual_stats, expected_stats)
 
         mock_storage.stat_counters.assert_called_with()
 
     @patch('swh.web.common.service._lookup_origin_visits')
     def test_lookup_origin_visits(self, mock_lookup_visits):
         # given
         date_origin_visit2 = datetime.datetime(
             2013, 7, 1, 20, 0, 0,
             tzinfo=datetime.timezone.utc)
 
         date_origin_visit3 = datetime.datetime(
             2015, 1, 1, 21, 0, 0,
             tzinfo=datetime.timezone.utc)
         stub_result = [self.origin_visit1, {
             'date': date_origin_visit2,
             'origin': 1,
             'visit': 2,
             'target': hash_to_bytes(
                 '65a55bbdf3629f916219feb3dcc7393ded1bc8db'),
             'branch': b'master',
             'target_type': 'release',
             'metadata': None,
         }, {
             'date': date_origin_visit3,
             'origin': 1,
             'visit': 3
         }]
         mock_lookup_visits.return_value = stub_result
 
         # when
         expected_origin_visits = [{
             'date': self.origin_visit1['date'].isoformat(),
             'origin': self.origin_visit1['origin'],
             'visit': self.origin_visit1['visit']
         }, {
             'date': date_origin_visit2.isoformat(),
             'origin': 1,
             'visit': 2,
             'target': '65a55bbdf3629f916219feb3dcc7393ded1bc8db',
             'branch': 'master',
             'target_type': 'release',
             'metadata': {},
         }, {
             'date': date_origin_visit3.isoformat(),
             'origin': 1,
             'visit': 3
         }]
 
         actual_origin_visits = service.lookup_origin_visits(6)
 
         # then
         self.assertEqual(list(actual_origin_visits), expected_origin_visits)
 
         mock_lookup_visits.assert_called_once_with(
             6, last_visit=None, limit=10)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_origin_visit(self, mock_storage):
         # given
         stub_result = self.origin_visit1
         mock_storage.origin_visit_get_by.return_value = stub_result
 
         expected_origin_visit = {
             'date': self.origin_visit1['date'].isoformat(),
             'origin': self.origin_visit1['origin'],
             'visit': self.origin_visit1['visit']
         }
 
         # when
         actual_origin_visit = service.lookup_origin_visit(1, 1)
 
         # then
         self.assertEqual(actual_origin_visit, expected_origin_visit)
 
         mock_storage.origin_visit_get_by.assert_called_once_with(1, 1)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_origin(self, mock_storage):
         # given
         mock_storage.origin_get = MagicMock(return_value={
             'id': 'origin-id',
             'url': 'ftp://some/url/to/origin',
             'type': 'ftp'})
 
         # when
         actual_origin = service.lookup_origin({'id': 'origin-id'})
 
         # then
         self.assertEqual(actual_origin, {'id': 'origin-id',
                                          'url': 'ftp://some/url/to/origin',
                                          'type': 'ftp'})
 
         mock_storage.origin_get.assert_called_with({'id': 'origin-id'})
 
     @patch('swh.web.common.service.storage')
     def test_lookup_release_ko_id_checksum_not_a_sha1(self, mock_storage):
         # given
         mock_storage.release_get = MagicMock()
 
         with self.assertRaises(BadInputExc) as cm:
             # when
             service.lookup_release('not-a-sha1')
         self.assertIn('invalid checksum', cm.exception.args[0].lower())
 
         mock_storage.release_get.assert_not_called()
 
     @patch('swh.web.common.service.storage')
     def test_lookup_release_ko_id_checksum_too_long(self, mock_storage):
         # given
         mock_storage.release_get = MagicMock()
 
         # when
         with self.assertRaises(BadInputExc) as cm:
             service.lookup_release(
                 '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4daf5'
                 '1aea892abe')
         self.assertEqual('Only sha1_git is supported.', cm.exception.args[0])
 
         mock_storage.release_get.assert_not_called()
 
     @patch('swh.web.common.service.storage')
     def test_lookup_directory_with_path_not_found(self, mock_storage):
         # given
         mock_storage.lookup_directory_with_path = MagicMock(return_value=None)
 
         sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
 
         # when
         actual_directory = mock_storage.lookup_directory_with_path(
             sha1_git, 'some/path/here')
 
         self.assertIsNone(actual_directory)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_directory_with_path_found(self, mock_storage):
         # given
         sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
         entry = {'id': 'dir-id',
                  'type': 'dir',
                  'name': 'some/path/foo'}
 
         mock_storage.lookup_directory_with_path = MagicMock(return_value=entry)
 
         # when
         actual_directory = mock_storage.lookup_directory_with_path(
             sha1_git, 'some/path/here')
 
         self.assertEqual(entry, actual_directory)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_release(self, mock_storage):
         # given
         mock_storage.release_get = MagicMock(return_value=[{
             'id': hash_to_bytes('65a55bbdf3629f916219feb3dcc7393ded1bc8db'),
             'target': None,
             'date': {
                 'timestamp': datetime.datetime(
                     2015, 1, 1, 22, 0, 0,
                     tzinfo=datetime.timezone.utc).timestamp(),
                 'offset': 0,
                 'negative_utc': True,
             },
             'name': b'v0.0.1',
             'message': b'synthetic release',
             'synthetic': True,
         }])
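         # offset 0 with negative_utc set should be rendered as the
         # '-00:00' suffix of the expected ISO date below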
 
         # when
         actual_release = service.lookup_release(
             '65a55bbdf3629f916219feb3dcc7393ded1bc8db')
 
         # then
         self.assertEqual(actual_release, {
             'id': '65a55bbdf3629f916219feb3dcc7393ded1bc8db',
             'target': None,
             'date': '2015-01-01T22:00:00-00:00',
             'name': 'v0.0.1',
             'message': 'synthetic release',
             'synthetic': True,
         })
 
         mock_storage.release_get.assert_called_with(
             [hash_to_bytes('65a55bbdf3629f916219feb3dcc7393ded1bc8db')])
 
     def test_lookup_revision_with_context_ko_not_a_sha1_1(self):
         # given
         sha1_git = '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4' \
                    'daf51aea892abe'
         sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
 
         # when
         with self.assertRaises(BadInputExc) as cm:
             service.lookup_revision_with_context(sha1_git_root, sha1_git)
         self.assertIn('Only sha1_git is supported', cm.exception.args[0])
 
     def test_lookup_revision_with_context_ko_not_a_sha1_2(self):
         # given
         sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
         sha1_git = '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f6' \
                    '2d4daf51aea892abe'
 
         # when
         with self.assertRaises(BadInputExc) as cm:
             service.lookup_revision_with_context(sha1_git_root, sha1_git)
         self.assertIn('Only sha1_git is supported', cm.exception.args[0])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_with_context_ko_sha1_git_does_not_exist(
             self,
             mock_storage):
         # given
         sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
         sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db'
 
         sha1_git_bin = hash_to_bytes(sha1_git)
 
         mock_storage.revision_get.return_value = None
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_revision_with_context(sha1_git_root, sha1_git)
         self.assertIn('Revision 777777bdf3629f916219feb3dcc7393ded1bc8db'
                       ' not found', cm.exception.args[0])
 
         mock_storage.revision_get.assert_called_once_with(
             [sha1_git_bin])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_with_context_ko_root_sha1_git_does_not_exist(
             self,
             mock_storage):
         # given
         sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
         sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db'
 
         sha1_git_root_bin = hash_to_bytes(sha1_git_root)
         sha1_git_bin = hash_to_bytes(sha1_git)
 
         mock_storage.revision_get.side_effect = ['foo', None]
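         # the first call (for sha1_git) returns a truthy stub; the second
         # (for the root) returns None, triggering the NotFoundExc below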
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_revision_with_context(sha1_git_root, sha1_git)
         self.assertIn('Revision root 65a55bbdf3629f916219feb3dcc7393ded1bc8db'
                       ' not found', cm.exception.args[0])
 
         mock_storage.revision_get.assert_has_calls([call([sha1_git_bin]),
                                                     call([sha1_git_root_bin])])
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_revision_with_context(self, mock_query, mock_storage):
         # given
         sha1_git_root = '666'
         sha1_git = '883'
 
         sha1_git_root_bin = b'666'
         sha1_git_bin = b'883'
 
         sha1_git_root_dict = {
             'id': sha1_git_root_bin,
             'parents': [b'999'],
         }
         sha1_git_dict = {
             'id': sha1_git_bin,
             'parents': [],
             'directory': b'278',
         }
 
         stub_revisions = [
             sha1_git_root_dict,
             {
                 'id': b'999',
                 'parents': [b'777', b'883', b'888'],
             },
             {
                 'id': b'777',
                 'parents': [b'883'],
             },
             sha1_git_dict,
             {
                 'id': b'888',
                 'parents': [b'889'],
             },
             {
                 'id': b'889',
                 'parents': [],
             },
         ]
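         # in the stub log above, revisions 999 and 777 both have 883 as a
         # parent, so 883's computed children should be [999, 777]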
 
         # inputs ok
         mock_query.parse_hash_with_algorithms_or_throws.side_effect = [
             ('sha1', sha1_git_bin),
             ('sha1', sha1_git_root_bin)
         ]
 
         # look up revision 883 first, then root 666 (both exist)
         mock_storage.revision_get.return_value = [
             sha1_git_dict,
             sha1_git_root_dict
         ]
 
         mock_storage.revision_log = MagicMock(
             return_value=stub_revisions)
 
         # when
 
         actual_revision = service.lookup_revision_with_context(
             sha1_git_root,
             sha1_git)
 
         # then
         self.assertEqual(actual_revision, {
             'id': hash_to_hex(sha1_git_bin),
             'parents': [],
             'children': [hash_to_hex(b'999'), hash_to_hex(b'777')],
             'directory': hash_to_hex(b'278'),
             'merge': False
         })
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_has_calls(
             [call(sha1_git, ['sha1'], 'Only sha1_git is supported.'),
              call(sha1_git_root, ['sha1'], 'Only sha1_git is supported.')])
 
         mock_storage.revision_log.assert_called_with(
             [sha1_git_root_bin], 100)
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_revision_with_context_retrieved_as_dict(
             self, mock_query, mock_storage):
         # given
         sha1_git = '883'
 
         sha1_git_root_bin = b'666'
         sha1_git_bin = b'883'
 
         sha1_git_root_dict = {
             'id': sha1_git_root_bin,
             'parents': [b'999'],
         }
 
         sha1_git_dict = {
             'id': sha1_git_bin,
             'parents': [],
             'directory': b'278',
         }
 
         stub_revisions = [
             sha1_git_root_dict,
             {
                 'id': b'999',
                 'parents': [b'777', b'883', b'888'],
             },
             {
                 'id': b'777',
                 'parents': [b'883'],
             },
             sha1_git_dict,
             {
                 'id': b'888',
                 'parents': [b'889'],
             },
             {
                 'id': b'889',
                 'parents': [],
             },
         ]
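         # same revision graph as in the previous test, but here the root
         # is passed as an already-resolved dict, so only sha1_git needs
         # parsing and a single revision_get lookup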
 
         # inputs ok
         mock_query.parse_hash_with_algorithms_or_throws.return_value = (
             'sha1', sha1_git_bin)
 
         # lookup only on sha1
         mock_storage.revision_get.return_value = [sha1_git_dict]
 
         mock_storage.revision_log.return_value = stub_revisions
 
         # when
         actual_revision = service.lookup_revision_with_context(
             {'id': sha1_git_root_bin},
             sha1_git)
 
         # then
         self.assertEqual(actual_revision, {
             'id': hash_to_hex(sha1_git_bin),
             'parents': [],
             'children': [hash_to_hex(b'999'), hash_to_hex(b'777')],
             'directory': hash_to_hex(b'278'),
             'merge': False
         })
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             sha1_git, ['sha1'], 'Only sha1_git is supported.')
 
         mock_storage.revision_get.assert_called_once_with([sha1_git_bin])
 
         mock_storage.revision_log.assert_called_with(
             [sha1_git_root_bin], 100)
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_with_revision_not_found(self,
                                                       mock_query,
                                                       mock_storage):
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                         b'123')
         mock_storage.revision_get.return_value = None
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_directory_with_revision('123')
         self.assertIn('Revision 123 not found', cm.exception.args[0])
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             '123', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.revision_get.assert_called_once_with([b'123'])
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_with_revision_ko_revision_with_path_to_nowhere(
             self,
             mock_query,
             mock_storage):
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                         b'123')
 
         dir_id = b'dir-id-as-sha1'
         mock_storage.revision_get.return_value = [{
             'directory': dir_id,
         }]
 
         mock_storage.directory_entry_get_by_path.return_value = None
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_directory_with_revision(
                 '123',
                 'path/to/something/unknown')
         exception_text = cm.exception.args[0].lower()
         self.assertIn('directory or file', exception_text)
         self.assertIn('path/to/something/unknown', exception_text)
         self.assertIn('revision 123', exception_text)
         self.assertIn('not found', exception_text)
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             '123', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.revision_get.assert_called_once_with([b'123'])
         mock_storage.directory_entry_get_by_path.assert_called_once_with(
             b'dir-id-as-sha1', [b'path', b'to', b'something', b'unknown'])
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_with_revision_ko_type_not_implemented(
             self,
             mock_query,
             mock_storage):
 
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                         b'123')
 
         dir_id = b'dir-id-as-sha1'
         mock_storage.revision_get.return_value = [{
             'directory': dir_id,
         }]
 
         mock_storage.directory_entry_get_by_path.return_value = {
             'type': 'rev',
             'name': b'some/path/to/rev',
             'target': b'456'
         }
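         # a directory entry of type 'rev' points to a revision (e.g. a git
         # submodule); resolving it is unsupported, hence the
         # NotImplementedError asserted below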
 
         stub_content = {
             'id': b'12',
             'type': 'file'
         }
 
         mock_storage.content_get.return_value = stub_content
 
         # when
         with self.assertRaises(NotImplementedError) as cm:
             service.lookup_directory_with_revision(
                 '123',
                 'some/path/to/rev')
         self.assertIn("Entity of type rev not implemented.",
                       cm.exception.args[0])
 
         # then
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             '123', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.revision_get.assert_called_once_with([b'123'])
         mock_storage.directory_entry_get_by_path.assert_called_once_with(
             b'dir-id-as-sha1', [b'some', b'path', b'to', b'rev'])
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_with_revision_revision_without_path(
         self, mock_query, mock_storage,
     ):
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                         b'123')
 
         dir_id = b'dir-id-as-sha1'
         mock_storage.revision_get.return_value = [{
             'directory': dir_id,
         }]
 
         stub_dir_entries = [{
             'id': b'123',
             'type': 'dir'
         }, {
             'id': b'456',
             'type': 'file'
         }]
 
         mock_storage.directory_ls.return_value = stub_dir_entries
 
         # when
         actual_directory_entries = service.lookup_directory_with_revision(
             '123')
 
         self.assertEqual(actual_directory_entries['type'], 'dir')
         self.assertEqual(list(actual_directory_entries['content']),
                          stub_dir_entries)
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             '123', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.revision_get.assert_called_once_with([b'123'])
         mock_storage.directory_ls.assert_called_once_with(dir_id)
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_with_revision_with_path_to_dir(self,
                                                              mock_query,
                                                              mock_storage):
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                         b'123')
 
         dir_id = b'dir-id-as-sha1'
         mock_storage.revision_get.return_value = [{
             'directory': dir_id,
         }]
 
         stub_dir_entries = [{
             'id': b'12',
             'type': 'dir'
         }, {
             'id': b'34',
             'type': 'file'
         }]
 
         mock_storage.directory_entry_get_by_path.return_value = {
             'type': 'dir',
             'name': b'some/path',
             'target': b'456'
         }
         mock_storage.directory_ls.return_value = stub_dir_entries
 
         # when
         actual_directory_entries = service.lookup_directory_with_revision(
             '123',
             'some/path')
 
         self.assertEqual(actual_directory_entries['type'], 'dir')
         self.assertEqual(actual_directory_entries['revision'], '123')
         self.assertEqual(actual_directory_entries['path'], 'some/path')
         self.assertEqual(list(actual_directory_entries['content']),
                          stub_dir_entries)
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             '123', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.revision_get.assert_called_once_with([b'123'])
         mock_storage.directory_entry_get_by_path.assert_called_once_with(
             dir_id,
             [b'some', b'path'])
         mock_storage.directory_ls.assert_called_once_with(b'456')
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_with_revision_with_path_to_file_wo_data(
             self,
             mock_query,
             mock_storage):
 
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                         b'123')
 
         dir_id = b'dir-id-as-sha1'
         mock_storage.revision_get.return_value = [{
             'directory': dir_id,
         }]
 
         mock_storage.directory_entry_get_by_path.return_value = {
             'type': 'file',
             'name': b'some/path/to/file',
             'target': b'789'
         }
 
         stub_content = {
             'status': 'visible',
         }
 
         mock_storage.content_find.return_value = stub_content
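         # with_data is left at its default (False), so only the metadata
         # returned by content_find is expected in the result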
 
         # when
         actual_content = service.lookup_directory_with_revision(
             '123',
             'some/path/to/file')
 
         # then
         self.assertEqual(actual_content, {'type': 'file',
                                           'revision': '123',
                                           'path': 'some/path/to/file',
                                           'content': stub_content})
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             '123', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.revision_get.assert_called_once_with([b'123'])
         mock_storage.directory_entry_get_by_path.assert_called_once_with(
             b'dir-id-as-sha1', [b'some', b'path', b'to', b'file'])
         mock_storage.content_find.assert_called_once_with({'sha1_git': b'789'})
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_with_revision_with_path_to_file_w_data(
             self,
             mock_query,
             mock_storage):
 
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                         b'123')
 
         dir_id = b'dir-id-as-sha1'
         mock_storage.revision_get.return_value = [{
             'directory': dir_id,
         }]
 
         mock_storage.directory_entry_get_by_path.return_value = {
             'type': 'file',
             'name': b'some/path/to/file',
             'target': b'789'
         }
 
         stub_content = {
             'status': 'visible',
             'sha1': b'content-sha1'
         }
 
         mock_storage.content_find.return_value = stub_content
         mock_storage.content_get.return_value = [{
             'sha1': b'content-sha1',
             'data': b'some raw data'
         }]
 
         expected_content = {
             'status': 'visible',
             'checksums': {
                 'sha1': hash_to_hex(b'content-sha1'),
             },
             'data': b'some raw data'
         }
 
         # when
         actual_content = service.lookup_directory_with_revision(
             '123',
             'some/path/to/file',
             with_data=True)
 
         # then
         self.assertEqual(actual_content, {'type': 'file',
                                           'revision': '123',
                                           'path': 'some/path/to/file',
                                           'content': expected_content})
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
             '123', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.revision_get.assert_called_once_with([b'123'])
         mock_storage.directory_entry_get_by_path.assert_called_once_with(
             b'dir-id-as-sha1', [b'some', b'path', b'to', b'file'])
         mock_storage.content_find.assert_called_once_with({'sha1_git': b'789'})
         mock_storage.content_get.assert_called_once_with([b'content-sha1'])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision(self, mock_storage):
         # given
         mock_storage.revision_get = MagicMock(
             return_value=[self.SAMPLE_REVISION_RAW])
 
         # when
         actual_revision = service.lookup_revision(
             self.SHA1_SAMPLE)
 
         # then
         self.assertEqual(actual_revision, self.SAMPLE_REVISION)
 
         mock_storage.revision_get.assert_called_with(
             [self.SHA1_SAMPLE_BIN])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_invalid_msg(self, mock_storage):
         # given
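         # b'\xff' is not valid UTF-8, so message decoding is expected to
         # fail and be reported via message_decoding_failed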
         stub_rev = self.SAMPLE_REVISION_RAW
         stub_rev['message'] = b'elegant fix for bug \xff'
 
         expected_revision = self.SAMPLE_REVISION
         expected_revision['message'] = None
         expected_revision['message_decoding_failed'] = True
         mock_storage.revision_get = MagicMock(return_value=[stub_rev])
 
         # when
         actual_revision = service.lookup_revision(
             self.SHA1_SAMPLE)
 
         # then
         self.assertEqual(actual_revision, expected_revision)
 
         mock_storage.revision_get.assert_called_with(
             [self.SHA1_SAMPLE_BIN])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_msg_ok(self, mock_storage):
         # given
         mock_storage.revision_get.return_value = [self.SAMPLE_REVISION_RAW]
 
         # when
         rv = service.lookup_revision_message(
             self.SHA1_SAMPLE)
 
         # then
         self.assertEqual(rv, {'message': self.SAMPLE_MESSAGE_BIN})
         mock_storage.revision_get.assert_called_with(
             [self.SHA1_SAMPLE_BIN])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_msg_absent(self, mock_storage):
         # given
         stub_revision = self.SAMPLE_REVISION_RAW
         del stub_revision['message']
         mock_storage.revision_get.return_value = [stub_revision]
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_revision_message(
                 self.SHA1_SAMPLE)
 
         # then
         mock_storage.revision_get.assert_called_with(
             [self.SHA1_SAMPLE_BIN])
         self.assertEqual(
             cm.exception.args[0],
             'No message for revision with sha1_git %s.' % self.SHA1_SAMPLE,
         )
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_msg_norev(self, mock_storage):
         # given
         mock_storage.revision_get.return_value = None
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_revision_message(
                 self.SHA1_SAMPLE)
 
         # then
         mock_storage.revision_get.assert_called_with(
             [self.SHA1_SAMPLE_BIN])
         self.assertEqual(
             cm.exception.args[0],
             'Revision with sha1_git %s not found.' % self.SHA1_SAMPLE,
         )
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_multiple(self, mock_storage):
         # given
         sha1 = self.SHA1_SAMPLE
         sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'
 
         stub_revisions = [
             self.SAMPLE_REVISION_RAW,
             {
                 'id': hash_to_bytes(sha1_other),
                 'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5',
                 'author': {
                     'name': b'name',
                     'email': b'name@surname.org',
                 },
                 'committer': {
                     'name': b'name',
                     'email': b'name@surname.org',
                 },
                 'message': b'ugly fix for bug 42',
                 'date': {
                     'timestamp': datetime.datetime(
                         2000, 1, 12, 5, 23, 54,
                         tzinfo=datetime.timezone.utc).timestamp(),
                     'offset': 0,
                     'negative_utc': False
                 },
                 'date_offset': 0,
                 'committer_date': {
                     'timestamp': datetime.datetime(
                         2000, 1, 12, 5, 23, 54,
                         tzinfo=datetime.timezone.utc).timestamp(),
                     'offset': 0,
                     'negative_utc': False
                 },
                 'committer_date_offset': 0,
                 'synthetic': False,
                 'type': 'git',
                 'parents': [],
                 'metadata': [],
             }
         ]
 
         mock_storage.revision_get.return_value = stub_revisions
 
         # when
         actual_revisions = service.lookup_revision_multiple(
             [sha1, sha1_other])
 
         # then
         self.assertEqual(list(actual_revisions), [
             self.SAMPLE_REVISION,
             {
                 'id': sha1_other,
                 'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5',
                 'author': {
                     'name': 'name',
                     'email': 'name@surname.org',
                 },
                 'committer': {
                     'name': 'name',
                     'email': 'name@surname.org',
                 },
                 'message': 'ugly fix for bug 42',
                 'date': '2000-01-12T05:23:54+00:00',
                 'date_offset': 0,
                 'committer_date': '2000-01-12T05:23:54+00:00',
                 'committer_date_offset': 0,
                 'synthetic': False,
                 'type': 'git',
                 'parents': [],
                 'metadata': {},
                 'merge': False
             }
         ])
 
         self.assertEqual(
             list(mock_storage.revision_get.call_args[0][0]),
             [hash_to_bytes(sha1),
              hash_to_bytes(sha1_other)])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_multiple_none_found(self, mock_storage):
         # given
         sha1 = self.SHA1_SAMPLE
         sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'
 
         mock_storage.revision_get.return_value = []
 
         # when
         actual_revisions = service.lookup_revision_multiple(
             [sha1, sha1_other])

         # then
         self.assertEqual(list(actual_revisions), [])
 
         self.assertEqual(
             list(mock_storage.revision_get.call_args[0][0]),
             [hash_to_bytes(self.SHA1_SAMPLE),
              hash_to_bytes(sha1_other)])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_log(self, mock_storage):
         # given
         stub_revision_log = [self.SAMPLE_REVISION_RAW]
         mock_storage.revision_log = MagicMock(return_value=stub_revision_log)
 
         # when
         actual_revision = service.lookup_revision_log(
             'abcdbe353ed3480476f032475e7c233eff7371d5',
             limit=25)
 
         # then
         self.assertEqual(list(actual_revision), [self.SAMPLE_REVISION])
 
         mock_storage.revision_log.assert_called_with(
             [hash_to_bytes('abcdbe353ed3480476f032475e7c233eff7371d5')], 25)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_log_by(self, mock_storage):
         # given
         stub_revision_log = [self.SAMPLE_REVISION_RAW]
         mock_storage.revision_log_by = MagicMock(
             return_value=stub_revision_log)
 
         # when
         actual_log = service.lookup_revision_log_by(
             1, 'refs/heads/master', None, limit=100)
         # then
         self.assertEqual(list(actual_log), [self.SAMPLE_REVISION])
 
         mock_storage.revision_log_by.assert_called_with(
             1, 'refs/heads/master', None, limit=100)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_log_by_nolog(self, mock_storage):
         # given
         mock_storage.revision_log_by = MagicMock(return_value=None)
 
         # when
         res = service.lookup_revision_log_by(
             1, 'refs/heads/master', None, limit=100)
         # then
         self.assertEqual(res, None)
         mock_storage.revision_log_by.assert_called_with(
             1, 'refs/heads/master', None, limit=100)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_content_raw_not_found(self, mock_storage):
         # given
         mock_storage.content_find = MagicMock(return_value=None)
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_content_raw('sha1:' + self.SHA1_SAMPLE)
         self.assertIn('Content with %s checksum equals to %s not found!'
                       % ('sha1', self.SHA1_SAMPLE),
                       cm.exception.args[0])
 
         mock_storage.content_find.assert_called_with(
             {'sha1': hash_to_bytes(self.SHA1_SAMPLE)})
 
     @patch('swh.web.common.service.storage')
     def test_lookup_content_raw(self, mock_storage):
         # given
         mock_storage.content_find = MagicMock(return_value={
             'sha1': self.SHA1_SAMPLE,
         })
         mock_storage.content_get = MagicMock(return_value=[{
             'data': b'binary data'}])
 
         # when
         actual_content = service.lookup_content_raw(
             'sha256:%s' % self.SHA256_SAMPLE)
 
         # then
         self.assertEqual(actual_content, {'data': b'binary data'})
 
         mock_storage.content_find.assert_called_once_with(
             {'sha256': self.SHA256_SAMPLE_BIN})
         mock_storage.content_get.assert_called_once_with(
             [self.SHA1_SAMPLE])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_content_not_found(self, mock_storage):
         # given
         mock_storage.content_find = MagicMock(return_value=None)
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             # then
             service.lookup_content('sha1:%s' % self.SHA1_SAMPLE)
         self.assertIn('Content with %s checksum equals to %s not found!'
                       % ('sha1', self.SHA1_SAMPLE),
                       cm.exception.args[0])
 
         mock_storage.content_find.assert_called_with(
             {'sha1': self.SHA1_SAMPLE_BIN})
 
     @patch('swh.web.common.service.storage')
     def test_lookup_content_with_sha1(self, mock_storage):
         # given
         mock_storage.content_find = MagicMock(
             return_value=self.SAMPLE_CONTENT_RAW)
 
         # when
         actual_content = service.lookup_content(
             'sha1:%s' % self.SHA1_SAMPLE)
 
         # then
         self.assertEqual(actual_content, self.SAMPLE_CONTENT)
 
         mock_storage.content_find.assert_called_with(
             {'sha1': hash_to_bytes(self.SHA1_SAMPLE)})
 
     @patch('swh.web.common.service.storage')
     def test_lookup_content_with_sha256(self, mock_storage):
         # given
         stub_content = self.SAMPLE_CONTENT_RAW
         stub_content['status'] = 'visible'
 
         expected_content = self.SAMPLE_CONTENT
         expected_content['status'] = 'visible'
         mock_storage.content_find = MagicMock(
             return_value=stub_content)
 
         # when
         actual_content = service.lookup_content(
             'sha256:%s' % self.SHA256_SAMPLE)
 
         # then
         self.assertEqual(actual_content, expected_content)
 
         mock_storage.content_find.assert_called_with(
             {'sha256': self.SHA256_SAMPLE_BIN})
 
     @patch('swh.web.common.service.storage')
     def test_lookup_person(self, mock_storage):
         # given
         mock_storage.person_get = MagicMock(return_value=[{
             'id': 'person_id',
             'name': b'some_name',
             'email': b'some-email',
         }])
 
         # when
         actual_person = service.lookup_person('person_id')
 
         # then
         self.assertEqual(actual_person, {
             'id': 'person_id',
             'name': 'some_name',
             'email': 'some-email',
         })
 
         mock_storage.person_get.assert_called_with(['person_id'])
 
     @patch('swh.web.common.service.storage')
     def test_lookup_directory_bad_checksum(self, mock_storage):
         # given
         mock_storage.directory_ls = MagicMock()
 
         # when
         with self.assertRaises(BadInputExc):
             service.lookup_directory('directory_id')
 
         # then
         self.assertFalse(mock_storage.directory_ls.called)
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory_not_found(self, mock_query, mock_storage):
         # given
         mock_query.parse_hash_with_algorithms_or_throws.return_value = (
             'sha1',
             'directory-id-bin')
         mock_storage.directory_ls.return_value = []
 
         # when
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_directory('directory_id')
 
         self.assertIn('Directory with sha1_git directory_id not found',
                       cm.exception.args[0])
 
         # then
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_with(
             'directory_id', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.directory_ls.assert_called_with('directory-id-bin')
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_directory(self, mock_query, mock_storage):
         mock_query.parse_hash_with_algorithms_or_throws.return_value = (
             'sha1',
             'directory-sha1-bin')
 
         # given
         stub_dir_entries = [{
             'sha1': self.SHA1_SAMPLE_BIN,
             'sha256': self.SHA256_SAMPLE_BIN,
             'sha1_git': self.SHA1GIT_SAMPLE_BIN,
             'blake2s256': self.BLAKE2S256_SAMPLE_BIN,
             'target': hash_to_bytes(
                 '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
             'dir_id': self.DIRECTORY_ID_BIN,
             'name': b'bob',
             'type': 10,
         }]
 
         expected_dir_entries = [{
             'checksums': {
                 'sha1': self.SHA1_SAMPLE,
                 'sha256': self.SHA256_SAMPLE,
                 'sha1_git': self.SHA1GIT_SAMPLE,
                 'blake2s256': self.BLAKE2S256_SAMPLE
             },
             'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
             'dir_id': self.DIRECTORY_ID,
             'name': 'bob',
             'type': 10,
         }]
 
         mock_storage.directory_ls.return_value = stub_dir_entries
 
         # when
         actual_directory_ls = list(service.lookup_directory(
             'directory-sha1'))
 
         # then
         self.assertEqual(actual_directory_ls, expected_dir_entries)
 
         mock_query.parse_hash_with_algorithms_or_throws.assert_called_with(
             'directory-sha1', ['sha1'], 'Only sha1_git is supported.')
         mock_storage.directory_ls.assert_called_with(
             'directory-sha1-bin')
 
     @patch('swh.web.common.service.storage')
     def test_lookup_directory_empty(self, mock_storage):
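         # 4b825dc642cb6eb9a060e54bf8d69288fbee4904 is the sha1_git of the
         # empty directory (git's well-known empty tree id); the service
         # short-circuits it, so storage must never be queried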
         empty_dir_sha1 = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
         mock_storage.directory_ls.return_value = []
 
         # when
         actual_directory_ls = list(service.lookup_directory(empty_dir_sha1))
 
         # then
         self.assertEqual(actual_directory_ls, [])
 
         self.assertFalse(mock_storage.directory_ls.called)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_by_nothing_found(self, mock_storage):
         # given
         mock_storage.revision_get_by.return_value = None
 
         # when
         with self.assertRaises(NotFoundExc):
             service.lookup_revision_by(1)

         # then
         mock_storage.revision_get_by.assert_called_with(
             1, 'refs/heads/master', limit=1, timestamp=None)
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_by(self, mock_storage):
         # given
         stub_rev = self.SAMPLE_REVISION_RAW
 
         expected_rev = self.SAMPLE_REVISION
 
         mock_storage.revision_get_by.return_value = [stub_rev]
 
         # when
         actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
 
         # then
         self.assertEqual(actual_revision, expected_rev)
 
         mock_storage.revision_get_by.assert_called_with(10, 'master2',
                                                         limit=1,
                                                         timestamp='some-ts')
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_by_nomerge(self, mock_storage):
         # given
         stub_rev = self.SAMPLE_REVISION_RAW
         stub_rev['parents'] = [
             hash_to_bytes('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc')]
 
         expected_rev = self.SAMPLE_REVISION
         expected_rev['parents'] = ['adc83b19e793491b1c6ea0fd8b46cd9f32e592fc']
         mock_storage.revision_get_by.return_value = [stub_rev]
 
         # when
         actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
 
         # then
         self.assertEqual(actual_revision, expected_rev)
 
         mock_storage.revision_get_by.assert_called_with(10, 'master2',
                                                         limit=1,
                                                         timestamp='some-ts')
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_by_merge(self, mock_storage):
         # given
         stub_rev = self.SAMPLE_REVISION_RAW
         stub_rev['parents'] = [
             hash_to_bytes('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'),
             hash_to_bytes('ffff3b19e793491b1c6db0fd8b46cd9f32e592fc')
         ]
 
         expected_rev = self.SAMPLE_REVISION
         expected_rev['parents'] = [
             'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
             'ffff3b19e793491b1c6db0fd8b46cd9f32e592fc'
         ]
         expected_rev['merge'] = True
 
         mock_storage.revision_get_by.return_value = [stub_rev]
 
         # when
         actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
 
         # then
         self.assertEqual(actual_revision, expected_rev)
 
         mock_storage.revision_get_by.assert_called_with(10, 'master2',
                                                         limit=1,
                                                         timestamp='some-ts')
 
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_with_context_by_ko(self, mock_storage):
         # given
         mock_storage.revision_get_by.return_value = None
 
         # when
         origin_id = 1
         branch_name = 'master3'
         ts = None
         with self.assertRaises(NotFoundExc) as cm:
             service.lookup_revision_with_context_by(origin_id, branch_name, ts,
                                                     'sha1')
         # then
         self.assertIn(
             'Revision with (origin_id: %s, branch_name: %s'
             ', ts: %s) not found.' % (origin_id,
                                       branch_name,
                                       ts), cm.exception.args[0])
 
         mock_storage.revision_get_by.assert_called_once_with(
             origin_id, branch_name, limit=1, timestamp=ts)
 
     @patch('swh.web.common.service.lookup_revision_with_context')
     @patch('swh.web.common.service.storage')
     def test_lookup_revision_with_context_by(
             self, mock_storage, mock_lookup_revision_with_context,
     ):
         # given
         stub_root_rev = {'id': 'root-rev-id'}
         mock_storage.revision_get_by.return_value = [stub_root_rev]
         stub_rev = {'id': 'rev-found'}
         mock_lookup_revision_with_context.return_value = stub_rev
 
         # when
         origin_id = 1
         branch_name = 'master3'
         ts = None
         sha1_git = 'sha1'
         actual_root_rev, actual_rev = service.lookup_revision_with_context_by(
             origin_id, branch_name, ts, sha1_git)
 
         # then
         self.assertEqual(actual_root_rev, stub_root_rev)
         self.assertEqual(actual_rev, stub_rev)
 
         mock_storage.revision_get_by.assert_called_once_with(
             origin_id, branch_name, limit=1, timestamp=ts)
         mock_lookup_revision_with_context.assert_called_once_with(
             stub_root_rev, sha1_git, 100)
 
     @patch('swh.web.common.service.storage')
     @patch('swh.web.common.service.query')
     def test_lookup_entity_by_uuid(self, mock_query, mock_storage):
         # given
         uuid_test = 'correct-uuid'
         mock_query.parse_uuid4.return_value = uuid_test
         stub_entities = [{'uuid': uuid_test}]
 
         mock_storage.entity_get.return_value = stub_entities
 
         # when
         actual_entities = list(service.lookup_entity_by_uuid(uuid_test))
 
         # then
         self.assertEqual(actual_entities, stub_entities)
 
         mock_query.parse_uuid4.assert_called_once_with(uuid_test)
         mock_storage.entity_get.assert_called_once_with(uuid_test)
 
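     # lookup_revision_through dispatches on the keys present in its dict
     # argument; the tests below exercise one dispatch branch each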
     def test_lookup_revision_through_ko_not_implemented(self):
         # then
         with self.assertRaises(NotImplementedError):
             service.lookup_revision_through({
                 'something-unknown': 10,
             })
 
     @patch('swh.web.common.service.lookup_revision_with_context_by')
     def test_lookup_revision_through_with_context_by(self, mock_lookup):
         # given
         stub_rev = {'id': 'rev'}
         mock_lookup.return_value = stub_rev
 
         # when
         actual_revision = service.lookup_revision_through({
             'origin_id': 1,
             'branch_name': 'master',
             'ts': None,
             'sha1_git': 'sha1-git'
         }, limit=1000)
 
         # then
         self.assertEqual(actual_revision, stub_rev)
 
         mock_lookup.assert_called_once_with(
             1, 'master', None, 'sha1-git', 1000)
 
     @patch('swh.web.common.service.lookup_revision_by')
     def test_lookup_revision_through_with_revision_by(self, mock_lookup):
         # given
         stub_rev = {'id': 'rev'}
         mock_lookup.return_value = stub_rev
 
         # when
         actual_revision = service.lookup_revision_through({
             'origin_id': 2,
             'branch_name': 'master2',
             'ts': 'some-ts',
         }, limit=10)
 
         # then
         self.assertEqual(actual_revision, stub_rev)
 
         mock_lookup.assert_called_once_with(
             2, 'master2', 'some-ts')
 
     @patch('swh.web.common.service.lookup_revision_with_context')
     def test_lookup_revision_through_with_context(self, mock_lookup):
         # given
         stub_rev = {'id': 'rev'}
         mock_lookup.return_value = stub_rev
 
         # when
         actual_revision = service.lookup_revision_through({
             'sha1_git_root': 'some-sha1-root',
             'sha1_git': 'some-sha1',
         })
 
         # then
         self.assertEqual(actual_revision, stub_rev)
 
         mock_lookup.assert_called_once_with(
             'some-sha1-root', 'some-sha1', 100)
 
     @patch('swh.web.common.service.lookup_revision')
     def test_lookup_revision_through_with_revision(self, mock_lookup):
         # given
         stub_rev = {'id': 'rev'}
         mock_lookup.return_value = stub_rev
 
         # when
         actual_revision = service.lookup_revision_through({
             'sha1_git': 'some-sha1',
         })
 
         # then
         self.assertEqual(actual_revision, stub_rev)
 
         mock_lookup.assert_called_once_with(
             'some-sha1')
 
     @patch('swh.web.common.service.lookup_revision_through')
     def test_lookup_directory_through_revision_ko_not_found(
             self, mock_lookup_rev):
         # given
         mock_lookup_rev.return_value = None
 
         # when
         with self.assertRaises(NotFoundExc):
             service.lookup_directory_through_revision(
                 {'id': 'rev'}, 'some/path', 100)
 
         mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100)
 
     @patch('swh.web.common.service.lookup_revision_through')
     @patch('swh.web.common.service.lookup_directory_with_revision')
     def test_lookup_directory_through_revision_ok_with_data(
             self, mock_lookup_dir, mock_lookup_rev):
         # given
         mock_lookup_rev.return_value = {'id': 'rev-id'}
         mock_lookup_dir.return_value = {'type': 'dir',
                                         'content': []}
 
         # when
         rev_id, dir_result = service.lookup_directory_through_revision(
             {'id': 'rev'}, 'some/path', 100)
         # then
         self.assertEqual(rev_id, 'rev-id')
         self.assertEqual(dir_result, {'type': 'dir',
                                       'content': []})
 
         mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100)
         mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', False)
 
     @patch('swh.web.common.service.lookup_revision_through')
     @patch('swh.web.common.service.lookup_directory_with_revision')
     def test_lookup_directory_through_revision_ok_with_content(
             self, mock_lookup_dir, mock_lookup_rev):
         # given
         mock_lookup_rev.return_value = {'id': 'rev-id'}
         stub_result = {'type': 'file',
                        'revision': 'rev-id',
                        'content': {'data': b'blah',
                                    'sha1': 'sha1'}}
         mock_lookup_dir.return_value = stub_result
 
         # when
         rev_id, dir_result = service.lookup_directory_through_revision(
             {'id': 'rev'}, 'some/path', 10, with_data=True)
         # then
         self.assertEqual(rev_id, 'rev-id')
         self.assertEqual(dir_result, stub_result)
 
         mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 10)
         mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', True)