diff --git a/cypress/support/index.js b/cypress/support/index.js
index c9d10fd0..f5d836b6 100644
--- a/cypress/support/index.js
+++ b/cypress/support/index.js
@@ -1,138 +1,142 @@
 /**
  * Copyright (C) 2019  The Software Heritage developers
  * See the AUTHORS file at the top-level directory of this distribution
  * License: GNU Affero General Public License version 3, or any later version
  * See top-level LICENSE file for more information
  */
 
 import {httpGetJson} from '../utils';
 
 Cypress.Screenshot.defaults({
   screenshotOnRunFailure: false
 });
 
 before(function() {
   this.unarchivedRepo = {
     url: 'https://github.com/SoftwareHeritage/swh-web',
     type: 'git',
     revision: '7bf1b2f489f16253527807baead7957ca9e8adde',
     snapshot: 'd9829223095de4bb529790de8ba4e4813e38672d',
     rootDirectory: '7d887d96c0047a77e2e8c4ee9bb1528463677663',
     content: [{
       sha1git: 'b203ec39300e5b7e97b6e20986183cbd0b797859'
     }]
   };
 
   this.origin = [{
     url: 'https://github.com/memononen/libtess2',
     type: 'git',
     content: [{
       path: 'Source/tess.h'
     }, {
       path: 'premake4.lua'
     }],
     directory: [{
       path: 'Source',
       id: 'cd19126d815470b28919d64b2a8e6a3e37f900dd'
     }],
     revisions: [],
     invalidSubDir: 'Source1'
   }, {
     url: 'https://github.com/wcoder/highlightjs-line-numbers.js',
     type: 'git',
     content: [],
     directory: [],
     revisions: ['1c480a4573d2a003fc2630c21c2b25829de49972']
   }];
 
   const getMetadataForOrigin = async originUrl => {
     const originVisitsApiUrl = this.Urls.api_1_origin_visits(originUrl);
     const originVisits = await httpGetJson(originVisitsApiUrl);
     const lastVisit = originVisits[0];
     const snapshotApiUrl = this.Urls.api_1_snapshot(lastVisit.snapshot);
     const lastOriginSnapshot = await httpGetJson(snapshotApiUrl);
-    const revisionApiUrl = this.Urls.api_1_revision(lastOriginSnapshot.branches.HEAD.target);
+    let revision = lastOriginSnapshot.branches.HEAD.target;
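+    // when HEAD is an alias, its target holds the name of the branch
+    // it actually points to, so dereference it to get the revision id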
+    if (lastOriginSnapshot.branches.HEAD.target_type === 'alias') {
+      revision = lastOriginSnapshot.branches[revision].target;
+    }
+    const revisionApiUrl = this.Urls.api_1_revision(revision);
     const lastOriginHeadRevision = await httpGetJson(revisionApiUrl);
     return {
       'directory': lastOriginHeadRevision.directory,
       'revision': lastOriginHeadRevision.id,
       'snapshot': lastOriginSnapshot.id
     };
   };
 
   cy.visit('/').window().then(async win => {
     this.Urls = win.Urls;
 
     for (let origin of this.origin) {
 
       const metadata = await getMetadataForOrigin(origin.url);
       const directoryApiUrl = this.Urls.api_1_directory(metadata.directory);
       origin.dirContent = await httpGetJson(directoryApiUrl);
       origin.rootDirectory = metadata.directory;
       origin.revisions.push(metadata.revision);
       origin.snapshot = metadata.snapshot;
 
       for (let content of origin.content) {
 
         const contentPathApiUrl = this.Urls.api_1_directory(origin.rootDirectory, content.path);
         const contentMetaData = await httpGetJson(contentPathApiUrl);
 
         content.name = contentMetaData.name.split('/').slice(-1)[0];
         content.sha1git = contentMetaData.target;
         content.directory = contentMetaData.dir_id;
 
         content.rawFilePath = this.Urls.browse_content_raw(`sha1_git:${content.sha1git}`) +
                               `?filename=${encodeURIComponent(content.name)}`;
 
         cy.request(content.rawFilePath)
           .then((response) => {
             const fileText = response.body;
             const fileLines = fileText.split('\n');
             content.numberLines = fileLines.length;
 
             // if the last line is empty, it is not shown
             if (!fileLines[content.numberLines - 1]) content.numberLines -= 1;
           });
       }
 
     }
   });
 });
 
 // force the use of the fetch polyfill wrapping XMLHttpRequest
 // so that cypress is able to intercept and stub those requests
 Cypress.on('window:before:load', win => {
   win.fetch = null;
 });
 
 // Ensure code coverage data does not get lost each time a new
 // page is loaded during a single test execution
 let windowCoverageObjects;
 
 before(() => {
   cy.task('resetCoverage', { isInteractive: Cypress.config('isInteractive') });
 });
 
 beforeEach(() => {
   windowCoverageObjects = [];
   // save reference to coverage for each app window loaded in the test
   cy.on('window:load', (win) => {
     // if the application code has been instrumented, the app iframe
     // "window" exposes a __coverage__ object
     const applicationSourceCoverage = win.__coverage__;
     if (applicationSourceCoverage) {
       windowCoverageObjects.push(applicationSourceCoverage);
     }
   });
 });
 
 afterEach(() => {
   // save coverage after the test
   // because now the window coverage objects have been updated
   windowCoverageObjects.forEach((coverage) => {
     cy.task('combineCoverage', JSON.stringify(coverage));
   });
 });
 
 after(() => {
   cy.task('coverageReport');
 });
diff --git a/swh/web/common/service.py b/swh/web/common/service.py
index b9daca38..413bed72 100644
--- a/swh/web/common/service.py
+++ b/swh/web/common/service.py
@@ -1,1101 +1,1115 @@
 # Copyright (C) 2015-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import os
 
 from collections import defaultdict
 
 from swh.model import hashutil
 
 from swh.storage.algos import diff, revisions_walker
 
 from swh.web.common import converters
 from swh.web.common import query
 from swh.web.common.exc import NotFoundExc
 from swh.web.common.origin_visits import get_origin_visit
 from swh.web import config
 
 storage = config.storage()
 vault = config.vault()
 idx_storage = config.indexer_storage()
 
 
 MAX_LIMIT = 50  # Top limit the users can ask for
 
 
 def _first_element(l):
     """Returns the first element in the provided list or None
     if it is empty or None"""
     return next(iter(l or []), None)
 
 
 def lookup_multiple_hashes(hashes):
     """Lookup the passed hashes in a single DB connection, using batch
     processing.
 
     Args:
         hashes: array of {filename: X, sha1: Y} dicts, where X is a
         filename string and Y a hexadecimal sha1 string.
     Returns:
         The same array with each element updated with elem['found'] = True
         if the hash is present in storage, elem['found'] = False if not.
 
     """
     hashlist = [hashutil.hash_to_bytes(elem['sha1']) for elem in hashes]
     content_missing = storage.content_missing_per_sha1(hashlist)
     missing = [hashutil.hash_to_hex(x) for x in content_missing]
     for x in hashes:
         x.update({'found': True})
     for h in hashes:
         if h['sha1'] in missing:
             h['found'] = False
     return hashes
 
 
 def lookup_expression(expression, last_sha1, per_page):
     """Lookup expression in raw content.
 
     Args:
         expression (str): An expression to lookup through raw indexed
         content
         last_sha1 (str): Last sha1 seen
         per_page (int): Number of results per page
 
     Yields:
         ctags entries for contents matching the expression
 
     """
 
     limit = min(per_page, MAX_LIMIT)
     ctags = idx_storage.content_ctags_search(expression,
                                              last_sha1=last_sha1,
                                              limit=limit)
     for ctag in ctags:
         ctag = converters.from_swh(ctag, hashess={'id'})
         ctag['sha1'] = ctag['id']
         ctag.pop('id')
         yield ctag
 
 
 def lookup_hash(q):
     """Checks if the storage contains a given content checksum
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Returns:
         Dict with key 'found' containing the content info if the hash is
         present (None if not) and key 'algo' naming the checksum
         algorithm used.
 
     """
     algo, hash = query.parse_hash(q)
     found = _first_element(storage.content_find({algo: hash}))
     return {'found': converters.from_content(found),
             'algo': algo}
 
 
 def search_hash(q):
     """Checks if the storage contains a given content checksum
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Returns:
         Dict with key 'found' set to True or False, according to
         whether the checksum is present or not
 
     """
     algo, hash = query.parse_hash(q)
     found = _first_element(storage.content_find({algo: hash}))
     return {'found': found is not None}
 
 
 def _lookup_content_sha1(q):
     """Given a possible input, query for the content's sha1.
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Returns:
         binary sha1 if found or None
 
     """
     algo, hash = query.parse_hash(q)
     if algo != 'sha1':
         hashes = _first_element(storage.content_find({algo: hash}))
         if not hashes:
             return None
         return hashes['sha1']
     return hash
 
 
 def lookup_content_ctags(q):
     """Return ctags information from a specified content.
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Yields:
         ctags information dicts if the content is found.
 
     """
     sha1 = _lookup_content_sha1(q)
 
     if not sha1:
         return None
 
     ctags = list(idx_storage.content_ctags_get([sha1]))
     if not ctags:
         return None
 
     for ctag in ctags:
         yield converters.from_swh(ctag, hashess={'id'})
 
 
 def lookup_content_filetype(q):
     """Return filetype information from a specified content.
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Returns:
         filetype information (dict) if the content is found, None otherwise.
 
     """
     sha1 = _lookup_content_sha1(q)
     if not sha1:
         return None
     filetype = _first_element(list(idx_storage.content_mimetype_get([sha1])))
     if not filetype:
         return None
     return converters.from_filetype(filetype)
 
 
 def lookup_content_language(q):
     """Return language information from a specified content.
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Returns:
         language information (dict) if the content is found, None otherwise.
 
     """
     sha1 = _lookup_content_sha1(q)
     if not sha1:
         return None
     lang = _first_element(list(idx_storage.content_language_get([sha1])))
     if not lang:
         return None
     return converters.from_swh(lang, hashess={'id'})
 
 
 def lookup_content_license(q):
     """Return license information from a specified content.
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Returns:
         license information (dict) if the content is found, None otherwise.
 
     """
     sha1 = _lookup_content_sha1(q)
     if not sha1:
         return None
     lic = _first_element(idx_storage.content_fossology_license_get([sha1]))
 
     if not lic:
         return None
     return converters.from_swh({'id': sha1, 'facts': lic[sha1]},
                                hashess={'id'})
 
 
 def lookup_origin(origin):
     """Return information about the origin matching dict origin.
 
     Args:
         origin: origin's dict with 'url' key
 
     Returns:
         origin information as dict.
 
     """
     origin_info = storage.origin_get(origin)
     if not origin_info:
         msg = 'Origin with url %s not found!' % origin['url']
         raise NotFoundExc(msg)
     return converters.from_origin(origin_info)
 
 
 def lookup_origins(origin_from=1, origin_count=100):
     """Get list of archived software origins in a paginated way.
 
     Origins are sorted by id before returning them
 
     Args:
         origin_from (int): The minimum id of the origins to return
         origin_count (int): The maximum number of origins to return
 
     Yields:
         origins information as dicts
     """
     origins = storage.origin_get_range(origin_from, origin_count)
     return map(converters.from_origin, origins)
 
 
 def search_origin(url_pattern, offset=0, limit=50, regexp=False,
                   with_visit=False):
     """Search for origins whose urls contain a provided string pattern
     or match a provided regular expression.
 
     Args:
         url_pattern: the string pattern to search for in origin urls
         offset: number of found origins to skip before returning results
         limit: the maximum number of found origins to return
         regexp: if True, consider url_pattern as a regular expression
             rather than a substring pattern
         with_visit: if True, only return origins that have been visited
 
     Returns:
         list of origin information as dict.
 
     """
     origins = storage.origin_search(url_pattern, offset, limit, regexp,
                                     with_visit)
     return map(converters.from_origin, origins)
 
 
 def search_origin_metadata(fulltext, limit=50):
     """Search for origins whose metadata match a provided string pattern.
 
     Args:
         fulltext: the string pattern to search for in origin metadata
         limit: the maximum number of found origins to return
 
     Returns:
         list of origin metadata as dict.
 
     """
     matches = idx_storage.origin_intrinsic_metadata_search_fulltext(
         conjunction=[fulltext], limit=limit)
     results = []
 
     for match in matches:
         match['from_revision'] = hashutil.hash_to_hex(match['from_revision'])
 
         if match['origin_url']:
             origin = storage.origin_get({'url': match['origin_url']})
         else:
             # Fallback to origin-id for idx-storage with outdated db
             origin = storage.origin_get({'id': match['id']})
 
         del match['origin_url']
         if 'id' in match:
             del match['id']
 
         result = converters.from_origin(origin)
         if result:
             result['metadata'] = match
             results.append(result)
 
     return results
 
 
 def lookup_origin_intrinsic_metadata(origin_dict):
     """Return intrinsic metadata for origin whose origin matches given
     origin.
 
     Args:
         origin_dict: origin's dict with keys ('type' AND 'url')
 
     Returns:
         origin metadata.
 
     """
     origin_info = storage.origin_get(origin_dict)
     if not origin_info:
         msg = 'Origin with url %s not found!' % origin_dict['url']
         raise NotFoundExc(msg)
 
     origins = [origin_info['url']]
     match = _first_element(
         idx_storage.origin_intrinsic_metadata_get(origins))
     result = {}
     if match:
         result = match['metadata']
     return result
 
 
 def _to_sha1_bin(sha1_hex):
     _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws(
         sha1_hex,
         ['sha1'],  # HACK: sha1_git really
         'Only sha1_git is supported.')
     return sha1_git_bin
 
 
 def _check_directory_exists(sha1_git, sha1_git_bin):
     if len(list(storage.directory_missing([sha1_git_bin]))):
         raise NotFoundExc('Directory with sha1_git %s not found' % sha1_git)
 
 
 def lookup_directory(sha1_git):
     """Return information about the directory with id sha1_git.
 
     Args:
         sha1_git: the directory's sha1_git identifier as a hexadecimal
         string
 
     Returns:
         directory information as dict.
 
     """
     empty_dir_sha1 = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
 
     if sha1_git == empty_dir_sha1:
         return []
 
     sha1_git_bin = _to_sha1_bin(sha1_git)
 
     _check_directory_exists(sha1_git, sha1_git_bin)
 
     directory_entries = storage.directory_ls(sha1_git_bin)
     return map(converters.from_directory_entry, directory_entries)
 
 
 def lookup_directory_with_path(sha1_git, path_string):
     """Return directory information for entry with path path_string w.r.t.
     root directory pointed by directory_sha1_git
 
     Args:
         - directory_sha1_git: sha1_git corresponding to the directory
         to which we append paths to (hopefully) find the entry
         - the relative path to the entry starting from the directory pointed by
         directory_sha1_git
 
     Raises:
         NotFoundExc if the directory entry is not found
     """
     sha1_git_bin = _to_sha1_bin(sha1_git)
 
     _check_directory_exists(sha1_git, sha1_git_bin)
 
     paths = path_string.strip(os.path.sep).split(os.path.sep)
     queried_dir = storage.directory_entry_get_by_path(
         sha1_git_bin, list(map(lambda p: p.encode('utf-8'), paths)))
 
     if not queried_dir:
         raise NotFoundExc(('Directory entry with path %s from %s not found') %
                           (path_string, sha1_git))
 
     return converters.from_directory_entry(queried_dir)
 
 
 def lookup_release(release_sha1_git):
     """Return information about the release with sha1 release_sha1_git.
 
     Args:
         release_sha1_git: The release's sha1 as hexadecimal
 
     Returns:
         Release information as dict.
 
     Raises:
         ValueError if the identifier provided is not of sha1 nature.
 
     """
     sha1_git_bin = _to_sha1_bin(release_sha1_git)
     release = _first_element(storage.release_get([sha1_git_bin]))
     if not release:
         raise NotFoundExc('Release with sha1_git %s not found.'
                           % release_sha1_git)
     return converters.from_release(release)
 
 
 def lookup_release_multiple(sha1_git_list):
     """Return information about the revisions identified with
     their sha1_git identifiers.
 
     Args:
         sha1_git_list: A list of revision sha1_git identifiers
 
     Returns:
         Release information as dict.
 
     Raises:
         ValueError if the identifier provided is not of sha1 nature.
 
     """
     sha1_bin_list = (_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list)
     releases = storage.release_get(sha1_bin_list) or []
     return (converters.from_release(r) for r in releases)
 
 
 def lookup_revision(rev_sha1_git):
     """Return information about the revision with sha1 revision_sha1_git.
 
     Args:
         revision_sha1_git: The revision's sha1 as hexadecimal
 
     Returns:
         Revision information as dict.
 
     Raises:
         ValueError if the identifier provided is not of sha1 nature.
         NotFoundExc if there is no revision with the provided sha1_git.
 
     """
     sha1_git_bin = _to_sha1_bin(rev_sha1_git)
     revision = _first_element(storage.revision_get([sha1_git_bin]))
     if not revision:
         raise NotFoundExc('Revision with sha1_git %s not found.'
                           % rev_sha1_git)
     return converters.from_revision(revision)
 
 
 def lookup_revision_multiple(sha1_git_list):
     """Return information about the revisions identified with
     their sha1_git identifiers.
 
     Args:
         sha1_git_list: A list of revision sha1_git identifiers
 
     Returns:
         Generator of revision information as dicts.
 
     Raises:
         ValueError if the identifier provided is not of sha1 nature.
 
     """
     sha1_bin_list = (_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list)
     revisions = storage.revision_get(sha1_bin_list) or []
     return (converters.from_revision(r) for r in revisions)
 
 
 def lookup_revision_message(rev_sha1_git):
     """Return the raw message of the revision with sha1 revision_sha1_git.
 
     Args:
         revision_sha1_git: The revision's sha1 as hexadecimal
 
     Returns:
         Decoded revision message as dict {'message': <the_message>}
 
     Raises:
         ValueError if the identifier provided is not of sha1 nature.
         NotFoundExc if the revision is not found, or if it has no message
 
     """
     sha1_git_bin = _to_sha1_bin(rev_sha1_git)
 
     revision = _first_element(storage.revision_get([sha1_git_bin]))
     if not revision:
         raise NotFoundExc('Revision with sha1_git %s not found.'
                           % rev_sha1_git)
     if 'message' not in revision:
         raise NotFoundExc('No message for revision with sha1_git %s.'
                           % rev_sha1_git)
     res = {'message': revision['message']}
     return res
 
 
 def _lookup_revision_id_by(origin, branch_name, timestamp):
     def _get_snapshot_branch(snapshot, branch_name):
         snapshot = lookup_snapshot(snapshot,
                                    branches_from=branch_name,
                                    branches_count=10)
         branch = None
         if branch_name in snapshot['branches']:
             branch = snapshot['branches'][branch_name]
         return branch
 
     if isinstance(origin, int):
         origin = {'id': origin}
     elif isinstance(origin, str):
         origin = {'url': origin}
     else:
         raise TypeError('"origin" must be an int or a string.')
 
     visit = get_origin_visit(origin, visit_ts=timestamp)
     branch = _get_snapshot_branch(visit['snapshot'], branch_name)
     rev_id = None
     if branch and branch['target_type'] == 'revision':
         rev_id = branch['target']
     elif branch and branch['target_type'] == 'alias':
         branch = _get_snapshot_branch(visit['snapshot'], branch['target'])
         if branch and branch['target_type'] == 'revision':
             rev_id = branch['target']
 
     if not rev_id:
         raise NotFoundExc('Revision for origin %s and branch %s not found.'
                           % (origin.get('url'), branch_name))
 
     return rev_id
 
 
 def lookup_revision_by(origin,
                        branch_name='HEAD',
                        timestamp=None):
     """Lookup revision by origin, snapshot branch name and visit timestamp.
 
     If branch_name is not provided, lookup using 'HEAD' as default.
     If timestamp is not provided, use the most recent.
 
     Args:
         origin (Union[int,str]): origin of the revision
         branch_name (str): snapshot branch name
         timestamp (str/int): origin visit time frame
 
     Returns:
         dict: The revision matching the criteria
 
     Raises:
         NotFoundExc if no revision corresponds to the criterion
 
     """
     rev_id = _lookup_revision_id_by(origin, branch_name, timestamp)
     return lookup_revision(rev_id)
 
 
 def lookup_revision_log(rev_sha1_git, limit):
     """Lookup revision log by revision id.
 
     Args:
         rev_sha1_git (str): The revision's sha1 as hexadecimal
         limit (int): the maximum number of revisions returned
 
     Returns:
         list: Revision log as list of revision dicts
 
     Raises:
         ValueError: if the identifier provided is not of sha1 nature.
         NotFoundExc: if there is no revision with the provided sha1_git.
 
     """
     lookup_revision(rev_sha1_git)
     sha1_git_bin = _to_sha1_bin(rev_sha1_git)
     revision_entries = storage.revision_log([sha1_git_bin], limit)
     return map(converters.from_revision, revision_entries)
 
 
 def lookup_revision_log_by(origin, branch_name, timestamp, limit):
     """Lookup revision by origin, snapshot branch name and visit timestamp.
 
     Args:
         origin (Union[int,str]): origin of the revision
         branch_name (str): snapshot branch
         timestamp (str/int): origin visit time frame
         limit (int): the maximum number of revisions returned
 
     Returns:
         list: Revision log as list of revision dicts
 
     Raises:
         NotFoundExc: if no revision corresponds to the criterion
 
     """
     rev_id = _lookup_revision_id_by(origin, branch_name, timestamp)
     return lookup_revision_log(rev_id, limit)
 
 
 def lookup_revision_with_context_by(origin, branch_name, timestamp,
                                     sha1_git, limit=100):
     """Return information about revision sha1_git, limited to the
     sub-graph of all transitive parents of sha1_git_root.
     sha1_git_root being resolved through the lookup of a revision by origin,
     branch_name and timestamp.
 
     In other words, sha1_git is an ancestor of sha1_git_root.
 
     Args:
         - origin: origin of the revision.
         - branch_name: revision's branch.
         - timestamp: revision's time frame.
         - sha1_git: one of sha1_git_root's ancestors.
         - limit: limit the lookup to that many revisions back (default: 100).
 
     Returns:
         Pair of (root_revision, revision).
         Information on sha1_git if it is an ancestor of sha1_git_root
         including children leading to sha1_git_root
 
     Raises:
         - BadInputExc in case of unknown algo_hash or bad hash.
         - NotFoundExc if either revision is not found or if sha1_git is not an
         ancestor of sha1_git_root.
 
     """
     rev_root_id = _lookup_revision_id_by(origin, branch_name, timestamp)
 
     rev_root_id_bin = hashutil.hash_to_bytes(rev_root_id)
 
     rev_root = _first_element(storage.revision_get([rev_root_id_bin]))
 
     return (converters.from_revision(rev_root),
             lookup_revision_with_context(rev_root, sha1_git, limit))
 
 
 def lookup_revision_with_context(sha1_git_root, sha1_git, limit=100):
     """Return information about revision sha1_git, limited to the
     sub-graph of all transitive parents of sha1_git_root.
 
     In other words, sha1_git is an ancestor of sha1_git_root.
 
     Args:
         sha1_git_root: latest revision. The type is either a sha1 (as a hex
         string) or a non-converted dict.
         sha1_git: one of sha1_git_root's ancestors
         limit: limit the lookup to that many revisions back (default: 100)
 
     Returns:
         Information on sha1_git if it is an ancestor of sha1_git_root
         including children leading to sha1_git_root
 
     Raises:
         BadInputExc in case of unknown algo_hash or bad hash
         NotFoundExc if either revision is not found or if sha1_git is not an
         ancestor of sha1_git_root
 
     """
     sha1_git_bin = _to_sha1_bin(sha1_git)
 
     revision = _first_element(storage.revision_get([sha1_git_bin]))
     if not revision:
         raise NotFoundExc('Revision %s not found' % sha1_git)
 
     if isinstance(sha1_git_root, str):
         sha1_git_root_bin = _to_sha1_bin(sha1_git_root)
 
         revision_root = _first_element(storage.revision_get([sha1_git_root_bin])) # noqa
         if not revision_root:
             raise NotFoundExc('Revision root %s not found' % sha1_git_root)
     else:
         sha1_git_root_bin = sha1_git_root['id']
 
     revision_log = storage.revision_log([sha1_git_root_bin], limit)
 
     parents = {}
     children = defaultdict(list)
 
     for rev in revision_log:
         rev_id = rev['id']
         parents[rev_id] = []
         for parent_id in rev['parents']:
             parents[rev_id].append(parent_id)
             children[parent_id].append(rev_id)
 
     if revision['id'] not in parents:
         raise NotFoundExc('Revision %s is not an ancestor of %s' %
                           (sha1_git, sha1_git_root))
 
     revision['children'] = children[revision['id']]
 
     return converters.from_revision(revision)
 
 
 def lookup_directory_with_revision(sha1_git, dir_path=None, with_data=False):
     """Return information on directory pointed by revision with sha1_git.
     If dir_path is not provided, display top level directory.
     Otherwise, display the directory pointed by dir_path (if it exists).
 
     Args:
         sha1_git: revision's hash.
         dir_path: optional directory pointed to by that revision.
         with_data: boolean indicating whether to retrieve the raw data if
         the path resolves to a content. Defaults to False (for the api).
 
     Returns:
         Information on the directory pointed to by that revision.
 
     Raises:
         BadInputExc in case of unknown algo_hash or bad hash.
         NotFoundExc either if the revision is not found or the path referenced
         does not exist.
         NotImplementedError if dir_path exists but references neither a
         'dir' nor a 'file' type.
 
     """
     sha1_git_bin = _to_sha1_bin(sha1_git)
     revision = _first_element(storage.revision_get([sha1_git_bin]))
     if not revision:
         raise NotFoundExc('Revision %s not found' % sha1_git)
     dir_sha1_git_bin = revision['directory']
     if dir_path:
         paths = dir_path.strip(os.path.sep).split(os.path.sep)
         entity = storage.directory_entry_get_by_path(
             dir_sha1_git_bin, list(map(lambda p: p.encode('utf-8'), paths)))
         if not entity:
             raise NotFoundExc(
                 "Directory or File '%s' pointed to by revision %s not found"
                 % (dir_path, sha1_git))
     else:
         entity = {'type': 'dir', 'target': dir_sha1_git_bin}
     if entity['type'] == 'dir':
         directory_entries = storage.directory_ls(entity['target']) or []
         return {'type': 'dir',
                 'path': '.' if not dir_path else dir_path,
                 'revision': sha1_git,
                 'content': list(map(converters.from_directory_entry,
                                     directory_entries))}
     elif entity['type'] == 'file':  # content
         content = _first_element(
             storage.content_find({'sha1_git': entity['target']}))
         if not content:
             raise NotFoundExc('Content not found for revision %s'
                               % sha1_git)
         if with_data:
             c = _first_element(storage.content_get([content['sha1']]))
             content['data'] = c['data']
         return {'type': 'file',
                 'path': '.' if not dir_path else dir_path,
                 'revision': sha1_git,
                 'content': converters.from_content(content)}
     elif entity['type'] == 'rev':  # revision
         revision = next(storage.revision_get([entity['target']]))
         return {'type': 'rev',
                 'path': '.' if not dir_path else dir_path,
                 'revision': sha1_git,
                 'content': converters.from_revision(revision)}
     else:
         raise NotImplementedError('Entity of type %s not implemented.'
                                   % entity['type'])
 
 
 def lookup_content(q):
     """Lookup the content designed by q.
 
     Args:
         q: The release's sha1 as hexadecimal
 
     Raises:
         NotFoundExc if the requested content is not found
 
     """
     algo, hash = query.parse_hash(q)
     c = _first_element(storage.content_find({algo: hash}))
     if not c:
         raise NotFoundExc('Content with %s checksum equals to %s not found!' %
                           (algo, hashutil.hash_to_hex(hash)))
     return converters.from_content(c)
 
 
 def lookup_content_raw(q):
     """Lookup the content defined by q.
 
     Args:
         q: query string of the form <hash_algo:hash>
 
     Returns:
         dict with 'sha1' and 'data' keys, where 'data' holds the raw
         content bytes.
 
     Raises:
         NotFoundExc if the requested content is not found or
         if the content bytes are not available in the storage
 
     """
     c = lookup_content(q)
     content_sha1_bytes = hashutil.hash_to_bytes(c['checksums']['sha1'])
     content = _first_element(storage.content_get([content_sha1_bytes]))
     if not content:
         algo, hash = query.parse_hash(q)
         raise NotFoundExc('Bytes of content with %s checksum equals to %s '
                           'are not available!' %
                           (algo, hashutil.hash_to_hex(hash)))
     return converters.from_content(content)
 
 
 def stat_counters():
     """Return the stat counters for Software Heritage
 
     Returns:
         A dict mapping textual labels to integer values.
     """
     return storage.stat_counters()
 
 
 def _lookup_origin_visits(origin_url, last_visit=None, limit=10):
     """Yields the origin origins' visits.
 
     Args:
         origin_url (str): origin to list visits for
         last_visit (int): last visit to lookup from
         limit (int): maximum number of visits to return
 
     Yields:
        Dictionaries of origin_visit for that origin
 
     """
     limit = min(limit, MAX_LIMIT)
     for visit in storage.origin_visit_get(
             origin_url, last_visit=last_visit, limit=limit):
         visit['origin'] = origin_url
         yield visit
 
 
 def lookup_origin_visits(origin, last_visit=None, per_page=10):
     """Yields the origin origins' visits.
 
     Args:
         origin: origin to list visits for
 
     Yields:
        Dictionaries of origin_visit for that origin
 
     """
     visits = _lookup_origin_visits(origin, last_visit=last_visit,
                                    limit=per_page)
     for visit in visits:
         yield converters.from_origin_visit(visit)
 
 
 def lookup_origin_visit_latest(origin_url, require_snapshot):
     """Return the origin's latest visit
 
     Args:
         origin_url (str): origin to list visits for
         require_snapshot (bool): filter out origins without a snapshot
 
     Returns:
        dict: The origin_visit concerned
 
     """
     visit = storage.origin_visit_get_latest(
         origin_url, require_snapshot=require_snapshot)
     if isinstance(visit['origin'], int):
         # soon-to-be-legacy origin ids
         visit['origin'] = storage.origin_get({'id': visit['origin']})['url']
     return converters.from_origin_visit(visit)
 
 
 def lookup_origin_visit(origin_url, visit_id):
     """Return information about visit visit_id with origin origin.
 
     Args:
         origin (str): origin concerned by the visit
         visit_id: the visit identifier to lookup
 
     Yields:
        The dict origin_visit concerned
 
     """
     visit = storage.origin_visit_get_by(origin_url, visit_id)
     if not visit:
         raise NotFoundExc('Origin %s or its visit '
                           'with id %s not found!' % (origin_url, visit_id))
     visit['origin'] = origin_url
     return converters.from_origin_visit(visit)
 
 
 def lookup_snapshot_size(snapshot_id):
     """Count the number of branches in the snapshot with the given id
 
     Args:
         snapshot_id (str): sha1 identifier of the snapshot
 
     Returns:
         dict: A dict whose keys are the target types of the branches and
         whose values are the number of branches of each type
     """
     snapshot_id_bin = _to_sha1_bin(snapshot_id)
     snapshot_size = storage.snapshot_count_branches(snapshot_id_bin)
     if 'revision' not in snapshot_size:
         snapshot_size['revision'] = 0
     if 'release' not in snapshot_size:
         snapshot_size['release'] = 0
+    # adjust revision / release count for display if aliases are defined
+    if 'alias' in snapshot_size:
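+        # fetch all the alias branches contained in the snapshot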
+        aliases = lookup_snapshot(snapshot_id,
+                                  branches_count=snapshot_size['alias'],
+                                  target_types=['alias'])
+        for alias in aliases['branches'].values():
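+            # an alias targets another branch by name: count it as a
+            # revision if the branch it resolves to targets a revision,
+            # as a release otherwise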
+            if lookup_snapshot(snapshot_id,
+                               branches_from=alias['target'],
+                               branches_count=1,
+                               target_types=['revision']):
+                snapshot_size['revision'] += 1
+            else:
+                snapshot_size['release'] += 1
+        del snapshot_size['alias']
     return snapshot_size
 
 
 def lookup_snapshot(snapshot_id, branches_from='', branches_count=1000,
                     target_types=None):
     """Return information about a snapshot, aka the list of named
     branches found during a specific visit of an origin.
 
     Args:
         snapshot_id (str): sha1 identifier of the snapshot
         branches_from (str): optional parameter used to skip branches
             whose name is lexicographically less than it before
             returning them
         branches_count (int): optional parameter used to limit the
             number of returned branches
         target_types (list): optional parameter used to filter the
             target types of branch to return (possible values that can be
             contained in that list are `'content', 'directory',
             'revision', 'release', 'snapshot', 'alias'`)
 
     Returns:
         A dict filled with the snapshot content.
     """
     snapshot_id_bin = _to_sha1_bin(snapshot_id)
     snapshot = storage.snapshot_get_branches(snapshot_id_bin,
                                              branches_from.encode(),
                                              branches_count, target_types)
     if not snapshot:
         raise NotFoundExc('Snapshot with id %s not found!' % snapshot_id)
     return converters.from_snapshot(snapshot)
 
 
 def lookup_latest_origin_snapshot(origin, allowed_statuses=None):
     """Return information about the latest snapshot of an origin.
 
     .. warning:: At most 1000 branches contained in the snapshot
         will be returned for performance reasons.
 
     Args:
         origin: URL or integer identifier of the origin
         allowed_statuses: list of visit statuses considered
             to find the latest snapshot for the visit. For instance,
             ``allowed_statuses=['full']`` will only consider visits that
             have successfully run to completion.
 
     Returns:
         A dict filled with the snapshot content.
     """
     snapshot = storage.snapshot_get_latest(origin, allowed_statuses)
     return converters.from_snapshot(snapshot)
 
 
 def lookup_revision_through(revision, limit=100):
     """Retrieve a revision from the criterion stored in revision dictionary.
 
     Args:
         revision: Dictionary of criteria to lookup the revision with.
         The supported combinations of keys are:
         - origin_url, branch_name, ts, sha1_git
         - origin_url, branch_name, ts
         - sha1_git_root, sha1_git
         - sha1_git
 
     Returns:
         None if the revision is not found or the actual revision.
 
     """
     if (
             'origin_url' in revision and
             'branch_name' in revision and
             'ts' in revision and
             'sha1_git' in revision):
         return lookup_revision_with_context_by(revision['origin_url'],
                                                revision['branch_name'],
                                                revision['ts'],
                                                revision['sha1_git'],
                                                limit)
     if (
             'origin_url' in revision and
             'branch_name' in revision and
             'ts' in revision):
         return lookup_revision_by(revision['origin_url'],
                                   revision['branch_name'],
                                   revision['ts'])
     if (
             'sha1_git_root' in revision and
             'sha1_git' in revision):
         return lookup_revision_with_context(revision['sha1_git_root'],
                                             revision['sha1_git'],
                                             limit)
     if 'sha1_git' in revision:
         return lookup_revision(revision['sha1_git'])
 
     # this should not happen
     raise NotImplementedError('Should not happen!')
 
 
 def lookup_directory_through_revision(revision, path=None,
                                       limit=100, with_data=False):
     """Retrieve the directory information from the revision.
 
     Args:
         revision: dictionary of criterion representing a revision to lookup
         path: directory's path to lookup.
         limit: optional query parameter to limit the revisions log (defaults
             to 100). For now, note that this limit could prevent correctly
             concluding that sha1_git is not an ancestor of sha1_git_root.
         with_data: indicates whether to retrieve the content's raw data if
             path resolves to a content.
 
     Returns:
         The directory pointed to by the revision criteria at path.
 
     """
     rev = lookup_revision_through(revision, limit)
 
     if not rev:
         raise NotFoundExc('Revision with criterion %s not found!' % revision)
     return (rev['id'],
             lookup_directory_with_revision(rev['id'], path, with_data))
 
 
 def vault_cook(obj_type, obj_id, email=None):
     """Cook a vault bundle.
     """
     return vault.cook(obj_type, obj_id, email=email)
 
 
 def vault_fetch(obj_type, obj_id):
     """Fetch a vault bundle.
     """
     return vault.fetch(obj_type, obj_id)
 
 
 def vault_progress(obj_type, obj_id):
     """Get the current progress of a vault bundle.
     """
     return vault.progress(obj_type, obj_id)
 
 
 def diff_revision(rev_id):
     """Get the list of file changes (insertion / deletion / modification /
     renaming) for a particular revision.
     """
     rev_sha1_git_bin = _to_sha1_bin(rev_id)
 
     changes = diff.diff_revision(storage, rev_sha1_git_bin,
                                  track_renaming=True)
 
     for change in changes:
         change['from'] = converters.from_directory_entry(change['from'])
         change['to'] = converters.from_directory_entry(change['to'])
         if change['from_path']:
             change['from_path'] = change['from_path'].decode('utf-8')
         if change['to_path']:
             change['to_path'] = change['to_path'].decode('utf-8')
 
     return changes
 
 
 class _RevisionsWalkerProxy(object):
     """
     Proxy class wrapping a revisions walker iterator from
     swh-storage and performing needed conversions.
     """
     def __init__(self, rev_walker_type, rev_start, *args, **kwargs):
         rev_start_bin = hashutil.hash_to_bytes(rev_start)
         self.revisions_walker = \
             revisions_walker.get_revisions_walker(rev_walker_type,
                                                   storage,
                                                   rev_start_bin,
                                                   *args, **kwargs)
 
     def export_state(self):
         return self.revisions_walker.export_state()
 
     def __next__(self):
         return converters.from_revision(next(self.revisions_walker))
 
     def __iter__(self):
         return self
 
 
 def get_revisions_walker(rev_walker_type, rev_start, *args, **kwargs):
     """
     Utility function to instantiate a revisions walker of a given type,
     see :mod:`swh.storage.algos.revisions_walker`.
 
     Args:
         rev_walker_type (str): the type of revisions walker to return,
             possible values are: ``committer_date``, ``dfs``, ``dfs_post``,
             ``bfs`` and ``path``
         rev_start (str): hexadecimal representation of a revision identifier
         args (list): positional arguments to pass to the revisions walker
             constructor
         kwargs (dict): keyword arguments to pass to the revisions walker
             constructor
 
     """
     # first check if the provided revision is valid
     lookup_revision(rev_start)
     return _RevisionsWalkerProxy(rev_walker_type, rev_start, *args, **kwargs)
diff --git a/swh/web/tests/browse/test_utils.py b/swh/web/tests/browse/test_utils.py
index 2858eaf8..c18959d9 100644
--- a/swh/web/tests/browse/test_utils.py
+++ b/swh/web/tests/browse/test_utils.py
@@ -1,123 +1,130 @@
 # Copyright (C) 2017-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from hypothesis import given
 
 from swh.web.browse import utils
 from swh.web.common.utils import reverse, format_utc_iso_date
 from swh.web.tests.strategies import origin_with_multiple_visits
 from swh.web.tests.testcase import WebTestCase
 
 
 class SwhBrowseUtilsTestCase(WebTestCase):
 
     def test_get_mimetype_and_encoding_for_content(self):
         text = b'Hello world!'
         self.assertEqual(utils.get_mimetype_and_encoding_for_content(text),
                          ('text/plain', 'us-ascii'))
 
     @given(origin_with_multiple_visits())
     def test_get_origin_visit_snapshot_simple(self, origin):
 
         visits = self.origin_visit_get(origin['url'])
 
         for visit in visits:
 
             snapshot = self.snapshot_get(visit['snapshot'])
             branches = []
             releases = []
 
-            for branch in sorted(snapshot['branches'].keys()):
-                branch_data = snapshot['branches'][branch]
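+            # branch processing is factored into a helper so that alias
+            # branches can be handled through their resolved target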
+            def _process_branch_data(branch, branch_data):
                 if branch_data['target_type'] == 'revision':
                     rev_data = self.revision_get(branch_data['target'])
                     branches.append({
                         'name': branch,
                         'revision': branch_data['target'],
                         'directory': rev_data['directory'],
                         'date': format_utc_iso_date(rev_data['date']),
                         'message': rev_data['message']
                     })
                 elif branch_data['target_type'] == 'release':
                     rel_data = self.release_get(branch_data['target'])
                     rev_data = self.revision_get(rel_data['target'])
                     releases.append({
                         'name': rel_data['name'],
                         'branch_name': branch,
                         'date': format_utc_iso_date(rel_data['date']),
                         'id': rel_data['id'],
                         'message': rel_data['message'],
                         'target_type': rel_data['target_type'],
                         'target': rel_data['target'],
                         'directory': rev_data['directory']
                     })
 
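+            # aliases are dereferenced to the branch they target, which
+            # is then processed under the alias name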
+            for branch in sorted(snapshot['branches'].keys()):
+                branch_data = snapshot['branches'][branch]
+                if branch_data['target_type'] == 'alias':
+                    target_data = snapshot['branches'][branch_data['target']]
+                    _process_branch_data(branch, target_data)
+                else:
+                    _process_branch_data(branch, branch_data)
+
             assert branches and releases, 'Incomplete test data.'
 
             origin_visit_branches = utils.get_origin_visit_snapshot(
                 origin, visit_id=visit['visit'])
 
             self.assertEqual(origin_visit_branches, (branches, releases))
 
     def test_gen_link(self):
         self.assertEqual(
             utils.gen_link('https://www.softwareheritage.org/', 'swh'),
             '<a href="https://www.softwareheritage.org/">swh</a>')
 
     def test_gen_revision_link(self):
         revision_id = '28a0bc4120d38a394499382ba21d6965a67a3703'
         revision_url = reverse('browse-revision',
                                url_args={'sha1_git': revision_id})
 
         self.assertEqual(utils.gen_revision_link(revision_id, link_text=None,
                                                  link_attrs=None),
                          '<a href="%s">%s</a>' % (revision_url, revision_id))
         self.assertEqual(
             utils.gen_revision_link(revision_id, shorten_id=True,
                                     link_attrs=None),
             '<a href="%s">%s</a>' % (revision_url, revision_id[:7]))
 
     def test_gen_person_mail_link(self):
         person_full = {
             'name': 'John Doe',
             'email': 'john.doe@swh.org',
             'fullname': 'John Doe <john.doe@swh.org>'
         }
 
         self.assertEqual(
             utils.gen_person_mail_link(person_full),
             '<a href="mailto:%s">%s</a>' % (person_full['email'],
                                             person_full['name'])
         )
 
         link_text = 'Mail'
         self.assertEqual(
             utils.gen_person_mail_link(person_full, link_text=link_text),
             '<a href="mailto:%s">%s</a>' % (person_full['email'],
                                             link_text)
         )
 
         person_partial_email = {
             'name': None,
             'email': None,
             'fullname': 'john.doe@swh.org'
         }
 
         self.assertEqual(
             utils.gen_person_mail_link(person_partial_email),
             '<a href="mailto:%s">%s</a>' % (person_partial_email['fullname'],
                                             person_partial_email['fullname'])
         )
 
         person_partial = {
             'name': None,
             'email': None,
             'fullname': 'John Doe <john.doe@swh.org>'
         }
 
         self.assertEqual(
             utils.gen_person_mail_link(person_partial),
             person_partial['fullname']
         )
diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py
index aa072946..67d1a3c9 100644
--- a/swh/web/tests/browse/views/test_origin.py
+++ b/swh/web/tests/browse/views/test_origin.py
@@ -1,914 +1,914 @@
 # Copyright (C) 2017-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import random
 
 from unittest.mock import patch
 
 from django.utils.html import escape
 
 from hypothesis import given
 
 from swh.model.hashutil import hash_to_bytes
 from swh.web.browse.utils import process_snapshot_branches
 from swh.web.common.exc import NotFoundExc
 from swh.web.common.utils import (
     reverse, gen_path_info, format_utc_iso_date,
     parse_timestamp, get_swh_persistent_id
 )
 from swh.web.tests.data import get_content
 from swh.web.tests.strategies import (
     origin, origin_with_multiple_visits, new_origin,
     new_snapshot, visit_dates, revisions
 )
 from swh.web.tests.testcase import WebTestCase
 
 
 class SwhBrowseOriginTest(WebTestCase):
 
     @given(origin_with_multiple_visits())
     def test_origin_visits_browse(self, origin):
 
         url = reverse('browse-origin-visits',
                       url_args={'origin_url': origin['url']})
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('origin-visits.html')
 
         visits = self.origin_visit_get(origin['url'])
 
         for v in visits:
             vdate = format_utc_iso_date(v['date'], '%Y-%m-%dT%H:%M:%SZ')
             browse_dir_url = reverse('browse-origin-directory',
                                      url_args={'origin_url': origin['url'],
                                                'timestamp': vdate})
             self.assertContains(resp, browse_dir_url)
 
     def origin_content_view_helper(self, origin_info, origin_visits,
                                    origin_branches, origin_releases,
                                    root_dir_sha1, content,
                                    visit_id=None, timestamp=None):
 
         content_path = '/'.join(content['path'].split('/')[1:])
 
         url_args = {'origin_url': origin_info['url'],
                     'path': content_path}
 
         if not visit_id:
             visit_id = origin_visits[-1]['visit']
 
         query_params = {}
 
         if timestamp:
             url_args['timestamp'] = timestamp
 
         if visit_id:
             query_params['visit_id'] = visit_id
 
         url = reverse('browse-origin-content',
                       url_args=url_args,
                       query_params=query_params)
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('content.html')
 
         self.assertContains(resp, '<code class="%s">' %
                                   content['hljs_language'])
         self.assertContains(resp, escape(content['data']))
 
         split_path = content_path.split('/')
 
         filename = split_path[-1]
         path = content_path.replace(filename, '')[:-1]
 
         path_info = gen_path_info(path)
 
         del url_args['path']
 
         if timestamp:
             url_args['timestamp'] = \
                 format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
                                     '%Y-%m-%dT%H:%M:%S')
 
         root_dir_url = reverse('browse-origin-directory',
                                url_args=url_args,
                                query_params=query_params)
 
         self.assertContains(resp, '<li class="swh-path">',
                             count=len(path_info)+1)
 
         self.assertContains(resp, '<a href="%s">%s</a>' %
                             (root_dir_url, root_dir_sha1[:7]))
 
         for p in path_info:
             url_args['path'] = p['path']
             dir_url = reverse('browse-origin-directory',
                               url_args=url_args,
                               query_params=query_params)
             self.assertContains(resp, '<a href="%s">%s</a>' %
                                 (dir_url, p['name']))
 
         self.assertContains(resp, '<li>%s</li>' % filename)
 
         query_string = 'sha1_git:' + content['sha1_git']
 
         url_raw = reverse('browse-content-raw',
                           url_args={'query_string': query_string},
                           query_params={'filename': filename})
         self.assertContains(resp, url_raw)
 
         if 'path' in url_args:
             del url_args['path']
 
         origin_branches_url = reverse('browse-origin-branches',
                                       url_args=url_args,
                                       query_params=query_params)
 
         self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                   (origin_branches_url, len(origin_branches)))
 
         origin_releases_url = reverse('browse-origin-releases',
                                       url_args=url_args,
                                       query_params=query_params)
 
         self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                   (origin_releases_url, len(origin_releases)))
 
         self.assertContains(resp, '<li class="swh-branch">',
                             count=len(origin_branches))
 
         url_args['path'] = content_path
 
         for branch in origin_branches:
             query_params['branch'] = branch['name']
             root_dir_branch_url = reverse('browse-origin-content',
                                           url_args=url_args,
                                           query_params=query_params)
 
             self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
 
         self.assertContains(resp, '<li class="swh-release">',
                             count=len(origin_releases))
 
         query_params['branch'] = None
         for release in origin_releases:
             query_params['release'] = release['name']
             root_dir_release_url = reverse('browse-origin-content',
                                            url_args=url_args,
                                            query_params=query_params)
 
             self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
 
         url = reverse('browse-origin-content',
                       url_args=url_args,
                       query_params=query_params)
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('content.html')
 
         swh_cnt_id = get_swh_persistent_id('content', content['sha1_git'])
         swh_cnt_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_cnt_id})
         self.assertContains(resp, swh_cnt_id)
         self.assertContains(resp, swh_cnt_id_url)
 
         self.assertContains(resp, 'swh-take-new-snapshot')
 
     @given(origin_with_multiple_visits())
     def test_origin_content_view(self, origin):
 
         origin_visits = self.origin_visit_get(origin['url'])
 
         def _get_test_data(visit_idx):
             snapshot = self.snapshot_get(origin_visits[visit_idx]['snapshot'])
-            head_rev_id = snapshot['branches']['HEAD']['target']
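+            # snapshot_get_head is assumed to resolve the snapshot HEAD
+            # branch, following an alias if needed, to the head revision id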
+            head_rev_id = self.snapshot_get_head(snapshot)
             head_rev = self.revision_get(head_rev_id)
             dir_content = self.directory_ls(head_rev['directory'])
             dir_files = [e for e in dir_content if e['type'] == 'file']
             dir_file = random.choice(dir_files)
             branches, releases = process_snapshot_branches(snapshot)
             return {
                 'branches': branches,
                 'releases': releases,
                 'root_dir_sha1': head_rev['directory'],
                 'content': get_content(dir_file['checksums']['sha1']),
                 'visit': origin_visits[visit_idx]
             }
 
         test_data = _get_test_data(-1)
 
         self.origin_content_view_helper(origin,
                                         origin_visits,
                                         test_data['branches'],
                                         test_data['releases'],
                                         test_data['root_dir_sha1'],
                                         test_data['content'])
 
         self.origin_content_view_helper(origin,
                                         origin_visits,
                                         test_data['branches'],
                                         test_data['releases'],
                                         test_data['root_dir_sha1'],
                                         test_data['content'],
                                         timestamp=test_data['visit']['date'])
 
         visit_unix_ts = parse_timestamp(test_data['visit']['date']).timestamp()
         visit_unix_ts = int(visit_unix_ts)
 
         self.origin_content_view_helper(origin,
                                         origin_visits,
                                         test_data['branches'],
                                         test_data['releases'],
                                         test_data['root_dir_sha1'],
                                         test_data['content'],
                                         timestamp=visit_unix_ts)
 
         test_data = _get_test_data(0)
 
         self.origin_content_view_helper(origin,
                                         origin_visits,
                                         test_data['branches'],
                                         test_data['releases'],
                                         test_data['root_dir_sha1'],
                                         test_data['content'],
                                         visit_id=test_data['visit']['visit'])
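
        # Note: `snapshot_get_head` called above is added to WebTestCase in
        # swh/web/tests/testcase.py (that hunk is truncated at the end of
        # this patch). A minimal sketch of what it presumably does, namely
        # dereferencing a possible HEAD alias before returning the target:
        #
        #     @staticmethod
        #     def snapshot_get_head(snapshot):
        #         head = snapshot['branches']['HEAD']
        #         if head['target_type'] == 'alias':
        #             head = snapshot['branches'][head['target']]
        #         return head['target']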
 
     def origin_directory_view_helper(self, origin_info, origin_visits,
                                      origin_branches, origin_releases,
                                      root_directory_sha1, directory_entries,
                                      visit_id=None, timestamp=None, path=None):
 
         dirs = [e for e in directory_entries
                 if e['type'] in ('dir', 'rev')]
         files = [e for e in directory_entries
                  if e['type'] == 'file']
 
         if not visit_id:
             visit_id = origin_visits[-1]['visit']
 
         url_args = {'origin_url': origin_info['url']}
 
         query_params = {}
 
         if timestamp:
             url_args['timestamp'] = timestamp
         else:
             query_params['visit_id'] = visit_id
 
         if path:
             url_args['path'] = path
 
         url = reverse('browse-origin-directory',
                       url_args=url_args,
                       query_params=query_params)
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('directory.html')
 
         self.assertContains(resp, '<td class="swh-directory">',
                             count=len(dirs))
         self.assertContains(resp, '<td class="swh-content">',
                             count=len(files))
 
         if timestamp:
             url_args['timestamp'] = \
                 format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
                                     '%Y-%m-%dT%H:%M:%S')
 
         for d in dirs:
             if d['type'] == 'rev':
                 dir_url = reverse('browse-revision',
                                   url_args={'sha1_git': d['target']})
             else:
                 dir_path = d['name']
                 if path:
                     dir_path = "%s/%s" % (path, d['name'])
                 dir_url_args = dict(url_args)
                 dir_url_args['path'] = dir_path
                 dir_url = reverse('browse-origin-directory',
                                   url_args=dir_url_args,
                                   query_params=query_params)
             self.assertContains(resp, dir_url)
 
         for f in files:
             file_path = f['name']
             if path:
                 file_path = "%s/%s" % (path, f['name'])
             file_url_args = dict(url_args)
             file_url_args['path'] = file_path
             file_url = reverse('browse-origin-content',
                                url_args=file_url_args,
                                query_params=query_params)
             self.assertContains(resp, file_url)
 
         if 'path' in url_args:
             del url_args['path']
 
         root_dir_branch_url = \
             reverse('browse-origin-directory',
                     url_args=url_args,
                     query_params=query_params)
 
         nb_bc_paths = 1
         if path:
             nb_bc_paths = len(path.split('/')) + 1
 
         self.assertContains(resp, '<li class="swh-path">', count=nb_bc_paths)
         self.assertContains(resp, '<a href="%s">%s</a>' %
                                   (root_dir_branch_url,
                                    root_directory_sha1[:7]))
 
         origin_branches_url = reverse('browse-origin-branches',
                                       url_args=url_args,
                                       query_params=query_params)
 
         self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                   (origin_branches_url, len(origin_branches)))
 
         origin_releases_url = reverse('browse-origin-releases',
                                       url_args=url_args,
                                       query_params=query_params)
 
         nb_releases = len(origin_releases)
         if nb_releases > 0:
             self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                       (origin_releases_url, nb_releases))
 
         if path:
             url_args['path'] = path
 
         self.assertContains(resp, '<li class="swh-branch">',
                             count=len(origin_branches))
 
         for branch in origin_branches:
             query_params['branch'] = branch['name']
             root_dir_branch_url = \
                 reverse('browse-origin-directory',
                         url_args=url_args,
                         query_params=query_params)
 
             self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
 
         self.assertContains(resp, '<li class="swh-release">',
                             count=len(origin_releases))
 
         query_params['branch'] = None
         for release in origin_releases:
             query_params['release'] = release['name']
             root_dir_release_url = \
                 reverse('browse-origin-directory',
                         url_args=url_args,
                         query_params=query_params)
 
             self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
 
         self.assertContains(resp, 'vault-cook-directory')
         self.assertContains(resp, 'vault-cook-revision')
 
         swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
         swh_dir_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_dir_id})
         self.assertContains(resp, swh_dir_id)
         self.assertContains(resp, swh_dir_id_url)
 
         self.assertContains(resp, 'swh-take-new-snapshot')
 
     @given(origin())
     def test_origin_root_directory_view(self, origin):
 
         origin_visits = self.origin_visit_get(origin['url'])
 
         visit = origin_visits[-1]
         snapshot = self.snapshot_get(visit['snapshot'])
-        head_rev_id = snapshot['branches']['HEAD']['target']
+        head_rev_id = self.snapshot_get_head(snapshot)
         head_rev = self.revision_get(head_rev_id)
         root_dir_sha1 = head_rev['directory']
         dir_content = self.directory_ls(root_dir_sha1)
         branches, releases = process_snapshot_branches(snapshot)
         visit_unix_ts = parse_timestamp(visit['date']).timestamp()
         visit_unix_ts = int(visit_unix_ts)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content,
                                           visit_id=visit['visit'])
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content,
                                           timestamp=visit_unix_ts)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content,
                                           timestamp=visit['date'])
 
         origin = dict(origin)
         del origin['type']
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content,
                                           visit_id=visit['visit'])
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content,
                                           timestamp=visit_unix_ts)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           dir_content,
                                           timestamp=visit['date'])
 
     @given(origin())
     def test_origin_sub_directory_view(self, origin):
 
         origin_visits = self.origin_visit_get(origin['url'])
 
         visit = origin_visits[-1]
         snapshot = self.snapshot_get(visit['snapshot'])
-        head_rev_id = snapshot['branches']['HEAD']['target']
+        head_rev_id = self.snapshot_get_head(snapshot)
         head_rev = self.revision_get(head_rev_id)
         root_dir_sha1 = head_rev['directory']
         subdirs = [e for e in self.directory_ls(root_dir_sha1)
                    if e['type'] == 'dir']
         branches, releases = process_snapshot_branches(snapshot)
         visit_unix_ts = parse_timestamp(visit['date']).timestamp()
         visit_unix_ts = int(visit_unix_ts)
 
         if len(subdirs) == 0:
             return
 
         subdir = random.choice(subdirs)
         subdir_content = self.directory_ls(subdir['target'])
         subdir_path = subdir['name']
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path,
                                           visit_id=visit['visit'])
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path,
                                           timestamp=visit_unix_ts)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path,
                                           timestamp=visit['date'])
 
         origin = dict(origin)
         del origin['type']
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path,
                                           visit_id=visit['visit'])
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path,
                                           timestamp=visit_unix_ts)
 
         self.origin_directory_view_helper(origin, origin_visits,
                                           branches,
                                           releases,
                                           root_dir_sha1,
                                           subdir_content,
                                           path=subdir_path,
                                           timestamp=visit['date'])
 
     def origin_branches_helper(self, origin_info, origin_snapshot):
         url_args = {'origin_url': origin_info['url']}
 
         url = reverse('browse-origin-branches',
                       url_args=url_args)
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('branches.html')
 
         origin_branches = origin_snapshot[0]
         origin_releases = origin_snapshot[1]
 
         origin_branches_url = reverse('browse-origin-branches',
                                       url_args=url_args)
 
         self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                   (origin_branches_url, len(origin_branches)))
 
         origin_releases_url = reverse('browse-origin-releases',
                                       url_args=url_args)
 
         nb_releases = len(origin_releases)
         if nb_releases > 0:
             self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                       (origin_releases_url, nb_releases))
 
         self.assertContains(resp, '<tr class="swh-branch-entry',
                             count=len(origin_branches))
 
         for branch in origin_branches:
             browse_branch_url = reverse(
                 'browse-origin-directory',
                 url_args={'origin_url': origin_info['url']},
                 query_params={'branch': branch['name']})
             self.assertContains(resp, '<a href="%s">' %
                                       escape(browse_branch_url))
 
             browse_revision_url = reverse(
                 'browse-revision',
                 url_args={'sha1_git': branch['revision']},
                 query_params={'origin': origin_info['url']})
             self.assertContains(resp, '<a href="%s">' %
                                       escape(browse_revision_url))
 
     @given(origin())
     def test_origin_branches(self, origin):
 
         origin_visits = self.origin_visit_get(origin['url'])
 
         visit = origin_visits[-1]
         snapshot = self.snapshot_get(visit['snapshot'])
         snapshot_content = process_snapshot_branches(snapshot)
 
         self.origin_branches_helper(origin, snapshot_content)
 
         origin = dict(origin)
         origin['type'] = None
 
         self.origin_branches_helper(origin, snapshot_content)
 
     def origin_releases_helper(self, origin_info, origin_snapshot):
         url_args = {'origin_url': origin_info['url']}
 
         url = reverse('browse-origin-releases',
                       url_args=url_args)
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('releases.html')
 
         origin_branches = origin_snapshot[0]
         origin_releases = origin_snapshot[1]
 
         origin_branches_url = reverse('browse-origin-branches',
                                       url_args=url_args)
 
         self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                   (origin_branches_url, len(origin_branches)))
 
         origin_releases_url = reverse('browse-origin-releases',
                                       url_args=url_args)
 
         nb_releases = len(origin_releases)
         if nb_releases > 0:
             self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                       (origin_releases_url, nb_releases))
 
         self.assertContains(resp, '<tr class="swh-release-entry',
                             count=nb_releases)
 
         for release in origin_releases:
             browse_release_url = reverse(
                 'browse-release',
                 url_args={'sha1_git': release['id']},
                 query_params={'origin': origin_info['url']})
             browse_revision_url = reverse(
                 'browse-revision',
                 url_args={'sha1_git': release['target']},
                 query_params={'origin': origin_info['url']})
 
             self.assertContains(resp, '<a href="%s">' %
                                       escape(browse_release_url))
             self.assertContains(resp, '<a href="%s">' %
                                       escape(browse_revision_url))
 
     @given(origin())
     def test_origin_releases(self, origin):
 
         origin_visits = self.origin_visit_get(origin['url'])
 
         visit = origin_visits[-1]
         snapshot = self.snapshot_get(visit['snapshot'])
         snapshot_content = process_snapshot_branches(snapshot)
 
         self.origin_releases_helper(origin, snapshot_content)
 
         origin = dict(origin)
         origin['type'] = None
 
         self.origin_releases_helper(origin, snapshot_content)
 
     @given(new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(),
            revisions(min_size=3, max_size=3))
     def test_origin_snapshot_null_branch(self, new_origin, new_snapshot,
                                          visit_dates, revisions):
         snp_dict = new_snapshot.to_dict()
         new_origin = self.storage.origin_add([new_origin])[0]
         for i, branch in enumerate(snp_dict['branches'].keys()):
             if i == 0:
                 snp_dict['branches'][branch] = None
             else:
                 snp_dict['branches'][branch] = {
                     'target_type': 'revision',
                     'target': hash_to_bytes(revisions[i-1]),
                 }
 
         self.storage.snapshot_add([snp_dict])
         visit = self.storage.origin_visit_add(
             new_origin['url'], visit_dates[0], type='git')
         self.storage.origin_visit_update(new_origin['url'], visit['visit'],
                                          status='partial',
                                          snapshot=snp_dict['id'])
 
         url = reverse('browse-origin-directory',
                       url_args={'origin_url': new_origin['url']})
         rv = self.client.get(url)
         self.assertEqual(rv.status_code, 200)
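
        # For reference, the snapshot stored above contains one dangling
        # (null) branch; illustrative shape, with the hypothesis-generated
        # branch names shown as placeholders:
        #
        #     snp_dict == {
        #         'id': ...,
        #         'branches': {
        #             b'<branch-0>': None,  # null branch
        #             b'<branch-1>': {'target_type': 'revision',
        #                             'target': ...},
        #             ...
        #         },
        #     }
        #
        # The browse view must tolerate the null branch and still answer
        # with HTTP 200.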
 
     @given(new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(),
            revisions(min_size=3, max_size=3))
     def test_origin_snapshot_invalid_branch(self, new_origin, new_snapshot,
                                             visit_dates, revisions):
         snp_dict = new_snapshot.to_dict()
         new_origin = self.storage.origin_add([new_origin])[0]
         for i, branch in enumerate(snp_dict['branches'].keys()):
             if i == 0:
                 invalid_branch = branch
             else:
                 snp_dict['branches'][branch] = {
                     'target_type': 'revision',
                     'target': hash_to_bytes(revisions[i-1]),
                 }
         del snp_dict['branches'][invalid_branch]
 
         self.storage.snapshot_add([snp_dict])
         visit = self.storage.origin_visit_add(
             new_origin['url'], visit_dates[0], type='git')
         self.storage.origin_visit_update(new_origin['url'], visit['visit'],
                                          status='full',
                                          snapshot=snp_dict['id'])
 
         url = reverse('browse-origin-directory',
                       url_args={'origin_url': new_origin['url']},
                       query_params={'branch': invalid_branch})
         rv = self.client.get(url)
         self.assertEqual(rv.status_code, 404)
 
     @patch('swh.web.browse.views.utils.snapshot_context.request_content')
     @patch('swh.web.common.origin_visits.get_origin_visits')
     @patch('swh.web.browse.utils.get_origin_visit_snapshot')
     @patch('swh.web.browse.utils.service')
     @patch('swh.web.browse.views.origin.service')
     @patch('swh.web.browse.views.utils.snapshot_context.service')
     def test_origin_request_errors(self,
                                    mock_snapshot_service,
                                    mock_origin_service,
                                    mock_utils_service,
                                    mock_get_origin_visit_snapshot,
                                    mock_get_origin_visits,
                                    mock_request_content):
 
         mock_origin_service.lookup_origin.side_effect = NotFoundExc(
             'origin not found')
         url = reverse('browse-origin-visits',
                       url_args={'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, 'origin not found', status_code=404)
 
         mock_origin_service.lookup_origin.side_effect = None
         mock_origin_service.lookup_origin.return_value = {'type': 'foo',
                                                           'url': 'bar',
                                                           'id': 457}
         mock_get_origin_visits.return_value = []
         url = reverse('browse-origin-directory',
                       url_args={'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, "No visit", status_code=404)
 
         mock_get_origin_visits.return_value = [{'visit': 1}]
         mock_get_origin_visit_snapshot.side_effect = \
             NotFoundExc('visit not found')
         url = reverse('browse-origin-directory',
                       url_args={'origin_url': 'bar'},
                       query_params={'visit_id': 2})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
 
         mock_get_origin_visits.return_value = [{
             'date': '2015-09-26T09:30:52.373449+00:00',
             'metadata': {},
             'origin': 457,
             'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
             'status': 'full',
             'visit': 1
         }]
         mock_get_origin_visit_snapshot.side_effect = None
         mock_get_origin_visit_snapshot.return_value = (
             [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
               'name': 'HEAD',
               'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
               'date': '04 May 2017, 13:27 UTC',
               'message': ''}],
             []
         )
         mock_utils_service.lookup_snapshot_size.return_value = {
             'revision': 1,
             'release': 0
         }
         mock_utils_service.lookup_directory.side_effect = \
             NotFoundExc('Directory not found')
         url = reverse('browse-origin-directory',
                       url_args={'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, 'Directory not found', status_code=404)
 
         with patch('swh.web.browse.views.utils.snapshot_context.'
                    'get_snapshot_context') as mock_get_snapshot_context:
             mock_get_snapshot_context.side_effect = \
                 NotFoundExc('Snapshot not found')
             url = reverse('browse-origin-directory',
                           url_args={'origin_url': 'bar'})
             resp = self.client.get(url)
             self.assertEqual(resp.status_code, 404)
             self.assertTemplateUsed('error.html')
             self.assertContains(resp, 'Snapshot not found', status_code=404)
 
         mock_origin_service.lookup_origin.side_effect = None
         mock_origin_service.lookup_origin.return_value = {'type': 'foo',
                                                           'url': 'bar',
                                                           'id': 457}
         mock_get_origin_visits.return_value = []
         url = reverse('browse-origin-content',
                       url_args={'origin_url': 'bar',
                                 'path': 'foo'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, "No visit", status_code=404)
 
         mock_get_origin_visits.return_value = [{'visit': 1}]
         mock_get_origin_visit_snapshot.side_effect = \
             NotFoundExc('visit not found')
         url = reverse('browse-origin-content',
                       url_args={'origin_url': 'bar',
                                 'path': 'foo'},
                       query_params={'visit_id': 2})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
 
         mock_get_origin_visits.return_value = [{
             'date': '2015-09-26T09:30:52.373449+00:00',
             'metadata': {},
             'origin': 457,
             'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
             'status': 'full',
             'visit': 1
         }]
         mock_get_origin_visit_snapshot.side_effect = None
         mock_get_origin_visit_snapshot.return_value = ([], [])
         url = reverse('browse-origin-content',
                       url_args={'origin_url': 'bar',
                                 'path': 'baz'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertRegex(resp.content.decode('utf-8'),
                          'Origin.*has an empty list of branches')
 
         mock_get_origin_visit_snapshot.return_value = (
             [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
               'name': 'HEAD',
               'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
               'date': '04 May 2017, 13:27 UTC',
               'message': ''}],
             []
         )
         mock_snapshot_service.lookup_directory_with_path.return_value = \
             {'target': '5ecd9f37b7a2d2e9980d201acd6286116f2ba1f1'}
         mock_request_content.side_effect = \
             NotFoundExc('Content not found')
         url = reverse('browse-origin-content',
                       url_args={'origin_url': 'bar',
                                 'path': 'baz'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, 'Content not found', status_code=404)
 
     @patch('swh.web.common.origin_visits.get_origin_visits')
     @patch('swh.web.browse.utils.get_origin_visit_snapshot')
     @patch('swh.web.browse.utils.service')
     def test_origin_empty_snapshot(self, mock_utils_service,
                                    mock_get_origin_visit_snapshot,
                                    mock_get_origin_visits):
 
         mock_get_origin_visits.return_value = [{
             'date': '2015-09-26T09:30:52.373449+00:00',
             'metadata': {},
             'origin': 457,
             'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
             'status': 'full',
             'type': 'git',
             'visit': 1
         }]
         mock_get_origin_visit_snapshot.return_value = ([], [])
         mock_utils_service.lookup_snapshot_size.return_value = {
             'revision': 0,
             'release': 0
         }
         mock_utils_service.lookup_origin.return_value = {
             'id': 457,
             'url': 'https://github.com/foo/bar'
         }
         url = reverse('browse-origin-directory',
                       url_args={'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('content.html')
         self.assertRegex(resp.content.decode('utf-8'), 'snapshot.*is empty')
diff --git a/swh/web/tests/browse/views/test_revision.py b/swh/web/tests/browse/views/test_revision.py
index 7856e8e4..e28e25cd 100644
--- a/swh/web/tests/browse/views/test_revision.py
+++ b/swh/web/tests/browse/views/test_revision.py
@@ -1,246 +1,246 @@
 # Copyright (C) 2017-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from django.utils.html import escape
 from hypothesis import given
 
 from swh.web.common.utils import (
     reverse, format_utc_iso_date, get_swh_persistent_id,
     parse_timestamp
 )
 from swh.web.tests.strategies import (
     origin, revision, unknown_revision, new_origin
 )
 from swh.web.tests.testcase import WebTestCase
 
 
 class SwhBrowseRevisionTest(WebTestCase):
 
     @given(revision())
     def test_revision_browse(self, revision):
 
         url = reverse('browse-revision',
                       url_args={'sha1_git': revision})
 
         revision_data = self.revision_get(revision)
 
         author_name = revision_data['author']['name']
         committer_name = revision_data['committer']['name']
         dir_id = revision_data['directory']
 
         directory_url = reverse('browse-directory',
                                 url_args={'sha1_git': dir_id})
 
         history_url = reverse('browse-revision-log',
                               url_args={'sha1_git': revision})
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/revision.html')
         self.assertContains(resp, author_name)
         self.assertContains(resp, committer_name)
         self.assertContains(resp, directory_url)
         self.assertContains(resp, history_url)
 
         for parent in revision_data['parents']:
             parent_url = reverse('browse-revision',
                                  url_args={'sha1_git': parent})
             self.assertContains(resp, '<a href="%s">%s</a>' %
                                 (parent_url, parent))
 
         author_date = revision_data['date']
         committer_date = revision_data['committer_date']
 
         message_lines = revision_data['message'].split('\n')
 
         self.assertContains(resp, format_utc_iso_date(author_date))
         self.assertContains(resp, format_utc_iso_date(committer_date))
         self.assertContains(resp, escape(message_lines[0]))
         self.assertContains(resp, escape('\n'.join(message_lines[1:])))
 
     @given(origin())
     def test_revision_origin_browse(self, origin):
 
         snapshot = self.snapshot_get_latest(origin['url'])
-        revision = snapshot['branches']['HEAD']['target']
+        revision = self.snapshot_get_head(snapshot)
         revision_data = self.revision_get(revision)
         dir_id = revision_data['directory']
 
         origin_revision_log_url = reverse('browse-origin-log',
                                           url_args={'origin_url': origin['url']}, # noqa
                                           query_params={'revision': revision})
 
         url = reverse('browse-revision',
                       url_args={'sha1_git': revision},
                       query_params={'origin': origin['url']})
 
         resp = self.client.get(url)
 
         self.assertContains(resp, origin_revision_log_url)
 
         for parent in revision_data['parents']:
             parent_url = reverse('browse-revision',
                                  url_args={'sha1_git': parent},
                                  query_params={'origin': origin['url']})
             self.assertContains(resp, '<a href="%s">%s</a>' %
                                 (parent_url, parent))
 
         self.assertContains(resp, 'vault-cook-directory')
         self.assertContains(resp, 'vault-cook-revision')
 
         swh_rev_id = get_swh_persistent_id('revision', revision)
         swh_rev_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_rev_id})
         self.assertContains(resp, swh_rev_id)
         self.assertContains(resp, swh_rev_id_url)
 
         swh_dir_id = get_swh_persistent_id('directory', dir_id)
         swh_dir_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_dir_id})
         self.assertContains(resp, swh_dir_id)
         self.assertContains(resp, swh_dir_id_url)
 
         self.assertContains(resp, 'swh-take-new-snapshot')
 
     @given(revision())
     def test_revision_log_browse(self, revision):
         per_page = 10
 
         revision_log = self.revision_log(revision)
 
         revision_log_sorted = \
             sorted(revision_log,
                    key=lambda rev: -parse_timestamp(
                        rev['committer_date']).timestamp())
 
         url = reverse('browse-revision-log',
                       url_args={'sha1_git': revision},
                       query_params={'per_page': per_page})
 
         resp = self.client.get(url)
 
         next_page_url = reverse('browse-revision-log',
                                 url_args={'sha1_git': revision},
                                 query_params={'offset': per_page,
                                               'per_page': per_page})
 
         nb_log_entries = per_page
         if len(revision_log_sorted) < per_page:
             nb_log_entries = len(revision_log_sorted)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/revision-log.html')
         self.assertContains(resp, '<tr class="swh-revision-log-entry',
                             count=nb_log_entries)
         self.assertContains(resp, '<a class="page-link">Newer</a>')
 
         if len(revision_log_sorted) > per_page:
             self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
                                 escape(next_page_url))
 
         for log in revision_log_sorted[:per_page]:
             revision_url = reverse('browse-revision',
                                    url_args={'sha1_git': log['id']})
             self.assertContains(resp, log['id'][:7])
             self.assertContains(resp, log['author']['name'])
             self.assertContains(resp, format_utc_iso_date(log['date']))
             self.assertContains(resp, escape(log['message']))
             self.assertContains(resp, format_utc_iso_date(log['committer_date'])) # noqa
             self.assertContains(resp, revision_url)
 
         if len(revision_log_sorted) <= per_page:
             return
 
         resp = self.client.get(next_page_url)
 
         prev_page_url = reverse('browse-revision-log',
                                 url_args={'sha1_git': revision},
                                 query_params={'per_page': per_page})
         next_page_url = reverse('browse-revision-log',
                                 url_args={'sha1_git': revision},
                                 query_params={'offset': 2 * per_page,
                                               'per_page': per_page})
 
         nb_log_entries = len(revision_log_sorted) - per_page
         if nb_log_entries > per_page:
             nb_log_entries = per_page
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/revision-log.html')
         self.assertContains(resp, '<tr class="swh-revision-log-entry',
                             count=nb_log_entries)
 
         self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
                             escape(prev_page_url))
 
         if len(revision_log_sorted) > 2 * per_page:
             self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
                                 escape(next_page_url))
 
         if len(revision_log_sorted) <= 2 * per_page:
             return
 
         resp = self.client.get(next_page_url)
 
         prev_page_url = reverse('browse-revision-log',
                                 url_args={'sha1_git': revision},
                                 query_params={'offset': per_page,
                                               'per_page': per_page})
         next_page_url = reverse('browse-revision-log',
                                 url_args={'sha1_git': revision},
                                 query_params={'offset': 3 * per_page,
                                               'per_page': per_page})
 
         nb_log_entries = len(revision_log_sorted) - 2 * per_page
         if nb_log_entries > per_page:
             nb_log_entries = per_page
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/revision-log.html')
         self.assertContains(resp, '<tr class="swh-revision-log-entry',
                             count=nb_log_entries)
         self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
                             escape(prev_page_url))
 
         if len(revision_log_sorted) > 3 * per_page:
             self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
                                 escape(next_page_url))
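
        # The paging exercised above is plain offset-based: page n
        # (0-indexed) of the log would be fetched with, illustratively:
        #
        #     reverse('browse-revision-log',
        #             url_args={'sha1_git': revision},
        #             query_params={'offset': n * per_page,
        #                           'per_page': per_page})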
 
     @given(revision(), unknown_revision(), new_origin())
     def test_revision_request_errors(self, revision, unknown_revision,
                                      new_origin):
 
         url = reverse('browse-revision',
                       url_args={'sha1_git': unknown_revision})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp,
                             'Revision with sha1_git %s not found' %
                             unknown_revision, status_code=404)
 
         url = reverse('browse-revision',
                       url_args={'sha1_git': revision},
                       query_params={'origin': new_origin['url']})
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, 'the origin mentioned in your request'
                                   ' appears broken', status_code=404)
 
     @given(revision())
     def test_revision_uppercase(self, revision):
         url = reverse('browse-revision-uppercase-checksum',
                       url_args={'sha1_git': revision.upper()})
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 302)
 
         redirect_url = reverse('browse-revision',
                                url_args={'sha1_git': revision})
 
         self.assertEqual(resp['location'], redirect_url)
diff --git a/swh/web/tests/strategies.py b/swh/web/tests/strategies.py
index be4c829b..e1c0fb92 100644
--- a/swh/web/tests/strategies.py
+++ b/swh/web/tests/strategies.py
@@ -1,512 +1,516 @@
 # Copyright (C) 2018-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import random
 
 from collections import defaultdict
 from datetime import datetime
 
 from hypothesis import settings, assume
 from hypothesis.strategies import (
     just, sampled_from, lists, composite, datetimes,
     binary, text, characters
 )
 
 from swh.model.hashutil import hash_to_hex, hash_to_bytes
 from swh.model.identifiers import directory_identifier
 from swh.storage.algos.revisions_walker import get_revisions_walker
 from swh.model.hypothesis_strategies import (
     origins as new_origin_strategy, snapshots as new_snapshot
 )
 from swh.web.tests.data import get_tests_data
 
 # Module dedicated to the generation of input data for tests through
 # the use of hypothesis.
 # Some of these data are sampled from a test archive created and populated
 # in the swh.web.tests.data module.
 
 # Set the swh-web hypothesis profile if none has been explicitly set
 hypothesis_default_settings = settings.get_profile('default')
 if repr(settings()) == repr(hypothesis_default_settings):
     settings.load_profile('swh-web')
 
 
 # The following strategies exploit the hypothesis capabilities
 
 
 def _filter_checksum(cs):
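    # Reject the all-zero checksum (its little-endian integer value is 0)
    # and any checksum already drawn, so that generated hashes are unique
    # and non-degenerate within a test session.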
     generated_checksums = get_tests_data()['generated_checksums']
     if not int.from_bytes(cs, byteorder='little') or \
             cs in generated_checksums:
         return False
     generated_checksums.add(cs)
     return True
 
 
 def _known_swh_object(object_type):
     return sampled_from(get_tests_data()[object_type])
 
 
 def sha1():
     """
     Hypothesis strategy returning a valid hexadecimal sha1 value.
     """
     return binary(
         min_size=20, max_size=20).filter(_filter_checksum).map(hash_to_hex)
 
 
 def invalid_sha1():
     """
     Hypothesis strategy returning an invalid sha1 representation.
     """
     return binary(
         min_size=50, max_size=50).filter(_filter_checksum).map(hash_to_hex)
 
 
 def sha256():
     """
     Hypothesis strategy returning a valid hexadecimal sha256 value.
     """
     return binary(
         min_size=32, max_size=32).filter(_filter_checksum).map(hash_to_hex)
 
 
 def content():
     """
     Hypothesis strategy returning a random content ingested
     into the test archive.
     """
     return _known_swh_object('contents')
 
 
 def contents():
     """
     Hypothesis strategy returning random contents ingested
     into the test archive.
     """
     return lists(content(), min_size=2, max_size=8)
 
 
 def content_text():
     """
     Hypothesis strategy returning random textual contents ingested
     into the test archive.
     """
     return content().filter(lambda c: c['mimetype'].startswith('text/'))
 
 
 def content_text_non_utf8():
     """
    Hypothesis strategy returning random textual contents ingested
    into the test archive whose encoding is neither UTF-8 nor US-ASCII.
     """
     return content().filter(lambda c: c['mimetype'].startswith('text/') and
                             c['encoding'] not in ('utf-8', 'us-ascii'))
 
 
 def content_text_no_highlight():
     """
     Hypothesis strategy returning random textual contents with no detected
     programming language to highlight ingested into the test archive.
     """
     return content().filter(lambda c: c['mimetype'].startswith('text/') and
                             c['hljs_language'] == 'nohighlight')
 
 
 def content_image_type():
     """
     Hypothesis strategy returning random image contents ingested
     into the test archive.
     """
     return content().filter(lambda c: c['mimetype'].startswith('image/'))
 
 
 @composite
 def new_content(draw):
     blake2s256_hex = draw(sha256())
     sha1_hex = draw(sha1())
     sha1_git_hex = draw(sha1())
     sha256_hex = draw(sha256())
 
     assume(sha1_hex != sha1_git_hex)
     assume(blake2s256_hex != sha256_hex)
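    # assume() makes hypothesis discard draws where two different hash
    # algorithms would collide, which would yield self-inconsistent
    # content metadata.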
 
     return {
        'blake2s256': blake2s256_hex,
         'sha1': sha1_hex,
         'sha1_git': sha1_git_hex,
         'sha256': sha256_hex
     }
 
 
 def unknown_content():
     """
     Hypothesis strategy returning a random content not ingested
     into the test archive.
     """
     return new_content().filter(
         lambda c: next(get_tests_data()['storage'].content_get(
              [hash_to_bytes(c['sha1'])])) is None)
 
 
 def unknown_contents():
     """
     Hypothesis strategy returning random contents not ingested
     into the test archive.
     """
     return lists(unknown_content(), min_size=2, max_size=8)
 
 
 def directory():
     """
     Hypothesis strategy returning a random directory ingested
     into the test archive.
     """
     return _known_swh_object('directories')
 
 
 def directory_with_subdirs():
     """
     Hypothesis strategy returning a random directory containing
     sub directories ingested into the test archive.
     """
     storage = get_tests_data()['storage']
     return directory().filter(
         lambda d: any([e['type'] == 'dir'
                       for e in list(storage.directory_ls(hash_to_bytes(d)))]))
 
 
 def empty_directory():
     """
     Hypothesis strategy returning the empty directory ingested
     into the test archive.
     """
     return just(directory_identifier({'entries': []}))
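
# Note: directory_identifier is deterministic, so empty_directory() always
# yields the well-known id of the empty directory (the same value as git's
# empty tree):
#
#     directory_identifier({'entries': []}) == \
#         '4b825dc642cb6eb9a060e54bf8d69288fbee4904'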
 
 
 def unknown_directory():
     """
     Hypothesis strategy returning a random directory not ingested
     into the test archive.
     """
     storage = get_tests_data()['storage']
     return sha1().filter(
         lambda s: len(list(storage.directory_missing([hash_to_bytes(s)]))) > 0)
 
 
 def origin():
     """
     Hypothesis strategy returning a random origin ingested
     into the test archive.
     """
     return _known_swh_object('origins')
 
 
 def origin_with_multiple_visits():
     """
    Hypothesis strategy returning a random origin with multiple
    visits ingested into the test archive.
     """
     ret = []
     tests_data = get_tests_data()
     for origin in tests_data['origins']:
         visits = list(tests_data['storage'].origin_visit_get(origin['url']))
         if len(visits) > 1:
             ret.append(origin)
     return sampled_from(ret)
 
 
 def origin_with_release():
     """
    Hypothesis strategy returning a random origin with at least
    one release ingested into the test archive.
     """
     ret = []
     tests_data = get_tests_data()
     for origin in tests_data['origins']:
         snapshot = tests_data['storage'].snapshot_get_latest(origin['url'])
         if any([b['target_type'] == 'release'
                 for b in snapshot['branches'].values()]):
             ret.append(origin)
     return sampled_from(ret)
 
 
 def new_origin():
     """
     Hypothesis strategy returning a random origin not ingested
     into the test archive.
     """
     storage = get_tests_data()['storage']
     return new_origin_strategy().map(lambda origin: origin.to_dict()).filter(
         lambda origin: storage.origin_get([origin])[0] is None)
 
 
 def new_origins(nb_origins=None):
     """
     Hypothesis strategy returning random origins not ingested
     into the test archive.
     """
     min_size = nb_origins if nb_origins is not None else 2
     max_size = nb_origins if nb_origins is not None else 8
     size = random.randint(min_size, max_size)
     return lists(new_origin(), min_size=size, max_size=size,
                  unique_by=lambda o: tuple(sorted(o.items())))
 
 
 def visit_dates(nb_dates=None):
     """
     Hypothesis strategy returning a list of visit dates.
     """
     min_size = nb_dates if nb_dates else 2
     max_size = nb_dates if nb_dates else 8
     return lists(datetimes(min_value=datetime(2015, 1, 1, 0, 0),
                            max_value=datetime(2018, 12, 31, 0, 0)),
                  min_size=min_size, max_size=max_size, unique=True).map(sorted)
 
 
 def release():
     """
     Hypothesis strategy returning a random release ingested
     into the test archive.
     """
     return _known_swh_object('releases')
 
 
 def unknown_release():
     """
    Hypothesis strategy returning a random release not ingested
     into the test archive.
     """
    return sha1().filter(
        lambda s: next(get_tests_data()['storage'].release_get(
            [hash_to_bytes(s)])) is None)
 
 
 def revision():
     """
     Hypothesis strategy returning a random revision ingested
     into the test archive.
     """
     return _known_swh_object('revisions')
 
 
 def unknown_revision():
     """
     Hypothesis strategy returning a random revision not ingested
     into the test archive.
     """
     storage = get_tests_data()['storage']
     return sha1().filter(
         lambda s: next(storage.revision_get([hash_to_bytes(s)])) is None)
 
 
 @composite
 def new_person(draw):
     """
     Hypothesis strategy returning random raw swh person data.
     """
     name = draw(text(min_size=5, max_size=30,
                      alphabet=characters(min_codepoint=0, max_codepoint=255)))
     email = '%s@company.org' % name
     return {
         'name': name.encode(),
         'email': email.encode(),
         'fullname': ('%s <%s>' % (name, email)).encode()
     }
 
 
 @composite
 def new_swh_date(draw):
     """
     Hypothesis strategy returning random raw swh date data.
     """
     timestamp = draw(
         datetimes(min_value=datetime(2015, 1, 1, 0, 0),
                   max_value=datetime(2018, 12, 31, 0, 0)).map(
                       lambda d: int(d.timestamp())))
     return {
         'timestamp': timestamp,
         'offset': 0,
         'negative_utc': False,
     }
 
 
 @composite
 def new_revision(draw):
     """
     Hypothesis strategy returning random raw swh revision data
     not ingested into the test archive.
     """
     return {
         'id': draw(unknown_revision().map(hash_to_bytes)),
         'directory': draw(sha1().map(hash_to_bytes)),
         'author': draw(new_person()),
         'committer': draw(new_person()),
         'message': draw(
             text(min_size=20, max_size=100).map(lambda t: t.encode())),
         'date': draw(new_swh_date()),
         'committer_date': draw(new_swh_date()),
         'synthetic': False,
         'type': 'git',
         'parents': [],
         'metadata': [],
     }
 
 
 def revisions(min_size=2, max_size=8):
     """
     Hypothesis strategy returning random revisions ingested
     into the test archive.
     """
     return lists(revision(), min_size=min_size, max_size=max_size)
 
 
 def unknown_revisions(min_size=2, max_size=8):
     """
     Hypothesis strategy returning random revisions not ingested
     into the test archive.
     """
     return lists(unknown_revision(), min_size=min_size, max_size=max_size)
 
 
 def snapshot():
     """
     Hypothesis strategy returning a random snapshot ingested
     into the test archive.
     """
     return _known_swh_object('snapshots')
 
 
 def new_snapshots(nb_snapshots=None):
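    """
    Hypothesis strategy returning random snapshots not ingested
    into the test archive.
    """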
     min_size = nb_snapshots if nb_snapshots else 2
     max_size = nb_snapshots if nb_snapshots else 8
     return lists(new_snapshot(min_size=2, max_size=10, only_objects=True)
                  .map(lambda snp: snp.to_dict()),
                  min_size=min_size, max_size=max_size)
 
 
 def unknown_snapshot():
     """
    Hypothesis strategy returning a random snapshot not ingested
     into the test archive.
     """
     storage = get_tests_data()['storage']
     return sha1().filter(
         lambda s: storage.snapshot_get(hash_to_bytes(s)) is None)
 
 
 def _get_origin_dfs_revisions_walker():
     tests_data = get_tests_data()
     storage = tests_data['storage']
     origin = random.choice(tests_data['origins'][:-1])
     snapshot = storage.snapshot_get_latest(origin['url'])
-    head = snapshot['branches'][b'HEAD']['target']
+    if snapshot['branches'][b'HEAD']['target_type'] == 'alias':
+        target = snapshot['branches'][b'HEAD']['target']
+        head = snapshot['branches'][target]['target']
+    else:
+        head = snapshot['branches'][b'HEAD']['target']
     return get_revisions_walker('dfs', storage, head)
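
# Note: the alias handling above is needed because, for a typical git
# origin, the latest snapshot stores HEAD as an alias branch, e.g.
# (illustrative values):
#
#     snapshot['branches'] == {
#         b'HEAD': {'target_type': 'alias',
#                   'target': b'refs/heads/master'},
#         b'refs/heads/master': {'target_type': 'revision',
#                                'target': b'...'},
#     }
#
# so HEAD must be dereferenced once before walking the revision graph.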
 
 
 def ancestor_revisions():
     """
     Hypothesis strategy returning a pair of revisions ingested into the
     test archive with an ancestor relation.
     """
     # get a dfs revisions walker for one of the origins
     # loaded into the test archive
     revisions_walker = _get_origin_dfs_revisions_walker()
     master_revisions = []
     children = defaultdict(list)
     init_rev_found = False
     # get revisions only authored in the master branch
     for rev in revisions_walker:
         for rev_p in rev['parents']:
             children[rev_p].append(rev['id'])
         if not init_rev_found:
             master_revisions.append(rev)
         if not rev['parents']:
             init_rev_found = True
 
     # head revision
     root_rev = master_revisions[0]
     # pick a random revision, different from head, only authored
     # in the master branch
     ancestor_rev_idx = random.choice(list(range(1, len(master_revisions)-1)))
     ancestor_rev = master_revisions[ancestor_rev_idx]
     ancestor_child_revs = children[ancestor_rev['id']]
 
     return just({
         'sha1_git_root': hash_to_hex(root_rev['id']),
         'sha1_git': hash_to_hex(ancestor_rev['id']),
         'children': [hash_to_hex(r) for r in ancestor_child_revs]
     })
 
 
 def non_ancestor_revisions():
     """
     Hypothesis strategy returning a pair of revisions ingested into the
     test archive with no ancestor relation.
     """
     # get a dfs revisions walker for one of the origins
     # loaded into the test archive
     revisions_walker = _get_origin_dfs_revisions_walker()
     merge_revs = []
     children = defaultdict(list)
     # get all merge revisions
     for rev in revisions_walker:
         if len(rev['parents']) > 1:
             merge_revs.append(rev)
         for rev_p in rev['parents']:
             children[rev_p].append(rev['id'])
    # find a merge revision whose parents each have a unique child revision
    # (two such parents cannot be ancestors of one another)
     random.shuffle(merge_revs)
     selected_revs = None
     for merge_rev in merge_revs:
         if all(len(children[rev_p]) == 1
                for rev_p in merge_rev['parents']):
             selected_revs = merge_rev['parents']
 
     return just({
         'sha1_git_root': hash_to_hex(selected_revs[0]),
         'sha1_git': hash_to_hex(selected_revs[1])
     })
 
# The following strategies return data specific to some tests
# that cannot be generated and is thus hardcoded.
 
 
 def contents_with_ctags():
     """
     Hypothesis strategy returning contents ingested into the test
    archive. Those contents are ctags compatible, that is, running
    ctags on them yields results.
     """
     return just({
         'sha1s': ['0ab37c02043ebff946c1937523f60aadd0844351',
                   '15554cf7608dde6bfefac7e3d525596343a85b6f',
                   '2ce837f1489bdfb8faf3ebcc7e72421b5bea83bd',
                   '30acd0b47fc25e159e27a980102ddb1c4bea0b95',
                   '4f81f05aaea3efb981f9d90144f746d6b682285b',
                   '5153aa4b6e4455a62525bc4de38ed0ff6e7dd682',
                   '59d08bafa6a749110dfb65ba43a61963d5a5bf9f',
                   '7568285b2d7f31ae483ae71617bd3db873deaa2c',
                   '7ed3ee8e94ac52ba983dd7690bdc9ab7618247b4',
                   '8ed7ef2e7ff9ed845e10259d08e4145f1b3b5b03',
                   '9b3557f1ab4111c8607a4f2ea3c1e53c6992916c',
                   '9c20da07ed14dc4fcd3ca2b055af99b2598d8bdd',
                   'c20ceebd6ec6f7a19b5c3aebc512a12fbdc9234b',
                   'e89e55a12def4cd54d5bff58378a3b5119878eb7',
                   'e8c0654fe2d75ecd7e0b01bee8a8fc60a130097e',
                   'eb6595e559a1d34a2b41e8d4835e0e4f98a5d2b5'],
         'symbol_name': 'ABS'
     })
 
 
 def revision_with_submodules():
     """
     Hypothesis strategy returning a revision that is known to
     point to a directory with revision entries (i.e. git submodules).
     """
     return just({
         'rev_sha1_git': 'ffcb69001f3f6745dfd5b48f72ab6addb560e234',
         'rev_dir_sha1_git': 'd92a21446387fa28410e5a74379c934298f39ae2',
         'rev_dir_rev_path': 'libtess2'
     })
diff --git a/swh/web/tests/testcase.py b/swh/web/tests/testcase.py
index 59d8491e..72e0c211 100644
--- a/swh/web/tests/testcase.py
+++ b/swh/web/tests/testcase.py
@@ -1,149 +1,157 @@
 # Copyright (C) 2015-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import shutil
 from subprocess import run, PIPE
 
 from django.core.cache import cache
 from hypothesis.extra.django import TestCase
 
 from swh.model.hashutil import hash_to_bytes
 from swh.web.common import converters
 from swh.web.tests.data import get_tests_data, override_storages
 
 
 ctags_json_missing = \
     shutil.which('ctags') is None or \
     b'+json' not in run(['ctags', '--version'], stdout=PIPE).stdout
 
 fossology_missing = shutil.which('nomossa') is None
 
 
 class WebTestCase(TestCase):
     """Base TestCase class for swh-web.
 
     It is initialized with references to in-memory storages containing
     raw test data.
 
     It also defines helper methods to retrieve that test data in
     a JSON-serializable format in order to ease test implementation.
 
     """
     def _pre_setup(self):
         cache.clear()
 
         tests_data = get_tests_data(reset=True)
         self.storage = tests_data['storage']
         self.idx_storage = tests_data['idx_storage']
         self.mimetype_indexer = tests_data['mimetype_indexer']
         self.license_indexer = tests_data['license_indexer']
         self.ctags_indexer = tests_data['ctags_indexer']
 
         # Update swh-web configuration to use the in-memory storages
         # instantiated in the tests.data module
         override_storages(tests_data['storage'], tests_data['idx_storage'])
 
         super()._pre_setup()
 
     def content_add_mimetype(self, cnt_id):
         self.mimetype_indexer.run([hash_to_bytes(cnt_id)],
                                   'update-dups')
 
     def content_get_mimetype(self, cnt_id):
         mimetype = next(self.idx_storage.content_mimetype_get(
                         [hash_to_bytes(cnt_id)]))
         return converters.from_filetype(mimetype)
 
     def content_add_language(self, cnt_id):
         # The language indexer is disabled, so this helper only signals
         # that fact to callers.
         raise NotImplementedError('Language indexer is disabled.')
 
     def content_get_language(self, cnt_id):
         lang = next(self.idx_storage.content_language_get(
                     [hash_to_bytes(cnt_id)]))
         return converters.from_swh(lang, hashess={'id'})
 
     def content_add_license(self, cnt_id):
         self.license_indexer.run([hash_to_bytes(cnt_id)],
                                  'update-dups')
 
     def content_get_license(self, cnt_id):
         cnt_id_bytes = hash_to_bytes(cnt_id)
         lic = next(self.idx_storage.content_fossology_license_get(
                    [cnt_id_bytes]))
         return converters.from_swh({'id': cnt_id_bytes,
                                     'facts': lic[cnt_id_bytes]},
                                    hashess={'id'})
 
     def content_add_ctags(self, cnt_id):
         self.ctags_indexer.run([hash_to_bytes(cnt_id)],
                                'update-dups')
 
     def content_get_ctags(self, cnt_id):
         cnt_id_bytes = hash_to_bytes(cnt_id)
         ctags = self.idx_storage.content_ctags_get([cnt_id_bytes])
         for ctag in ctags:
             yield converters.from_swh(ctag, hashess={'id'})
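 
     # Typical pairing of the indexer helpers above in a test (a sketch,
     # not part of the tested code; cnt_id stands for any content sha1
     # loaded into the test archive):
     #
     #   self.content_add_ctags(cnt_id)
     #   ctags = list(self.content_get_ctags(cnt_id))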
 
     def content_get_metadata(self, cnt_id):
         cnt_id_bytes = hash_to_bytes(cnt_id)
         metadata = next(self.storage.content_get_metadata([cnt_id_bytes]))
         return converters.from_swh(metadata,
                                    hashess={'sha1', 'sha1_git', 'sha256',
                                             'blake2s256'})
 
     def content_get(self, cnt_id):
         cnt_id_bytes = hash_to_bytes(cnt_id)
         cnt = next(self.storage.content_get([cnt_id_bytes]))
         return converters.from_content(cnt)
 
     def directory_ls(self, dir_id):
         cnt_id_bytes = hash_to_bytes(dir_id)
         dir_content = map(converters.from_directory_entry,
                           self.storage.directory_ls(cnt_id_bytes))
         return list(dir_content)
 
     def release_get(self, rel_id):
         rel_id_bytes = hash_to_bytes(rel_id)
         rel_data = next(self.storage.release_get([rel_id_bytes]))
         return converters.from_release(rel_data)
 
     def revision_get(self, rev_id):
         rev_id_bytes = hash_to_bytes(rev_id)
         rev_data = next(self.storage.revision_get([rev_id_bytes]))
         return converters.from_revision(rev_data)
 
     def revision_log(self, rev_id, limit=None):
         rev_id_bytes = hash_to_bytes(rev_id)
         return list(map(converters.from_revision,
                     self.storage.revision_log([rev_id_bytes], limit=limit)))
 
     def snapshot_get_latest(self, origin_url):
         snp = self.storage.snapshot_get_latest(origin_url)
         return converters.from_snapshot(snp)
 
     def origin_get(self, origin_info):
         origin = self.storage.origin_get(origin_info)
         return converters.from_origin(origin)
 
     def origin_visit_get(self, origin_url):
         visits = self.storage.origin_visit_get(origin_url)
         return list(map(converters.from_origin_visit, visits))
 
     def origin_visit_get_by(self, origin_url, visit_id):
         visit = self.storage.origin_visit_get_by(origin_url, visit_id)
         return converters.from_origin_visit(visit)
 
     def snapshot_get(self, snapshot_id):
         snp = self.storage.snapshot_get(hash_to_bytes(snapshot_id))
         return converters.from_snapshot(snp)
 
     def snapshot_get_branches(self, snapshot_id, branches_from='',
                               branches_count=1000, target_types=None):
         snp = self.storage.snapshot_get_branches(
             hash_to_bytes(snapshot_id), branches_from.encode(),
             branches_count, target_types)
         return converters.from_snapshot(snp)
+
+    def snapshot_get_head(self, snapshot):
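+        """Return the target id of the HEAD branch of a converted
+        snapshot (string keys), following one level of branch aliasing.
+        """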
+        if snapshot['branches']['HEAD']['target_type'] == 'alias':
+            target = snapshot['branches']['HEAD']['target']
+            head = snapshot['branches'][target]['target']
+        else:
+            head = snapshot['branches']['HEAD']['target']
+        return head
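+
+    # Minimal usage sketch (illustrative, not part of this change),
+    # assuming origin_url refers to an origin loaded into the test
+    # archive:
+    #
+    #   snapshot = self.snapshot_get_latest(origin_url)
+    #   head_rev = self.revision_get(self.snapshot_get_head(snapshot))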