diff --git a/sql/swh-func.sql b/sql/swh-func.sql
index c561e860..bd049c0d 100644
--- a/sql/swh-func.sql
+++ b/sql/swh-func.sql
@@ -1,1941 +1,1989 @@
 -- create a temporary table called tmp_TBLNAME, mimicking existing table
 -- TBLNAME
 --
 -- Args:
 --     tblname: name of the table to mimic
 create or replace function swh_mktemp(tblname regclass)
     returns void
     language plpgsql
 as $$
 begin
     execute format('
 	create temporary table tmp_%1$I
 	    (like %1$I including defaults)
 	    on commit drop;
       alter table tmp_%1$I drop column if exists object_id;
 	', tblname);
     return;
 end
 $$;
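 
 -- Usage sketch (illustrative; the table name is just an example):
 --
 --   select swh_mktemp('content');
 --   -- tmp_content now mirrors content (minus object_id) and is dropped on commit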
 
 -- create a temporary table for directory entries called tmp_TBLNAME,
 -- mimicking existing table TBLNAME with an extra dir_id (sha1_git)
 -- column, and dropping the id column.
 --
 -- This is used to create the tmp_directory_entry_<foo> tables.
 --
 -- Args:
 --     tblname: name of the table to mimic
 create or replace function swh_mktemp_dir_entry(tblname regclass)
     returns void
     language plpgsql
 as $$
 begin
     execute format('
 	create temporary table tmp_%1$I
 	    (like %1$I including defaults, dir_id sha1_git)
 	    on commit drop;
         alter table tmp_%1$I drop column id;
 	', tblname);
     return;
 end
 $$;
 
 
 -- create a temporary table for revisions called tmp_revisions,
 -- mimicking existing table revision, replacing the foreign keys to
 -- people with an email and name field
 --
 create or replace function swh_mktemp_revision()
     returns void
     language sql
 as $$
     create temporary table tmp_revision (
         like revision including defaults,
         author_fullname bytea,
         author_name bytea,
         author_email bytea,
         committer_fullname bytea,
         committer_name bytea,
         committer_email bytea
     ) on commit drop;
     alter table tmp_revision drop column author;
     alter table tmp_revision drop column committer;
     alter table tmp_revision drop column object_id;
 $$;
 
 
 -- create a temporary table for releases called tmp_release,
 -- mimicking existing table release, replacing the foreign keys to
 -- people with an email and name field
 --
 create or replace function swh_mktemp_release()
     returns void
     language sql
 as $$
     create temporary table tmp_release (
         like release including defaults,
         author_fullname bytea,
         author_name bytea,
         author_email bytea
     ) on commit drop;
     alter table tmp_release drop column author;
     alter table tmp_release drop column object_id;
 $$;
 
 -- create a temporary table with a single "bytea" column for fast object lookup.
 create or replace function swh_mktemp_bytea()
     returns void
     language sql
 as $$
     create temporary table tmp_bytea (
       id bytea
     ) on commit drop;
 $$;
 
 -- create a temporary table for occurrence_history
 create or replace function swh_mktemp_occurrence_history()
     returns void
     language sql
 as $$
     create temporary table tmp_occurrence_history(
         like occurrence_history including defaults,
         visit bigint not null
     ) on commit drop;
     alter table tmp_occurrence_history
       drop column visits,
       drop column object_id;
 $$;
 
 -- create a temporary table for entity_history, sans id
 create or replace function swh_mktemp_entity_history()
     returns void
     language sql
 as $$
     create temporary table tmp_entity_history (
         like entity_history including defaults) on commit drop;
     alter table tmp_entity_history drop column id;
 $$;
 
 -- create a temporary table for entities called tmp_entity_lister,
 -- with only the columns necessary for retrieving the uuid of a listed
 -- entity.
 create or replace function swh_mktemp_entity_lister()
     returns void
     language sql
 as $$
   create temporary table tmp_entity_lister (
     id              bigint,
     lister_metadata jsonb
   ) on commit drop;
 $$;
 
 -- create a temporary table for content_fossology_license tmp_content_fossology_license,
 create or replace function swh_mktemp_content_fossology_license()
     returns void
     language sql
 as $$
   create temporary table tmp_content_fossology_license (
     id           sha1,
     tool_name    text,
     tool_version text,
     license      text
   ) on commit drop;
 $$;
 
 comment on function swh_mktemp_content_fossology_license() is 'Helper table to add content license';
 
 -- create a temporary table for checking license names
 create or replace function swh_mktemp_content_fossology_license_unknown()
     returns void
     language sql
 as $$
   create temporary table tmp_content_fossology_license_unknown (
     name       text not null
   ) on commit drop;
 $$;
 
 comment on function swh_mktemp_content_fossology_license_unknown() is 'Helper table to list unknown licenses';
 
 
 -- a content signature is a set of cryptographic checksums that we use to
 -- uniquely identify content, so that we can check whether we already have
 -- some content during content injection
 create type content_signature as (
     sha1      sha1,
     sha1_git  sha1_git,
     sha256    sha256
 );
 
 
 -- check which entries of tmp_content are missing from content
 --
 -- operates in bulk: 0. swh_mktemp(content), 1. COPY to tmp_content,
 -- 2. call this function
 create or replace function swh_content_missing()
     returns setof content_signature
     language plpgsql
 as $$
 begin
     -- This query is critical for (single-algorithm) hash collision detection,
     -- so we cannot rely only on the fact that a single hash (e.g., sha1) is
     -- missing from the table content to conclude that a given content is
     -- missing. Ideally, we would want to (try to) add to content all entries
     -- in tmp_content that, when considering all columns together, are missing
     -- from content.
     --
 -- But doing that naively would require a *compound* index on all checksum
 -- columns; that index would not be significantly smaller than the content
 -- table itself, and therefore would not be used. Instead, we union together
 -- all contents that differ on at least one column from what is already
 -- available. If there is a collision on some (but not all) columns, the
 -- relevant tmp_content entry will be included in the set of content to be
 -- added, causing a downstream violation of the uniqueness constraint.
     return query
 	(select sha1, sha1_git, sha256 from tmp_content as tmp
 	 where not exists
 	     (select 1 from content as c where c.sha1 = tmp.sha1))
 	union
 	(select sha1, sha1_git, sha256 from tmp_content as tmp
 	 where not exists
 	     (select 1 from content as c where c.sha1_git = tmp.sha1_git))
 	union
 	(select sha1, sha1_git, sha256 from tmp_content as tmp
 	 where not exists
 	     (select 1 from content as c where c.sha256 = tmp.sha256));
     return;
 end
 $$;
 
 -- check which entries of tmp_bytea are missing from content, comparing by sha1
 --
 -- operates in bulk: 0. swh_mktemp_bytea(), 1. COPY to tmp_bytea,
 -- 2. call this function
 create or replace function swh_content_missing_per_sha1()
     returns setof sha1
     language plpgsql
 as $$
 begin
     return query
            (select id::sha1
             from tmp_bytea as tmp
             where not exists
             (select 1 from content as c where c.sha1=tmp.id));
 end
 $$;
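 
 -- Sketch of the tmp_bytea lookup pattern described above (the COPY payload
 -- is one sha1 per row; illustrative, not part of the function definitions):
 --
 --   select swh_mktemp_bytea();
 --   copy tmp_bytea (id) from stdin;
 --   select * from swh_content_missing_per_sha1();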
 
 
 -- check which entries of tmp_skipped_content are missing from skipped_content
 --
 -- operates in bulk: 0. swh_mktemp(skipped_content), 1. COPY to tmp_skipped_content,
 -- 2. call this function
 create or replace function swh_skipped_content_missing()
     returns setof content_signature
     language plpgsql
 as $$
 begin
     return query
 	select sha1, sha1_git, sha256 from tmp_skipped_content t
 	where not exists
 	(select 1 from skipped_content s where
 	    s.sha1 is not distinct from t.sha1 and
 	    s.sha1_git is not distinct from t.sha1_git and
 	    s.sha256 is not distinct from t.sha256);
     return;
 end
 $$;
 
 
 -- Look up content based on one or several different checksums. Return all
 -- content information if the content is found; a NULL row otherwise.
 --
 -- At least one checksum must be non-NULL. If several are non-NULL, they will
 -- be AND-ed together in the lookup query.
 --
 -- Note: this function is meant for looking up individual contents (e.g., for
 -- the web app); for batch lookup of missing contents (e.g., to be added), see
 -- swh_content_missing
 create or replace function swh_content_find(
     sha1      sha1     default NULL,
     sha1_git  sha1_git default NULL,
     sha256    sha256   default NULL
 )
     returns content
     language plpgsql
 as $$
 declare
     con content;
     filters text[] := array[] :: text[];  -- AND-clauses used to filter content
     q text;
 begin
     if sha1 is not null then
         filters := filters || format('sha1 = %L', sha1);
     end if;
     if sha1_git is not null then
         filters := filters || format('sha1_git = %L', sha1_git);
     end if;
     if sha256 is not null then
         filters := filters || format('sha256 = %L', sha256);
     end if;
 
     if cardinality(filters) = 0 then
         return null;
     else
         q = format('select * from content where %s',
 	        array_to_string(filters, ' and '));
         execute q into con;
 	return con;
     end if;
 end
 $$;
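 
 -- Example (sketch; the hash value is a placeholder):
 --
 --   select * from swh_content_find(sha1_git := '\x<20-byte sha1_git>');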
 
 
 -- add tmp_content entries to content, skipping duplicates
 --
 -- operates in bulk: 0. swh_mktemp(content), 1. COPY to tmp_content,
 -- 2. call this function
 create or replace function swh_content_add()
     returns void
     language plpgsql
 as $$
 begin
     insert into content (sha1, sha1_git, sha256, length, status)
 	select distinct sha1, sha1_git, sha256, length, status
 	from tmp_content
 	where (sha1, sha1_git, sha256) in
 	    (select * from swh_content_missing());
 	    -- TODO XXX use postgres 9.5 "UPSERT" support here, when available.
 	    -- Specifically, using "INSERT .. ON CONFLICT IGNORE" we can avoid
 	    -- the extra swh_content_missing() query here.
     return;
 end
 $$;
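 
 -- Putting the bulk steps above together (a sketch; run within a single
 -- transaction so the temporary table is dropped on commit):
 --
 --   begin;
 --   select swh_mktemp('content');
 --   copy tmp_content (sha1, sha1_git, sha256, length, status) from stdin;
 --   select swh_content_add();
 --   commit;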
 
 
 -- add tmp_skipped_content entries to skipped_content, skipping duplicates
 --
 -- operates in bulk: 0. swh_mktemp(skipped_content), 1. COPY to tmp_skipped_content,
 -- 2. call this function
 create or replace function swh_skipped_content_add()
     returns void
     language plpgsql
 as $$
 begin
     insert into skipped_content (sha1, sha1_git, sha256, length, status, reason, origin)
 	select distinct sha1, sha1_git, sha256, length, status, reason, origin
 	from tmp_skipped_content
 	where (coalesce(sha1, ''), coalesce(sha1_git, ''), coalesce(sha256, '')) in
 	    (select coalesce(sha1, ''), coalesce(sha1_git, ''), coalesce(sha256, '') from swh_skipped_content_missing());
 	    -- TODO XXX use postgres 9.5 "UPSERT" support here, when available.
 	    -- Specifically, using "INSERT .. ON CONFLICT IGNORE" we can avoid
 	    -- the extra swh_skipped_content_missing() query here.
     return;
 end
 $$;
 
 
 -- check which entries of tmp_directory are missing from directory
 --
 -- operates in bulk: 0. swh_mktemp(directory), 1. COPY to tmp_directory,
 -- 2. call this function
 create or replace function swh_directory_missing()
     returns setof sha1_git
     language plpgsql
 as $$
 begin
     return query
 	select id from tmp_directory t
 	where not exists (
 	    select 1 from directory d
 	    where d.id = t.id);
     return;
 end
 $$;
 
 
 -- Retrieve directory information from the temporary table
 create or replace function swh_directory_get()
     returns setof directory
     language plpgsql
 as $$
 begin
     return query
 	select d.*
         from tmp_directory t
         inner join directory d on t.id = d.id;
     return;
 end
 $$;
 
 
 create type directory_entry_type as enum('file', 'dir', 'rev');
 
 
 -- Add tmp_directory_entry_* entries to directory_entry_* and directory,
 -- skipping duplicates in directory_entry_*.  This is a generic function that
 -- works on all kinds of directory entries.
 --
 -- operates in bulk: 0. swh_mktemp_dir_entry('directory_entry_*'), 1 COPY to
 -- tmp_directory_entry_*, 2. call this function
 --
 -- Assumption: this function is used in the same transaction that inserts the
 -- context directory in table "directory".
 create or replace function swh_directory_entry_add(typ directory_entry_type)
     returns void
     language plpgsql
 as $$
 begin
     execute format('
     insert into directory_entry_%1$s (target, name, perms)
     select distinct t.target, t.name, t.perms
     from tmp_directory_entry_%1$s t
     where not exists (
     select 1
     from directory_entry_%1$s i
     where t.target = i.target and t.name = i.name and t.perms = i.perms)
    ', typ);
 
     execute format('
     with new_entries as (
 	select t.dir_id, array_agg(i.id) as entries
 	from tmp_directory_entry_%1$s t
 	inner join directory_entry_%1$s i
 	using (target, name, perms)
 	group by t.dir_id
     )
     update tmp_directory as d
     set %1$s_entries = new_entries.entries
     from new_entries
     where d.id = new_entries.dir_id
     ', typ);
 
     return;
 end
 $$;
 
 -- Insert the data from tmp_directory, tmp_directory_entry_file,
 -- tmp_directory_entry_dir, tmp_directory_entry_rev into their final
 -- tables.
 --
 -- Prerequisites:
 --  directory ids in tmp_directory
 --  entries in tmp_directory_entry_{file,dir,rev}
 --
 create or replace function swh_directory_add()
     returns void
     language plpgsql
 as $$
 begin
     perform swh_directory_entry_add('file');
     perform swh_directory_entry_add('dir');
     perform swh_directory_entry_add('rev');
 
     insert into directory
     select * from tmp_directory t
     where not exists (
         select 1 from directory d
 	where d.id = t.id);
 
     return;
 end
 $$;
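 
 -- Loader-side sketch of the directory insertion workflow described above
 -- (illustrative; COPY payloads omitted):
 --
 --   select swh_mktemp('directory');
 --   select swh_mktemp_dir_entry('directory_entry_file');
 --   select swh_mktemp_dir_entry('directory_entry_dir');
 --   select swh_mktemp_dir_entry('directory_entry_rev');
 --   -- COPY into tmp_directory and tmp_directory_entry_{file,dir,rev}, then:
 --   select swh_directory_add();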
 
 -- a directory listing entry with all the metadata
 --
 -- can be used to list a directory, and retrieve all the data in one go.
 create type directory_entry as
 (
   dir_id   sha1_git,     -- id of the parent directory
   type     directory_entry_type,  -- type of entry
   target   sha1_git,     -- id of target
   name     unix_path,    -- path name, relative to containing dir
   perms    file_perms,   -- unix-like permissions
   status   content_status,  -- visible or absent
   sha1     sha1,            -- content's sha1 if type is not dir
   sha1_git sha1_git,        -- content's sha1 git if type is not dir
   sha256   sha256           -- content's sha256 if type is not dir
 );
 
 
 -- List a single level of directory walked_dir_id
 -- FIXME: order by name is not correct. For git, we need to order
 -- lexicographically, but as if a trailing / were present in directory
 -- names
 create or replace function swh_directory_walk_one(walked_dir_id sha1_git)
     returns setof directory_entry
     language sql
     stable
 as $$
     with dir as (
 	select id as dir_id, dir_entries, file_entries, rev_entries
 	from directory
 	where id = walked_dir_id),
     ls_d as (select dir_id, unnest(dir_entries) as entry_id from dir),
     ls_f as (select dir_id, unnest(file_entries) as entry_id from dir),
     ls_r as (select dir_id, unnest(rev_entries) as entry_id from dir)
     (select dir_id, 'dir'::directory_entry_type as type,
             e.target, e.name, e.perms, NULL::content_status,
             NULL::sha1, NULL::sha1_git, NULL::sha256
      from ls_d
      left join directory_entry_dir e on ls_d.entry_id = e.id)
     union
     (select dir_id, 'file'::directory_entry_type as type,
             e.target, e.name, e.perms, c.status,
             c.sha1, c.sha1_git, c.sha256
      from ls_f
      left join directory_entry_file e on ls_f.entry_id = e.id
      left join content c on e.target = c.sha1_git)
     union
     (select dir_id, 'rev'::directory_entry_type as type,
             e.target, e.name, e.perms, NULL::content_status,
             NULL::sha1, NULL::sha1_git, NULL::sha256
      from ls_r
      left join directory_entry_rev e on ls_r.entry_id = e.id)
     order by name;
 $$;
 
 -- Recursively list the directory tree rooted at walked_dir_id
 create or replace function swh_directory_walk(walked_dir_id sha1_git)
     returns setof directory_entry
     language sql
     stable
 as $$
     with recursive entries as (
         select dir_id, type, target, name, perms, status, sha1, sha1_git,
                sha256
         from swh_directory_walk_one(walked_dir_id)
         union all
         select dir_id, type, target, (dirname || '/' || name)::unix_path as name,
                perms, status, sha1, sha1_git, sha256
         from (select (swh_directory_walk_one(dirs.target)).*, dirs.name as dirname
               from (select target, name from entries where type = 'dir') as dirs) as with_parent
     )
     select dir_id, type, target, name, perms, status, sha1, sha1_git, sha256
     from entries
 $$;
 
 create or replace function swh_revision_walk(revision_id sha1_git)
   returns setof directory_entry
   language sql
   stable
 as $$
   select dir_id, type, target, name, perms, status, sha1, sha1_git, sha256
   from swh_directory_walk((select directory from revision where id=revision_id))
 $$;
 
 COMMENT ON FUNCTION swh_revision_walk(sha1_git) IS 'Recursively list the directory tree targeted by the given revision';
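 
 -- Example (sketch; the id is a placeholder):
 --
 --   select name, perms from swh_revision_walk('\x<20-byte revision id>')
 --   where type = 'file';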
 
 
 -- Find a directory entry by its path
 create or replace function swh_find_directory_entry_by_path(
     walked_dir_id sha1_git,
     dir_or_content_path bytea[])
     returns directory_entry
     language plpgsql
 as $$
 declare
     end_index integer;
     paths bytea default '';
     path bytea;
     res bytea[];
     r record;
 begin
     end_index := array_upper(dir_or_content_path, 1);
     res[1] := walked_dir_id;
 
     for i in 1..end_index
     loop
         path := dir_or_content_path[i];
         -- concatenate path for patching the name in the result record (if we found it)
         if i = 1 then
             paths = path;
         else
             paths := paths || '/' || path;  -- concatenate paths
         end if;
 
         if i <> end_index then
             select *
             from swh_directory_walk_one(res[i] :: sha1_git)
             where name=path
             and type = 'dir'
             limit 1 into r;
         else
             select *
             from swh_directory_walk_one(res[i] :: sha1_git)
             where name=path
             limit 1 into r;
         end if;
 
         -- find the path
         if r is null then
            return null;
         else
             -- store the next dir to lookup the next local path from
             res[i+1] := r.target;
         end if;
     end loop;
 
     -- at this moment, r is the result. Patch its 'name' with the full path before returning it.
     r.name := paths;
     return r;
 end
 $$;
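 
 -- Example (sketch): find doc/README under a directory; the id is a
 -- placeholder and the path components are byte strings:
 --
 --   select * from swh_find_directory_entry_by_path(
 --       '\x<20-byte dir id>', array['doc', 'README']::bytea[]);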
 
 -- List all revision IDs starting from a given revision, going back in time
 --
 -- TODO ordering: should be breadth-first right now (what do we want?)
 -- TODO ordering: ORDER BY parent_rank somewhere?
 create or replace function swh_revision_list(root_revisions bytea[], num_revs bigint default NULL)
     returns table (id sha1_git, parents bytea[])
     language sql
     stable
 as $$
     with recursive full_rev_list(id) as (
         (select id from revision where id = ANY(root_revisions))
         union
         (select h.parent_id
          from revision_history as h
          join full_rev_list on h.id = full_rev_list.id)
     ),
     rev_list as (select id from full_rev_list limit num_revs)
     select rev_list.id as id,
            array(select rh.parent_id::bytea
                  from revision_history rh
                  where rh.id = rev_list.id
                  order by rh.parent_rank
                 ) as parent
     from rev_list;
 $$;
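 
 -- Example (sketch; the id is a placeholder):
 --
 --   select * from swh_revision_list(array['\x<20-byte revision id>']::bytea[], 10);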
 
 -- List all the children of the given revisions
 create or replace function swh_revision_list_children(root_revisions bytea[], num_revs bigint default NULL)
     returns table (id sha1_git, parents bytea[])
     language sql
     stable
 as $$
     with recursive full_rev_list(id) as (
         (select id from revision where id = ANY(root_revisions))
         union
         (select h.id
          from revision_history as h
          join full_rev_list on h.parent_id = full_rev_list.id)
     ),
     rev_list as (select id from full_rev_list limit num_revs)
     select rev_list.id as id,
            array(select rh.parent_id::bytea
                  from revision_history rh
                  where rh.id = rev_list.id
                  order by rh.parent_rank
                 ) as parent
     from rev_list;
 $$;
 
 
 -- Detailed entry for a revision
 create type revision_entry as
 (
   id                             sha1_git,
   date                           timestamptz,
   date_offset                    smallint,
   date_neg_utc_offset            boolean,
   committer_date                 timestamptz,
   committer_date_offset          smallint,
   committer_date_neg_utc_offset  boolean,
   type                           revision_type,
   directory                      sha1_git,
   message                        bytea,
   author_id                      bigint,
   author_fullname                bytea,
   author_name                    bytea,
   author_email                   bytea,
   committer_id                   bigint,
   committer_fullname             bytea,
   committer_name                 bytea,
   committer_email                bytea,
   metadata                       jsonb,
   synthetic                      boolean,
   parents                        bytea[],
   object_id                      bigint
 );
 
 
 -- "git style" revision log. Similar to swh_revision_list(), but returning all
 -- information associated to each revision, and expanding authors/committers
 create or replace function swh_revision_log(root_revisions bytea[], num_revs bigint default NULL)
     returns setof revision_entry
     language sql
     stable
 as $$
     select t.id, r.date, r.date_offset, r.date_neg_utc_offset,
            r.committer_date, r.committer_date_offset, r.committer_date_neg_utc_offset,
            r.type, r.directory, r.message,
            a.id, a.fullname, a.name, a.email,
            c.id, c.fullname, c.name, c.email,
            r.metadata, r.synthetic, t.parents, r.object_id
     from swh_revision_list(root_revisions, num_revs) as t
     left join revision r on t.id = r.id
     left join person a on a.id = r.author
     left join person c on c.id = r.committer;
 $$;
 
 
 -- Retrieve revisions from tmp_bytea in bulk
 create or replace function swh_revision_get()
     returns setof revision_entry
     language plpgsql
 as $$
 begin
     return query
         select r.id, r.date, r.date_offset, r.date_neg_utc_offset,
                r.committer_date, r.committer_date_offset, r.committer_date_neg_utc_offset,
                r.type, r.directory, r.message,
                a.id, a.fullname, a.name, a.email, c.id, c.fullname, c.name, c.email, r.metadata, r.synthetic,
          array(select rh.parent_id::bytea from revision_history rh where rh.id = t.id order by rh.parent_rank)
                    as parents, r.object_id
         from tmp_bytea t
         left join revision r on t.id = r.id
         left join person a on a.id = r.author
         left join person c on c.id = r.committer;
     return;
 end
 $$;
 
 -- List missing revisions from tmp_bytea
 create or replace function swh_revision_missing()
     returns setof sha1_git
     language plpgsql
 as $$
 begin
     return query
         select id::sha1_git from tmp_bytea t
 	where not exists (
 	    select 1 from revision r
 	    where r.id = t.id);
     return;
 end
 $$;
 
 -- Detailed entry for a release
 create type release_entry as
 (
   id                   sha1_git,
   target               sha1_git,
   target_type          object_type,
   date                 timestamptz,
   date_offset          smallint,
   date_neg_utc_offset  boolean,
   name                 bytea,
   comment              bytea,
   synthetic            boolean,
   author_id            bigint,
   author_fullname      bytea,
   author_name          bytea,
   author_email         bytea,
   object_id            bigint
 );
 
 -- Retrieve releases from tmp_bytea in bulk
 create or replace function swh_release_get()
     returns setof release_entry
     language plpgsql
 as $$
 begin
     return query
         select r.id, r.target, r.target_type, r.date, r.date_offset, r.date_neg_utc_offset, r.name, r.comment,
                r.synthetic, p.id as author_id, p.fullname as author_fullname, p.name as author_name, p.email as author_email, r.object_id
         from tmp_bytea t
         inner join release r on t.id = r.id
         inner join person p on p.id = r.author;
     return;
 end
 $$;
 
 -- Create entries in person from tmp_revision
 create or replace function swh_person_add_from_revision()
     returns void
     language plpgsql
 as $$
 begin
     with t as (
         select author_fullname as fullname, author_name as name, author_email as email from tmp_revision
     union
         select committer_fullname as fullname, committer_name as name, committer_email as email from tmp_revision
     ) insert into person (fullname, name, email)
     select distinct fullname, name, email from t
     where not exists (
         select 1
         from person p
         where t.fullname = p.fullname
     );
     return;
 end
 $$;
 
 
 -- Create entries in revision from tmp_revision
 create or replace function swh_revision_add()
     returns void
     language plpgsql
 as $$
 begin
     perform swh_person_add_from_revision();
 
     insert into revision (id, date, date_offset, date_neg_utc_offset, committer_date, committer_date_offset, committer_date_neg_utc_offset, type, directory, message, author, committer, metadata, synthetic)
     select t.id, t.date, t.date_offset, t.date_neg_utc_offset, t.committer_date, t.committer_date_offset, t.committer_date_neg_utc_offset, t.type, t.directory, t.message, a.id, c.id, t.metadata, t.synthetic
     from tmp_revision t
     left join person a on a.fullname = t.author_fullname
     left join person c on c.fullname = t.committer_fullname;
     return;
 end
 $$;
 
 
 -- List missing releases from tmp_bytea
 create or replace function swh_release_missing()
     returns setof sha1_git
     language plpgsql
 as $$
 begin
   return query
     select id::sha1_git from tmp_bytea t
     where not exists (
       select 1 from release r
       where r.id = t.id);
 end
 $$;
 
 
 -- Create entries in person from tmp_release
 create or replace function swh_person_add_from_release()
     returns void
     language plpgsql
 as $$
 begin
     with t as (
         select distinct author_fullname as fullname, author_name as name, author_email as email from tmp_release
     ) insert into person (fullname, name, email)
     select fullname, name, email from t
     where not exists (
         select 1
         from person p
         where t.fullname = p.fullname
     );
     return;
 end
 $$;
 
 
 -- Create entries in release from tmp_release
 create or replace function swh_release_add()
     returns void
     language plpgsql
 as $$
 begin
     perform swh_person_add_from_release();
 
     insert into release (id, target, target_type, date, date_offset, date_neg_utc_offset, name, comment, author, synthetic)
     select t.id, t.target, t.target_type, t.date, t.date_offset, t.date_neg_utc_offset, t.name, t.comment, a.id, t.synthetic
     from tmp_release t
     left join person a on a.fullname = t.author_fullname;
     return;
 end
 $$;
 
 create or replace function swh_occurrence_update_for_origin(origin_id bigint)
   returns void
   language sql
 as $$
   delete from occurrence where origin = origin_id;
   insert into occurrence (origin, branch, target, target_type)
     select origin, branch, target, target_type
     from occurrence_history
     where origin = origin_id and
           (select visit from origin_visit
            where origin = origin_id
            order by date desc
            limit 1) = any(visits);
 $$;
 
 create or replace function swh_occurrence_update_all()
   returns void
   language plpgsql
 as $$
 declare
   origin_id origin.id%type;
 begin
   for origin_id in
     select distinct id from origin
   loop
     perform swh_occurrence_update_for_origin(origin_id);
   end loop;
   return;
 end;
 $$;
 
 -- add a new origin_visit for origin origin_id at date.
 --
 -- Returns the new visit id.
 create or replace function swh_origin_visit_add(origin_id bigint, date timestamptz)
     returns bigint
     language sql
 as $$
   with last_known_visit as (
     select coalesce(max(visit), 0) as visit
     from origin_visit
     where origin = origin_id
   )
   insert into origin_visit (origin, date, visit, status)
   values (origin_id, date, (select visit from last_known_visit) + 1, 'ongoing')
   returning visit;
 $$;
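 
 -- Example (sketch): record a visit of origin 1 starting now:
 --
 --   select swh_origin_visit_add(1, now());  -- returns the new visit number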
 
 -- add tmp_occurrence_history entries to occurrence_history
 --
 -- operates in bulk: 0. swh_mktemp(occurrence_history), 1. COPY to tmp_occurrence_history,
 -- 2. call this function
 create or replace function swh_occurrence_history_add()
     returns void
     language plpgsql
 as $$
 declare
   origin_id origin.id%type;
 begin
   -- Create or update occurrence_history
   with occurrence_history_id_visit as (
     select tmp_occurrence_history.*, object_id, visits from tmp_occurrence_history
     left join occurrence_history using(origin, branch, target, target_type)
   ),
   occurrences_to_update as (
     select object_id, visit from occurrence_history_id_visit where object_id is not null
   ),
   update_occurrences as (
     update occurrence_history
     set visits = array(select unnest(occurrence_history.visits) as e
                         union
                        select occurrences_to_update.visit as e
                        order by e)
     from occurrences_to_update
     where occurrence_history.object_id = occurrences_to_update.object_id
   )
   insert into occurrence_history (origin, branch, target, target_type, visits)
     select origin, branch, target, target_type, ARRAY[visit]
       from occurrence_history_id_visit
       where object_id is null;
 
   -- update occurrence
   for origin_id in
     select distinct origin from tmp_occurrence_history
   loop
     perform swh_occurrence_update_for_origin(origin_id);
   end loop;
   return;
 end
 $$;
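 
 -- Corresponding loader-side calls, sketched (COPY payload omitted):
 --
 --   select swh_mktemp_occurrence_history();
 --   copy tmp_occurrence_history (origin, branch, target, target_type, visit) from stdin;
 --   select swh_occurrence_history_add();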
 
 
 -- Absolute path: directory reference + complete path relative to it
 create type content_dir as (
     directory  sha1_git,
     path       unix_path
 );
 
 
 -- Find the containing directory of a given content, specified by sha1
 -- (note: *not* sha1_git).
 --
 -- Return a pair (dir_id, path) where path is a UNIX path that, from the
 -- directory root, reaches down to a file with the desired content. Return NULL
 -- if no match is found.
 --
 -- In case of multiple paths (i.e., pretty much always), an arbitrary one is
 -- chosen.
 create or replace function swh_content_find_directory(content_id sha1)
     returns content_dir
     language sql
     stable
 as $$
     with recursive path as (
 	-- Recursively build a path from the requested content to a root
 	-- directory. Each iteration returns a pair (dir_id, filename) where
 	-- filename is relative to dir_id. Stops when no parent directory can
 	-- be found.
 	(select dir.id as dir_id, dir_entry_f.name as name, 0 as depth
 	 from directory_entry_file as dir_entry_f
 	 join content on content.sha1_git = dir_entry_f.target
 	 join directory as dir on dir.file_entries @> array[dir_entry_f.id]
 	 where content.sha1 = content_id
 	 limit 1)
 	union all
 	(select dir.id as dir_id,
 		(dir_entry_d.name || '/' || path.name)::unix_path as name,
 		path.depth + 1
 	 from path
 	 join directory_entry_dir as dir_entry_d on dir_entry_d.target = path.dir_id
 	 join directory as dir on dir.dir_entries @> array[dir_entry_d.id]
 	 limit 1)
     )
     select dir_id, name from path order by depth desc limit 1;
 $$;
 
 
 -- Walk the children of a given revision until a matching occurrence is
 -- found. Return all occurrence information if one is found, NULL
 -- otherwise.
 create or replace function swh_revision_find_occurrence(revision_id sha1_git)
     returns occurrence
     language sql
     stable
 as $$
 	select origin, branch, target, target_type
   from swh_revision_list_children(ARRAY[revision_id] :: bytea[]) as rev_list
 	left join occurrence_history occ_hist
   on rev_list.id = occ_hist.target
 	where occ_hist.origin is not null and
         occ_hist.target_type = 'revision'
 	limit 1;
 $$;
 
 -- Find the visit of origin id closest to date visit_date
 create or replace function swh_visit_find_by_date(origin bigint, visit_date timestamptz default NOW())
     returns origin_visit
     language sql
     stable
 as $$
   with closest_two_visits as ((
     select origin_visit, (date - visit_date) as interval
     from origin_visit
     where date >= visit_date
     order by date asc
     limit 1
   ) union (
     select origin_visit, (visit_date - date) as interval
     from origin_visit
     where date < visit_date
     order by date desc
     limit 1
   )) select (origin_visit).* from closest_two_visits order by interval limit 1
 $$;
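 
 -- Example (sketch): the visit of origin 1 closest to 2016-01-01:
 --
 --   select * from swh_visit_find_by_date(1, '2016-01-01');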
 
 -- Find the latest visit of a given origin
 create or replace function swh_visit_get(origin bigint)
     returns origin_visit
     language sql
     stable
 as $$
     select *
     from origin_visit
     where origin_visit.origin = swh_visit_get.origin
     order by date desc
 $$;
 
 
 -- Retrieve occurrences by filtering on origin_id and optionally on
 -- branch_name and/or date
 create or replace function swh_occurrence_get_by(
        origin_id bigint,
        branch_name bytea default NULL,
        date timestamptz default NULL)
     returns setof occurrence_history
     language plpgsql
 as $$
 declare
     filters text[] := array[] :: text[];  -- AND-clauses used to filter content
     visit_id bigint;
     q text;
 begin
     if origin_id is not null then
         filters := filters || format('origin = %L', origin_id);
     end if;
     if branch_name is not null then
         filters := filters || format('branch = %L', branch_name);
     end if;
     if date is not null then
         if origin_id is null then
             raise exception 'Needs an origin_id to filter by date.';
         end if;
         select visit from swh_visit_find_by_date(origin_id, date) into visit_id;
         if visit_id is null then
             return;
         end if;
         filters := filters || format('%L = any(visits)', visit_id);
     end if;
 
     if cardinality(filters) = 0 then
         raise exception 'At least one filter amongst (origin_id, branch_name, date) is needed';
     else
         q = format('select * ' ||
                    'from occurrence_history ' ||
                    'where %s',
 	        array_to_string(filters, ' and '));
         return query execute q;
     end if;
 end
 $$;
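 
 -- Example (sketch; the branch name is illustrative):
 --
 --   select * from swh_occurrence_get_by(1, branch_name := 'refs/heads/master');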
 
 
 -- Retrieve revisions by occurrence criteria
 create or replace function swh_revision_get_by(
        origin_id bigint,
        branch_name bytea default NULL,
        date timestamptz default NULL)
     returns setof revision_entry
     language sql
     stable
 as $$
     select r.id, r.date, r.date_offset, r.date_neg_utc_offset,
         r.committer_date, r.committer_date_offset, r.committer_date_neg_utc_offset,
         r.type, r.directory, r.message,
         a.id, a.fullname, a.name, a.email, c.id, c.fullname, c.name, c.email, r.metadata, r.synthetic,
         array(select rh.parent_id::bytea
             from revision_history rh
             where rh.id = r.id
             order by rh.parent_rank
         ) as parents, r.object_id
     from swh_occurrence_get_by(origin_id, branch_name, date) as occ
     inner join revision r on occ.target = r.id
     left join person a on a.id = r.author
     left join person c on c.id = r.committer;
 $$;
 
 -- Retrieve releases for a given origin
 create or replace function swh_release_get_by(
        origin_id bigint)
     returns setof release_entry
     language sql
     stable
 as $$
    select r.id, r.target, r.target_type, r.date, r.date_offset, r.date_neg_utc_offset,
         r.name, r.comment, r.synthetic, a.id as author_id, a.fullname as author_fullname,
         a.name as author_name, a.email as author_email, r.object_id
     from release r
     inner join occurrence_history occ on occ.target = r.target
     left join person a on a.id = r.author
     where occ.origin = origin_id and occ.target_type = 'revision' and r.target_type = 'revision';
 $$;
 
 
 create type content_provenance as (
   content  sha1_git,
   revision sha1_git,
   origin   bigint,
   visit    bigint,
   path     unix_path
 );
 
 COMMENT ON TYPE content_provenance IS 'Provenance information on content';
 
 create or replace function swh_content_find_provenance(content_id sha1_git)
     returns setof content_provenance
     language sql
 as $$
     with subscripted_paths as (
         select content, revision_paths, generate_subscripts(revision_paths, 1) as s
         from cache_content_revision
         where content = content_id
     ),
     cleaned_up_contents as (
         select content, revision_paths[s][1]::sha1_git as revision, revision_paths[s][2]::unix_path as path
         from subscripted_paths
     )
     select cuc.content, cuc.revision, cro.origin, cro.visit, cuc.path
     from cleaned_up_contents cuc
     inner join cache_revision_origin cro using(revision)
 $$;
 
 COMMENT ON FUNCTION swh_content_find_provenance(sha1_git) IS 'Given a content, provide provenance information on it';
 
 
 create type object_found as (
     sha1_git   sha1_git,
     type       object_type,
     id         bytea,       -- sha1 or sha1_git depending on object_type
     object_id  bigint
 );
 
 -- Find objects by sha1_git, return their type and their main identifier
 create or replace function swh_object_find_by_sha1_git()
     returns setof object_found
     language plpgsql
 as $$
 begin
     return query
     with known_objects as ((
         select id as sha1_git, 'release'::object_type as type, id, object_id from release r
         where exists (select 1 from tmp_bytea t where t.id = r.id)
     ) union all (
         select id as sha1_git, 'revision'::object_type as type, id, object_id from revision r
         where exists (select 1 from tmp_bytea t where t.id = r.id)
     ) union all (
         select id as sha1_git, 'directory'::object_type as type, id, object_id from directory d
         where exists (select 1 from tmp_bytea t where t.id = d.id)
     ) union all (
         select sha1_git as sha1_git, 'content'::object_type as type, sha1 as id, object_id from content c
         where exists (select 1 from tmp_bytea t where t.id = c.sha1_git)
     ))
     select t.id::sha1_git as sha1_git, k.type, k.id, k.object_id from tmp_bytea t
       left join known_objects k on t.id = k.sha1_git;
 end
 $$;
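 
 -- Sketch of the lookup pattern (one sha1_git per COPY row; illustrative):
 --
 --   select swh_mktemp_bytea();
 --   copy tmp_bytea (id) from stdin;
 --   select * from swh_object_find_by_sha1_git();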
 
 -- Create entries in entity_history from tmp_entity_history
 --
 -- TODO: do something smarter to compress the entries if the data
 -- didn't change.
 create or replace function swh_entity_history_add()
     returns void
     language plpgsql
 as $$
 begin
     insert into entity_history (
         uuid, parent, name, type, description, homepage, active, generated, lister_metadata, metadata, validity
     ) select * from tmp_entity_history;
     return;
 end
 $$;
 
 
 create or replace function swh_update_entity_from_entity_history()
     returns trigger
     language plpgsql
 as $$
 begin
     insert into entity (uuid, parent, name, type, description, homepage, active, generated,
       lister_metadata, metadata, last_seen, last_id)
       select uuid, parent, name, type, description, homepage, active, generated,
              lister_metadata, metadata, unnest(validity), id
       from entity_history
       where uuid = NEW.uuid
       order by unnest(validity) desc limit 1
     on conflict (uuid) do update set
       parent = EXCLUDED.parent,
       name = EXCLUDED.name,
       type = EXCLUDED.type,
       description = EXCLUDED.description,
       homepage = EXCLUDED.homepage,
       active = EXCLUDED.active,
       generated = EXCLUDED.generated,
       lister_metadata = EXCLUDED.lister_metadata,
       metadata = EXCLUDED.metadata,
       last_seen = EXCLUDED.last_seen,
       last_id = EXCLUDED.last_id;
 
     return null;
 end
 $$;
 
 create trigger update_entity
   after insert or update
   on entity_history
   for each row
   execute procedure swh_update_entity_from_entity_history();
 
 -- map an id of tmp_entity_lister to a full entity
 create type entity_id as (
     id               bigint,
     uuid             uuid,
     parent           uuid,
     name             text,
     type             entity_type,
     description      text,
     homepage         text,
     active           boolean,
     generated        boolean,
     lister_metadata  jsonb,
     metadata         jsonb,
     last_seen        timestamptz,
     last_id          bigint
 );
 
 -- find the uuids of the entity rows whose lister_metadata matches the
 -- metadata contained in tmp_entity_lister
 create or replace function swh_entity_from_tmp_entity_lister()
     returns setof entity_id
     language plpgsql
 as $$
 begin
   return query
     select t.id, e.*
     from tmp_entity_lister t
     left join entity e
     on e.lister_metadata @> t.lister_metadata;
   return;
 end
 $$;
 
 create or replace function swh_entity_get(entity_uuid uuid)
     returns setof entity
     language sql
     stable
 as $$
   with recursive entity_hierarchy as (
   select e.*
     from entity e where uuid = entity_uuid
     union
     select p.*
     from entity_hierarchy e
     join entity p on e.parent = p.uuid
   )
   select *
   from entity_hierarchy;
 $$;
 
 
 -- Object listing by object_id
 
 create or replace function swh_content_list_by_object_id(
     min_excl bigint,
     max_incl bigint
 )
     returns setof content
     language sql
     stable
 as $$
     select * from content
     where object_id > min_excl and object_id <= max_incl
     order by object_id;
 $$;
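 
 -- Example (sketch): scan contents in pages of 10000 by object_id:
 --
 --   select * from swh_content_list_by_object_id(0, 10000);
 --   select * from swh_content_list_by_object_id(10000, 20000);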
 
 create or replace function swh_revision_list_by_object_id(
     min_excl bigint,
     max_incl bigint
 )
     returns setof revision_entry
     language sql
     stable
 as $$
     with revs as (
         select * from revision
         where object_id > min_excl and object_id <= max_incl
     )
     select r.id, r.date, r.date_offset, r.date_neg_utc_offset,
            r.committer_date, r.committer_date_offset, r.committer_date_neg_utc_offset,
            r.type, r.directory, r.message,
            a.id, a.fullname, a.name, a.email, c.id, c.fullname, c.name, c.email, r.metadata, r.synthetic,
            array(select rh.parent_id::bytea from revision_history rh where rh.id = r.id order by rh.parent_rank)
                as parents, r.object_id
     from revs r
     left join person a on a.id = r.author
     left join person c on c.id = r.committer
     order by r.object_id;
 $$;
 
 create or replace function swh_release_list_by_object_id(
     min_excl bigint,
     max_incl bigint
 )
     returns setof release_entry
     language sql
     stable
 as $$
     with rels as (
         select * from release
         where object_id > min_excl and object_id <= max_incl
     )
     select r.id, r.target, r.target_type, r.date, r.date_offset, r.date_neg_utc_offset, r.name, r.comment,
            r.synthetic, p.id as author_id, p.fullname as author_fullname, p.name as author_name, p.email as author_email, r.object_id
     from rels r
     left join person p on p.id = r.author
     order by r.object_id;
 $$;
 
 
 create or replace function swh_cache_content_revision_add()
     returns void
     language plpgsql
 as $$
 declare
   cnt bigint;
   d sha1_git;
 begin
   delete from tmp_bytea t where exists (select 1 from cache_content_revision_processed ccrp where t.id = ccrp.revision);
 
   select count(*) from tmp_bytea into cnt;
   if cnt <> 0 then
     create temporary table tmp_ccr (
         content sha1_git,
         directory sha1_git,
         path unix_path
     ) on commit drop;
 
     create temporary table tmp_ccrd (
         directory sha1_git,
         revision sha1_git
     ) on commit drop;
 
     insert into tmp_ccrd
       select directory, id as revision
       from tmp_bytea
       inner join revision using(id);
 
     insert into cache_content_revision_processed
       select distinct id from tmp_bytea order by id;
 
     for d in
       select distinct directory from tmp_ccrd
     loop
       insert into tmp_ccr
         select sha1_git as content, d as directory, name as path
         from swh_directory_walk(d)
         where type='file';
     end loop;
 
     with revision_contents as (
       select content, false as blacklisted, array_agg(ARRAY[revision::bytea, path::bytea]) as revision_paths
       from tmp_ccr
       inner join tmp_ccrd using (directory)
       group by content
       order by content
     ), updated_cache_entries as (
       update cache_content_revision ccr
       set revision_paths = ccr.revision_paths || rc.revision_paths
       from revision_contents rc
       where ccr.content = rc.content and ccr.blacklisted = false
       returning ccr.content
     ) insert into cache_content_revision
         select * from revision_contents rc
         where not exists (select 1 from updated_cache_entries uce where uce.content = rc.content)
         order by rc.content
       on conflict (content) do update
         set revision_paths = cache_content_revision.revision_paths || EXCLUDED.revision_paths
         where cache_content_revision.blacklisted = false;
     return;
   else
     return;
   end if;
 end
 $$;
 
 COMMENT ON FUNCTION swh_cache_content_revision_add() IS 'Cache the revisions from tmp_bytea into cache_content_revision';
 
 
 create or replace function swh_occurrence_by_origin_visit(origin_id bigint, visit_id bigint)
     returns setof occurrence
     language sql
     stable
 as $$
   select origin, branch, target, target_type from occurrence_history
   where origin = origin_id and visit_id = ANY(visits);
 $$;
 
 create type cache_content_signature as (
   sha1      sha1,
   sha1_git  sha1_git,
   sha256    sha256,
   revision_paths  bytea[][]
 );
 
 create or replace function swh_cache_content_get_all()
        returns setof cache_content_signature
        language sql
        stable
 as $$
     SELECT c.sha1, c.sha1_git, c.sha256, ccr.revision_paths
     FROM cache_content_revision ccr
     INNER JOIN content as c
     ON ccr.content = c.sha1_git
 $$;
 
 COMMENT ON FUNCTION swh_cache_content_get_all() IS 'Retrieve batch of contents';
 
 
 create or replace function swh_cache_content_get(target sha1_git)
        returns setof cache_content_signature
        language sql
        stable
 as $$
     SELECT c.sha1, c.sha1_git, c.sha256, ccr.revision_paths
     FROM cache_content_revision ccr
     INNER JOIN content as c
     ON ccr.content = c.sha1_git
     where ccr.content = target
 $$;
 
 COMMENT ON FUNCTION swh_cache_content_get(sha1_git) IS 'Retrieve cache content information';
 
 create or replace function swh_revision_from_target(target sha1_git, target_type object_type)
     returns sha1_git
     language plpgsql
 as $$
 #variable_conflict use_variable
 begin
    while target_type = 'release' loop
        select r.target, r.target_type from release r where r.id = target into target, target_type;
    end loop;
    if target_type = 'revision' then
        return target;
    else
        return null;
    end if;
 end
 $$;
 
 create or replace function swh_cache_revision_origin_add(origin_id bigint, visit_id bigint)
     returns setof sha1_git
     language plpgsql
 as $$
 declare
     visit_exists bool;
 begin
   select true from origin_visit where origin = origin_id and visit = visit_id into visit_exists;
 
   if not visit_exists then
       return;
   end if;
 
   visit_exists := null;
 
   select true from cache_revision_origin where origin = origin_id and visit = visit_id limit 1 into visit_exists;
 
   if visit_exists then
       return;
   end if;
 
   return query with new_pointed_revs as (
     select swh_revision_from_target(target, target_type) as id
     from swh_occurrence_by_origin_visit(origin_id, visit_id)
   ),
   old_pointed_revs as (
     select swh_revision_from_target(target, target_type) as id
     from swh_occurrence_by_origin_visit(origin_id,
       (select visit from origin_visit where origin = origin_id and visit < visit_id order by visit desc limit 1))
   ),
   new_revs as (
     select distinct id
     from swh_revision_list(array(select id::bytea from new_pointed_revs where id is not null))
   ),
   old_revs as (
     select distinct id
     from swh_revision_list(array(select id::bytea from old_pointed_revs where id is not null))
   )
   insert into cache_revision_origin (revision, origin, visit)
   select n.id as revision, origin_id, visit_id from new_revs n
     where not exists (
     select 1 from old_revs o
     where o.id = n.id)
    returning revision;
 end
 $$;
 
+-- create a temporary table for content_mimetype missing routine
+create or replace function swh_mktemp_content_mimetype_missing()
+    returns void
+    language sql
+as $$
+  create temporary table tmp_content_mimetype_missing (
+    id sha1,
+    tool_name text,
+    tool_version text
+  ) on commit drop;
+$$;
+
+comment on function swh_mktemp_content_mimetype_missing() IS 'Helper table to filter existing mimetype information';
+
 -- check which entries of tmp_content_mimetype_missing are missing from
 -- content_mimetype
 --
 -- operates in bulk: 0. swh_mktemp_content_mimetype_missing(),
 -- 1. COPY to tmp_content_mimetype_missing, 2. call this function
 create or replace function swh_content_mimetype_missing()
     returns setof sha1
     language plpgsql
 as $$
 begin
     return query
-	(select id::sha1 from tmp_bytea as tmp
+	(select id::sha1 from tmp_content_mimetype_missing as tmp
 	 where not exists
-	     (select 1 from content_mimetype as c where c.id = tmp.id));
+	     (select 1 from content_mimetype as c
+              inner join indexer_configuration i
+              on c.indexer_configuration_id = i.id
+              where c.id = tmp.id
+              and i.tool_name = tmp.tool_name
+              and i.tool_version = tmp.tool_version));
     return;
 end
 $$;
 
-COMMENT ON FUNCTION swh_content_mimetype_missing() IS 'Filter missing content mimetype';
+comment on function swh_content_mimetype_missing() is 'Filter existing mimetype information';
 
+-- create a temporary table for content_mimetype tmp_content_mimetype,
+create or replace function swh_mktemp_content_mimetype()
+    returns void
+    language sql
+as $$
+  create temporary table tmp_content_mimetype (
+    like content_mimetype including defaults
+  ) on commit drop;
+  alter table tmp_content_mimetype
+    drop column indexer_configuration_id,
+    add column tool_name text,
+    add column tool_version text;
+$$;
+
+comment on function swh_mktemp_content_mimetype() IS 'Helper table to add mimetype information';
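+
+-- Sketch of the resulting loader-side workflow (the tool name/version
+-- values must already exist in indexer_configuration; everything else is
+-- illustrative):
+--
+--   select swh_mktemp_content_mimetype();
+--   copy tmp_content_mimetype (id, mimetype, encoding, tool_name, tool_version) from stdin;
+--   select swh_content_mimetype_add(conflict_update := false);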
 
 -- add tmp_content_mimetype entries to content_mimetype, overwriting
 -- duplicates if conflict_update is true, skipping duplicates otherwise.
 --
 -- If filtering duplicates is in order, the call to
 -- swh_content_mimetype_missing must take place before calling this
 -- function.
 --
 --
 -- operates in bulk: 0. swh_mktemp_content_mimetype(), 1. COPY to tmp_content_mimetype,
 -- 2. call this function
 create or replace function swh_content_mimetype_add(conflict_update boolean)
     returns void
     language plpgsql
 as $$
 begin
     if conflict_update then
-        insert into content_mimetype (id, mimetype, encoding)
-        select id, mimetype, encoding
-        from tmp_content_mimetype
-            on conflict(id)
+        insert into content_mimetype (id, mimetype, encoding, indexer_configuration_id)
+        select id, mimetype, encoding,
+               (select id from indexer_configuration
+               where tool_name=tcm.tool_name
+               and tool_version=tcm.tool_version)
+        from tmp_content_mimetype tcm
+            on conflict(id, indexer_configuration_id)
                 do update set mimetype = excluded.mimetype,
-                    encoding = excluded.encoding;
+                              encoding = excluded.encoding;
 
     else
-        insert into content_mimetype (id, mimetype, encoding)
-        select id, mimetype, encoding
-         from tmp_content_mimetype
-            on conflict do nothing;
+        insert into content_mimetype (id, mimetype, encoding, indexer_configuration_id)
+        select id, mimetype, encoding,
+               (select id from indexer_configuration
+               where tool_name=tcm.tool_name
+               and tool_version=tcm.tool_version)
+         from tmp_content_mimetype tcm
+             on conflict(id, indexer_configuration_id) do nothing;
     end if;
     return;
 end
 $$;
 
 comment on function swh_content_mimetype_add(boolean) IS 'Add new content mimetypes';
 
+create type content_mimetype_signature as (
+    id sha1,
+    mimetype bytea,
+    encoding bytea,
+    tool_name text,
+    tool_version text
+);
 
 -- Retrieve the list of content mimetypes from the temporary table.
 --
 -- operates in bulk: 0. swh_mktemp_bytea(), 1. COPY to tmp_bytea,
 -- 2. call this function
 create or replace function swh_content_mimetype_get()
-    returns setof content_mimetype
+    returns setof content_mimetype_signature
     language plpgsql
 as $$
 begin
     return query
-        select id::sha1, mimetype, encoding
+        select c.id, mimetype, encoding, tool_name, tool_version
         from tmp_bytea t
-        inner join content_mimetype using(id);
+        inner join content_mimetype c on c.id=t.id
+        inner join indexer_configuration i on c.indexer_configuration_id=i.id;
     return;
 end
 $$;
 
-comment on function swh_content_mimetype_get() IS 'List content mimetypes';
+comment on function swh_content_mimetype_get() IS 'List content''s mimetypes';
 
 
 -- check which entries of tmp_bytea are missing from content_language
 --
 -- operates in bulk: 0. swh_mktemp_bytea(), 1. COPY to tmp_bytea,
 -- 2. call this function
 create or replace function swh_content_language_missing()
     returns setof sha1
     language plpgsql
 as $$
 begin
     return query
 	(select id::sha1 from tmp_bytea as tmp
 	 where not exists
 	     (select 1 from content_language as c where c.id = tmp.id));
     return;
 end
 $$;
 
 comment on function swh_content_language_missing() IS 'Filter missing content languages';
 
 -- add tmp_content_language entries to content_language, overwriting
 -- duplicates if conflict_update is true, skipping duplicates otherwise.
 --
 -- If filtering duplicates is in order, the call to
 -- swh_content_language_missing must take place before calling this
 -- function.
 --
 -- operates in bulk: 0. swh_mktemp(content_language), 1. COPY to
 -- tmp_content_language, 2. call this function
 create or replace function swh_content_language_add(conflict_update boolean)
     returns void
     language plpgsql
 as $$
 begin
     if conflict_update then
         insert into content_language (id, lang)
         select id, lang
     	from tmp_content_language
             on conflict(id)
                 do update set lang = excluded.lang;
 
     else
         insert into content_language (id, lang)
         select id, lang
     	from tmp_content_language
             on conflict do nothing;
     end if;
     return;
 end
 $$;
 
 comment on function swh_content_language_add(boolean) IS 'Add new content languages';
 
 -- Retrieve the list of content languages from the temporary table.
 --
 -- operates in bulk: 0. swh_mktemp_bytea(), 1. COPY to tmp_bytea, 2. call this function
 create or replace function swh_content_language_get()
     returns setof content_language
     language plpgsql
 as $$
 begin
     return query
         select id::sha1, lang
         from tmp_bytea t
         inner join content_language using(id);
     return;
 end
 $$;
 
 comment on function swh_content_language_get() IS 'List content languages';
 
 
 -- create a temporary table for content_ctags tmp_content_ctags,
 create or replace function swh_mktemp_content_ctags()
     returns void
     language sql
 as $$
   create temporary table tmp_content_ctags (
     like content_ctags including defaults
   ) on commit drop;
   alter table tmp_content_ctags
     drop column indexer_configuration_id,
     add column tool_name text,
     add column tool_version text;
 $$;
 
 comment on function swh_mktemp_content_ctags() is 'Helper table to add content ctags';
 
 
 -- add tmp_content_ctags entries to content_ctags, overwriting
 -- duplicates if conflict_update is true, skipping duplicates otherwise.
 --
 -- operates in bulk: 0. swh_mktemp_content_ctags(), 1. COPY to tmp_content_ctags,
 -- 2. call this function
 create or replace function swh_content_ctags_add(conflict_update boolean)
     returns void
     language plpgsql
 as $$
 begin
     if conflict_update then
         delete from content_ctags
-        where id in (select distinct id from tmp_content_ctags);
+        where id in (
+          select distinct id from tmp_content_ctags
+        );
     end if;
 
     insert into content_ctags (id, name, kind, line, lang, indexer_configuration_id)
     select id, name, kind, line, lang,
            (select id from indexer_configuration
             where tool_name=tct.tool_name
             and tool_version=tct.tool_version)
     from tmp_content_ctags tct
         on conflict(id, md5(name), kind, line, lang, indexer_configuration_id)
         do nothing;
     return;
 end
 $$;
 
 comment on function swh_content_ctags_add(boolean) IS 'Add new ctags symbols per content';
 
 -- create a temporary table for content_ctags missing routine
 create or replace function swh_mktemp_content_ctags_missing()
     returns void
     language sql
 as $$
   create temporary table tmp_content_ctags_missing (
     id           sha1,
     tool_name    text,
     tool_version text
   ) on commit drop;
 $$;
 
 comment on function swh_mktemp_content_ctags_missing() is 'Helper table to filter missing content ctags';
 
 -- check which entries of tmp_content_ctags_missing are missing from content_ctags
 --
 -- operates in bulk: 0. swh_mktemp_content_ctags_missing(), 1. COPY to
 -- tmp_content_ctags_missing, 2. call this function
 create or replace function swh_content_ctags_missing()
     returns setof sha1
     language plpgsql
 as $$
 begin
     return query
 	(select id::sha1 from tmp_content_ctags_missing as tmp
 	 where not exists
 	     (select 1 from content_ctags as c
               inner join indexer_configuration i
               on (i.id = c.indexer_configuration_id
                   and tmp.tool_name = i.tool_name and tmp.tool_version = i.tool_version)
               where c.id = tmp.id limit 1));
     return;
 end
 $$;
 
 comment on function swh_content_ctags_missing() IS 'Filter missing content ctags';
 
 create type content_ctags_signature as (
   id sha1,
   name text,
   kind text,
   line bigint,
   lang ctags_languages,
   tool_name text,
   tool_version text
 );
 
 -- Retrieve list of content ctags from the temporary table.
 --
 -- operates in bulk: 0. mktemp(tmp_bytea), 1. COPY to tmp_bytea, 2. call this function
 create or replace function swh_content_ctags_get()
     returns setof content_ctags_signature
     language plpgsql
 as $$
 begin
     return query
         select c.id, c.name, c.kind, c.line, c.lang, i.tool_name, i.tool_version
         from tmp_bytea t
         inner join content_ctags c using(id)
         inner join indexer_configuration i on i.id = c.indexer_configuration_id
         order by line;
     return;
 end
 $$;
 
 comment on function swh_content_ctags_get() IS 'List content ctags';
 
 create or replace function hash_sha1(text)
        returns text
 as $$
    select encode(digest($1, 'sha1'), 'hex')
 $$ language sql strict immutable;
 
 comment on function hash_sha1(text) is 'Compute sha1 hash as text';
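 
 -- Note: digest() is provided by the pgcrypto extension. Illustrative use:
 --
 --   select hash_sha1('main');  -- 40-character hex sha1 of the text 'main'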
 
 -- Search within ctags content.
 --
 create or replace function swh_content_ctags_search(
        expression text,
        l integer default 10,
        last_sha1 sha1 default '\x0000000000000000000000000000000000000000')
     returns setof content_ctags_signature
     language sql
 as $$
     select c.id, name, kind, line, lang, tool_name, tool_version
     from content_ctags c
     inner join indexer_configuration i on i.id = c.indexer_configuration_id
     where hash_sha1(name) = hash_sha1(expression)
     and c.id > last_sha1
     order by id
     limit l;
 $$;
 
 comment on function swh_content_ctags_search(text, integer, sha1) IS 'Equality search through ctags'' symbols';
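 
 -- Illustrative paginated use: fetch a page of matches, then pass the last
 -- returned id as last_sha1 to get the next page:
 --
 --   select * from swh_content_ctags_search('foo');
 --   select * from swh_content_ctags_search('foo', 10, '\x<last id>');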
 
 -- check which entries of tmp_bytea are missing from content_fossology_license
 --
 -- operates in bulk: 0. swh_mktemp_bytea(), 1. COPY to tmp_bytea,
 -- 2. call this function
 create or replace function swh_content_fossology_license_missing()
     returns setof sha1
     language plpgsql
 as $$
 begin
     return query
 	(select id::sha1 from tmp_bytea as tmp
 	 where not exists
 	     (select 1 from content_fossology_license as c where c.id = tmp.id));
     return;
 end
 $$;
 
 comment on function swh_content_fossology_license_missing() IS 'Filter missing content licenses';
 
 -- add tmp_content_fossology_license entries to content_fossology_license, overwriting
 -- duplicates if conflict_update is true, skipping duplicates otherwise.
 --
 -- If filtering duplicates is in order, the call to
 -- swh_content_fossology_license_missing must take place before calling this
 -- function.
 --
 -- operates in bulk: 0. swh_mktemp(content_fossology_license), 1. COPY to
 -- tmp_content_fossology_license, 2. call this function
 create or replace function swh_content_fossology_license_add(conflict_update boolean)
     returns void
     language plpgsql
 as $$
 begin
     if conflict_update then
         delete from content_fossology_license
         where id in (select distinct id from tmp_content_fossology_license);
     end if;
 
     insert into content_fossology_license (id, license_id, indexer_configuration_id)
     select tcl.id,
           (select id from fossology_license where name = tcl.license) as license,
           (select id from indexer_configuration where tool_name = tcl.tool_name
                                                 and tool_version = tcl.tool_version)
                           as indexer_configuration_id
     from tmp_content_fossology_license tcl
         on conflict(id, license_id, indexer_configuration_id)
         do nothing;
     return;
 end
 $$;
 
 comment on function swh_content_fossology_license_add(boolean) IS 'Add new content licenses';
 
 create or replace function swh_content_fossology_license_unknown()
     returns setof text
     language plpgsql
 as $$
 begin
     return query
         select name from tmp_content_fossology_license_unknown t where not exists (
             select 1 from fossology_license where name=t.name
         );
 end
 $$;
 
 comment on function swh_content_fossology_license_unknown() IS 'List unknown licenses';
 
 create type content_fossology_license_signature as (
   id           sha1,
   tool_name    text,
   tool_version text,
   licenses     text[]
 );
 
 -- Retrieve list of content license from the temporary table.
 --
 -- operates in bulk: 0. mktemp(tmp_bytea), 1. COPY to tmp_bytea,
 -- 2. call this function
 create or replace function swh_content_fossology_license_get()
     returns setof content_fossology_license_signature
     language plpgsql
 as $$
 begin
     return query
       select cl.id,
              ic.tool_name,
              ic.tool_version,
              array(select name
                    from fossology_license
                    where id = ANY(array_agg(cl.license_id))) as licenses
       from tmp_bytea tcl
       inner join content_fossology_license cl using(id)
       inner join indexer_configuration ic on ic.id=cl.indexer_configuration_id
       group by cl.id, ic.tool_name, ic.tool_version;
     return;
 end
 $$;
 
 comment on function swh_content_fossology_license_get() IS 'List content licenses';
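 
 -- Illustrative output shape (placeholder license names): one row per
 -- (content, tool) pair, with all matching license names aggregated into a
 -- text array, e.g. licenses = '{GPL-2.0,LGPL-2.1}'.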
 
 
 -- simple counter mapping a textual label to an integer value
 create type counter as (
     label  text,
     value  bigint
 );
 
 -- return statistics about the number of tuples in various SWH tables
 --
 -- Note: the returned values are based on postgres internal statistics
 -- (pg_class table), which autovacuum only refreshes about once a day
 create or replace function swh_stat_counters()
     returns setof counter
     language sql
     stable
 as $$
     select relname::text as label, reltuples::bigint as value
     from pg_class
     where oid in (
         'public.content'::regclass,
         'public.directory'::regclass,
         'public.directory_entry_dir'::regclass,
         'public.directory_entry_file'::regclass,
         'public.directory_entry_rev'::regclass,
         'public.occurrence'::regclass,
         'public.occurrence_history'::regclass,
         'public.origin'::regclass,
         'public.person'::regclass,
         'public.entity'::regclass,
         'public.entity_history'::regclass,
         'public.release'::regclass,
         'public.revision'::regclass,
         'public.revision_history'::regclass,
         'public.skipped_content'::regclass
     );
 $$;
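 
 -- Illustrative call; one (label, value) row is returned per table listed
 -- above, with approximate counts taken from pg_class.reltuples:
 --
 --   select * from swh_stat_counters();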
diff --git a/sql/upgrades/097.sql b/sql/upgrades/097.sql
index 1ced973b..71726b0a 100644
--- a/sql/upgrades/097.sql
+++ b/sql/upgrades/097.sql
@@ -1,258 +1,369 @@
 -- SWH DB schema upgrade
 -- from_version: 96
 -- to_version: 97
 -- description: Update indexer configuration
 
 insert into dbversion(version, release, description)
       values(97, now(), 'Update indexer configuration');
 
 ------------------------
 -- Update Schema + data
 ------------------------
 
 update indexer_configuration
 set tool_configuration='{"command_line": "nomossa <filepath>"}'
 where tool_name='nomos' and tool_version='3.1.0rc2-31-ga2cbb8c';
 
 insert into indexer_configuration(tool_name, tool_version, tool_configuration)
 values ('universal-ctags', '~git7859817b', '{"command_line": "ctags --fields=+lnz --sort=no --links=no --output-format=json <filepath>"}');
 
 insert into indexer_configuration(tool_name, tool_version, tool_configuration)
 values ('pygments', '2.0.1+dfsg-1.1+deb8u1', '{"type": "library", "debian-package": "python3-pygments"}');
 
 insert into indexer_configuration(tool_name, tool_version, tool_configuration)
 values ('file', '5.22', '{"command_line": "file --mime <filepath>"}');
 
 -- ctags
 
 alter table content_ctags
   add column indexer_configuration_id bigserial;
 
 comment on column content_ctags.indexer_configuration_id is 'Tool used to compute the information';
 
 update content_ctags
 set indexer_configuration_id = (select id
                                 from indexer_configuration
                                 where tool_name='universal-ctags');
 
 alter table content_ctags
   alter column indexer_configuration_id set not null;
 
 alter table content_ctags
   add constraint content_ctags_indexer_configuration_id_idx
   foreign key (indexer_configuration_id) references indexer_configuration(id);
 
 drop index content_ctags_id_md5_kind_line_lang_idx;
 create unique index on content_ctags(id, md5(name), kind, line, lang, indexer_configuration_id);
 
 -- language
 
 alter table content_language
   add column indexer_configuration_id bigserial;
 
 comment on column content_language.indexer_configuration_id is 'Tool used to compute the information';
 
 update content_language
 set indexer_configuration_id = (select id from indexer_configuration where tool_name='pygments');
 
 alter table content_language
   alter column indexer_configuration_id set not null;
 
 alter table content_language
   add constraint content_language_indexer_configuration_id_idx
   foreign key (indexer_configuration_id) references indexer_configuration(id);
 
 alter table content_language
   drop constraint content_language_pkey;
 
 alter table content_language
   add primary key(id, indexer_configuration_id);
 
 -- mimetype
 
 alter table content_mimetype
   add column indexer_configuration_id bigserial;
 
 comment on column content_mimetype.indexer_configuration_id is 'Tool used to compute the information';
 
 update content_mimetype
 set indexer_configuration_id = (select id from indexer_configuration where tool_name='file');
 
 alter table content_mimetype
   alter column indexer_configuration_id
   set not null;
 
 alter table content_mimetype
   add constraint content_mimetype_indexer_configuration_id_idx
   foreign key (indexer_configuration_id) references indexer_configuration(id);
 
 alter table content_mimetype
   drop constraint content_mimetype_pkey;
 
 alter table content_mimetype
   add primary key(id, indexer_configuration_id);
 
 -- fossology-license
 
 comment on column content_fossology_license.indexer_configuration_id is 'Tool used to compute the information';
 
 alter table content_fossology_license
   alter column indexer_configuration_id
   set not null;
 
 alter table content_fossology_license
   add primary key using index content_fossology_license_id_license_id_indexer_configurati_idx;
 
 ---------------------
 -- Update functions
 ---------------------
 
+-- ctags
+
 -- create a temporary table for content_ctags missing routine
 create or replace function swh_mktemp_content_ctags_missing()
     returns void
     language sql
 as $$
   create temporary table tmp_content_ctags_missing (
     id           sha1,
     tool_name    text,
     tool_version text
   ) on commit drop;
 $$;
 
 comment on function swh_mktemp_content_ctags_missing() is 'Helper table to filter missing content ctags';
 
 -- check which entries of tmp_content_ctags_missing are missing from content_ctags
 --
 -- operates in bulk: 0. swh_mktemp_content_ctags_missing(), 1. COPY to
 -- tmp_content_ctags_missing, 2. call this function
 create or replace function swh_content_ctags_missing()
     returns setof sha1
     language plpgsql
 as $$
 begin
     return query
 	(select id::sha1 from tmp_content_ctags_missing as tmp
 	 where not exists
 	     (select 1 from content_ctags as c
               inner join indexer_configuration i
               on (i.id = c.indexer_configuration_id
                   and tmp.tool_name = i.tool_name and tmp.tool_version = i.tool_version)
               where c.id = tmp.id limit 1));
     return;
 end
 $$;
 
--- create a temporary table for content_ctags tmp_content_ctags,
 -- create a temporary table for content_ctags tmp_content_ctags,
 create or replace function swh_mktemp_content_ctags()
     returns void
     language sql
 as $$
   create temporary table tmp_content_ctags (
     like content_ctags including defaults
   ) on commit drop;
   alter table tmp_content_ctags
     drop column indexer_configuration_id,
     add column tool_name text,
     add column tool_version text;
 $$;
 
 comment on function swh_mktemp_content_ctags() is 'Helper table to add content ctags';
 
 create or replace function swh_content_ctags_get()
     returns setof content_ctags_signature
     language plpgsql
 as $$
 begin
     return query
         select c.id, c.name, c.kind, c.line, c.lang, i.tool_name, i.tool_version
         from tmp_bytea t
         inner join content_ctags c using(id)
         inner join indexer_configuration i on i.id = c.indexer_configuration_id
         order by line;
     return;
 end
 $$;
 
 comment on function swh_content_ctags_get() IS 'List content ctags';
 
 
 -- add tmp_content_ctags entries to content_ctags, overwriting
 -- duplicates if conflict_update is true, skipping duplicates otherwise.
 --
 -- operates in bulk: 0. swh_mktemp(content_ctags), 1. COPY to tmp_content_ctags,
 -- 2. call this function
 create or replace function swh_content_ctags_add(conflict_update boolean)
     returns void
     language plpgsql
 as $$
 begin
     if conflict_update then
         delete from content_ctags
         where id in (select distinct id from tmp_content_ctags);
     end if;
 
     insert into content_ctags (id, name, kind, line, lang, indexer_configuration_id)
     select id, name, kind, line, lang,
            (select id from indexer_configuration
             where tool_name=tct.tool_name
             and tool_version=tct.tool_version)
     from tmp_content_ctags tct
         on conflict(id, md5(name), kind, line, lang, indexer_configuration_id)
         do nothing;
     return;
 end
 $$;
 
 comment on function swh_content_ctags_add(boolean) IS 'Add new ctags symbols per content';
 
 drop type content_ctags_signature cascade;
 create type content_ctags_signature as (
   id sha1,
   name text,
   kind text,
   line bigint,
   lang ctags_languages,
   tool_name text,
   tool_version text
 );
 
 
 -- Retrieve list of content ctags from the temporary table.
 --
 -- operates in bulk: 0. mktemp(tmp_bytea), 1. COPY to tmp_bytea, 2. call this function
 create or replace function swh_content_ctags_get()
     returns setof content_ctags_signature
     language plpgsql
 as $$
 begin
     return query
         select c.id, c.name, c.kind, c.line, c.lang, i.tool_name, i.tool_version
         from tmp_bytea t
         inner join content_ctags c using(id)
         inner join indexer_configuration i on i.id = c.indexer_configuration_id
         order by line;
     return;
 end
 $$;
 
 comment on function swh_content_ctags_get() IS 'List content ctags';
 
 -- Search within ctags content.
 --
 create or replace function swh_content_ctags_search(
        expression text,
        l integer default 10,
        last_sha1 sha1 default '\x0000000000000000000000000000000000000000')
     returns setof content_ctags_signature
     language sql
 as $$
     select c.id, name, kind, line, lang, tool_name, tool_version
     from content_ctags c
     inner join indexer_configuration i on i.id = c.indexer_configuration_id
     where hash_sha1(name) = hash_sha1(expression)
     and c.id > last_sha1
     order by id
     limit l;
 $$;
 
 comment on function swh_content_ctags_search(text, integer, sha1) IS 'Equality search through ctags'' symbols';
+
+-- mimetype
+
+-- create a temporary table for content_mimetype tmp_content_mimetype_missing,
+create or replace function swh_mktemp_content_mimetype_missing()
+    returns void
+    language sql
+as $$
+  create temporary table tmp_content_mimetype_missing (
+    id sha1 references content(sha1) not null,
+    tool_name text not null,
+    tool_version text not null
+  ) on commit drop;
+$$;
+
+comment on function swh_mktemp_content_mimetype_missing() IS 'Helper table to filter missing mimetype information';
+
+-- create a temporary table for content_mimetype tmp_content_mimetype,
+create or replace function swh_mktemp_content_mimetype()
+    returns void
+    language sql
+as $$
+  create temporary table tmp_content_mimetype (
+    like content_mimetype including defaults
+  ) on commit drop;
+  alter table tmp_content_mimetype
+    drop column indexer_configuration_id,
+    add column tool_name text,
+    add column tool_version text;
+$$;
+
+comment on function swh_mktemp_content_mimetype() is 'Helper table to add content mimetypes';
+
+create or replace function swh_content_mimetype_missing()
+    returns setof sha1
+    language plpgsql
+as $$
+begin
+    return query
+	(select id::sha1 from tmp_content_mimetype_missing as tmp
+	 where not exists
+	     (select 1 from content_mimetype as c
+              inner join indexer_configuration i
+              on (i.id = c.indexer_configuration_id
+                  and tmp.tool_name = i.tool_name and tmp.tool_version = i.tool_version)
+              where c.id = tmp.id));
+    return;
+end
+$$;
+
+comment on function swh_content_mimetype_missing() is 'Filter missing content mimetypes';
+
+create or replace function swh_content_mimetype_add(conflict_update boolean)
+    returns void
+    language plpgsql
+as $$
+begin
+    if conflict_update then
+        insert into content_mimetype (id, mimetype, encoding, indexer_configuration_id)
+        select id, mimetype, encoding,
+               (select id from indexer_configuration
+               where tool_name=tcm.tool_name
+               and tool_version=tcm.tool_version)
+        from tmp_content_mimetype tcm
+            on conflict(id, indexer_configuration_id)
+                do update set mimetype = excluded.mimetype,
+                              encoding = excluded.encoding;
+
+    else
+        insert into content_mimetype (id, mimetype, encoding, indexer_configuration_id)
+        select id, mimetype, encoding,
+               (select id from indexer_configuration
+               where tool_name=tcm.tool_name
+               and tool_version=tcm.tool_version)
+         from tmp_content_mimetype tcm
+            on conflict(id, indexer_configuration_id)
+            do nothing;
+    end if;
+    return;
+end
+$$;
+
+comment on function swh_content_mimetype_add(boolean) IS 'Add new content mimetypes';
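+
+-- Illustrative bulk-add flow (mirroring the other indexer tables):
+--
+--   select swh_mktemp_content_mimetype();
+--   -- COPY tmp_content_mimetype (id, mimetype, encoding, tool_name,
+--   --                            tool_version) from stdin
+--   select swh_content_mimetype_add(conflict_update := true);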
+
+create type content_mimetype_signature as(
+  id sha1,
+  mimetype bytea,
+  encoding bytea,
+  tool_name text,
+  tool_version text
+);
+
+drop function swh_content_mimetype_get();
+
+create or replace function swh_content_mimetype_get()
+    returns setof content_mimetype_signature
+    language plpgsql
+as $$
+begin
+    return query
+        select c.id, mimetype, encoding, tool_name, tool_version
+        from tmp_bytea t
+        inner join content_mimetype c on c.id=t.id
+        inner join indexer_configuration i on c.indexer_configuration_id=i.id;
+    return;
+end
+$$;
+
+comment on function swh_content_mimetype_get() IS 'List content''s mimetypes';
+
+-- language
diff --git a/swh/storage/converters.py b/swh/storage/converters.py
index 97b7be5e..c0181cdb 100644
--- a/swh/storage/converters.py
+++ b/swh/storage/converters.py
@@ -1,391 +1,406 @@
 # Copyright (C) 2015  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import datetime
 import numbers
 
 from swh.core.utils import decode_with_escape, encode_with_unescape
 
 
 DEFAULT_AUTHOR = {
     'fullname': None,
     'name': None,
     'email': None,
 }
 
 DEFAULT_DATE = {
     'timestamp': None,
     'offset': 0,
     'neg_utc_offset': None,
 }
 
 
 def author_to_db(author):
     """Convert a swh-model author to its DB representation.
 
     Args: a swh-model compatible author
     Returns:
         a dict containing three keys: fullname, name and email
     """
     if author is None:
         return DEFAULT_AUTHOR
 
     return author
 
 
 def db_to_author(id, fullname, name, email):
     """Convert the DB representation of an author to a swh-model author.
 
     Args:
         id (long): the author's identifier
         fullname (bytes): the author's fullname
         name (bytes): the author's name
         email (bytes): the author's email
 
     Returns:
         a dict with four keys: id, fullname, name and email, or None if the id
         is None
     """
 
     if id is None:
         return None
 
     return {
         'id': id,
         'fullname': fullname,
         'name': name,
         'email': email,
     }
 
 
 def git_headers_to_db(git_headers):
     """Convert git headers to their database representation.
 
     We convert the bytes to unicode by decoding them into utf-8 and replacing
     invalid utf-8 sequences with backslash escapes.
 
     """
     ret = []
     for key, values in git_headers:
         if isinstance(values, list):
             ret.append([key, [decode_with_escape(value) for value in values]])
         else:
             ret.append([key, decode_with_escape(values)])
 
     return ret
 
 
 def db_to_git_headers(db_git_headers):
     ret = []
     for key, values in db_git_headers:
         if isinstance(values, list):
             ret.append([key, [encode_with_unescape(value)
                               for value in values]])
         else:
             ret.append([key, encode_with_unescape(values)])
 
     return ret
 
 
 def db_to_date(date, offset, neg_utc_offset):
     """Convert the DB representation of a date to a swh-model compatible date.
 
     Args:
         date (datetime.datetime): a date pulled out of the database
         offset (int): an integer number of minutes representing a UTC offset
         neg_utc_offset (boolean): whether a null UTC offset is negative
 
     Returns:
         a dict with three keys:
             timestamp: a POSIX timestamp (UTC)
             offset: the UTC offset in minutes
             negative_utc: whether a null UTC offset is negative
     """
 
     if date is None:
         return None
 
     return {
         'timestamp': date.timestamp(),
         'offset': offset,
         'negative_utc': neg_utc_offset,
     }
 
 
 def date_to_db(date_offset):
     """Convert a swh-model date_offset to its DB representation.
 
     Args: a swh-model compatible date_offset
     Returns:
         a dict with three keys:
             timestamp: a date in ISO format
             offset: the UTC offset in minutes
             neg_utc_offset: a boolean indicating whether a null offset is
                             negative or positive.
 
     """
 
     if date_offset is None:
         return DEFAULT_DATE
 
     if isinstance(date_offset, numbers.Real):
         date_offset = datetime.datetime.fromtimestamp(date_offset,
                                                       tz=datetime.timezone.utc)
 
     if isinstance(date_offset, datetime.datetime):
         timestamp = date_offset
         utcoffset = date_offset.utcoffset()
         offset = int(utcoffset.total_seconds()) // 60
         neg_utc_offset = False if offset == 0 else None
     else:
         if isinstance(date_offset['timestamp'], numbers.Real):
             timestamp = datetime.datetime.fromtimestamp(
                 date_offset['timestamp'], tz=datetime.timezone.utc)
         else:
             timestamp = date_offset['timestamp']
         offset = date_offset['offset']
         neg_utc_offset = date_offset.get('negative_utc', None)
 
     return {
         'timestamp': timestamp.isoformat(),
         'offset': offset,
         'neg_utc_offset': neg_utc_offset,
     }
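 
 # Illustrative example (not part of the original module): a bare POSIX
 # timestamp is promoted to an aware UTC datetime before serialization:
 #
 #   date_to_db(1420070400)
 #   == {'timestamp': '2015-01-01T00:00:00+00:00', 'offset': 0,
 #       'neg_utc_offset': False}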
 
 
 def revision_to_db(revision):
     """Convert a swh-model revision to its database representation.
     """
 
     author = author_to_db(revision['author'])
     date = date_to_db(revision['date'])
     committer = author_to_db(revision['committer'])
     committer_date = date_to_db(revision['committer_date'])
 
     metadata = revision['metadata']
 
     if metadata and 'extra_headers' in metadata:
         metadata = metadata.copy()
         extra_headers = git_headers_to_db(metadata['extra_headers'])
         metadata['extra_headers'] = extra_headers
 
     return {
         'id': revision['id'],
         'author_fullname': author['fullname'],
         'author_name': author['name'],
         'author_email': author['email'],
         'date': date['timestamp'],
         'date_offset': date['offset'],
         'date_neg_utc_offset': date['neg_utc_offset'],
         'committer_fullname': committer['fullname'],
         'committer_name': committer['name'],
         'committer_email': committer['email'],
         'committer_date': committer_date['timestamp'],
         'committer_date_offset': committer_date['offset'],
         'committer_date_neg_utc_offset': committer_date['neg_utc_offset'],
         'type': revision['type'],
         'directory': revision['directory'],
         'message': revision['message'],
         'metadata': metadata,
         'synthetic': revision['synthetic'],
         'parents': [
             {
                 'id': revision['id'],
                 'parent_id': parent,
                 'parent_rank': i,
             } for i, parent in enumerate(revision['parents'])
         ],
     }
 
 
 def db_to_revision(db_revision):
     """Convert a database representation of a revision to its swh-model
     representation."""
 
     author = db_to_author(
         db_revision['author_id'],
         db_revision['author_fullname'],
         db_revision['author_name'],
         db_revision['author_email'],
     )
     date = db_to_date(
         db_revision['date'],
         db_revision['date_offset'],
         db_revision['date_neg_utc_offset'],
     )
 
     committer = db_to_author(
         db_revision['committer_id'],
         db_revision['committer_fullname'],
         db_revision['committer_name'],
         db_revision['committer_email'],
     )
     committer_date = db_to_date(
         db_revision['committer_date'],
         db_revision['committer_date_offset'],
         db_revision['committer_date_neg_utc_offset']
     )
 
     metadata = db_revision['metadata']
 
     if metadata and 'extra_headers' in metadata:
         extra_headers = db_to_git_headers(metadata['extra_headers'])
         metadata['extra_headers'] = extra_headers
 
     parents = []
     if 'parents' in db_revision:
         for parent in db_revision['parents']:
             if parent:
                 parents.append(parent)
 
     ret = {
         'id': db_revision['id'],
         'author': author,
         'date': date,
         'committer': committer,
         'committer_date': committer_date,
         'type': db_revision['type'],
         'directory': db_revision['directory'],
         'message': db_revision['message'],
         'metadata': metadata,
         'synthetic': db_revision['synthetic'],
         'parents': parents,
     }
 
     if 'object_id' in db_revision:
         ret['object_id'] = db_revision['object_id']
 
     return ret
 
 
 def release_to_db(release):
     """Convert a swh-model release to its database representation.
     """
 
     author = author_to_db(release['author'])
     date = date_to_db(release['date'])
 
     return {
         'id': release['id'],
         'author_fullname': author['fullname'],
         'author_name': author['name'],
         'author_email': author['email'],
         'date': date['timestamp'],
         'date_offset': date['offset'],
         'date_neg_utc_offset': date['neg_utc_offset'],
         'name': release['name'],
         'target': release['target'],
         'target_type': release['target_type'],
         'comment': release['message'],
         'synthetic': release['synthetic'],
     }
 
 
 def db_to_release(db_release):
     """Convert a database representation of a release to its swh-model
     representation.
     """
 
     author = db_to_author(
         db_release['author_id'],
         db_release['author_fullname'],
         db_release['author_name'],
         db_release['author_email'],
     )
     date = db_to_date(
         db_release['date'],
         db_release['date_offset'],
         db_release['date_neg_utc_offset']
     )
 
     ret = {
         'author': author,
         'date': date,
         'id': db_release['id'],
         'name': db_release['name'],
         'message': db_release['comment'],
         'synthetic': db_release['synthetic'],
         'target': db_release['target'],
         'target_type': db_release['target_type'],
     }
 
     if 'object_id' in db_release:
         ret['object_id'] = db_release['object_id']
 
     return ret
 
 
 def ctags_to_db(ctags):
     """Convert a ctags entry into a ready ctags entry.
 
     Args:
         ctags (dict): ctags entry with the following keys:
         - id (bytes): content's identifier
         - tool_name (str): tool name used to compute ctags
         - tool_version (str): associated tool's version
         - ctags ([dict]): List of dictionary with the following keys:
           - name (str): symbol's name
           - kind (str): symbol's kind
           - line (int): symbol's line in the content
           - language (str): language
 
     Returns:
         List of ctags ready entry (dict with the following keys):
         - id (bytes): content's identifier
         - name (str): symbol's name
         - kind (str): symbol's kind
         - language (str): language for that content
         - tool_name (str): tool name used to compute ctags
         - tool_version (str): associated tool's version
 
     """
     res = []
     id = ctags['id']
     tool_name = ctags['tool_name']
     tool_version = ctags['tool_version']
     for ctag in ctags['ctags']:
         res.append({
             'id': id,
             'name': ctag['name'],
             'kind': ctag['kind'],
             'line': ctag['line'],
             'lang': ctag['lang'],
             'tool_name': tool_name,
             'tool_version': tool_version,
         })
     return res
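 
 # Illustrative input/output (made-up symbol data): one flat entry is emitted
 # per ctag, repeating the content id and tool information on each:
 #
 #   ctags_to_db({'id': b'\x01' * 20,
 #                'tool_name': 'universal-ctags',
 #                'tool_version': '~git7859817b',
 #                'ctags': [{'name': 'main', 'kind': 'function',
 #                           'line': 12, 'lang': 'C'}]})
 #   == [{'id': b'\x01' * 20, 'name': 'main', 'kind': 'function',
 #        'line': 12, 'lang': 'C', 'tool_name': 'universal-ctags',
 #        'tool_version': '~git7859817b'}]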
 
 
 def db_to_ctags(ctag):
     """Convert a ctags entry into a ready ctags entry.
 
     Args:
         ctags (dict): ctags entry with the following keys:
         - id (bytes): content's identifier
         - ctags ([dict]): List of dictionary with the following keys:
           - name (str): symbol's name
           - kind (str): symbol's kind
           - line (int): symbol's line in the content
           - language (str): language
 
     Returns:
         List of ctags ready entry (dict with the following keys):
         - id (bytes): content's identifier
         - name (str): symbol's name
         - kind (str): symbol's kind
         - language (str): language for that content
 
     """
     return {
         'id': ctag['id'],
         'name': ctag['name'],
         'kind': ctag['kind'],
         'line': ctag['line'],
         'lang': ctag['lang'],
         'tool': {
             'name': ctag['tool_name'],
             'version': ctag['tool_version'],
         }
     }
+
+
+def db_to_mimetype(mimetype):
+    """Convert a ctags entry into a ready ctags entry.
+
+    """
+    return {
+        'id': mimetype['id'],
+        'encoding': mimetype['encoding'],
+        'mimetype': mimetype['mimetype'],
+        'tool': {
+            'name': mimetype['tool_name'],
+            'version': mimetype['tool_version'],
+        }
+    }
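+
+
+# Illustrative round trip (made-up values): the flat DB row keys tool_name
+# and tool_version are regrouped under a nested 'tool' key:
+#
+#   db_to_mimetype({'id': b'\x01' * 20, 'encoding': b'us-ascii',
+#                   'mimetype': b'text/plain', 'tool_name': 'file',
+#                   'tool_version': '5.22'})['tool']
+#   == {'name': 'file', 'version': '5.22'}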
diff --git a/swh/storage/db.py b/swh/storage/db.py
index 18abd6f3..e0c1af4a 100644
--- a/swh/storage/db.py
+++ b/swh/storage/db.py
@@ -1,941 +1,948 @@
 # Copyright (C) 2015-2016  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import binascii
 import datetime
 import functools
 import json
 import psycopg2
 import psycopg2.extras
 import select
 import tempfile
 
 from contextlib import contextmanager
 
 from swh.core import hashutil
 
 TMP_CONTENT_TABLE = 'tmp_content'
 
 
 psycopg2.extras.register_uuid()
 
 
 def stored_procedure(stored_proc):
     """decorator to execute remote stored procedure, specified as argument
 
     Generally, the body of the decorated function should be empty. If it is
     not, the stored procedure will be executed first; the function body then.
 
     """
     def wrap(meth):
         @functools.wraps(meth)
         def _meth(self, *args, **kwargs):
             cur = kwargs.get('cur', None)
             self._cursor(cur).execute('SELECT %s()' % stored_proc)
             meth(self, *args, **kwargs)
         return _meth
     return wrap
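 
 # Illustrative use (as on the Db methods below): the decorated body is
 # empty, the named SQL function does all the work:
 #
 #   @stored_procedure('swh_mktemp_bytea')
 #   def mktemp_bytea(self, cur=None): pass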
 
 
 def jsonize(value):
     """Convert a value to a psycopg2 JSON object if necessary"""
     if isinstance(value, dict):
         return psycopg2.extras.Json(value)
 
     return value
 
 
 def entry_to_bytes(entry):
     """Convert an entry coming from the database to bytes"""
     if isinstance(entry, memoryview):
         return entry.tobytes()
     if isinstance(entry, list):
         return [entry_to_bytes(value) for value in entry]
     return entry
 
 
 def line_to_bytes(line):
     """Convert a line coming from the database to bytes"""
     if not line:
         return line
     if isinstance(line, dict):
         return {k: entry_to_bytes(v) for k, v in line.items()}
     return line.__class__(entry_to_bytes(entry) for entry in line)
 
 
 def cursor_to_bytes(cursor):
     """Yield all the data from a cursor as bytes"""
     yield from (line_to_bytes(line) for line in cursor)
 
 
 class BaseDb:
     """Base class for swh.storage.*Db.
 
     cf. swh.storage.db.Db, swh.storage.archiver.db.ArchiverDb
 
     """
 
     @classmethod
     def connect(cls, *args, **kwargs):
         """factory method to create a DB proxy
 
         Accepts all arguments of psycopg2.connect; only the most relevant
         ones are documented below.
 
         Args:
             connstring: libpq connection string
 
         """
         conn = psycopg2.connect(*args, **kwargs)
         return cls(conn)
 
     def _cursor(self, cur_arg):
         """get a cursor: from cur_arg if given, or a fresh one otherwise
 
         meant to avoid boilerplate if/then/else in methods that proxy stored
         procedures
 
         """
         if cur_arg is not None:
             return cur_arg
         # elif self.cur is not None:
         #     return self.cur
         else:
             return self.conn.cursor()
 
     def __init__(self, conn):
         """create a DB proxy
 
         Args:
             conn: psycopg2 connection to the SWH DB
 
         """
         self.conn = conn
 
     @contextmanager
     def transaction(self):
         """context manager to execute within a DB transaction
 
         Yields:
             a psycopg2 cursor
 
         """
         with self.conn.cursor() as cur:
             try:
                 yield cur
                 self.conn.commit()
             except:
                 if not self.conn.closed:
                     self.conn.rollback()
                 raise
 
     def copy_to(self, items, tblname, columns, cur=None, item_cb=None):
         """Copy items' entries to table tblname with columns information.
 
         Args:
             items (iterable): dictionaries of data to copy to tblname,
               one per row
             tblname (str): destination table's name
             columns ([str]): keys to access data in items and also the
               column names in the destination table.
             item_cb (fn): optional function to apply to each item
 
         """
         def escape(data):
             if data is None:
                 return ''
             if isinstance(data, bytes):
                 return '\\x%s' % binascii.hexlify(data).decode('ascii')
             elif isinstance(data, str):
                 return '"%s"' % data.replace('"', '""')
             elif isinstance(data, datetime.datetime):
                 # We escape twice to make sure the string generated by
                 # isoformat gets escaped
                 return escape(data.isoformat())
             elif isinstance(data, dict):
                 return escape(json.dumps(data))
             elif isinstance(data, list):
                 return escape("{%s}" % ','.join(escape(d) for d in data))
             elif isinstance(data, psycopg2.extras.Range):
                 # We escape twice here too, so that we make sure
                 # everything gets passed to copy properly
                 return escape(
                     '%s%s,%s%s' % (
                         '[' if data.lower_inc else '(',
                         '-infinity' if data.lower_inf else escape(data.lower),
                         'infinity' if data.upper_inf else escape(data.upper),
                         ']' if data.upper_inc else ')',
                     )
                 )
             else:
                 # We don't escape here to make sure we pass literals properly
                 return str(data)
         with tempfile.TemporaryFile('w+') as f:
             for d in items:
                 if item_cb is not None:
                     item_cb(d)
                 line = [escape(d.get(k)) for k in columns]
                 f.write(','.join(line))
                 f.write('\n')
             f.seek(0)
             self._cursor(cur).copy_expert('COPY %s (%s) FROM STDIN CSV' % (
                 tblname, ', '.join(columns)), f)
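 
     # Illustrative use of copy_to (as in store_tmp_bytea below): stream
     # dicts into a temporary table, one CSV row per item:
     #
     #   db.copy_to(({'id': h} for h in hashes), 'tmp_bytea', ['id'], cur)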
 
 
 class Db(BaseDb):
     """Proxy to the SWH DB, with wrappers around stored procedures
 
     """
     def mktemp(self, tblname, cur=None):
         self._cursor(cur).execute('SELECT swh_mktemp(%s)', (tblname,))
 
     def mktemp_dir_entry(self, entry_type, cur=None):
         self._cursor(cur).execute('SELECT swh_mktemp_dir_entry(%s)',
                                   (('directory_entry_%s' % entry_type),))
 
     @stored_procedure('swh_mktemp_revision')
     def mktemp_revision(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_release')
     def mktemp_release(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_occurrence_history')
     def mktemp_occurrence_history(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_entity_lister')
     def mktemp_entity_lister(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_entity_history')
     def mktemp_entity_history(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_bytea')
     def mktemp_bytea(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_content_ctags')
     def mktemp_content_ctags(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_content_ctags_missing')
     def mktemp_content_ctags_missing(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_content_fossology_license')
     def mktemp_content_fossology_license(self, cur=None): pass
 
     @stored_procedure('swh_mktemp_content_fossology_license_unknown')
     def mktemp_content_fossology_license_unknown(self, cur=None): pass
 
     def register_listener(self, notify_queue, cur=None):
         """Register a listener for NOTIFY queue `notify_queue`"""
         self._cursor(cur).execute("LISTEN %s" % notify_queue)
 
     def listen_notifies(self, timeout):
         """Listen to notifications for `timeout` seconds"""
         if select.select([self.conn], [], [], timeout) == ([], [], []):
             return
         else:
             self.conn.poll()
             while self.conn.notifies:
                 yield self.conn.notifies.pop(0)
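 
     # Illustrative use (hypothetical queue name and handler):
     #
     #   db.register_listener('new_origin')
     #   for notify in db.listen_notifies(timeout=1.0):
     #       handle(notify)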
 
     @stored_procedure('swh_content_add')
     def content_add_from_temp(self, cur=None): pass
 
     @stored_procedure('swh_directory_add')
     def directory_add_from_temp(self, cur=None): pass
 
     @stored_procedure('swh_skipped_content_add')
     def skipped_content_add_from_temp(self, cur=None): pass
 
     @stored_procedure('swh_revision_add')
     def revision_add_from_temp(self, cur=None): pass
 
     @stored_procedure('swh_release_add')
     def release_add_from_temp(self, cur=None): pass
 
     @stored_procedure('swh_occurrence_history_add')
     def occurrence_history_add_from_temp(self, cur=None): pass
 
     @stored_procedure('swh_entity_history_add')
     def entity_history_add_from_temp(self, cur=None): pass
 
     @stored_procedure('swh_cache_content_revision_add')
     def cache_content_revision_add(self, cur=None): pass
 
     def store_tmp_bytea(self, ids, cur=None):
         """Store the given identifiers in a new tmp_bytea table"""
         cur = self._cursor(cur)
 
         self.mktemp_bytea(cur)
         self.copy_to(({'id': elem} for elem in ids), 'tmp_bytea',
                      ['id'], cur)
 
     content_get_metadata_keys = ['sha1', 'sha1_git', 'sha256', 'length',
                                  'status']
 
     def content_get_metadata_from_temp(self, cur=None):
         cur = self._cursor(cur)
         cur.execute("""select t.id as sha1, %s from tmp_bytea t
                        left join content on t.id = content.sha1
                     """ % ', '.join(self.content_get_metadata_keys[1:]))
 
         yield from cursor_to_bytes(cur)
 
     def content_missing_from_temp(self, cur=None):
         cur = self._cursor(cur)
 
         cur.execute("""SELECT sha1, sha1_git, sha256
                        FROM swh_content_missing()""")
 
         yield from cursor_to_bytes(cur)
 
     def content_missing_per_sha1_from_temp(self, cur=None):
         cur = self._cursor(cur)
 
         cur.execute("""SELECT *
                        FROM swh_content_missing_per_sha1()""")
 
         yield from cursor_to_bytes(cur)
 
     def skipped_content_missing_from_temp(self, cur=None):
         cur = self._cursor(cur)
 
         cur.execute("""SELECT sha1, sha1_git, sha256
                        FROM swh_skipped_content_missing()""")
 
         yield from cursor_to_bytes(cur)
 
     def occurrence_get(self, origin_id, cur=None):
         """Retrieve latest occurrence's information by origin_id.
 
         """
         cur = self._cursor(cur)
 
         cur.execute("""SELECT origin, branch, target, target_type,
                               (select max(date) from origin_visit
                                where origin=%s) as date
                        FROM occurrence
                        WHERE origin=%s
                     """,
                     (origin_id, origin_id))
 
         yield from cursor_to_bytes(cur)
 
     def content_find(self, sha1=None, sha1_git=None, sha256=None, cur=None):
         """Find the content optionally on a combination of the following
         checksums sha1, sha1_git or sha256.
 
         Args:
             sha1: sha1 content
             git_sha1: the sha1 computed `a la git` sha1 of the content
             sha256: sha256 content
 
         Returns:
             The triplet (sha1, sha1_git, sha256) if found or None.
 
         """
         cur = self._cursor(cur)
 
         cur.execute("""SELECT sha1, sha1_git, sha256, length, ctime, status
                        FROM swh_content_find(%s, %s, %s)
                        LIMIT 1""", (sha1, sha1_git, sha256))
 
         content = line_to_bytes(cur.fetchone())
         if set(content) == {None}:
             return None
         else:
             return content
 
     provenance_cols = ['content', 'revision', 'origin', 'visit', 'path']
 
     def content_find_provenance(self, sha1_git, cur=None):
         """Find content's provenance information
 
         Args:
             sha1_git: the sha1_git of the content
             cur: cursor to use
 
         Returns:
             Provenance information for that content
 
         """
         cur = self._cursor(cur)
 
         cur.execute("""SELECT content, revision, origin, visit, path
                        FROM swh_content_find_provenance(%s)""",
                     (sha1_git, ))
 
         yield from cursor_to_bytes(cur)
 
     def directory_get_from_temp(self, cur=None):
         cur = self._cursor(cur)
         cur.execute('''SELECT id, file_entries, dir_entries, rev_entries
                        FROM swh_directory_get()''')
         yield from cursor_to_bytes(cur)
 
     def directory_missing_from_temp(self, cur=None):
         cur = self._cursor(cur)
         cur.execute('SELECT * FROM swh_directory_missing()')
         yield from cursor_to_bytes(cur)
 
     directory_ls_cols = ['dir_id', 'type', 'target', 'name', 'perms',
                          'status', 'sha1', 'sha1_git', 'sha256']
 
     def directory_walk_one(self, directory, cur=None):
         cur = self._cursor(cur)
         cur.execute('SELECT * FROM swh_directory_walk_one(%s)', (directory,))
         yield from cursor_to_bytes(cur)
 
     def directory_walk(self, directory, cur=None):
         cur = self._cursor(cur)
         cur.execute('SELECT * FROM swh_directory_walk(%s)', (directory,))
         yield from cursor_to_bytes(cur)
 
     def revision_missing_from_temp(self, cur=None):
         cur = self._cursor(cur)
 
         cur.execute('SELECT id FROM swh_revision_missing() as r(id)')
 
         yield from cursor_to_bytes(cur)
 
     revision_add_cols = [
         'id', 'date', 'date_offset', 'date_neg_utc_offset', 'committer_date',
         'committer_date_offset', 'committer_date_neg_utc_offset', 'type',
         'directory', 'message', 'author_fullname', 'author_name',
         'author_email', 'committer_fullname', 'committer_name',
         'committer_email', 'metadata', 'synthetic',
     ]
 
     revision_get_cols = revision_add_cols + [
         'author_id', 'committer_id', 'parents']
 
     def origin_visit_add(self, origin, ts, cur=None):
         """Add a new origin_visit for origin origin at timestamp ts with
         status 'ongoing'.
 
         Args:
             origin: origin concerned by the visit
             ts: the date of the visit
 
         Returns:
             The new visit index step for that origin
 
         """
         cur = self._cursor(cur)
         cur.execute('SELECT swh_origin_visit_add(%s, %s)',
                     (origin, ts))
         return cur.fetchone()[0]
 
     def origin_visit_update(self, origin, visit_id, status,
                             metadata, cur=None):
         """Update origin_visit's status."""
         cur = self._cursor(cur)
         update = """UPDATE origin_visit
                     SET status=%s, metadata=%s
                     WHERE origin=%s AND visit=%s"""
         cur.execute(update, (status, jsonize(metadata), origin, visit_id))
 
     origin_visit_get_cols = ['origin', 'visit', 'date', 'status', 'metadata']
 
     def origin_visit_get_all(self, origin_id, cur=None):
         """Retrieve all visits for origin with id origin_id.
 
         Args:
             origin_id: the origin concerned
 
         Yields:
             The origin's visits
 
         """
         cur = self._cursor(cur)
 
         query = """\
         SELECT %s
         FROM origin_visit
         WHERE origin=%%s""" % (', '.join(self.origin_visit_get_cols))
 
         cur.execute(query, (origin_id, ))
 
         yield from cursor_to_bytes(cur)
 
     def origin_visit_get(self, origin_id, visit_id, cur=None):
         """Retrieve information on visit visit_id of origin origin_id.
 
         Args:
             origin_id: the origin concerned
             visit_id: The visit step for that origin
 
         Returns:
             The origin_visit information
 
         """
         cur = self._cursor(cur)
 
         query = """\
             SELECT %s
             FROM origin_visit
             WHERE origin = %%s AND visit = %%s
             """ % (', '.join(self.origin_visit_get_cols))
 
         cur.execute(query, (origin_id, visit_id))
         r = cur.fetchall()
         if not r:
             return None
         return line_to_bytes(r[0])
 
     occurrence_cols = ['origin', 'branch', 'target', 'target_type']
 
     def occurrence_by_origin_visit(self, origin_id, visit_id, cur=None):
         """Retrieve all occurrences for a particular origin_visit.
 
         Args:
             origin_id: the origin concerned
             visit_id: The visit step for that origin
 
         Yields:
             The occurrences for that origin visit
 
         """
         cur = self._cursor(cur)
 
         query = """\
             SELECT %s
             FROM swh_occurrence_by_origin_visit(%%s, %%s)
             """ % (', '.join(self.occurrence_cols))
 
         cur.execute(query, (origin_id, visit_id))
         yield from cursor_to_bytes(cur)
 
     def revision_get_from_temp(self, cur=None):
         cur = self._cursor(cur)
         query = 'SELECT %s FROM swh_revision_get()' % (
             ', '.join(self.revision_get_cols))
         cur.execute(query)
         yield from cursor_to_bytes(cur)
 
     def revision_log(self, root_revisions, limit=None, cur=None):
         cur = self._cursor(cur)
 
         query = """SELECT %s
                    FROM swh_revision_log(%%s, %%s)
                 """ % ', '.join(self.revision_get_cols)
 
         cur.execute(query, (root_revisions, limit))
         yield from cursor_to_bytes(cur)
 
     revision_shortlog_cols = ['id', 'parents']
 
     def revision_shortlog(self, root_revisions, limit=None, cur=None):
         cur = self._cursor(cur)
 
         query = """SELECT %s
                    FROM swh_revision_list(%%s, %%s)
                 """ % ', '.join(self.revision_shortlog_cols)
 
         cur.execute(query, (root_revisions, limit))
         yield from cursor_to_bytes(cur)
 
     cache_content_get_cols = [
         'sha1', 'sha1_git', 'sha256', 'revision_paths']
 
     def cache_content_get_all(self, cur=None):
         """Retrieve cache contents' sha1, sha256, sha1_git
 
         """
         cur = self._cursor(cur)
         cur.execute('SELECT * FROM swh_cache_content_get_all()')
         yield from cursor_to_bytes(cur)
 
     def cache_content_get(self, sha1_git, cur=None):
         """Retrieve cache content information sh.
 
         """
         cur = self._cursor(cur)
         cur.execute('SELECT * FROM swh_cache_content_get(%s)', (sha1_git, ))
         data = cur.fetchone()
         if data:
             return line_to_bytes(data)
         return None
 
     def cache_revision_origin_add(self, origin, visit, cur=None):
         """Populate the content provenance information cache for the given
            (origin, visit) couple."""
         cur = self._cursor(cur)
         cur.execute('SELECT * FROM swh_cache_revision_origin_add(%s, %s)',
                     (origin, visit))
         yield from cursor_to_bytes(cur)
 
     def release_missing_from_temp(self, cur=None):
         cur = self._cursor(cur)
         cur.execute('SELECT id FROM swh_release_missing() as r(id)')
         yield from cursor_to_bytes(cur)
 
     object_find_by_sha1_git_cols = ['sha1_git', 'type', 'id', 'object_id']
 
     def object_find_by_sha1_git(self, ids, cur=None):
         cur = self._cursor(cur)
 
         self.store_tmp_bytea(ids, cur)
         query = 'select %s from swh_object_find_by_sha1_git()' % (
             ', '.join(self.object_find_by_sha1_git_cols)
         )
         cur.execute(query)
 
         yield from cursor_to_bytes(cur)
 
     def stat_counters(self, cur=None):
         cur = self._cursor(cur)
         cur.execute('SELECT * FROM swh_stat_counters()')
         yield from cur
 
     fetch_history_cols = ['origin', 'date', 'status', 'result', 'stdout',
                           'stderr', 'duration']
 
     def create_fetch_history(self, fetch_history, cur=None):
         """Create a fetch_history entry with the data in fetch_history"""
         cur = self._cursor(cur)
         query = '''INSERT INTO fetch_history (%s)
                    VALUES (%s) RETURNING id''' % (
             ','.join(self.fetch_history_cols),
             ','.join(['%s'] * len(self.fetch_history_cols))
         )
         cur.execute(query, [fetch_history.get(col) for col in
                             self.fetch_history_cols])
 
         return cur.fetchone()[0]
 
     def get_fetch_history(self, fetch_history_id, cur=None):
         """Get a fetch_history entry with the given id"""
         cur = self._cursor(cur)
         query = '''SELECT %s FROM fetch_history WHERE id=%%s''' % (
             ', '.join(self.fetch_history_cols),
         )
         cur.execute(query, (fetch_history_id,))
 
         data = cur.fetchone()
 
         if not data:
             return None
 
         ret = {'id': fetch_history_id}
         for i, col in enumerate(self.fetch_history_cols):
             ret[col] = data[i]
 
         return ret
 
     def update_fetch_history(self, fetch_history, cur=None):
         """Update the fetch_history entry from the data in fetch_history"""
         cur = self._cursor(cur)
         query = '''UPDATE fetch_history
                    SET %s
                    WHERE id=%%s''' % (
             ','.join('%s=%%s' % col for col in self.fetch_history_cols)
         )
         cur.execute(query, [jsonize(fetch_history.get(col)) for col in
                             self.fetch_history_cols + ['id']])
 
     base_entity_cols = ['uuid', 'parent', 'name', 'type',
                         'description', 'homepage', 'active',
                         'generated', 'lister_metadata',
                         'metadata']
 
     entity_cols = base_entity_cols + ['last_seen', 'last_id']
     entity_history_cols = base_entity_cols + ['id', 'validity']
 
     def origin_add(self, type, url, cur=None):
         """Insert a new origin and return the new identifier."""
         insert = """INSERT INTO origin (type, url) values (%s, %s)
                     RETURNING id"""
 
         cur.execute(insert, (type, url))
         return cur.fetchone()[0]
 
     def origin_get_with(self, type, url, cur=None):
         """Retrieve the origin id from its type and url if found."""
         cur = self._cursor(cur)
 
         query = """SELECT id, type, url, lister, project
                    FROM origin
                    WHERE type=%s AND url=%s"""
 
         cur.execute(query, (type, url))
         data = cur.fetchone()
         if data:
             return line_to_bytes(data)
         return None
 
     def origin_get(self, id, cur=None):
         """Retrieve the origin per its identifier.
 
         """
         cur = self._cursor(cur)
 
         query = "SELECT id, type, url, lister, project FROM origin WHERE id=%s"
 
         cur.execute(query, (id,))
         data = cur.fetchone()
         if data:
             return line_to_bytes(data)
         return None
 
     person_cols = ['fullname', 'name', 'email']
     person_get_cols = person_cols + ['id']
 
     def person_add(self, person, cur=None):
         """Add a person identified by its name and email.
 
         Returns:
             The new person's id
 
         """
         cur = self._cursor(cur)
 
         query_new_person = '''\
         INSERT INTO person(%s)
         VALUES (%s)
         RETURNING id''' % (
             ', '.join(self.person_cols),
             ', '.join('%s' for i in range(len(self.person_cols)))
         )
         cur.execute(query_new_person,
                     [person[col] for col in self.person_cols])
         return cur.fetchone()[0]
 
     def person_get(self, ids, cur=None):
         """Retrieve the persons identified by the list of ids.
 
         """
         cur = self._cursor(cur)
 
         query = """SELECT %s
                    FROM person
                    WHERE id IN %%s""" % ', '.join(self.person_get_cols)
 
         cur.execute(query, (tuple(ids),))
         yield from cursor_to_bytes(cur)
 
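     # Note: psycopg2 adapts a Python tuple parameter to a parenthesized
     # SQL list, so the "IN %s" placeholder above expands to e.g.
     # "IN (1, 2, 3)". Usage sketch (hypothetical ids), yielding rows in
     # person_get_cols order:
     #
     #   for fullname, name, email, id in db.person_get([1, 2, 3]):
     #       ...
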
     release_add_cols = [
         'id', 'target', 'target_type', 'date', 'date_offset',
         'date_neg_utc_offset', 'name', 'comment', 'synthetic',
         'author_fullname', 'author_name', 'author_email',
     ]
     release_get_cols = release_add_cols + ['author_id']
 
     def release_get_from_temp(self, cur=None):
         cur = self._cursor(cur)
         query = '''
         SELECT %s
             FROM swh_release_get()
         ''' % ', '.join(self.release_get_cols)
         cur.execute(query)
         yield from cursor_to_bytes(cur)
 
     def release_get_by(self,
                        origin_id,
                        limit=None,
                        cur=None):
         """Retrieve a release by occurrence criterion (only origin right now)
 
         Args:
             - origin_id: The origin to look for.
 
         """
         cur = self._cursor(cur)
         query = """
         SELECT %s
             FROM swh_release_get_by(%%s)
             LIMIT %%s
         """ % ', '.join(self.release_get_cols)
         cur.execute(query, (origin_id, limit))
         yield from cursor_to_bytes(cur)
 
     def revision_get_by(self,
                         origin_id,
                         branch_name,
                         datetime,
                         limit=None,
                         cur=None):
         """Retrieve a revision by occurrence criterion.
 
         Args:
             - origin_id: The origin to look for
             - branch_name: the branch name to look for
             - datetime: the lower bound of timerange to look for.
             - limit: limit number of results to return
             The upper bound being now.
         """
         cur = self._cursor(cur)
         if branch_name and isinstance(branch_name, str):
             branch_name = branch_name.encode('utf-8')
 
         query = '''
         SELECT %s
             FROM swh_revision_get_by(%%s, %%s, %%s)
             LIMIT %%s
         ''' % ', '.join(self.revision_get_cols)
 
         cur.execute(query, (origin_id, branch_name, datetime, limit))
         yield from cursor_to_bytes(cur)
 
     def directory_entry_get_by_path(self, directory, paths, cur=None):
         """Retrieve a directory entry by path.
 
         """
         cur = self._cursor(cur)
         cur.execute("""SELECT dir_id, type, target, name, perms, status, sha1,
                        sha1_git, sha256
                        FROM swh_find_directory_entry_by_path(%s, %s)""",
                     (directory, paths))
 
         data = cur.fetchone()
         if data is None or set(data) == {None}:
             return None
         return line_to_bytes(data)
 
     def entity_get(self, uuid, cur=None):
         """Retrieve the entity and its parent hierarchy chain per uuid.
 
         """
         cur = self._cursor(cur)
         cur.execute("""SELECT %s
                        FROM swh_entity_get(%%s)""" % (
                            ', '.join(self.entity_cols)),
                     (uuid, ))
         yield from cursor_to_bytes(cur)
 
     def entity_get_one(self, uuid, cur=None):
         """Retrieve a single entity given its uuid.
 
         """
         cur = self._cursor(cur)
         cur.execute("""SELECT %s
                        FROM entity
                        WHERE uuid = %%s""" % (
                            ', '.join(self.entity_cols)),
                     (uuid, ))
         data = cur.fetchone()
         if not data:
             return None
         return line_to_bytes(data)
 
+    @stored_procedure('swh_mktemp_content_mimetype_missing')
+    def mktemp_content_mimetype_missing(self, cur=None): pass
+
     def content_mimetype_missing_from_temp(self, cur=None):
         """List missing mimetypes.
 
         """
         cur = self._cursor(cur)
         cur.execute("SELECT * FROM swh_content_mimetype_missing()")
         yield from cursor_to_bytes(cur)
 
+    @stored_procedure('swh_mktemp_content_mimetype')
+    def mktemp_content_mimetype(self, cur=None): pass
+
     def content_mimetype_add_from_temp(self, conflict_update, cur=None):
         self._cursor(cur).execute("SELECT swh_content_mimetype_add(%s)",
                                   (conflict_update, ))
 
-    content_mimetype_cols = ['id', 'mimetype', 'encoding']
+    content_mimetype_cols = ['id', 'mimetype', 'encoding',
+                             'tool_name', 'tool_version']
 
     def content_mimetype_get_from_temp(self, cur=None):
         cur = self._cursor(cur)
         query = "SELECT %s FROM swh_content_mimetype_get()" % (
             ','.join(self.content_mimetype_cols))
         cur.execute(query)
         yield from cursor_to_bytes(cur)
 
     def content_language_missing_from_temp(self, cur=None):
         """List missing languages.
 
         """
         cur = self._cursor(cur)
         cur.execute("SELECT * FROM swh_content_language_missing()")
         yield from cursor_to_bytes(cur)
 
     def content_language_add_from_temp(self, conflict_update, cur=None):
         self._cursor(cur).execute("SELECT swh_content_language_add(%s)",
                                   (conflict_update, ))
 
     content_language_cols = ['id', 'lang']
 
     def content_language_get_from_temp(self, cur=None):
         cur = self._cursor(cur)
         query = "SELECT %s FROM swh_content_language_get()" % (
             ','.join(self.content_language_cols))
         cur.execute(query)
         yield from cursor_to_bytes(cur)
 
     def content_ctags_missing_from_temp(self, cur=None):
         """List missing ctags.
 
         """
         cur = self._cursor(cur)
         cur.execute("SELECT * FROM swh_content_ctags_missing()")
         yield from cursor_to_bytes(cur)
 
     def content_ctags_add_from_temp(self, conflict_update, cur=None):
         self._cursor(cur).execute("SELECT swh_content_ctags_add(%s)",
                                   (conflict_update, ))
 
     content_ctags_cols = ['id', 'name', 'kind', 'line', 'lang',
                           'tool_name', 'tool_version']
 
     def content_ctags_get_from_temp(self, cur=None):
         cur = self._cursor(cur)
         query = "SELECT %s FROM swh_content_ctags_get()" % (
             ','.join(self.content_ctags_cols))
         cur.execute(query)
         yield from cursor_to_bytes(cur)
 
     def content_ctags_search(self, expression, last_sha1, limit, cur=None):
         cur = self._cursor(cur)
         if not last_sha1:
             query = """SELECT %s
                        FROM swh_content_ctags_search(%%s, %%s)""" % (
                            ','.join(self.content_ctags_cols))
             cur.execute(query, (expression, limit))
         else:
             if isinstance(last_sha1, bytes):
                 last_sha1 = '\\x%s' % hashutil.hash_to_hex(last_sha1)
             else:
                 last_sha1 = '\\x%s' % last_sha1
 
             query = """SELECT %s
                        FROM swh_content_ctags_search(%%s, %%s, %%s)""" % (
                            ','.join(self.content_ctags_cols))
             cur.execute(query, (expression, limit, last_sha1))
 
         yield from cursor_to_bytes(cur)
 
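     # Note on the branch above: '\\x...' is PostgreSQL's hex input
     # format for bytea values, so both bytes and hex-string sha1
     # offsets are normalized to the same literal form before being
     # passed to swh_content_ctags_search.
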
     def content_fossology_license_missing_from_temp(self, cur=None):
         """List missing licenses.
 
         """
         cur = self._cursor(cur)
         cur.execute("SELECT * FROM swh_content_fossology_license_missing()")
         yield from cursor_to_bytes(cur)
 
     def content_fossology_license_add_from_temp(self, conflict_update,
                                                 cur=None):
         """Add new licenses per content.
 
         """
         self._cursor(cur).execute(
             "SELECT swh_content_fossology_license_add(%s)",
             (conflict_update, ))
 
     content_fossology_license_cols = ['id', 'tool_name', 'tool_version',
                                       'licenses']
 
     def content_fossology_license_get_from_temp(self, cur=None):
         """Retrieve licenses per content.
 
         """
         cur = self._cursor(cur)
         query = "SELECT %s FROM swh_content_fossology_license_get()" % (
             ','.join(self.content_fossology_license_cols))
         cur.execute(query)
         yield from cursor_to_bytes(cur)
 
     def content_fossology_license_unknown(self, cur=None):
         """Returns the unknown licenses from
            tmp_content_fossology_license_unknown.
 
         """
         cur = self._cursor(cur)
         cur.execute("SELECT * FROM swh_content_fossology_license_unknown()")
         yield from cursor_to_bytes(cur)
diff --git a/swh/storage/storage.py b/swh/storage/storage.py
index 46acdc06..70bdf00e 100644
--- a/swh/storage/storage.py
+++ b/swh/storage/storage.py
@@ -1,1549 +1,1560 @@
 # Copyright (C) 2015-2016  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 
 from collections import defaultdict
 import datetime
 import itertools
 import dateutil.parser
 import psycopg2
 
 from . import converters
 from .common import db_transaction_generator, db_transaction
 from .db import Db
 from .exc import StorageDBError
 
 from swh.core.hashutil import ALGORITHMS
 from swh.objstorage import PathSlicingObjStorage
 from swh.objstorage.exc import ObjNotFoundError
 
 # Max block size of contents to return
 BULK_BLOCK_CONTENT_LEN_MAX = 10000
 
 
 class Storage():
     """SWH storage proxy, encompassing DB and object storage
 
     """
 
     def __init__(self, db_conn, obj_root):
         """
         Args:
             db_conn: either a libpq connection string, or a psycopg2 connection
             obj_root: path to the root of the object storage
 
         """
         try:
             if isinstance(db_conn, psycopg2.extensions.connection):
                 self.db = Db(db_conn)
             else:
                 self.db = Db.connect(db_conn)
         except psycopg2.OperationalError as e:
             raise StorageDBError(e)
 
         # TODO this needs to be configured
         self.objstorage = PathSlicingObjStorage(obj_root,
                                                 slicing='0:2/2:4/4:6')
 
     def check_config(self, *, check_write):
         """Check that the storage is configured and ready to go."""
 
         if not self.objstorage.check_config(check_write=check_write):
             return False
 
         # Check permissions on one of the tables
         with self.db.transaction() as cur:
             if check_write:
                 check = 'INSERT'
             else:
                 check = 'SELECT'
 
             cur.execute(
                 "select has_table_privilege(current_user, 'content', %s)",
                 (check,)
             )
             return cur.fetchone()[0]
 
     def content_add(self, content):
         """Add content blobs to the storage
 
         Note: in case of DB errors, objects might have already been added to
         the object storage and will not be removed. Since addition to the
         object storage is idempotent, that should not be a problem.
 
         Args:
             content: iterable of dictionaries representing individual pieces of
                 content to add. Each dictionary has the following keys:
                 - data (bytes): the actual content
                 - length (int): content length (default: -1)
                 - one key for each checksum algorithm in
                   swh.core.hashutil.ALGORITHMS, mapped to the corresponding
                   checksum
                 - status (str): one of visible, hidden, absent
                 - reason (str): if status = absent, the reason why
                 - origin (int): if status = absent, the origin we saw the
                   content in
 
         """
         db = self.db
 
         content_by_status = defaultdict(list)
         for d in content:
             if 'status' not in d:
                 d['status'] = 'visible'
             if 'length' not in d:
                 d['length'] = -1
             content_by_status[d['status']].append(d)
 
         content_with_data = content_by_status['visible']
         content_without_data = content_by_status['absent']
 
         missing_content = set(self.content_missing(content_with_data))
         missing_skipped = set(
             sha1_git for sha1, sha1_git, sha256
             in self.skipped_content_missing(content_without_data))
 
         with db.transaction() as cur:
             if missing_content:
                 # create temporary table for metadata injection
                 db.mktemp('content', cur)
 
                 def add_to_objstorage(cont):
                     self.objstorage.add(cont['data'],
                                         obj_id=cont['sha1'])
 
                 content_filtered = (cont for cont in content_with_data
                                     if cont['sha1'] in missing_content)
 
                 db.copy_to(content_filtered, 'tmp_content',
                            ['sha1', 'sha1_git', 'sha256', 'length', 'status'],
                            cur, item_cb=add_to_objstorage)
 
                 # move metadata in place
                 db.content_add_from_temp(cur)
 
             if missing_skipped:
                 missing_filtered = (cont for cont in content_without_data
                                     if cont['sha1_git'] in missing_skipped)
                 db.mktemp('skipped_content', cur)
                 db.copy_to(missing_filtered, 'tmp_skipped_content',
                            ['sha1', 'sha1_git', 'sha256', 'length',
                             'reason', 'status', 'origin'], cur)
 
                 # move metadata in place
                 db.skipped_content_add_from_temp(cur)
 
     def content_get(self, content):
         """Retrieve in bulk contents and their data.
 
         Args:
             content: iterables of sha1
 
         Returns:
             Generates streams of contents as dict with their raw data:
             - sha1: sha1's content
             - data: bytes data of the content
 
         Raises:
             ValueError in case of too much contents are required.
             cf. BULK_BLOCK_CONTENT_LEN_MAX
 
         """
         # FIXME: Improve on server module to slice the result
         if len(content) > BULK_BLOCK_CONTENT_LEN_MAX:
             raise ValueError(
                 "Send at maximum %s contents." % BULK_BLOCK_CONTENT_LEN_MAX)
 
         for obj_id in content:
             try:
                 data = self.objstorage.get(obj_id)
             except ObjNotFoundError:
                 yield None
                 continue
 
             yield {'sha1': obj_id, 'data': data}
 
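     # Usage sketch (hypothetical values; `storage` is assumed to be a
     # configured Storage instance, and handle a placeholder). Missing
     # contents yield None:
     #
     #   sha1 = bytes.fromhex('34973274ccef6ab4dfaaf86599792fa9c3fe4689')
     #   for c in storage.content_get([sha1]):
     #       if c is not None:
     #           handle(c['sha1'], c['data'])
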
     @db_transaction_generator
     def content_get_metadata(self, content, cur=None):
         """Retrieve content metadata in bulk
 
         Args:
             content: iterable of content identifiers (sha1)
 
         Returns:
             an iterable with content metadata corresponding to the given ids
         """
         db = self.db
 
         db.store_tmp_bytea(content, cur)
 
         for content_metadata in db.content_get_metadata_from_temp(cur):
             yield dict(zip(db.content_get_metadata_keys, content_metadata))
 
     @db_transaction_generator
     def content_missing(self, content, key_hash='sha1', cur=None):
         """List content missing from storage
 
         Args:
             content: iterable of dictionaries containing one key for each
                 checksum algorithm in swh.core.hashutil.ALGORITHMS, mapped to
                 the corresponding checksum, and a length key mapped to the
                 content length.
             key_hash: the name of the hash used as key (default: 'sha1')
 
         Returns:
             an iterable of `key_hash`es missing from the storage
 
         Raises:
             TODO: an exception when we get a hash collision.
 
         """
         db = self.db
 
         keys = ['sha1', 'sha1_git', 'sha256']
 
         if key_hash not in keys:
             raise ValueError("key_hash should be one of %s" % keys)
 
         key_hash_idx = keys.index(key_hash)
 
         # Create temporary table for metadata injection
         db.mktemp('content', cur)
 
         db.copy_to(content, 'tmp_content', keys + ['length'], cur)
 
         for obj in db.content_missing_from_temp(cur):
             yield obj[key_hash_idx]
 
     @db_transaction_generator
     def content_missing_per_sha1(self, contents, cur=None):
         """List content missing from storage based only on sha1.
 
         Args:
             contents: Iterable of sha1 to check for absence.
 
         Returns:
             an iterable of `sha1`s missing from the storage.
 
         Raises:
             TODO: an exception when we get a hash collision.
 
         """
         db = self.db
 
         db.store_tmp_bytea(contents, cur)
         for obj in db.content_missing_per_sha1_from_temp(cur):
             yield obj[0]
 
     @db_transaction_generator
     def skipped_content_missing(self, content, cur=None):
         """List skipped_content missing from storage
 
         Args:
             content: iterable of dictionaries containing the data for each
                 checksum algorithm.
 
         Returns:
             an iterable of signatures missing from the storage
         """
         keys = ['sha1', 'sha1_git', 'sha256']
 
         db = self.db
 
         db.mktemp('skipped_content', cur)
         db.copy_to(content, 'tmp_skipped_content',
                    keys + ['length', 'reason'], cur)
 
         yield from db.skipped_content_missing_from_temp(cur)
 
     @db_transaction
     def content_find(self, content, cur=None):
         """Find a content hash in db.
 
         Args:
             content: a dictionary representing one content hash, mapping
                 checksum algorithm names (see swh.core.hashutil.ALGORITHMS) to
                 checksum values
 
         Returns:
             a dict with keys sha1, sha1_git, sha256, length, ctime and
             status if the content exists, or None otherwise.

         Raises:
             ValueError if none of the keys sha1, sha1_git or sha256 is
             present in content.
 
         """
         db = self.db
 
         if not set(content).intersection(ALGORITHMS):
             raise ValueError('content keys must contain at least one of: '
                              'sha1, sha1_git, sha256')
 
         c = db.content_find(sha1=content.get('sha1'),
                             sha1_git=content.get('sha1_git'),
                             sha256=content.get('sha256'),
                             cur=cur)
         if c:
             keys = ['sha1', 'sha1_git', 'sha256', 'length', 'ctime', 'status']
             return dict(zip(keys, c))
         return None
 
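     # Usage sketch (hypothetical values): any known checksum kind can
     # serve as the lookup key:
     #
     #   c = storage.content_find({'sha1_git': sha1_git_bytes})
     #   if c:
     #       print(c['length'], c['status'])
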
     @db_transaction_generator
     def content_find_provenance(self, content, cur=None):
         """Find content's provenance information.
 
         Args:
             content: a dictionary representing one content hash, mapping
                 a checksum algorithm name (one of
                 swh.core.hashutil.ALGORITHMS) to the corresponding
                 checksum value.
 
         Yields:
             The provenance information on content.
 
         """
         db = self.db
 
         c = self.content_find(content)
 
         if not c:
             return
 
         sha1_git = c['sha1_git']
 
         for provenance in db.content_find_provenance(sha1_git, cur=cur):
             yield dict(zip(db.provenance_cols, provenance))
 
     def directory_add(self, directories):
         """Add directories to the storage
 
         Args:
             directories: iterable of dictionaries representing the individual
                 directories to add. Each dict has the following keys:
                 - id (sha1_git): the id of the directory to add
                 - entries (list): list of dicts for each entry in the
                     directory.  Each dict has the following keys:
                     - name (bytes)
                     - type (one of 'file', 'dir', 'rev'):
                         type of the directory entry (file, directory, revision)
                     - target (sha1_git): id of the object pointed at by the
                           directory entry
                     - perms (int): entry permissions
         """
         dirs = set()
         dir_entries = {
             'file': defaultdict(list),
             'dir': defaultdict(list),
             'rev': defaultdict(list),
         }
 
         for cur_dir in directories:
             dir_id = cur_dir['id']
             dirs.add(dir_id)
             for src_entry in cur_dir['entries']:
                 entry = src_entry.copy()
                 entry['dir_id'] = dir_id
                 dir_entries[entry['type']][dir_id].append(entry)
 
         dirs_missing = set(self.directory_missing(dirs))
         if not dirs_missing:
             return
 
         db = self.db
         with db.transaction() as cur:
             # Copy directory ids
             dirs_missing_dict = ({'id': dir} for dir in dirs_missing)
             db.mktemp('directory', cur)
             db.copy_to(dirs_missing_dict, 'tmp_directory', ['id'], cur)
 
             # Copy entries
             for entry_type, entry_list in dir_entries.items():
                 entries = itertools.chain.from_iterable(
                     entries_for_dir
                     for dir_id, entries_for_dir
                     in entry_list.items()
                     if dir_id in dirs_missing)
 
                 db.mktemp_dir_entry(entry_type)
 
                 db.copy_to(
                     entries,
                     'tmp_directory_entry_%s' % entry_type,
                     ['target', 'name', 'perms', 'dir_id'],
                     cur,
                 )
 
             # Do the final copy
             db.directory_add_from_temp(cur)
 
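     # A minimal input sketch (hypothetical identifiers) for
     # directory_add; dir_id and target are sha1_git bytes:
     #
     #   storage.directory_add([{
     #       'id': dir_id,
     #       'entries': [{
     #           'name': b'README',
     #           'type': 'file',
     #           'target': target,
     #           'perms': 0o100644,
     #       }],
     #   }])
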
     @db_transaction_generator
     def directory_missing(self, directories, cur):
         """List directories missing from storage
 
         Args: an iterable of directory ids
         Returns: an iterable of missing directory ids
         """
         db = self.db
 
         # Create temporary table for metadata injection
         db.mktemp('directory', cur)
 
         directories_dicts = ({'id': dir} for dir in directories)
 
         db.copy_to(directories_dicts, 'tmp_directory', ['id'], cur)
 
         for obj in db.directory_missing_from_temp(cur):
             yield obj[0]
 
     @db_transaction_generator
     def directory_get(self,
                       directories,
                       cur=None):
         """Get information on directories.
 
         Args:
             - directories: an iterable of directory ids
 
         Returns:
             Generates directories as dicts with keys id, dir_entries,
             file_entries and rev_entries.
 
         """
         db = self.db
         keys = ('id', 'dir_entries', 'file_entries', 'rev_entries')
 
         db.mktemp('directory', cur)
         db.copy_to(({'id': dir_id} for dir_id in directories),
                    'tmp_directory', ['id'], cur)
 
         dirs = db.directory_get_from_temp(cur)
         for line in dirs:
             yield dict(zip(keys, line))
 
     @db_transaction_generator
     def directory_ls(self, directory, recursive=False, cur=None):
         """Get entries for one directory.
 
         Args:
             - directory: the directory to list entries from.
             - recursive: if set, list entries recursively from this
               directory.

         Returns:
             The entries of the given directory.
 
         """
         db = self.db
 
         if recursive:
             res_gen = db.directory_walk(directory)
         else:
             res_gen = db.directory_walk_one(directory)
 
         for line in res_gen:
             yield dict(zip(db.directory_ls_cols, line))
 
     @db_transaction
     def cache_content_revision_add(self, revisions, cur=None):
         """Cache the current revision's current targeted arborescence directory.
         If the revision has already been cached, it just does nothing.
 
         Args:
             - revisions: the revisions to cache
 
         Returns:
             None
 
         """
         db = self.db
 
         db.store_tmp_bytea(revisions, cur)
         db.cache_content_revision_add()
 
     @db_transaction_generator
     def cache_content_get_all(self, cur=None):
         """Read the distinct contents in the cache table.
 
         Yields:
             contents from cache
 
         """
         for content in self.db.cache_content_get_all(cur):
             yield dict(zip(self.db.cache_content_get_cols, content))
 
     @db_transaction
     def cache_content_get(self, content, cur=None):
         """Retrieve information on content.
 
         Args:
             content (dict): content with checksums
 
         Returns:
             Its properties (sha1, sha1_git, sha256, revision_paths)
 
         """
         if 'sha1_git' in content:
             sha1_git = content['sha1_git']
         else:
             c = self.content_find(content)
             if not c:
                 return None
             sha1_git = c['sha1_git']
 
         c = self.db.cache_content_get(sha1_git, cur=cur)
         if not c:
             return None
         return dict(zip(self.db.cache_content_get_cols, c))
 
     @db_transaction_generator
     def cache_revision_origin_add(self, origin, visit, cur=None):
         """Cache the list of revisions the given visit added to the origin.
 
         Args:
             - origin: the id of the origin
             - visit: the id of the visit
 
         Returns:
             The list of new revisions
 
         """
         for (revision,) in self.db.cache_revision_origin_add(origin, visit):
             yield revision
 
     @db_transaction
     def directory_entry_get_by_path(self, directory, paths, cur=None):
         """Get the directory entry (either file or dir) from directory with
         path.
 
         Args:
             - directory: sha1 of the top level directory
             - paths: list of path components to look up, from the top
               level directory (left) down to the entry (right).
 
         Returns:
             The corresponding directory entry if found, None otherwise.
 
         """
         db = self.db
         keys = ('dir_id', 'type', 'target', 'name', 'perms', 'status',
                 'sha1', 'sha1_git', 'sha256')
 
         res = db.directory_entry_get_by_path(directory, paths, cur)
         if res:
             return dict(zip(keys, res))
 
     def revision_add(self, revisions):
         """Add revisions to the storage
 
         Args:
             revisions: iterable of dictionaries representing the individual
                 revisions to add. Each dict has the following keys:
                 - id (sha1_git): id of the revision to add
                 - date (datetime.DateTime): date the revision was written
                 - date_offset (int): offset from UTC in minutes the revision
                     was written
                 - date_neg_utc_offset (boolean): whether a null date_offset
                     represents a negative UTC offset
                 - committer_date (datetime.DateTime): date the revision got
                     added to the origin
                 - committer_date_offset (int): offset from UTC in minutes the
                     revision was added to the origin
                 - committer_date_neg_utc_offset (boolean): whether a null
                     committer_date_offset represents a negative UTC offset
                 - type (one of 'git', 'tar'): type of the revision added
                 - directory (sha1_git): the directory the revision points at
                 - message (bytes): the message associated with the revision
                 - author_name (bytes): the name of the revision author
                 - author_email (bytes): the email of the revision author
                 - committer_name (bytes): the name of the revision committer
                 - committer_email (bytes): the email of the revision committer
                 - metadata (jsonb): extra information as dictionary
                 - synthetic (bool): revision's nature (tarball, directory
                     creates synthetic revision)
                 - parents (list of sha1_git): the parents of this revision
         """
         db = self.db
 
         revisions_missing = set(self.revision_missing(
             set(revision['id'] for revision in revisions)))
 
         if not revisions_missing:
             return
 
         with db.transaction() as cur:
             db.mktemp_revision(cur)
 
             revisions_filtered = (
                 converters.revision_to_db(revision) for revision in revisions
                 if revision['id'] in revisions_missing)
 
             parents_filtered = []
 
             db.copy_to(
                 revisions_filtered, 'tmp_revision', db.revision_add_cols,
                 cur,
                 lambda rev: parents_filtered.extend(rev['parents']))
 
             db.revision_add_from_temp(cur)
 
             db.copy_to(parents_filtered, 'revision_history',
                        ['id', 'parent_id', 'parent_rank'], cur)
 
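     # A minimal input sketch (hypothetical identifiers) for
     # revision_add, showing only a subset of the keys listed above;
     # omitted keys from that list may be required in practice:
     #
     #   storage.revision_add([{
     #       'id': rev_id,                # sha1_git bytes
     #       'date': date,                # datetime.datetime
     #       'date_offset': 0,
     #       'committer_date': date,
     #       'committer_date_offset': 0,
     #       'type': 'git',
     #       'directory': dir_id,         # sha1_git bytes
     #       'message': b'Initial commit',
     #       'author_name': b'Jane Doe',
     #       'author_email': b'jane@example.com',
     #       'committer_name': b'Jane Doe',
     #       'committer_email': b'jane@example.com',
     #       'synthetic': False,
     #       'parents': [],
     #   }])
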
     @db_transaction_generator
     def revision_missing(self, revisions, cur=None):
         """List revisions missing from storage
 
         Args: an iterable of revision ids
 
         Returns: an iterable of missing revision ids
         """
         db = self.db
 
         db.store_tmp_bytea(revisions, cur)
 
         for obj in db.revision_missing_from_temp(cur):
             yield obj[0]
 
     @db_transaction_generator
     def revision_get(self, revisions, cur):
         """Get all revisions from storage
            Args: an iterable of revision ids
            Returns: an iterable of revisions as dictionaries
                     (or None if the revision doesn't exist)
         """
 
         db = self.db
 
         db.store_tmp_bytea(revisions, cur)
 
         for line in self.db.revision_get_from_temp(cur):
             data = converters.db_to_revision(
                 dict(zip(db.revision_get_cols, line))
             )
             if not data['type']:
                 yield None
                 continue
             yield data
 
     @db_transaction_generator
     def revision_log(self, revisions, limit=None, cur=None):
         """Fetch revision entry from the given root revisions.
 
         Args:
             - revisions: array of root revision to lookup
             - limit: limitation on the output result. Default to null.
 
         Yields:
             List of revision log from such revisions root.
 
         """
         db = self.db
 
         for line in db.revision_log(revisions, limit, cur):
             data = converters.db_to_revision(
                 dict(zip(db.revision_get_cols, line))
             )
             if not data['type']:
                 yield None
                 continue
             yield data
 
     @db_transaction_generator
     def revision_shortlog(self, revisions, limit=None, cur=None):
         """Fetch the shortlog for the given revisions
 
         Args:
             revisions: list of root revisions to lookup
             limit: depth limitation for the output
 
         Yields:
             a list of (id, parents) tuples.
         """
 
         db = self.db
 
         yield from db.revision_shortlog(revisions, limit, cur)
 
     @db_transaction_generator
     def revision_log_by(self, origin_id, branch_name=None, timestamp=None,
                         limit=None, cur=None):
         """Fetch revision entry from the actual origin_id's latest revision.
 
         Args:
             - origin_id: the origin id from which deriving the revision
             - branch_name: (optional) occurrence's branch name
             - timestamp: (optional) occurrence's time
             - limit: (optional) depth limitation for the
               output. Default to None.
 
         Yields:
             The revision log starting from the revision derived from
             the (origin, branch_name, timestamp) combination if any.
             Returns the [] if no revision matching this combination is
             found.
 
         """
         db = self.db
 
         # Retrieve the revision by criterion
         revisions = list(db.revision_get_by(
             origin_id, branch_name, timestamp, limit=1))
 
         if not revisions:
             return None
 
         revision_id = revisions[0][0]
         # otherwise, retrieve the revision log from that revision
         yield from self.revision_log([revision_id], limit)
 
     def release_add(self, releases):
         """Add releases to the storage
 
         Args:
             releases: iterable of dictionaries representing the individual
                 releases to add. Each dict has the following keys:
                 - id (sha1_git): id of the release to add
                 - revision (sha1_git): id of the revision the release points
                     to
                 - date (datetime.DateTime): the date the release was made
                 - date_offset (int): offset from UTC in minutes the release was
                     made
                 - date_neg_utc_offset (boolean): whether a null date_offset
                     represents a negative UTC offset
                 - name (bytes): the name of the release
                 - comment (bytes): the comment associated with the release
                 - author_name (bytes): the name of the release author
                 - author_email (bytes): the email of the release author
         """
         db = self.db
 
         release_ids = set(release['id'] for release in releases)
         releases_missing = set(self.release_missing(release_ids))
 
         if not releases_missing:
             return
 
         with db.transaction() as cur:
             db.mktemp_release(cur)
 
             releases_filtered = (
                 converters.release_to_db(release) for release in releases
                 if release['id'] in releases_missing
             )
 
             db.copy_to(releases_filtered, 'tmp_release', db.release_add_cols,
                        cur)
 
             db.release_add_from_temp(cur)
 
     @db_transaction_generator
     def release_missing(self, releases, cur=None):
         """List releases missing from storage
 
         Args: an iterable of release ids
         Returns: an iterable of missing release ids
         """
         db = self.db
 
         # Create temporary table for metadata injection
         db.store_tmp_bytea(releases, cur)
 
         for obj in db.release_missing_from_temp(cur):
             yield obj[0]
 
     @db_transaction_generator
     def release_get(self, releases, cur=None):
         """Given a list of sha1, return the releases's information
 
         Args:
             releases: list of sha1s
 
         Returns:
             Generates the list of releases dict with the following keys:
             - id: origin's id
             - revision: origin's type
             - url: origin's url
             - lister: lister's uuid
             - project: project's uuid (FIXME, retrieve this information)
 
         Raises:
             ValueError if the keys does not match (url and type) nor id.
 
         """
         db = self.db
 
         # Create temporary table for metadata injection
         db.store_tmp_bytea(releases, cur)
 
         for release in db.release_get_from_temp(cur):
             yield converters.db_to_release(
                 dict(zip(db.release_get_cols, release))
             )
 
     @db_transaction
     def occurrence_add(self, occurrences, cur=None):
         """Add occurrences to the storage
 
         Args:
             occurrences: iterable of dictionaries representing the individual
                 occurrences to add. Each dict has the following keys:
                 - origin (int): id of the origin corresponding to the
                     occurrence
                 - branch (str): the reference name of the occurrence
                 - target (sha1_git): the id of the object pointed to by
                     the occurrence
                 - target_type (str): the type of object pointed to by the
                     occurrence
                 - visit (int): id of the origin visit during which the
                     occurrence was seen
         """
         db = self.db
 
         db.mktemp_occurrence_history(cur)
         db.copy_to(occurrences, 'tmp_occurrence_history',
                    ['origin', 'branch', 'target', 'target_type', 'visit'], cur)
 
         db.occurrence_history_add_from_temp(cur)
 
     @db_transaction_generator
     def occurrence_get(self, origin_id, cur=None):
         """Retrieve occurrence information per origin_id.
 
         Args:
             origin_id: The occurrence's origin.
 
         Yields:
             List of occurrences matching criterion.
 
         """
         db = self.db
         for line in db.occurrence_get(origin_id, cur):
             yield {
                 'origin': line[0],
                 'branch': line[1],
                 'target': line[2],
                 'target_type': line[3],
             }
 
     @db_transaction
     def origin_visit_add(self, origin, ts, cur=None):
         """Add an origin_visit for the origin at ts with status 'ongoing'.
 
         Args:
             origin: Visited Origin id
             ts: timestamp of such visit
 
         Returns:
             Dict with keys origin and visit:
             - origin: origin identifier
             - visit: the visit identifier for the new visit occurrence
 
         """
         if isinstance(ts, str):
             ts = dateutil.parser.parse(ts)
 
         return {
             'origin': origin,
             'visit': self.db.origin_visit_add(origin, ts, cur)
         }
 
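     # Usage sketch (hypothetical origin id): ts may be a datetime or a
     # string parseable by dateutil:
     #
     #   visit = storage.origin_visit_add(42, '2016-01-01T00:00:00+00:00')
     #   # => {'origin': 42, 'visit': <new visit id>}
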
     @db_transaction
     def origin_visit_update(self, origin, visit_id, status, metadata=None,
                             cur=None):
         """Update an origin_visit's status.
 
         Args:
             origin: Visited Origin id
             visit_id: Visit's id
             status: Visit's new status
             metadata: Data associated to the visit
 
         Returns:
             None
 
         """
         return self.db.origin_visit_update(origin, visit_id, status, metadata,
                                            cur)
 
     @db_transaction_generator
     def origin_visit_get(self, origin, cur=None):
         """Retrieve all the origin's visit's information.
 
         Args:
             origin: The occurrence's origin (identifier).
 
         Yields:
             List of visits.
 
         """
         db = self.db
         for line in db.origin_visit_get_all(origin, cur):
             data = dict(zip(self.db.origin_visit_get_cols, line))
             yield data
 
     @db_transaction
     def origin_visit_get_by(self, origin, visit, cur=None):
         """Retrieve origin visit's information.
 
         Args:
             origin: The occurrence's origin (identifier).
             visit: The visit identifier.
 
         Returns:
             The information on that particular (origin, visit)
 
         """
         db = self.db
 
         ori_visit = db.origin_visit_get(origin, visit, cur)
         if not ori_visit:
             return None
 
         ori_visit = dict(zip(self.db.origin_visit_get_cols, ori_visit))
 
         occs = {}
         for occ in db.occurrence_by_origin_visit(origin, visit):
             _, branch_name, target, target_type = occ
             occs[branch_name] = {
                 'target': target,
                 'target_type': target_type
             }
 
         ori_visit.update({
             'occurrences': occs
         })
 
         return ori_visit
 
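     # Shape of the result (a sketch with hypothetical values): the
     # origin_visit columns plus an 'occurrences' mapping keyed by
     # branch name:
     #
     #   {
     #       'origin': 42,
     #       'visit': 1,
     #       ...,
     #       'occurrences': {
     #           b'refs/heads/master': {
     #               'target': target,
     #               'target_type': 'revision',
     #           },
     #       },
     #   }
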
     @db_transaction_generator
     def revision_get_by(self,
                         origin_id,
                         branch_name=None,
                         timestamp=None,
                         limit=None,
                         cur=None):
         """Given an origin_id, retrieve occurrences' list per given criterions.
 
         Args:
             origin_id: The origin to filter on.
             branch_name: (optional) branch name.
             timestamp: (optional) time.
             limit: (optional) limit
 
         Yields:
             Revisions matching the criteria, or None if nothing is
             found.
 
         """
         for line in self.db.revision_get_by(origin_id,
                                             branch_name,
                                             timestamp,
                                             limit=limit,
                                             cur=cur):
             data = converters.db_to_revision(
                 dict(zip(self.db.revision_get_cols, line))
             )
             if not data['type']:
                 yield None
                 continue
             yield data
 
     def release_get_by(self, origin_id, limit=None):
         """Given an origin id, return all the tag objects pointing to heads of
         origin_id.
 
         Args:
             origin_id: the origin to filter on.
             limit: None by default
 
         Yields:
             Releases matching the criteria, or None if nothing is
             found.
 
         """
 
         for line in self.db.release_get_by(origin_id, limit=limit):
             data = converters.db_to_release(
                 dict(zip(self.db.release_get_cols, line))
             )
             yield data
 
     @db_transaction
     def object_find_by_sha1_git(self, ids, cur=None):
         """Return the objects found with the given ids.
 
         Args:
             ids: a generator of sha1_gits
         Returns:
             a dict mapping the id to the list of objects found. Each object
             found is itself a dict with keys:
                 sha1_git: the input id
                 type: the type of object found
                 id: the id of the object found
                 object_id: the numeric id of the object found.
         """
         db = self.db

         ids = list(ids)
         ret = {id: [] for id in ids}
 
         for retval in db.object_find_by_sha1_git(ids):
             if retval[1]:
                 ret[retval[0]].append(dict(zip(db.object_find_by_sha1_git_cols,
                                                retval)))
 
         return ret
 
     @db_transaction
     def origin_get(self, origin, cur=None):
         """Return the origin either identified by its id or its tuple
         (type, url).
 
         Args:
             origin: dictionary representing the individual
                 origin to find.
                 This dict has either the keys type and url:
                 - type (FIXME: enum TBD): the origin type ('git', 'wget', ...)
                 - url (bytes): the url the origin points to
                 or the key id:
                 - id: the origin id
 
         Returns:
             the origin dict with the keys:
             - id: origin's id
             - type: origin's type
             - url: origin's url
             - lister: lister's uuid
             - project: project's uuid (FIXME, retrieve this information)
 
         Raises:
             ValueError if the dict has neither the key id nor both the
             keys type and url.
 
         """
         db = self.db
 
         keys = ['id', 'type', 'url', 'lister', 'project']
 
         origin_id = origin.get('id')
         if origin_id:  # check lookup per id first
             ori = db.origin_get(origin_id, cur)
         elif 'type' in origin and 'url' in origin:  # or lookup per type, url
             ori = db.origin_get_with(origin['type'], origin['url'], cur)
         else:  # unsupported lookup
             raise ValueError('Origin must have either id or (type and url).')
 
         if ori:
             return dict(zip(keys, ori))
         return None
 
     @db_transaction
     def _person_add(self, person, cur=None):
         """Add a person in storage.
 
         BEWARE: Internal function for now. It does nothing fancy if the
         person already exists. Please adapt the code if more checks are
         needed.
 
         Args:
             person dictionary with keys name and email.
 
         Returns:
             Id of the new person.
 
         """
         db = self.db
 
         return db.person_add(person)
 
     @db_transaction_generator
     def person_get(self, person, cur=None):
         """Return the persons identified by their ids.
 
         Args:
             person: array of ids.
 
         Returns:
             The array of persons corresponding to the ids.
 
         """
         db = self.db
 
         for person in db.person_get(person):
             yield dict(zip(db.person_get_cols, person))
 
     @db_transaction
     def origin_add(self, origins, cur=None):
         """Add origins to the storage
 
         Args:
             origins: list of dictionaries representing the individual origins,
             with the following keys:
                 type: the origin type ('git', 'svn', 'deb', ...)
                 url (bytes): the url the origin points to
         Returns:
             The array of ids corresponding to the given origins
         """
 
         ret = []
         for origin in origins:
             ret.append(self.origin_add_one(origin, cur=cur))
 
         return ret
 
     @db_transaction
     def origin_add_one(self, origin, cur=None):
         """Add origin to the storage
 
         Args:
             origin: dictionary representing the individual
                 origin to add. This dict has the following keys:
                 - type (FIXME: enum TBD): the origin type ('git', 'wget', ...)
                 - url (bytes): the url the origin points to
 
         Returns:
             the id of the added origin, or of the identical one that already
             exists.
 
         """
         db = self.db
 
         data = db.origin_get_with(origin['type'], origin['url'], cur)
         if data:
             return data[0]
 
         return db.origin_add(origin['type'], origin['url'], cur)
 
     @db_transaction
     def fetch_history_start(self, origin_id, cur=None):
         """Add an entry for origin origin_id in fetch_history. Returns the id
         of the added fetch_history entry
         """
         fetch_history = {
             'origin': origin_id,
             'date': datetime.datetime.now(tz=datetime.timezone.utc),
         }
 
         return self.db.create_fetch_history(fetch_history, cur)
 
     @db_transaction
     def fetch_history_end(self, fetch_history_id, data, cur=None):
         """Close the fetch_history entry with id `fetch_history_id`, replacing
            its data with `data`.
         """
         now = datetime.datetime.now(tz=datetime.timezone.utc)
         fetch_history = self.db.get_fetch_history(fetch_history_id, cur)
 
         if not fetch_history:
             raise ValueError('No fetch_history with id %d' % fetch_history_id)
 
         fetch_history['duration'] = now - fetch_history['date']
 
         fetch_history.update(data)
 
         self.db.update_fetch_history(fetch_history, cur)
 
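     # The two calls above are meant to be paired around a fetch
     # (a sketch; the keys of `data` depend on the fetch_history
     # columns):
     #
     #   fh_id = storage.fetch_history_start(origin_id)
     #   try:
     #       ...  # perform the fetch
     #   finally:
     #       storage.fetch_history_end(fh_id, data)
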
     @db_transaction
     def fetch_history_get(self, fetch_history_id, cur=None):
         """Get the fetch_history entry with id `fetch_history_id`.
         """
         return self.db.get_fetch_history(fetch_history_id, cur)
 
     @db_transaction
     def entity_add(self, entities, cur=None):
         """Add the given entitites to the database (in entity_history).
 
         Args:
             - entities: iterable of dictionaries containing the following keys:
                 - uuid (uuid): id of the entity
                 - parent (uuid): id of the parent entity
                 - name (str): name of the entity
                 - type (str): type of entity (one of 'organization',
                     'group_of_entities', 'hosting', 'group_of_persons',
                     'person', 'project')
                 - description (str, optional): description of the entity
                 - homepage (str): url of the entity's homepage
                 - active (bool): whether the entity is active
                 - generated (bool): whether the entity was generated
                 - lister_metadata (dict): lister-specific entity metadata
                 - metadata (dict): other metadata for the entity
                 - validity (datetime.DateTime array): timestamps at which we
                     listed the entity.
         """
         db = self.db
 
         cols = list(db.entity_history_cols)
         cols.remove('id')
 
         db.mktemp_entity_history()
         db.copy_to(entities, 'tmp_entity_history', cols, cur)
         db.entity_history_add_from_temp()
 
     @db_transaction_generator
     def entity_get_from_lister_metadata(self, entities, cur=None):
         """Fetch entities from the database, matching with the lister and
            associated metadata.
 
         Args:
             entities: iterable of dictionaries containing the lister metadata
                to look for. Useful keys are 'lister', 'type', 'id', ...
         Returns:
             A generator of fetched entities with all their attributes.
             If no match was found, an entity with uuid set to None is
             yielded instead.
         """
 
         db = self.db
 
         db.mktemp_entity_lister(cur)
 
         mapped_entities = []
         for i, entity in enumerate(entities):
             mapped_entity = {
                 'id': i,
                 'lister_metadata': entity,
             }
             mapped_entities.append(mapped_entity)
 
         db.copy_to(mapped_entities, 'tmp_entity_lister',
                    ['id', 'lister_metadata'], cur)
 
         cur.execute('''select id, %s
                        from swh_entity_from_tmp_entity_lister()
                        order by id''' %
                     ','.join(db.entity_cols))
 
         for id, *entity_vals in cur:
             fetched_entity = dict(zip(db.entity_cols, entity_vals))
             if fetched_entity['uuid']:
                 yield fetched_entity
             else:
                 yield {
                     'uuid': None,
                     'lister_metadata': entities[id],
                 }
 
     @db_transaction_generator
     def entity_get(self, uuid, cur=None):
         """Returns the list of entity per its uuid identifier and also its
         parent hierarchy.
 
         Args:
             uuid: entity's identifier
 
         Returns:
             List of entities starting with entity with uuid and the parent
             hierarchy from such entity.
 
         """
         db = self.db
         for entity in db.entity_get(uuid, cur):
             yield dict(zip(db.entity_cols, entity))
 
     @db_transaction
     def entity_get_one(self, uuid, cur=None):
         """Returns one entity using its uuid identifier.
 
         Args:
             uuid: entity's identifier
 
         Returns:
             the object corresponding to the given entity
 
         """
         db = self.db
         entity = db.entity_get_one(uuid, cur)
         if entity:
             return dict(zip(db.entity_cols, entity))
         else:
             return None
 
     @db_transaction
     def stat_counters(self, cur=None):
         """compute statistics about the number of tuples in various tables
 
         Returns:
             a dictionary mapping textual labels (e.g., content) to integer
             values (e.g., the number of tuples in table content)
 
         """
         return {k: v for (k, v) in self.db.stat_counters()}
 
     @db_transaction_generator
     def content_mimetype_missing(self, mimetypes, cur=None):
         """List mimetypes missing from storage.
 
         Args:
-            mimetypes: iterable of sha1
+            mimetypes: iterable of dict with keys:
+            - id (bytes): sha1 identifier
+            - tool_name (str): tool used to compute the results
+            - tool_version (str): associated tool's version
 
         Returns:
-            an iterable of missing id
+            an iterable of the missing ids for the (id, tool_name,
+            tool_version) triplets
 
         """
         db = self.db
-        db.store_tmp_bytea(mimetypes, cur)
+        db.mktemp_content_mimetype_missing(cur)
+        db.copy_to(mimetypes, 'tmp_content_mimetype_missing',
+                   ['id', 'tool_name', 'tool_version'],
+                   cur)
         for obj in db.content_mimetype_missing_from_temp(cur):
             yield obj[0]
 
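     # Usage sketch under the new calling convention (hypothetical
     # values): each entry now carries the tool triplet alongside the
     # sha1:
     #
     #   missing = storage.content_mimetype_missing([{
     #       'id': sha1,
     #       'tool_name': 'file',
     #       'tool_version': '5.22',
     #   }])
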
     @db_transaction
     def content_mimetype_add(self, mimetypes, conflict_update=False, cur=None):
         """Add mimetypes not present in storage.
 
         Args:
             mimetypes: iterable of dictionary with keys:
-            - id: sha1
-            - mimetype: bytes
-            - encoding: bytes
+            - id (bytes): sha1 identifier
+            - mimetype (bytes): raw content's mimetype
+            - encoding (bytes): raw content's encoding
+            - tool_name (str): tool used to compute the results
+            - tool_version (str): associated tool's version
             conflict_update: Flag to determine if we want to overwrite (true)
             or skip duplicates (false, the default)
 
         """
         db = self.db
-        db.mktemp('content_mimetype', cur)
+        db.mktemp_content_mimetype(cur)
         db.copy_to(mimetypes, 'tmp_content_mimetype',
-                   ['id', 'mimetype', 'encoding'], cur)
+                   db.content_mimetype_cols,
+                   cur)
         db.content_mimetype_add_from_temp(conflict_update, cur)
 
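     # Input sketch for content_mimetype_add (hypothetical values),
     # matching db.content_mimetype_cols:
     #
     #   storage.content_mimetype_add([{
     #       'id': sha1,
     #       'mimetype': b'text/plain',
     #       'encoding': b'us-ascii',
     #       'tool_name': 'file',
     #       'tool_version': '5.22',
     #   }])
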
     @db_transaction_generator
     def content_mimetype_get(self, ids, cur=None):
         db = self.db
         db.store_tmp_bytea(ids, cur)
         for c in db.content_mimetype_get_from_temp():
-            yield dict(zip(db.content_mimetype_cols, c))
+            yield converters.db_to_mimetype(
+                dict(zip(db.content_mimetype_cols, c)))
 
     @db_transaction_generator
     def content_language_missing(self, languages, cur=None):
         """List languages missing from storage.
 
         Args:
             languages: iterable of sha1
 
         Returns:
             an iterable of missing id
 
         """
         db = self.db
         db.store_tmp_bytea(languages, cur)
         for obj in db.content_language_missing_from_temp(cur):
             yield obj[0]
 
     @db_transaction_generator
     def content_language_get(self, ids, cur=None):
         db = self.db
         db.store_tmp_bytea(ids, cur)
         for c in db.content_language_get_from_temp():
             yield dict(zip(db.content_language_cols, c))
 
     @db_transaction
     def content_language_add(self, languages, conflict_update=False, cur=None):
         """Add languages not present in storage.
 
         Args:
             languages: iterable of dictionary with keys:
             - id: sha1
             - lang: bytes
             conflict_update: Flag to determine if we want to overwrite (true)
             or skip duplicates (false, the default)
 
         """
         db = self.db
         db.mktemp('content_language', cur)
         # empty language is mapped to 'unknown'
         db.copy_to(
             ({
                 'id': l['id'],
                 'lang': 'unknown' if not l['lang'] else l['lang']
             } for l in languages),
             'tmp_content_language', ['id', 'lang'], cur)
 
         db.content_language_add_from_temp(conflict_update, cur)
 
     @db_transaction_generator
     def content_ctags_missing(self, ctags, cur=None):
         """List ctags missing from storage.
 
         Args:
             ctags: iterable of dict with keys:
             - id (bytes): sha1 identifier
             - tool_name (str): tool name used
             - tool_version (str): associated version
 
         Returns:
             an iterable of missing id
 
         """
         db = self.db
 
         db.mktemp_content_ctags_missing(cur)
         db.copy_to(ctags,
                    tblname='tmp_content_ctags_missing',
                    columns=['id', 'tool_name', 'tool_version'],
                    cur=cur)
         for obj in db.content_ctags_missing_from_temp(cur):
             yield obj[0]
 
     @db_transaction_generator
     def content_ctags_get(self, ids, cur=None):
         """Retrieve ctags per id.
 
         Args:
             ids ([sha1]): Iterable of sha1
 
         """
         db = self.db
         db.store_tmp_bytea(ids, cur)
         for c in db.content_ctags_get_from_temp():
             yield converters.db_to_ctags(dict(zip(db.content_ctags_cols, c)))
 
     @db_transaction
     def content_ctags_add(self, ctags, conflict_update=False, cur=None):
         """Add ctags not present in storage
 
         Args:
             ctags: iterable of dictionaries with keys:
             - id (bytes): sha1
             - tool_name (str): tool used to compute the ctags
             - tool_version (str): associated tool's version
             - ctags ([dict]): List of dictionaries with keys (name,
             kind, line, lang)
 
         """
         db = self.db
 
         def _convert_ctags(ctags):
             """Convert ctags to list of ctags.
 
             """
             res = []
             for ctag in ctags:
                 res.extend(converters.ctags_to_db(ctag))
             return res
 
         db.mktemp_content_ctags(cur)
         db.copy_to(_convert_ctags(ctags),
                    tblname='tmp_content_ctags',
                    columns=db.content_ctags_cols,
                    cur=cur)
 
         db.content_ctags_add_from_temp(conflict_update, cur)
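 
     # Sketch (`storage`, `sha1` and the tool values are assumptions):
     # one input dict per content, carrying several symbols, is flattened
     # by converters.ctags_to_db into one row per symbol before the copy:
     #
     #     storage.content_ctags_add([{
     #         'id': sha1,
     #         'tool_name': 'some-tool',        # hypothetical tool
     #         'tool_version': 'some-version',
     #         'ctags': [
     #             {'name': 'main', 'kind': 'function',
     #              'line': 12, 'lang': 'C'},
     #         ],
     #     }])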
 
     @db_transaction_generator
     def content_ctags_search(self, expression,
                              limit=10, last_sha1=None, cur=None):
         """Search through content's raw ctags symbols.
 
         Args:
             expression (str): Expression to search for
             limit (int): Number of rows to return (defaults to 10).
             last_sha1 (str): sha1 offset from which to retrieve data
             (defaults to None).
 
         Yields:
             rows of ctags including id, name, lang, kind and line
 
         """
         db = self.db
 
         for obj in db.content_ctags_search(expression, last_sha1, limit,
                                            cur=cur):
             yield converters.db_to_ctags(dict(zip(db.content_ctags_cols, obj)))
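 
     # Pagination sketch (`storage` is an assumption): feed the last id
     # of a page back as last_sha1 to retrieve the next page:
     #
     #     page = list(storage.content_ctags_search('main', limit=10))
     #     if page:
     #         next_page = list(storage.content_ctags_search(
     #             'main', limit=10, last_sha1=page[-1]['id']))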
 
     @db_transaction_generator
     def content_fossology_license_missing(self, licenses, cur=None):
         """List license missing from storage.
 
         Args:
             licenses ([bytes]): iterable of sha1
 
         Returns:
             an iterable of missing id
 
         """
         db = self.db
         db.store_tmp_bytea(licenses, cur)
         for obj in db.content_fossology_license_missing_from_temp(cur):
             yield obj[0]
 
     @db_transaction_generator
     def content_fossology_license_get(self, ids, cur=None):
         """Retrieve licenses per id.
 
         Args:
             ids ([sha1]): Iterable of sha1
 
         Yields:
             List of dict with the following keys:
             - id (bytes)
             - licenses ([str]): associated licenses for that content
 
         """
         db = self.db
         db.store_tmp_bytea(ids, cur)
 
         for c in db.content_fossology_license_get_from_temp():
             yield dict(zip(db.content_fossology_license_cols, c))
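 
     # Sketch (`storage` and `sha1` are assumptions): each yielded dict
     # groups every license found for one content:
     #
     #     for entry in storage.content_fossology_license_get([sha1]):
     #         print(entry['id'], entry['licenses'])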
 
     @db_transaction
     def content_fossology_license_add(self, licenses,
                                       conflict_update=False, cur=None):
         """Add licenses not present in storage.
 
         Args:
             licenses ([dict]): iterable of dicts with keys:
                 - id (bytes): sha1
                 - licenses ([str]): licenses associated with the sha1
                 - tool_name (str): tool used to compute the licenses
                 - tool_version (str): associated tool's version
             conflict_update: whether to overwrite duplicates (true) or
             skip them (false, the default)
 
         Returns:
             List of content_license entries which failed due to
             unknown licenses
 
         """
         db = self.db
 
         # First, check that the licenses are known
         licenses_to_check = set()      # set of licenses to check
         content_licenses_to_add = {}   # content_licenses to add
         names_to_content_license = {}  # map from names to content licenses
 
         for c in licenses:
             id = c['id']
 
             for name in c['licenses']:
                 licenses_to_check.add(name)
                 l = names_to_content_license.get(name, [])
                 l.append(id)
                 names_to_content_license[name] = l
 
             content_licenses_to_add[id] = c
 
         db.mktemp_content_fossology_license_unknown(cur)
         db.copy_to(({'name': name} for name in licenses_to_check),
                    tblname='tmp_content_fossology_license_unknown',
                    columns=['name'],
                    cur=cur)
         unknown_licenses = db.content_fossology_license_unknown(cur)
 
         # Filter out the entries referencing unknown licenses
         # (these make up the result)
         wrong_content_licenses = []
         for name, in unknown_licenses:
             for id in names_to_content_license[name]:
                 # popping with a default is safe even when the id has
                 # already been removed, since one content can have
                 # several unknown licenses
                 content_license = content_licenses_to_add.pop(id, None)
                 if content_license:
                     wrong_content_licenses.append(content_license)
 
         if content_licenses_to_add:
             # Then, we add the correct ones
             db.mktemp_content_fossology_license(cur)
             db.copy_to(
                 ({
                     'id': c['id'],
                     'tool_name': c['tool_name'],
                     'tool_version': c['tool_version'],
                     'license': license,
                   } for c in content_licenses_to_add.values()
                     for license in c['licenses']),
                 tblname='tmp_content_fossology_license',
                 columns=['id', 'tool_name', 'tool_version', 'license'],
                 cur=cur)
             db.content_fossology_license_add_from_temp(conflict_update, cur)
 
         return wrong_content_licenses
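 
     # Sketch (`storage`, `sha1` and the tool/license values are
     # assumptions): if any license of a content is unknown to the
     # fossology_license table, the whole entry is returned instead of
     # being stored:
     #
     #     rejected = storage.content_fossology_license_add([{
     #         'id': sha1,
     #         'licenses': ['GPL-2.0', 'no-such-license'],
     #         'tool_name': 'nomos',      # hypothetical tool
     #         'tool_version': '3.1.0',
     #     }])
     #     # rejected contains the whole input dict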
diff --git a/swh/storage/tests/test_converters.py b/swh/storage/tests/test_converters.py
index ea3533ad..55ed3f26 100644
--- a/swh/storage/tests/test_converters.py
+++ b/swh/storage/tests/test_converters.py
@@ -1,205 +1,229 @@
 # Copyright (C) 2015  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import unittest
 
 from nose.tools import istest
 from nose.plugins.attrib import attr
 
 from swh.storage import converters
 
 
 @attr('!db')
 class TestConverters(unittest.TestCase):
     def setUp(self):
         self.maxDiff = None
 
     @istest
     def db_to_author(self):
         # when
         actual_author = converters.db_to_author(
             1, b'fullname', b'name', b'email')
 
         # then
         self.assertEquals(actual_author, {
             'id': 1,
             'fullname': b'fullname',
             'name': b'name',
             'email': b'email',
         })
 
     @istest
     def db_to_revision(self):
         # when
         actual_revision = converters.db_to_revision({
             'id': 'revision-id',
             'date': None,
             'date_offset': None,
             'date_neg_utc_offset': None,
             'committer_date': None,
             'committer_date_offset': None,
             'committer_date_neg_utc_offset': None,
             'type': 'rev',
             'directory': b'dir-sha1',
             'message': b'commit message',
             'author_id': 'auth-id',
             'author_fullname': b'auth-fullname',
             'author_name': b'auth-name',
             'author_email': b'auth-email',
             'committer_id': 'comm-id',
             'committer_fullname': b'comm-fullname',
             'committer_name': b'comm-name',
             'committer_email': b'comm-email',
             'metadata': {},
             'synthetic': False,
             'parents': [123, 456]
         })
 
         # then
         self.assertEquals(actual_revision, {
             'id': 'revision-id',
             'author': {
                 'id': 'auth-id',
                 'fullname': b'auth-fullname',
                 'name': b'auth-name',
                 'email': b'auth-email',
             },
             'date': None,
             'committer': {
                 'id': 'comm-id',
                 'fullname': b'comm-fullname',
                 'name': b'comm-name',
                 'email': b'comm-email',
             },
             'committer_date': None,
             'type': 'rev',
             'directory': b'dir-sha1',
             'message': b'commit message',
             'metadata': {},
             'synthetic': False,
             'parents': [123, 456],
         })
 
     @istest
     def db_to_release(self):
         # when
         actual_release = converters.db_to_release({
             'id': b'release-id',
             'target': b'revision-id',
             'target_type': 'revision',
             'date': None,
             'date_offset': None,
             'date_neg_utc_offset': None,
             'name': b'release-name',
             'comment': b'release comment',
             'synthetic': True,
             'author_id': 'auth-id',
             'author_fullname': b'auth-fullname',
             'author_name': b'auth-name',
             'author_email': b'auth-email',
         })
 
         # then
         self.assertEquals(actual_release, {
             'author': {
                 'id': 'auth-id',
                 'fullname': b'auth-fullname',
                 'name': b'auth-name',
                 'email': b'auth-email',
             },
             'date': None,
             'id': b'release-id',
             'name': b'release-name',
             'message': b'release comment',
             'synthetic': True,
             'target': b'revision-id',
             'target_type': 'revision'
         })
 
     @istest
     def db_to_git_headers(self):
         raw_data = [
             ['gpgsig', b'garbage\x89a\x43b\x14'],
             ['extra', [b'fo\\\\\\o', b'bar\\', b'inval\\\\\x99id']],
         ]
 
         db_data = converters.git_headers_to_db(raw_data)
         loop = converters.db_to_git_headers(db_data)
         self.assertEquals(raw_data, loop)
 
     @istest
     def ctags_to_db(self):
         input_ctag = {
             'id': b'some-id',
             'tool_name': 'some-toolname',
             'tool_version': 'some-toolversion',
             'ctags': [
                 {
                     'name': 'some-name',
                     'kind': 'some-kind',
                     'line': 10,
                     'lang': 'Yaml',
                 }, {
                     'name': 'main',
                     'kind': 'function',
                     'line': 12,
                     'lang': 'Yaml',
                 },
             ]
         }
 
         expected_ctags = [
             {
                 'id': b'some-id',
                 'name': 'some-name',
                 'kind': 'some-kind',
                 'line': 10,
                 'lang': 'Yaml',
                 'tool_name': 'some-toolname',
                 'tool_version': 'some-toolversion',
             }, {
                 'id': b'some-id',
                 'name': 'main',
                 'kind': 'function',
                 'line': 12,
                 'lang': 'Yaml',
                 'tool_name': 'some-toolname',
                 'tool_version': 'some-toolversion',
             }]
 
         # when
         actual_ctags = converters.ctags_to_db(input_ctag)
 
         # then
         self.assertEquals(actual_ctags, expected_ctags)
 
     @istest
     def db_to_ctags(self):
         input_ctags = {
             'id': b'some-id',
             'tool_name': 'some-toolname',
             'tool_version': 'some-toolversion',
             'name': 'some-name',
             'kind': 'some-kind',
             'line': 10,
             'lang': 'Yaml',
         }
         expected_ctags = {
             'id': b'some-id',
             'name': 'some-name',
             'kind': 'some-kind',
             'line': 10,
             'lang': 'Yaml',
             'tool': {
                 'name': 'some-toolname',
                 'version': 'some-toolversion'
             }
         }
 
         # when
         actual_ctags = converters.db_to_ctags(input_ctags)
 
         # then
         self.assertEquals(actual_ctags, expected_ctags)
+
+    @istest
+    def db_to_mimetype(self):
+        input_mimetype = {
+            'id': b'some-id',
+            'tool_name': 'some-toolname',
+            'tool_version': 'some-toolversion',
+            'encoding': b'ascii',
+            'mimetype': b'text/plain',
+        }
+
+        expected_mimetype = {
+            'id': b'some-id',
+            'encoding': b'ascii',
+            'mimetype': b'text/plain',
+            'tool': {
+                'name': 'some-toolname',
+                'version': 'some-toolversion',
+            }
+        }
+
+        actual_mimetype = converters.db_to_mimetype(input_mimetype)
+
+        self.assertEquals(actual_mimetype, expected_mimetype)
diff --git a/swh/storage/tests/test_storage.py b/swh/storage/tests/test_storage.py
index 19b0b68d..4807fb1b 100644
--- a/swh/storage/tests/test_storage.py
+++ b/swh/storage/tests/test_storage.py
@@ -1,2851 +1,2908 @@
 # Copyright (C) 2015-2016  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import copy
 import datetime
 import os
 import psycopg2
 import shutil
 import tempfile
 import unittest
 from uuid import UUID
 
 from unittest.mock import patch
 
 from nose.tools import istest
 from nose.plugins.attrib import attr
 
 from swh.core.tests.db_testing import DbTestFixture
 from swh.core.hashutil import hex_to_hash
 from swh.storage import Storage
 from swh.storage.db import cursor_to_bytes
 
 
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
 TEST_DATA_DIR = os.path.join(TEST_DIR, '../../../../swh-storage-testdata')
 
 
 @attr('db')
 class AbstractTestStorage(DbTestFixture):
     """Base class for Storage testing.
 
     This class is used as-is to test local storage (see TestStorage
     below) and remote storage (see TestRemoteStorage in
     test_remote_storage.py).
 
     We need to have the two classes inherit from this base class
     separately to avoid nosetests running the tests from the base
     class twice.
 
     """
     TEST_DB_DUMP = os.path.join(TEST_DATA_DIR, 'dumps/swh.dump')
 
     def setUp(self):
         super().setUp()
         self.maxDiff = None
         self.objroot = tempfile.mkdtemp()
         self.storage = Storage(self.conn, self.objroot)
 
         self.cont = {
             'data': b'42\n',
             'length': 3,
             'sha1': hex_to_hash(
                 '34973274ccef6ab4dfaaf86599792fa9c3fe4689'),
             'sha1_git': hex_to_hash(
                 'd81cc0710eb6cf9efd5b920a8453e1e07157b6cd'),
             'sha256': hex_to_hash(
                 '673650f936cb3b0a2f93ce09d81be107'
                 '48b1b203c19e8176b4eefc1964a0cf3a'),
             'status': 'visible',
         }
 
         self.cont2 = {
             'data': b'4242\n',
             'length': 5,
             'sha1': hex_to_hash(
                 '61c2b3a30496d329e21af70dd2d7e097046d07b7'),
             'sha1_git': hex_to_hash(
                 '36fade77193cb6d2bd826161a0979d64c28ab4fa'),
             'sha256': hex_to_hash(
                 '859f0b154fdb2d630f45e1ecae4a8629'
                 '15435e663248bb8461d914696fc047cd'),
             'status': 'visible',
         }
 
         self.cont3 = {
             'data': b'424242\n',
             'length': 7,
             'sha1': hex_to_hash(
                 '3e21cc4942a4234c9e5edd8a9cacd1670fe59f13'),
             'sha1_git': hex_to_hash(
                 'c932c7649c6dfa4b82327d121215116909eb3bea'),
             'sha256': hex_to_hash(
                 '92fb72daf8c6818288a35137b72155f5'
                 '07e5de8d892712ab96277aaed8cf8a36'),
             'status': 'visible',
         }
 
         self.missing_cont = {
             'data': b'missing\n',
             'length': 8,
             'sha1': hex_to_hash(
                 'f9c24e2abb82063a3ba2c44efd2d3c797f28ac90'),
             'sha1_git': hex_to_hash(
                 '33e45d56f88993aae6a0198013efa80716fd8919'),
             'sha256': hex_to_hash(
                 '6bbd052ab054ef222c1c87be60cd191a'
                 'ddedd24cc882d1f5f7f7be61dc61bb3a'),
             'status': 'absent',
         }
 
         self.skipped_cont = {
             'length': 1024 * 1024 * 200,
             'sha1_git': hex_to_hash(
                 '33e45d56f88993aae6a0198013efa80716fd8920'),
             'reason': 'Content too long',
             'status': 'absent',
         }
 
         self.skipped_cont2 = {
             'length': 1024 * 1024 * 300,
             'sha1_git': hex_to_hash(
                 '33e45d56f88993aae6a0198013efa80716fd8921'),
             'reason': 'Content too long',
             'status': 'absent',
         }
 
         self.dir = {
             'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa90',
             'entries': [
                 {
                     'name': b'foo',
                     'type': 'file',
                     'target': self.cont['sha1_git'],
                     'perms': 0o644,
                 },
                 {
                     'name': b'bar\xc3',
                     'type': 'dir',
                     'target': b'12345678901234567890',
                     'perms': 0o2000,
                 },
             ],
         }
 
         self.dir2 = {
             'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa95',
             'entries': [
                 {
                     'name': b'oof',
                     'type': 'file',
                     'target': self.cont2['sha1_git'],
                     'perms': 0o644,
                 }
             ],
         }
 
         self.dir3 = {
             'id': hex_to_hash('33e45d56f88993aae6a0198013efa80716fd8921'),
             'entries': [
                 {
                     'name': b'foo',
                     'type': 'file',
                     'target': self.cont['sha1_git'],
                     'perms': 0o644,
                 },
                 {
                     'name': b'bar',
                     'type': 'dir',
                     'target': b'12345678901234560000',
                     'perms': 0o2000,
                 },
                 {
                     'name': b'hello',
                     'type': 'file',
                     'target': b'12345678901234567890',
                     'perms': 0o644,
                 },
 
             ],
         }
 
         self.minus_offset = datetime.timezone(datetime.timedelta(minutes=-120))
         self.plus_offset = datetime.timezone(datetime.timedelta(minutes=120))
 
         self.revision = {
             'id': b'56789012345678901234',
             'message': b'hello',
             'author': {
                 'name': b'Nicolas Dandrimont',
                 'email': b'nicolas@example.com',
                 'fullname': b'Nicolas Dandrimont <nicolas@example.com> ',
             },
             'date': {
                 'timestamp': 1234567890,
                 'offset': 120,
                 'negative_utc': None,
             },
             'committer': {
                 'name': b'St\xc3fano Zacchiroli',
                 'email': b'stefano@example.com',
                 'fullname': b'St\xc3fano Zacchiroli <stefano@example.com>'
             },
             'committer_date': {
                 'timestamp': 1123456789,
                 'offset': 0,
                 'negative_utc': True,
             },
             'parents': [b'01234567890123456789', b'23434512345123456789'],
             'type': 'git',
             'directory': self.dir['id'],
             'metadata': {
                 'checksums': {
                     'sha1': 'tarball-sha1',
                     'sha256': 'tarball-sha256',
                 },
                 'signed-off-by': 'some-dude',
                 'extra_headers': [
                     ['gpgsig', b'test123'],
                     ['mergetags', [b'foo\\bar', b'\x22\xaf\x89\x80\x01\x00']],
                 ],
             },
             'synthetic': True
         }
 
         self.revision2 = {
             'id': b'87659012345678904321',
             'message': b'hello again',
             'author': {
                 'name': b'Roberto Dicosmo',
                 'email': b'roberto@example.com',
                 'fullname': b'Roberto Dicosmo <roberto@example.com>',
             },
             'date': {
                 'timestamp': 1234567843.22,
                 'offset': -720,
                 'negative_utc': None,
             },
             'committer': {
                 'name': b'tony',
                 'email': b'ar@dumont.fr',
                 'fullname': b'tony <ar@dumont.fr>',
             },
             'committer_date': {
                 'timestamp': 1123456789,
                 'offset': 0,
                 'negative_utc': False,
             },
             'parents': [b'01234567890123456789'],
             'type': 'git',
             'directory': self.dir2['id'],
             'metadata': None,
             'synthetic': False
         }
 
         self.revision3 = {
             'id': hex_to_hash('7026b7c1a2af56521e951c01ed20f255fa054238'),
             'message': b'a simple revision with no parents this time',
             'author': {
                 'name': b'Roberto Dicosmo',
                 'email': b'roberto@example.com',
                 'fullname': b'Roberto Dicosmo <roberto@example.com>',
             },
             'date': {
                 'timestamp': 1234567843.22,
                 'offset': -720,
                 'negative_utc': None,
             },
             'committer': {
                 'name': b'tony',
                 'email': b'ar@dumont.fr',
                 'fullname': b'tony <ar@dumont.fr>',
             },
             'committer_date': {
                 'timestamp': 1127351742,
                 'offset': 0,
                 'negative_utc': False,
             },
             'parents': [],
             'type': 'git',
             'directory': self.dir2['id'],
             'metadata': None,
             'synthetic': True
         }
 
         self.revision4 = {
             'id': hex_to_hash('368a48fe15b7db2383775f97c6b247011b3f14f4'),
             'message': b'parent of self.revision2',
             'author': {
                 'name': b'me',
                 'email': b'me@soft.heri',
                 'fullname': b'me <me@soft.heri>',
             },
             'date': {
                 'timestamp': 1244567843.22,
                 'offset': -720,
                 'negative_utc': None,
             },
             'committer': {
                 'name': b'committer-dude',
                 'email': b'committer@dude.com',
                 'fullname': b'committer-dude <committer@dude.com>',
             },
             'committer_date': {
                 'timestamp': 1244567843.22,
                 'offset': -720,
                 'negative_utc': None,
             },
             'parents': [self.revision3['id']],
             'type': 'git',
             'directory': self.dir['id'],
             'metadata': None,
             'synthetic': False
         }
 
         self.origin = {
             'url': 'file:///dev/null',
             'type': 'git',
         }
 
         self.origin2 = {
             'url': 'file:///dev/zero',
             'type': 'git',
         }
 
         self.date_visit1 = datetime.datetime(2015, 1, 1, 23, 0, 0,
                                              tzinfo=datetime.timezone.utc)
 
         self.occurrence = {
             'branch': b'master',
             'target': b'67890123456789012345',
             'target_type': 'revision',
         }
 
         self.date_visit2 = datetime.datetime(2015, 1, 1, 23, 0, 0,
                                              tzinfo=datetime.timezone.utc)
 
         self.occurrence2 = {
             'branch': b'master',
             'target': self.revision2['id'],
             'target_type': 'revision',
         }
 
         self.date_visit3 = datetime.datetime(2015, 1, 1, 23, 0, 0,
                                              tzinfo=datetime.timezone.utc)
 
         # template occurrence to be filled in test (cf. revision_log_by)
         self.occurrence3 = {
             'branch': b'master',
             'target_type': 'revision',
         }
 
         self.release = {
             'id': b'87659012345678901234',
             'name': b'v0.0.1',
             'author': {
                 'name': b'olasd',
                 'email': b'nic@olasd.fr',
                 'fullname': b'olasd <nic@olasd.fr>',
             },
             'date': {
                 'timestamp': 1234567890,
                 'offset': 42,
                 'negative_utc': None,
             },
             'target': b'43210987654321098765',
             'target_type': 'revision',
             'message': b'synthetic release',
             'synthetic': True,
         }
 
         self.release2 = {
             'id': b'56789012348765901234',
             'name': b'v0.0.2',
             'author': {
                 'name': b'tony',
                 'email': b'ar@dumont.fr',
                 'fullname': b'tony <ar@dumont.fr>',
             },
             'date': {
                 'timestamp': 1634366813,
                 'offset': -120,
                 'negative_utc': None,
             },
             'target': b'432109\xa9765432\xc309\x00765',
             'target_type': 'revision',
             'message': b'v0.0.2\nMisc performance improvements + bug fixes',
             'synthetic': False
         }
 
         self.release3 = {
             'id': b'87659012345678904321',
             'name': b'v0.0.2',
             'author': {
                 'name': b'tony',
                 'email': b'tony@ardumont.fr',
                 'fullname': b'tony <tony@ardumont.fr>',
             },
             'date': {
                 'timestamp': 1634336813,
                 'offset': 0,
                 'negative_utc': False,
             },
             'target': self.revision2['id'],
             'target_type': 'revision',
             'message': b'yet another synthetic release',
             'synthetic': True,
         }
 
         self.fetch_history_date = datetime.datetime(
             2015, 1, 2, 21, 0, 0,
             tzinfo=datetime.timezone.utc)
         self.fetch_history_end = datetime.datetime(
             2015, 1, 2, 23, 0, 0,
             tzinfo=datetime.timezone.utc)
 
         self.fetch_history_duration = (self.fetch_history_end -
                                        self.fetch_history_date)
 
         self.fetch_history_data = {
             'status': True,
             'result': {'foo': 'bar'},
             'stdout': 'blabla',
             'stderr': 'blablabla',
         }
 
         self.entity1 = {
             'uuid': UUID('f96a7ec1-0058-4920-90cc-7327e4b5a4bf'),
             # GitHub users
             'parent': UUID('ad6df473-c1d2-4f40-bc58-2b091d4a750e'),
             'name': 'github:user:olasd',
             'type': 'person',
             'description': 'Nicolas Dandrimont',
             'homepage': 'http://example.com',
             'active': True,
             'generated': True,
             'lister_metadata': {
                 # swh.lister.github
                 'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
                 'id': 12877,
                 'type': 'user',
                 'last_activity': '2015-11-03',
             },
             'metadata': None,
             'validity': [
                 datetime.datetime(2015, 11, 3, 11, 0, 0,
                                   tzinfo=datetime.timezone.utc),
             ]
         }
 
         self.entity1_query = {
             'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
             'id': 12877,
             'type': 'user',
         }
 
         self.entity2 = {
             'uuid': UUID('3903d075-32d6-46d4-9e29-0aef3612c4eb'),
             # GitHub users
             'parent': UUID('ad6df473-c1d2-4f40-bc58-2b091d4a750e'),
             'name': 'github:user:zacchiro',
             'type': 'person',
             'description': 'Stefano Zacchiroli',
             'homepage': 'http://example.com',
             'active': True,
             'generated': True,
             'lister_metadata': {
                 # swh.lister.github
                 'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
                 'id': 216766,
                 'type': 'user',
                 'last_activity': '2015-11-03',
             },
             'metadata': None,
             'validity': [
                 datetime.datetime(2015, 11, 3, 11, 0, 0,
                                   tzinfo=datetime.timezone.utc),
             ]
         }
 
         self.entity3 = {
             'uuid': UUID('111df473-c1d2-4f40-bc58-2b091d4a7111'),
             # GitHub users
             'parent': UUID('222df473-c1d2-4f40-bc58-2b091d4a7222'),
             'name': 'github:user:ardumont',
             'type': 'person',
             'description': 'Antoine R. Dumont a.k.a tony',
             'homepage': 'https://ardumont.github.io',
             'active': True,
             'generated': True,
             'lister_metadata': {
                 'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
                 'id': 666,
                 'type': 'user',
                 'last_activity': '2016-01-15',
             },
             'metadata': None,
             'validity': [
                 datetime.datetime(2015, 11, 3, 11, 0, 0,
                                   tzinfo=datetime.timezone.utc),
             ]
         }
 
         self.entity4 = {
             'uuid': UUID('222df473-c1d2-4f40-bc58-2b091d4a7222'),
             # GitHub users
             'parent': None,
             'name': 'github:user:ToNyX',
             'type': 'person',
             'description': 'ToNyX',
             'homepage': 'https://ToNyX.github.io',
             'active': True,
             'generated': True,
             'lister_metadata': {
                 'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
                 'id': 999,
                 'type': 'user',
                 'last_activity': '2015-12-24',
             },
             'metadata': None,
             'validity': [
                 datetime.datetime(2015, 11, 3, 11, 0, 0,
                                   tzinfo=datetime.timezone.utc),
             ]
         }
 
         self.entity2_query = {
             'lister_metadata': {
                 'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
                 'id': 216766,
                 'type': 'user',
             },
         }
 
     def tearDown(self):
         shutil.rmtree(self.objroot)
 
         self.cursor.execute("""SELECT table_name FROM information_schema.tables
                                WHERE table_schema = %s""", ('public',))
 
         tables = set(table for (table,) in self.cursor.fetchall())
         tables -= {'dbversion', 'entity', 'entity_history', 'listable_entity',
                    'fossology_license', 'indexer_configuration'}
 
         for table in tables:
             self.cursor.execute('truncate table %s cascade' % table)
 
         self.cursor.execute('delete from entity where generated=true')
         self.cursor.execute('delete from entity_history where generated=true')
         self.conn.commit()
 
         super().tearDown()
 
     @istest
     def check_config(self):
         self.assertTrue(self.storage.check_config(check_write=True))
         self.assertTrue(self.storage.check_config(check_write=False))
 
     @istest
     def content_add(self):
         cont = self.cont
 
         self.storage.content_add([cont])
         if hasattr(self.storage, 'objstorage'):
             self.assertIn(cont['sha1'], self.storage.objstorage)
         self.cursor.execute('SELECT sha1, sha1_git, sha256, length, status'
                             ' FROM content WHERE sha1 = %s',
                             (cont['sha1'],))
         datum = self.cursor.fetchone()
         self.assertEqual(
             (datum[0].tobytes(), datum[1].tobytes(), datum[2].tobytes(),
              datum[3], datum[4]),
             (cont['sha1'], cont['sha1_git'], cont['sha256'],
              cont['length'], 'visible'))
 
     @istest
     def content_add_collision(self):
         cont1 = self.cont
 
         # create (corrupted) content with same sha1{,_git} but != sha256
         cont1b = cont1.copy()
         sha256_array = bytearray(cont1b['sha256'])
         sha256_array[0] += 1
         cont1b['sha256'] = bytes(sha256_array)
 
         with self.assertRaises(psycopg2.IntegrityError):
             self.storage.content_add([cont1, cont1b])
 
     @istest
     def skipped_content_add(self):
         cont = self.skipped_cont
         cont2 = self.skipped_cont2
 
         self.storage.content_add([cont])
         self.storage.content_add([cont2])
 
         self.cursor.execute('SELECT sha1, sha1_git, sha256, length, status,'
                             'reason FROM skipped_content ORDER BY sha1_git')
 
         datum = self.cursor.fetchone()
         self.assertEqual(
             (datum[0], datum[1].tobytes(), datum[2],
              datum[3], datum[4], datum[5]),
             (None, cont['sha1_git'], None,
              cont['length'], 'absent', 'Content too long'))
 
         datum2 = self.cursor.fetchone()
         self.assertEqual(
             (datum2[0], datum2[1].tobytes(), datum2[2],
              datum2[3], datum2[4], datum2[5]),
             (None, cont2['sha1_git'], None,
              cont2['length'], 'absent', 'Content too long'))
 
     @istest
     def content_missing(self):
         cont2 = self.cont2
         missing_cont = self.missing_cont
         self.storage.content_add([cont2])
         gen = self.storage.content_missing([cont2, missing_cont])
 
         self.assertEqual(list(gen), [missing_cont['sha1']])
 
     @istest
     def content_missing_per_sha1(self):
         # given
         cont2 = self.cont2
         missing_cont = self.missing_cont
         self.storage.content_add([cont2])
         # when
         gen = self.storage.content_missing_per_sha1([cont2['sha1'],
                                                      missing_cont['sha1']])
 
         # then
         self.assertEqual(list(gen), [missing_cont['sha1']])
 
     @istest
     def content_get_metadata(self):
         cont1 = self.cont.copy()
         cont2 = self.cont2.copy()
 
         self.storage.content_add([cont1, cont2])
 
         gen = self.storage.content_get_metadata([cont1['sha1'], cont2['sha1']])
 
         # we only retrieve the metadata
         cont1.pop('data')
         cont2.pop('data')
 
         self.assertEqual(list(gen), [cont1, cont2])
 
     @istest
     def content_get_metadata_missing_sha1(self):
         cont1 = self.cont.copy()
         cont2 = self.cont2.copy()
 
         missing_cont = self.missing_cont.copy()
 
         self.storage.content_add([cont1, cont2])
 
         gen = self.storage.content_get_metadata([missing_cont['sha1']])
 
         # All the metadata keys are None
         missing_cont.pop('data')
         for key in list(missing_cont):
             if key != 'sha1':
                 missing_cont[key] = None
 
         self.assertEqual(list(gen), [missing_cont])
 
     @istest
     def directory_get(self):
         # given
         init_missing = list(self.storage.directory_missing([self.dir['id']]))
         self.assertEqual([self.dir['id']], init_missing)
 
         self.storage.directory_add([self.dir])
 
         # when
         actual_dirs = list(self.storage.directory_get([self.dir['id']]))
 
         self.assertEqual(len(actual_dirs), 1)
 
         dir0 = actual_dirs[0]
         self.assertEqual(dir0['id'], self.dir['id'])
         # ids are generated, so their values are non-deterministic
         self.assertEqual(len(dir0['file_entries']), 1)
         self.assertEqual(len(dir0['dir_entries']), 1)
         self.assertIsNone(dir0['rev_entries'])
 
         after_missing = list(self.storage.directory_missing([self.dir['id']]))
         self.assertEqual([], after_missing)
 
     @istest
     def directory_add(self):
         init_missing = list(self.storage.directory_missing([self.dir['id']]))
         self.assertEqual([self.dir['id']], init_missing)
 
         self.storage.directory_add([self.dir])
 
         stored_data = list(self.storage.directory_ls(self.dir['id']))
 
         data_to_store = [{
                  'dir_id': self.dir['id'],
                  'type': ent['type'],
                  'target': ent['target'],
                  'name': ent['name'],
                  'perms': ent['perms'],
                  'status': None,
                  'sha1': None,
                  'sha1_git': None,
                  'sha256': None,
             }
             for ent in sorted(self.dir['entries'], key=lambda ent: ent['name'])
         ]
 
         self.assertEqual(data_to_store, stored_data)
 
         after_missing = list(self.storage.directory_missing([self.dir['id']]))
         self.assertEqual([], after_missing)
 
     @istest
     def directory_entry_get_by_path(self):
         # given
         init_missing = list(self.storage.directory_missing([self.dir3['id']]))
         self.assertEqual([self.dir3['id']], init_missing)
 
         self.storage.directory_add([self.dir3])
 
         expected_entries = [
             {
                 'dir_id': self.dir3['id'],
                 'name': b'foo',
                 'type': 'file',
                 'target': self.cont['sha1_git'],
                 'sha1': None,
                 'sha1_git': None,
                 'sha256': None,
                 'status': None,
                 'perms': 0o644,
             },
             {
                 'dir_id': self.dir3['id'],
                 'name': b'bar',
                 'type': 'dir',
                 'target': b'12345678901234560000',
                 'sha1': None,
                 'sha1_git': None,
                 'sha256': None,
                 'status': None,
                 'perms': 0o2000,
             },
             {
                 'dir_id': self.dir3['id'],
                 'name': b'hello',
                 'type': 'file',
                 'target': b'12345678901234567890',
                 'sha1': None,
                 'sha1_git': None,
                 'sha256': None,
                 'status': None,
                 'perms': 0o644,
             },
         ]
 
         # when (all must be found here)
         for entry, expected_entry in zip(self.dir3['entries'],
                                          expected_entries):
             actual_entry = self.storage.directory_entry_get_by_path(
                 self.dir3['id'],
                 [entry['name']])
             self.assertEqual(actual_entry, expected_entry)
 
         # when (nothing should be found here since self.dir is not persisted)
         for entry in self.dir['entries']:
             actual_entry = self.storage.directory_entry_get_by_path(
                 self.dir['id'],
                 [entry['name']])
             self.assertIsNone(actual_entry)
 
     @istest
     def revision_add(self):
         init_missing = self.storage.revision_missing([self.revision['id']])
         self.assertEqual([self.revision['id']], list(init_missing))
 
         self.storage.revision_add([self.revision])
 
         end_missing = self.storage.revision_missing([self.revision['id']])
         self.assertEqual([], list(end_missing))
 
     def cache_content_revision_objects(self):
         self.storage.content_add([self.cont, self.cont2, self.cont3])
         directory = {
             'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa90',
             'entries': [
                 {
                     'name': b'bar',
                     'type': 'file',
                     'target': self.cont2['sha1_git'],
                     'perms': 0o644,
                 },
                 {
                     'name': b'foo',
                     'type': 'file',
                     'target': self.cont['sha1_git'],
                     'perms': 0o644,
                 },
                 {
                     'name': b'bar\xc3',
                     'type': 'dir',
                     'target': b'12345678901234567890',
                     'perms': 0o2000,
                 },
             ],
         }
         directory2 = copy.deepcopy(directory)
         directory2['id'] = (directory2['id'][:-1] +
                             bytes([(directory2['id'][-1] + 1) % 256]))
         directory2['entries'][1] = {
             'name': b'foo',
             'type': 'file',
             'target': self.cont3['sha1_git'],
             'perms': 0o644,
         }
 
         self.storage.directory_add([directory, directory2])
         revision = self.revision.copy()
         revision['directory'] = directory['id']
         revision2 = copy.deepcopy(revision)
         revision2['parents'] = [revision['id']]
         revision2['directory'] = directory2['id']
         revision2['id'] = (revision2['id'][:-1] +
                            bytes([(revision2['id'][-1] + 1) % 256]))
         self.storage.revision_add([revision, revision2])
         return (directory, directory2, revision, revision2)
 
     @istest
     def cache_content_revision_add(self):
         # Create a real arborescence tree (contents + directory) and a
         # revision targeting that directory.
         # Assert the cache is empty for that revision
         # Then create that revision
         # Trigger the cache population for that revision
         # Assert the cache now contains information for that revision
         # Trigger again the cache population for that revision
         # Assert the cache is not modified
 
         # given
         (directory, directory2,
          revision, revision2) = self.cache_content_revision_objects()
 
         # assert nothing in cache yet
         count_query = '''select count(*)
                          from cache_content_revision'''
         self.cursor.execute(count_query)
         ret = self.cursor.fetchone()
         self.assertEqual(ret, (0, ))
 
         # when, triggered the first time, we cache the revision
         self.storage.cache_content_revision_add([revision['id']])
         # the second time, we do nothing as this is already done
         self.storage.cache_content_revision_add([revision['id']])
 
         # then
         self.cursor.execute(count_query)
         ret = self.cursor.fetchone()
         # only 2 contents exist for that revision (the second call to
         # cache_content_revision_add is a no-op as the revision is
         # already cached)
         self.assertEqual(ret, (2, ))
 
         self.cursor.execute('select * from cache_content_revision')
         ret = self.cursor.fetchall()
 
         expected_cache_entries = [
             (directory['entries'][0]['target'], False,
              [[revision['id'], directory['entries'][0]['name']]]),
             (directory['entries'][1]['target'], False,
              [[revision['id'], directory['entries'][1]['name']]])
         ]
         for i, expected_entry in enumerate(expected_cache_entries):
             ret_entry = (ret[i][0].tobytes(), ret[i][1],
                          [[ret[i][2][0][0].tobytes(),
                            ret[i][2][0][1].tobytes()]])
             self.assertEquals(ret_entry, expected_entry)
 
     @istest
     def cache_content_revision_add_twice(self):
         # given
         (directory, directory2,
          revision, revision2) = self.cache_content_revision_objects()
 
         # assert nothing in cache yet
         count_query = '''select count(*)
                          from cache_content_revision'''
         self.cursor.execute(count_query)
         ret = self.cursor.fetchone()
         self.assertEqual(ret, (0, ))
 
         # when, triggered the first time, we cache the revision
         self.storage.cache_content_revision_add([revision['id']])
         # the second call caches the second revision as well
         self.storage.cache_content_revision_add([revision2['id']])
 
         # then
         self.cursor.execute('select * from cache_content_revision')
         cache_entries = {
             content.tobytes(): [[rev.tobytes(), path.tobytes()]
                                 for rev, path in rev_paths]
             for content, blacklisted, rev_paths in self.cursor.fetchall()
         }
 
         self.assertEquals(len(cache_entries), 3)
         self.assertEquals(len(cache_entries[self.cont['sha1_git']]), 1)
         self.assertEquals(len(cache_entries[self.cont2['sha1_git']]), 2)
         self.assertEquals(len(cache_entries[self.cont3['sha1_git']]), 1)
 
     @istest
     def cache_content_get_all(self):
         # given
         (directory, directory2,
          revision, revision2) = self.cache_content_revision_objects()
 
         # query returning the cached contents joined with their metadata
         test_query = '''select sha1, sha1_git, sha256, ccr.revision_paths
                         from cache_content_revision ccr
                         inner join content c on c.sha1_git=ccr.content'''
 
         self.storage.cache_content_revision_add([revision['id']])
         self.cursor.execute(test_query, (revision['id'],))
         ret = list(cursor_to_bytes(self.cursor))
 
         self.assertEqual(len(ret), 2)
 
         expected_contents = []
         for entry in ret:
             expected_contents.append(dict(
                 zip(['sha1', 'sha1_git', 'sha256', 'revision_paths'], entry)))
 
         # default filters give back everything
         actual_cache_contents = list(self.storage.cache_content_get_all())
 
         self.assertEquals(actual_cache_contents, expected_contents)
 
     @istest
     def cache_content_get(self):
         # given
         (directory, directory2,
          revision, revision2) = self.cache_content_revision_objects()
 
         # query returning one cached content joined with its metadata
         test_query = '''select c.sha1, c.sha1_git, c.sha256, ccr.revision_paths
                         from cache_content_revision ccr
                         inner join content c on c.sha1_git=ccr.content
                         where ccr.content=%s'''
 
         self.storage.cache_content_revision_add([revision['id']])
         self.cursor.execute(test_query, (self.cont2['sha1_git'],))
         ret = list(cursor_to_bytes(self.cursor))[0]
 
         self.assertIsNotNone(ret)
 
         expected_content = dict(
             zip(['sha1', 'sha1_git', 'sha256', 'revision_paths'], ret))
 
         # when
         actual_cache_content = self.storage.cache_content_get(self.cont2)
 
         # then
         self.assertEquals(actual_cache_content, expected_content)
 
     @istest
     def revision_log(self):
         # given
         # self.revision4 -is-child-of-> self.revision3
         self.storage.revision_add([self.revision3,
                                    self.revision4])
 
         # when
         actual_results = list(self.storage.revision_log(
             [self.revision4['id']]))
 
         # hack: ids generated
         for actual_result in actual_results:
             del actual_result['author']['id']
             del actual_result['committer']['id']
 
         self.assertEqual(len(actual_results), 2)  # rev4 -child-> rev3
         self.assertEquals(actual_results[0], self.revision4)
         self.assertEquals(actual_results[1], self.revision3)
 
     @istest
     def revision_log_with_limit(self):
         # given
         # self.revision4 -is-child-of-> self.revision3
         self.storage.revision_add([self.revision3,
                                    self.revision4])
         actual_results = list(self.storage.revision_log(
             [self.revision4['id']], 1))
 
         # hack: ids generated
         for actual_result in actual_results:
             del actual_result['author']['id']
             del actual_result['committer']['id']
 
         self.assertEqual(len(actual_results), 1)
         self.assertEquals(actual_results[0], self.revision4)
 
     @istest
     def revision_log_by(self):
         # given
         origin_id = self.storage.origin_add_one(self.origin2)
         self.storage.revision_add([self.revision3,
                                    self.revision4])
 
         # occurrence3 targets 'revision4'
         # with branch 'master' and origin origin_id
         occurrence3 = self.occurrence3.copy()
         date_visit1 = self.date_visit3
         origin_visit1 = self.storage.origin_visit_add(origin_id,
                                                       date_visit1)
         occurrence3.update({
             'origin': origin_id,
             'target': self.revision4['id'],
             'visit': origin_visit1['visit'],
         })
 
         self.storage.occurrence_add([occurrence3])
 
         # self.revision4 -is-child-of-> self.revision3
         # when
         actual_results = list(self.storage.revision_log_by(
             origin_id,
             branch_name=occurrence3['branch'],
             timestamp=date_visit1))
 
         # hack: ids generated
         for actual_result in actual_results:
             del actual_result['author']['id']
             del actual_result['committer']['id']
 
         self.assertEqual(len(actual_results), 2)
         self.assertEquals(actual_results[0], self.revision4)
         self.assertEquals(actual_results[1], self.revision3)
 
         # when - 2
         actual_results = list(self.storage.revision_log_by(
             origin_id,
             branch_name=None,
             timestamp=None,
             limit=1))
 
         # then
         for actual_result in actual_results:
             del actual_result['author']['id']
             del actual_result['committer']['id']
 
         self.assertEqual(len(actual_results), 1)
         self.assertEquals(actual_results[0], self.revision4)
 
         # when - 3 (revision not found)
 
         actual_res = list(self.storage.revision_log_by(
             origin_id,
             branch_name='inexistant-branch',
             timestamp=None))
 
         self.assertEquals(actual_res, [])
 
     @staticmethod
     def _short_revision(revision):
         return [revision['id'], revision['parents']]
 
     @istest
     def revision_shortlog(self):
         # given
         # self.revision4 -is-child-of-> self.revision3
         self.storage.revision_add([self.revision3,
                                    self.revision4])
 
         # when
         actual_results = list(self.storage.revision_shortlog(
             [self.revision4['id']]))
 
         self.assertEqual(len(actual_results), 2)  # rev4 -child-> rev3
         self.assertEquals(list(actual_results[0]),
                           self._short_revision(self.revision4))
         self.assertEquals(list(actual_results[1]),
                           self._short_revision(self.revision3))
 
     @istest
     def revision_shortlog_with_limit(self):
         # given
         # self.revision4 -is-child-of-> self.revision3
         self.storage.revision_add([self.revision3,
                                    self.revision4])
         actual_results = list(self.storage.revision_shortlog(
             [self.revision4['id']], 1))
 
         self.assertEqual(len(actual_results), 1)
         self.assertEquals(list(actual_results[0]),
                           self._short_revision(self.revision4))
 
     @istest
     def revision_get(self):
         self.storage.revision_add([self.revision])
 
         actual_revisions = list(self.storage.revision_get(
             [self.revision['id'], self.revision2['id']]))
 
         # when
         del actual_revisions[0]['author']['id']  # hack: ids are generated
         del actual_revisions[0]['committer']['id']
 
         self.assertEqual(len(actual_revisions), 2)
         self.assertEqual(actual_revisions[0], self.revision)
         self.assertIsNone(actual_revisions[1])
 
     @istest
     def revision_get_no_parents(self):
         self.storage.revision_add([self.revision3])
 
         get = list(self.storage.revision_get([self.revision3['id']]))
 
         self.assertEqual(len(get), 1)
         self.assertEqual(get[0]['parents'], [])  # no parents on this one
 
     @istest
     def revision_get_by(self):
         # given
         self.storage.content_add([self.cont2])
         self.storage.directory_add([self.dir2])  # points to self.cont2
         self.storage.revision_add([self.revision2])  # points to self.dir2
         origin_id = self.storage.origin_add_one(self.origin2)
 
         # occurrence2 points to 'revision2' with branch 'master'; we
         # need to point it to the right origin
         occurrence2 = self.occurrence2.copy()
         date_visit1 = self.date_visit2
         origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
         occurrence2.update({
             'origin': origin_id,
             'visit': origin_visit1['visit'],
         })
         self.storage.occurrence_add([occurrence2])
 
         # we want only revision 2
         expected_revisions = list(self.storage.revision_get(
             [self.revision2['id']]))
 
         # when
         actual_results = list(self.storage.revision_get_by(
             origin_id,
             occurrence2['branch'],
             None))
 
         self.assertEqual(actual_results[0], expected_revisions[0])
 
         # when (with no branch filtering, it's still ok)
         actual_results = list(self.storage.revision_get_by(
             origin_id,
             None,
             None))
 
         self.assertEqual(actual_results[0], expected_revisions[0])
 
     @istest
     def revision_get_by_multiple_occurrence(self):
         # 2 occurrences pointing to 2 different revisions
         # each occurence have 1 hour delta
         # the api must return the revision whose occurrence is the nearest.
 
         # given
         self.storage.content_add([self.cont2])
         self.storage.directory_add([self.dir2])
         self.storage.revision_add([self.revision2, self.revision3])
         origin_id = self.storage.origin_add_one(self.origin2)
 
         # occurrence2 points to 'revision2' with branch 'master'; we
         # need to point it to the right origin
         date_visit1 = self.date_visit2
         origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
         occurrence2 = self.occurrence2.copy()
         occurrence2.update({
             'origin': origin_id,
             'visit': origin_visit1['visit']
         })
 
         dt = datetime.timedelta(days=1)
         date_visit2 = date_visit1 + dt
         origin_visit2 = self.storage.origin_visit_add(origin_id, date_visit2)
         occurrence3 = self.occurrence2.copy()
         occurrence3.update({
             'origin': origin_id,
             'visit': origin_visit2['visit'],
             'target': self.revision3['id'],
         })
         # 2 occurrences on different revisions, with visit dates one day apart
         self.storage.occurrence_add([occurrence2])
         self.storage.occurrence_add([occurrence3])
 
         # when
         actual_results0 = list(self.storage.revision_get_by(
             origin_id,
             occurrence2['branch'],
             date_visit1))
 
         # hack: ids are generated
         del actual_results0[0]['author']['id']
         del actual_results0[0]['committer']['id']
 
         self.assertEquals(len(actual_results0), 1)
         self.assertEqual(actual_results0, [self.revision2])
 
         # when
         actual_results1 = list(self.storage.revision_get_by(
             origin_id,
             occurrence2['branch'],
             date_visit1 + dt/3))  # closer to first visit
 
         # hack: ids are generated
         del actual_results1[0]['author']['id']
         del actual_results1[0]['committer']['id']
 
         self.assertEquals(len(actual_results1), 1)
         self.assertEqual(actual_results1, [self.revision2])
 
         # when
         actual_results2 = list(self.storage.revision_get_by(
             origin_id,
             occurrence2['branch'],
             date_visit1 + 2*dt/3))  # closer to second visit
 
         del actual_results2[0]['author']['id']
         del actual_results2[0]['committer']['id']
 
         self.assertEquals(len(actual_results2), 1)
         self.assertEqual(actual_results2, [self.revision3])
 
         # when
         actual_results3 = list(self.storage.revision_get_by(
             origin_id,
             occurrence3['branch'],
             date_visit2))
 
         # hack: ids are generated
         del actual_results3[0]['author']['id']
         del actual_results3[0]['committer']['id']
 
         self.assertEquals(len(actual_results3), 1)
         self.assertEqual(actual_results3, [self.revision3])
 
         # when
         actual_results4 = list(self.storage.revision_get_by(
             origin_id,
             None,
             None))
 
         for actual_result in actual_results4:
             del actual_result['author']['id']
             del actual_result['committer']['id']
 
         self.assertEquals(len(actual_results4), 2)
         self.assertCountEqual(actual_results4,
                               [self.revision3, self.revision2])
 
     @istest
     def release_add(self):
         init_missing = self.storage.release_missing([self.release['id'],
                                                      self.release2['id']])
         self.assertEqual([self.release['id'], self.release2['id']],
                          list(init_missing))
 
         self.storage.release_add([self.release, self.release2])
 
         end_missing = self.storage.release_missing([self.release['id'],
                                                     self.release2['id']])
         self.assertEqual([], list(end_missing))
 
     @istest
     def release_get(self):
         # given
         self.storage.release_add([self.release, self.release2])
 
         # when
         actual_releases = list(self.storage.release_get([self.release['id'],
                                                          self.release2['id']]))
 
         # then
         for actual_release in actual_releases:
             del actual_release['author']['id']  # hack: ids are generated
 
         self.assertEquals([self.release, self.release2],
                           [actual_releases[0], actual_releases[1]])
 
     @istest
     def release_get_by(self):
         # given
         self.storage.revision_add([self.revision2])  # points to self.dir
         self.storage.release_add([self.release3])
         origin_id = self.storage.origin_add_one(self.origin2)
 
         # occurrence2 points to 'revision2' with branch 'master'; we
         # need it to point to the right origin
         origin_visit = self.storage.origin_visit_add(origin_id,
                                                      self.date_visit2)
         occurrence2 = self.occurrence2.copy()
         occurrence2.update({
             'origin': origin_id,
             'visit': origin_visit['visit'],
         })
 
         self.storage.occurrence_add([occurrence2])
 
         # we want only release3
         expected_releases = list(self.storage.release_get(
             [self.release3['id']]))
 
         # when
         actual_results = list(self.storage.release_get_by(
             occurrence2['origin']))
 
         # then
         self.assertEqual(actual_results[0], expected_releases[0])
 
     @istest
     def origin_add_one(self):
         origin0 = self.storage.origin_get(self.origin)
         self.assertIsNone(origin0)
 
         id = self.storage.origin_add_one(self.origin)
 
         actual_origin = self.storage.origin_get({'url': self.origin['url'],
                                                  'type': self.origin['type']})
         self.assertEqual(actual_origin['id'], id)
 
         id2 = self.storage.origin_add_one(self.origin)
 
         self.assertEqual(id, id2)
 
     @istest
     def origin_add(self):
         origin0 = self.storage.origin_get(self.origin)
         self.assertIsNone(origin0)
 
         id1, id2 = self.storage.origin_add([self.origin, self.origin2])
 
         actual_origin = self.storage.origin_get({
             'url': self.origin['url'],
             'type': self.origin['type'],
         })
         self.assertEqual(actual_origin['id'], id1)
 
         actual_origin2 = self.storage.origin_get({
             'url': self.origin2['url'],
             'type': self.origin2['type'],
         })
         self.assertEqual(actual_origin2['id'], id2)
 
     @istest
     def origin_add_twice(self):
         add1 = self.storage.origin_add([self.origin, self.origin2])
         add2 = self.storage.origin_add([self.origin, self.origin2])
 
         self.assertEqual(add1, add2)
 
     @istest
     def origin_get(self):
         self.assertIsNone(self.storage.origin_get(self.origin))
         id = self.storage.origin_add_one(self.origin)
 
         # lookup per type and url (returns id)
         actual_origin0 = self.storage.origin_get({'url': self.origin['url'],
                                                   'type': self.origin['type']})
         self.assertEqual(actual_origin0['id'], id)
 
         # lookup per id (returns dict)
         actual_origin1 = self.storage.origin_get({'id': id})
 
         self.assertEqual(actual_origin1, {'id': id,
                                           'type': self.origin['type'],
                                           'url': self.origin['url'],
                                           'lister': None,
                                           'project': None})
 
     @istest
     def origin_visit_add(self):
         # given
         self.assertIsNone(self.storage.origin_get(self.origin2))
 
         origin_id = self.storage.origin_add_one(self.origin2)
         self.assertIsNotNone(origin_id)
 
         # when
         origin_visit1 = self.storage.origin_visit_add(
             origin_id,
             ts=self.date_visit2)
 
         # then
         self.assertEquals(origin_visit1['origin'], origin_id)
         self.assertIsNotNone(origin_visit1['visit'])
         self.assertTrue(origin_visit1['visit'] > 0)
 
         actual_origin_visits = list(self.storage.origin_visit_get(origin_id))
         self.assertEquals(actual_origin_visits,
                           [{
                               'origin': origin_id,
                               'date': self.date_visit2,
                               'visit': origin_visit1['visit'],
                               'status': 'ongoing',
                               'metadata': None,
                           }])
 
     @istest
     def origin_visit_update(self):
         # given
         origin_id = self.storage.origin_add_one(self.origin2)
         origin_id2 = self.storage.origin_add_one(self.origin)
 
         origin_visit1 = self.storage.origin_visit_add(
             origin_id,
             ts=self.date_visit2)
 
         origin_visit2 = self.storage.origin_visit_add(
             origin_id,
             ts=self.date_visit3)
 
         origin_visit3 = self.storage.origin_visit_add(
             origin_id2,
             ts=self.date_visit3)
 
         # when
         visit1_metadata = {
             'contents': 42,
             'directories': 22,
         }
         self.storage.origin_visit_update(
             origin_id, origin_visit1['visit'], status='full',
             metadata=visit1_metadata)
         self.storage.origin_visit_update(origin_id2, origin_visit3['visit'],
                                          status='partial')
 
         # then
         actual_origin_visits = list(self.storage.origin_visit_get(origin_id))
         self.assertEquals(actual_origin_visits,
                           [{
                               'origin': origin_visit2['origin'],
                               'date': self.date_visit2,
                               'visit': origin_visit1['visit'],
                               'status': 'full',
                               'metadata': visit1_metadata,
                           },
                            {
                                'origin': origin_visit2['origin'],
                                'date': self.date_visit3,
                                'visit': origin_visit2['visit'],
                                'status': 'ongoing',
                                'metadata': None,
                            }])
 
         actual_origin_visits2 = list(self.storage.origin_visit_get(origin_id2))
         self.assertEquals(actual_origin_visits2,
                           [{
                               'origin': origin_visit3['origin'],
                               'date': self.date_visit3,
                               'visit': origin_visit3['visit'],
                               'status': 'partial',
                               'metadata': None,
                           }])
 
     @istest
     def origin_visit_get_by(self):
         origin_id = self.storage.origin_add_one(self.origin2)
         origin_id2 = self.storage.origin_add_one(self.origin)
 
         origin_visit1 = self.storage.origin_visit_add(
             origin_id,
             ts=self.date_visit2)
 
         occurrence2 = self.occurrence2.copy()
         occurrence2.update({
             'origin': origin_id,
             'visit': origin_visit1['visit'],
         })
 
         self.storage.occurrence_add([occurrence2])
 
         # Add some other {origin, visit} entries
         self.storage.origin_visit_add(origin_id, ts=self.date_visit3)
         self.storage.origin_visit_add(origin_id2, ts=self.date_visit3)
 
         # when
         visit1_metadata = {
             'contents': 42,
             'directories': 22,
         }
 
         self.storage.origin_visit_update(
             origin_id, origin_visit1['visit'], status='full',
             metadata=visit1_metadata)
 
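         # origin_visit_get_by is expected to fold the visit's occurrences
         # into a 'branch name -> {target, target_type}' mapping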
         expected_origin_visit = origin_visit1.copy()
         expected_origin_visit.update({
             'origin': origin_id,
             'visit': origin_visit1['visit'],
             'date': self.date_visit2,
             'metadata': visit1_metadata,
             'status': 'full',
             'occurrences': {
                 occurrence2['branch']: {
                     'target': occurrence2['target'],
                     'target_type': occurrence2['target_type'],
                 }
             }
         })
 
         # when
         actual_origin_visit1 = self.storage.origin_visit_get_by(
             origin_visit1['origin'], origin_visit1['visit'])
 
         # then
         self.assertEquals(actual_origin_visit1, expected_origin_visit)
 
     @istest
     def origin_visit_get_by_no_result(self):
         # No result
         actual_origin_visit = self.storage.origin_visit_get_by(
             10, 999)
 
         self.assertIsNone(actual_origin_visit)
 
     @istest
     def occurrence_add(self):
         occur = self.occurrence.copy()
 
         origin_id = self.storage.origin_add_one(self.origin2)
         date_visit1 = self.date_visit1
         origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
 
         revision = self.revision.copy()
         revision['id'] = occur['target']
         self.storage.revision_add([revision])
 
         occur.update({
             'origin': origin_id,
             'visit': origin_visit1['visit'],
         })
         self.storage.occurrence_add([occur])
 
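         # check the storage model directly: occurrence_history folds the
         # visit ids into a 'visits' array; unnesting it and joining
         # origin_visit recovers one dated row per visit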
         test_query = '''
         with indiv_occurrences as (
           select origin, branch, target, target_type, unnest(visits) as visit
           from occurrence_history
         )
         select origin, branch, target, target_type, date
         from indiv_occurrences
         left join origin_visit using(origin, visit)
         order by origin, date'''
 
         self.cursor.execute(test_query)
         ret = self.cursor.fetchall()
         self.assertEqual(len(ret), 1)
         self.assertEqual(
             (ret[0][0], ret[0][1].tobytes(), ret[0][2].tobytes(),
              ret[0][3], ret[0][4]),
             (occur['origin'], occur['branch'], occur['target'],
              occur['target_type'], self.date_visit1))
 
         date_visit2 = date_visit1 + datetime.timedelta(hours=10)
 
         origin_visit2 = self.storage.origin_visit_add(origin_id, date_visit2)
         occur2 = occur.copy()
         occur2.update({
             'visit': origin_visit2['visit'],
         })
         self.storage.occurrence_add([occur2])
 
         self.cursor.execute(test_query)
         ret = self.cursor.fetchall()
         self.assertEqual(len(ret), 2)
         self.assertEqual(
             (ret[0][0], ret[0][1].tobytes(), ret[0][2].tobytes(),
              ret[0][3], ret[0][4]),
             (occur['origin'], occur['branch'], occur['target'],
              occur['target_type'], date_visit1))
         self.assertEqual(
             (ret[1][0], ret[1][1].tobytes(), ret[1][2].tobytes(),
              ret[1][3], ret[1][4]),
             (occur2['origin'], occur2['branch'], occur2['target'],
              occur2['target_type'], date_visit2))
 
     @istest
     def occurrence_get(self):
         # given
         occur = self.occurrence.copy()
         origin_id = self.storage.origin_add_one(self.origin2)
         origin_visit1 = self.storage.origin_visit_add(origin_id,
                                                       self.date_visit1)
 
         revision = self.revision.copy()
         revision['id'] = occur['target']
         self.storage.revision_add([revision])
 
         occur.update({
             'origin': origin_id,
             'visit': origin_visit1['visit'],
         })
         self.storage.occurrence_add([occur])
         self.storage.occurrence_add([occur])
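         # the second add must be idempotent: a single occurrence is
         # expected back below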
 
         # when
         actual_occurrence = list(self.storage.occurrence_get(origin_id))
 
         # then
         expected_occurrence = self.occurrence.copy()
         expected_occurrence.update({
             'origin': origin_id
         })
         self.assertEquals(len(actual_occurrence), 1)
         self.assertEquals(actual_occurrence[0], expected_occurrence)
 
     def _trigger_cache_provenance(self, origin_visit):
         """Trigger cache population for cache_content_revision.
 
         """
         ret = list(self.storage.cache_revision_origin_add(
                 origin_visit['origin'],
                 origin_visit['visit'],
         ))
 
         for revision_id in ret:
             self.storage.cache_content_revision_add([revision_id])
 
         return ret
 
     @istest
     def content_find_provenance_with_present_content(self):
         # 1. with something to find
         # given
         origin_id = self.storage.origin_add_one(self.origin2)
         self.storage.content_add([self.cont2])
         self.storage.directory_add([self.dir2])  # points to self.cont2
         self.storage.revision_add([self.revision3])  # points to self.dir2
 
         occurrence = self.occurrence3.copy()
         occurrence['target'] = self.revision3['id']
         origin_visit1 = self.storage.origin_visit_add(origin_id,
                                                       self.date_visit2)
         occurrence.update({
             'origin': origin_id,
             'visit': origin_visit1['visit'],
         })
 
         self.storage.occurrence_add([occurrence])
 
         # Trigger cache population for cache_content_revision
         cached_revisions = self._trigger_cache_provenance(origin_visit1)
 
         self.assertIn(self.revision3['id'], cached_revisions)
 
         # when
         occs = list(self.storage.content_find_provenance(
             {'sha1': self.cont2['sha1']}))
 
         # then
         self.assertEquals(len(occs), 1)
         self.assertEquals(occs[0]['origin'], origin_visit1['origin'])
         self.assertEquals(occs[0]['visit'], origin_visit1['visit'])
         self.assertEquals(occs[0]['revision'], self.revision3['id'])
         self.assertEquals(occs[0]['path'], self.dir2['entries'][0]['name'])
 
         occs2 = list(self.storage.content_find_provenance(
             {'sha1_git': self.cont2['sha1_git']}))
 
         self.assertEquals(len(occs2), 1)
         self.assertEquals(occs2[0]['origin'], origin_visit1['origin'])
         self.assertEquals(occs2[0]['visit'], origin_visit1['visit'])
         self.assertEquals(occs2[0]['revision'], self.revision3['id'])
         self.assertEquals(occs2[0]['path'], self.dir2['entries'][0]['name'])
 
         occs3 = list(self.storage.content_find_provenance(
             {'sha256': self.cont2['sha256']}))
 
         self.assertEquals(len(occs3), 1)
         self.assertEquals(occs3[0]['origin'], origin_visit1['origin'])
         self.assertEquals(occs3[0]['visit'], origin_visit1['visit'])
         self.assertEquals(occs3[0]['revision'], self.revision3['id'])
         self.assertEquals(occs3[0]['path'], self.dir2['entries'][0]['name'])
 
     @istest
     def content_find_provenance_with_non_present_content(self):
         # 1. with something that does not exist
         missing_cont = self.missing_cont
 
         occ = list(self.storage.content_find_provenance(
             {'sha1': missing_cont['sha1']}))
 
         self.assertEquals(occ, [],
                           "Content does not exist so no occurrence")
 
         # 2. with something that does not exist
         occ = list(self.storage.content_find_provenance(
             {'sha1_git': missing_cont['sha1_git']}))
 
         self.assertEquals(occ, [],
                           "Content does not exist so no occurrence")
 
         # 3. with something that does not exist
         occ = list(self.storage.content_find_provenance(
             {'sha256': missing_cont['sha256']}))
 
         self.assertEquals(occ, [],
                           "Content does not exist so no occurrence")
 
     @istest
     def content_find_provenance_bad_input(self):
         # 1. with bad input
         with self.assertRaises(ValueError) as cm:
             list(self.storage.content_find_provenance({}))  # empty is bad
         self.assertIn('content keys', cm.exception.args[0])
 
         # 2. with bad input
         with self.assertRaises(ValueError) as cm:
             list(self.storage.content_find_provenance(
                 {'unknown-sha1': 'something'}))  # not the right key
         self.assertIn('content keys', cm.exception.args[0])
 
     @istest
     def entity_get_from_lister_metadata(self):
         self.storage.entity_add([self.entity1])
 
         fetched_entities = list(
             self.storage.entity_get_from_lister_metadata(
                 [self.entity1_query, self.entity2_query]))
 
         # Entity 1 should have full metadata, with last_seen/last_id instead
         # of validity
         entity1 = self.entity1.copy()
         entity1['last_seen'] = entity1['validity'][0]
         del fetched_entities[0]['last_id']
         del entity1['validity']
         # Entity 2 should have no metadata
         entity2 = {
             'uuid': None,
             'lister_metadata': self.entity2_query.copy(),
         }
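         # unmatched queries are echoed back (uuid None) rather than
         # dropped, preserving the input order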
 
         self.assertEquals(fetched_entities, [entity1, entity2])
 
     @istest
     def entity_get_from_lister_metadata_twice(self):
         self.storage.entity_add([self.entity1])
 
         fetched_entities1 = list(
             self.storage.entity_get_from_lister_metadata(
                 [self.entity1_query]))
         fetched_entities2 = list(
             self.storage.entity_get_from_lister_metadata(
                 [self.entity1_query]))
 
         self.assertEquals(fetched_entities1, fetched_entities2)
 
     @istest
     def entity_get(self):
         # given
         self.storage.entity_add([self.entity4])
         self.storage.entity_add([self.entity3])
 
         # when: entity3 -child-of-> entity4
         actual_entity3 = list(self.storage.entity_get(self.entity3['uuid']))
 
         self.assertEquals(len(actual_entity3), 2)
         # remove dynamic data (modified by db)
         entity3 = self.entity3.copy()
         entity4 = self.entity4.copy()
         del entity3['validity']
         del entity4['validity']
         del actual_entity3[0]['last_seen']
         del actual_entity3[0]['last_id']
         del actual_entity3[1]['last_seen']
         del actual_entity3[1]['last_id']
         self.assertEquals(actual_entity3, [entity3, entity4])
 
         # when: entity4 only child
         actual_entity4 = list(self.storage.entity_get(self.entity4['uuid']))
 
         self.assertEquals(len(actual_entity4), 1)
         # remove dynamic data (modified by db)
         entity4 = self.entity4.copy()
         del entity4['validity']
         del actual_entity4[0]['last_id']
         del actual_entity4[0]['last_seen']
 
         self.assertEquals(actual_entity4, [entity4])
 
     @istest
     def entity_get_one(self):
         # given
         self.storage.entity_add([self.entity3, self.entity4])
 
         # when: entity3 -child-of-> entity4
         actual_entity3 = self.storage.entity_get_one(self.entity3['uuid'])
 
         # remove dynamic data (modified by db)
         entity3 = self.entity3.copy()
         del entity3['validity']
         del actual_entity3['last_seen']
         del actual_entity3['last_id']
         self.assertEquals(actual_entity3, entity3)
 
     @istest
     def stat_counters(self):
         expected_keys = ['content', 'directory', 'directory_entry_dir',
                          'occurrence', 'origin', 'person', 'revision']
         counters = self.storage.stat_counters()
 
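         # the storage may expose more counters; only the subset above is
         # required, and values must be integers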
         self.assertTrue(set(expected_keys) <= set(counters))
         self.assertIsInstance(counters[expected_keys[0]], int)
 
     @istest
     def content_find_with_present_content(self):
         # 1. with something to find
         cont = self.cont
         self.storage.content_add([cont])
 
         actually_present = self.storage.content_find({'sha1': cont['sha1']})
 
         actually_present.pop('ctime')
         self.assertEqual(actually_present, {
             'sha1': cont['sha1'],
             'sha256': cont['sha256'],
             'sha1_git': cont['sha1_git'],
             'length': cont['length'],
             'status': 'visible'
         })
 
         # 2. with something to find
         actually_present = self.storage.content_find(
             {'sha1_git': cont['sha1_git']})
 
         actually_present.pop('ctime')
         self.assertEqual(actually_present, {
             'sha1': cont['sha1'],
             'sha256': cont['sha256'],
             'sha1_git': cont['sha1_git'],
             'length': cont['length'],
             'status': 'visible'
         })
 
         # 3. with something to find
         actually_present = self.storage.content_find(
             {'sha256': cont['sha256']})
 
         actually_present.pop('ctime')
         self.assertEqual(actually_present, {
             'sha1': cont['sha1'],
             'sha256': cont['sha256'],
             'sha1_git': cont['sha1_git'],
             'length': cont['length'],
             'status': 'visible'
         })
 
         # 4. with something to find
         actually_present = self.storage.content_find(
             {'sha1': cont['sha1'],
              'sha1_git': cont['sha1_git'],
              'sha256': cont['sha256']})
 
         actually_present.pop('ctime')
         self.assertEqual(actually_present, {
             'sha1': cont['sha1'],
             'sha256': cont['sha256'],
             'sha1_git': cont['sha1_git'],
             'length': cont['length'],
             'status': 'visible'
         })
 
     @istest
     def content_find_with_non_present_content(self):
         # 1. with something that does not exist
         missing_cont = self.missing_cont
 
         actually_present = self.storage.content_find(
             {'sha1': missing_cont['sha1']})
 
         self.assertIsNone(actually_present)
 
         # 2. with something that does not exist
         actually_present = self.storage.content_find(
             {'sha1_git': missing_cont['sha1_git']})
 
         self.assertIsNone(actually_present)
 
         # 3. with something that does not exist
         actually_present = self.storage.content_find(
             {'sha256': missing_cont['sha256']})
 
         self.assertIsNone(actually_present)
 
     @istest
     def content_find_bad_input(self):
         # 1. with bad input
         with self.assertRaises(ValueError):
             self.storage.content_find({})  # empty is bad
 
         # 2. with bad input
         with self.assertRaises(ValueError):
             self.storage.content_find(
                 {'unknown-sha1': 'something'})  # not the right key
 
     @istest
     def object_find_by_sha1_git(self):
         sha1_gits = [b'00000000000000000000']
         expected = {
             b'00000000000000000000': [],
         }
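         # an unknown sha1_git must map to an empty list rather than being
         # dropped from the result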
 
         self.storage.content_add([self.cont])
         sha1_gits.append(self.cont['sha1_git'])
         expected[self.cont['sha1_git']] = [{
             'sha1_git': self.cont['sha1_git'],
             'type': 'content',
             'id': self.cont['sha1'],
         }]
 
         self.storage.directory_add([self.dir])
         sha1_gits.append(self.dir['id'])
         expected[self.dir['id']] = [{
             'sha1_git': self.dir['id'],
             'type': 'directory',
             'id': self.dir['id'],
         }]
 
         self.storage.revision_add([self.revision])
         sha1_gits.append(self.revision['id'])
         expected[self.revision['id']] = [{
             'sha1_git': self.revision['id'],
             'type': 'revision',
             'id': self.revision['id'],
         }]
 
         self.storage.release_add([self.release])
         sha1_gits.append(self.release['id'])
         expected[self.release['id']] = [{
             'sha1_git': self.release['id'],
             'type': 'release',
             'id': self.release['id'],
         }]
 
         ret = self.storage.object_find_by_sha1_git(sha1_gits)
         for val in ret.values():
             for obj in val:
                 del obj['object_id']
 
         self.assertEqual(expected, ret)
 
     @istest
     def content_mimetype_missing(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
-        mimetypes = [self.cont2['sha1'], self.missing_cont['sha1']]
+        mimetypes = [
+            {
+                'id': self.cont2['sha1'],
+                'tool_name': 'file',
+                'tool_version': '5.22',
+            },
+            {
+                'id': self.missing_cont['sha1'],
+                'tool_name': 'file',
+                'tool_version': '5.22',
+            }]
 
         # when
         actual_missing = self.storage.content_mimetype_missing(mimetypes)
 
         # then
         self.assertEqual(list(actual_missing), [
             self.cont2['sha1'],
             self.missing_cont['sha1']
         ])
 
         # given
         self.storage.content_mimetype_add([{
             'id': self.cont2['sha1'],
             'mimetype': b'text/plain',
-            'encoding': b'utf-8'
+            'encoding': b'utf-8',
+            'tool_name': 'file',
+            'tool_version': '5.22',
         }])
 
         # when
         actual_missing = self.storage.content_mimetype_missing(mimetypes)
 
         # then
         self.assertEqual(list(actual_missing), [self.missing_cont['sha1']])
 
     @istest
     def content_mimetype_add__drop_duplicate(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         mimetype_v1 = {
             'id': self.cont2['sha1'],
             'mimetype': b'text/plain',
-            'encoding': b'utf-8'
+            'encoding': b'utf-8',
+            'tool_name': 'file',
+            'tool_version': '5.22',
         }
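
         # the flattened tool_name/tool_version pair is expected to come
         # back nested under a 'tool' key on reads (see the expected
         # values below)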
 
         # given
         self.storage.content_mimetype_add([mimetype_v1])
 
         # when
         actual_mimetypes = list(self.storage.content_mimetype_get(
             [self.cont2['sha1']]))
 
         # then
-        self.assertEqual(actual_mimetypes[0], mimetype_v1)
+        expected_mimetypes_v1 = [{
+            'id': self.cont2['sha1'],
+            'mimetype': b'text/plain',
+            'encoding': b'utf-8',
+            'tool': {
+                'name': 'file',
+                'version': '5.22',
+            }
+        }]
+        self.assertEqual(actual_mimetypes, expected_mimetypes_v1)
 
         # given
         mimetype_v2 = mimetype_v1.copy()
         mimetype_v2.update({
             'mimetype': b'text/html',
             'encoding': b'us-ascii',
         })
 
         self.storage.content_mimetype_add([mimetype_v2])
 
         actual_mimetypes = list(self.storage.content_mimetype_get(
             [self.cont2['sha1']]))
 
         # mimetype did not change as the v2 was dropped.
-        self.assertEqual(actual_mimetypes[0], mimetype_v1)
+        self.assertEqual(actual_mimetypes, expected_mimetypes_v1)
 
     @istest
     def content_mimetype_add__update_in_place_duplicate(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         mimetype_v1 = {
             'id': self.cont2['sha1'],
             'mimetype': b'text/plain',
-            'encoding': b'utf-8'
+            'encoding': b'utf-8',
+            'tool_name': 'file',
+            'tool_version': '5.22',
         }
 
         # given
         self.storage.content_mimetype_add([mimetype_v1])
 
         # when
         actual_mimetypes = list(self.storage.content_mimetype_get(
             [self.cont2['sha1']]))
 
+        expected_mimetypes_v1 = [{
+            'id': self.cont2['sha1'],
+            'mimetype': b'text/plain',
+            'encoding': b'utf-8',
+            'tool': {
+                'name': 'file',
+                'version': '5.22',
+            }
+        }]
+
         # then
-        self.assertEqual(actual_mimetypes[0], mimetype_v1)
+        self.assertEqual(actual_mimetypes, expected_mimetypes_v1)
 
         # given
         mimetype_v2 = mimetype_v1.copy()
         mimetype_v2.update({
             'mimetype': b'text/html',
             'encoding': b'us-ascii',
         })
 
         self.storage.content_mimetype_add([mimetype_v2], conflict_update=True)
 
         actual_mimetypes = list(self.storage.content_mimetype_get(
             [self.cont2['sha1']]))
 
+        expected_mimetypes_v2 = [{
+            'id': self.cont2['sha1'],
+            'mimetype': b'text/html',
+            'encoding': b'us-ascii',
+            'tool': {
+                'name': 'file',
+                'version': '5.22',
+            }
+        }]
+
         # mimetype did change as the v2 was used to overwrite v1
-        self.assertEqual(actual_mimetypes[0], mimetype_v2)
+        self.assertEqual(actual_mimetypes, expected_mimetypes_v2)
 
     @istest
     def content_mimetype_get(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         mimetypes = [self.cont2['sha1'], self.missing_cont['sha1']]
 
         mimetype1 = {
             'id': self.cont2['sha1'],
             'mimetype': b'text/plain',
-            'encoding': b'utf-8'
+            'encoding': b'utf-8',
+            'tool_name': 'file',
+            'tool_version': '5.22',
         }
 
         # given
         self.storage.content_mimetype_add([mimetype1])
 
         # when
-        actual_mimetypes = self.storage.content_mimetype_get(mimetypes)
+        actual_mimetypes = list(self.storage.content_mimetype_get(mimetypes))
 
         # then
-        self.assertEqual(list(actual_mimetypes), [mimetype1])
+        expected_mimetypes = [{
+            'id': self.cont2['sha1'],
+            'mimetype': b'text/plain',
+            'encoding': b'utf-8',
+            'tool': {
+                'name': 'file',
+                'version': '5.22',
+            }
+        }]
+
+        self.assertEqual(actual_mimetypes, expected_mimetypes)
 
     @istest
     def content_language_missing(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         languages = [self.cont2['sha1'], self.missing_cont['sha1']]
 
         # when
         actual_missing = self.storage.content_language_missing(languages)
 
         # then
         self.assertEqual(list(actual_missing), [
             self.cont2['sha1'],
             self.missing_cont['sha1']
         ])
 
         # given
         self.storage.content_language_add([{
             'id': self.cont2['sha1'],
             'lang': 'haskell',
         }])
 
         # when
         actual_missing = self.storage.content_language_missing(languages)
 
         # then
         self.assertEqual(list(actual_missing), [self.missing_cont['sha1']])
 
     @istest
     def content_language_get(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         languages = [self.cont2['sha1'], self.missing_cont['sha1']]
 
         language1 = {
             'id': self.cont2['sha1'],
             'lang': 'common-lisp',
         }
 
         # given
         self.storage.content_language_add([language1])
 
         # when
         actual_languages = self.storage.content_language_get(languages)
 
         # then
         self.assertEqual(list(actual_languages), [language1])
 
     @istest
     def content_language_add__drop_duplicate(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         language_v1 = {
             'id': self.cont2['sha1'],
             'lang': 'emacslisp',
         }
 
         # given
         self.storage.content_language_add([language_v1])
 
         # when
         actual_languages = list(self.storage.content_language_get(
             [self.cont2['sha1']]))
 
         # then
         self.assertEqual(actual_languages[0], language_v1)
 
         # given
         language_v2 = language_v1.copy()
         language_v2.update({
             'lang': 'common-lisp',
         })
 
         self.storage.content_language_add([language_v2])
 
         actual_languages = list(self.storage.content_language_get(
             [self.cont2['sha1']]))
 
         # language did not change as the v2 was dropped.
         self.assertEqual(actual_languages[0], language_v1)
 
     @istest
     def content_language_add__update_in_place_duplicate(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         language_v1 = {
             'id': self.cont2['sha1'],
             'lang': 'common-lisp',
         }
 
         # given
         self.storage.content_language_add([language_v1])
 
         # when
         actual_languages = list(self.storage.content_language_get(
             [self.cont2['sha1']]))
 
         # then
         self.assertEqual(actual_languages[0], language_v1)
 
         # given
         language_v2 = language_v1.copy()
         language_v2.update({
             'lang': 'emacslisp',
         })
 
         self.storage.content_language_add([language_v2], conflict_update=True)
 
         actual_languages = list(self.storage.content_language_get(
             [self.cont2['sha1']]))
 
         # language did change as the v2 was used to overwrite v1
         self.assertEqual(actual_languages[0], language_v2)
 
     @istest
     def content_ctags_missing(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         ctags = [
             {
                 'id': self.cont2['sha1'],
                 'tool_name': 'universal-ctags',
                 'tool_version': '~git7859817b',
             },
             {
                 'id': self.missing_cont['sha1'],
                 'tool_name': 'universal-ctags',
                 'tool_version': '~git7859817b',
             }
         ]
 
         # when
         actual_missing = self.storage.content_ctags_missing(ctags)
 
         # then
         self.assertEqual(list(actual_missing), [
             self.cont2['sha1'],
             self.missing_cont['sha1']
         ])
 
         # given
         self.storage.content_ctags_add([
             {
                 'id': self.cont2['sha1'],
                 'tool_name': 'universal-ctags',
                 'tool_version': '~git7859817b',
                 'ctags': [{
                     'name': 'done',
                     'kind': 'variable',
                     'line': 119,
                     'lang': 'OCaml',
                 }]
             },
         ])
 
         # when
         actual_missing = self.storage.content_ctags_missing(ctags)
 
         # then
         self.assertEqual(list(actual_missing), [self.missing_cont['sha1']])
 
     @istest
     def content_ctags_get(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         ctags = [self.cont2['sha1'], self.missing_cont['sha1']]
 
         ctag1 = {
             'id': self.cont2['sha1'],
             'tool_name': 'universal-ctags',
             'tool_version': '~git7859817b',
             'ctags': [
                 {
                     'name': 'done',
                     'kind': 'variable',
                     'line': 100,
                     'lang': 'Python',
                 },
                 {
                     'name': 'main',
                     'kind': 'function',
                     'line': 119,
                     'lang': 'Python',
                 }]
         }
 
         # given
         self.storage.content_ctags_add([ctag1])
 
         # when
         actual_ctags = list(self.storage.content_ctags_get(ctags))

         # then
         expected_ctags = [
             {
                 'id': self.cont2['sha1'],
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
                 'name': 'done',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'Python',
             },
             {
                 'id': self.cont2['sha1'],
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
                 'name': 'main',
                 'kind': 'function',
                 'line': 119,
                 'lang': 'Python',
             }
         ]
 
         self.assertEqual(actual_ctags, expected_ctags)
 
     @istest
     def content_ctags_search(self):
         # 1. given
         cont = self.cont
         cont2 = self.cont2
         self.storage.content_add([cont, cont2])
 
         ctag1 = {
             'id': cont['sha1'],
             'tool_name': 'universal-ctags',
             'tool_version': '~git7859817b',
             'ctags': [
                 {
                     'name': 'hello',
                     'kind': 'function',
                     'line': 133,
                     'lang': 'Python',
                 },
                 {
                     'name': 'counter',
                     'kind': 'variable',
                     'line': 119,
                     'lang': 'Python',
                 },
             ]
         }
 
         ctag2 = {
             'id': cont2['sha1'],
             'tool_name': 'universal-ctags',
             'tool_version': '~git7859817b',
             'ctags': [
                 {
                     'name': 'hello',
                     'kind': 'variable',
                     'line': 100,
                     'lang': 'C',
                 },
             ]
         }
 
         self.storage.content_ctags_add([ctag1, ctag2])
 
         # 1. when
         actual_ctags = list(self.storage.content_ctags_search('hello',
                                                               limit=1))
 
         # 1. then
         self.assertEqual(actual_ctags, [
             {
                 'id': ctag1['id'],
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
                 'name': 'hello',
                 'kind': 'function',
                 'line': 133,
                 'lang': 'Python',
             }
         ])
 
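         # pagination: passing the last sha1 seen as last_sha1 resumes
         # the search past that content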
         # 2. when
         actual_ctags = list(self.storage.content_ctags_search(
             'hello',
             limit=1,
             last_sha1=ctag1['id']))
 
         # 2. then
         self.assertEqual(actual_ctags, [
             {
                 'id': ctag2['id'],
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
                 'name': 'hello',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'C',
             }
         ])
 
         # 3. when
         actual_ctags = list(self.storage.content_ctags_search('hello'))
 
         # 3. then
         self.assertEqual(actual_ctags, [
             {
                 'id': ctag1['id'],
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
                 'name': 'hello',
                 'kind': 'function',
                 'line': 133,
                 'lang': 'Python',
             },
             {
                 'id': ctag2['id'],
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
                 'name': 'hello',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'C',
             },
         ])
 
         # 4. when
         actual_ctags = list(self.storage.content_ctags_search('counter'))
 
         # then
         self.assertEqual(actual_ctags, [{
             'id': ctag1['id'],
             'tool': {
                 'name': 'universal-ctags',
                 'version': '~git7859817b',
             },
             'name': 'counter',
             'kind': 'variable',
             'line': 119,
             'lang': 'Python',
         }])
 
     @istest
     def content_ctags_search_no_result(self):
         actual_ctags = list(self.storage.content_ctags_search('counter'))
 
         self.assertEquals(actual_ctags, [])
 
     @istest
     def content_ctags_add__add_new_ctags_added(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         ctag_v1 = {
             'id': self.cont2['sha1'],
             'tool_name': 'universal-ctags',
             'tool_version': '~git7859817b',
             'ctags': [{
                 'name': 'done',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'Scheme',
             }]
         }
 
         # given
         self.storage.content_ctags_add([ctag_v1])
         self.storage.content_ctags_add([ctag_v1])  # conflict does nothing
 
         # when
         actual_ctags = list(self.storage.content_ctags_get(
             [self.cont2['sha1']]))
 
         # then
         expected_ctags = [{
             'id': self.cont2['sha1'],
             'name': 'done',
             'kind': 'variable',
             'line': 100,
             'lang': 'Scheme',
             'tool': {
                 'name': 'universal-ctags',
                 'version': '~git7859817b',
             }
         }]
 
         self.assertEqual(actual_ctags, expected_ctags)
 
         # given
         ctag_v2 = ctag_v1.copy()
         ctag_v2.update({
             'ctags': [
                 {
                     'name': 'defn',
                     'kind': 'function',
                     'line': 120,
                     'lang': 'Scheme',
                 }
             ]
         })
 
         self.storage.content_ctags_add([ctag_v2])
 
         expected_ctags = [
             {
                 'id': self.cont2['sha1'],
                 'name': 'done',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'Scheme',
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
             }, {
                 'id': self.cont2['sha1'],
                 'name': 'defn',
                 'kind': 'function',
                 'line': 120,
                 'lang': 'Scheme',
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
             }
         ]
 
         actual_ctags = list(self.storage.content_ctags_get(
             [self.cont2['sha1']]))
 
         self.assertEqual(actual_ctags, expected_ctags)
 
     @istest
     def content_ctags_add__update_in_place(self):
         # given
         cont2 = self.cont2
         self.storage.content_add([cont2])
 
         ctag_v1 = {
             'id': self.cont2['sha1'],
             'tool_name': 'universal-ctags',
             'tool_version': '~git7859817b',
             'ctags': [{
                 'name': 'done',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'Scheme',
             }]
         }
 
         # given
         self.storage.content_ctags_add([ctag_v1])
 
         # when
         actual_ctags = list(self.storage.content_ctags_get(
             [self.cont2['sha1']]))
 
         # then
         expected_ctags = [
             {
                 'id': self.cont2['sha1'],
                 'name': 'done',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'Scheme',
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 }
             }
         ]
         self.assertEqual(actual_ctags, expected_ctags)
 
         # given
         ctag_v2 = ctag_v1.copy()
         ctag_v2.update({
             'ctags': [
                 {
                     'name': 'done',
                     'kind': 'variable',
                     'line': 100,
                     'lang': 'Scheme',
                 },
                 {
                     'name': 'defn',
                     'kind': 'function',
                     'line': 120,
                     'lang': 'Scheme',
                 }
             ]
         })
 
         self.storage.content_ctags_add([ctag_v2], conflict_update=True)
 
         actual_ctags = list(self.storage.content_ctags_get(
             [self.cont2['sha1']]))
 
         # ctag did change as the v2 was used to overwrite v1
         expected_ctags = [
             {
                 'id': self.cont2['sha1'],
                 'name': 'done',
                 'kind': 'variable',
                 'line': 100,
                 'lang': 'Scheme',
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
             },
             {
                 'id': self.cont2['sha1'],
                 'name': 'defn',
                 'kind': 'function',
                 'line': 120,
                 'lang': 'Scheme',
                 'tool': {
                     'name': 'universal-ctags',
                     'version': '~git7859817b',
                 },
             }
         ]
         self.assertEqual(actual_ctags, expected_ctags)
 
     @istest
     def content_fossology_license_missing(self):
         # given
         cont = self.cont
         self.storage.content_add([cont])
 
         licenses = [cont['sha1'], self.missing_cont['sha1']]
 
         # when
         actual_missing = list(self.storage.content_fossology_license_missing(
             licenses))
 
         # then
         self.assertEqual(actual_missing, [
             cont['sha1'],
             self.missing_cont['sha1']
         ])
 
         # given
         r = self.storage.content_fossology_license_add([{
             'id': cont['sha1'],
             'licenses': ['GPL-2.0', 'GPL-2.0+'],
             'tool_name': 'nomos',
             'tool_version': '3.1.0rc2-31-ga2cbb8c',
         }])
 
         self.assertEqual(r, [])
 
         # when
         actual_missing = list(self.storage.content_fossology_license_missing(
             licenses))
 
         # then
         self.assertEqual(actual_missing, [self.missing_cont['sha1']])
 
     @istest
     def content_fossology_license_get(self):
         # given
         cont = self.cont
         self.storage.content_add([cont])
 
         licenses = [cont['sha1'], self.missing_cont['sha1']]
 
         license1 = {
             'id': cont['sha1'],
             'licenses': ['GPL-2.0+'],
             'tool_name': 'nomos',
             'tool_version': '3.1.0rc2-31-ga2cbb8c',
         }
 
         # given
         r = self.storage.content_fossology_license_add([license1])
 
         self.assertEquals(r, [])
 
         # when
         actual_licenses = list(self.storage.content_fossology_license_get(
             licenses))
 
         # then
         self.assertEqual(actual_licenses, [license1])
 
     @istest
     def content_fossology_license_add__wrong_license(self):
         # given
         cont = self.cont
         self.storage.content_add([cont])
 
         license_v1 = {
             'id': cont['sha1'],
             'licenses': ['blackhole'],
             'tool_name': 'nomos',
             'tool_version': '3.1.0rc2-31-ga2cbb8c',
         }
 
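         # 'blackhole' is not a known license name: the add is expected
         # to reject the row, hand it back to the caller, and store
         # nothing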
         # given
         r = self.storage.content_fossology_license_add([license_v1])
 
         # then
         self.assertEqual(r, [license_v1])
 
         # when
         actual_licenses = list(self.storage.content_fossology_license_get(
             [cont['sha1']]))
 
         # then
         self.assertEqual(actual_licenses, [])
 
     @istest
     def content_fossology_license_add__new_license_added(self):
         # given
         cont = self.cont
         self.storage.content_add([cont])
 
         license_v1 = {
             'id': cont['sha1'],
             'licenses': ['Apache-2.0'],
             'tool_name': 'nomos',
             'tool_version': '3.1.0rc2-31-ga2cbb8c',
         }
 
         # given
         self.storage.content_fossology_license_add([license_v1])
         # conflict does nothing
         self.storage.content_fossology_license_add([license_v1])
 
         # when
         actual_licenses = list(self.storage.content_fossology_license_get(
             [cont['sha1']]))
 
         # then
         self.assertEqual(actual_licenses[0], license_v1)
 
         # given
         license_v2 = license_v1.copy()
         license_v2.update({
             'licenses': ['BSD-2-Clause'],
         })
 
         self.storage.content_fossology_license_add([license_v2])
 
         actual_licenses = list(self.storage.content_fossology_license_get(
             [cont['sha1']]))
 
         expected_license = license_v1.copy()
         expected_license.update({
             'licenses': ['Apache-2.0', 'BSD-2-Clause'],
         })
         # the v2 license was appended to the v1 one instead of replacing it.
         self.assertEqual(actual_licenses[0], expected_license)
 
     @istest
     def content_fossology_license_add__update_in_place_duplicate(self):
         # given
         cont = self.cont
         self.storage.content_add([cont])
 
         license_v1 = {
             'id': cont['sha1'],
             'licenses': ['CECILL'],
             'tool_name': 'nomos',
             'tool_version': '3.1.0rc2-31-ga2cbb8c',
         }
 
         # given
         self.storage.content_fossology_license_add([license_v1])
         # conflict does nothing
         self.storage.content_fossology_license_add([license_v1])
 
         # when
         actual_licenses = list(self.storage.content_fossology_license_get(
             [cont['sha1']]))
 
         # then
         self.assertEqual(actual_licenses[0], license_v1)
 
         # given
         license_v2 = license_v1.copy()
         license_v2.update({
             'licenses': ['CECILL-2.0']
         })
 
         self.storage.content_fossology_license_add([license_v2],
                                                    conflict_update=True)
 
         actual_licenses = list(self.storage.content_fossology_license_get(
             [cont['sha1']]))
 
         # license did change as the v2 was used to overwrite v1
         self.assertEqual(actual_licenses[0], license_v2)
 
 
 class TestStorage(AbstractTestStorage, unittest.TestCase):
     """Test the local storage"""
 
     # Can only be tested with local storage as you can't mock
     # datetimes for the remote server
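     # fetch_history_start/fetch_history_end call datetime.datetime.now();
     # patching the class pins both timestamps, making the stored date and
     # duration deterministic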
     @istest
     def fetch_history(self):
         origin = self.storage.origin_add_one(self.origin)
         with patch('datetime.datetime'):
             datetime.datetime.now.return_value = self.fetch_history_date
             fetch_history_id = self.storage.fetch_history_start(origin)
             datetime.datetime.now.assert_called_with(tz=datetime.timezone.utc)
 
         with patch('datetime.datetime'):
             datetime.datetime.now.return_value = self.fetch_history_end
             self.storage.fetch_history_end(fetch_history_id,
                                            self.fetch_history_data)
 
         fetch_history = self.storage.fetch_history_get(fetch_history_id)
         expected_fetch_history = self.fetch_history_data.copy()
 
         expected_fetch_history['id'] = fetch_history_id
         expected_fetch_history['origin'] = origin
         expected_fetch_history['date'] = self.fetch_history_date
         expected_fetch_history['duration'] = self.fetch_history_duration
 
         self.assertEqual(expected_fetch_history, fetch_history)
 
     @istest
     def person_get(self):
         # given
         person0 = {
             'fullname': b'bob <alice@bob>',
             'name': b'bob',
             'email': b'alice@bob',
         }
         id0 = self.storage._person_add(person0)
 
         person1 = {
             'fullname': b'tony <tony@bob>',
             'name': b'tony',
             'email': b'tony@bob',
         }
         id1 = self.storage._person_add(person1)
 
         # when
         actual_persons = self.storage.person_get([id0, id1])
 
         # then (in practice, persons are injected through revision or
         # release adds; here they were added directly)
         self.assertEqual(
             list(actual_persons), [
                 {
                     'id': id0,
                     'fullname': person0['fullname'],
                     'name': person0['name'],
                     'email': person0['email'],
                 },
                 {
                     'id': id1,
                     'fullname': person1['fullname'],
                     'name': person1['name'],
                     'email': person1['email'],
                 },
             ])