diff --git a/swh/provenance/algos/directory.py b/swh/provenance/algos/directory.py
index 06db86f..ec7d06d 100644
--- a/swh/provenance/algos/directory.py
+++ b/swh/provenance/algos/directory.py
@@ -1,107 +1,107 @@
 # Copyright (C) 2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import os
 from typing import Generator, Iterable, Iterator, List, Optional
 
 from swh.core.statsd import statsd
 from swh.model.model import Sha1Git
 from swh.provenance.archive import ArchiveInterface
 from swh.provenance.interface import ProvenanceInterface
 from swh.provenance.model import DirectoryEntry
 
 REVISION_DURATION_METRIC = "swh_provenance_directory_duration_seconds"
 
 
 class CSVDirectoryIterator:
     """Iterator over directories typically present in the given CSV file.
 
     The input is an iterable that produces the ids (sha1_git) of the directories.
     """
 
     def __init__(
         self,
         directories: Iterable[Sha1Git],
         limit: Optional[int] = None,
     ) -> None:
         self.directories: Iterator[Sha1Git]
         if limit is not None:
             from itertools import islice
 
             self.directories = islice(directories, limit)
         else:
             self.directories = iter(directories)
 
     def __iter__(self) -> Generator[DirectoryEntry, None, None]:
         for id in self.directories:
             yield DirectoryEntry(id)
 
 
 def directory_flatten_range(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
     start_id: Sha1Git,
     end_id: Sha1Git,
     minsize: int = 0,
     commit: bool = True,
 ) -> None:
     """Flatten the known directories from ``start_id`` to ``end_id``."""
     current = start_id
     while current < end_id:
-        dirs = provenance.storage.directory_iter_not_flattenned(
+        dirs = provenance.storage.directory_iter_not_flattened(
             limit=100, start_id=current
         )
         if not dirs:
             break
         directory_add(
             provenance, archive, [DirectoryEntry(id=d) for d in dirs], minsize, commit
         )
         current = dirs[-1]
 
 
 @statsd.timed(metric=REVISION_DURATION_METRIC, tags={"method": "add"})
 def directory_add(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
     directories: List[DirectoryEntry],
     minsize: int = 0,
     commit: bool = True,
 ) -> None:
     for directory in directories:
         # Only flatten directories that are present in the provenance model, but not
-        # flattenned yet.
-        flattenned = provenance.directory_already_flattenned(directory)
-        if flattenned is not None and not flattenned:
+        # flattened yet.
+        flattened = provenance.directory_already_flattened(directory)
+        if flattened is not None and not flattened:
             directory_flatten(
                 provenance,
                 archive,
                 directory,
                 minsize=minsize,
             )
     if commit:
         provenance.flush()
 
 
 @statsd.timed(metric=REVISION_DURATION_METRIC, tags={"method": "flatten"})
 def directory_flatten(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
     directory: DirectoryEntry,
     minsize: int = 0,
 ) -> None:
     """Recursively retrieve all the files of 'directory' and insert them in the
     'provenance' database in the 'content_to_directory' table.
     """
     stack = [(directory, b"")]
     while stack:
         current, prefix = stack.pop()
         current.retrieve_children(archive, minsize=minsize)
         for f_child in current.files:
             # Add content to the directory with the computed prefix.
             provenance.content_add_to_directory(directory, f_child, prefix)
         for d_child in current.dirs:
             # Recursively walk the child directory.
             stack.append((d_child, os.path.join(prefix, d_child.name)))
-    provenance.directory_flag_as_flattenned(directory)
+    provenance.directory_flag_as_flattened(directory)
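
# A minimal usage sketch of the renamed entry point (assumes `provenance` and
# `archive` are already-configured ProvenanceInterface / ArchiveInterface
# instances obtained elsewhere; the directory id is taken from the cmdbts2
# test dataset used in the tests below):
from swh.model.hashutil import hash_to_bytes
from swh.provenance.algos.directory import directory_add
from swh.provenance.model import DirectoryEntry

directory = DirectoryEntry(
    id=hash_to_bytes("48007c961cc734d1f63886d0413a6dc605e3e2ea")
)
# Only directories known to the model but not yet flattened are processed;
# directory_add() checks directory_already_flattened() before flattening.
directory_add(provenance, archive, [directory], minsize=0, commit=True)
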
diff --git a/swh/provenance/interface.py b/swh/provenance/interface.py
index 010bc9f..715a3f8 100644
--- a/swh/provenance/interface.py
+++ b/swh/provenance/interface.py
@@ -1,184 +1,184 @@
 # Copyright (C) 2021-2022  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from __future__ import annotations
 
 from datetime import datetime
 from types import TracebackType
 from typing import Dict, Generator, Iterable, Optional, Type
 
 from typing_extensions import Protocol, runtime_checkable
 
 from swh.model.model import Sha1Git
 
 from .model import DirectoryEntry, FileEntry, OriginEntry, RevisionEntry
 from .storage.interface import ProvenanceResult, ProvenanceStorageInterface
 
 
 @runtime_checkable
 class ProvenanceInterface(Protocol):
     storage: ProvenanceStorageInterface
 
     def __enter__(self) -> ProvenanceInterface:
         ...
 
     def __exit__(
         self,
         exc_type: Optional[Type[BaseException]],
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> None:
         ...
 
     def close(self) -> None:
         """Close connection to the underlying `storage` and release resources."""
         ...
 
     def flush(self) -> None:
         """Flush internal cache to the underlying `storage`."""
         ...
 
     def flush_if_necessary(self) -> bool:
         """Flush internal cache to the underlying `storage`, if the cache reached
         a threshold (MAX_CACHE_ELEMENTS).
         Return True if the cache is flushed, false otherwise.
         """
         ...
 
     def content_add_to_directory(
         self, directory: DirectoryEntry, blob: FileEntry, prefix: bytes
     ) -> None:
         """Associate `blob` with `directory` in the provenance model. `prefix` is the
         relative path from `directory` to `blob` (excluding `blob`'s name).
         """
         ...
 
     def content_add_to_revision(
         self, revision: RevisionEntry, blob: FileEntry, prefix: bytes
     ) -> None:
         """Associate `blob` with `revision` in the provenance model. `prefix` is the
         absolute path from `revision`'s root directory to `blob` (excluding `blob`'s
         name).
         """
         ...
 
     def content_find_first(self, id: Sha1Git) -> Optional[ProvenanceResult]:
         """Retrieve the first occurrence of the blob identified by `id`."""
         ...
 
     def content_find_all(
         self, id: Sha1Git, limit: Optional[int] = None
     ) -> Generator[ProvenanceResult, None, None]:
         """Retrieve all the occurrences of the blob identified by `id`."""
         ...
 
     def content_get_early_date(self, blob: FileEntry) -> Optional[datetime]:
         """Retrieve the earliest known date of `blob`."""
         ...
 
     def content_get_early_dates(
         self, blobs: Iterable[FileEntry]
     ) -> Dict[Sha1Git, datetime]:
         """Retrieve the earliest known date for each blob in `blobs`. If some blob has
         no associated date, it is not present in the resulting dictionary.
         """
         ...
 
     def content_set_early_date(self, blob: FileEntry, date: datetime) -> None:
         """Associate `date` to `blob` as it's earliest known date."""
         ...
 
     def directory_add_to_revision(
         self, revision: RevisionEntry, directory: DirectoryEntry, path: bytes
     ) -> None:
         """Associate `directory` with `revision` in the provenance model. `path` is the
         absolute path from `revision`'s root directory to `directory` (including
         `directory`'s name).
         """
         ...
 
-    def directory_already_flattenned(self, directory: DirectoryEntry) -> Optional[bool]:
-        """Check if the directory is already flattenned in the provenance model. If the
+    def directory_already_flattened(self, directory: DirectoryEntry) -> Optional[bool]:
+        """Check if the directory is already flattened in the provenance model. If the
        directory is unknown to the model, the method returns None.
         """
         ...
 
-    def directory_flag_as_flattenned(self, directory: DirectoryEntry) -> None:
-        """Mark the directory as flattenned in the provenance model. If the
+    def directory_flag_as_flattened(self, directory: DirectoryEntry) -> None:
+        """Mark the directory as flattened in the provenance model. If the
        directory is unknown to the model, this method has no effect.
         """
         ...
 
     def directory_get_date_in_isochrone_frontier(
         self, directory: DirectoryEntry
     ) -> Optional[datetime]:
         """Retrieve the earliest known date of `directory` as an isochrone frontier in
         the provenance model.
         """
         ...
 
     def directory_get_dates_in_isochrone_frontier(
         self, dirs: Iterable[DirectoryEntry]
     ) -> Dict[Sha1Git, datetime]:
         """Retrieve the earliest known date for each directory in `dirs` as isochrone
        frontiers in the provenance model. If some directory has no associated date, it is not
         present in the resulting dictionary.
         """
         ...
 
     def directory_set_date_in_isochrone_frontier(
         self, directory: DirectoryEntry, date: datetime
     ) -> None:
         """Associate `date` to `directory` as it's earliest known date as an isochrone
         frontier in the provenance model.
         """
         ...
 
     def open(self) -> None:
         """Open connection to the underlying `storage` and allocate necessary
         resources.
         """
         ...
 
     def origin_add(self, origin: OriginEntry) -> None:
         """Add `origin` to the provenance model."""
         ...
 
     def revision_add(self, revision: RevisionEntry) -> None:
         """Add `revision` to the provenance model. This implies storing `revision`'s
         date in the model, thus `revision.date` must be a valid date.
         """
         ...
 
     def revision_add_before_revision(
         self, head_id: Sha1Git, revision_id: Sha1Git
     ) -> None:
         """Associate `revision_id` to `head_id` as an ancestor of the latter."""
         ...
 
     def revision_add_to_origin(
         self, origin: OriginEntry, revision: RevisionEntry
     ) -> None:
         """Associate `revision` to `origin` as a head revision of the latter (ie. the
        target of a snapshot for `origin` in the archive)."""
         ...
 
     def revision_is_head(self, revision: RevisionEntry) -> bool:
         """Check if `revision` is associated as a head revision for some origin."""
         ...
 
     def revision_get_date(self, revision: RevisionEntry) -> Optional[datetime]:
         """Retrieve the date associated to `revision`."""
         ...
 
     def revision_get_preferred_origin(self, revision_id: Sha1Git) -> Optional[Sha1Git]:
         """Retrieve the preferred origin associated to `revision`."""
         ...
 
     def revision_set_preferred_origin(
         self, origin: OriginEntry, revision_id: Sha1Git
     ) -> None:
         """Associate `origin` as the preferred origin for `revision`."""
         ...
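
# A sketch of the tri-state contract of the two renamed methods (assumes
# `provenance` implements ProvenanceInterface and `directory` is a
# DirectoryEntry; `flatten` stands in for the caller's own flattening logic):
flattened = provenance.directory_already_flattened(directory)
if flattened is None:
    pass  # the directory is unknown to the provenance model: nothing to do yet
elif not flattened:
    flatten(directory)  # hypothetical helper doing the actual flattening
    provenance.directory_flag_as_flattened(directory)  # record it in the cache
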
diff --git a/swh/provenance/provenance.py b/swh/provenance/provenance.py
index 907183c..c688eeb 100644
--- a/swh/provenance/provenance.py
+++ b/swh/provenance/provenance.py
@@ -1,518 +1,518 @@
 # Copyright (C) 2021-2022  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from datetime import datetime
 import hashlib
 import logging
 import os
 from types import TracebackType
 from typing import Dict, Generator, Iterable, Optional, Set, Tuple, Type
 
 from typing_extensions import Literal, TypedDict
 
 from swh.core.statsd import statsd
 from swh.model.model import Sha1Git
 
 from .interface import ProvenanceInterface
 from .model import DirectoryEntry, FileEntry, OriginEntry, RevisionEntry
 from .storage.interface import (
     DirectoryData,
     ProvenanceResult,
     ProvenanceStorageInterface,
     RelationData,
     RelationType,
     RevisionData,
 )
 from .util import path_normalize
 
 LOGGER = logging.getLogger(__name__)
 
 BACKEND_DURATION_METRIC = "swh_provenance_backend_duration_seconds"
 BACKEND_OPERATIONS_METRIC = "swh_provenance_backend_operations_total"
 
 
 class DatetimeCache(TypedDict):
     data: Dict[Sha1Git, Optional[datetime]]  # None means unknown
     added: Set[Sha1Git]
 
 
 class OriginCache(TypedDict):
     data: Dict[Sha1Git, str]
     added: Set[Sha1Git]
 
 
 class RevisionCache(TypedDict):
     data: Dict[Sha1Git, Sha1Git]
     added: Set[Sha1Git]
 
 
 class ProvenanceCache(TypedDict):
     content: DatetimeCache
     directory: DatetimeCache
     directory_flatten: Dict[Sha1Git, Optional[bool]]  # None means unknown
     revision: DatetimeCache
     # below are insertion caches only
     content_in_revision: Set[Tuple[Sha1Git, Sha1Git, bytes]]
     content_in_directory: Set[Tuple[Sha1Git, Sha1Git, bytes]]
     directory_in_revision: Set[Tuple[Sha1Git, Sha1Git, bytes]]
     # these two are for the origin layer
     origin: OriginCache
     revision_origin: RevisionCache
     revision_before_revision: Dict[Sha1Git, Set[Sha1Git]]
     revision_in_origin: Set[Tuple[Sha1Git, Sha1Git]]
 
 
 def new_cache() -> ProvenanceCache:
     return ProvenanceCache(
         content=DatetimeCache(data={}, added=set()),
         directory=DatetimeCache(data={}, added=set()),
         directory_flatten={},
         revision=DatetimeCache(data={}, added=set()),
         content_in_revision=set(),
         content_in_directory=set(),
         directory_in_revision=set(),
         origin=OriginCache(data={}, added=set()),
         revision_origin=RevisionCache(data={}, added=set()),
         revision_before_revision={},
         revision_in_origin=set(),
     )
 
 
 class Provenance:
     MAX_CACHE_ELEMENTS = 40000
 
     def __init__(self, storage: ProvenanceStorageInterface) -> None:
         self.storage = storage
         self.cache = new_cache()
 
     def __enter__(self) -> ProvenanceInterface:
         self.open()
         return self
 
     def __exit__(
         self,
         exc_type: Optional[Type[BaseException]],
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> None:
         self.close()
 
     def _flush_limit_reached(self) -> bool:
         return sum(self._get_cache_stats().values()) > self.MAX_CACHE_ELEMENTS
 
     def _get_cache_stats(self) -> Dict[str, int]:
         return {
             k: len(v["data"])
             if (isinstance(v, dict) and v.get("data") is not None)
             else len(v)  # type: ignore
             for (k, v) in self.cache.items()
         }
 
     def clear_caches(self) -> None:
         self.cache = new_cache()
 
     def close(self) -> None:
         self.storage.close()
 
     @statsd.timed(metric=BACKEND_DURATION_METRIC, tags={"method": "flush"})
     def flush(self) -> None:
         self.flush_revision_content_layer()
         self.flush_origin_revision_layer()
         self.clear_caches()
 
     def flush_if_necessary(self) -> bool:
         """Flush if the number of cached information reached a limit."""
         LOGGER.debug("Cache stats: %s", self._get_cache_stats())
         if self._flush_limit_reached():
             self.flush()
             return True
         else:
             return False
 
     @statsd.timed(
         metric=BACKEND_DURATION_METRIC, tags={"method": "flush_origin_revision"}
     )
     def flush_origin_revision_layer(self) -> None:
         # Origins and revisions should be inserted first so that internal ids'
         # resolution works properly.
         urls = {
             sha1: url
             for sha1, url in self.cache["origin"]["data"].items()
             if sha1 in self.cache["origin"]["added"]
         }
         if urls:
             while not self.storage.origin_add(urls):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_origin_revision_retry_origin"},
                 )
                 LOGGER.warning(
                     "Unable to write origins urls to the storage. Retrying..."
                 )
 
         rev_orgs = {
             # Destinations in this relation should match origins in the next one
             **{
                 src: RevisionData(date=None, origin=None)
                 for src in self.cache["revision_before_revision"]
             },
             **{
                 # This relation comes second so that non-None origins take precedence
                 src: RevisionData(date=None, origin=org)
                 for src, org in self.cache["revision_in_origin"]
             },
         }
         if rev_orgs:
             while not self.storage.revision_add(rev_orgs):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_origin_revision_retry_revision"},
                 )
                 LOGGER.warning(
                     "Unable to write revision entities to the storage. Retrying..."
                 )
 
        # Second, insert the flat models for revisions' histories (ie. revision-before-revision).
         if self.cache["revision_before_revision"]:
             rev_before_rev = {
                 src: {RelationData(dst=dst, path=None) for dst in dsts}
                 for src, dsts in self.cache["revision_before_revision"].items()
             }
             while not self.storage.relation_add(
                 RelationType.REV_BEFORE_REV, rev_before_rev
             ):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={
                         "method": "flush_origin_revision_retry_revision_before_revision"
                     },
                 )
                 LOGGER.warning(
                     "Unable to write %s rows to the storage. Retrying...",
                     RelationType.REV_BEFORE_REV,
                 )
 
        # Heads (ie. revision-in-origin entries) should be inserted once the flat models
        # for their histories have been added. This is to guarantee consistent results if
         # something needs to be reprocessed due to a failure: already inserted heads
         # won't get reprocessed in such a case.
         if self.cache["revision_in_origin"]:
             rev_in_org: Dict[Sha1Git, Set[RelationData]] = {}
             for src, dst in self.cache["revision_in_origin"]:
                 rev_in_org.setdefault(src, set()).add(RelationData(dst=dst, path=None))
             while not self.storage.relation_add(RelationType.REV_IN_ORG, rev_in_org):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_origin_revision_retry_revision_in_origin"},
                 )
                 LOGGER.warning(
                     "Unable to write %s rows to the storage. Retrying...",
                     RelationType.REV_IN_ORG,
                 )
 
     @statsd.timed(
         metric=BACKEND_DURATION_METRIC, tags={"method": "flush_revision_content"}
     )
     def flush_revision_content_layer(self) -> None:
        # Register all entities in the storage first, so that the upcoming relations
        # can properly resolve any internal references. Content and directory entries
        # may safely be registered with their associated dates. In contrast, revision
        # entries should be registered without their date, as the date is used to
        # acknowledge that the flushing was successful. Also, directories are
        # registered with their flatten flag unset.
         cnt_dates = {
             sha1: date
             for sha1, date in self.cache["content"]["data"].items()
             if sha1 in self.cache["content"]["added"] and date is not None
         }
         if cnt_dates:
             while not self.storage.content_add(cnt_dates):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_revision_content_retry_content_date"},
                 )
                 LOGGER.warning(
                     "Unable to write content dates to the storage. Retrying..."
                 )
 
         dir_dates = {
             sha1: DirectoryData(date=date, flat=False)
             for sha1, date in self.cache["directory"]["data"].items()
             if sha1 in self.cache["directory"]["added"] and date is not None
         }
         if dir_dates:
             while not self.storage.directory_add(dir_dates):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_revision_content_retry_directory_date"},
                 )
                 LOGGER.warning(
                     "Unable to write directory dates to the storage. Retrying..."
                 )
 
         revs = {
             sha1: RevisionData(date=None, origin=None)
             for sha1, date in self.cache["revision"]["data"].items()
             if sha1 in self.cache["revision"]["added"] and date is not None
         }
         if revs:
             while not self.storage.revision_add(revs):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_revision_content_retry_revision_none"},
                 )
                 LOGGER.warning(
                     "Unable to write revision entities to the storage. Retrying..."
                 )
 
         paths = {
             hashlib.sha1(path).digest(): path
             for _, _, path in self.cache["content_in_revision"]
             | self.cache["content_in_directory"]
             | self.cache["directory_in_revision"]
         }
         if paths:
             while not self.storage.location_add(paths):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_revision_content_retry_location"},
                 )
                 LOGGER.warning(
                     "Unable to write locations entities to the storage. Retrying..."
                 )
 
         # For this layer, relations need to be inserted first so that, in case of
        # failure, reprocessing the input does not generate an inconsistent database.
         if self.cache["content_in_revision"]:
             cnt_in_rev: Dict[Sha1Git, Set[RelationData]] = {}
             for src, dst, path in self.cache["content_in_revision"]:
                 cnt_in_rev.setdefault(src, set()).add(RelationData(dst=dst, path=path))
             while not self.storage.relation_add(
                 RelationType.CNT_EARLY_IN_REV, cnt_in_rev
             ):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_revision_content_retry_content_in_revision"},
                 )
                 LOGGER.warning(
                     "Unable to write %s rows to the storage. Retrying...",
                     RelationType.CNT_EARLY_IN_REV,
                 )
 
         if self.cache["content_in_directory"]:
             cnt_in_dir: Dict[Sha1Git, Set[RelationData]] = {}
             for src, dst, path in self.cache["content_in_directory"]:
                 cnt_in_dir.setdefault(src, set()).add(RelationData(dst=dst, path=path))
             while not self.storage.relation_add(RelationType.CNT_IN_DIR, cnt_in_dir):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={
                         "method": "flush_revision_content_retry_content_in_directory"
                     },
                 )
                 LOGGER.warning(
                     "Unable to write %s rows to the storage. Retrying...",
                     RelationType.CNT_IN_DIR,
                 )
 
         if self.cache["directory_in_revision"]:
             dir_in_rev: Dict[Sha1Git, Set[RelationData]] = {}
             for src, dst, path in self.cache["directory_in_revision"]:
                 dir_in_rev.setdefault(src, set()).add(RelationData(dst=dst, path=path))
             while not self.storage.relation_add(RelationType.DIR_IN_REV, dir_in_rev):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={
                         "method": "flush_revision_content_retry_directory_in_revision"
                     },
                 )
                 LOGGER.warning(
                     "Unable to write %s rows to the storage. Retrying...",
                     RelationType.DIR_IN_REV,
                 )
 
         # After relations, flatten flags for directories can be safely set (if
        # applicable), acknowledging those directories that have already been flattened.
         # Similarly, dates for the revisions are set to acknowledge that these revisions
         # won't need to be reprocessed in case of failure.
         dir_acks = {
             sha1: DirectoryData(
                 date=date, flat=self.cache["directory_flatten"].get(sha1) or False
             )
             for sha1, date in self.cache["directory"]["data"].items()
             if self.cache["directory_flatten"].get(sha1) and date is not None
         }
         if dir_acks:
             while not self.storage.directory_add(dir_acks):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_revision_content_retry_directory_ack"},
                 )
                 LOGGER.warning(
                     "Unable to write directory dates to the storage. Retrying..."
                 )
 
         rev_dates = {
             sha1: RevisionData(date=date, origin=None)
             for sha1, date in self.cache["revision"]["data"].items()
             if sha1 in self.cache["revision"]["added"] and date is not None
         }
         if rev_dates:
             while not self.storage.revision_add(rev_dates):
                 statsd.increment(
                     metric=BACKEND_OPERATIONS_METRIC,
                     tags={"method": "flush_revision_content_retry_revision_date"},
                 )
                 LOGGER.warning(
                     "Unable to write revision dates to the storage. Retrying..."
                 )
 
     def content_add_to_directory(
         self, directory: DirectoryEntry, blob: FileEntry, prefix: bytes
     ) -> None:
         self.cache["content_in_directory"].add(
             (blob.id, directory.id, path_normalize(os.path.join(prefix, blob.name)))
         )
 
     def content_add_to_revision(
         self, revision: RevisionEntry, blob: FileEntry, prefix: bytes
     ) -> None:
         self.cache["content_in_revision"].add(
             (blob.id, revision.id, path_normalize(os.path.join(prefix, blob.name)))
         )
 
     def content_find_first(self, id: Sha1Git) -> Optional[ProvenanceResult]:
         return self.storage.content_find_first(id)
 
     def content_find_all(
         self, id: Sha1Git, limit: Optional[int] = None
     ) -> Generator[ProvenanceResult, None, None]:
         yield from self.storage.content_find_all(id, limit=limit)
 
     def content_get_early_date(self, blob: FileEntry) -> Optional[datetime]:
         return self.get_dates("content", [blob.id]).get(blob.id)
 
     def content_get_early_dates(
         self, blobs: Iterable[FileEntry]
     ) -> Dict[Sha1Git, datetime]:
         return self.get_dates("content", [blob.id for blob in blobs])
 
     def content_set_early_date(self, blob: FileEntry, date: datetime) -> None:
         self.cache["content"]["data"][blob.id] = date
         self.cache["content"]["added"].add(blob.id)
 
     def directory_add_to_revision(
         self, revision: RevisionEntry, directory: DirectoryEntry, path: bytes
     ) -> None:
         self.cache["directory_in_revision"].add(
             (directory.id, revision.id, path_normalize(path))
         )
 
-    def directory_already_flattenned(self, directory: DirectoryEntry) -> Optional[bool]:
+    def directory_already_flattened(self, directory: DirectoryEntry) -> Optional[bool]:
         cache = self.cache["directory_flatten"]
         if directory.id not in cache:
             cache.setdefault(directory.id, None)
             ret = self.storage.directory_get([directory.id])
             if directory.id in ret:
                 dir = ret[directory.id]
                 cache[directory.id] = dir.flat
                 # date is kept to ensure we have it available when flushing
                 self.cache["directory"]["data"][directory.id] = dir.date
         return cache.get(directory.id)
 
-    def directory_flag_as_flattenned(self, directory: DirectoryEntry) -> None:
+    def directory_flag_as_flattened(self, directory: DirectoryEntry) -> None:
         self.cache["directory_flatten"][directory.id] = True
 
     def directory_get_date_in_isochrone_frontier(
         self, directory: DirectoryEntry
     ) -> Optional[datetime]:
         return self.get_dates("directory", [directory.id]).get(directory.id)
 
     def directory_get_dates_in_isochrone_frontier(
         self, dirs: Iterable[DirectoryEntry]
     ) -> Dict[Sha1Git, datetime]:
         return self.get_dates("directory", [directory.id for directory in dirs])
 
     def directory_set_date_in_isochrone_frontier(
         self, directory: DirectoryEntry, date: datetime
     ) -> None:
         self.cache["directory"]["data"][directory.id] = date
         self.cache["directory"]["added"].add(directory.id)
 
     def get_dates(
         self,
         entity: Literal["content", "directory", "revision"],
         ids: Iterable[Sha1Git],
     ) -> Dict[Sha1Git, datetime]:
         cache = self.cache[entity]
         missing_ids = set(id for id in ids if id not in cache)
         if missing_ids:
             if entity == "content":
                 cache["data"].update(self.storage.content_get(missing_ids))
             elif entity == "directory":
                 cache["data"].update(
                     {
                         id: dir.date
                         for id, dir in self.storage.directory_get(missing_ids).items()
                     }
                 )
             elif entity == "revision":
                 cache["data"].update(
                     {
                         id: rev.date
                         for id, rev in self.storage.revision_get(missing_ids).items()
                     }
                 )
         dates: Dict[Sha1Git, datetime] = {}
         for sha1 in ids:
             date = cache["data"].setdefault(sha1, None)
             if date is not None:
                 dates[sha1] = date
         return dates
 
     def open(self) -> None:
         self.storage.open()
 
     def origin_add(self, origin: OriginEntry) -> None:
         self.cache["origin"]["data"][origin.id] = origin.url
         self.cache["origin"]["added"].add(origin.id)
 
     def revision_add(self, revision: RevisionEntry) -> None:
         self.cache["revision"]["data"][revision.id] = revision.date
         self.cache["revision"]["added"].add(revision.id)
 
     def revision_add_before_revision(
         self, head_id: Sha1Git, revision_id: Sha1Git
     ) -> None:
         self.cache["revision_before_revision"].setdefault(revision_id, set()).add(
             head_id
         )
 
     def revision_add_to_origin(
         self, origin: OriginEntry, revision: RevisionEntry
     ) -> None:
         self.cache["revision_in_origin"].add((revision.id, origin.id))
 
     def revision_is_head(self, revision: RevisionEntry) -> bool:
         return bool(self.storage.relation_get(RelationType.REV_IN_ORG, [revision.id]))
 
     def revision_get_date(self, revision: RevisionEntry) -> Optional[datetime]:
         return self.get_dates("revision", [revision.id]).get(revision.id)
 
     def revision_get_preferred_origin(self, revision_id: Sha1Git) -> Optional[Sha1Git]:
         cache = self.cache["revision_origin"]["data"]
         if revision_id not in cache:
             ret = self.storage.revision_get([revision_id])
             if revision_id in ret:
                 origin = ret[revision_id].origin
                 if origin is not None:
                     cache[revision_id] = origin
         return cache.get(revision_id)
 
     def revision_set_preferred_origin(
         self, origin: OriginEntry, revision_id: Sha1Git
     ) -> None:
         self.cache["revision_origin"]["data"][revision_id] = origin.id
         self.cache["revision_origin"]["added"].add(revision_id)
diff --git a/swh/provenance/storage/interface.py b/swh/provenance/storage/interface.py
index c0ba7d1..f238adb 100644
--- a/swh/provenance/storage/interface.py
+++ b/swh/provenance/storage/interface.py
@@ -1,229 +1,229 @@
 # Copyright (C) 2021-2022  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from __future__ import annotations
 
 from dataclasses import dataclass
 from datetime import datetime
 import enum
 from types import TracebackType
 from typing import Dict, Generator, Iterable, List, Optional, Set, Type
 
 from typing_extensions import Protocol, runtime_checkable
 
 from swh.core.api import remote_api_endpoint
 from swh.model.model import Sha1Git
 
 
 class EntityType(enum.Enum):
     CONTENT = "content"
     DIRECTORY = "directory"
     REVISION = "revision"
     ORIGIN = "origin"
 
 
 class RelationType(enum.Enum):
     CNT_EARLY_IN_REV = "content_in_revision"
     CNT_IN_DIR = "content_in_directory"
     DIR_IN_REV = "directory_in_revision"
     REV_IN_ORG = "revision_in_origin"
     REV_BEFORE_REV = "revision_before_revision"
 
 
 @dataclass(eq=True, frozen=True)
 class ProvenanceResult:
     content: Sha1Git
     revision: Sha1Git
     date: datetime
     origin: Optional[str]
     path: bytes
 
 
 @dataclass(eq=True, frozen=True)
 class DirectoryData:
     """Object representing the data associated to a directory in the provenance model,
     where `date` is the date of the directory in the isochrone frontier, and `flat` is a
     flag acknowledging that a flat model for the elements outside the frontier has
     already been created.
     """
 
     date: datetime
     flat: bool
 
 
 @dataclass(eq=True, frozen=True)
 class RevisionData:
     """Object representing the data associated to a revision in the provenance model,
     where `date` is the optional date of the revision (specifying it acknowledges that
     the revision was already processed by the revision-content algorithm); and `origin`
     identifies the preferred origin for the revision, if any.
     """
 
     date: Optional[datetime]
     origin: Optional[Sha1Git]
 
 
 @dataclass(eq=True, frozen=True)
 class RelationData:
     """Object representing a relation entry in the provenance model, where `src` and
     `dst` are the sha1 ids of the entities being related, and `path` is optional
     depending on the relation being represented.
     """
 
     dst: Sha1Git
     path: Optional[bytes]
 
 
 @runtime_checkable
 class ProvenanceStorageInterface(Protocol):
     def __enter__(self) -> ProvenanceStorageInterface:
         ...
 
     def __exit__(
         self,
         exc_type: Optional[Type[BaseException]],
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> None:
         ...
 
     @remote_api_endpoint("close")
     def close(self) -> None:
         """Close connection to the storage and release resources."""
         ...
 
     @remote_api_endpoint("content_add")
     def content_add(self, cnts: Dict[Sha1Git, datetime]) -> bool:
         """Add blobs identified by sha1 ids, with an associated date (as paired in
         `cnts`) to the provenance storage. Return a boolean stating whether the
         information was successfully stored.
         """
         ...
 
     @remote_api_endpoint("content_find_first")
     def content_find_first(self, id: Sha1Git) -> Optional[ProvenanceResult]:
         """Retrieve the first occurrence of the blob identified by `id`."""
         ...
 
     @remote_api_endpoint("content_find_all")
     def content_find_all(
         self, id: Sha1Git, limit: Optional[int] = None
     ) -> Generator[ProvenanceResult, None, None]:
         """Retrieve all the occurrences of the blob identified by `id`."""
         ...
 
     @remote_api_endpoint("content_get")
     def content_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, datetime]:
         """Retrieve the associated date for each blob sha1 in `ids`."""
         ...
 
     @remote_api_endpoint("directory_add")
     def directory_add(self, dirs: Dict[Sha1Git, DirectoryData]) -> bool:
         """Add directories identified by sha1 ids, with associated date and (optional)
         flatten flag (as paired in `dirs`) to the provenance storage. If the flatten
         flag is set to None, the previous value present in the storage is preserved.
         Return a boolean stating if the information was successfully stored.
         """
         ...
 
     @remote_api_endpoint("directory_get")
     def directory_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, DirectoryData]:
         """Retrieve the associated date and (optional) flatten flag for each directory
        sha1 in `ids`. If some directory has no associated date, it is not present in
         the resulting dictionary.
         """
         ...
 
-    @remote_api_endpoint("directory_iter_not_flattenned")
-    def directory_iter_not_flattenned(
+    @remote_api_endpoint("directory_iter_not_flattened")
+    def directory_iter_not_flattened(
         self, limit: int, start_id: Sha1Git
     ) -> List[Sha1Git]:
-        """Retrieve the unflattenned directories after ``start_id`` up to ``limit`` entries."""
+        """Retrieve the unflattened directories after ``start_id`` up to ``limit`` entries."""
         ...
 
     @remote_api_endpoint("entity_get_all")
     def entity_get_all(self, entity: EntityType) -> Set[Sha1Git]:
         """Retrieve all sha1 ids for entities of type `entity` present in the provenance
         model. This method is used only in tests.
         """
         ...
 
     @remote_api_endpoint("location_add")
     def location_add(self, paths: Dict[Sha1Git, bytes]) -> bool:
         """Register the given `paths` in the storage."""
         ...
 
     @remote_api_endpoint("location_get_all")
     def location_get_all(self) -> Dict[Sha1Git, bytes]:
         """Retrieve all paths present in the provenance model.
         This method is used only in tests."""
         ...
 
     @remote_api_endpoint("open")
     def open(self) -> None:
         """Open connection to the storage and allocate necessary resources."""
         ...
 
     @remote_api_endpoint("origin_add")
     def origin_add(self, orgs: Dict[Sha1Git, str]) -> bool:
         """Add origins identified by sha1 ids, with their corresponding url (as paired
         in `orgs`) to the provenance storage. Return a boolean stating if the
         information was successfully stored.
         """
         ...
 
     @remote_api_endpoint("origin_get")
     def origin_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, str]:
         """Retrieve the associated url for each origin sha1 in `ids`."""
         ...
 
     @remote_api_endpoint("revision_add")
     def revision_add(self, revs: Dict[Sha1Git, RevisionData]) -> bool:
         """Add revisions identified by sha1 ids, with optional associated date or origin
         (as paired in `revs`) to the provenance storage. Return a boolean stating if the
         information was successfully stored.
         """
         ...
 
     @remote_api_endpoint("revision_get")
     def revision_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, RevisionData]:
         """Retrieve the associated date and origin for each revision sha1 in `ids`. If
        some revision has no associated date or origin, it is not present in the
         resulting dictionary.
         """
         ...
 
     @remote_api_endpoint("relation_add")
     def relation_add(
         self, relation: RelationType, data: Dict[Sha1Git, Set[RelationData]]
     ) -> bool:
         """Add entries in the selected `relation`. This method assumes all entities
         being related are already registered in the storage. See `content_add`,
         `directory_add`, `origin_add`, and `revision_add`.
         """
         ...
 
     @remote_api_endpoint("relation_get")
     def relation_get(
         self, relation: RelationType, ids: Iterable[Sha1Git], reverse: bool = False
     ) -> Dict[Sha1Git, Set[RelationData]]:
         """Retrieve all entries in the selected `relation` whose source entities are
         identified by some sha1 id in `ids`. If `reverse` is set, destination entities
         are matched instead.
         """
         ...
 
     @remote_api_endpoint("relation_get_all")
     def relation_get_all(
         self, relation: RelationType
     ) -> Dict[Sha1Git, Set[RelationData]]:
         """Retrieve all entries in the selected `relation` that are present in the
         provenance model. This method is used only in tests.
         """
         ...
 
     @remote_api_endpoint("with_path")
     def with_path(self) -> bool:
         ...
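
# A sketch of cursor-style pagination over unflattened directories (assumes
# `storage` implements ProvenanceStorageInterface and `process` is a
# hypothetical per-batch handler; the 100-entry page size is arbitrary). The
# last id of each page becomes the start of the next one, mirroring
# directory_flatten_range() in swh.provenance.algos.directory:
current = b"\x00" * 20  # smallest possible sha1_git
while True:
    page = storage.directory_iter_not_flattened(limit=100, start_id=current)
    if not page:
        break
    process(page)
    current = page[-1]
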
diff --git a/swh/provenance/storage/postgresql.py b/swh/provenance/storage/postgresql.py
index f65f092..bb24a87 100644
--- a/swh/provenance/storage/postgresql.py
+++ b/swh/provenance/storage/postgresql.py
@@ -1,398 +1,398 @@
 # Copyright (C) 2021  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from __future__ import annotations
 
 from contextlib import contextmanager
 from datetime import datetime
 from functools import wraps
 from hashlib import sha1
 import itertools
 import logging
 from types import TracebackType
 from typing import Dict, Generator, Iterable, List, Optional, Set, Type
 
 import psycopg2.extensions
 import psycopg2.extras
 
 from swh.core.db import BaseDb
 from swh.core.statsd import statsd
 from swh.model.model import Sha1Git
 from swh.provenance.storage.interface import (
     DirectoryData,
     EntityType,
     ProvenanceResult,
     ProvenanceStorageInterface,
     RelationData,
     RelationType,
     RevisionData,
 )
 
 LOGGER = logging.getLogger(__name__)
 
 STORAGE_DURATION_METRIC = "swh_provenance_storage_postgresql_duration_seconds"
 
 
 def handle_raise_on_commit(f):
     @wraps(f)
     def handle(self, *args, **kwargs):
         try:
             return f(self, *args, **kwargs)
         except BaseException as ex:
            # Unexpected error occurred: roll back all changes and log the exception
             LOGGER.exception("Unexpected error")
             if self.raise_on_commit:
                 raise ex
             return False
 
     return handle
 
 
 class ProvenanceStoragePostgreSql:
     current_version = 3
 
     def __init__(
         self, page_size: Optional[int] = None, raise_on_commit: bool = False, **kwargs
     ) -> None:
         self.conn: Optional[psycopg2.extensions.connection] = None
         self.conn_args = kwargs
         self._flavor: Optional[str] = None
         self.page_size = page_size
         self.raise_on_commit = raise_on_commit
 
     def __enter__(self) -> ProvenanceStorageInterface:
         self.open()
         return self
 
     def __exit__(
         self,
         exc_type: Optional[Type[BaseException]],
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> None:
         self.close()
 
     @contextmanager
     def transaction(
         self, readonly: bool = False
     ) -> Generator[psycopg2.extras.RealDictCursor, None, None]:
         if self.conn is None:
             raise RuntimeError(
                 "Tried to access ProvenanceStoragePostgreSQL transaction() without opening it"
             )
         self.conn.set_session(readonly=readonly)
         with self.conn:
             with self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                 yield cur
 
     @property
     def flavor(self) -> str:
         if self._flavor is None:
             with self.transaction(readonly=True) as cursor:
                 cursor.execute("SELECT swh_get_dbflavor() AS flavor")
                 flavor = cursor.fetchone()
                 assert flavor  # please mypy
                 self._flavor = flavor["flavor"]
         assert self._flavor is not None
         return self._flavor
 
     @property
     def denormalized(self) -> bool:
         return "denormalized" in self.flavor
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "close"})
     def close(self) -> None:
         assert self.conn is not None
         self.conn.close()
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_add"})
     @handle_raise_on_commit
     def content_add(self, cnts: Dict[Sha1Git, datetime]) -> bool:
         if cnts:
             sql = """
                 INSERT INTO content(sha1, date) VALUES %s
                   ON CONFLICT (sha1) DO
                   UPDATE SET date=LEAST(EXCLUDED.date,content.date)
                 """
             page_size = self.page_size or len(cnts)
             with self.transaction() as cursor:
                 psycopg2.extras.execute_values(
                     cursor, sql, argslist=cnts.items(), page_size=page_size
                 )
         return True
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_first"})
     def content_find_first(self, id: Sha1Git) -> Optional[ProvenanceResult]:
         sql = "SELECT * FROM swh_provenance_content_find_first(%s)"
         with self.transaction(readonly=True) as cursor:
             cursor.execute(query=sql, vars=(id,))
             row = cursor.fetchone()
         return ProvenanceResult(**row) if row is not None else None
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_all"})
     def content_find_all(
         self, id: Sha1Git, limit: Optional[int] = None
     ) -> Generator[ProvenanceResult, None, None]:
         sql = "SELECT * FROM swh_provenance_content_find_all(%s, %s)"
         with self.transaction(readonly=True) as cursor:
             cursor.execute(query=sql, vars=(id, limit))
             yield from (ProvenanceResult(**row) for row in cursor)
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_get"})
     def content_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, datetime]:
         dates: Dict[Sha1Git, datetime] = {}
         sha1s = tuple(ids)
         if sha1s:
             # TODO: consider splitting this query in several ones if sha1s is too big!
             values = ", ".join(itertools.repeat("%s", len(sha1s)))
             sql = f"""
                 SELECT sha1, date
                   FROM content
                   WHERE sha1 IN ({values})
                     AND date IS NOT NULL
                 """
             with self.transaction(readonly=True) as cursor:
                 cursor.execute(query=sql, vars=sha1s)
                 dates.update((row["sha1"], row["date"]) for row in cursor)
         return dates
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_add"})
     @handle_raise_on_commit
     def directory_add(self, dirs: Dict[Sha1Git, DirectoryData]) -> bool:
         data = [(sha1, rev.date, rev.flat) for sha1, rev in dirs.items()]
         if data:
             sql = """
                 INSERT INTO directory(sha1, date, flat) VALUES %s
                   ON CONFLICT (sha1) DO
                   UPDATE SET
                     date=LEAST(EXCLUDED.date, directory.date),
                     flat=(EXCLUDED.flat OR directory.flat)
                 """
             page_size = self.page_size or len(data)
             with self.transaction() as cursor:
                 psycopg2.extras.execute_values(
                     cur=cursor, sql=sql, argslist=data, page_size=page_size
                 )
         return True
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_get"})
     def directory_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, DirectoryData]:
         result: Dict[Sha1Git, DirectoryData] = {}
         sha1s = tuple(ids)
         if sha1s:
             # TODO: consider splitting this query in several ones if sha1s is too big!
             values = ", ".join(itertools.repeat("%s", len(sha1s)))
             sql = f"""
                 SELECT sha1, date, flat
                   FROM directory
                   WHERE sha1 IN ({values})
                     AND date IS NOT NULL
                 """
             with self.transaction(readonly=True) as cursor:
                 cursor.execute(query=sql, vars=sha1s)
                 result.update(
                     (row["sha1"], DirectoryData(date=row["date"], flat=row["flat"]))
                     for row in cursor
                 )
         return result
 
     @statsd.timed(
-        metric=STORAGE_DURATION_METRIC, tags={"method": "directory_iter_not_flattenned"}
+        metric=STORAGE_DURATION_METRIC, tags={"method": "directory_iter_not_flattened"}
     )
-    def directory_iter_not_flattenned(
+    def directory_iter_not_flattened(
         self, limit: int, start_id: Sha1Git
     ) -> List[Sha1Git]:
         sql = """
         SELECT sha1 FROM directory
         WHERE flat=false AND sha1>%s ORDER BY sha1 LIMIT %s
         """
         with self.transaction(readonly=True) as cursor:
             cursor.execute(query=sql, vars=(start_id, limit))
             return [row["sha1"] for row in cursor]
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "entity_get_all"})
     def entity_get_all(self, entity: EntityType) -> Set[Sha1Git]:
         with self.transaction(readonly=True) as cursor:
             cursor.execute(f"SELECT sha1 FROM {entity.value}")
             return {row["sha1"] for row in cursor}
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "location_add"})
     @handle_raise_on_commit
     def location_add(self, paths: Dict[Sha1Git, bytes]) -> bool:
         if self.with_path():
             values = [(path,) for path in paths.values()]
             if values:
                 sql = """
                     INSERT INTO location(path) VALUES %s
                       ON CONFLICT DO NOTHING
                     """
                 page_size = self.page_size or len(values)
                 with self.transaction() as cursor:
                     psycopg2.extras.execute_values(
                         cursor, sql, argslist=values, page_size=page_size
                     )
         return True
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "location_get_all"})
     def location_get_all(self) -> Dict[Sha1Git, bytes]:
         with self.transaction(readonly=True) as cursor:
             cursor.execute("SELECT location.path AS path FROM location")
             return {sha1(row["path"]).digest(): row["path"] for row in cursor}
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_add"})
     @handle_raise_on_commit
     def origin_add(self, orgs: Dict[Sha1Git, str]) -> bool:
         if orgs:
             sql = """
                 INSERT INTO origin(sha1, url) VALUES %s
                   ON CONFLICT DO NOTHING
                 """
             page_size = self.page_size or len(orgs)
             with self.transaction() as cursor:
                 psycopg2.extras.execute_values(
                     cur=cursor,
                     sql=sql,
                     argslist=orgs.items(),
                     page_size=page_size,
                 )
         return True
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "open"})
     def open(self) -> None:
         self.conn = BaseDb.connect(**self.conn_args).conn
         BaseDb.adapt_conn(self.conn)
         with self.transaction() as cursor:
             cursor.execute("SET timezone TO 'UTC'")
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_get"})
     def origin_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, str]:
         urls: Dict[Sha1Git, str] = {}
         sha1s = tuple(ids)
         if sha1s:
             # TODO: consider splitting this query in several ones if sha1s is too big!
             values = ", ".join(itertools.repeat("%s", len(sha1s)))
             sql = f"""
                 SELECT sha1, url
                   FROM origin
                   WHERE sha1 IN ({values})
                 """
             with self.transaction(readonly=True) as cursor:
                 cursor.execute(query=sql, vars=sha1s)
                 urls.update((row["sha1"], row["url"]) for row in cursor)
         return urls
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "revision_add"})
     @handle_raise_on_commit
     def revision_add(self, revs: Dict[Sha1Git, RevisionData]) -> bool:
         if revs:
             data = [(sha1, rev.date, rev.origin) for sha1, rev in revs.items()]
             sql = """
                 INSERT INTO revision(sha1, date, origin)
                   (SELECT V.rev AS sha1, V.date::timestamptz AS date, O.id AS origin
                    FROM (VALUES %s) AS V(rev, date, org)
                    LEFT JOIN origin AS O ON (O.sha1=V.org::sha1_git))
                   ON CONFLICT (sha1) DO
                   UPDATE SET
                     date=LEAST(EXCLUDED.date, revision.date),
                     origin=COALESCE(EXCLUDED.origin, revision.origin)
                 """
             page_size = self.page_size or len(data)
             with self.transaction() as cursor:
                 psycopg2.extras.execute_values(
                     cur=cursor, sql=sql, argslist=data, page_size=page_size
                 )
         return True
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "revision_get"})
     def revision_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, RevisionData]:
         result: Dict[Sha1Git, RevisionData] = {}
         sha1s = tuple(ids)
         if sha1s:
             # TODO: consider splitting this query in several ones if sha1s is too big!
             values = ", ".join(itertools.repeat("%s", len(sha1s)))
             sql = f"""
                 SELECT R.sha1, R.date, O.sha1 AS origin
                   FROM revision AS R
                   LEFT JOIN origin AS O ON (O.id=R.origin)
                   WHERE R.sha1 IN ({values})
                     AND (R.date is not NULL OR O.sha1 is not NULL)
                 """
             with self.transaction(readonly=True) as cursor:
                 cursor.execute(query=sql, vars=sha1s)
                 result.update(
                     (row["sha1"], RevisionData(date=row["date"], origin=row["origin"]))
                     for row in cursor
                 )
         return result
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_add"})
     @handle_raise_on_commit
     def relation_add(
         self, relation: RelationType, data: Dict[Sha1Git, Set[RelationData]]
     ) -> bool:
         rows = [(src, rel.dst, rel.path) for src, dsts in data.items() for rel in dsts]
         if rows:
             rel_table = relation.value
             src_table, *_, dst_table = rel_table.split("_")
             page_size = self.page_size or len(rows)
             # Put the next three queries in a manual single transaction:
             # they use the same temp table
             with self.transaction() as cursor:
                 cursor.execute("SELECT swh_mktemp_relation_add()")
                 psycopg2.extras.execute_values(
                     cur=cursor,
                     sql="INSERT INTO tmp_relation_add(src, dst, path) VALUES %s",
                     argslist=rows,
                     page_size=page_size,
                 )
                 sql = "SELECT swh_provenance_relation_add_from_temp(%s, %s, %s)"
                 cursor.execute(query=sql, vars=(rel_table, src_table, dst_table))
         return True
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_get"})
     def relation_get(
         self, relation: RelationType, ids: Iterable[Sha1Git], reverse: bool = False
     ) -> Dict[Sha1Git, Set[RelationData]]:
         return self._relation_get(relation, ids, reverse)
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_get_all"})
     def relation_get_all(
         self, relation: RelationType
     ) -> Dict[Sha1Git, Set[RelationData]]:
         return self._relation_get(relation, None)
 
     def _relation_get(
         self,
         relation: RelationType,
         ids: Optional[Iterable[Sha1Git]],
         reverse: bool = False,
     ) -> Dict[Sha1Git, Set[RelationData]]:
         result: Dict[Sha1Git, Set[RelationData]] = {}
 
         sha1s: List[Sha1Git]
         if ids is not None:
             sha1s = list(ids)
             filter = "filter-src" if not reverse else "filter-dst"
         else:
             sha1s = []
             filter = "no-filter"
 
         if filter == "no-filter" or sha1s:
             rel_table = relation.value
             src_table, *_, dst_table = rel_table.split("_")
 
             sql = "SELECT * FROM swh_provenance_relation_get(%s, %s, %s, %s, %s)"
             with self.transaction(readonly=True) as cursor:
                 cursor.execute(
                     query=sql, vars=(rel_table, src_table, dst_table, filter, sha1s)
                 )
                 for row in cursor:
                     src = row.pop("src")
                     result.setdefault(src, set()).add(RelationData(**row))
         return result
 
     @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "with_path"})
     def with_path(self) -> bool:
         return "with-path" in self.flavor
diff --git a/swh/provenance/tests/test_directory_flatten.py b/swh/provenance/tests/test_directory_flatten.py
index 4a62b86..84b10fd 100644
--- a/swh/provenance/tests/test_directory_flatten.py
+++ b/swh/provenance/tests/test_directory_flatten.py
@@ -1,101 +1,101 @@
 # Copyright (C) 2021-2022  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 
 from datetime import datetime, timezone
 from typing import Tuple
 
 from swh.model.hashutil import hash_to_bytes
 from swh.provenance.algos.directory import directory_add, directory_flatten_range
 from swh.provenance.archive import ArchiveInterface
 from swh.provenance.interface import ProvenanceInterface
 from swh.provenance.model import DirectoryEntry, FileEntry
 from swh.provenance.storage.interface import DirectoryData, RelationData, RelationType
 from swh.provenance.tests.conftest import fill_storage, load_repo_data
 
 
 def prepare(
     provenance: ProvenanceInterface, archive: ArchiveInterface
 ) -> Tuple[datetime, DirectoryEntry, FileEntry, FileEntry]:
     """Prepare the provenance database with some content suitable for flattening tests"""
     # read data/README.md for more details on how these datasets are generated
     data = load_repo_data("cmdbts2")
     fill_storage(archive.storage, data)
 
     # just take a directory that is known to exist in cmdbts2
     directory = DirectoryEntry(
         id=hash_to_bytes("48007c961cc734d1f63886d0413a6dc605e3e2ea")
     )
     content1 = FileEntry(
         id=hash_to_bytes("20329687bb9c1231a7e05afe86160343ad49b494"), name=b"a"
     )
     content2 = FileEntry(
         id=hash_to_bytes("50e9cdb03f9719261dd39d7f2920b906db3711a3"), name=b"b"
     )
     date = datetime.fromtimestamp(1000000010, timezone.utc)
 
     # directory_add and the internal directory_flatten require the directory and its
     # content to be known to the provenance object; otherwise they do nothing.
     provenance.directory_set_date_in_isochrone_frontier(directory, date)
     provenance.content_set_early_date(content1, date)
     provenance.content_set_early_date(content2, date)
     provenance.flush()
     assert provenance.storage.directory_get([directory.id]) == {
         directory.id: DirectoryData(date=date, flat=False)
     }
     assert provenance.storage.content_get([content1.id, content2.id]) == {
         content1.id: date,
         content2.id: date,
     }
 
     # this query forces the directory date to be retrieved from the storage and cached
     # (otherwise, the flush below won't update the directory flatten flag)
-    flattenned = provenance.directory_already_flattenned(directory)
-    assert flattenned is not None and not flattenned
+    flattened = provenance.directory_already_flattened(directory)
+    assert flattened is not None and not flattened
 
     return date, directory, content1, content2
 
 
 def test_directory_add(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
 ) -> None:
 
     date, directory, content1, content2 = prepare(provenance, archive)
 
     # flatten the directory and check the expected result
     directory_add(provenance, archive, [directory])
     assert provenance.storage.directory_get([directory.id]) == {
         directory.id: DirectoryData(date=date, flat=True)
     }
     assert provenance.storage.relation_get_all(RelationType.CNT_IN_DIR) == {
         content1.id: {
             RelationData(dst=directory.id, path=b"a"),
             RelationData(dst=directory.id, path=b"C/a"),
         },
         content2.id: {RelationData(dst=directory.id, path=b"C/b")},
     }
 
 
 def test_directory_flatten_range(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
 ) -> None:
 
     date, directory, content1, content2 = prepare(provenance, archive)
 
     # flatten the directory and check the expected result
     directory_flatten_range(provenance, archive, directory.id[:-1], directory.id)
 
     assert provenance.storage.directory_get([directory.id]) == {
         directory.id: DirectoryData(date=date, flat=True)
     }
     assert provenance.storage.relation_get_all(RelationType.CNT_IN_DIR) == {
         content1.id: {
             RelationData(dst=directory.id, path=b"a"),
             RelationData(dst=directory.id, path=b"C/a"),
         },
         content2.id: {RelationData(dst=directory.id, path=b"C/b")},
     }
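
# A small standalone sketch of why test_directory_flatten_range above can pass
# directory.id[:-1] as start_id and directory.id as end_id: Python bytes compare
# lexicographically, so a proper prefix sorts strictly before the full id and the
# target directory therefore falls within the scanned range (end-inclusion being
# up to the storage iterator). The sha1 below is the one used in prepare().
full_id = bytes.fromhex("48007c961cc734d1f63886d0413a6dc605e3e2ea")
assert full_id[:-1] < full_id  # the 19-byte prefix sorts before the 20-byte id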
diff --git a/swh/provenance/tests/test_revision_content_layer.py b/swh/provenance/tests/test_revision_content_layer.py
index a3592d0..d566a91 100644
--- a/swh/provenance/tests/test_revision_content_layer.py
+++ b/swh/provenance/tests/test_revision_content_layer.py
@@ -1,482 +1,482 @@
 # Copyright (C) 2021-2022  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import re
 from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple
 
 import pytest
 from typing_extensions import TypedDict
 
 from swh.model.hashutil import hash_to_bytes
 from swh.model.model import Sha1Git
 from swh.provenance.algos.directory import directory_add
 from swh.provenance.algos.revision import revision_add
 from swh.provenance.archive import ArchiveInterface
 from swh.provenance.interface import ProvenanceInterface
 from swh.provenance.model import DirectoryEntry, RevisionEntry
 from swh.provenance.storage.interface import EntityType, RelationType
 from swh.provenance.tests.conftest import (
     fill_storage,
     get_datafile,
     load_repo_data,
     ts2dt,
 )
 
 
 class SynthRelation(TypedDict):
     prefix: Optional[str]
     path: str
     src: Sha1Git
     dst: Sha1Git
     rel_ts: float
 
 
 class SynthRevision(TypedDict):
     sha1: Sha1Git
     date: float
     msg: str
     R_C: List[SynthRelation]
     R_D: List[SynthRelation]
     D_C: List[SynthRelation]
 
 
 def synthetic_revision_content_result(filename: str) -> Iterator[SynthRevision]:
     """Generates dict representations of synthetic revisions found in the synthetic
     file (from the data/ directory) given as argument of the generator.
 
     Generated SynthRevision (typed dict) with the following elements:
 
       "sha1": (Sha1Git) sha1 of the revision,
       "date": (float) timestamp of the revision,
       "msg": (str) commit message of the revision,
       "R_C": (list) new R---C relations added by this revision
       "R_D": (list) new R-D   relations added by this revision
       "D_C": (list) new   D-C relations added by this revision
 
     Each relation above is a SynthRelation typed dict with:
 
       "path": (str) location
       "src": (Sha1Git) sha1 of the source of the relation
       "dst": (Sha1Git) sha1 of the destination of the relation
       "rel_ts": (float) timestamp of the target of the relation
                 (related to the timestamp of the revision)
 
     """
 
     with open(get_datafile(filename), "r") as fobj:
         yield from _parse_synthetic_revision_content_file(fobj)
 
 
 def _parse_synthetic_revision_content_file(
     fobj: Iterable[str],
 ) -> Iterator[SynthRevision]:
     """Read a 'synthetic' file and generate a dict representation of the synthetic
     revision for each revision listed in the synthetic file.
     """
     regs = [
         "(?P<revname>R[0-9]{2,4})?",
         "(?P<reltype>[^| ]*)",
         "([+] )?(?P<path>[^| +]*?)[/]?",
         "(?P<type>[RDC]) (?P<sha1>[0-9a-f]{40})",
         "(?P<ts>-?[0-9]+(.[0-9]+)?)",
     ]
     regex = re.compile("^ *" + r" *[|] *".join(regs) + r" *(#.*)?$")
     current_rev: List[dict] = []
     for m in (regex.match(line) for line in fobj):
         if m:
             d = m.groupdict()
             if d["revname"]:
                 if current_rev:
                     yield _mk_synth_rev(current_rev)
                 current_rev.clear()
             current_rev.append(d)
     if current_rev:
         yield _mk_synth_rev(current_rev)
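
# A self-contained sketch of a line shaped the way the regex in
# _parse_synthetic_revision_content_file expects; the revision name, path, sha1
# and timestamp below are made up for illustration and are not taken from the
# real synthetic data files.
_sketch_regs = [
    "(?P<revname>R[0-9]{2,4})?",
    "(?P<reltype>[^| ]*)",
    "([+] )?(?P<path>[^| +]*?)[/]?",
    "(?P<type>[RDC]) (?P<sha1>[0-9a-f]{40})",
    "(?P<ts>-?[0-9]+([.][0-9]+)?)",
]
_sketch_regex = re.compile("^ *" + r" *[|] *".join(_sketch_regs) + r" *(#.*)?$")
_m = _sketch_regex.match("R01 | R---C | + C/a | C " + "0" * 40 + " | 5.0")
assert _m is not None
assert _m.group("revname") == "R01"
assert _m.group("reltype") == "R---C"
assert _m.group("path") == "C/a"
assert float(_m.group("ts")) == 5.0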
 
 
 def _mk_synth_rev(synth_rev: List[Dict[str, str]]) -> SynthRevision:
     assert synth_rev[0]["type"] == "R"
     rev = SynthRevision(
         sha1=hash_to_bytes(synth_rev[0]["sha1"]),
         date=float(synth_rev[0]["ts"]),
         msg=synth_rev[0]["revname"],
         R_C=[],
         R_D=[],
         D_C=[],
     )
     current_path = None
     # path of the last R-D relation we parsed, used as a prefix for the next
     # D-C relations
 
     for row in synth_rev[1:]:
         if row["reltype"] == "R---C":
             assert row["type"] == "C"
             rev["R_C"].append(
                 SynthRelation(
                     prefix=None,
                     path=row["path"],
                     src=rev["sha1"],
                     dst=hash_to_bytes(row["sha1"]),
                     rel_ts=float(row["ts"]),
                 )
             )
             current_path = None
         elif row["reltype"] == "R-D":
             assert row["type"] == "D"
             rev["R_D"].append(
                 SynthRelation(
                     prefix=None,
                     path=row["path"],
                     src=rev["sha1"],
                     dst=hash_to_bytes(row["sha1"]),
                     rel_ts=float(row["ts"]),
                 )
             )
             current_path = row["path"]
         elif row["reltype"] == "D-C":
             assert row["type"] == "C"
             rev["D_C"].append(
                 SynthRelation(
                     prefix=current_path,
                     path=row["path"],
                     src=rev["R_D"][-1]["dst"],
                     dst=hash_to_bytes(row["sha1"]),
                     rel_ts=float(row["ts"]),
                 )
             )
     return rev
 
 
 @pytest.mark.parametrize(
     "repo, lower, mindepth, flatten",
     (
         ("cmdbts2", True, 1, True),
         ("cmdbts2", True, 1, False),
         ("cmdbts2", False, 1, True),
         ("cmdbts2", False, 1, False),
         ("cmdbts2", True, 2, True),
         ("cmdbts2", True, 2, False),
         ("cmdbts2", False, 2, True),
         ("cmdbts2", False, 2, False),
         ("out-of-order", True, 1, True),
         ("out-of-order", True, 1, False),
     ),
 )
 def test_revision_content_result(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
     repo: str,
     lower: bool,
     mindepth: int,
     flatten: bool,
 ) -> None:
     # read data/README.md for more details on how these datasets are generated
     data = load_repo_data(repo)
     fill_storage(archive.storage, data)
     syntheticfile = get_datafile(
         f"synthetic_{repo}_{'lower' if lower else 'upper'}_{mindepth}.txt"
     )
 
     revisions = {rev["id"]: rev for rev in data["revision"]}
 
     rows: Dict[str, Set[Any]] = {
         "content": set(),
         "content_in_directory": set(),
         "content_in_revision": set(),
         "directory": set(),
         "directory_in_revision": set(),
         "location": set(),
         "revision": set(),
     }
 
     def maybe_path(path: str) -> Optional[bytes]:
         if provenance.storage.with_path():
             return path.encode("utf-8")
         return None
 
     for synth_rev in synthetic_revision_content_result(syntheticfile):
         revision = revisions[synth_rev["sha1"]]
         entry = RevisionEntry(
             id=revision["id"],
             date=ts2dt(revision["date"]),
             root=revision["directory"],
         )
 
         if flatten:
             revision_add(provenance, archive, [entry], lower=lower, mindepth=mindepth)
         else:
             prev_directories = provenance.storage.entity_get_all(EntityType.DIRECTORY)
             revision_add(
                 provenance,
                 archive,
                 [entry],
                 lower=lower,
                 mindepth=mindepth,
                 flatten=False,
             )
             directories = [
                 DirectoryEntry(id=sha1)
                 for sha1 in provenance.storage.entity_get_all(
                     EntityType.DIRECTORY
                 ).difference(prev_directories)
             ]
             for directory in directories:
-                assert not provenance.directory_already_flattenned(directory)
+                assert not provenance.directory_already_flattened(directory)
             directory_add(provenance, archive, directories)
 
         # each "entry" in the synth file is one new revision
         rows["revision"].add(synth_rev["sha1"])
         assert rows["revision"] == provenance.storage.entity_get_all(
             EntityType.REVISION
         ), synth_rev["msg"]
         # check the timestamp of the revision
         rev_ts = synth_rev["date"]
         rev_data = provenance.storage.revision_get([synth_rev["sha1"]])[
             synth_rev["sha1"]
         ]
         assert (
             rev_data.date is not None and rev_ts == rev_data.date.timestamp()
         ), synth_rev["msg"]
 
         # this revision might have added new content objects
         rows["content"] |= set(x["dst"] for x in synth_rev["R_C"])
         rows["content"] |= set(x["dst"] for x in synth_rev["D_C"])
         assert rows["content"] == provenance.storage.entity_get_all(
             EntityType.CONTENT
         ), synth_rev["msg"]
 
         # check for R-C (direct) entries
         # these are added directly in the content_early_in_rev table
         rows["content_in_revision"] |= set(
             (x["dst"], x["src"], maybe_path(x["path"])) for x in synth_rev["R_C"]
         )
         assert rows["content_in_revision"] == {
             (src, rel.dst, rel.path)
             for src, rels in provenance.storage.relation_get_all(
                 RelationType.CNT_EARLY_IN_REV
             ).items()
             for rel in rels
         }, synth_rev["msg"]
         # check timestamps
         for rc in synth_rev["R_C"]:
             assert (
                 rev_ts + rc["rel_ts"]
                 == provenance.storage.content_get([rc["dst"]])[rc["dst"]].timestamp()
             ), synth_rev["msg"]
 
         # check directories
         # each directory stored in the provenance index is an entry
         #      in the "directory" table...
         rows["directory"] |= set(x["dst"] for x in synth_rev["R_D"])
         assert rows["directory"] == provenance.storage.entity_get_all(
             EntityType.DIRECTORY
         ), synth_rev["msg"]
 
         # ... + a number of rows in the "directory_in_rev" table...
         # check for R-D entries
         rows["directory_in_revision"] |= set(
             (x["dst"], x["src"], maybe_path(x["path"])) for x in synth_rev["R_D"]
         )
         assert rows["directory_in_revision"] == {
             (src, rel.dst, rel.path)
             for src, rels in provenance.storage.relation_get_all(
                 RelationType.DIR_IN_REV
             ).items()
             for rel in rels
         }, synth_rev["msg"]
         # check timestamps
         for rd in synth_rev["R_D"]:
             dir_data = provenance.storage.directory_get([rd["dst"]])[rd["dst"]]
             assert rev_ts + rd["rel_ts"] == dir_data.date.timestamp(), synth_rev["msg"]
             assert dir_data.flat, synth_rev["msg"]
 
         # ... + a number of rows in the "content_in_dir" table
         #     for content of the directory.
         # check for D-C entries
         rows["content_in_directory"] |= set(
             (x["dst"], x["src"], maybe_path(x["path"])) for x in synth_rev["D_C"]
         )
         assert rows["content_in_directory"] == {
             (src, rel.dst, rel.path)
             for src, rels in provenance.storage.relation_get_all(
                 RelationType.CNT_IN_DIR
             ).items()
             for rel in rels
         }, synth_rev["msg"]
         # check timestamps
         for dc in synth_rev["D_C"]:
             assert (
                 rev_ts + dc["rel_ts"]
                 == provenance.storage.content_get([dc["dst"]])[dc["dst"]].timestamp()
             ), synth_rev["msg"]
 
         if provenance.storage.with_path():
             # check for location entries
             rows["location"] |= set(x["path"].encode() for x in synth_rev["R_C"])
             rows["location"] |= set(x["path"].encode() for x in synth_rev["D_C"])
             rows["location"] |= set(x["path"].encode() for x in synth_rev["R_D"])
             assert rows["location"] == set(
                 provenance.storage.location_get_all().values()
             ), synth_rev["msg"]
 
 
 @pytest.mark.parametrize(
     "repo, lower, mindepth",
     (
         ("cmdbts2", True, 1),
         ("cmdbts2", False, 1),
         ("cmdbts2", True, 2),
         ("cmdbts2", False, 2),
         ("out-of-order", True, 1),
     ),
 )
 @pytest.mark.parametrize("batch", (True, False))
 def test_provenance_heuristics_content_find_all(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
     repo: str,
     lower: bool,
     mindepth: int,
     batch: bool,
 ) -> None:
     # read data/README.md for more details on how these datasets are generated
     data = load_repo_data(repo)
     fill_storage(archive.storage, data)
     revisions = [
         RevisionEntry(
             id=revision["id"],
             date=ts2dt(revision["date"]),
             root=revision["directory"],
         )
         for revision in data["revision"]
     ]
 
     def maybe_path(path: str) -> str:
         if provenance.storage.with_path():
             return path
         return ""
 
     if batch:
         revision_add(provenance, archive, revisions, lower=lower, mindepth=mindepth)
     else:
         for revision in revisions:
             revision_add(
                 provenance, archive, [revision], lower=lower, mindepth=mindepth
             )
 
     syntheticfile = get_datafile(
         f"synthetic_{repo}_{'lower' if lower else 'upper'}_{mindepth}.txt"
     )
     expected_occurrences: Dict[str, List[Tuple[str, float, Optional[str], str]]] = {}
     for synth_rev in synthetic_revision_content_result(syntheticfile):
         rev_id = synth_rev["sha1"].hex()
         rev_ts = synth_rev["date"]
 
         for rc in synth_rev["R_C"]:
             expected_occurrences.setdefault(rc["dst"].hex(), []).append(
                 (rev_id, rev_ts, None, maybe_path(rc["path"]))
             )
         for dc in synth_rev["D_C"]:
             assert dc["prefix"] is not None  # to please mypy
             expected_occurrences.setdefault(dc["dst"].hex(), []).append(
                 (rev_id, rev_ts, None, maybe_path(dc["prefix"] + "/" + dc["path"]))
             )
 
     for content_id, results in expected_occurrences.items():
         expected = [(content_id, *result) for result in results]
         db_occurrences = [
             (
                 occur.content.hex(),
                 occur.revision.hex(),
                 occur.date.timestamp(),
                 occur.origin,
                 occur.path.decode(),
             )
             for occur in provenance.content_find_all(hash_to_bytes(content_id))
         ]
         if provenance.storage.with_path():
             # this is not true if the db stores no path, because the same content
             # appearing several times in a given revision may be reported
             # only once by content_find_all()
             assert len(db_occurrences) == len(expected)
         assert set(db_occurrences) == set(expected)
 
 
 @pytest.mark.parametrize(
     "repo, lower, mindepth",
     (
         ("cmdbts2", True, 1),
         ("cmdbts2", False, 1),
         ("cmdbts2", True, 2),
         ("cmdbts2", False, 2),
         ("out-of-order", True, 1),
     ),
 )
 @pytest.mark.parametrize("batch", (True, False))
 def test_provenance_heuristics_content_find_first(
     provenance: ProvenanceInterface,
     archive: ArchiveInterface,
     repo: str,
     lower: bool,
     mindepth: int,
     batch: bool,
 ) -> None:
     # read data/README.md for more details on how these datasets are generated
     data = load_repo_data(repo)
     fill_storage(archive.storage, data)
     revisions = [
         RevisionEntry(
             id=revision["id"],
             date=ts2dt(revision["date"]),
             root=revision["directory"],
         )
         for revision in data["revision"]
     ]
 
     if batch:
         revision_add(provenance, archive, revisions, lower=lower, mindepth=mindepth)
     else:
         for revision in revisions:
             revision_add(
                 provenance, archive, [revision], lower=lower, mindepth=mindepth
             )
 
     syntheticfile = get_datafile(
         f"synthetic_{repo}_{'lower' if lower else 'upper'}_{mindepth}.txt"
     )
     expected_first: Dict[str, Tuple[str, float, List[str]]] = {}
     # dict mapping each blob_id to a tuple (rev_id, rev_ts, [path, ...]); the
     # paths form a list because a content can be added at several places in a
     # single revision, in which case content_find_first() returns one of those
     # paths, with no guarantee about which one.
     for synth_rev in synthetic_revision_content_result(syntheticfile):
         rev_id = synth_rev["sha1"].hex()
         rev_ts = synth_rev["date"]
 
         for rc in synth_rev["R_C"]:
             sha1 = rc["dst"].hex()
             if sha1 not in expected_first:
                 assert rc["rel_ts"] == 0
                 expected_first[sha1] = (rev_id, rev_ts, [rc["path"]])
             else:
                 if rev_ts == expected_first[sha1][1]:
                     expected_first[sha1][2].append(rc["path"])
                 elif rev_ts < expected_first[sha1][1]:
                     expected_first[sha1] = (rev_id, rev_ts, [rc["path"]])
 
         for dc in synth_rev["D_C"]:
             sha1 = dc["dst"].hex()
             assert sha1 in expected_first
             # nothing to do here, this content cannot be a "first seen file"
 
     for content_id, (rev_id, ts, paths) in expected_first.items():
         occur = provenance.content_find_first(hash_to_bytes(content_id))
         assert occur is not None
         assert occur.content.hex() == content_id
         assert occur.revision.hex() == rev_id
         assert occur.date.timestamp() == ts
         assert occur.origin is None
         if provenance.storage.with_path():
             assert occur.path.decode() in paths
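
# A minimal standalone sketch of the bookkeeping used above for expected_first:
# keep the occurrence with the smallest timestamp and accumulate the paths of
# occurrences that share that timestamp. The helper name and sample values are
# illustrative only.
def _record_first_occurrence(
    expected: Dict[str, Tuple[str, float, List[str]]],
    blob_id: str,
    rev_id: str,
    rev_ts: float,
    path: str,
) -> None:
    if blob_id not in expected or rev_ts < expected[blob_id][1]:
        expected[blob_id] = (rev_id, rev_ts, [path])
    elif rev_ts == expected[blob_id][1]:
        expected[blob_id][2].append(path)


_expected: Dict[str, Tuple[str, float, List[str]]] = {}
_record_first_occurrence(_expected, "aa", "r2", 20.0, "x")
_record_first_occurrence(_expected, "aa", "r1", 10.0, "y")  # earlier revision wins
_record_first_occurrence(_expected, "aa", "r1", 10.0, "z")  # same timestamp: extra path
assert _expected["aa"] == ("r1", 10.0, ["y", "z"])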